Dataset columns:
  id — string (1–265 chars)
  text — string (6–5.19M chars)
  dataset_id — string (7 distinct values)
/CsuSort-1.0.15.tar.gz/CsuSort-1.0.15/Sort/yolo3/model.py
from functools import wraps

import numpy as np
import tensorflow as tf
from keras import backend as K
from keras.layers import Conv2D, Add, ZeroPadding2D, UpSampling2D, Concatenate
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.regularizers import l2

from Sort.yolo3.utils import compose


@wraps(Conv2D)
def DarknetConv2D(*args, **kwargs):
    """Wrapper to set Darknet parameters for Convolution2D."""
    darknet_conv_kwargs = {'kernel_regularizer': l2(5e-4)}
    darknet_conv_kwargs['padding'] = 'valid' if kwargs.get('strides') == (2, 2) else 'same'
    darknet_conv_kwargs.update(kwargs)
    return Conv2D(*args, **darknet_conv_kwargs)


def DarknetConv2D_BN_Leaky(*args, **kwargs):
    """Darknet Convolution2D followed by BatchNormalization and LeakyReLU."""
    no_bias_kwargs = {'use_bias': False}
    no_bias_kwargs.update(kwargs)
    return compose(
        DarknetConv2D(*args, **no_bias_kwargs),
        BatchNormalization(),
        LeakyReLU(alpha=0.1))


def resblock_body(x, num_filters, num_blocks):
    '''A series of resblocks starting with a downsampling Convolution2D'''
    # Darknet uses left and top padding instead of 'same' mode
    x = ZeroPadding2D(((1, 0), (1, 0)))(x)
    x = DarknetConv2D_BN_Leaky(num_filters, (3, 3), strides=(2, 2))(x)
    for i in range(num_blocks):
        y = compose(
                DarknetConv2D_BN_Leaky(num_filters//2, (1, 1)),
                DarknetConv2D_BN_Leaky(num_filters, (3, 3)))(x)
        x = Add()([x, y])
    return x


def darknet_body(x):
    '''Darknet body having 52 Convolution2D layers'''
    x = DarknetConv2D_BN_Leaky(32, (3, 3))(x)
    x = resblock_body(x, 64, 1)
    x = resblock_body(x, 128, 2)
    x = resblock_body(x, 256, 8)
    x = resblock_body(x, 512, 8)
    x = resblock_body(x, 1024, 4)
    return x


def make_last_layers(x, num_filters, out_filters):
    '''6 Conv2D_BN_Leaky layers followed by a Conv2D_linear layer'''
    x = compose(
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)),
            DarknetConv2D_BN_Leaky(num_filters*2, (3, 3)),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)),
            DarknetConv2D_BN_Leaky(num_filters*2, (3, 3)),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)))(x)
    y = compose(
            DarknetConv2D_BN_Leaky(num_filters*2, (3, 3)),
            DarknetConv2D(out_filters, (1, 1)))(x)
    return x, y


def yolo_body(inputs, num_anchors, num_classes):
    """Create YOLO_V3 model CNN body in Keras."""
    darknet = Model(inputs, darknet_body(inputs))
    x, y1 = make_last_layers(darknet.output, 512, num_anchors*(num_classes+5))

    x = compose(
            DarknetConv2D_BN_Leaky(256, (1, 1)),
            UpSampling2D(2))(x)
    x = Concatenate()([x, darknet.layers[152].output])
    x, y2 = make_last_layers(x, 256, num_anchors*(num_classes+5))

    x = compose(
            DarknetConv2D_BN_Leaky(128, (1, 1)),
            UpSampling2D(2))(x)
    x = Concatenate()([x, darknet.layers[92].output])
    x, y3 = make_last_layers(x, 128, num_anchors*(num_classes+5))

    return Model(inputs, [y1, y2, y3])


def yolo_head(feats, anchors, num_classes, input_shape):
    """Convert final layer features to bounding box parameters."""
    num_anchors = len(anchors)
    # Reshape to batch, height, width, num_anchors, box_params.
    anchors_tensor = K.reshape(K.constant(anchors), [1, 1, 1, num_anchors, 2])

    grid_shape = K.shape(feats)[1:3]  # height, width
    grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]),
                    [1, grid_shape[1], 1, 1])
    grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]),
                    [grid_shape[0], 1, 1, 1])
    grid = K.concatenate([grid_x, grid_y])
    grid = K.cast(grid, K.dtype(feats))

    feats = K.reshape(
        feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])

    box_xy = K.sigmoid(feats[..., :2])
    box_wh = K.exp(feats[..., 2:4])
    box_confidence = K.sigmoid(feats[..., 4:5])
    box_class_probs = K.sigmoid(feats[..., 5:])

    # Adjust predictions to each spatial grid point and anchor size.
    box_xy = (box_xy + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
    box_wh = box_wh * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))

    return box_xy, box_wh, box_confidence, box_class_probs


def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape):
    '''Get corrected boxes'''
    box_yx = box_xy[..., ::-1]
    box_hw = box_wh[..., ::-1]
    input_shape = K.cast(input_shape, K.dtype(box_yx))
    image_shape = K.cast(image_shape, K.dtype(box_yx))
    new_shape = K.round(image_shape * K.min(input_shape/image_shape))
    offset = (input_shape-new_shape)/2./input_shape
    scale = input_shape/new_shape
    box_yx = (box_yx - offset) * scale
    box_hw *= scale

    box_mins = box_yx - (box_hw / 2.)
    box_maxes = box_yx + (box_hw / 2.)
    boxes = K.concatenate([
        box_mins[..., 0:1],   # y_min
        box_mins[..., 1:2],   # x_min
        box_maxes[..., 0:1],  # y_max
        box_maxes[..., 1:2]   # x_max
    ])

    # Scale boxes back to original image shape.
    boxes *= K.concatenate([image_shape, image_shape])
    return boxes


def yolo_boxes_and_scores(feats, anchors, num_classes, input_shape, image_shape):
    '''Process Conv layer output'''
    box_xy, box_wh, box_confidence, box_class_probs = yolo_head(
        feats, anchors, num_classes, input_shape)
    boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape)
    boxes = K.reshape(boxes, [-1, 4])
    box_scores = box_confidence * box_class_probs
    box_scores = K.reshape(box_scores, [-1, num_classes])
    return boxes, box_scores


def yolo_eval(yolo_outputs,
              anchors,
              num_classes,
              image_shape,
              max_boxes=20,
              score_threshold=.6,
              iou_threshold=.5):
    """Evaluate YOLO model on given input and return filtered boxes."""
    anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
    input_shape = K.shape(yolo_outputs[0])[1:3] * 32
    boxes = []
    box_scores = []
    for l in range(3):
        _boxes, _box_scores = yolo_boxes_and_scores(
            yolo_outputs[l], anchors[anchor_mask[l]], num_classes,
            input_shape, image_shape)
        boxes.append(_boxes)
        box_scores.append(_box_scores)
    boxes = K.concatenate(boxes, axis=0)
    box_scores = K.concatenate(box_scores, axis=0)

    mask = box_scores >= score_threshold
    max_boxes_tensor = K.constant(max_boxes, dtype='int32')
    boxes_ = []
    scores_ = []
    classes_ = []
    for c in range(num_classes):
        # TODO: use keras backend instead of tf.
        class_boxes = tf.boolean_mask(boxes, mask[:, c])
        class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])
        nms_index = tf.image.non_max_suppression(
            class_boxes, class_box_scores, max_boxes_tensor,
            iou_threshold=iou_threshold)
        class_boxes = K.gather(class_boxes, nms_index)
        class_box_scores = K.gather(class_box_scores, nms_index)
        classes = K.ones_like(class_box_scores, 'int32') * c
        boxes_.append(class_boxes)
        scores_.append(class_box_scores)
        classes_.append(classes)
    boxes_ = K.concatenate(boxes_, axis=0)
    scores_ = K.concatenate(scores_, axis=0)
    classes_ = K.concatenate(classes_, axis=0)

    return boxes_, scores_, classes_


def preprocess_true_boxes(true_boxes, input_shape, anchors, num_classes):
    '''Preprocess true boxes to training input format

    Parameters
    ----------
    true_boxes: array, shape=(m, T, 5)
        Absolute x_min, y_min, x_max, y_max, class_code relative to input_shape.
    input_shape: array-like, hw, multiples of 32
    anchors: array, shape=(N, 2), wh
    num_classes: integer

    Returns
    -------
    y_true: list of array, shape like yolo_outputs, xywh are relative values

    '''
    anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]

    true_boxes = np.array(true_boxes, dtype='float32')
    input_shape = np.array(input_shape, dtype='int32')
    boxes_xy = (true_boxes[..., 0:2] + true_boxes[..., 2:4]) // 2
    boxes_wh = true_boxes[..., 2:4] - true_boxes[..., 0:2]
    true_boxes[..., 0:2] = boxes_xy/input_shape[::-1]
    true_boxes[..., 2:4] = boxes_wh/input_shape[::-1]

    m = true_boxes.shape[0]
    grid_shapes = [input_shape//{0: 32, 1: 16, 2: 8}[l] for l in range(3)]
    y_true = [np.zeros((m, grid_shapes[l][0], grid_shapes[l][1],
                        len(anchor_mask[l]), 5+num_classes),
                       dtype='float32') for l in range(3)]

    # Expand dim to apply broadcasting.
    anchors = np.expand_dims(anchors, 0)
    anchor_maxes = anchors / 2.
    anchor_mins = -anchor_maxes
    valid_mask = boxes_wh[..., 0] > 0

    for b in range(m):
        # Discard zero rows.
        wh = boxes_wh[b, valid_mask[b]]
        # Expand dim to apply broadcasting.
        wh = np.expand_dims(wh, -2)
        box_maxes = wh / 2.
        box_mins = -box_maxes

        intersect_mins = np.maximum(box_mins, anchor_mins)
        intersect_maxes = np.minimum(box_maxes, anchor_maxes)
        intersect_wh = np.maximum(intersect_maxes - intersect_mins, 0.)
        intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
        box_area = wh[..., 0] * wh[..., 1]
        anchor_area = anchors[..., 0] * anchors[..., 1]
        iou = intersect_area / (box_area + anchor_area - intersect_area)

        # Find best anchor for each true box
        best_anchor = np.argmax(iou, axis=-1)

        for t, n in enumerate(best_anchor):
            for l in range(3):
                if n in anchor_mask[l]:
                    i = np.floor(true_boxes[b, t, 0]*grid_shapes[l][1]).astype('int32')
                    j = np.floor(true_boxes[b, t, 1]*grid_shapes[l][0]).astype('int32')
                    n = anchor_mask[l].index(n)
                    c = true_boxes[b, t, 4].astype('int32')
                    y_true[l][b, j, i, n, 0:4] = true_boxes[b, t, 0:4]
                    y_true[l][b, j, i, n, 4] = 1
                    y_true[l][b, j, i, n, 5+c] = 1
                    break

    return y_true


def box_iou(b1, b2):
    '''Return iou tensor

    Parameters
    ----------
    b1: tensor, shape=(i1,...,iN, 4), xywh
    b2: tensor, shape=(j, 4), xywh

    Returns
    -------
    iou: tensor, shape=(i1,...,iN, j)

    '''
    # Expand dim to apply broadcasting.
    b1 = K.expand_dims(b1, -2)
    b1_xy = b1[..., :2]
    b1_wh = b1[..., 2:4]
    b1_wh_half = b1_wh/2.
    b1_mins = b1_xy - b1_wh_half
    b1_maxes = b1_xy + b1_wh_half

    # Expand dim to apply broadcasting.
    b2 = K.expand_dims(b2, 0)
    b2_xy = b2[..., :2]
    b2_wh = b2[..., 2:4]
    b2_wh_half = b2_wh/2.
    b2_mins = b2_xy - b2_wh_half
    b2_maxes = b2_xy + b2_wh_half

    intersect_mins = K.maximum(b1_mins, b2_mins)
    intersect_maxes = K.minimum(b1_maxes, b2_maxes)
    intersect_wh = K.maximum(intersect_maxes - intersect_mins, 0.)
    intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
    b1_area = b1_wh[..., 0] * b1_wh[..., 1]
    b2_area = b2_wh[..., 0] * b2_wh[..., 1]
    iou = intersect_area / (b1_area + b2_area - intersect_area)

    return iou


def yolo_loss(args, anchors, num_classes, ignore_thresh=.5):
    '''Return yolo_loss tensor

    Parameters
    ----------
    yolo_outputs: list of tensor, the output of yolo_body
    y_true: list of array, the output of preprocess_true_boxes
    anchors: array, shape=(T, 2), wh
    num_classes: integer
    ignore_thresh: float, the iou threshold whether to ignore object confidence loss

    Returns
    -------
    loss: tensor, shape=(1,)

    '''
    yolo_outputs = args[:3]
    y_true = args[3:]
    anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
    input_shape = K.cast(K.shape(yolo_outputs[0])[1:3] * 32, K.dtype(y_true[0]))
    grid_shapes = [K.cast(K.shape(yolo_outputs[l])[1:3], K.dtype(y_true[0])) for l in range(3)]
    loss = 0
    m = K.shape(yolo_outputs[0])[0]

    for l in range(3):
        object_mask = y_true[l][..., 4:5]
        true_class_probs = y_true[l][..., 5:]

        pred_xy, pred_wh, pred_confidence, pred_class_probs = yolo_head(
            yolo_outputs[l], anchors[anchor_mask[l]], num_classes, input_shape)
        pred_box = K.concatenate([pred_xy, pred_wh])

        # Darknet box loss.
        xy_delta = (y_true[l][..., :2]-pred_xy)*grid_shapes[l][::-1]
        wh_delta = K.log(y_true[l][..., 2:4]) - K.log(pred_wh)
        # Avoid log(0)=-inf.
        wh_delta = K.switch(object_mask, wh_delta, K.zeros_like(wh_delta))
        box_delta = K.concatenate([xy_delta, wh_delta], axis=-1)
        box_delta_scale = 2 - y_true[l][..., 2:3]*y_true[l][..., 3:4]

        # Find ignore mask, iterate over each of batch.
        ignore_mask = tf.TensorArray(K.dtype(y_true[0]), size=1, dynamic_size=True)
        object_mask_bool = K.cast(object_mask, 'bool')

        def loop_body(b, ignore_mask):
            true_box = tf.boolean_mask(y_true[l][b, ..., 0:4], object_mask_bool[b, ..., 0])
            iou = box_iou(pred_box[b], true_box)
            best_iou = K.max(iou, axis=-1)
            ignore_mask = ignore_mask.write(b, K.cast(best_iou < ignore_thresh, K.dtype(true_box)))
            return b+1, ignore_mask
        _, ignore_mask = K.control_flow_ops.while_loop(lambda b, *args: b < m, loop_body, [0, ignore_mask])
        ignore_mask = ignore_mask.stack()
        ignore_mask = K.expand_dims(ignore_mask, -1)

        box_loss = object_mask * K.square(box_delta*box_delta_scale)
        confidence_loss = object_mask * K.square(1-pred_confidence) + \
            (1-object_mask) * K.square(0-pred_confidence) * ignore_mask
        class_loss = object_mask * K.square(true_class_probs-pred_class_probs)
        loss += K.sum(box_loss) + K.sum(confidence_loss) + K.sum(class_loss)
    return loss / K.cast(m, K.dtype(loss))
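
A minimal usage sketch for the module above (not part of the package): the anchor values and the 416×416 input size are the conventional YOLOv3 COCO settings, assumed here for illustration.

# Illustrative usage sketch; anchors and input size are assumptions, not
# values shipped with this package.
import numpy as np
from keras.layers import Input

num_classes = 80
anchors = np.array([(10, 13), (16, 30), (33, 23), (30, 61), (62, 45),
                    (59, 119), (116, 90), (156, 198), (373, 326)])

inputs = Input(shape=(416, 416, 3))
# Three anchors per detection scale, hence len(anchors) // 3.
model = yolo_body(inputs, num_anchors=len(anchors) // 3, num_classes=num_classes)
model.summary()  # ends in three output tensors, one per detection scale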
PypiClean
/FanFicFare-4.27.0.tar.gz/FanFicFare-4.27.0/fanficfare/fetchers/base_fetcher.py
# Copyright 2022 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import absolute_import

import logging
logger = logging.getLogger(__name__)

# py2 vs py3 transition
from ..six.moves.urllib.parse import quote_plus
from ..six.moves.http_cookiejar import LWPCookieJar, MozillaCookieJar
from ..six import text_type as unicode
from ..six import ensure_binary


class FetcherResponse(object):
    def __init__(self, content, redirecturl=None, fromcache=False, json=None):
        self.content = content
        self.redirecturl = redirecturl
        self.fromcache = fromcache
        self.json = json


class Fetcher(object):
    def __init__(self, getConfig_fn, getConfigList_fn):
        self.getConfig = getConfig_fn
        self.getConfigList = getConfigList_fn
        self.cookiejar = None

    def get_cookiejar(self, filename=None, mozilla=False):
        if self.cookiejar is None:
            if mozilla:
                ParentCookieJar = MozillaCookieJar
            else:
                ParentCookieJar = LWPCookieJar

            class BasicCookieJar(ParentCookieJar, object):
                def __init__(self, *args, **kargs):
                    super(BasicCookieJar, self).__init__(*args, **kargs)
                    self.autosave = False
                    # self.filename from parent(s)

                ## used by CLI --save-cache dev debugging feature
                def set_autosave(self, autosave=False, filename=None):
                    self.autosave = autosave
                    self.filename = filename

                def load_cookiejar(self, filename=None):
                    self.load(self.filename or filename,
                              ignore_discard=True,
                              ignore_expires=True)

                def save_cookiejar(self, filename=None):
                    self.save(filename or self.filename,
                              ignore_discard=True,
                              ignore_expires=True)

            self.cookiejar = BasicCookieJar(filename=filename)
            if filename:
                try:
                    self.cookiejar.load(ignore_discard=True, ignore_expires=True)
                except:
                    logger.debug("Failed to load cookiejar(%s), going on without." % filename)
        return self.cookiejar

    def set_cookiejar(self, cookiejar):
        self.cookiejar = cookiejar

    def make_headers(self, url, referer=None):
        headers = {}
        headers['User-Agent'] = self.getConfig('user_agent')
        if referer:
            headers['Referer'] = referer
        # if "xf2test" in url:
        #     import base64
        #     base64string = base64.encodestring(b"sbreview2019:Fs2PwuVE9").replace(b'\n', b'')
        #     headers['Authorization'] = "Basic %s" % base64string
        #     logger.debug("http login for SB xf2test")
        return headers

    def request(self, *args, **kargs):
        '''Returns a FetcherResponse regardless of mechanism'''
        raise NotImplementedError()

    def do_request(self, method, url, parameters=None, referer=None, usecache=True):
        # logger.debug("fetcher do_request")
        # logger.debug(self.get_cookiejar())
        headers = self.make_headers(url, referer=referer)
        fetchresp = self.request(method, url,
                                 headers=headers,
                                 parameters=parameters)
        data = fetchresp.content
        if self.get_cookiejar().autosave and self.get_cookiejar().filename:
            self.get_cookiejar().save_cookiejar()
        return fetchresp

    def condition_url(self, url):
        if not url.startswith('file:'):  # file fetches fail on + for space
            url = quote_plus(ensure_binary(url), safe=';/?:@&=+$,%&#')
        if self.getConfig('force_https'):  ## For developer testing only.
            url = url.replace("http:", "https:")
        return url

    def post_request(self, url, parameters=None, usecache=True):
        fetchresp = self.do_request('POST',
                                    self.condition_url(url),
                                    parameters=parameters,
                                    usecache=usecache)
        return fetchresp.content

    def get_request_redirected(self, url, referer=None, usecache=True):
        fetchresp = self.do_request('GET',
                                    self.condition_url(url),
                                    referer=referer,
                                    usecache=usecache)
        return (fetchresp.content, fetchresp.redirecturl)
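
A minimal sketch (illustrative, not part of FanFicFare) of how a concrete fetcher might plug into the abstract request() hook; the use of the requests library and the config stub are assumptions.

# Hypothetical subclass sketch: routes the abstract request() hook above
# through the `requests` library. The config stub is an assumption.
import requests

class RequestsFetcher(Fetcher):
    def request(self, method, url, headers=None, parameters=None):
        resp = requests.request(method, url, headers=headers, data=parameters)
        return FetcherResponse(resp.content, redirecturl=resp.url)

fetcher = RequestsFetcher(lambda key: {'user_agent': 'FFF-test'}.get(key),
                          lambda key: [])
content, final_url = fetcher.get_request_redirected('https://example.com/')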
PypiClean
/DTSR-0.2.0.tar.gz/DTSR-0.2.0/dtsr/bin/get_table_betas.py
import os
import argparse

from dtsr.config import Config
from dtsr.util import filter_models


def print_table(beta_summaries, names, beta_names):
    table_str = '''
\\begin{table}
\\begin{tabular}{r|%s}
& %s \\\\
& %s \\\\
\\hline
''' % (
        '|'.join(['l'*3 for x in names]),
        ' & '.join(['\\multicolumn{3}{c}{%s}' % x for x in names]),
        ' & '.join(['Mean & 2.5\\% & 97.5\\%' for x in names])
    )

    for x in sorted(list(beta_names)):
        row = x.split('-')[0]
        for m in names:
            data = beta_summaries[m].get(x, None)
            if data is not None:
                row += ' & ' + ' & '.join([
                    '%.2e' % beta_summaries[m][x]['mean'],
                    '%.2e' % beta_summaries[m][x]['lower'],
                    '%.2e' % beta_summaries[m][x]['upper']
                ])
            else:
                row += ' & ---' * 3
        table_str += row + '\\\\\n'

    table_str += '\\end{tabular}\n\\end{table}'

    print(table_str)


if __name__ == '__main__':
    argparser = argparse.ArgumentParser('''
    Generates a LaTeX table of beta summaries for one or more DTSR models
    ''')
    argparser.add_argument('config', help='Path to config file defining models')
    argparser.add_argument('-m', '--models', nargs='+', default=[],
                           help='List of model names from which to extract betas. Regex permitted. If unspecified, extracts betas from all DTSR models.')
    argparser.add_argument('-n', '--names', nargs='*', default=[],
                           help='Model names to print in table (must be omitted or same length as --models).')
    args = argparser.parse_args()

    p = Config(args.config)

    if not p.use_gpu_if_available:
        os.environ['CUDA_VISIBLE_DEVICES'] = '-1'

    models = filter_models(p.model_list, args.models, dtsr_only=True)

    if len(args.names) == 0:
        names = p.model_list[:]
    else:
        assert len(args.names) == len(models), 'Length mismatch between number of models and number of model names'
        names = args.names

    beta_summaries = {}
    beta_names = set()

    for i in range(len(models)):
        m = models[i]
        name = names[i]
        beta_summaries[name] = {}
        with open(p.outdir + '/' + m + '/summary.txt', 'r') as f:
            l = f.readline()
            while l and not l.startswith('Posterior integral summaries by predictor'):
                l = f.readline()
            f.readline()
            l = f.readline()
            while l and len(l.strip()) > 0:
                row = l.strip().split()
                assert len(row) == 4, 'Ill-formed row in effect table: "%s"' % l.strip()
                beta_names.add(row[0])
                beta_summaries[name][row[0]] = {
                    'mean': float(row[1]),
                    'lower': float(row[2]),
                    'upper': float(row[3])
                }
                l = f.readline()

    print_table(beta_summaries, names, beta_names)
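
An illustrative direct call of print_table with made-up values, showing the expected data shape:

# Toy data (values are made up), one Mean/2.5%/97.5% column group per model:
summaries = {'m1': {'rate-x': {'mean': 1.2e-3, 'lower': 5.0e-4, 'upper': 2.1e-3}}}
print_table(summaries, names=['m1'], beta_names={'rate-x'})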
PypiClean
/EOxServer-1.2.12-py3-none-any.whl/eoxserver/services/ows/wps/exceptions.py
class OWS10Exception(Exception):
    """ Base OWS 1.0 exception of the WPS 1.0.0 exceptions """
    http_status_code = 400

    def __init__(self, code, locator, message):
        self.code = code
        self.locator = locator
        Exception.__init__(self, message)

#-------------------------------------------------------------------------------
# All possible WPS 1.0.0 exceptions. For the list of OWS exceptions used by WPS
# see OGC 05-007r7 Table 38 and Table 62.

class NoApplicableCode(OWS10Exception):
    http_status_code = 500
    def __init__(self, message, locator=None):
        OWS10Exception.__init__(self, "NoApplicableCode", locator, message)

class MissingParameterValue(OWS10Exception):
    def __init__(self, message, locator):
        OWS10Exception.__init__(self, "MissingParameterValue", locator, message)

class InvalidParameterValue(OWS10Exception):
    def __init__(self, message, locator):
        OWS10Exception.__init__(self, "InvalidParameterValue", locator, message)

class NotEnoughStorage(OWS10Exception):
    http_status_code = 507
    def __init__(self, message):
        OWS10Exception.__init__(self, "NotEnoughStorage", None, message)

class ServerBusy(OWS10Exception):
    http_status_code = 503
    def __init__(self, message):
        OWS10Exception.__init__(self, "ServerBusy", None, message)

class FileSizeExceeded(OWS10Exception):
    def __init__(self, message, locator):
        OWS10Exception.__init__(self, "FileSizeExceeded", locator, message)

class StorageNotSupported(OWS10Exception):
    def __init__(self, message):
        OWS10Exception.__init__(self, "StorageNotSupported", None, message)

class VersionNegotiationFailed(OWS10Exception):
    def __init__(self, message, locator):
        OWS10Exception.__init__(self, "VersionNegotiationFailed", locator, message)

#-------------------------------------------------------------------------------
# Derived specific exceptions.
#
# Note that WPS 1.0.0 allows use of a "vendor specific exception code" as the
# locator for the default "NoApplicableCode" exceptions.

class NoSuchProcessError(InvalidParameterValue):
    def __init__(self, identifier):
        msg = "No such process: %s" % identifier
        InvalidParameterValue.__init__(self, msg, "process identifier")

class InvalidOutputError(InvalidParameterValue):
    def __init__(self, output_id):
        message = "Invalid output '%s'!" % (output_id)
        InvalidParameterValue.__init__(self, message, output_id)

class InvalidOutputValueError(NoApplicableCode):
    def __init__(self, output_id, message=""):
        message = "Invalid output value of '%s'! %s" % (output_id, message)
        NoApplicableCode.__init__(self, message, output_id)

class InvalidOutputDefError(InvalidParameterValue):
    def __init__(self, output_id, message=""):
        message = "Invalid output definition of '%s'! %s" % (output_id, message)
        InvalidParameterValue.__init__(self, message, output_id)

class InvalidInputError(InvalidParameterValue):
    def __init__(self, input_id):
        message = "Invalid input '%s'!" % (input_id)
        InvalidParameterValue.__init__(self, message, input_id)

class InvalidInputValueError(InvalidParameterValue):
    def __init__(self, input_id, message=""):
        message = "Invalid input value of '%s'! %s" % (input_id, message)
        InvalidParameterValue.__init__(self, message, input_id)

class InvalidInputReferenceError(InvalidParameterValue):
    def __init__(self, input_id, message=""):
        message = "Invalid input '%s' reference! %s" % (input_id, message)
        InvalidParameterValue.__init__(self, message, input_id)

class MissingRequiredInputError(InvalidParameterValue):
    def __init__(self, input_id):
        message = "Missing required input '%s'!" % (input_id)
        InvalidParameterValue.__init__(self, message, input_id)

class ExecuteError(NoApplicableCode):
    def __init__(self, message="", locator="process.execute()"):
        NoApplicableCode.__init__(self, message, locator)

# This is defined in OGC 06-121r9 (referenced by WPS 2.0); not sure if it is
# also applicable in WPS 1.0.
class OperationNotSupportedError(OWS10Exception):
    def __init__(self, message, locator=None):
        OWS10Exception.__init__(self, code="OperationNotSupported", locator=locator, message=message)
PypiClean
/BO4ML-0.3.1.tar.gz/BO4ML-0.3.1/Component/mHyperopt/hyperopt.py
from . import hyperopt, tpe, space_eval, rand
from functools import partial
import time


class HyperOpt(object):
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)
        self.def_max_fails = 10
        self.fmin_todict = None
        self.isInitMode = True

    def run(self, round_id=0):
        self.isError = False
        _org_max_eval = self.fix_max_evals
        _this_max_eval = self.max_evals
        try:
            ieval_count = len([x['loss'] for x in self.trials.results if x['status'] == 'ok'])
            eval_count = len([x['loss'] for x in self.trials.results])
        except:
            ieval_count, eval_count = 0, 0
        _stattime = time.time()
        if self.algo_str == 'tpe':
            self.isInitMode = False if ieval_count >= self.n_init_sample else True
            if self.isInitMode == True:
                # print('****INIT >>>>>')
                # _initStep = self.n_init_sample + ((eval_count-ieval_count) if self.isInitMode else 0)
                _errorCount = 0
                while (_org_max_eval > ieval_count and (self.timeout > 0 if self.timeout != None else True)):
                    # _initStep = self.n_init_sample + (_org_max_eval - _this_max_eval)
                    _algo = rand.suggest
                    _initMax_evals = eval_count + (_org_max_eval - ieval_count)
                    try:
                        self.fmin = hyperopt.fmin(fn=self.obj_func, space=self.search_space,
                                                  algo=_algo, max_evals=_initMax_evals,
                                                  trials=self.trials, rstate=self.rstate,
                                                  pass_expr_memo_ctrl=self.pass_expr_memo_ctrl,
                                                  verbose=self.verbose,
                                                  return_argmin=self.return_argmin,
                                                  max_queue_len=self.max_queue_len,
                                                  timeout=self.timeout,
                                                  show_progressbar=self.show_progressbar)
                    except Exception as e:
                        print(e)
                        _errorCount += 1
                        pass
                    ieval_count = len([x['loss'] for x in self.trials.results if x['status'] == 'ok'])
                    eval_count = len([x['loss'] for x in self.trials.results])
                    # _initStep = self.n_init_sample + (_org_max_eval - _this_max_eval)
                    _lastresults = [x for x in self.trials.results[-self.def_max_fails:] if x["status"] == "ok"]
                    self.isInitMode = False if ieval_count >= self.n_init_sample else True
                    if (eval_count > _this_max_eval + self.def_max_fails) or _errorCount > self.def_max_fails:
                        print("Hyperopt message: too many fails, stop mining on this area")
                        self.isError = True
                        break
                    if self.isInitMode == False:
                        print("Hyperopt message: Finish Random Mode => Move to BO Mode*****")
                        break
            else:
                # If not initial sampling
                # print('****BO >>>>>')
                _errorCount = 0
                while ieval_count < _org_max_eval and (self.timeout > 0 if self.timeout != None else True):
                    _addeval = _org_max_eval - ieval_count
                    _initMax_evals = eval_count + _addeval
                    try:
                        self.fmin = hyperopt.fmin(fn=self.obj_func, space=self.search_space,
                                                  algo=self.algo, max_evals=_initMax_evals,
                                                  trials=self.trials, rstate=self.rstate,
                                                  pass_expr_memo_ctrl=self.pass_expr_memo_ctrl,
                                                  verbose=self.verbose,
                                                  return_argmin=self.return_argmin,
                                                  max_queue_len=self.max_queue_len,
                                                  timeout=self.timeout,
                                                  show_progressbar=self.show_progressbar)
                        _errorCount = 0
                    except Exception as e:
                        _errorCount += 1
                        print(e)
                        pass
                    ieval_count = len([x['loss'] for x in self.trials.results if x['status'] == 'ok'])
                    # _lastresults = [x for x in self.trials.results[-self.def_max_fails:] if x["status"] == "ok"]
                    eval_count = len([x['loss'] for x in self.trials.results])
                    # print(eval_count, _this_max_eval)
                    if _errorCount > self.def_max_fails*2:
                        print("too many fails, stop mining on this area", _errorCount)
                        self.isError = True
                        break
                    self.timeout = self.timeout - (time.time() - _stattime) if self.timeout != None else None
        else:
            try:
                self.fmin = hyperopt.fmin(fn=self.obj_func, space=self.search_space,
                                          algo=self.algo, max_evals=self.max_evals,
                                          trials=self.trials, rstate=self.rstate,
                                          pass_expr_memo_ctrl=self.pass_expr_memo_ctrl,
                                          verbose=self.verbose,
                                          return_argmin=self.return_argmin,
                                          max_queue_len=self.max_queue_len,
                                          timeout=self.timeout,
                                          show_progressbar=self.show_progressbar)
            except Exception as e:
                print(e)
                pass
        self.eval_count = len([x['loss'] for x in self.trials.results])
        self.ieval_count = len([x['loss'] for x in self.trials.results if x['status'] == 'ok'])
        self.eval_hist = self.ieval_count
        try:
            self.fopt = self.trials.best_trial['result']['loss']
            # print('_org_max_eval: ', _org_max_eval, ' ieval_count:', ieval_count, ' eval_count:', eval_count)
            if not hasattr(self, 'fmin'):
                self.fmin = {k: v[0] for k, v in self.trials.best_trial['misc']['vals'].items() if len(v) > 0}
            try:
                self.fmin_todict = space_eval(self.search_space, {k: v for k, v in self.fmin.items()})
            except Exception as e:
                self.fmin_todict = {}
                print(e)
            if hasattr(self, "isParallel"):
                if self.isParallel == True:
                    return self.fmin_todict, self.fopt, self.eval_count, self.ieval_count  # self.trials, self.sp_id, self.rstate
            return self.fmin_todict, self.fopt, self.eval_count, self.ieval_count
        except Exception as e:
            print('An Unknown error: ', e)
            return {}, 1, self.eval_count, self.ieval_count

    def AddBudget_run(self, add_eval, round_id=1):
        try:
            self.fix_max_evals = len([x['loss'] for x in self.trials.results if x['status'] == 'ok'])
            self.max_evals = len([x['loss'] for x in self.trials.results])
        except:
            pass
        self.fix_max_evals += add_eval
        self.max_evals += add_eval
        return self.run(round_id)
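
A hedged construction sketch: HyperOpt stores whatever keyword arguments it receives as attributes, so run() implicitly requires at least the names used above. The values below are illustrative assumptions, and the upstream hyperopt package stands in for this module's relative imports.

# Hypothetical usage; attribute values are assumptions, not package defaults.
from hyperopt import hp, tpe, Trials

opt = HyperOpt(
    obj_func=lambda p: p['x'] ** 2,
    search_space={'x': hp.uniform('x', -5, 5)},
    algo=tpe.suggest, algo_str='tpe',
    max_evals=50, fix_max_evals=50, n_init_sample=10,
    trials=Trials(), rstate=None,
    pass_expr_memo_ctrl=None, verbose=False, return_argmin=True,
    max_queue_len=1, timeout=None, show_progressbar=False,
)
best, fopt, evals, ok_evals = opt.run()  # staged random/TPE budget loop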
PypiClean
/ExpressPigeon-0.0.8.tar.gz/ExpressPigeon-0.0.8/expresspigeon/__init__.py
import os
import json
from collections import namedtuple

from expresspigeon.autoresponders import AutoResponders
from expresspigeon.campaigns import Campaigns
from expresspigeon.contacts import Contacts
from expresspigeon.lists import Lists
from expresspigeon.messages import Messages
from expresspigeon.templates import Templates
from expresspigeon.dictionaries import Dictionaries
from expresspigeon.flows import Flows

try:
    from urllib import request as url_lib
except ImportError:
    import urllib2 as url_lib


class InvalidAuthKey(Exception):
    pass


class ExpressPigeonException(Exception):
    pass


class ExpressPigeon(object):
    ROOT = "https://api.expresspigeon.com/"

    class Request(url_lib.Request):
        METHODS = ["get", "post", "put", "delete"]

        def __init__(self, url, method=None, headers=None, data=None,
                     origin_req_host=None, unverifiable=False):
            if not headers:
                headers = {}
            url_lib.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
            self.method = method

        def get_method(self):
            if self.method:
                return self.method
            return url_lib.Request.get_method(self)

    def __init__(self, auth_key=None):
        """ Initialize the ExpressPigeon API client.

        :param auth_key: ExpressPigeon API key. If not provided, the API key will be
            acquired from the EXPRESSPIGEON_AUTH_KEY environment variable.
        :type auth_key: string

        :returns: ExpressPigeon object for querying the API
        :rtype: ExpressPigeon

        :raises: :py:class:`InvalidAuthKey`: if the ExpressPigeon API key is not found
        """
        if auth_key is None:
            if 'EXPRESSPIGEON_AUTH_KEY' in os.environ:
                auth_key = os.environ['EXPRESSPIGEON_AUTH_KEY']
            else:
                raise InvalidAuthKey('You must provide an ExpressPigeon API key')

        self.auth_key = auth_key
        self.lists = Lists(self)
        self.contacts = Contacts(self)
        self.campaigns = Campaigns(self)
        self.messages = Messages(self)
        self.templates = Templates(self)
        self.auto_responders = AutoResponders(self)
        self.dictionaries = Dictionaries(self)
        self.flows = Flows(self)

    def __getattr__(self, name):
        """ This is a metaprogramming trick to send get, post, put, delete requests implicitly """
        return (
            lambda endpoint, **kwargs: self.__send_request__(endpoint, name, **kwargs)
            if name in self.Request.METHODS
            else super(ExpressPigeon, self).__getattribute__(name)
        )

    def __send_request__(self, endpoint, method, **kwargs):
        content_type = kwargs["content_type"] if "content_type" in kwargs else "application/json"
        body = kwargs["body"] if "body" in kwargs else json.dumps(kwargs["params"] if "params" in kwargs else {})

        opener = url_lib.build_opener(url_lib.HTTPSHandler)
        if isinstance(body, str):
            d = body.encode("utf-8")
        else:
            d = body
        req = self.Request(url=(self.ROOT if self.ROOT.endswith("/") else self.ROOT + "/") + endpoint,
                           method=method.upper(),
                           headers={"X-auth-key": self.auth_key,
                                    "Content-type": content_type,
                                    "User-Agent": "Mozilla/5.0"},
                           data=d)
        self.request_hook(req)
        try:
            response = opener.open(req)
            ct = response.info()['Content-Type']
            if 'text/plain' in ct:
                return response.read().decode("utf-8")
            return json.loads(response.read().decode("utf-8"), encoding="UTF-8",
                              object_hook=lambda d: namedtuple('EpResponse', d.keys())(*d.values()))
        except url_lib.HTTPError as e:
            return json.loads(e.fp.read().decode("utf-8"), encoding="UTF-8",
                              object_hook=lambda d: namedtuple('EpResponse', d.keys())(*d.values()))

    def read_stream(self, endpoint, **kwargs):
        opener = url_lib.build_opener(url_lib.HTTPSHandler)
        req = self.Request(url=(self.ROOT if self.ROOT.endswith("/") else self.ROOT + "/") + endpoint,
                           method="GET",
                           headers={"X-auth-key": self.auth_key,
                                    "User-Agent": "Mozilla/5.0"})
        self.request_hook(req)
        try:
            return opener.open(req).read().decode("utf-8")
        except url_lib.HTTPError as e:
            return json.loads(e.fp.read().decode("utf-8"), encoding="UTF-8",
                              object_hook=lambda d: namedtuple('EpResponse', d.keys())(*d.values()))

    def request_hook(self, request):
        pass
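
An illustrative usage sketch; the auth key is a placeholder and the call performs a live HTTPS request against the API root shown above.

# Placeholder key; api.get is dispatched through __getattr__ as GET /lists.
api = ExpressPigeon(auth_key="00000-00000-00000-00000-00000")
lists = api.get("lists")   # -> https://api.expresspigeon.com/lists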
PypiClean
/Adafruit_Blinka-8.20.1-py3-none-any.whl/adafruit_blinka/microcontroller/rockchip/rk3566/pin.py
from adafruit_blinka.microcontroller.generic_linux.libgpiod_pin import Pin

GPIO0_A2 = Pin((0, 2))
GPIO0_A4 = Pin((0, 4))
GPIO0_A5 = Pin((0, 5))
GPIO0_B1 = Pin((0, 9))
GPIO0_B2 = Pin((0, 10))
GPIO0_B5 = Pin((0, 13))
GPIO0_B6 = Pin((0, 14))
GPIO0_B7 = Pin((0, 15))
GPIO0_C2 = Pin((0, 18))
GPIO0_C3 = Pin((0, 19))
GPIO0_C5 = Pin((0, 21))
GPIO0_C6 = Pin((0, 22))
GPIO0_C7 = Pin((0, 23))
GPIO0_D0 = Pin((0, 24))
GPIO0_D1 = Pin((0, 25))

GPIO1_A0 = Pin((1, 0))
GPIO1_A1 = Pin((1, 1))
GPIO1_A2 = Pin((1, 2))
GPIO1_A3 = Pin((1, 3))
GPIO1_A4 = Pin((1, 4))
GPIO1_A5 = Pin((1, 5))
GPIO1_A7 = Pin((1, 7))
GPIO1_B0 = Pin((1, 8))
GPIO1_B1 = Pin((1, 9))
GPIO1_B2 = Pin((1, 10))
GPIO1_B3 = Pin((1, 11))
GPIO1_D5 = Pin((1, 29))
GPIO1_D6 = Pin((1, 30))
GPIO1_D7 = Pin((1, 31))

GPIO2_A0 = Pin((2, 0))
GPIO2_A1 = Pin((2, 1))
GPIO2_A2 = Pin((2, 2))
GPIO2_C3 = Pin((2, 19))
GPIO2_C4 = Pin((2, 20))
GPIO2_C5 = Pin((2, 21))
GPIO2_C6 = Pin((2, 22))

GPIO3_A5 = Pin((3, 5))
GPIO3_A6 = Pin((3, 6))
GPIO3_A7 = Pin((3, 7))
GPIO3_B1 = Pin((3, 9))
GPIO3_B2 = Pin((3, 10))
GPIO3_B3 = Pin((3, 11))
GPIO3_B4 = Pin((3, 12))
GPIO3_B5 = Pin((3, 13))
GPIO3_B6 = Pin((3, 14))
GPIO3_B7 = Pin((3, 15))
GPIO3_C0 = Pin((3, 16))
GPIO3_C1 = Pin((3, 17))
GPIO3_C2 = Pin((3, 18))
GPIO3_C3 = Pin((3, 19))
GPIO3_C4 = Pin((3, 20))
GPIO3_C5 = Pin((3, 21))
GPIO3_C6 = Pin((3, 22))
GPIO3_C7 = Pin((3, 23))
GPIO3_D0 = Pin((3, 24))
GPIO3_D1 = Pin((3, 25))
GPIO3_D2 = Pin((3, 26))
GPIO3_D3 = Pin((3, 27))
GPIO3_D4 = Pin((3, 28))
GPIO3_D5 = Pin((3, 29))

GPIO4_A4 = Pin((4, 4))
GPIO4_A5 = Pin((4, 5))
GPIO4_A6 = Pin((4, 6))
GPIO4_A7 = Pin((4, 7))
GPIO4_B0 = Pin((4, 8))
GPIO4_B1 = Pin((4, 9))
GPIO4_B2 = Pin((4, 10))
GPIO4_B3 = Pin((4, 11))
GPIO4_B4 = Pin((4, 12))
GPIO4_B5 = Pin((4, 13))
GPIO4_B6 = Pin((4, 14))
GPIO4_B7 = Pin((4, 15))
GPIO4_C0 = Pin((4, 16))
GPIO4_C1 = Pin((4, 17))
GPIO4_C2 = Pin((4, 18))
GPIO4_C3 = Pin((4, 19))
GPIO4_C4 = Pin((4, 20))
GPIO4_C5 = Pin((4, 21))
GPIO4_C6 = Pin((4, 22))

ADC_AIN3 = 37

# I2C
I2C0_SCL = GPIO0_B1
I2C0_SDA = GPIO0_B2
I2C2_SCL_M0 = GPIO0_B5
I2C2_SDA_M0 = GPIO0_B6
I2C2_SCL_M1 = GPIO4_B5
I2C2_SDA_M1 = GPIO4_B4
I2C3_SCL_M0 = GPIO1_A1
I2C3_SDA_M0 = GPIO1_A0
I2C4_SCL_M0 = GPIO4_B3
I2C4_SDA_M0 = GPIO4_B2
I2C5_SCL_M0 = GPIO3_B3
I2C5_SDA_M0 = GPIO3_B4

# SPI
SPI0_CS0_M0 = GPIO0_C6
SPI0_CLK_M0 = GPIO0_B5
SPI0_MISO_M0 = GPIO0_C5
SPI0_MOSI_M0 = GPIO0_B6
SPI3_CS0_M0 = GPIO4_A6
SPI3_CLK_M0 = GPIO4_B3
SPI3_MISO_M0 = GPIO4_B0
SPI3_MOSI_M0 = GPIO4_B2
SPI3_CS0_M1 = GPIO4_C6
SPI3_CLK_M1 = GPIO4_C2
SPI3_MISO_M1 = GPIO4_C5
SPI3_MOSI_M1 = GPIO4_C3

# UART
UART2_TX = GPIO0_D1
UART2_RX = GPIO0_D0
UART3_TX_M1 = GPIO3_B7
UART3_RX_M1 = GPIO3_C0
UART8_TX_M0 = GPIO2_C5
UART8_RX_M0 = GPIO2_C6

# PWM
PWM0 = GPIO0_B7
PWM1 = GPIO0_C7

# ordered as i2cId, SCL, SDA
i2cPorts = (
    (2, I2C2_SCL_M0, I2C2_SDA_M0),
    (3, I2C3_SCL_M0, I2C3_SDA_M0),
    (5, I2C5_SCL_M0, I2C5_SDA_M0),
)

# ordered as spiId, sckId, mosiId, misoId
spiPorts = (
    (3, SPI3_CLK_M0, SPI3_MOSI_M0, SPI3_MISO_M0),
    (3, SPI3_CLK_M1, SPI3_MOSI_M1, SPI3_MISO_M1),
)

# SysFS pwm outputs, pwm channel and pin in first tuple
pwmOuts = (
    ((0, 0), PWM0),
    ((0, 0), PWM1),
)

# SysFS analog inputs, ordered as analogInId, device, and channel
analogIns = ((ADC_AIN3, 0, 3),)
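
An illustrative sketch of how board-support code might look up a bus's pins from the i2cPorts tuple above; the selection helper itself is an assumption, not part of Blinka.

# Hypothetical helper: find the SCL/SDA pins for a given I2C bus id.
def first_i2c_port(bus_id=2):
    for i2c_id, scl, sda in i2cPorts:
        if i2c_id == bus_id:
            return scl, sda
    raise ValueError("no I2C port with id %d" % bus_id)

scl, sda = first_i2c_port()   # -> (I2C2_SCL_M0, I2C2_SDA_M0)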
PypiClean
/AmorphSC-1.0.tar.gz/AmorphSC-1.0/README.md
# Amorphous Semiconductors Analysis Library

This is a Python library created for my master thesis on amorphous semiconductors. It is composed of several modules, each one performing a different kind of analysis. At the moment it is possible to analyze capacitance-voltage, photocurrent and Kelvin Probe Force Microscopy data.

## Installation

To install, simply write

    pip install AmorphSC

All dependencies should be installed automatically, but if something goes wrong the list can be found in *AmorphSC.egg-info -> requires.txt*

If you want to update the installed package to the latest version, use

    pip install --upgrade AmorphSC

## Structure

The package is divided into different files that can be used for both analysis and input/output. A detailed description of every function is available in the wiki. A general description of every file is given below.

### in_out.py

This file contains functions dedicated to input and output. For example, with **import_file** it is possible to import more than one file at a time with the same prefix and path. Here you can also find the function **s**, which prints any number in scientific notation.

### CV.py

This file contains all the functions to analyze CV data. It is possible to plot CV characteristics, density of states and more.

### photocurrent.py

This module contains functions to perform analysis on photocurrent data. More in detail, it is possible to calculate photocurrent spectra, lamp spectra, Tauc plots, and more.
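A hypothetical usage sketch; the signatures of **import_file** and **s** below are assumptions inferred from the descriptions above, not the documented API:

    import AmorphSC.in_out as io

    # hypothetical parameters: batch-import all data files sharing a prefix
    files = io.import_file("CV_sample1_", path="data/")
    print(io.s(3.2e-7))   # number printed in scientific notation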
PypiClean
/MACE-1.1.2.tar.gz/MACE-1.1.2/scripts/number_of_variants.py
__author__ = 'Sergei F. Kliver'

import os, sys
import argparse

from MACE.General.File import split_filename, make_list_of_path_to_files
from MACE.Parsers.VCF import CollectionVCF


def vcf_filter(filename):
    return True if filename[-4:] == ".vcf" else False


parser = argparse.ArgumentParser()

parser.add_argument("-o", "--output", action="store", dest="output", default="stdout",
                    help="Output file with number of variants in vcf file(files). Default: stdout")
parser.add_argument("-a", "--write_header", action="store_true", dest="write_header",
                    help="Write header in output file. Default: false")
parser.add_argument("-i", "--input_vcf", action="store", dest="input_vcf", type=lambda s: s.split(","),
                    help="Comma-separated list of vcf files or directories containing them", required=True)
parser.add_argument("-w", "--write_dir_path", action="store_true", dest="write_dir_path",
                    help="write directory name(if directory is source of vcf files) in output file. Default: false")
parser.add_argument("-e", "--write_ext", action="store_true", dest="write_ext",
                    help="write extensions of vcf files in output file. Default: false")

args = parser.parse_args()

files_list = sorted(make_list_of_path_to_files(args.input_vcf, vcf_filter))

out_fd = sys.stdout if args.output == "stdout" else open(args.output, "w")

if args.write_header:
    out_fd.write("#file/sample\tnumber_of_variants\thomozygous\theterozygous\n")

for filename in files_list:
    if args.output != "stdout":
        print("Counting variants in %s ..." % filename)
    directory, prefix, extension = split_filename(filename)
    variants = CollectionVCF(from_file=True, in_file=filename)
    homo, hetero = variants.count_zygoty()
    number_of_variants = len(variants)
    if args.write_dir_path and args.write_ext:
        name = filename
    elif args.write_dir_path:
        name = (directory + prefix) if directory else prefix
    elif args.write_ext:
        name = prefix + extension
    else:
        name = prefix
    out_fd.write("%s\t%i\t%i\t%i\n" % (name, number_of_variants, homo, hetero))

if args.output != "stdout":
    out_fd.close()
PypiClean
/DataGridBWC-0.2.2.tar.gz/DataGridBWC-0.2.2/datagridbwc_ta/views.py
from blazeweb.views import View
from sqlalchemybwc import db

from datagridbwc.lib import DataGrid, Col
from datagridbwc_ta.model.orm import Car, Radio
from datagridbwc.lib.declarative import NumericColumn
from datagridbwc_ta.tests.grids import PeopleGrid as PGBase
from datagridbwc_ta.model.orm import Person


class DataGrid1(View):
    def default(self):
        dg = DataGrid(
            db.sess.execute,
            per_page=1,
        )
        dg.add_col(
            'id',
            Car.id,
            inresult=True
        )
        # dg.add_tablecol(
        #     Col('Actions',
        #         extractor=self.action_links,
        #         width_th='8%'
        #     ),
        #     orm.CorpEntity.id,
        #     sort=None
        # )
        dg.add_tablecol(
            Col('Make'),
            Car.make,
            filter_on=True,
            sort='both'
        )
        dg.add_tablecol(
            Col('Model'),
            Car.model,
            filter_on=True,
            sort='both'
        )
        dg.add_tablecol(
            Col('Year'),
            Car.year,
            filter_on=True,
            sort='both'
        )
        self.assign('dg', dg)
        self.render_template()


class DataGrid2(View):
    def default(self):
        dg = DataGrid(
            db.sess.execute,
            rs_customizer=lambda q: q.where(Car.radio_id == Radio.id),
            per_page=1,
        )
        dg.add_col(
            'id',
            Car.id,
            inresult=True
        )
        dg.add_tablecol(
            Col('Make'),
            Car.make,
            filter_on=True,
            sort='both'
        )
        dg.add_tablecol(
            Col('Model'),
            Car.model,
            filter_on=True,
            sort='both'
        )
        dg.add_tablecol(
            Col('Radio'),
            Radio.model.label('radio_model'),
            filter_on=True,
            sort='both'
        )
        self.assign('dg', dg)
        self.render_template()


class CurrencyCol(NumericColumn):
    def format_data(self, data):
        return data if int(data) % 2 else data * -1


class PeopleGrid(PGBase):
    CurrencyCol('Currency', Person.numericcol, format_as='percent', places=5)
    CurrencyCol('C2', Person.numericcol.label('n2'), format_as='accounting')


class ManagePeople(View):
    def default(self):
        pg = PeopleGrid()
        pg.apply_qs_args()
        if pg.export_to == 'xls':
            pg.xls.as_response()
        self.assign('people_grid', pg)
        self.render_template()
PypiClean
/OWSLib-0.29.2.tar.gz/OWSLib-0.29.2/owslib/ogcapi/features.py
from copy import deepcopy
import logging
from urllib.parse import urlencode

from owslib.ogcapi import Collections
from owslib.util import Authentication

LOGGER = logging.getLogger(__name__)


class Features(Collections):
    """Abstraction for OGC API - Features"""

    def __init__(self, url: str, json_: str = None, timeout: int = 30,
                 headers: dict = None, auth: Authentication = None):
        __doc__ = Collections.__doc__  # noqa
        super().__init__(url, json_, timeout, headers, auth)

    def feature_collections(self) -> list:
        """
        implements /collections filtered on features

        @returns: `list` of filtered collections object
        """

        features_ = []
        collections_ = super().collections()

        for c_ in collections_['collections']:
            if 'itemType' in c_ and c_['itemType'].lower() == 'feature':
                features_.append(c_['id'])

        return features_

    def collection_items(self, collection_id: str, **kwargs: dict) -> dict:
        """
        implements /collection/{collectionId}/items

        @type collection_id: string
        @param collection_id: id of collection
        @type bbox: list
        @param bbox: list of minx,miny,maxx,maxy
        @type datetime_: string
        @param datetime_: time extent or time instant
        @type limit: int
        @param limit: limit number of features
        @type offset: int
        @param offset: start position of results
        @type q: string
        @param q: full text search
        @type filter: string
        @param filter: CQL TEXT expression
        @type cql: dict
        @param cql: CQL JSON payload

        @returns: feature results
        """

        if 'bbox' in kwargs:
            kwargs['bbox'] = ','.join(list(map(str, kwargs['bbox'])))
        if 'datetime_' in kwargs:
            kwargs['datetime'] = kwargs['datetime_']

        if 'cql' in kwargs:
            LOGGER.debug('CQL query detected')
            kwargs2 = deepcopy(kwargs)
            cql = kwargs2.pop('cql')
            path = f'collections/{collection_id}/items?{urlencode(kwargs2)}'
            return self._request(method='POST', path=path, data=cql, kwargs=kwargs2)
        else:
            path = f'collections/{collection_id}/items'
            return self._request(path=path, kwargs=kwargs)

    def collection_item(self, collection_id: str, identifier: str) -> dict:
        """
        implements /collections/{collectionId}/items/{featureId}

        @type collection_id: string
        @param collection_id: id of collection
        @type identifier: string
        @param identifier: feature identifier

        @returns: single feature result
        """

        path = f'collections/{collection_id}/items/{identifier}'
        return self._request(path=path)

    def collection_item_create(self, collection_id: str, data: str) -> bool:
        """
        implements POST /collections/{collectionId}/items

        @type collection_id: string
        @param collection_id: id of collection
        @type data: string
        @param data: raw representation of data

        @returns: single feature result
        """

        path = f'collections/{collection_id}/items'

        if isinstance(data, dict):  # JSON
            LOGGER.debug('Detected JSON payload')
            self.headers['Content-Type'] = 'application/geo+json'
        elif data.startswith('<'):  # XML
            data = data.strip()
            LOGGER.debug('Detected XML payload')
            self.headers['Content-Type'] = 'application/xml'

        _ = self._request(method='POST', path=path, data=data)

        return True

    def collection_item_update(self, collection_id: str, identifier: str,
                               data: str) -> bool:
        """
        implements PUT /collections/{collectionId}/items/{featureId}

        @type collection_id: string
        @param collection_id: id of collection
        @type identifier: string
        @param identifier: feature identifier
        @type data: string
        @param data: raw representation of data

        @returns: ``bool`` of update result
        """

        path = f'collections/{collection_id}/items/{identifier}'

        _ = self._request(method='PUT', path=path, data=data)

        return True

    def collection_item_delete(self, collection_id: str, identifier: str) -> bool:
        """
        implements DELETE /collections/{collectionId}/items/{featureId}

        @type collection_id: string
        @param collection_id: id of collection
        @type identifier: string
        @param identifier: feature identifier

        @returns: ``bool`` of deletion result
        """

        path = f'collections/{collection_id}/items/{identifier}'

        _ = self._request(method='DELETE', path=path)

        return True
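
An illustrative client session; the endpoint URL is a placeholder for any OGC API - Features server.

w = Features('https://demo.pygeoapi.io/master')
fc = w.feature_collections()                  # ids of feature-type collections
items = w.collection_items(fc[0], limit=10)   # first 10 features as GeoJSON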
PypiClean
/BlueWhale3_Network-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl/orangecontrib/network/widgets/OWNxAnalysis.py
import inspect from collections import namedtuple import numpy as np from scipy.sparse import csgraph from AnyQt.QtCore import QThread, Qt from AnyQt.QtWidgets import QWidget, QGridLayout from Orange.data import ContinuousVariable, Table, Domain from Orange.widgets import gui, widget from Orange.widgets.settings import Setting from Orange.widgets.widget import Input, Output, Msg from orangecontrib.network.network import Network from orangecontrib.network.i18n_config import * def __(key): return i18n.t('network.OWNxAnalysis.' + key) NODELEVEL, GRAPHLEVEL, INTERNAL = range(3) UNDIRECTED, DIRECTED, GENERAL = range(3) ERRORED = object() TERMINATED = object() def shortest_paths_nan_diag(network): paths = csgraph.floyd_warshall( network.edges[0].edges, network.edges[0].directed).astype(float) diag = np.lib.stride_tricks.as_strided( paths, (len(paths), ), ((len(paths) + 1) * paths.dtype.itemsize,)) diag[:] = np.nan return paths def density(network): n = network.number_of_nodes() if n < 2: return 0.0 num = network.number_of_edges() if not network.edges[0].directed: num *= 2 return num / (n * (n - 1)) def avg_degree(network): m = network.number_of_edges() n = max(network.number_of_nodes(), 1) return m / n if network.edges[0].directed else 2 * m / n # TODO: adapt betweenness_centrality and perhaps other statistics from # https://github.com/networkdynamics/zenlib/blob/master/src/zen/algorithms/centrality.pyx # TODO: Combo that lets the user set edge types to use (or all) # Order of this list also defines execution priority METHODS = ( ("n", lambda network: network.number_of_nodes()), ("e", lambda network: network.number_of_edges()), ("degrees", lambda network: network.degrees(), __("btn.degree"), NODELEVEL, GENERAL), ("number_of_nodes", lambda n: n, __("btn.number_node"), GRAPHLEVEL, GENERAL), ("number_of_edges", lambda e: e, __("btn.number_edge"), GRAPHLEVEL, GENERAL), ("average_degree", lambda network: avg_degree(network), __("btn.average_degree"), GRAPHLEVEL, GENERAL), ("density", lambda network: density(network), __("btn.density"), GRAPHLEVEL, GENERAL), ("shortest_paths", shortest_paths_nan_diag, __("btn.shortest_paths")), ("diameter", lambda shortest_paths: np.nanmax(shortest_paths), __("btn.diameter"), GRAPHLEVEL, GENERAL), ("radius", lambda shortest_paths: np.nanmin(np.nanmax(shortest_paths, axis=1)), __("btn.radius"), GRAPHLEVEL, GENERAL), ("average_shortest_path_length", lambda shortest_paths: np.nanmean(shortest_paths), __("btn.average_shortest_path_length"), GRAPHLEVEL, GENERAL), ("number_strongly_connected_components", lambda network: csgraph.connected_components(network.edges[0].edges, False)[0], __("btn.number_strongly_connect_component"), GRAPHLEVEL, DIRECTED), ("number_weakly_connected_components", lambda network: csgraph.connected_components(network.edges[0].edges, True)[0], __("btn.number_weakly_connect_component"), GRAPHLEVEL, DIRECTED), ("in_degrees", lambda network: network.in_degrees(), __("btn.in_degree"), NODELEVEL, GENERAL), ("out_degrees", lambda network: network.out_degrees(), __("btn.out_degree"), NODELEVEL, GENERAL), ("average_neighbour_degrees", lambda network, degrees: np.fromiter( (np.mean(degrees[network.neighbours(i)]) if degree else np.nan for i, degree in enumerate(degrees)), dtype=float, count=len(degrees)), __("btn.average_neighbor_degree"), NODELEVEL, GENERAL), ("degree_centrality", lambda degrees, n: n and degrees / (n - 1) if n > 1 else 0, __("btn.degree_centrality"), NODELEVEL, GENERAL), ("in_degree_centrality", lambda in_degrees, n: in_degrees / (n - 1) if n 
> 1 else 0, __("btn.in_degree_centrality"), NODELEVEL, GENERAL), ("out_degree_centrality", lambda out_degrees, n: out_degrees / (n - 1) if n > 1 else 0, __("btn.out_degree_centrality"), NODELEVEL, GENERAL), ("closeness_centrality", lambda shortest_paths: 1 / np.nanmean(shortest_paths, axis=1), __("btn.closeness_centrality"), NODELEVEL, GENERAL) ) MethodDefinition = namedtuple( "MethodDefinition", ["name", "func", "label", "level", "edge_constraint", "args"]) METHODS = {definition[0]: MethodDefinition(*(definition + ("", None, "", INTERNAL, GENERAL)[len(definition):]), inspect.getfullargspec(definition[1]).args) for definition in METHODS} """ ("degree_assortativity_coefficient", False, \ "Degree assortativity coefficient", GRAPHLEVEL, \ nx.degree_assortativity_coefficient if \ hasattr(nx, "degree_assortativity_coefficient") else None), ("degree_pearson_correlation_coefficient", False, \ "Degree pearson correlation coefficient", GRAPHLEVEL, \ nx.degree_pearson_correlation_coefficient if\ hasattr(nx, "degree_pearson_correlation_coefficient") else None), ("estrada_index", False, "Estrada index", GRAPHLEVEL, \ nx.estrada_index if hasattr(nx, "estrada_index") else None), ("graph_clique_number", False, "Graph clique number", GRAPHLEVEL, nx.graph_clique_number), ("graph_number_of_cliques", False, "Graph number of cliques", GRAPHLEVEL, nx.graph_number_of_cliques), ("transitivity", False, "Graph transitivity", GRAPHLEVEL, nx.transitivity), ("average_clustering", False, "Average clustering coefficient", GRAPHLEVEL, nx.average_clustering), ("number_attracting_components", False, "Number of attracting components", GRAPHLEVEL, nx.number_attracting_components), ("clustering", False, "Clustering coefficient", NODELEVEL, nx.clustering), ("triangles", False, "Number of triangles", NODELEVEL, nx.triangles), ("square_clustering", False, "Squares clustering coefficient", NODELEVEL, nx.square_clustering), ("number_of_cliques", False, "Number of cliques", NODELEVEL, nx.number_of_cliques), ("betweenness_centrality", False, "Betweenness centrality", NODELEVEL, nx.betweenness_centrality), ("current_flow_closeness_centrality", False, "Information centrality", NODELEVEL, nx.current_flow_closeness_centrality), ("current_flow_betweenness_centrality", False, "Random-walk betweenness centrality", NODELEVEL, nx.current_flow_betweenness_centrality), ("approximate_current_flow_betweenness_centrality", False, \ "Approx. 
random-walk betweenness centrality", NODELEVEL, \ nx.approximate_current_flow_betweenness_centrality if \ hasattr(nx, "approximate_current_flow_betweenness_centrality") \ else None), ("eigenvector_centrality", False, "Eigenvector centrality", NODELEVEL, nx.eigenvector_centrality), ("eigenvector_centrality_numpy", False, "Eigenvector centrality (NumPy)", NODELEVEL, nx.eigenvector_centrality_numpy), ("load_centrality", False, "Load centrality", NODELEVEL, nx.load_centrality), ("core_number", False, "Core number", NODELEVEL, nx.core_number), ("eccentricity", False, "Eccentricity", NODELEVEL, nx.eccentricity), ("closeness_vitality", False, "Closeness vitality", NODELEVEL, nx.closeness_vitality), """ class WorkerThread(QThread): def __init__(self, method, data): super().__init__() self.method = method self.data = data self.stopped = 0 self.result = None self.error = None self.is_terminated = False def run(self): args = tuple(self.data[arg] for arg in self.method.args) if any(arg is TERMINATED for arg in args): # "in" doesn't work with np self.result = TERMINATED elif any(arg is ERRORED for arg in args): self.result = TERMINATED else: try: self.result = self.method.func(*args) except Exception as ex: self.error = ex print(ex) class OWNxAnalysis(widget.OWWidget): name = __('name') description = __('desc') icon = 'icons/NetworkAnalysis.svg' priority = 6425 resizing_enabled = False class Inputs: network = Input("Network", Network, label=i18n.t("network.common.network")) items = Input("Items", Table, i18n.t("network.common.items")) class Outputs: network = Output("Network", Network, label=i18n.t("network.common.network")) items = Output("Items", Table, i18n.t("network.common.items")) class Information(widget.OWWidget.Information): computing = Msg(__('msg_compute')) want_main_area = False want_control_area = True auto_commit = Setting(False) enabled_methods = Setting( {"number_of_nodes", "number_of_edges", "average_degree"}) def __init__(self): super().__init__() self.graph = None self.items = None # items set by Items signal self.items_graph = None # items set by graph.items by Network signal self.items_analysis = None # items to output and merge with analysis result self.known = {} self.running_jobs = {} # Indicates that node level statistics have changed or are pending to self._nodelevel_invalidated = False self.controlArea = QWidget(self.controlArea) self.layout().addWidget(self.controlArea) layout = QGridLayout() self.controlArea.setLayout(layout) layout.setContentsMargins(4, 4, 4, 4) tabs = gui.tabWidget(self.controlArea) tabs.setMinimumWidth(450) graph_indices = gui.createTabPage(tabs, __("box.graph-level_index"), orientation=Qt.Horizontal) node_indices = gui.createTabPage(tabs, __("box.node-level_index"), orientation=Qt.Horizontal) graph_methods = gui.vBox(graph_indices) gui.rubber(graph_indices) graph_labels = gui.vBox(graph_indices) node_methods = gui.vBox(node_indices) gui.rubber(node_indices) node_labels = gui.vBox(node_indices) graph_labels.layout().setAlignment(Qt.AlignRight) self.method_cbs = {} for method in METHODS.values(): if method.level == INTERNAL: continue setattr(self, method.name, method.name in self.enabled_methods) setattr(self, "lbl_" + method.name, "") methods = node_methods if method.level == NODELEVEL else graph_methods labels = node_labels if method.level == NODELEVEL else graph_labels cb = gui.checkBox( methods, self, method.name, method.label, callback=lambda attr=method.name: self.method_clicked(attr) ) self.method_cbs[method.name] = cb lbl = gui.label(labels, self, 
f"%(lbl_{method.name})s") labels.layout().setAlignment(lbl, Qt.AlignRight) setattr(self, "tool_" + method.name, lbl) # todo: is this accessible through controls? graph_indices.layout().addStretch(1) node_indices.layout().addStretch(1) @Inputs.network def set_graph(self, graph): self.cancel_job() self.graph = graph allowed_edge_types = {GENERAL, DIRECTED, UNDIRECTED} if graph is not None: allowed_edge_types.remove(UNDIRECTED if self.graph.edges[0].directed else DIRECTED) self.known = {} for name in METHODS: curr_method = METHODS[name] if curr_method.level == INTERNAL: continue lbl_obj = getattr(self, "tool_{}".format(name)) cb_obj = self.method_cbs[name] # disable/re-enable valid graph indices if curr_method.edge_constraint not in allowed_edge_types: lbl_obj.setDisabled(True) cb_obj.setChecked(False) cb_obj.setEnabled(False) else: lbl_obj.setDisabled(False) cb_obj.setEnabled(True) setattr(self, f"lbl_{name}", "") if graph is not None: self.known["network"] = graph self.items_graph = graph.nodes @Inputs.items def set_items(self, items): self.items = items def handleNewSignals(self): if self.items is not None: self.items_analysis = self.items elif self.graph: self.items_analysis = self.graph.nodes else: self.items_analysis = None self._nodelevel_invalidated = True self.run_more_jobs() def needed_methods(self): # Preconditions for methods could be precomputed, so this function would # only compute the union of sets of conditions for enabled checkboxes tasks = [ name for name in METHODS if getattr(self, name, False)] for name in tasks: tasks += [name for name in METHODS[name].args if name != "network"] tasks = set(tasks) - set(self.known) return [method for name, method in METHODS.items() if name in tasks] def run_more_jobs(self): known = set(self.known) needed = self.needed_methods() for method in needed: if method.name not in self.running_jobs: setattr(self, "lbl_" + method.name, __("status.pend")) doable = [method for method in needed if method.name not in self.running_jobs and set(method.args) <= known] free = max(1, QThread.idealThreadCount()) - len(self.running_jobs) if not doable: # This will output new data when everything is finished self.send_data() for method in doable[:free]: job = WorkerThread(method, self.known) job.finished.connect(lambda job=job: self.job_finished(job)) self.running_jobs[method.name] = job job.start() if not method.level == INTERNAL: setattr(self, "lbl_" + method.name, __("status.run")) self.show_computing() def job_finished(self, job): method = job.method self.known[method.name] = job.result del self.running_jobs[method.name] self.set_label_for(method.name) self.run_more_jobs() def set_label_for(self, name): level = METHODS[name].level if level == INTERNAL: return value = self.known.get(name, None) txt = "" if getattr(self, name, False): if value is TERMINATED: txt = "terminated" elif value is ERRORED: txt = "error" elif value is None: txt = "computing" if name in self.running_jobs else "pending" elif level == GRAPHLEVEL: txt = f"{value:.4g}" setattr(self, "lbl_" + name, txt) def show_computing(self): computing = ", ".join(METHODS[name].label for name in self.running_jobs) self.Information.computing(computing, shown=bool(computing)) def cancel_job(self, name=None): # This does not really work because functions called in those # threads do not observe the "is_terminated" flag and won't quit if name is None: to_stop = list(self.running_jobs) elif name in self.running_jobs: to_stop = [name] else: # This task is not running; but are its preconditions running? 
            still_needed = self.needed_methods()
            to_stop = [
                job.method.name for job in self.running_jobs.values()
                if job.method not in still_needed]
        for name in to_stop:
            job = self.running_jobs[name]
            job.is_terminated = True
            job.finished.disconnect()
            job.quit()
        for name in to_stop:
            job = self.running_jobs[name]
            job.wait()
            setattr(self, "lbl_" + name, __("status.terminate"))
            del self.running_jobs[name]
        self.show_computing()

    def onDeleteWidget(self):
        self.cancel_job()
        super().onDeleteWidget()

    def send_data(self):
        # Don't send when computation is still on, or it's done but no node
        # level statistics have changed
        if self.running_jobs or not self._nodelevel_invalidated:
            return
        self._nodelevel_invalidated = False

        if self.graph is None:
            self.Outputs.network.send(None)
            self.Outputs.items.send(None)
            return

        to_report = [
            method for attr, method in METHODS.items()
            if method.level == NODELEVEL
            and getattr(self, attr) and attr in self.known]
        items = self.items_analysis
        graph = self.graph
        n = graph.number_of_nodes()
        if isinstance(items, Table):
            dom = self.items_analysis.domain
            attrs, class_vars, metas = dom.attributes, dom.class_vars, dom.metas
            x, y, m = items.X, items.Y, items.metas
        else:
            attrs, class_vars, metas = [], [], []
            x = y = m = np.empty((n, 0))

        attrs += tuple(ContinuousVariable(method.label) for method in to_report)
        x = np.hstack(
            (x, ) + tuple(self.known[method.name].reshape((n, 1))
                          for method in to_report))
        domain = Domain(attrs, class_vars, metas)
        table = Table(domain, x, y, m)
        new_graph = Network(table, graph.edges, graph.name, graph.coordinates)
        self.Outputs.network.send(new_graph)
        self.Outputs.items.send(table)

    def method_clicked(self, name):
        if METHODS[name].level == NODELEVEL:
            self._nodelevel_invalidated = True
        if getattr(self, name):
            self.enabled_methods.add(name)
            if name in self.known:
                self.set_label_for(name)
                self.send_data()
            else:
                self.run_more_jobs()
        else:
            self.enabled_methods.remove(name)
            if name in self.running_jobs:
                self.cancel_job(name)
            else:
                self.set_label_for(name)
            self.send_data()

    def send_report(self):
        self.report_items("", items=[
            (method.label, f"{self.known[attr]:.4g}")
            for attr, method in METHODS.items()
            if method.level == GRAPHLEVEL
            and getattr(self, attr)
            and attr in self.known
            and isinstance(self.known[attr], (int, float))])


def main():
    from Orange.widgets.utils.widgetpreview import WidgetPreview
    from orangecontrib.network.network.readwrite \
        import read_pajek, transform_data_to_orange_table
    from os.path import join, dirname

    #network = read_pajek(join(dirname(dirname(__file__)), 'networks', 'leu_by_genesets.net'))
    network = read_pajek(join(dirname(dirname(__file__)), 'networks', 'lastfm.net'))
    #network = read_pajek(join(dirname(dirname(__file__)), 'networks', 'Erdos02.net'))
    #transform_data_to_orange_table(network)
    WidgetPreview(OWNxAnalysis).run(set_graph=network)


if __name__ == "__main__":
    main()
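
# A minimal sketch (not part of the original module) of the dependency
# pattern used above: each method declares the names of the results it
# needs, and results are resolved from a shared "known" dict. The Method
# namedtuple here is an assumption that mirrors the attributes
# (name, args, func) the widget reads from METHODS values; run_all() mimics
# run_more_jobs/job_finished without threads.
from collections import namedtuple

Method = namedtuple("Method", ("name", "args", "func"))

METHODS_SKETCH = {
    "number_of_nodes": Method("number_of_nodes", ("network",),
                              lambda g: g.number_of_nodes()),
    "number_of_edges": Method("number_of_edges", ("network",),
                              lambda g: g.number_of_edges()),
    "average_degree": Method("average_degree",
                             ("number_of_nodes", "number_of_edges"),
                             lambda n, e: 2 * e / n),
}


def run_all(known):
    # Repeatedly run every method whose prerequisites are already in `known`;
    # start with known = {"network": some_graph_like_object}.
    pending = dict(METHODS_SKETCH)
    while pending:
        ready = [m for m in pending.values() if set(m.args) <= set(known)]
        if not ready:
            break  # remaining methods have unmet prerequisites
        for m in ready:
            known[m.name] = m.func(*(known[a] for a in m.args))
            del pending[m.name]
    return known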
PypiClean
/KL_Audit_supportV4.0-1.0-py3-none-any.whl/AuditModule/core/applications/AuditManagementModules.py
from AuditModule.common import AppConstants
from AuditModule.util import Logging as LOGG
import traceback
from datetime import datetime

# from AuditModule.core.persistences.adaptors.CassandraPersistenceAdaptor import CassandraDButility

# cassandra_obj = CassandraDButility()

Logger = LOGG.get_logger()


def audit_logs_modules(application_type, content_type, application_data):
    try:
        strategy_json = AppConstants.AuditLogsConstants.audit_logs_mapping_json.get(application_type)
        user_name, client_id, user_role_name, operations, parameter_label, status = \
            audit_logs_user_access_strategies(application_data)

        return user_name, client_id, user_role_name, operations, parameter_label, status
    except Exception as e:
        Logger.error("Error in audit log modules: %s", str(e))
        return "", "", "", "", {}, ""


def audit_logs_user_access_strategies(user_data):
    try:
        user_name = ""
        client_id = ""
        user_role_name = ""
        operations = ""
        parameter_label = {}
        status = ""
        if 'query_json' in user_data:
            response = user_data.get('query_json', "")
            if not isinstance(response, str):
                user_name = response.get('user_name', "")
            if not user_name and not isinstance(response, str):
                user_name = response.get('user_id', "")
            if not user_name and 'cookies' in user_data:
                user_name = user_data['cookies']['user_id']
            if not user_name and 'user_id' in user_data:
                user_name = user_data['user_id']
                if isinstance(user_data.get('user_id'), dict):
                    user_name = user_data['user_id'].get("user_id", "")
            operations = user_data.get("action", "")
            client_id = response.get("client_id", "")
            if not client_id and 'client_id' in user_data:
                client_id = user_data.get("client_id", "")
                if isinstance(user_data.get('client_id'), dict):
                    client_id = user_data['client_id'].get("client_id", "")
            user_role_name = response.get("user_role_name", "")
            parameter_label = user_data['query_json']
            # module = response.get("module", "")
            status = user_data['query_json'].get("status", "success")

        return user_name, client_id, user_role_name, operations, parameter_label, status
    except Exception as e:
        print(traceback.format_exc())
        Logger.error("Error in user access: %s", str(e))
        raise Exception(str(e))

# def generate_id(table_name, op_type):
#     try:
#         if table_name:
#             data = dict()
#             # check = cassandra_obj.table_check(table_name)
#             if check:
#                 counter = 0
#                 data['id'] = counter + 1
#                 data['time'] = datetime.utcnow()
#                 data['name'] = "audit_id"
#                 cassandra_obj.insert_table_id(table_name, data)
#                 Logger.info("created and inserted data successfully")
#                 return data['id']
#             else:
#                 name = "audit_id"
#                 response = cassandra_obj.fetch_table_id(name)
#                 for i in response:
#                     resp_id = i[0]
#                 data['id'] = resp_id + 1
#                 data['time'] = datetime.utcnow()
#                 data['name'] = name
#                 cassandra_obj.insert_table_id(table_name, data)
#                 Logger.info("updated data successfully")
#                 return data['id']
#     except Exception as e:
#         print((traceback.format_exc()))
#         Logger.error("Error in user Access ", str(e))
#         raise Exception(str(e))
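
# Illustrative only (not part of the original module): the payload shape the
# user-access strategy above expects. Field values are made up for demonstration.
if __name__ == "__main__":
    sample_payload = {
        "query_json": {"user_name": "jdoe", "client_id": "client-001", "status": "success"},
        "action": "login",
    }
    print(audit_logs_user_access_strategies(sample_payload))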
PypiClean
/CB_IPO-0.2.0.tar.gz/CB_IPO-0.2.0/CB_IPO/CB_IPO.py
import pandas as pd
import time
import math
from bs4 import BeautifulSoup as bs
from selenium import webdriver


class scrape:
    """
    This class creates an object that can scrape for various filings and attributes of SEC filings

    Attributes:
        url_info: The url to be used for scraping the site
    """

    def __init__(self):
        """
        Instantiates a scraper object, with a default search space of recent S-1 filings
        """
        self.url_info = "https://www.sec.gov/edgar/search/#/filter_forms=S-1"

    def set_page(self, page_number):
        """
        Modifies the page number being opened from the search results, and modifies url_info as such

        Args:
            page_number (int): The page number in search results to be opened

        Returns:
            str: A string of the modified url
        """
        pstr = '&page={}'.format(page_number)
        i = self.url_info.find('&page=')
        if i < 0:
            self.url_info += pstr
        else:
            self.url_info = self.url_info[:i] + pstr

        return self.url_info

    def reset_url(self):
        """
        Resets the url to only search for the most recent S-1 forms, and modifies url_info as such

        Returns:
            str: A string of the original url
        """
        self.url_info = "https://www.sec.gov/edgar/search/#/filter_forms=S-1"
        return self.url_info

    def set_search_date(self, start_date, end_date):
        """
        Modifies the search to look between two specific dates, and modifies url_info as such

        Args:
            start_date (str): Start of the date range in YYYY-MM-DD format
            end_date (str): End of the date range in YYYY-MM-DD format

        Returns:
            str: A string of the modified url
        """
        dates = 'dateRange=custom&category=custom&startdt={}&enddt={}&'.format(start_date, end_date)
        self.url_info = "https://www.sec.gov/edgar/search/#/{}filter_forms=S-1".format(dates)
        return self.url_info

    def edgar_scrape(self, num):
        """
        Finds the names, dates, and types of forms filed by different companies

        Args:
            num (int): The number of entities to be scraped from a page

        Returns:
            tuple: A list of company names and a list of filing dates (both in page order),
                and a set of the filing types encountered

        Raises:
            ValueError: If `num` is greater than 100
        """
        if num > 100:
            raise ValueError('Cannot find more than 100 entries per page')

        driver = webdriver.Chrome('chromedriver')
        c_names = []
        c_dates = []
        form_types = set()
        driver.get(self.url_info)
        time.sleep(10)
        source = driver.page_source
        html_s = bs(source, 'html.parser')

        i = 0
        for item in html_s.findAll(attrs={'class': 'entity-name'}):
            if i == num:
                break
            if item.text != 'Filing entity/person':
                c_names.append(item.text)
                i += 1

        i2 = 0
        for item in html_s.findAll(attrs={'class': 'filed'}):
            if i2 == num:
                break
            if 'Filed' not in item.text:
                c_dates.append(item.text)
                i2 += 1

        i3 = 0
        for item in html_s.findAll(attrs={'class': 'filetype'}):
            if i3 == num:
                break
            if item.text != 'Form & File':
                i = item.text.find(' ')
                form_types.add(item.text[:i])
                i3 += 1

        driver.quit()

        return (c_names, c_dates, form_types)

    def generate_df(self, num_entries=100, num_pages=1):
        """
        Builds a dataframe of company names and filing dates across one or more result pages

        Args:
            num_entries (int): The number of entities to be scraped from a page
            num_pages (int): The number of pages to be scraped

        Returns:
            pandas.DataFrame: A dataframe of the companies scraped and the dates they filed

        Raises:
            ValueError: If `num_entries` is greater than 100
        """
        if num_entries > 100:
            raise ValueError('Cannot find more than 100 entries per page')

        ns, ds, form = self.edgar_scrape(num_entries)
        d = {'names': ns, 'filing date': ds}

        if num_pages > 1:
            for i in range(num_pages - 1):
                self.set_page(i + 2)
                ns2, ds2, _ = self.edgar_scrape(num_entries)
                d['names'] += ns2
                d['filing date'] += ds2

        df = pd.DataFrame(data=d)
        return df
    def add_forms(self, forms_list):
        """
        Updates query to include certain form types, and modifies url_info as such

        Args:
            forms_list (list): List of strings for forms to search for

        Returns:
            tuple: A tuple of the string for the new url, and the appended forms
        """
        i = self.url_info.find('filter_forms=')
        pstr = ''
        pstr += forms_list[0]

        for form in forms_list[1:]:
            pstr += '%252C'
            pstr += form

        self.url_info = self.url_info[:i] + 'forms=' + pstr
        return (self.url_info, pstr)

    def get_anums(self, cik, num):
        """
        Scrapes accession numbers from a page when given a cik

        Args:
            cik (int): The cik id for a company
            num (int): The number of entities to be scraped from a page

        Returns:
            list: a list of accession numbers relating to files for a cik
        """
        annual10k_url = "https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK={}&type=10-k".format(cik)
        doc_links = []
        driver = webdriver.Chrome('chromedriver')
        driver.get(annual10k_url)
        time.sleep(10)
        page = driver.page_source
        soup = bs(page, "html.parser")

        i = 0
        for item in soup.findAll(id='interactiveDataBtn'):
            if i == num:
                break
            else:
                s_ind = str(item).find('accession_number=')
                e_ind = str(item).find('&amp;xbrl')
                doc_links.append(str(item)[s_ind + len('accession_number='): e_ind].replace('-', ''))
                i += 1

        driver.quit()
        return doc_links

    def get_refs(self, cik, num):
        """
        Finds the reference numbers for filings and company name for a given cik

        Args:
            cik (int): The cik id for a company
            num (int): The number of entities to be scraped from a page

        Returns:
            tuple: A list of filing reference numbers and the name of the company associated with a cik
        """
        edgar_url = "https://www.sec.gov/edgar/search/#/ciks={}&forms=10-K".format(cik)
        refs = []
        driver = webdriver.Chrome('chromedriver')
        driver.get(edgar_url)
        time.sleep(10)
        page = driver.page_source
        soup = bs(page, "html.parser")
        comp_name = ''

        i = 0
        for item in soup.findAll(attrs={"class": "preview-file"}):
            if i == num:
                break
            else:
                refs.append(item['data-file-name'])
                i += 1

        i2 = 0
        for item in soup.findAll(attrs={'class': 'entity-name'}):
            if i2 == num:
                break
            elif item.text != 'Filing entity/person':
                comp_name = item.text
                i2 += 1

        driver.quit()
        return refs, comp_name

    def create_links(self, cik, num):
        """
        Finds the links for xbrl versions of filings for a given cik

        Args:
            cik (int): The cik id for a company
            num (int): The number of entities to be scraped from a page

        Returns:
            tuple: A list of links to filings and the name of the company associated with a cik
        """
        anum_list = self.get_anums(cik, num)
        reflist, c_name = self.get_refs(cik, num)
        link_list = []

        for i in range(num):
            a = anum_list[i]
            r = reflist[i]
            link_list.append('https://www.sec.gov/ix?doc=/Archives/edgar/data/{}/{}/{}'.format(cik, a, r))

        return link_list, c_name

    def scrape_xbrl(self, link):
        """
        Finds the account value for elements like total assets, liabilities, and net income in a filing

        Args:
            link (str): link to an xbrl for a 10-K filing

        Returns:
            dict: A dictionary of financial statement elements such as total assets, liabilities, and net income

        Raises:
            Exception: If the scraped Assets, Liabilities, and Equity fail the Accounting Equation
        """
        Financials = {}
        driver = webdriver.Chrome('chromedriver')
        driver.get(link)
        time.sleep(10)
        page = driver.page_source
        soup = bs(page, "html.parser")

        registrant = soup.find(attrs={'name': "dei:EntityRegistrantName"}).text
        TA_s = soup.find(attrs={'name': "us-gaap:Assets"}).text
        TL_s = soup.find(attrs={'name': "us-gaap:Liabilities"}).text
        NI_s = soup.find(attrs={'name': "us-gaap:NetIncomeLoss"}).text
        TE_s = soup.find(attrs={'name': "us-gaap:StockholdersEquityIncludingPortionAttributableToNoncontrollingInterest"}).text
        CA_s = soup.find(attrs={'name': "us-gaap:AssetsCurrent"}).text
        CL_s = soup.find(attrs={'name': "us-gaap:LiabilitiesCurrent"}).text
        LongTerm_Debt_s = soup.find(attrs={'name': "us-gaap:LongTermDebtNoncurrent"}).text
        Current_Debt_s = soup.find(attrs={'name': "us-gaap:DebtCurrent"}).text
        Inventory_s = soup.find(attrs={'name': "us-gaap:InventoryNet"}).text

        # Parenthesised amounts denote negative values in the filing
        if '(' in TE_s and ')' in TE_s:
            TE_s = '-' + TE_s.replace('(', '').replace(')', '')
        if '(' in NI_s and ')' in NI_s:
            NI_s = '-' + NI_s.replace('(', '').replace(')', '')

        Financials['Total Assets'] = float(TA_s.replace(',', ''))
        Financials['Total Liabilities'] = float(TL_s.replace(',', ''))
        Financials['Total Equity'] = float(TE_s.replace(',', ''))
        Financials['Current Assets'] = float(CA_s.replace(',', ''))
        Financials['Current Liabilities'] = float(CL_s.replace(',', ''))
        Financials['Net Income'] = float(NI_s.replace(',', ''))
        Financials['Long Term Debt'] = float(LongTerm_Debt_s.replace(',', ''))
        Financials['Current Debt'] = float(Current_Debt_s.replace(',', ''))
        Financials['Inventory'] = float(Inventory_s.replace(',', ''))
        Financials['Registrant'] = registrant

        if not math.isclose(Financials['Total Assets'] - Financials['Total Liabilities'], Financials['Total Equity'], abs_tol=1):
            driver.quit()
            raise Exception("Total Assets and Liabilities not consistent with equity")

        driver.quit()
        return Financials

    def calculate_ratios(self, financials):
        """
        Calculates financial ratios relevant to balance sheet and income statements

        Args:
            financials (dict): a dictionary containing Asset and Liability information, as formatted by scrape_xbrl()

        Returns:
            dict: A dictionary of financial ratios relating to profitability, liquidity, and leverage

        Raises:
            ValueError: If `financials` is improperly formatted and doesn't contain the requisite values
        """
        ratios = {}
        info = financials
        TA, TL, NI, TE, CA, CL, LtD, StD, Ivt = (
            info['Total Assets'],
            info['Total Liabilities'],
            info['Net Income'],
            info['Total Equity'],
            info['Current Assets'],
            info['Current Liabilities'],
            info['Long Term Debt'],
            info['Current Debt'],
            info['Inventory'],
        )
        values = (TA, TL, NI, TE, CA, CL, LtD, StD, Ivt)
        for v in values:
            if not isinstance(v, float) and not isinstance(v, int):
                print(type(v))
                raise ValueError("Improper formatting of financial values")

        if TE != 0:
            ratios['D/E'] = TL * 1.0 / TE
            ratios['ROE'] = NI * 1.0 / TE
        if CL != 0:
            ratios['Working Capital'] = CA * 1.0 / CL
            ratios['Quick'] = (CA - Ivt) * 1.0 / CL
        if TA != 0:
            ratios['TD/TA'] = (LtD + StD) * 1.0 / TA
            ratios['ROA'] = (NI * 1.0) / TA

        return ratios

    def summarize_10k(self, link, flag='raw'):
        """
        Creates a dataframe that summarizes information scraped from the 10-K filing

        Args:
            link (str): link to an xbrl for a 10-K filing
            flag (str): str indicating summary type, 'raw', 'liquidity', 'profitability', etc

        Returns:
            pandas.DataFrame: A dataframe of the requested accounts or ratios
        """
        finances = self.scrape_xbrl(link)
        ratios = self.calculate_ratios(finances)

        if flag == 'raw':
            raw_df = pd.DataFrame(finances.items(), columns=['Account', 'Amount'])
            return raw_df
        elif flag == 'ratios':
            ratio_df = pd.DataFrame(ratios.items(), columns=['Ratio', 'Value'])
            return ratio_df
        elif flag == 'leverage':
            d2e = ratios['D/E']
            tdta = ratios['TD/TA']
            data = [['D/E', d2e], ['TD/TA', tdta]]
            lev_df = pd.DataFrame(data, columns=['Ratio', 'Value'])
            return lev_df
        elif flag == 'profitability':
            roa = ratios['ROA']
            roe = ratios['ROE']
            data = [['ROE', roe], ['ROA', roa]]
            profit_df = pd.DataFrame(data, columns=['Ratio', 'Value'])
            return profit_df
        elif flag == 'liquidity':
            quick = ratios['Quick']
            wcap = ratios['Working Capital']
            data = [['Quick', quick], ['Working Capital', wcap]]
            liq_df = pd.DataFrame(data, columns=['Ratio', 'Value'])
            return liq_df
        elif flag == 'totals':
            TA = finances['Total Assets']
            TL = finances['Total Liabilities']
            TE = finances['Total Equity']
            data = [['Total Assets', TA], ['Total Liabilities', TL], ['Total Equity', TE]]
            total_df = pd.DataFrame(data, columns=['Account', 'Amount'])
            return total_df
        elif flag == 'current':
            CA = finances['Current Assets']
            CL = finances['Current Liabilities']
            CD = finances['Current Debt']
            data = [['Current Assets', CA], ['Current Liabilities', CL], ['Current Debt', CD]]
            curr_df = pd.DataFrame(data, columns=['Account', 'Amount'])
            return curr_df
        elif flag == 'debt':
            LtD = finances['Long Term Debt']
            CD = finances['Current Debt']
            data = [['Long Term Debt', LtD], ['Current Debt', CD]]
            debt_df = pd.DataFrame(data, columns=['Account', 'Amount'])
            return debt_df

        return pd.DataFrame()
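
# Example usage (a sketch: requires Google Chrome plus a matching chromedriver
# on PATH, and live network access to EDGAR, so results depend on the site at
# run time; the dates below are arbitrary):
if __name__ == "__main__":
    s = scrape()
    s.set_search_date("2022-01-01", "2022-06-30")
    recent_filings = s.generate_df(num_entries=10, num_pages=1)
    print(recent_filings.head())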
PypiClean
/Cerealizer-0.8.3.tar.gz/Cerealizer-0.8.3/README.rst
Cerealizer
%%%%%%%%%%

Cerealizer is a secure pickle-like module for Python 2 and 3.

It supports basic types (int, string, unicode, tuple, list, dict, set, ...),
old and new-style classes (you need to register each class for security),
object cycles, and it can be extended to support C-defined types.

Cerealizer is available under the Python licence.


Requirements
============

- Python >= 2.4 (http://www.python.org)


Installation
============

Run in a console::

  python ./setup.py build

and then as root::

  python ./setup.py install


Documentation
=============

Use PyDoc.


Contact and links
=================

Jiba -- Jean-Baptiste LAMY -- <jibalamy *@* free *.* fr>

Cerealizer website: http://www.lesfleursdunormal.fr/static/informatique/cerealizer/index_en.html

Cerealizer on BitBucket (development repository): https://bitbucket.org/jibalamy/cerealizer
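
Example
=======

A minimal round-trip, as a sketch (the ``Point`` class here is illustrative;
any class must be registered before it can be serialized)::

  import cerealizer

  class Point(object):
      def __init__(self, x=0, y=0):
          self.x = x
          self.y = y

  cerealizer.register(Point)

  data  = cerealizer.dumps(Point(1, 2))  # serialize to a string payload
  point = cerealizer.loads(data)         # rebuild the object safely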
PypiClean
/MindsDB-23.8.3.0.tar.gz/MindsDB-23.8.3.0/mindsdb/api/mongo/responders/delete.py
from mindsdb.api.mongo.classes import Responder
import mindsdb.api.mongo.functions as helpers

from mindsdb_sql.parser.ast import Delete, Identifier, BinaryOperation, Constant
from mindsdb_sql.parser.dialects.mindsdb import DropPredictor, DropJob, DropMLEngine

from mindsdb.interfaces.jobs.jobs_controller import JobsController
from mindsdb.api.mongo.classes.query_sql import run_sql_command


class Response(Responder):
    when = {'delete': helpers.is_true}

    def result(self, query, request_env, mindsdb_env, session):
        try:
            res = self._result(query, request_env, mindsdb_env)
        except Exception as e:
            res = {
                'n': 0,
                'writeErrors': [{
                    'index': 0,
                    'code': 0,
                    'errmsg': str(e)
                }],
                'ok': 1
            }
        return res

    def _result(self, query, request_env, mindsdb_env):
        table = query['delete']
        if table == 'predictors':
            table = 'models'
        project_name = request_env['database']

        allowed_tables = ('models_versions', 'models', 'jobs', 'ml_engines')
        if table not in allowed_tables:
            raise Exception(f"Only removing from these collections is supported: {', '.join(allowed_tables)}")

        if len(query['deletes']) != 1:
            raise Exception("There should be only one argument in the REMOVE operation")

        obj_name, obj_id = None, None

        delete_filter = query['deletes'][0]['q']
        if '_id' in delete_filter:
            # get name of object
            obj_id = helpers.objectid_to_int(delete_filter['_id'])
        if 'name' in delete_filter:
            obj_name = delete_filter['name']

        version = None
        if 'version' in delete_filter:
            version = delete_filter['version']

        if obj_name is None and obj_id is None:
            raise Exception("Can't find object to delete, use filter by name or _id")

        if obj_name is None:
            if table in ('models', 'models_versions'):
                model_id = obj_id >> 20

                models = mindsdb_env['model_controller'].get_models(
                    ml_handler_name=None,
                    project_name=project_name
                )
                for model in models:
                    if model['id'] == model_id:
                        obj_name = model['name']
                        break
                if obj_name is None:
                    raise Exception("Can't find model by _id")
            elif table == 'jobs':
                jobs_controller = JobsController()
                for job in jobs_controller.get_list(project_name):
                    if job['id'] == obj_id:
                        obj_name = job['name']
                        break

        # delete model
        if table == 'models':
            ast_query = DropPredictor(Identifier(parts=[project_name, obj_name]))
            run_sql_command(request_env, ast_query)

        # delete model version
        elif table == 'models_versions':
            if version is None:
                if obj_id is None:
                    raise Exception("Can't find object version")
                version = obj_id & (2**20 - 1)

            ast_query = Delete(
                table=Identifier(parts=[project_name, 'models_versions']),
                where=BinaryOperation(op='and', args=[
                    BinaryOperation(op='=', args=[Identifier('name'), Constant(obj_name)]),
                    BinaryOperation(op='=', args=[Identifier('version'), Constant(version)])
                ])
            )
            run_sql_command(request_env, ast_query)

        elif table == 'jobs':
            ast_query = DropJob(Identifier(parts=[project_name, obj_name]))
            run_sql_command(request_env, ast_query)

        elif table == 'ml_engines':
            ast_query = DropMLEngine(Identifier(parts=[obj_name]))
            run_sql_command(request_env, ast_query)

        return {
            'n': 1,
            'ok': 1
        }


responder = Response()
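
# Illustrative only (not part of the original module): the MongoDB command
# document this responder handles. A client call such as
# db.models.deleteOne({"name": "my_model"}) arrives shaped roughly like the
# dict below, and _result() translates it into a DropPredictor query.
example_delete_command = {
    "delete": "models",
    "deletes": [{"q": {"name": "my_model"}}],
}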
PypiClean
/CrApsim-0.0.1a0.tar.gz/CrApsim-0.0.1a0/README.rst
CrApsim
===============================

version number: 0.0.1
author: Torsten Zielke

Overview
--------

CrApsim creates Apsim project files from given parameters and input files.

Installation / Usage
--------------------

To install, use pip:

    $ pip install CrApsim

Or clone the repo:

    $ git clone TBD
    $ python setup.py install

Contributing
------------

- Dr. Munir Hoffmann [TROPAGS](https://www.uni-goettingen.de/en/team/541835.html)
- Dr. Marian Koch [TROPAGS](https://www.uni-goettingen.de/en/team/541835.html)

Example
-------

TBD

Links
-----

- [Apsim](https://apsim.info), the Agricultural Production Systems sIMulator, is the model this whole project is based on.
PypiClean
/ImmuneDB-0.29.11.tar.gz/ImmuneDB-0.29.11/README.md
ImmuneDB ======== [![Build Status](https://img.shields.io/travis/arosenfeld/immunedb/master.svg)](https://travis-ci.org/arosenfeld/immunedb) [![Documentation Status](https://readthedocs.org/projects/immunedb/badge/?version=latest)](https://immunedb.readthedocs.io/en/latest/?badge=latest) [![codecov](https://codecov.io/gh/arosenfeld/immunedb/branch/master/graph/badge.svg)](https://codecov.io/gh/arosenfeld/immunedb) [![PyPI](https://img.shields.io/pypi/v/immunedb.svg)](https://pypi.python.org/pypi/ImmuneDB) [![Docker Pulls](https://img.shields.io/docker/pulls/arosenfeld/immunedb.svg)](https://hub.docker.com/r/arosenfeld/immunedb) [![](https://img.shields.io/static/v1?label=AIRR-C%20sw-tools%20v1&message=compliant&color=008AFF&labelColor=000000&style=plastic)](https://docs.airr-community.org/en/stable/swtools/airr_swtools_standard.html) ImmuneDB is a Python module that facilitates efficient storage and analysis of high-throughput B- and T-cell sequence data. # Documentation Installation and usage instructions can be found at [immunedb.com](http://immunedb.com) # Support Basic questions regarding installing and running ImmuneDB can be emailed to [email protected]. Bugs and feature requests should be submitted as [Issues](https://github.com/arosenfeld/immunedb/issues). I try to respond to both within 2 business days if possible. # Citing If you use ImmuneDB, please cite the tool as: Rosenfeld, A. M., Meng, W., Luning Prak, E. T., Hershberg, U., **ImmuneDB, a Novel Tool for the Analysis, Storage, and Dissemination of Immune Repertoire Sequencing Data**. Frontiers in Immunology **9** (2018). ImmuneDB was originally announced previously in: Rosenfeld, A. M., Meng, W., Luning Prak, E. T., Hershberg, U., **ImmuneDB: a system for the analysis and exploration of high-throughput adaptive immune receptor sequencing data**, Bioinformatics **33** (2016), no. 2, 292–293.
PypiClean
/COAsT-3.2.1.tar.gz/COAsT-3.2.1/coast/data/gridded.py
import os.path as path_lib
import re
import warnings

# from dask import delayed, compute, visualize
# import graphviz
import gsw
import numpy as np
import xarray as xr

from .._utils import general_utils, stats_util
from .coast import Coast
from .config_parser import ConfigParser
from .._utils.logging_util import get_slug, debug, info, warn, error, warning
import pandas as pd


class Gridded(Coast):
    """
    Class for loading and manipulating gridded model output (e.g. NEMO) on a
    chosen grid (t, u, v, f or w), together with the matching domain file.

    kwargs -- define additional keyword arguments for the domain file. E.g. ln_sco=1
    if using s-coordinates in an old domain file that does not carry this flag.
    """

    def __init__(
        self,
        fn_data=None,
        fn_domain=None,  # TODO Super init not called + add a docstring
        multiple=False,
        config: str = " ",
        workers=2,
        threads=2,
        memory_limit_per_worker="2GB",
        **kwargs,
    ):
        debug(f"Creating new {get_slug(self)}")
        self.dataset = xr.Dataset()
        self.grid_ref = None
        self.domain_loaded = False
        self.fn_data = fn_data
        self.fn_domain = fn_domain
        self.grid_vars = None

        if path_lib.isfile(config):
            self.config = ConfigParser(config).config
            if self.config.chunks:
                self._setup_grid_obj(self.config.chunks, multiple, **kwargs)
            else:
                self._setup_grid_obj(None, multiple, **kwargs)
        else:
            # allow for usage without a config file; this will be limited and
            # doesn't bring the full COAsT features
            debug("Config file expected. Limited functionality without config file")
            if self.fn_data is not None:
                self.load(self.fn_data, None, multiple)
            if self.fn_domain is not None:
                self.filename_domain = self.fn_domain
                dataset_domain = self.load_domain(self.fn_domain, None)
                self.dataset["domain"] = dataset_domain

    def _setup_grid_obj(self, chunks, multiple, **kwargs):
        """This is a helper method to reduce the size of def __init__

        Args:
            chunks: This is a setting for xarray as to whether dask (parallel processing) should be on and how it works
            multiple: flag to tell if we are loading one or more files
            **kwargs: passed directly to the loaded xarray dataset
                lims = [x_dim index 1, x_dim index 2, y_dim index 1, y_dim index 2] - subset region defined from lower left to upper right corners
                calculate_bathymetry [boolean]: default False
        """
        self.set_grid_vars()
        self.set_dimension_mapping()
        self.set_variable_mapping()

        lims = kwargs.get("lims", [])
        if self.fn_data is not None:
            self.load(self.fn_data, chunks, multiple)
        self.set_dimension_names(self.config.dataset.dimension_map)
        self.set_variable_names(self.config.dataset.variable_map)
        self.dataset = self.spatial_subset(self.dataset, lims)  # Trim data size if indices specified

        if self.fn_domain is None:
            self.filename_domain = ""  # empty store for domain filename
            warn("No NEMO domain specified, only limited functionality will be available")
        else:
            self.filename_domain = self.fn_domain  # store domain filename
            dataset_domain = self.load_domain(self.fn_domain, chunks)

            # Define extra domain attributes using kwargs dictionary
            # This is a bit of a placeholder. Some domain/nemo files will have missing variables
            for key, value in kwargs.items():
                dataset_domain[key] = value

            dataset_domain = self.spatial_subset(dataset_domain, lims)  # Trim domain size if indices specified
            if self.fn_data is not None:
                dataset_domain = self.trim_domain_size(dataset_domain)  # Trim domain size if self.data is smaller
            self.set_timezero_depths(
                dataset_domain, **kwargs
            )  # THIS ADDS TO dataset_domain. Should it be 'return'ed (as in trim_domain_size) or is implicit OK?
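            # With depths computed, fold the domain's coordinate and metric
            # variables into the main dataset so the data and its grid
            # description travel together.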
self.merge_domain_into_dataset(dataset_domain) debug(f"Initialised {get_slug(self)}") def make_lonLat_2d(self): """Expand 1D latitude and longitude variables to 2D.""" # jth is there a lazy way of doing this? if len(self.dataset.longitude.shape) == 1: lat = self.dataset.latitude.values lon = self.dataset.longitude.values nx = self.dataset.longitude.size ny = self.dataset.latitude.size self.dataset["latitude"] = xr.DataArray(np.repeat(lat[:, np.newaxis], nx, axis=1), dims=["y_dim", "x_dim"]) self.dataset["longitude"] = xr.DataArray(np.repeat(lon[np.newaxis, :], ny, axis=0), dims=["y_dim", "x_dim"]) def set_grid_vars(self): """Define the variables to map from the domain file to the NEMO obj""" # Define grid specific variables to pull across # for key, value in self.config.grid_ref.items(): self.grid_ref = key self.grid_vars = value # TODO Add parameter type hints and a docstring def load_domain(self, fn_domain, chunks): # TODO Do something with this unused parameter or remove it """Loads domain file and renames dimensions with dim_mapping_domain""" # Load xarray dataset info(f'Loading domain: "{fn_domain}"') dataset_domain = xr.open_dataset(fn_domain) self.domain_loaded = True # Rename dimensions for key, value in self.config.domain.dimension_map.items(): mapping = {key: value} try: dataset_domain = dataset_domain.rename_dims(mapping) except ValueError as err: warning( f"{get_slug(self)}: Problem renaming domain dimension from {get_slug(self.dataset)}: {key} -> {value}." f"{chr(10)}Error message of '{err}'" ) # Rename domain variables. for key, value in self.config.domain.variable_map.items(): mapping = {key: value} try: dataset_domain = dataset_domain.rename_vars(mapping) except ValueError as err: warning( f"{get_slug(self)}: Problem renaming domain variable from {get_slug(self.dataset)}: {key} -> {value}." f"{chr(10)}Error message of '{err}'" ) return dataset_domain def merge_domain_into_dataset(self, dataset_domain): """Merge domain dataset variables into self.dataset, using grid_ref""" debug(f"Merging {get_slug(dataset_domain)} into {get_slug(self)}") # Define grid independent variables to pull across all_vars = self.grid_vars + self.config.code_processing.not_grid_variables # Trim domain DataArray area if necessary. 
self.copy_domain_vars_to_dataset(dataset_domain, self.grid_vars) # Reset & set specified coordinates self.dataset = self.dataset.reset_coords() for var in self.config.dataset.coord_var: try: self.dataset = self.dataset.set_coords(var) except ValueError as err: warning(f"Issue with settings coordinates using value {var}.{chr(10)}Error message of {err}") # Delete specified variables for var in self.config.code_processing.delete_variables: try: self.dataset = self.dataset.drop_vars(var) except ValueError as err: warning(f"Issue with dropping variable {var}.{chr(10)}Error message of {err}") def __getitem__(self, name: str): return self.dataset[name] def set_grid_ref_attr(self): # possible not used debug(f"{get_slug(self)} grid_ref_attr set to {self.grid_ref_attr_mapping}") self.grid_ref_attr_mapping = { "temperature": "t-grid", "coast_name_for_u_velocity": "u-grid", "coast_name_for_v_velocity": "v-grid", "coast_name_for_w_velocity": "w-grid", "coast_name_for_vorticity": "f-grid", } def get_contour_complex(self, var, points_x, points_y, points_z, tolerance: int = 0.2): debug(f"Fetching contour complex from {get_slug(self)}") smaller = self.dataset[var].sel(z=points_z, x=points_x, y=points_y, method="nearest", tolerance=tolerance) return smaller def set_timezero_depths(self, dataset_domain, **kwargs): """ Calculates the depths at time zero (from the domain_cfg input file) for the appropriate grid. The depths are assigned to domain_dataset.depth_0 Args: dataset_domain: a complex data object. calculate_bathymetry: Flag that will either calculate bathymetry (true) or load it from dataset_domain file (false). """ debug(f"Setting timezero depths for {get_slug(self)} with {get_slug(dataset_domain)}") # keyword to allow calcution of bathymetry from scale factors # All bathymetry should now be mapped to bathy_metry calculate_bathymetry = kwargs.get("calculate_bathymetry", False) try: if calculate_bathymetry: # calculate bathymetry from scale factors bathymetry, mask, time_mask = self.calc_bathymetry(dataset_domain) else: bathymetry = dataset_domain.bathy_metry.squeeze() except AttributeError as err: bathymetry = xr.zeros_like(dataset_domain.e1.squeeze()) ( warnings.warn( f"The model domain loaded, '{self.filename_domain}', does not contain the " "bathy_metry' variable. This will result in the " "NEMO.dataset.bathymetry variable being set to zero, which " "may result in unexpected behaviour from routines that require " "this variable." 
) ) debug( f"The bathy_metry variable was missing from the domain_cfg for " f"{get_slug(self)} with {get_slug(dataset_domain)}" f"{chr(10)}Error message of {err}" ) try: if self.grid_ref == "t-grid": e3w_0 = np.squeeze(dataset_domain.e3w_0.values) depth_0 = np.zeros_like(e3w_0) depth_0[0, :, :] = 0.5 * e3w_0[0, :, :] depth_0[1:, :, :] = depth_0[0, :, :] + np.cumsum(e3w_0[1:, :, :], axis=0) elif self.grid_ref == "w-grid": e3t_0 = np.squeeze(dataset_domain.e3t_0.values) depth_0 = np.zeros_like(e3t_0) depth_0[0, :, :] = 0.0 depth_0[1:, :, :] = np.cumsum(e3t_0, axis=0)[:-1, :, :] elif self.grid_ref == "u-grid": e3w_0 = dataset_domain.e3w_0.values.squeeze() e3w_0_on_u = 0.5 * (e3w_0[:, :, :-1] + e3w_0[:, :, 1:]) depth_0 = np.zeros_like(e3w_0) depth_0[0, :, :-1] = 0.5 * e3w_0_on_u[0, :, :] depth_0[1:, :, :-1] = depth_0[0, :, :-1] + np.cumsum(e3w_0_on_u[1:, :, :], axis=0) if not calculate_bathymetry: # jth only valid for pure sigma bathymetry[:, :-1] = 0.5 * (bathymetry[:, :-1] + bathymetry[:, 1:]) elif self.grid_ref == "v-grid": e3w_0 = dataset_domain.e3w_0.values.squeeze() e3w_0_on_v = 0.5 * (e3w_0[:, :-1, :] + e3w_0[:, 1:, :]) depth_0 = np.zeros_like(e3w_0) depth_0[0, :-1, :] = 0.5 * e3w_0_on_v[0, :, :] depth_0[1:, :-1, :] = depth_0[0, :-1, :] + np.cumsum(e3w_0_on_v[1:, :, :], axis=0) if not calculate_bathymetry: bathymetry[:-1, :] = 0.5 * (bathymetry[:-1, :] + bathymetry[1:, :]) elif self.grid_ref == "f-grid": e3w_0 = dataset_domain.e3w_0.values.squeeze() e3w_0_on_f = 0.25 * (e3w_0[:, :-1, :-1] + e3w_0[:, :-1, 1:] + e3w_0[:, 1:, :-1] + e3w_0[:, 1:, 1:]) depth_0 = np.zeros_like(e3w_0) depth_0[0, :-1, :-1] = 0.5 * e3w_0_on_f[0, :, :] depth_0[1:, :-1, :-1] = depth_0[0, :-1, :-1] + np.cumsum(e3w_0_on_f[1:, :, :], axis=0) if not calculate_bathymetry: bathymetry[:-1, :-1] = 0.25 * ( bathymetry[:-1, :-1] + bathymetry[:-1, 1:] + bathymetry[1:, :-1] + bathymetry[1:, 1:] ) else: raise ValueError(str(self) + ": " + self.grid_ref + " depth calculation not implemented") # Write the depth_0 variable to the domain_dataset DataSet, with grid type dataset_domain[f"depth{self.grid_ref.replace('-grid', '')}_0"] = xr.DataArray( depth_0, dims=["z_dim", "y_dim", "x_dim"], attrs={"units": "m", "standard_name": "Depth at time zero on the {}".format(self.grid_ref)}, ) self.dataset["bathymetry"] = xr.DataArray( bathymetry, dims=["y_dim", "x_dim"], attrs={ "units": "m", "standard_name": "bathymetry", "description": "depth of last wet w-level on the horizontal {}".format(self.grid_ref), }, ) except ValueError as err: print(err) error(err) def calc_bathymetry(self, dataset_domain): """ NEMO approach to defining bathymetry by summing scale factors at various grid locations. Works with z-coordinates on u- and v- faces where bathymetry is defined at the top of the cliff, not at the bottom Args: dataset_domain: a complex data object. 
""" # jth not set for lazy loading e3_0 = dataset_domain.e3_0.squeeze() time_mask = xr.zeros_like(e3_0) bottom_level = dataset_domain.bottom_level.values.squeeze() debug(f"Bottom_level type {type(bottom_level)}") top_level = dataset_domain.top_level.values.squeeze() bathymetry = np.zeros_like(bottom_level) # np.array([[]]) mask = None for k in range(1, e3_0.shape[0] + 1): time_mask[k - 1, :, :] = np.logical_and(k <= bottom_level, k >= top_level) if self.grid_ref == "t-grid": e3t = dataset_domain.e3_0.squeeze() bathymetry[:, :] = np.sum(e3t.values * time_mask.values, axis=0) elif self.grid_ref == "w-grid": e3t = dataset_domain.e3t_0.squeeze() bathymetry[:, :] = np.sum(e3t.values * time_mask.values, axis=0) elif self.grid_ref == "u-grid": e3u = dataset_domain.e3u_0.squeeze() mask = xr.zeros_like(e3u) mask[:, :, :-1] = time_mask[:, :, :-1] * time_mask[:, :, 1:] bathymetry[:, :] = np.sum(e3u.values * mask.values, axis=0) elif self.grid_ref == "v-grid": e3v = dataset_domain.e3v_0.squeeze() mask = xr.zeros_like(e3v) mask[:, :-1, :] = time_mask[:, :-1, :] * time_mask[:, 1:, :] bathymetry[:, :] = np.sum(e3v.values * mask.values, axis=0) elif self.grid_ref == "f-grid": e3f = dataset_domain.e3_0.squeeze() mask = xr.zeros_like(e3f) mask[:, :-1, :-1] = ( time_mask[:, :-1, :-1] * time_mask[:, :-1, 1:] * time_mask[:, 1:, :-1] * time_mask[:, 1:, 1:] ) bathymetry[:, :] = np.sum(e3f.values * mask.values, axis=0) return bathymetry, mask, time_mask # Add subset method to NEMO class def subset_indices(self, *, start: tuple, end: tuple) -> tuple: """ based on transect_indices, this method looks to return all indices between the given points. This results in a 'box' (Quadrilateral) of indices. consequently the returned lists may have different lengths. :param start: A lat/lon pair :param end: A lat/lon pair :return: list of y indices, list of x indices, """ debug(f"Subsetting {get_slug(self)} indices from {start} to {end}") [j1, i1] = self.find_j_i(lat=start[0], lon=start[1]) # lat , lon [j2, i2] = self.find_j_i(lat=end[0], lon=end[1]) # lat , lon return list(np.arange(j1, j2 + 1)), list(np.arange(i1, i2 + 1)) def find_j_i(self, *, lat: float, lon: float): """ A routine to find the nearest y x coordinates for a given latitude and longitude Usage: [y,x] = find_j_i(lat=49, lon=-12) :param lat: latitude :param lon: longitude :return: the y and x coordinates for the NEMO object's grid_ref, i.e. t,u,v,f,w. """ debug(f"Finding j,i for {lat},{lon} from {get_slug(self)}") dist2 = np.square(self.dataset.latitude - lat) + np.square(self.dataset.longitude - lon) [y, x] = np.unravel_index(np.argmin(dist2.data), dist2.shape) return [y, x] def find_j_i_list(self, *, lat: float, lon: float, n_nn=1): """ A routine to find the nearest y x coordinates for a list of latitude and longitude values Usage: [y,x] = find_j_i(lat=[49,50,51], lon=[-12,-11,10]) :param lat: latitude :param lon: longitude :optional n_nn=1 number of nearest neighbours :return: the j, i coordinates for the NEMO object's grid_ref, i.e. t,u,v,f,w. 
and a distance measure """ grid_lon = self.dataset.longitude.values grid_lat = self.dataset.latitude.values # efficient nearest neighbour search import scipy.spatial as sp XY = np.dstack([grid_lat.ravel(), grid_lon.ravel()])[0] XYp = np.dstack([lat, lon])[0] mytree = sp.cKDTree(XY) dist, indx = mytree.query(XYp, n_nn) I = np.nonzero(np.isnan(lon)) indx[I] = 0 i, j = np.unravel_index(indx, grid_lon.shape) return [i, j, dist] def find_j_i_domain(self, *, lat: float, lon: float, dataset_domain: xr.DataArray, KDTree=False): """ A routine to find the nearest y x coordinates for a given latitude and longitude Usage: [y,x] = find_j_i_domain(lat=49, lon=-12, dataset_domain=dataset_domain) :param lat: latitude :param lon: longitude :param dataset_domain: dataset domain :return: the y and x coordinates for the grid_ref variable within the domain file """ debug(f"Finding j,i domain for {lat},{lon} from {get_slug(self)} using {get_slug(dataset_domain)}") internal_lat = dataset_domain["latitude"] # [f"gphi{self.grid_ref.replace('-grid','')}"] internal_lon = dataset_domain["longitude"] # [f"glam{self.grid_ref.replace('-grid','')}"] dist2 = np.square(internal_lat - lat) + np.square(internal_lon - lon) [_, y, x] = np.unravel_index(np.argmin(dist2.data), dist2.shape) return [y, x] def transect_indices(self, start: tuple, end: tuple) -> tuple: """ This method returns the indices of a simple straight line transect between two lat lon points defined on the NEMO object's grid_ref, i.e. t,u,v,f,w. :type start: tuple A lat/lon pair :type end: tuple A lat/lon pair :return: array of y indices, array of x indices, number of indices in transect """ debug(f"Fetching transect indices for {start} to {end} from {get_slug(self)}") [j1, i1] = self.find_j_i(lat=start[0], lon=start[1]) # lat , lon [j2, i2] = self.find_j_i(lat=end[0], lon=end[1]) # lat , lon line_length = max(np.abs(j2 - j1), np.abs(i2 - i1)) + 1 jj1 = [int(jj) for jj in np.round(np.linspace(j1, j2, num=line_length))] ii1 = [int(ii) for ii in np.round(np.linspace(i1, i2, num=line_length))] return jj1, ii1, line_length @staticmethod def interpolate_in_space(model_array, new_lon, new_lat, mask=None): """ Interpolates a provided xarray.DataArray in space to new longitudes and latitudes using a nearest neighbour method (BallTree). Example Usage ---------- # Get an interpolated DataArray for temperature onto two locations interpolated = nemo.interpolate_in_space(nemo.dataset.votemper, [0,1], [45,46]) Parameters ---------- model_array (xr.DataArray): Model variable DataArray to interpolate new_lons (1Darray): Array of longitudes (degrees) to compare with model new_lats (1Darray): Array of latitudes (degrees) to compare with model mask (2D array): Mask array. Where True (or 1), elements of array will not be included. For example, use to mask out land in case it ends up as the nearest point. 
Returns ------- Interpolated DataArray """ debug(f"Interpolating {get_slug(model_array)} in space with nearest neighbour") # Get nearest indices ind_x, ind_y = general_utils.nearest_indices_2d( model_array.longitude, model_array.latitude, new_lon, new_lat, mask=mask ) # Geographical interpolation (using BallTree indices) interpolated = model_array.isel(x_dim=ind_x, y_dim=ind_y) if "dim_0" in interpolated.dims: interpolated = interpolated.rename({"dim_0": "interp_dim"}) return interpolated @staticmethod def interpolate_in_time(model_array, new_times, interp_method="nearest", extrapolate=True): """ Interpolates a provided xarray.DataArray in time to new python datetimes using a specified scipy.interpolate method. Example Useage ---------- # Get an interpolated DataArray for temperature onto altimetry times new_times = altimetry.dataset.time interpolated = nemo.interpolate_in_space(nemo.dataset.votemper, new_times) Parameters ---------- model_array (xr.DataArray): Model variable DataArray to interpolate new_times (array): New times to interpolate to (array of datetimes) interp_method (str): Interpolation method Returns ------- Interpolated DataArray """ debug(f'Interpolating {get_slug(model_array)} in time with method "{interp_method}"') # Time interpolation interpolated = model_array.swap_dims({"t_dim": "time"}) if extrapolate: interpolated = interpolated.interp( time=new_times, method=interp_method, kwargs={"fill_value": "extrapolate"} ) else: interpolated = interpolated.interp(time=new_times, method=interp_method) # interpolated = interpolated.swap_dims({'time':'t_dim'}) # TODO Do something with this or delete it return interpolated def construct_density( self, eos="EOS10", rhobar=False, Zd_mask=[], CT_AS=False, pot_dens=False, Tbar=True, Sbar=True ): """ Constructs the in-situ density using the salinity, temperture and depth_0 fields and adds a density attribute to the t-grid dataset Requirements: The supplied t-grid dataset must contain the Practical Salinity and the Potential Temperature variables. The depth_0 field must also be supplied. The GSW package is used to calculate The Absolute Pressure, Absolute Salinity and Conservate Temperature. Note that currently density can only be constructed using the EOS10 equation of state. Parameters ---------- eos : equation of state, optional DESCRIPTION. The default is 'EOS10'. rhobar : Calculate density with depth mean T and S DESCRIPTION. The default is 'False'. Zd_mask : Provide a 3D mask for rhobar calculation Calculate using calculate_vertical_mask DESCRIPTION. The default is empty. CT_AS : Conservative Temperature and Absolute Salinity already provided DESCRIPTION. The default is 'False'. pot_dens :Calculation at zero pressure DESCRIPTION. The default is 'False'. Tbar and Sbar : If rhobar is True then these can be switch to False to allow one component to remain depth varying. So Tbar=Flase gives temperature component, Sbar=Flase gives Salinity component DESCRIPTION. The default is 'True'. Returns ------- None. adds attribute NEMO.dataset.density """ debug(f'Constructing in-situ density for {get_slug(self)} with EOS "{eos}"') try: if eos != "EOS10": raise ValueError(str(self) + ": Density calculation for " + eos + " not implemented.") if self.grid_ref != "t-grid": raise ValueError( str(self) + ": Density calculation can only be performed for a t-grid object,\ the tracer grid for NEMO." 
) No_time = False try: shape_ds = ( self.dataset.t_dim.size, self.dataset.z_dim.size, self.dataset.y_dim.size, self.dataset.x_dim.size, ) sal = self.dataset.salinity.to_masked_array() temp = self.dataset.temperature.to_masked_array() except AttributeError: No_time = True shape_ds = (1, self.dataset.z_dim.size, self.dataset.y_dim.size, self.dataset.x_dim.size) sal = self.dataset.salinity.to_masked_array()[np.newaxis, ...] temp = self.dataset.temperature.to_masked_array()[np.newaxis, ...] density = np.ma.zeros(shape_ds) s_levels = self.dataset.depth_0.to_masked_array() lat = self.dataset.latitude.values lon = self.dataset.longitude.values # Absolute Pressure if pot_dens: pressure_absolute = 0.0 # calculate potential density else: pressure_absolute = np.ma.masked_invalid(gsw.p_from_z(-s_levels, lat)) # depth must be negative if not rhobar: # calculate full depth # Absolute Salinity if not CT_AS: # abs salinity not provided sal_absolute = np.ma.masked_invalid(gsw.SA_from_SP(sal, pressure_absolute, lon, lat)) else: # abs salinity provided sal_absolute = np.ma.masked_invalid(sal) sal_absolute = np.ma.masked_less(sal_absolute, 0) # Conservative Temperature if not CT_AS: # conservative temp not provided temp_conservative = np.ma.masked_invalid(gsw.CT_from_pt(sal_absolute, temp)) else: # conservative temp provided temp_conservative = np.ma.masked_invalid(temp) # In-situ density density = np.ma.masked_invalid(gsw.rho(sal_absolute, temp_conservative, pressure_absolute)) new_var_name = "density" else: # calculate with depth integrated T S # prepare coordinate variables if np.size(Zd_mask) == 0: DZ = self.dataset.e3_0.to_masked_array() else: DZ = self.dataset.e3_0.to_masked_array() * Zd_mask DP = np.sum(DZ, axis=0) # DP=np.repeat(DP[np.newaxis,:,:],shape_ds[1],axis=0) DZ = np.repeat(DZ[np.newaxis, :, :, :], shape_ds[0], axis=0) DP = np.repeat(DP[np.newaxis, :, :], shape_ds[0], axis=0) # Absolute Salinity if not CT_AS: # abs salinity not provided sal_absolute = np.ma.masked_invalid(gsw.SA_from_SP(sal, pressure_absolute, lon, lat)) else: # abs salinity provided sal_absolute = np.ma.masked_invalid(sal) # Conservative Temperature if not CT_AS: # Conservative temperature not provided temp_conservative = np.ma.masked_invalid(gsw.CT_from_pt(sal_absolute, temp)) else: # conservative temp provided temp_conservative = np.ma.masked_invalid(temp) if pot_dens and (Sbar and Tbar): # usual case pot_dens and depth averaged everything sal_absolute = np.sum(np.ma.masked_less(sal_absolute, 0) * DZ, axis=1) / DP temp_conservative = np.sum(np.ma.masked_less(temp_conservative, 0) * DZ, axis=1) / DP density = np.ma.masked_invalid(gsw.rho(sal_absolute, temp_conservative, pressure_absolute)) density = np.repeat(density[:, np.newaxis, :, :], shape_ds[1], axis=1) else: # Either insitue density or one of Tbar or Sbar Flase if Sbar: sal_absolute = np.repeat( (np.sum(np.ma.masked_less(sal_absolute, 0) * DZ, axis=1) / DP)[:, np.newaxis, :, :], shape_ds[1], axis=1, ) if Tbar: temp_conservative = np.repeat( (np.sum(np.ma.masked_less(temp_conservative, 0) * DZ, axis=1) / DP)[:, np.newaxis, :, :], shape_ds[1], axis=1, ) density = np.ma.masked_invalid(gsw.rho(sal_absolute, temp_conservative, pressure_absolute)) if Tbar and Sbar: new_var_name = "density_bar" else: if not Tbar: new_var_name = "density_T" else: new_var_name = "density_S" # rho and rhobar coords = { "depth_0": (("z_dim", "y_dim", "x_dim"), self.dataset.depth_0.values), "latitude": (("y_dim", "x_dim"), self.dataset.latitude.values), "longitude": (("y_dim", "x_dim"), 
self.dataset.longitude.values), } dims = ["z_dim", "y_dim", "x_dim"] if pot_dens: attributes = {"units": "kg / m^3", "standard name": "Potential density "} else: attributes = {"units": "kg / m^3", "standard name": "In-situ density "} if not No_time: coords["time"] = (("t_dim"), self.dataset.time.values) dims.insert(0, "t_dim") else: density = np.squeeze(density) self.dataset[new_var_name] = xr.DataArray(density, coords=coords, dims=dims, attrs=attributes) except AttributeError as err: error(err) def spatial_subset(self, dataset, lims): """ Specify indices to subset the data. Subset region defined as a 2D box from lower left to upper right corners lims = [x_dim index_1, x_dim_index_2, y_dim index_1, y_dim_index_2] - Modifies self.dataset """ if len(lims) == 4: # if lims are provided take a subset debug(f"Trimming by indices: {lims}") # subsetting will wrap longitude across dateline if 1st longitude is larger than 2nd. if "x_dim" in dataset.dims: if lims[0] < lims[1]: # usual case dataset = dataset.isel(y_dim=range(lims[2], lims[3]), x_dim=range(lims[0], lims[1])) else: # longitude wrap around nx = dataset.dims["x_dim"] ds1 = dataset.isel(y_dim=range(lims[2], lims[3]), x_dim=range(lims[0], nx)) ds2 = dataset.isel(y_dim=range(lims[2], lims[3]), x_dim=range(0, lims[1])) dataset = xr.concat([ds1, ds2], dim="x_dim") else: print("limits not used as only work with datasets having dimension x_dim") return dataset def trim_domain_size(self, dataset_domain): """ Trim the domain variables if the dataset object is a spatial subset Note: This breaks if the SW & NW corner values of nav_lat and nav_lon are masked, as can happen if on land... """ debug(f"Trimming {get_slug(self)} variables with {get_slug(dataset_domain)}") if (self.dataset["x_dim"].size != dataset_domain["x_dim"].size) or ( self.dataset["y_dim"].size != dataset_domain["y_dim"].size ): info( "The domain and dataset objects are different sizes:" " [{},{}] cf [{},{}]. Trim domain.".format( dataset_domain["x_dim"].size, dataset_domain["y_dim"].size, self.dataset["x_dim"].size, self.dataset["y_dim"].size, ) ) # Find the corners of the cut out domain. try: [j0, i0] = self.find_j_i_domain( lat=self.dataset.latitude[0, 0], lon=self.dataset.longitude[0, 0], dataset_domain=dataset_domain ) [j1, i1] = self.find_j_i_domain( lat=self.dataset.latitude[-1, -1], lon=self.dataset.longitude[-1, -1], dataset_domain=dataset_domain ) debug(f"trim_domain_size(): USED dataset.longitude") except: # if called before variables are re-mapped. Not very pretty... [j0, i0] = self.find_j_i_domain( lat=self.dataset.nav_lat[0, 0], lon=self.dataset.nav_lon[0, 0], dataset_domain=dataset_domain ) [j1, i1] = self.find_j_i_domain( lat=self.dataset.nav_lat[-1, -1], lon=self.dataset.nav_lon[-1, -1], dataset_domain=dataset_domain ) debug(f"trim_domain_size(): USED dataset.nav_lon") dataset_subdomain = dataset_domain.isel(y_dim=slice(j0, j1 + 1), x_dim=slice(i0, i1 + 1)) return dataset_subdomain else: return dataset_domain def copy_domain_vars_to_dataset(self, dataset_domain, grid_vars): """ Map the domain coordinates and metric variables to the dataset object. Expects the source and target DataArrays to be same sizes. """ debug(f"Copying domain vars from {get_slug(dataset_domain)}/{get_slug(grid_vars)} to {get_slug(self)}") for var in grid_vars: try: new_name = self.config.domain.variable_map[var] m = re.search( "depth[a-z]_0", var ) # Check necessary because of hardcoded calculated depth variable names. 
if m: self.dataset[new_name] = dataset_domain[var].squeeze() else: self.dataset[new_name] = dataset_domain[new_name].squeeze() debug("map: {} --> {}".format(var, new_name)) except: # FIXME Catch specific exception(s) pass # TODO Should we log something here? def differentiate(self, in_var_str, config_path=None, dim="z_dim", out_var_str=None, out_obj=None): """ Derivatives are computed in x_dim, y_dim, z_dim (or i,j,k) directions wrt lambda, phi, or z coordinates (with scale factor in metres not degrees). Derivatives are calculated using the approach adopted in NEMO, specifically using the 1st order accurate central difference approximation. For reference see section 3.1.2 (sec. Discrete operators) of the NEMO v4 Handbook. Currently the method does not accomodate all possible eventualities. It covers: 1) d(grid_t)/dz --> grid_w Returns an object (with the appropriate target grid_ref) containing derivative (out_var_str) as xr.DataArray This is hardwired to expect: 1) depth_0 and e3_0 fields exist 2) xr.DataArrays are 4D 3) self.filename_domain if out_obj not specified 4) If out_obj is not specified, one is built that is the size of self.filename_domain. I.e. automatic subsetting of out_obj is not supported. Example usage: -------------- # Initialise DataArrays nemo_t = coast.NEMO( fn_data, fn_domain, grid_ref='t-grid' ) # Compute dT/dz nemo_w_1 = nemo_t.differentiate( 'temperature', dim='z_dim' ) # For f(z)=-z. Compute df/dz = -1. Surface value is set to zero nemo_t.dataset['depth4D'],_ = xr.broadcast( nemo_t.dataset['depth_0'], nemo_t.dataset['temperature'] ) nemo_w_4 = nemo_t.differentiate( 'depth4D', dim='z_dim', out_var_str='dzdz' ) Provide an existing target NEMO object and target variable name: nemo_w_1 = nemo_t.differentiate( 'temperature', dim='z_dim', out_var_str='dTdz', out_obj=nemo_w_1 ) Parameters ---------- in_var_str : str, name of variable to differentiate config_path : str, path to the w grid config file dim : str, dimension to operate over. E.g. {'z_dim', 'y_dim', 'x_dim', 't_dim'} out_var_str : str, (optional) name of the target xr.DataArray out_obj : exiting NEMO obj to store xr.DataArray (optional) """ import xarray as xr new_units = "" # Check in_var_str exists in self. if hasattr(self.dataset, in_var_str): # self.dataset[in_var_str] exists var = self.dataset[in_var_str] # for convenience nt = var.sizes["t_dim"] nz = var.sizes["z_dim"] ny = var.sizes["y_dim"] nx = var.sizes["x_dim"] # Compute d(t_grid)/dz --> w-grid # Check grid_ref and dir. Determine target grid_ref. if (self.grid_ref == "t-grid") and (dim == "z_dim"): out_grid = "w-grid" # If out_obj exists check grid_ref, else create out_obj. if (out_obj is None) or (out_obj.grid_ref != out_grid): try: out_obj = Gridded(fn_domain=self.filename_domain, config=config_path) except: # TODO Catch specific exception(s) warn( "Failed to create target NEMO obj. Perhaps self.", "filename_domain={} is empty?".format(self.filename_domain), ) # Check is out_var_str is defined, else create it if out_var_str is None: out_var_str = in_var_str + "_dz" # Create new DataArray with the same dimensions as the parent # Crucially have a coordinate value that is appropriate to the target location. blank = xr.zeros_like(var.isel(z_dim=[0])) # Using "z_dim=[0]" as a list preserves z-dimension blank.coords["depth_0"] -= blank.coords["depth_0"] # reset coord vals to zero # Add blank slice to the 'surface'. 
Concat over the 'dim' coords diff = xr.concat([blank, var.diff(dim)], dim) diff_ndim, e3w_ndim = xr.broadcast(diff, out_obj.dataset.e3_0.squeeze()) # Compute the derivative out_obj.dataset[out_var_str] = -diff_ndim / e3w_ndim # Assign attributes new_units = var.units + "/" + out_obj.dataset.depth_0.units # Convert to a xr.DataArray and return out_obj.dataset[out_var_str].attrs = {"units": new_units, "standard_name": out_var_str} # Return in object. return out_obj else: warn("Not ready for that combination of grid ({}) and " "derivative ({})".format(self.grid_ref, dim)) return None else: warn(f"{in_var_str} does not exist in {get_slug(self)} dataset") return None def apply_doodson_x0_filter(self, var_str): """Applies Doodson X0 filter to a variable. Input variable is expected to be hourly. Output is saved back to original dataset as {var_str}_dxo !!WARNING: Will load in entire variable to memory. If dataset large, then subset before using this method or ensure you have enough free RAM to hold the variable (twice). DB:: Currently not tested in unit_test.py""" var = self.dataset[var_str] new_var_str = var_str + "_dx0" old_dims = var.dims time_index = old_dims.index("t_dim") filtered = stats_util.doodson_x0_filter(var, ax=time_index) if filtered is not None: self.dataset[new_var_str] = (old_dims, filtered) return @staticmethod def get_e3_from_ssh(nemo_t, e3t=True, e3u=False, e3v=False, e3f=False, e3w=False, dom_fn: str = None): """ Where the model has been run with a nonlinear free surface and z* variable volumne (ln_vvl_zstar=True) then the vertical scale factors will vary in time (and space). This function will compute the vertical scale factors e3t, e3u, e3v, e3f and e3w by using the sea surface height field (ssh variable) and initial scale factors from the domain_cfg file. The vertical scale factors will be computed at the same model time as the ssh and if the ssh field is averaged in time then the scale factors will also be time averages. A t-grid NEMO object containing the ssh variable must be passed in. Either the domain_cfg path must have been passed in as an argument when the NEMO object was created or it must be passed in here using the dom_fn argument. e.g. e3t,e3v,e3f = coast.NEMO.get_e3_from_ssh(nemo_t,true,false,true,true,false) Parameters ---------- nemo_t : (Coast.NEMO), NEMO object on the t-grid containing the ssh variable e3t : (boolean), true if e3t is to be returned. Default True. e3u : (boolean), true if e3u is to be returned. Default False. e3v : (boolean), true if e3v is to be returned. Default False. e3f : (boolean), true if e3f is to be returned. Default False. e3w : (boolean), true if e3w is to be returned. Default False. dom_fn : (str), Optional, path to domain_cfg file. Returns ------- Tuple of xarray.DataArrays (e3t, e3u, e3v, e3f, e3w) Only those requested will be returned, but the ordering is always the same. """ e3_return = [] try: ssh = nemo_t.dataset.ssh except AttributeError: print("The nemo_t dataset must contain the ssh variable.") return if "t_dim" not in ssh.dims: ssh = ssh.expand_dims("t_dim", axis=0) # Load domain_cfg if dom_fn is None: dom_fn = nemo_t.filename_domain try: ds_dom = xr.open_dataset(dom_fn).squeeze().rename({"z": "z_dim", "x": "x_dim", "y": "y_dim"}) except OSError: print(f"Problem opening domain_cfg file: {dom_fn}") return e3t_0 = ds_dom.e3t_0 # Water column thickness, i.e. 
depth of bottom w-level on horizontal t-grid H = e3t_0.cumsum(dim="z_dim").isel(z_dim=ds_dom.bottom_level.astype("int") - 1) # Add correction to e3t_0 due to change in ssh e3t_new = e3t_0 * (1 + ssh / H) # preserve dimension ordering e3t_new = e3t_new.transpose("t_dim", "z_dim", "y_dim", "x_dim") # mask out correction at layers below bottom level e3t_new = e3t_new.where(e3t_new.z_dim < ds_dom.bottom_level, e3t_0.data) # preserve any other t mask e3t_new = e3t_new.where(~np.isnan(ssh)) if e3t: e3_return.append(e3t_new.squeeze()) if np.any([e3u, e3v, e3f]): e1e2t = ds_dom.e1t * ds_dom.e2t if np.any([e3u, e3v, e3w]): e3t_dt = e3t_new - e3t_0 # area averaged interpolation onto the u-grid to get e3u if np.any([e3u, e3f]): e1e2u = ds_dom.e1u * ds_dom.e2u # interpolate onto u-grid e3u_temp = ( (0.5 / e1e2u[:, :-1]) * ((e1e2t[:, :-1] * e3t_dt[:, :, :, :-1]) + (e1e2t[:, 1:] * e3t_dt[:, :, :, 1:])) ).transpose("t_dim", "z_dim", "y_dim", "x_dim") # u mask e3u_temp = e3u_temp.where(e3t_dt[:, :, :, 1:] != 0, 0) # mask out correction at layers below bottom level e3u_temp = e3u_temp.where(e3u_temp.z_dim < ds_dom.bottom_level[:, :-1], 0) # Add correction to e3u_0 e3u_temp = e3u_temp + ds_dom.e3u_0[:, :, :-1] e3u_new = xr.zeros_like(e3t_new) e3u_new = e3u_new.load() e3u_new[:, :, :, :-1] = e3u_temp e3u_new[:, :, :, -1] = ds_dom.e3u_0[:, :, -1] e3u_new["longitude"] = ds_dom.glamu e3u_new["latitude"] = ds_dom.gphiu if e3u: e3_return.append(e3u_new.squeeze()) # area averaged interpolation onto the v-grid to get e3v if e3v: e1e2v = ds_dom.e1v * ds_dom.e2v e3v_temp = ( (0.5 / e1e2v[:-1, :]) * ((e1e2t[:-1, :] * e3t_dt[:, :, :-1, :]) + (e1e2t[1:, :] * e3t_dt[:, :, 1:, :])) ).transpose("t_dim", "z_dim", "y_dim", "x_dim") e3v_temp = e3v_temp.where(e3t_dt[:, :, 1:, :] != 0, 0) e3v_temp = e3v_temp.where(e3v_temp.z_dim < ds_dom.bottom_level[:-1, :], 0) e3v_temp = e3v_temp + ds_dom.e3v_0[:, :-1, :] e3v_new = xr.zeros_like(e3t_new) e3v_new = e3v_new.load() e3v_new[:, :, :-1, :] = e3v_temp e3v_new[:, :, -1, :] = ds_dom.e3v_0[:, -1, :] e3v_new["longitude"] = ds_dom.glamv e3v_new["latitude"] = ds_dom.gphiv e3_return.append(e3v_new.squeeze()) # area averaged interpolation onto the f-grid to get e3f if e3f: e1e2f = ds_dom.e1f * ds_dom.e2f e3u_dt = e3u_new - ds_dom.e3u_0 e3f_temp = ( (0.5 / e1e2f[:-1, :]) * ((e1e2u[:-1, :] * e3u_dt[:, :, :-1, :]) + (e1e2u[1:, :] * e3u_dt[:, :, 1:, :])) ).transpose("t_dim", "z_dim", "y_dim", "x_dim") e3f_temp = e3f_temp.where(e3u_dt[:, :, 1:, :] != 0, 0) e3f_temp = e3f_temp.where(e3f_temp.z_dim < ds_dom.bottom_level[:-1, :], 0) e3f_temp = e3f_temp + ds_dom.e3f_0[:, :-1, :] e3f_new = xr.zeros_like(e3t_new) e3f_new = e3f_new.load() e3f_new[:, :, :-1, :] = e3f_temp e3f_new[:, :, -1, :] = ds_dom.e3f_0[:, -1, :] e3f_new["longitude"] = ds_dom.glamf e3f_new["latitude"] = ds_dom.gphif e3_return.append(e3f_new.squeeze()) # simple vertical interpolation for e3w.
Special treatment of top and bottom levels if e3w: # top level correction same as e3t e3w_new = (ds_dom.e3w_0 + e3t_dt).transpose("t_dim", "z_dim", "y_dim", "x_dim") # levels between top and bottom e3w_new = e3w_new.load() e3w_new[dict(z_dim=slice(1, None))] = ( 0.5 * e3t_dt[:, :-1, :, :] + 0.5 * e3t_dt[:, 1:, :, :] + ds_dom.e3w_0[1:, :, :] ) # bottom and below levels e3w_new = e3w_new.where(e3w_new.z_dim < ds_dom.bottom_level, e3t_dt.shift(z_dim=1) + ds_dom.e3w_0) e3_return.append(e3w_new.squeeze()) return tuple(e3_return) def harmonics_combine(self, constituents, components=["x", "y"]): """ Returns a new NEMO object containing combined harmonic information from the original object. NEMO saves harmonics to individual variables such as M2x, M2y... etc. This routine will combine these variables (depending on constituents) into a single data array. This new array will have the new dimension 'constituent' and a new data coordinate 'constituent_name'. Parameters ---------- constituents : List of strings containing constituent names to combine. The case of these strings should match that used in NEMO output. If a constituent is not found, no problem, it just won't be in the combined dataset. components : List of strings containing harmonic components to look for. By default, this looks for the complex components 'x' and 'y'. E.g. if constituents = ['M2'] and components is left as default, then the routine looks for ['M2x', 'M2y']. Returns ------- NEMO() object, containing combined harmonic variables in a new dataset. """ # Select only the specified constituents. NEMO model harmonics names are # things like "M2x" and "M2y". Ignore current harmonics. Start by constructing # the possible variable names names_x = np.array([cc + components[0] for cc in constituents]) names_y = np.array([cc + components[1] for cc in constituents]) constituents = np.array(constituents, dtype="str") # Compare against names in file var_keys = np.array(list(self.dataset.keys())) indices = [np.where(names_x == ss) for ss in names_x if ss in var_keys] indices = np.array(indices).T.squeeze() # Index the possible names to match file names names_x = names_x[indices] names_y = names_y[indices] constituents = constituents[indices] # Concatenate x and y variables into one array x_arrays = [self.dataset[ss] for ss in names_x] harmonic_x = "harmonic_" + components[0] x_data = xr.concat(x_arrays, dim="constituent").rename(harmonic_x) y_arrays = [self.dataset[ss] for ss in names_y] harmonic_y = "harmonic_" + components[1] y_data = xr.concat(y_arrays, dim="constituent").rename(harmonic_y) nemo_harmonics = Gridded() nemo_harmonics.dataset = xr.merge([x_data, y_data]) nemo_harmonics.dataset["constituent"] = constituents return nemo_harmonics def harmonics_convert( self, direction="cart2polar", x_var="harmonic_x", y_var="harmonic_y", a_var="harmonic_a", g_var="harmonic_g", degrees=True, ): """ Converts NEMO harmonics from cartesian to polar or vice versa. Make sure this NEMO object contains combined harmonic variables obtained using harmonics_combine(). Parameters ---------- direction (str) : Choose 'cart2polar' or 'polar2cart'. If 'cart2polar', then it will look for variables x_var and y_var. If 'polar2cart', it will look for a_var (amplitude) and g_var (phase). x_var (str) : Harmonic x variable name in dataset (or output) default = 'harmonic_x'. y_var (str) : Harmonic y variable name in dataset (or output) default = 'harmonic_y'. a_var (str) : Harmonic amplitude variable name in dataset (or output) default = 'harmonic_a'.
g_var (str) : Harmonic phase variable name in dataset (or output) default = 'harmonic_g'. degrees (bool) : Whether input/output phases are/will be in degrees. Default is True. Returns ------- Modifies NEMO() dataset in place. New variables added. """ if direction == "cart2polar": a, g = general_utils.cartesian_to_polar(self.dataset[x_var], self.dataset[y_var], degrees=degrees) self.dataset[a_var] = a self.dataset[g_var] = g elif direction == "polar2cart": x, y = general_utils.polar_to_cartesian(self.dataset[a_var], self.dataset[g_var], degrees=degrees) self.dataset[x_var] = x self.dataset[y_var] = y else: print("Unknown direction setting. Choose cart2polar or polar2cart") return def time_slice(self, date0, date1): """Return a new Gridded object, indexed between dates date0 and date1""" dataset = self.dataset t_ind = pd.to_datetime(dataset.time.values) >= date0 dataset = dataset.isel(t_dim=t_ind) t_ind = pd.to_datetime(dataset.time.values) < date1 dataset = dataset.isel(t_dim=t_ind) gridded_out = Gridded() gridded_out.dataset = dataset return gridded_out def calculate_vertical_mask(self, Zmax): """ Calculates a 3D mask down to a specified level Zmax: 1 for sea; 0 for below the sea bed; linearly ramped for the last wet level """ Z = self.dataset.variables["depth_0"].values e3_0 = self.dataset.variables["e3_0"].values # calculate W-level - might want this done as standard in gridded ZW = np.zeros_like(e3_0) ZW[0, :, :] = 0.0 ZW[1:, :, :] = np.cumsum(e3_0, axis=0)[:-1, :, :] mbot = self.dataset.variables["bottom_level"].values.astype(int) mask = mbot != 0 ZZ = ZW[1:, :, :] ZZ[ZZ == 0] = np.nan ZW[1:, :, :] = ZZ Zd_mask = np.zeros(Z.shape) nz, ny, nx = np.shape(Z) kmax = np.zeros((ny, nx)).astype(int) IIkmax = np.zeros(np.shape(Z)) # careful: assumes mbot is the 1st sea point above the bed, i.e. the new definition for i in range(nx): for j in range(ny): if mask[j, i] == 1: Zd_mask[0 : mbot[j, i], j, i] = 1 # mbot is not a python-style index so no +1 kmax[j, i] = mbot[j, i] if ZW[mbot[j, i], j, i] > Zmax: kkmax = np.max(np.where(ZW[:, j, i] < Zmax)) Zd_mask[kkmax + 1 :, j, i] = 0 Zd_mask[kkmax, j, i] = (Zmax - ZW[kkmax, j, i]) / (ZW[kkmax + 1, j, i] - ZW[kkmax, j, i]) kmax[j, i] = kkmax IIkmax[kkmax, j, i] = 1 Ikmax = np.nonzero(IIkmax.ravel()) return Zd_mask, kmax, Ikmax
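To make the pad-then-difference scheme in differentiate() concrete, here is a minimal self-contained sketch (an editor-added toy illustration, not a COAsT call: the array sizes and the uniform 1 m scale factors are assumptions). The field equals depth, i.e. f(z) = -z for upward z, so -diff/e3w comes out as -1 everywhere below the zero-padded surface slice, matching the docstring example above.

import numpy as np
import xarray as xr

depth = xr.DataArray(np.arange(5, dtype=float), dims="z_dim")  # five 1 m levels
var = depth.expand_dims({"t_dim": 1, "y_dim": 2, "x_dim": 2}).transpose("t_dim", "z_dim", "y_dim", "x_dim")
e3w = xr.ones_like(var)  # assumed uniform 1 m w-level scale factors

blank = xr.zeros_like(var.isel(z_dim=[0]))             # zero slice at the 'surface'
diff = xr.concat([blank, var.diff("z_dim")], "z_dim")  # pad, then first difference
dfdz = -diff / e3w                                     # same sign convention as the method above
print(dfdz.isel(t_dim=0, y_dim=0, x_dim=0).values)     # [ 0. -1. -1. -1. -1.]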
/Electrum-Zcash-Random-Fork-3.1.3b5.tar.gz/Electrum-Zcash-Random-Fork-3.1.3b5/lib/simple_config.py
import json import threading import time import os import stat from copy import deepcopy from .util import (user_dir, print_error, PrintError, NoDynamicFeeEstimates) from .i18n import _ FEE_ETA_TARGETS = [25, 10, 5, 2] FEE_DEPTH_TARGETS = [10000000, 5000000, 2000000, 1000000, 500000, 200000, 100000] # satoshi per kbyte FEERATE_MAX_DYNAMIC = 150000 FEERATE_WARNING_HIGH_FEE = 100000 FEERATE_FALLBACK_STATIC_FEE = 1000 FEERATE_DEFAULT_RELAY = 1000 FEERATE_STATIC_VALUES = [150, 300, 500, 1000, 1500, 2500, 3500, 5000, 7500, 10000] config = None def get_config(): global config return config def set_config(c): global config config = c FINAL_CONFIG_VERSION = 2 class SimpleConfig(PrintError): """ The SimpleConfig class is responsible for handling operations involving configuration files. There are two different sources of possible configuration values: 1. Command line options. 2. User configuration (in the user's config directory) They are taken in order (1. overrides config options set in 2.) """ def __init__(self, options=None, read_user_config_function=None, read_user_dir_function=None): if options is None: options = {} # This lock needs to be acquired for updating and reading the config in # a thread-safe way. self.lock = threading.RLock() self.mempool_fees = {} self.fee_estimates = {} self.fee_estimates_last_updated = {} self.last_time_fee_estimates_requested = 0 # zero ensures immediate fees # The following two functions are there for dependency injection when # testing. if read_user_config_function is None: read_user_config_function = read_user_config if read_user_dir_function is None: self.user_dir = user_dir else: self.user_dir = read_user_dir_function # The command line options self.cmdline_options = deepcopy(options) # don't allow to be set on CLI: self.cmdline_options.pop('config_version', None) # Set self.path and read the user config self.user_config = {} # for self.get in electrum_path() self.path = self.electrum_path() self.user_config = read_user_config_function(self.path) if not self.user_config: # avoid new config getting upgraded self.user_config = {'config_version': FINAL_CONFIG_VERSION} # config "upgrade" - CLI options self.rename_config_keys( self.cmdline_options, {'auto_cycle': 'auto_connect'}, True) # config upgrade - user config if self.requires_upgrade(): self.upgrade() # Make a singleton instance of 'self' set_config(self) def electrum_path(self): # Read electrum_path from command line # Otherwise use the user's default data directory. path = self.get('electrum_path') if path is None: path = self.user_dir() def make_dir(path): # Make directory if it does not yet exist. if not os.path.exists(path): if os.path.islink(path): raise Exception('Dangling link: ' + path) os.mkdir(path) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) make_dir(path) if self.get('testnet'): path = os.path.join(path, 'testnet') make_dir(path) elif self.get('regtest'): path = os.path.join(path, 'regtest') make_dir(path) self.print_error("electrum-zcash directory", path) return path def rename_config_keys(self, config, keypairs, deprecation_warning=False): """Migrate old key names to new ones""" updated = False for old_key, new_key in keypairs.items(): if old_key in config: if new_key not in config: config[new_key] = config[old_key] if deprecation_warning: self.print_stderr('Note that the {} variable has been deprecated. 
' 'You should use {} instead.'.format(old_key, new_key)) del config[old_key] updated = True return updated def set_key(self, key, value, save=True): if not self.is_modifiable(key): self.print_stderr("Warning: not changing config key '%s' set on the command line" % key) return self._set_key_in_user_config(key, value, save) def _set_key_in_user_config(self, key, value, save=True): with self.lock: if value is not None: self.user_config[key] = value else: self.user_config.pop(key, None) if save: self.save_user_config() def get(self, key, default=None): with self.lock: out = self.cmdline_options.get(key) if out is None: out = self.user_config.get(key, default) return out def requires_upgrade(self): return self.get_config_version() < FINAL_CONFIG_VERSION def upgrade(self): with self.lock: self.print_error('upgrading config') self.convert_version_2() self.set_key('config_version', FINAL_CONFIG_VERSION, save=True) def convert_version_2(self): if not self._is_upgrade_method_needed(1, 1): return self.rename_config_keys(self.user_config, {'auto_cycle': 'auto_connect'}) try: # change server string FROM host:port:proto TO host:port:s server_str = self.user_config.get('server') host, port, protocol = str(server_str).rsplit(':', 2) assert protocol in ('s', 't') int(port) # Throws if it cannot be converted to int server_str = '{}:{}:s'.format(host, port) self._set_key_in_user_config('server', server_str) except BaseException: self._set_key_in_user_config('server', None) self.set_key('config_version', 2) def _is_upgrade_method_needed(self, min_version, max_version): cur_version = self.get_config_version() if cur_version > max_version: return False elif cur_version < min_version: raise Exception( ('config upgrade: unexpected version %d (should be %d-%d)' % (cur_version, min_version, max_version))) else: return True def get_config_version(self): config_version = self.get('config_version', 1) if config_version > FINAL_CONFIG_VERSION: self.print_stderr('WARNING: config version ({}) is higher than ours ({})' .format(config_version, FINAL_CONFIG_VERSION)) return config_version def is_modifiable(self, key): return key not in self.cmdline_options def save_user_config(self): if not self.path: return path = os.path.join(self.path, "config") s = json.dumps(self.user_config, indent=4, sort_keys=True) try: with open(path, "w", encoding='utf-8') as f: f.write(s) os.chmod(path, stat.S_IREAD | stat.S_IWRITE) except FileNotFoundError: # datadir probably deleted while running... if os.path.exists(self.path): # or maybe not? raise def get_wallet_path(self): """Get the path of the wallet.""" # command line -w option if self.get('wallet_path'): return os.path.join(self.get('cwd'), self.get('wallet_path')) # path in config file path = self.get('default_wallet_path') if path and os.path.exists(path): return path # default path if not os.path.exists(self.path): raise FileNotFoundError( _('Electrum-Zcash datadir does not exist.
Was it deleted while running?') + '\n' + _('Should be at {}').format(self.path)) dirpath = os.path.join(self.path, "wallets") if not os.path.exists(dirpath): if os.path.islink(dirpath): raise Exception('Dangling link: ' + dirpath) os.mkdir(dirpath) os.chmod(dirpath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) new_path = os.path.join(self.path, "wallets", "default_wallet") # default path in pre 1.9 versions old_path = os.path.join(self.path, "electrum.dat") if os.path.exists(old_path) and not os.path.exists(new_path): os.rename(old_path, new_path) return new_path def remove_from_recently_open(self, filename): recent = self.get('recently_open', []) if filename in recent: recent.remove(filename) self.set_key('recently_open', recent) def set_session_timeout(self, seconds): self.print_error("session timeout -> %d seconds" % seconds) self.set_key('session_timeout', seconds) def get_session_timeout(self): return self.get('session_timeout', 300) def open_last_wallet(self): if self.get('wallet_path') is None: last_wallet = self.get('gui_last_wallet') if last_wallet is not None and os.path.exists(last_wallet): self.cmdline_options['default_wallet_path'] = last_wallet def save_last_wallet(self, wallet): if self.get('wallet_path') is None: path = wallet.storage.path self.set_key('gui_last_wallet', path) def impose_hard_limits_on_fee(func): def get_fee_within_limits(self, *args, **kwargs): fee = func(self, *args, **kwargs) if fee is None: return fee fee = min(FEERATE_MAX_DYNAMIC, fee) fee = max(FEERATE_DEFAULT_RELAY, fee) return fee return get_fee_within_limits @impose_hard_limits_on_fee def eta_to_fee(self, i): """Returns fee in sat/kbyte.""" if i < 4: j = FEE_ETA_TARGETS[i] fee = self.fee_estimates.get(j) else: assert i == 4 fee = self.fee_estimates.get(2) if fee is not None: fee += fee/2 return fee def fee_to_depth(self, target_fee): depth = 0 for fee, s in self.mempool_fees: depth += s if fee <= target_fee: break else: return 0 return depth @impose_hard_limits_on_fee def depth_to_fee(self, i): """Returns fee in sat/kbyte.""" target = self.depth_target(i) depth = 0 for fee, s in self.mempool_fees: depth += s if depth > target: break else: return 0 return fee * 1000 def depth_target(self, i): return FEE_DEPTH_TARGETS[i] def eta_target(self, i): if i == len(FEE_ETA_TARGETS): return 1 return FEE_ETA_TARGETS[i] def fee_to_eta(self, fee_per_kb): import operator l = list(self.fee_estimates.items()) + [(1, self.eta_to_fee(4))] dist = map(lambda x: (x[0], abs(x[1] - fee_per_kb)), l) min_target, min_value = min(dist, key=operator.itemgetter(1)) if fee_per_kb < self.fee_estimates.get(25)/2: min_target = -1 return min_target def depth_tooltip(self, depth): return "%.1f MB from tip"%(depth/1000000) def eta_tooltip(self, x): if x < 0: return _('Low fee') elif x == 1: return _('In the next block') else: return _('Within {} blocks').format(x) def get_fee_status(self): dyn = self.is_dynfee() mempool = self.use_mempool_fees() pos = self.get_depth_level() if mempool else self.get_fee_level() fee_rate = self.fee_per_kb() target, tooltip = self.get_fee_text(pos, dyn, mempool, fee_rate) return tooltip + ' [%s]'%target if dyn else target + ' [Static]' def get_fee_text(self, pos, dyn, mempool, fee_rate): """Returns (text, tooltip) where text is what we target: static fee / num blocks to confirm in / mempool depth tooltip is the corresponding estimate (e.g. 
num blocks for a static fee) """ rate_str = ('%s sat/kB' % round(fee_rate)) if fee_rate is not None else 'unknown' if dyn: if mempool: depth = self.depth_target(pos) text = self.depth_tooltip(depth) else: eta = self.eta_target(pos) text = self.eta_tooltip(eta) tooltip = rate_str else: text = rate_str if mempool and self.has_fee_mempool(): depth = self.fee_to_depth(fee_rate) tooltip = self.depth_tooltip(depth) elif not mempool and self.has_fee_etas(): eta = self.fee_to_eta(fee_rate) tooltip = self.eta_tooltip(eta) else: tooltip = '' return text, tooltip def get_depth_level(self): maxp = len(FEE_DEPTH_TARGETS) - 1 return min(maxp, self.get('depth_level', 2)) def get_fee_level(self): maxp = len(FEE_ETA_TARGETS) # not (-1) to have "next block" return min(maxp, self.get('fee_level', 2)) def get_fee_slider(self, dyn, mempool): if dyn: if mempool: pos = self.get_depth_level() maxp = len(FEE_DEPTH_TARGETS) - 1 fee_rate = self.depth_to_fee(pos) else: pos = self.get_fee_level() maxp = len(FEE_ETA_TARGETS) # not (-1) to have "next block" fee_rate = self.eta_to_fee(pos) else: fee_rate = self.fee_per_kb(dyn=False) pos = self.static_fee_index(fee_rate) maxp = 9 return maxp, pos, fee_rate def static_fee(self, i): return FEERATE_STATIC_VALUES[i] def static_fee_index(self, value): if value is None: raise TypeError('static fee cannot be None') dist = list(map(lambda x: abs(x - value), FEERATE_STATIC_VALUES)) return min(range(len(dist)), key=dist.__getitem__) def has_fee_etas(self): return len(self.fee_estimates) == 4 def has_fee_mempool(self): return bool(self.mempool_fees) def has_dynamic_fees_ready(self): if self.use_mempool_fees(): return self.has_fee_mempool() else: return self.has_fee_etas() def is_dynfee(self): return bool(self.get('dynamic_fees', True)) def use_mempool_fees(self): return False def fee_per_kb(self, dyn=None, mempool=None): """Returns sat/kvB fee to pay for a txn. Note: might return None. """ if dyn is None: dyn = self.is_dynfee() if mempool is None: mempool = self.use_mempool_fees() if dyn: if mempool: fee_rate = self.depth_to_fee(self.get_depth_level()) else: fee_rate = self.eta_to_fee(self.get_fee_level()) else: fee_rate = self.get('fee_per_kb', FEERATE_FALLBACK_STATIC_FEE) return fee_rate def estimate_fee(self, size): fee_per_kb = self.fee_per_kb() if fee_per_kb is None: raise NoDynamicFeeEstimates() return self.estimate_fee_for_feerate(fee_per_kb, size) @classmethod def estimate_fee_for_feerate(cls, fee_per_kb, size): return round(fee_per_kb * size / 1000) def update_fee_estimates(self, key, value): self.fee_estimates[key] = value self.fee_estimates_last_updated[key] = time.time() def is_fee_estimates_update_required(self): """Checks time since last requested and updated fee estimates. Returns True if an update should be requested. """ now = time.time() return now - self.last_time_fee_estimates_requested > 60 def requested_fee_estimates(self): self.last_time_fee_estimates_requested = time.time() def get_video_device(self): device = self.get("video_device", "default") if device == 'default': device = '' return device def read_user_config(path): """Parse and store the user config settings in electrum-zcash.conf into user_config[].""" if not path: return {} config_path = os.path.join(path, "config") if not os.path.exists(config_path): return {} try: with open(config_path, "r", encoding='utf-8') as f: data = f.read() result = json.loads(data) except: print_error("Warning: Cannot read config file.", config_path) return {} if not type(result) is dict: return {} return result
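A short editor-added sketch of how the two configuration sources above interact, using the dependency-injection hooks the constructor exposes for testing. The temporary directory, the empty user config, and the import path are assumptions (the module lives in the package's lib/ directory); this is a sketch, not part of the library.

import tempfile
from lib.simple_config import SimpleConfig  # assumed import path

tmpdir = tempfile.mkdtemp()
config = SimpleConfig(options={'auto_connect': True},            # CLI-style option
                      read_user_config_function=lambda path: {},
                      read_user_dir_function=lambda: tmpdir)
config._set_key_in_user_config('auto_connect', False, save=False)
print(config.get('auto_connect'))            # True: command line shadows user config
print(config.is_modifiable('auto_connect'))  # False: set_key() would refuse to change it

# Dynamic fee estimates are keyed by confirmation target (blocks -> sat/kbyte);
# slider position 3 maps to FEE_ETA_TARGETS[3] == 2 blocks.
config.update_fee_estimates(2, 5000)
print(config.eta_to_fee(3))                  # 5000, clamped into [1000, 150000]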
/MDPOW-0.8.0.tar.gz/MDPOW-0.8.0/mdpow/restart.py
import pickle import os import logging logger = logging.getLogger('mdpow.checkpoint') def checkpoint(name, sim, filename): """Execute the :meth:`Journalled.save` method and log the event.""" logger.info("checkpoint: %(name)s", vars()) sim.save(filename) class JournalSequenceError(Exception): """Raised when a stage is started without the previous one having been completed.""" class Journal(object): """Class that keeps track of the stage in a protocol. Transaction blocks have to be bracketed by calls to :meth:`~Journal.start` and :meth:`~Journal.completed`. If a block is started before completion, a :exc:`JournalSequenceError` will be raised. Other methods such as :meth:`~Journal.has_completed` and :meth:`~Journal.has_not_completed` can be used to query the status. The attribute :attr:`~Journal.incomplete` flags the state of the current stage (:attr:`~Journal.current`). All completed stages are recorded in the attribute :attr:`~Journal.history`. The current (incomplete) stage can be reset to its initial state with :meth:`Journal.clear`. Example:: J = Journal(['pre', 'main', 'post']) J.start('pre') ... J.completed('pre') J.start('main') ... # main does not finish properly print(J.incomplete) # --> 'main' J.start('post') # raises JournalSequenceError """ def __init__(self, stages): """Initialise the journal that keeps track of stages. :Arguments: *stages* list of the stage identifiers, in the order in which they should be performed. Stage identifiers are checked against this list before they are accepted as arguments to most methods. """ self.stages = stages # list of stage identifiers self.__current = None self.__history = [] self.__incomplete = None @property def current(self): """Current stage identifier""" return self.__current @current.setter def current(self, stage): if stage not in self.stages: raise ValueError("Can only assign a registered stage from %r, not %r" % (self.stages, stage)) self.__current = stage @current.deleter def current(self): self.__current = None @property def incomplete(self): """The last stage that was not completed.""" return self.__incomplete @incomplete.setter def incomplete(self, stage): if stage not in self.stages: raise ValueError("can only assign a registered stage from %(stages)r" % vars(self)) self.__incomplete = stage @incomplete.deleter def incomplete(self): self.__incomplete = None @property def history(self): """List of stages completed""" return self.__history @history.deleter def history(self): self.__history = [] def completed(self, stage): """Record completed stage and reset :attr:`Journal.current`""" assert stage == self.current, "Program logic error: can only complete the current stage" self.__history.append(self.current) del self.current def start(self, stage): """Record that *stage* is starting.""" if self.current is not None: errmsg = "Cannot start stage %s because previously started stage %s " \ "has not been completed."
% (stage, self.current) logger.error(errmsg) raise JournalSequenceError(errmsg) self.current = stage def has_completed(self, stage): """Returns ``True`` if the *stage* has been started and completed at any time.""" return stage in self.history def has_not_completed(self, stage): """Returns ``True`` if the *stage* had been started but not completed yet.""" return self.current is None and not self.has_completed(stage) def clear(self): """Reset incomplete status and current stage""" del self.incomplete del self.current def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.stages) class Journalled(object): """A base class providing methods for journalling and restarts. It installs an instance of :class:`Journal` in the attribute :attr:`Journalled.journal` if it does not exist already. """ #: Class-attribute that contains the names of computation protocols #: supported by the class. These are either method names or dummy names, #: whose logic is provided by an external callback function. #: The method :meth:`get_protocol` raises a :exc:`ValueError` if a #: protocol is not listed in :attr:`~Journalled.protocols`. protocols = [] def __init__(self, *args, **kwargs): # add journal unless we are starting from a save file that already # contains the journal try: len(self.journal.history) except AttributeError: self.journal = Journal(self.protocols) super(Journalled, self).__init__(*args, **kwargs) def get_protocol(self, protocol): """Return method for *protocol*. - If *protocol* is a real method of the class then the method is returned. - If *protocol* is a registered protocol name but no method of the name exists (i.e. *protocol* is a "dummy protocol") then a wrapper function is returned. The wrapper has the signature .. function:: dummy_protocol(func, *args, **kwargs) Runs *func* with the arguments and keywords between calls to :meth:`Journal.start` and :meth:`Journal.completed`, with the stage set to *protocol*. - Raises a :exc:`ValueError` if the *protocol* is not registered (i.e. not found in :attr:`Journalled.protocols`). """ if protocol not in self.protocols: raise ValueError("%r: protocol must be one of %r" % (protocol, self.protocols)) try: return self.__getattribute__(protocol) except AttributeError: # catch *_run dummy protocols and have the user provide the function return self._journalled_func(protocol) def _journalled_func(self, protocol): def dummy_protocol(*args, **kwargs): """Wrap call to func(args) in journaling.""" assert len(args) > 0, "f(func, *args, **kwargs) --> func(*args,**kwargs)" func = args[0] self.journal.start(protocol) success = func(*args[1:], **kwargs) if success: self.journal.completed(protocol) return success return dummy_protocol def save(self, filename=None): """Save instance to a pickle file. The default filename is the name of the file that was last loaded from or saved to. Also sets the attribute :attr:`~Journalled.filename` to the absolute path of the saved file. """ if filename is None: try: if self.filename is not None: filename = self.filename else: raise AttributeError except AttributeError: errmsg = "Neither filename nor default filename provided to save to." logger.error(errmsg) raise ValueError(errmsg) else: self.filename = os.path.abspath(filename) with open(self.filename, 'wb') as f: pickle.dump(self, f) logger.debug("Instance pickled to %(filename)r" % vars(self)) def load(self, filename=None): """Re-instantiate class from pickled file. 
If no *filename* was supplied then the filename is taken from the attribute :attr:`~Journalled.filename`. .. versionchanged:: 0.7.1 Can read pickle files with either Python 2.7 or 3.x, regardless of the Python version that created the pickle. """ if filename is None: try: if self.filename is not None: filename = self.filename else: raise AttributeError except AttributeError: errmsg = "Neither filename nor default filename provided to load from." logger.error(errmsg) raise ValueError(errmsg) # Do not remove this code when dropping Py 2.7 support as it is needed to # be able to read old data files with Python 3 MDPOW. with open(filename, 'rb') as f: try: instance = pickle.load(f) except UnicodeDecodeError: logger.debug("Reading old Python 2 Pickle file %(filename)r" % vars()) instance = pickle.load(f, encoding='latin1') self.__dict__.update(instance.__dict__) logger.debug("Instance loaded from %(filename)r" % vars())
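An editor-added minimal sketch of the dummy-protocol path through get_protocol() described above. The Sim subclass and the 'energy_minimize' / 'MD_relaxed' stage names are made up for illustration, and the import assumes an installed mdpow package; this is a sketch of the mechanism, not a documented workflow.

from mdpow.restart import Journalled

class Sim(Journalled):
    protocols = ['energy_minimize', 'MD_relaxed']  # hypothetical stage names

sim = Sim()
run = sim.get_protocol('energy_minimize')  # no such method exists, so a journalled wrapper is returned

def minimize(nsteps=100):
    print('minimizing for', nsteps, 'steps')
    return True  # a truthy result lets the wrapper mark the stage completed

run(minimize, nsteps=50)  # brackets minimize() with journal.start()/journal.completed()
print(sim.journal.has_completed('energy_minimize'))  # True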
/BIA_OBS-1.0.3.tar.gz/BIA_OBS-1.0.3/BIA/static/dist/node_modules/acorn/CHANGELOG.md
## 7.4.0 (2020-08-03) ### New features Add support for logical assignment operators. Add support for numeric separators. ## 7.3.1 (2020-06-11) ### Bug fixes Make the string in the `version` export match the actual library version. ## 7.3.0 (2020-06-11) ### Bug fixes Fix a bug that caused parsing of object patterns with a property named `set` that had a default value to fail. ### New features Add support for optional chaining (`?.`). ## 7.2.0 (2020-05-09) ### Bug fixes Fix precedence issue in parsing of async arrow functions. ### New features Add support for nullish coalescing. Add support for `import.meta`. Support `export * as ...` syntax. Upgrade to Unicode 13. ## 6.4.1 (2020-03-09) ### Bug fixes More carefully check for valid UTF16 surrogate pairs in regexp validator. ## 7.1.1 (2020-03-01) ### Bug fixes Treat `\8` and `\9` as invalid escapes in template strings. Allow unicode escapes in property names that are keywords. Don't error on an exponential operator expression as argument to `await`. More carefully check for valid UTF16 surrogate pairs in regexp validator. ## 7.1.0 (2019-09-24) ### Bug fixes Disallow trailing object literal commas when ecmaVersion is less than 5. ### New features Add a static `acorn` property to the `Parser` class that contains the entire module interface, to allow plugins to access the instance of the library that they are acting on. ## 7.0.0 (2019-08-13) ### Breaking changes Changes the node format for dynamic imports to use the `ImportExpression` node type, as defined in [ESTree](https://github.com/estree/estree/blob/master/es2020.md#importexpression). Makes 10 (ES2019) the default value for the `ecmaVersion` option. ## 6.3.0 (2019-08-12) ### New features `sourceType: "module"` can now be used even when `ecmaVersion` is less than 6, to parse module-style code that otherwise conforms to an older standard. ## 6.2.1 (2019-07-21) ### Bug fixes Fix bug causing Acorn to treat some characters as identifier characters that shouldn't be treated as such. Fix issue where setting the `allowReserved` option to `"never"` allowed reserved words in some circumstances. ## 6.2.0 (2019-07-04) ### Bug fixes Improve valid assignment checking in `for`/`in` and `for`/`of` loops. Disallow binding `let` in patterns. ### New features Support bigint syntax with `ecmaVersion` >= 11. Support dynamic `import` syntax with `ecmaVersion` >= 11. Upgrade to Unicode version 12. ## 6.1.1 (2019-02-27) ### Bug fixes Fix bug that caused parsing default exports with names to fail. ## 6.1.0 (2019-02-08) ### Bug fixes Fix scope checking when redefining a `var` as a lexical binding. ### New features Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins. ## 6.0.7 (2019-02-04) ### Bug fixes Check that exported bindings are defined. Don't treat `\u180e` as a whitespace character. Check for duplicate parameter names in methods. Don't allow shorthand properties when they are generators or async methods. Forbid binding `await` in async arrow function's parameter list. ## 6.0.6 (2019-01-30) ### Bug fixes The content of class declarations and expressions is now always parsed in strict mode. Don't allow `let` or `const` to bind the variable name `let`. Treat class declarations as lexical. Don't allow a generator function declaration as the sole body of an `if` or `else`. Ignore `"use strict"` when it comes after an empty statement. Allow string line continuations with special line terminator characters.
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings. Fix bug with parsing `yield` in a `for` loop initializer. Implement special cases around scope checking for functions. ## 6.0.5 (2019-01-02) ### Bug fixes Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type. Don't treat `let` as a keyword when the next token is `{` on the next line. Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on. ## 6.0.4 (2018-11-05) ### Bug fixes Further improvements to tokenizing regular expressions in corner cases. ## 6.0.3 (2018-11-04) ### Bug fixes Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression. Remove stray symlink in the package tarball. ## 6.0.2 (2018-09-26) ### Bug fixes Fix bug where default expressions could fail to parse inside an object destructuring assignment expression. ## 6.0.1 (2018-09-14) ### Bug fixes Fix wrong value in `version` export. ## 6.0.0 (2018-09-14) ### Bug fixes Better handle variable-redefinition checks for catch bindings and functions directly under if statements. Forbid `new.target` in top-level arrow functions. Fix issue with parsing a regexp after `yield` in some contexts. ### New features The package now comes with TypeScript definitions. ### Breaking changes The default value of the `ecmaVersion` option is now 9 (2018). Plugins work differently, and will have to be rewritten to work with this version. The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`). ## 5.7.3 (2018-09-10) ### Bug fixes Fix failure to tokenize regexps after expressions like `x.of`. Better error message for unterminated template literals. ## 5.7.2 (2018-08-24) ### Bug fixes Properly handle `allowAwaitOutsideFunction` in for statements. Treat function declarations at the top level of modules like let bindings. Don't allow async function declarations as the only statement under a label. ## 5.7.0 (2018-06-15) ### New features Upgraded to Unicode 11. ## 5.6.0 (2018-05-31) ### New features Allow U+2028 and U+2029 in string when ECMAVersion >= 10. Allow binding-less catch statements when ECMAVersion >= 10. Add `allowAwaitOutsideFunction` option for parsing top-level `await`. ## 5.5.3 (2018-03-08) ### Bug fixes A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps. ## 5.5.2 (2018-03-08) ### Bug fixes A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0. ## 5.5.1 (2018-03-06) ### Bug fixes Fix misleading error message for octal escapes in template strings. ## 5.5.0 (2018-02-27) ### New features The identifier character categorization is now based on Unicode version 10. Acorn will now validate the content of regular expressions, including new ES9 features. ## 5.4.0 (2018-02-01) ### Bug fixes Disallow duplicate or escaped flags on regular expressions. Disallow octal escapes in strings in strict mode. ### New features Add support for async iteration. Add support for object spread and rest. ## 5.3.0 (2017-12-28) ### Bug fixes Fix parsing of floating point literals with leading zeroes in loose mode. Allow duplicate property names in object patterns. Don't allow static class methods named `prototype`. Disallow async functions directly under `if` or `else`. Parse right-hand-side of `for`/`of` as an assignment expression. Stricter parsing of `for`/`in`. 
Don't allow unicode escapes in contextual keywords. ### New features Parsing class members was factored into smaller methods to allow plugins to hook into it. ## 5.2.1 (2017-10-30) ### Bug fixes Fix a token context corruption bug. ## 5.2.0 (2017-10-30) ### Bug fixes Fix token context tracking for `class` and `function` in property-name position. Make sure `%*` isn't parsed as a valid operator. Allow shorthand properties `get` and `set` to be followed by default values. Disallow `super` when not in callee or object position. ### New features Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements. ## 5.1.2 (2017-09-04) ### Bug fixes Disable parsing of legacy HTML-style comments in modules. Fix parsing of async methods whose names are keywords. ## 5.1.1 (2017-07-06) ### Bug fixes Fix problem with disambiguating regexp and division after a class. ## 5.1.0 (2017-07-05) ### Bug fixes Fix tokenizing of regexps in an object-destructuring `for`/`of` loop and after `yield`. Parse zero-prefixed numbers with non-octal digits as decimal. Allow object/array patterns in rest parameters. Don't error when `yield` is used as a property name. Allow `async` as a shorthand object property. ### New features Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9. ## 5.0.3 (2017-04-01) ### Bug fixes Fix spurious duplicate variable definition errors for named functions. ## 5.0.2 (2017-03-30) ### Bug fixes A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error. ## 5.0.0 (2017-03-28) ### Bug fixes Raise an error for duplicated lexical bindings. Fix spurious error when an assignment expression occurred after a spread expression. Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions. Allow labels in front of `var` declarations, even in strict mode. ### Breaking changes Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declaration nodes can now have `null` as their `id`. ## 4.0.11 (2017-02-07) ### Bug fixes Allow all forms of member expressions to be parenthesized as lvalue. ## 4.0.10 (2017-02-07) ### Bug fixes Don't expect semicolons after default-exported functions or classes, even when they are expressions. Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode. ## 4.0.9 (2017-02-06) ### Bug fixes Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again. ## 4.0.8 (2017-02-03) ### Bug fixes Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet. ## 4.0.7 (2017-02-02) ### Bug fixes Accept invalidly rejected code like `(x).y = 2` again. Don't raise an error when a function _inside_ strict code has a non-simple parameter list. ## 4.0.6 (2017-02-02) ### Bug fixes Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check. ## 4.0.5 (2017-02-02) ### Bug fixes Disallow parenthesized pattern expressions. Allow keywords as export names. Don't allow the `async` keyword to be parenthesized. Properly raise an error when a keyword contains a character escape.
Allow `"use strict"` to appear after other string literal expressions. Disallow labeled declarations. ## 4.0.4 (2016-12-19) ### Bug fixes Fix crash when `export` was followed by a keyword that can't be exported. ## 4.0.3 (2016-08-16) ### Bug fixes Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode. Properly parse properties named `async` in ES2017 mode. Fix bug where reserved words were broken in ES2017 mode. ## 4.0.2 (2016-08-11) ### Bug fixes Don't ignore period or 'e' characters after octal numbers. Fix broken parsing for call expressions in default parameter values of arrow functions. ## 4.0.1 (2016-08-08) ### Bug fixes Fix false positives in duplicated export name errors. ## 4.0.0 (2016-08-07) ### Breaking changes The default `ecmaVersion` option value is now 7. A number of internal method signatures changed, so plugins might need to be updated. ### Bug fixes The parser now raises errors on duplicated export names. `arguments` and `eval` can now be used in shorthand properties. Duplicate parameter names in non-simple argument lists now always produce an error. ### New features The `ecmaVersion` option now also accepts year-style version numbers (2015, etc). Support for `async`/`await` syntax when `ecmaVersion` is >= 8. Support for trailing commas in call expressions when `ecmaVersion` is >= 8. ## 3.3.0 (2016-07-25) ### Bug fixes Fix bug in tokenizing of regexp operator after a function declaration. Fix parser crash when parsing an array pattern with a hole. ### New features Implement check against complex argument lists in functions that enable strict mode in ES7. ## 3.2.0 (2016-06-07) ### Bug fixes Improve handling of lack of unicode regexp support in host environment. Properly reject shorthand properties whose name is a keyword. ### New features Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object. ## 3.1.0 (2016-04-18) ### Bug fixes Properly tokenize the division operator directly after a function expression. Allow trailing comma in destructuring arrays. ## 3.0.4 (2016-02-25) ### Fixes Allow update expressions as left-hand-side of the ES7 exponential operator. ## 3.0.2 (2016-02-10) ### Fixes Fix bug that accidentally made `undefined` a reserved word when parsing ES7. ## 3.0.0 (2016-02-10) ### Breaking changes The default value of the `ecmaVersion` option is now 6 (used to be 5). Support for comprehension syntax (which was dropped from the draft spec) has been removed. ### Fixes `let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code. A parenthesized class or function expression after `export default` is now parsed correctly. ### New features When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`). The identifier character ranges are now based on Unicode 8.0.0. Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled. ## 2.7.0 (2016-01-04) ### Fixes Stop allowing rest parameters in setters. Disallow `y` rexexp flag in ES5. Disallow `\00` and `\000` escapes in strict mode. Raise an error when an import name is a reserved word. ## 2.6.2 (2015-11-10) ### Fixes Don't crash when no options object is passed. ## 2.6.0 (2015-11-09) ### Fixes Add `await` as a reserved word in module sources. Disallow `yield` in a parameter default value for a generator. 
Forbid using a comma after a rest pattern in an array destructuring. ### New features Support parsing stdin in the command-line tool. ## 2.5.0 (2015-10-27) ### Fixes Fix tokenizer support in the command-line tool. Stop allowing `new.target` outside of functions. Remove legacy `guard` and `guardedHandler` properties from try nodes. Stop allowing multiple `__proto__` properties on an object literal in strict mode. Don't allow rest parameters to be non-identifier patterns. Check for duplicate parameter names in arrow functions.
/Jam-Sesh-Server-2.0.0.tar.gz/Jam-Sesh-Server-2.0.0/static/js/bootstrap/bootstrap.esm.min.js
import*as Popper from"@popperjs/core";const NODE_TEXT=3,SelectorEngine={find:(e,t=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(t,e)),findOne:(e,t=document.documentElement)=>Element.prototype.querySelector.call(t,e),children:(e,t)=>[].concat(...e.children).filter(e=>e.matches(t)),parents(e,t){const n=[];let i=e.parentNode;for(;i&&i.nodeType===Node.ELEMENT_NODE&&3!==i.nodeType;)i.matches(t)&&n.push(i),i=i.parentNode;return n},prev(e,t){let n=e.previousElementSibling;for(;n;){if(n.matches(t))return[n];n=n.previousElementSibling}return[]},next(e,t){let n=e.nextElementSibling;for(;n;){if(n.matches(t))return[n];n=n.nextElementSibling}return[]}},MAX_UID=1e6,MILLISECONDS_MULTIPLIER=1e3,TRANSITION_END="transitionend",toType=e=>null==e?""+e:{}.toString.call(e).match(/\s([a-z]+)/i)[1].toLowerCase(),getUID=e=>{do{e+=Math.floor(1e6*Math.random())}while(document.getElementById(e));return e},getSelector=e=>{let t=e.getAttribute("data-bs-target");if(!t||"#"===t){let n=e.getAttribute("href");if(!n||!n.includes("#")&&!n.startsWith("."))return null;n.includes("#")&&!n.startsWith("#")&&(n="#"+n.split("#")[1]),t=n&&"#"!==n?n.trim():null}return t},getSelectorFromElement=e=>{const t=getSelector(e);return t&&document.querySelector(t)?t:null},getElementFromSelector=e=>{const t=getSelector(e);return t?document.querySelector(t):null},getTransitionDurationFromElement=e=>{if(!e)return 0;let{transitionDuration:t,transitionDelay:n}=window.getComputedStyle(e);const i=Number.parseFloat(t),s=Number.parseFloat(n);return i||s?(t=t.split(",")[0],n=n.split(",")[0],1e3*(Number.parseFloat(t)+Number.parseFloat(n))):0},triggerTransitionEnd=e=>{e.dispatchEvent(new Event(TRANSITION_END))},isElement=e=>!(!e||"object"!=typeof e)&&(void 0!==e.jquery&&(e=e[0]),void 0!==e.nodeType),getElement=e=>isElement(e)?e.jquery?e[0]:e:"string"==typeof e&&e.length>0?SelectorEngine.findOne(e):null,typeCheckConfig=(e,t,n)=>{Object.keys(n).forEach(i=>{const s=n[i],o=t[i],r=o&&isElement(o)?"element":null==(a=o)?""+a:{}.toString.call(a).match(/\s([a-z]+)/i)[1].toLowerCase();var a;if(!new RegExp(s).test(r))throw new TypeError(`${e.toUpperCase()}: Option "${i}" provided type "${r}" but expected type "${s}".`)})},isVisible=e=>!(!isElement(e)||0===e.getClientRects().length)&&"visible"===getComputedStyle(e).getPropertyValue("visibility"),isDisabled=e=>!e||e.nodeType!==Node.ELEMENT_NODE||!!e.classList.contains("disabled")||(void 0!==e.disabled?e.disabled:e.hasAttribute("disabled")&&"false"!==e.getAttribute("disabled")),findShadowRoot=e=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof e.getRootNode){const t=e.getRootNode();return t instanceof ShadowRoot?t:null}return e instanceof ShadowRoot?e:e.parentNode?findShadowRoot(e.parentNode):null},noop=()=>{},reflow=e=>e.offsetHeight,getjQuery=()=>{const{jQuery:e}=window;return e&&!document.body.hasAttribute("data-bs-no-jquery")?e:null},DOMContentLoadedCallbacks=[],onDOMContentLoaded=e=>{"loading"===document.readyState?(DOMContentLoadedCallbacks.length||document.addEventListener("DOMContentLoaded",()=>{DOMContentLoadedCallbacks.forEach(e=>e())}),DOMContentLoadedCallbacks.push(e)):e()},isRTL=()=>"rtl"===document.documentElement.dir,defineJQueryPlugin=e=>{var t;t=()=>{const t=getjQuery();if(t){const 
n=e.NAME,i=t.fn[n];t.fn[n]=e.jQueryInterface,t.fn[n].Constructor=e,t.fn[n].noConflict=()=>(t.fn[n]=i,e.jQueryInterface)}},"loading"===document.readyState?(DOMContentLoadedCallbacks.length||document.addEventListener("DOMContentLoaded",()=>{DOMContentLoadedCallbacks.forEach(e=>e())}),DOMContentLoadedCallbacks.push(t)):t()},execute=e=>{"function"==typeof e&&e()},executeAfterTransition=(e,t,n=!0)=>{if(!n)return void execute(e);const i=getTransitionDurationFromElement(t)+5;let s=!1;const o=({target:n})=>{n===t&&(s=!0,t.removeEventListener(TRANSITION_END,o),execute(e))};t.addEventListener(TRANSITION_END,o),setTimeout(()=>{s||triggerTransitionEnd(t)},i)},getNextActiveElement=(e,t,n,i)=>{let s=e.indexOf(t);if(-1===s)return e[!n&&i?e.length-1:0];const o=e.length;return s+=n?1:-1,i&&(s=(s+o)%o),e[Math.max(0,Math.min(s,o-1))]},namespaceRegex=/[^.]*(?=\..*)\.|.*/,stripNameRegex=/\..*/,stripUidRegex=/::\d+$/,eventRegistry={};let uidEvent=1;const customEvents={mouseenter:"mouseover",mouseleave:"mouseout"},customEventsRegex=/^(mouseenter|mouseleave)/i,nativeEvents=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function getUidEvent(e,t){return t&&`${t}::${uidEvent++}`||e.uidEvent||uidEvent++}function getEvent(e){const t=getUidEvent(e);return e.uidEvent=t,eventRegistry[t]=eventRegistry[t]||{},eventRegistry[t]}function bootstrapHandler(e,t){return function n(i){return i.delegateTarget=e,n.oneOff&&EventHandler.off(e,i.type,t),t.apply(e,[i])}}function bootstrapDelegationHandler(e,t,n){return function i(s){const o=e.querySelectorAll(t);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(let a=o.length;a--;)if(o[a]===r)return s.delegateTarget=r,i.oneOff&&EventHandler.off(e,s.type,t,n),n.apply(r,[s]);return null}}function findHandler(e,t,n=null){const i=Object.keys(e);for(let s=0,o=i.length;s<o;s++){const o=e[i[s]];if(o.originalHandler===t&&o.delegationSelector===n)return o}return null}function normalizeParams(e,t,n){const i="string"==typeof t,s=i?n:t;let o=getTypeEvent(e);return nativeEvents.has(o)||(o=e),[i,s,o]}function addHandler(e,t,n,i,s){if("string"!=typeof t||!e)return;if(n||(n=i,i=null),customEventsRegex.test(t)){const e=e=>function(t){if(!t.relatedTarget||t.relatedTarget!==t.delegateTarget&&!t.delegateTarget.contains(t.relatedTarget))return e.call(this,t)};i?i=e(i):n=e(n)}const[o,r,a]=normalizeParams(t,n,i),l=getEvent(e),c=l[a]||(l[a]={}),E=findHandler(c,r,o?n:null);if(E)return void(E.oneOff=E.oneOff&&s);const _=getUidEvent(r,t.replace(namespaceRegex,"")),h=o?bootstrapDelegationHandler(e,n,i):bootstrapHandler(e,n);h.delegationSelector=o?n:null,h.originalHandler=r,h.oneOff=s,h.uidEvent=_,c[_]=h,e.addEventListener(a,h,o)}function removeHandler(e,t,n,i,s){const o=findHandler(t[n],i,s);o&&(e.removeEventListener(n,o,Boolean(s)),delete t[n][o.uidEvent])}function removeNamespacedHandlers(e,t,n,i){const s=t[n]||{};Object.keys(s).forEach(o=>{if(o.includes(i)){const i=s[o];removeHandler(e,t,n,i.originalHandler,i.delegationSelector)}})}function getTypeEvent(e){return 
e=e.replace(stripNameRegex,""),customEvents[e]||e}const EventHandler={on(e,t,n,i){addHandler(e,t,n,i,!1)},one(e,t,n,i){addHandler(e,t,n,i,!0)},off(e,t,n,i){if("string"!=typeof t||!e)return;const[s,o,r]=normalizeParams(t,n,i),a=r!==t,l=getEvent(e),c=t.startsWith(".");if(void 0!==o){if(!l||!l[r])return;return void removeHandler(e,l,r,o,s?n:null)}c&&Object.keys(l).forEach(n=>{removeNamespacedHandlers(e,l,n,t.slice(1))});const E=l[r]||{};Object.keys(E).forEach(n=>{const i=n.replace(stripUidRegex,"");if(!a||t.includes(i)){const t=E[n];removeHandler(e,l,r,t.originalHandler,t.delegationSelector)}})},trigger(e,t,n){if("string"!=typeof t||!e)return null;const i=getjQuery(),s=getTypeEvent(t),o=t!==s,r=nativeEvents.has(s);let a,l=!0,c=!0,E=!1,_=null;return o&&i&&(a=i.Event(t,n),i(e).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),E=a.isDefaultPrevented()),r?(_=document.createEvent("HTMLEvents"),_.initEvent(s,l,!0)):_=new CustomEvent(t,{bubbles:l,cancelable:!0}),void 0!==n&&Object.keys(n).forEach(e=>{Object.defineProperty(_,e,{get:()=>n[e]})}),E&&_.preventDefault(),c&&e.dispatchEvent(_),_.defaultPrevented&&void 0!==a&&a.preventDefault(),_}},elementMap=new Map;var Data={set(e,t,n){elementMap.has(e)||elementMap.set(e,new Map);const i=elementMap.get(e);i.has(t)||0===i.size?i.set(t,n):console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(i.keys())[0]}.`)},get:(e,t)=>elementMap.has(e)&&elementMap.get(e).get(t)||null,remove(e,t){if(!elementMap.has(e))return;const n=elementMap.get(e);n.delete(t),0===n.size&&elementMap.delete(e)}};const VERSION="5.0.2";class BaseComponent{constructor(e){(e=getElement(e))&&(this._element=e,Data.set(this._element,this.constructor.DATA_KEY,this))}dispose(){Data.remove(this._element,this.constructor.DATA_KEY),EventHandler.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach(e=>{this[e]=null})}_queueCallback(e,t,n=!0){executeAfterTransition(e,t,n)}static getInstance(e){return Data.get(e,this.DATA_KEY)}static getOrCreateInstance(e,t={}){return this.getInstance(e)||new this(e,"object"==typeof t?t:null)}static get VERSION(){return"5.0.2"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return"bs."+this.NAME}static get EVENT_KEY(){return"."+this.DATA_KEY}}const NAME$c="alert",DATA_KEY$b="bs.alert",EVENT_KEY$b=".bs.alert",DATA_API_KEY$8=".data-api",SELECTOR_DISMISS='[data-bs-dismiss="alert"]',EVENT_CLOSE="close.bs.alert",EVENT_CLOSED="closed.bs.alert",EVENT_CLICK_DATA_API$7="click.bs.alert.data-api",CLASS_NAME_ALERT="alert",CLASS_NAME_FADE$6="fade",CLASS_NAME_SHOW$9="show";class Alert extends BaseComponent{static get NAME(){return NAME$c}close(e){const t=e?this._getRootElement(e):this._element,n=this._triggerCloseEvent(t);null===n||n.defaultPrevented||this._removeElement(t)}_getRootElement(e){return getElementFromSelector(e)||e.closest(".alert")}_triggerCloseEvent(e){return EventHandler.trigger(e,EVENT_CLOSE)}_removeElement(e){e.classList.remove("show");const t=e.classList.contains("fade");this._queueCallback(()=>this._destroyElement(e),e,t)}_destroyElement(e){e.remove(),EventHandler.trigger(e,EVENT_CLOSED)}static jQueryInterface(e){return this.each((function(){const t=Alert.getOrCreateInstance(this);"close"===e&&t[e](this)}))}static handleDismiss(e){return function(t){t&&t.preventDefault(),e.close(this)}}}EventHandler.on(document,EVENT_CLICK_DATA_API$7,SELECTOR_DISMISS,Alert.handleDismiss(new 
Alert)),defineJQueryPlugin(Alert);const NAME$b="button",DATA_KEY$a="bs.button",EVENT_KEY$a=".bs.button",DATA_API_KEY$7=".data-api",CLASS_NAME_ACTIVE$3="active",SELECTOR_DATA_TOGGLE$5='[data-bs-toggle="button"]',EVENT_CLICK_DATA_API$6="click.bs.button.data-api";class Button extends BaseComponent{static get NAME(){return NAME$b}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(e){return this.each((function(){const t=Button.getOrCreateInstance(this);"toggle"===e&&t[e]()}))}}function normalizeData(e){return"true"===e||"false"!==e&&(e===Number(e).toString()?Number(e):""===e||"null"===e?null:e)}function normalizeDataKey(e){return e.replace(/[A-Z]/g,e=>"-"+e.toLowerCase())}EventHandler.on(document,EVENT_CLICK_DATA_API$6,SELECTOR_DATA_TOGGLE$5,e=>{e.preventDefault();const t=e.target.closest(SELECTOR_DATA_TOGGLE$5);Button.getOrCreateInstance(t).toggle()}),defineJQueryPlugin(Button);const Manipulator={setDataAttribute(e,t,n){e.setAttribute("data-bs-"+normalizeDataKey(t),n)},removeDataAttribute(e,t){e.removeAttribute("data-bs-"+normalizeDataKey(t))},getDataAttributes(e){if(!e)return{};const t={};return Object.keys(e.dataset).filter(e=>e.startsWith("bs")).forEach(n=>{let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),t[i]=normalizeData(e.dataset[n])}),t},getDataAttribute:(e,t)=>normalizeData(e.getAttribute("data-bs-"+normalizeDataKey(t))),offset(e){const t=e.getBoundingClientRect();return{top:t.top+document.body.scrollTop,left:t.left+document.body.scrollLeft}},position:e=>({top:e.offsetTop,left:e.offsetLeft})},NAME$a="carousel",DATA_KEY$9="bs.carousel",EVENT_KEY$9=".bs.carousel",DATA_API_KEY$6=".data-api",ARROW_LEFT_KEY="ArrowLeft",ARROW_RIGHT_KEY="ArrowRight",TOUCHEVENT_COMPAT_WAIT=500,SWIPE_THRESHOLD=40,Default$9={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},DefaultType$9={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},ORDER_NEXT="next",ORDER_PREV="prev",DIRECTION_LEFT="left",DIRECTION_RIGHT="right",KEY_TO_DIRECTION={ArrowLeft:DIRECTION_RIGHT,ArrowRight:DIRECTION_LEFT},EVENT_SLIDE="slide.bs.carousel",EVENT_SLID="slid.bs.carousel",EVENT_KEYDOWN="keydown.bs.carousel",EVENT_MOUSEENTER="mouseenter.bs.carousel",EVENT_MOUSELEAVE="mouseleave.bs.carousel",EVENT_TOUCHSTART="touchstart.bs.carousel",EVENT_TOUCHMOVE="touchmove.bs.carousel",EVENT_TOUCHEND="touchend.bs.carousel",EVENT_POINTERDOWN="pointerdown.bs.carousel",EVENT_POINTERUP="pointerup.bs.carousel",EVENT_DRAG_START="dragstart.bs.carousel",EVENT_LOAD_DATA_API$2="load.bs.carousel.data-api",EVENT_CLICK_DATA_API$5="click.bs.carousel.data-api",CLASS_NAME_CAROUSEL="carousel",CLASS_NAME_ACTIVE$2="active",CLASS_NAME_SLIDE="slide",CLASS_NAME_END="carousel-item-end",CLASS_NAME_START="carousel-item-start",CLASS_NAME_NEXT="carousel-item-next",CLASS_NAME_PREV="carousel-item-prev",CLASS_NAME_POINTER_EVENT="pointer-event",SELECTOR_ACTIVE$1=".active",SELECTOR_ACTIVE_ITEM=".active.carousel-item",SELECTOR_ITEM=".carousel-item",SELECTOR_ITEM_IMG=".carousel-item img",SELECTOR_NEXT_PREV=".carousel-item-next, .carousel-item-prev",SELECTOR_INDICATORS=".carousel-indicators",SELECTOR_INDICATOR="[data-bs-target]",SELECTOR_DATA_SLIDE="[data-bs-slide], [data-bs-slide-to]",SELECTOR_DATA_RIDE='[data-bs-ride="carousel"]',POINTER_TYPE_TOUCH="touch",POINTER_TYPE_PEN="pen";class Carousel extends 
BaseComponent{constructor(e,t){super(e),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(t),this._indicatorsElement=SelectorEngine.findOne(SELECTOR_INDICATORS,this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return Default$9}static get NAME(){return NAME$a}next(){this._slide(ORDER_NEXT)}nextWhenVisible(){!document.hidden&&isVisible(this._element)&&this.next()}prev(){this._slide(ORDER_PREV)}pause(e){e||(this._isPaused=!0),SelectorEngine.findOne(SELECTOR_NEXT_PREV,this._element)&&(triggerTransitionEnd(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(e){e||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(e){this._activeElement=SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM,this._element);const t=this._getItemIndex(this._activeElement);if(e>this._items.length-1||e<0)return;if(this._isSliding)return void EventHandler.one(this._element,EVENT_SLID,()=>this.to(e));if(t===e)return this.pause(),void this.cycle();const n=e>t?ORDER_NEXT:ORDER_PREV;this._slide(n,this._items[e])}_getConfig(e){return e={...Default$9,...Manipulator.getDataAttributes(this._element),..."object"==typeof e?e:{}},typeCheckConfig(NAME$a,e,DefaultType$9),e}_handleSwipe(){const e=Math.abs(this.touchDeltaX);if(e<=40)return;const t=e/this.touchDeltaX;this.touchDeltaX=0,t&&this._slide(t>0?DIRECTION_RIGHT:DIRECTION_LEFT)}_addEventListeners(){this._config.keyboard&&EventHandler.on(this._element,EVENT_KEYDOWN,e=>this._keydown(e)),"hover"===this._config.pause&&(EventHandler.on(this._element,EVENT_MOUSEENTER,e=>this.pause(e)),EventHandler.on(this._element,EVENT_MOUSELEAVE,e=>this.cycle(e))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const e=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType?this._pointerEvent||(this.touchStartX=e.touches[0].clientX):this.touchStartX=e.clientX},t=e=>{this.touchDeltaX=e.touches&&e.touches.length>1?0:e.touches[0].clientX-this.touchStartX},n=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType||(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout(e=>this.cycle(e),500+this._config.interval))};SelectorEngine.find(SELECTOR_ITEM_IMG,this._element).forEach(e=>{EventHandler.on(e,EVENT_DRAG_START,e=>e.preventDefault())}),this._pointerEvent?(EventHandler.on(this._element,EVENT_POINTERDOWN,t=>e(t)),EventHandler.on(this._element,EVENT_POINTERUP,e=>n(e)),this._element.classList.add("pointer-event")):(EventHandler.on(this._element,EVENT_TOUCHSTART,t=>e(t)),EventHandler.on(this._element,EVENT_TOUCHMOVE,e=>t(e)),EventHandler.on(this._element,EVENT_TOUCHEND,e=>n(e)))}_keydown(e){if(/input|textarea/i.test(e.target.tagName))return;const t=KEY_TO_DIRECTION[e.key];t&&(e.preventDefault(),this._slide(t))}_getItemIndex(e){return 
this._items=e&&e.parentNode?SelectorEngine.find(SELECTOR_ITEM,e.parentNode):[],this._items.indexOf(e)}_getItemByOrder(e,t){const n=e===ORDER_NEXT;return getNextActiveElement(this._items,t,n,this._config.wrap)}_triggerSlideEvent(e,t){const n=this._getItemIndex(e),i=this._getItemIndex(SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM,this._element));return EventHandler.trigger(this._element,EVENT_SLIDE,{relatedTarget:e,direction:t,from:i,to:n})}_setActiveIndicatorElement(e){if(this._indicatorsElement){const t=SelectorEngine.findOne(".active",this._indicatorsElement);t.classList.remove("active"),t.removeAttribute("aria-current");const n=SelectorEngine.find("[data-bs-target]",this._indicatorsElement);for(let t=0;t<n.length;t++)if(Number.parseInt(n[t].getAttribute("data-bs-slide-to"),10)===this._getItemIndex(e)){n[t].classList.add("active"),n[t].setAttribute("aria-current","true");break}}}_updateInterval(){const e=this._activeElement||SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM,this._element);if(!e)return;const t=Number.parseInt(e.getAttribute("data-bs-interval"),10);t?(this._config.defaultInterval=this._config.defaultInterval||this._config.interval,this._config.interval=t):this._config.interval=this._config.defaultInterval||this._config.interval}_slide(e,t){const n=this._directionToOrder(e),i=SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM,this._element),s=this._getItemIndex(i),o=t||this._getItemByOrder(n,i),r=this._getItemIndex(o),a=Boolean(this._interval),l=n===ORDER_NEXT,c=l?CLASS_NAME_START:CLASS_NAME_END,E=l?CLASS_NAME_NEXT:CLASS_NAME_PREV,_=this._orderToDirection(n);if(o&&o.classList.contains("active"))return void(this._isSliding=!1);if(this._isSliding)return;if(this._triggerSlideEvent(o,_).defaultPrevented)return;if(!i||!o)return;this._isSliding=!0,a&&this.pause(),this._setActiveIndicatorElement(o),this._activeElement=o;const h=()=>{EventHandler.trigger(this._element,EVENT_SLID,{relatedTarget:o,direction:_,from:s,to:r})};if(this._element.classList.contains("slide")){o.classList.add(E),reflow(o),i.classList.add(c),o.classList.add(c);const e=()=>{o.classList.remove(c,E),o.classList.add("active"),i.classList.remove("active",E,c),this._isSliding=!1,setTimeout(h,0)};this._queueCallback(e,i,!0)}else i.classList.remove("active"),o.classList.add("active"),this._isSliding=!1,h();a&&this.cycle()}_directionToOrder(e){return[DIRECTION_RIGHT,DIRECTION_LEFT].includes(e)?isRTL()?e===DIRECTION_LEFT?ORDER_PREV:ORDER_NEXT:e===DIRECTION_LEFT?ORDER_NEXT:ORDER_PREV:e}_orderToDirection(e){return[ORDER_NEXT,ORDER_PREV].includes(e)?isRTL()?e===ORDER_PREV?DIRECTION_LEFT:DIRECTION_RIGHT:e===ORDER_PREV?DIRECTION_RIGHT:DIRECTION_LEFT:e}static carouselInterface(e,t){const n=Carousel.getOrCreateInstance(e,t);let{_config:i}=n;"object"==typeof t&&(i={...i,...t});const s="string"==typeof t?t:i.slide;if("number"==typeof t)n.to(t);else if("string"==typeof s){if(void 0===n[s])throw new TypeError(`No method named "${s}"`);n[s]()}else i.interval&&i.ride&&(n.pause(),n.cycle())}static jQueryInterface(e){return this.each((function(){Carousel.carouselInterface(this,e)}))}static dataApiClickHandler(e){const t=getElementFromSelector(this);if(!t||!t.classList.contains("carousel"))return;const 
n={...Manipulator.getDataAttributes(t),...Manipulator.getDataAttributes(this)},i=this.getAttribute("data-bs-slide-to");i&&(n.interval=!1),Carousel.carouselInterface(t,n),i&&Carousel.getInstance(t).to(i),e.preventDefault()}}EventHandler.on(document,EVENT_CLICK_DATA_API$5,SELECTOR_DATA_SLIDE,Carousel.dataApiClickHandler),EventHandler.on(window,EVENT_LOAD_DATA_API$2,()=>{const e=SelectorEngine.find(SELECTOR_DATA_RIDE);for(let t=0,n=e.length;t<n;t++)Carousel.carouselInterface(e[t],Carousel.getInstance(e[t]))}),defineJQueryPlugin(Carousel);const NAME$9="collapse",DATA_KEY$8="bs.collapse",EVENT_KEY$8=".bs.collapse",DATA_API_KEY$5=".data-api",Default$8={toggle:!0,parent:""},DefaultType$8={toggle:"boolean",parent:"(string|element)"},EVENT_SHOW$5="show.bs.collapse",EVENT_SHOWN$5="shown.bs.collapse",EVENT_HIDE$5="hide.bs.collapse",EVENT_HIDDEN$5="hidden.bs.collapse",EVENT_CLICK_DATA_API$4="click.bs.collapse.data-api",CLASS_NAME_SHOW$8="show",CLASS_NAME_COLLAPSE="collapse",CLASS_NAME_COLLAPSING="collapsing",CLASS_NAME_COLLAPSED="collapsed",WIDTH="width",HEIGHT="height",SELECTOR_ACTIVES=".show, .collapsing",SELECTOR_DATA_TOGGLE$4='[data-bs-toggle="collapse"]';class Collapse extends BaseComponent{constructor(e,t){super(e),this._isTransitioning=!1,this._config=this._getConfig(t),this._triggerArray=SelectorEngine.find(`${SELECTOR_DATA_TOGGLE$4}[href="#${this._element.id}"],${SELECTOR_DATA_TOGGLE$4}[data-bs-target="#${this._element.id}"]`);const n=SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);for(let e=0,t=n.length;e<t;e++){const t=n[e],i=getSelectorFromElement(t),s=SelectorEngine.find(i).filter(e=>e===this._element);null!==i&&s.length&&(this._selector=i,this._triggerArray.push(t))}this._parent=this._config.parent?this._getParent():null,this._config.parent||this._addAriaAndCollapsedClass(this._element,this._triggerArray),this._config.toggle&&this.toggle()}static get Default(){return Default$8}static get NAME(){return NAME$9}toggle(){this._element.classList.contains("show")?this.hide():this.show()}show(){if(this._isTransitioning||this._element.classList.contains("show"))return;let e,t;this._parent&&(e=SelectorEngine.find(SELECTOR_ACTIVES,this._parent).filter(e=>"string"==typeof this._config.parent?e.getAttribute("data-bs-parent")===this._config.parent:e.classList.contains("collapse")),0===e.length&&(e=null));const n=SelectorEngine.findOne(this._selector);if(e){const i=e.find(e=>n!==e);if(t=i?Collapse.getInstance(i):null,t&&t._isTransitioning)return}if(EventHandler.trigger(this._element,EVENT_SHOW$5).defaultPrevented)return;e&&e.forEach(e=>{n!==e&&Collapse.collapseInterface(e,"hide"),t||Data.set(e,DATA_KEY$8,null)});const i=this._getDimension();this._element.classList.remove("collapse"),this._element.classList.add("collapsing"),this._element.style[i]=0,this._triggerArray.length&&this._triggerArray.forEach(e=>{e.classList.remove("collapsed"),e.setAttribute("aria-expanded",!0)}),this.setTransitioning(!0);const s="scroll"+(i[0].toUpperCase()+i.slice(1));this._queueCallback(()=>{this._element.classList.remove("collapsing"),this._element.classList.add("collapse","show"),this._element.style[i]="",this.setTransitioning(!1),EventHandler.trigger(this._element,EVENT_SHOWN$5)},this._element,!0),this._element.style[i]=this._element[s]+"px"}hide(){if(this._isTransitioning||!this._element.classList.contains("show"))return;if(EventHandler.trigger(this._element,EVENT_HIDE$5).defaultPrevented)return;const 
e=this._getDimension();this._element.style[e]=this._element.getBoundingClientRect()[e]+"px",reflow(this._element),this._element.classList.add("collapsing"),this._element.classList.remove("collapse","show");const t=this._triggerArray.length;if(t>0)for(let e=0;e<t;e++){const t=this._triggerArray[e],n=getElementFromSelector(t);n&&!n.classList.contains("show")&&(t.classList.add("collapsed"),t.setAttribute("aria-expanded",!1))}this.setTransitioning(!0),this._element.style[e]="",this._queueCallback(()=>{this.setTransitioning(!1),this._element.classList.remove("collapsing"),this._element.classList.add("collapse"),EventHandler.trigger(this._element,EVENT_HIDDEN$5)},this._element,!0)}setTransitioning(e){this._isTransitioning=e}_getConfig(e){return(e={...Default$8,...e}).toggle=Boolean(e.toggle),typeCheckConfig(NAME$9,e,DefaultType$8),e}_getDimension(){return this._element.classList.contains(WIDTH)?WIDTH:HEIGHT}_getParent(){let{parent:e}=this._config;e=getElement(e);const t=`${SELECTOR_DATA_TOGGLE$4}[data-bs-parent="${e}"]`;return SelectorEngine.find(t,e).forEach(e=>{const t=getElementFromSelector(e);this._addAriaAndCollapsedClass(t,[e])}),e}_addAriaAndCollapsedClass(e,t){if(!e||!t.length)return;const n=e.classList.contains("show");t.forEach(e=>{n?e.classList.remove("collapsed"):e.classList.add("collapsed"),e.setAttribute("aria-expanded",n)})}static collapseInterface(e,t){let n=Collapse.getInstance(e);const i={...Default$8,...Manipulator.getDataAttributes(e),..."object"==typeof t&&t?t:{}};if(!n&&i.toggle&&"string"==typeof t&&/show|hide/.test(t)&&(i.toggle=!1),n||(n=new Collapse(e,i)),"string"==typeof t){if(void 0===n[t])throw new TypeError(`No method named "${t}"`);n[t]()}}static jQueryInterface(e){return this.each((function(){Collapse.collapseInterface(this,e)}))}}EventHandler.on(document,EVENT_CLICK_DATA_API$4,SELECTOR_DATA_TOGGLE$4,(function(e){("A"===e.target.tagName||e.delegateTarget&&"A"===e.delegateTarget.tagName)&&e.preventDefault();const t=Manipulator.getDataAttributes(this),n=getSelectorFromElement(this);SelectorEngine.find(n).forEach(e=>{const n=Collapse.getInstance(e);let i;n?(null===n._parent&&"string"==typeof t.parent&&(n._config.parent=t.parent,n._parent=n._getParent()),i="toggle"):i=t,Collapse.collapseInterface(e,i)})})),defineJQueryPlugin(Collapse);const NAME$8="dropdown",DATA_KEY$7="bs.dropdown",EVENT_KEY$7=".bs.dropdown",DATA_API_KEY$4=".data-api",ESCAPE_KEY$2="Escape",SPACE_KEY="Space",TAB_KEY="Tab",ARROW_UP_KEY="ArrowUp",ARROW_DOWN_KEY="ArrowDown",RIGHT_MOUSE_BUTTON=2,REGEXP_KEYDOWN=new RegExp("ArrowUp|ArrowDown|Escape"),EVENT_HIDE$4="hide.bs.dropdown",EVENT_HIDDEN$4="hidden.bs.dropdown",EVENT_SHOW$4="show.bs.dropdown",EVENT_SHOWN$4="shown.bs.dropdown",EVENT_CLICK="click.bs.dropdown",EVENT_CLICK_DATA_API$3="click.bs.dropdown.data-api",EVENT_KEYDOWN_DATA_API="keydown.bs.dropdown.data-api",EVENT_KEYUP_DATA_API="keyup.bs.dropdown.data-api",CLASS_NAME_SHOW$7="show",CLASS_NAME_DROPUP="dropup",CLASS_NAME_DROPEND="dropend",CLASS_NAME_DROPSTART="dropstart",CLASS_NAME_NAVBAR="navbar",SELECTOR_DATA_TOGGLE$3='[data-bs-toggle="dropdown"]',SELECTOR_MENU=".dropdown-menu",SELECTOR_NAVBAR_NAV=".navbar-nav",SELECTOR_VISIBLE_ITEMS=".dropdown-menu 
.dropdown-item:not(.disabled):not(:disabled)",PLACEMENT_TOP=isRTL()?"top-end":"top-start",PLACEMENT_TOPEND=isRTL()?"top-start":"top-end",PLACEMENT_BOTTOM=isRTL()?"bottom-end":"bottom-start",PLACEMENT_BOTTOMEND=isRTL()?"bottom-start":"bottom-end",PLACEMENT_RIGHT=isRTL()?"left-start":"right-start",PLACEMENT_LEFT=isRTL()?"right-start":"left-start",Default$7={offset:[0,2],boundary:"clippingParents",reference:"toggle",display:"dynamic",popperConfig:null,autoClose:!0},DefaultType$7={offset:"(array|string|function)",boundary:"(string|element)",reference:"(string|element|object)",display:"string",popperConfig:"(null|object|function)",autoClose:"(boolean|string)"};class Dropdown extends BaseComponent{constructor(e,t){super(e),this._popper=null,this._config=this._getConfig(t),this._menu=this._getMenuElement(),this._inNavbar=this._detectNavbar(),this._addEventListeners()}static get Default(){return Default$7}static get DefaultType(){return DefaultType$7}static get NAME(){return NAME$8}toggle(){isDisabled(this._element)||(this._element.classList.contains("show")?this.hide():this.show())}show(){if(isDisabled(this._element)||this._menu.classList.contains("show"))return;const e=Dropdown.getParentFromElement(this._element),t={relatedTarget:this._element};if(!EventHandler.trigger(this._element,EVENT_SHOW$4,t).defaultPrevented){if(this._inNavbar)Manipulator.setDataAttribute(this._menu,"popper","none");else{if(void 0===Popper)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let t=this._element;"parent"===this._config.reference?t=e:isElement(this._config.reference)?t=getElement(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);const n=this._getPopperConfig(),i=n.modifiers.find(e=>"applyStyles"===e.name&&!1===e.enabled);this._popper=Popper.createPopper(t,this._menu,n),i&&Manipulator.setDataAttribute(this._menu,"popper","static")}"ontouchstart"in document.documentElement&&!e.closest(".navbar-nav")&&[].concat(...document.body.children).forEach(e=>EventHandler.on(e,"mouseover",noop)),this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.toggle("show"),this._element.classList.toggle("show"),EventHandler.trigger(this._element,EVENT_SHOWN$4,t)}}hide(){if(isDisabled(this._element)||!this._menu.classList.contains("show"))return;const e={relatedTarget:this._element};this._completeHide(e)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_addEventListeners(){EventHandler.on(this._element,EVENT_CLICK,e=>{e.preventDefault(),this.toggle()})}_completeHide(e){EventHandler.trigger(this._element,EVENT_HIDE$4,e).defaultPrevented||("ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach(e=>EventHandler.off(e,"mouseover",noop)),this._popper&&this._popper.destroy(),this._menu.classList.remove("show"),this._element.classList.remove("show"),this._element.setAttribute("aria-expanded","false"),Manipulator.removeDataAttribute(this._menu,"popper"),EventHandler.trigger(this._element,EVENT_HIDDEN$4,e))}_getConfig(e){if(e={...this.constructor.Default,...Manipulator.getDataAttributes(this._element),...e},typeCheckConfig(NAME$8,e,this.constructor.DefaultType),"object"==typeof e.reference&&!isElement(e.reference)&&"function"!=typeof e.reference.getBoundingClientRect)throw new TypeError(NAME$8.toUpperCase()+': Option "reference" provided type "object" without a required "getBoundingClientRect" 
method.');return e}_getMenuElement(){return SelectorEngine.next(this._element,SELECTOR_MENU)[0]}_getPlacement(){const e=this._element.parentNode;if(e.classList.contains("dropend"))return PLACEMENT_RIGHT;if(e.classList.contains("dropstart"))return PLACEMENT_LEFT;const t="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return e.classList.contains("dropup")?t?PLACEMENT_TOPEND:PLACEMENT_TOP:t?PLACEMENT_BOTTOMEND:PLACEMENT_BOTTOM}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:e}=this._config;return"string"==typeof e?e.split(",").map(e=>Number.parseInt(e,10)):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(){const e={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(e.modifiers=[{name:"applyStyles",enabled:!1}]),{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_selectMenuItem({key:e,target:t}){const n=SelectorEngine.find(SELECTOR_VISIBLE_ITEMS,this._menu).filter(isVisible);n.length&&getNextActiveElement(n,t,"ArrowDown"===e,!n.includes(t)).focus()}static dropdownInterface(e,t){const n=Dropdown.getOrCreateInstance(e,t);if("string"==typeof t){if(void 0===n[t])throw new TypeError(`No method named "${t}"`);n[t]()}}static jQueryInterface(e){return this.each((function(){Dropdown.dropdownInterface(this,e)}))}static clearMenus(e){if(e&&(2===e.button||"keyup"===e.type&&"Tab"!==e.key))return;const t=SelectorEngine.find(SELECTOR_DATA_TOGGLE$3);for(let n=0,i=t.length;n<i;n++){const i=Dropdown.getInstance(t[n]);if(!i||!1===i._config.autoClose)continue;if(!i._element.classList.contains("show"))continue;const s={relatedTarget:i._element};if(e){const t=e.composedPath(),n=t.includes(i._menu);if(t.includes(i._element)||"inside"===i._config.autoClose&&!n||"outside"===i._config.autoClose&&n)continue;if(i._menu.contains(e.target)&&("keyup"===e.type&&"Tab"===e.key||/input|select|option|textarea|form/i.test(e.target.tagName)))continue;"click"===e.type&&(s.clickEvent=e)}i._completeHide(s)}}static getParentFromElement(e){return getElementFromSelector(e)||e.parentNode}static dataApiKeydownHandler(e){if(/input|textarea/i.test(e.target.tagName)?"Space"===e.key||"Escape"!==e.key&&("ArrowDown"!==e.key&&"ArrowUp"!==e.key||e.target.closest(SELECTOR_MENU)):!REGEXP_KEYDOWN.test(e.key))return;const t=this.classList.contains("show");if(!t&&"Escape"===e.key)return;if(e.preventDefault(),e.stopPropagation(),isDisabled(this))return;const n=()=>this.matches(SELECTOR_DATA_TOGGLE$3)?this:SelectorEngine.prev(this,SELECTOR_DATA_TOGGLE$3)[0];return"Escape"===e.key?(n().focus(),void Dropdown.clearMenus()):"ArrowUp"===e.key||"ArrowDown"===e.key?(t||n().click(),void Dropdown.getInstance(n())._selectMenuItem(e)):void(t&&"Space"!==e.key||Dropdown.clearMenus())}}EventHandler.on(document,EVENT_KEYDOWN_DATA_API,SELECTOR_DATA_TOGGLE$3,Dropdown.dataApiKeydownHandler),EventHandler.on(document,EVENT_KEYDOWN_DATA_API,SELECTOR_MENU,Dropdown.dataApiKeydownHandler),EventHandler.on(document,EVENT_CLICK_DATA_API$3,Dropdown.clearMenus),EventHandler.on(document,EVENT_KEYUP_DATA_API,Dropdown.clearMenus),EventHandler.on(document,EVENT_CLICK_DATA_API$3,SELECTOR_DATA_TOGGLE$3,(function(e){e.preventDefault(),Dropdown.dropdownInterface(this)})),defineJQueryPlugin(Dropdown);const SELECTOR_FIXED_CONTENT=".fixed-top, .fixed-bottom, .is-fixed, 
.sticky-top",SELECTOR_STICKY_CONTENT=".sticky-top";class ScrollBarHelper{constructor(){this._element=document.body}getWidth(){const e=document.documentElement.clientWidth;return Math.abs(window.innerWidth-e)}hide(){const e=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,"paddingRight",t=>t+e),this._setElementAttributes(SELECTOR_FIXED_CONTENT,"paddingRight",t=>t+e),this._setElementAttributes(".sticky-top","marginRight",t=>t-e)}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(e,t,n){const i=this.getWidth();this._applyManipulationCallback(e,e=>{if(e!==this._element&&window.innerWidth>e.clientWidth+i)return;this._saveInitialAttribute(e,t);const s=window.getComputedStyle(e)[t];e.style[t]=n(Number.parseFloat(s))+"px"})}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,"paddingRight"),this._resetElementAttributes(SELECTOR_FIXED_CONTENT,"paddingRight"),this._resetElementAttributes(".sticky-top","marginRight")}_saveInitialAttribute(e,t){const n=e.style[t];n&&Manipulator.setDataAttribute(e,t,n)}_resetElementAttributes(e,t){this._applyManipulationCallback(e,e=>{const n=Manipulator.getDataAttribute(e,t);void 0===n?e.style.removeProperty(t):(Manipulator.removeDataAttribute(e,t),e.style[t]=n)})}_applyManipulationCallback(e,t){isElement(e)?t(e):SelectorEngine.find(e,this._element).forEach(t)}isOverflowing(){return this.getWidth()>0}}const Default$6={isVisible:!0,isAnimated:!1,rootElement:"body",clickCallback:null},DefaultType$6={isVisible:"boolean",isAnimated:"boolean",rootElement:"(element|string)",clickCallback:"(function|null)"},NAME$7="backdrop",CLASS_NAME_BACKDROP="modal-backdrop",CLASS_NAME_FADE$5="fade",CLASS_NAME_SHOW$6="show",EVENT_MOUSEDOWN="mousedown.bs.backdrop";class Backdrop{constructor(e){this._config=this._getConfig(e),this._isAppended=!1,this._element=null}show(e){this._config.isVisible?(this._append(),this._config.isAnimated&&reflow(this._getElement()),this._getElement().classList.add("show"),this._emulateAnimation(()=>{execute(e)})):execute(e)}hide(e){this._config.isVisible?(this._getElement().classList.remove("show"),this._emulateAnimation(()=>{this.dispose(),execute(e)})):execute(e)}_getElement(){if(!this._element){const e=document.createElement("div");e.className="modal-backdrop",this._config.isAnimated&&e.classList.add("fade"),this._element=e}return this._element}_getConfig(e){return(e={...Default$6,..."object"==typeof e?e:{}}).rootElement=getElement(e.rootElement),typeCheckConfig(NAME$7,e,DefaultType$6),e}_append(){this._isAppended||(this._config.rootElement.appendChild(this._getElement()),EventHandler.on(this._getElement(),EVENT_MOUSEDOWN,()=>{execute(this._config.clickCallback)}),this._isAppended=!0)}dispose(){this._isAppended&&(EventHandler.off(this._element,EVENT_MOUSEDOWN),this._element.remove(),this._isAppended=!1)}_emulateAnimation(e){executeAfterTransition(e,this._getElement(),this._config.isAnimated)}}const 
NAME$6="modal",DATA_KEY$6="bs.modal",EVENT_KEY$6=".bs.modal",DATA_API_KEY$3=".data-api",ESCAPE_KEY$1="Escape",Default$5={backdrop:!0,keyboard:!0,focus:!0},DefaultType$5={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean"},EVENT_HIDE$3="hide.bs.modal",EVENT_HIDE_PREVENTED="hidePrevented.bs.modal",EVENT_HIDDEN$3="hidden.bs.modal",EVENT_SHOW$3="show.bs.modal",EVENT_SHOWN$3="shown.bs.modal",EVENT_FOCUSIN$2="focusin.bs.modal",EVENT_RESIZE="resize.bs.modal",EVENT_CLICK_DISMISS$2="click.dismiss.bs.modal",EVENT_KEYDOWN_DISMISS$1="keydown.dismiss.bs.modal",EVENT_MOUSEUP_DISMISS="mouseup.dismiss.bs.modal",EVENT_MOUSEDOWN_DISMISS="mousedown.dismiss.bs.modal",EVENT_CLICK_DATA_API$2="click.bs.modal.data-api",CLASS_NAME_OPEN="modal-open",CLASS_NAME_FADE$4="fade",CLASS_NAME_SHOW$5="show",CLASS_NAME_STATIC="modal-static",SELECTOR_DIALOG=".modal-dialog",SELECTOR_MODAL_BODY=".modal-body",SELECTOR_DATA_TOGGLE$2='[data-bs-toggle="modal"]',SELECTOR_DATA_DISMISS$2='[data-bs-dismiss="modal"]';class Modal extends BaseComponent{constructor(e,t){super(e),this._config=this._getConfig(t),this._dialog=SelectorEngine.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._isShown=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollBar=new ScrollBarHelper}static get Default(){return Default$5}static get NAME(){return NAME$6}toggle(e){return this._isShown?this.hide():this.show(e)}show(e){this._isShown||this._isTransitioning||EventHandler.trigger(this._element,EVENT_SHOW$3,{relatedTarget:e}).defaultPrevented||(this._isShown=!0,this._isAnimated()&&(this._isTransitioning=!0),this._scrollBar.hide(),document.body.classList.add("modal-open"),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),EventHandler.on(this._element,EVENT_CLICK_DISMISS$2,SELECTOR_DATA_DISMISS$2,e=>this.hide(e)),EventHandler.on(this._dialog,EVENT_MOUSEDOWN_DISMISS,()=>{EventHandler.one(this._element,EVENT_MOUSEUP_DISMISS,e=>{e.target===this._element&&(this._ignoreBackdropClick=!0)})}),this._showBackdrop(()=>this._showElement(e)))}hide(e){if(e&&["A","AREA"].includes(e.target.tagName)&&e.preventDefault(),!this._isShown||this._isTransitioning)return;if(EventHandler.trigger(this._element,EVENT_HIDE$3).defaultPrevented)return;this._isShown=!1;const t=this._isAnimated();t&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),EventHandler.off(document,EVENT_FOCUSIN$2),this._element.classList.remove("show"),EventHandler.off(this._element,EVENT_CLICK_DISMISS$2),EventHandler.off(this._dialog,EVENT_MOUSEDOWN_DISMISS),this._queueCallback(()=>this._hideModal(),this._element,t)}dispose(){[window,this._dialog].forEach(e=>EventHandler.off(e,".bs.modal")),this._backdrop.dispose(),super.dispose(),EventHandler.off(document,EVENT_FOCUSIN$2)}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Backdrop({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_getConfig(e){return e={...Default$5,...Manipulator.getDataAttributes(this._element),..."object"==typeof e?e:{}},typeCheckConfig(NAME$6,e,DefaultType$5),e}_showElement(e){const 
t=this._isAnimated(),n=SelectorEngine.findOne(".modal-body",this._dialog);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.appendChild(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0,n&&(n.scrollTop=0),t&&reflow(this._element),this._element.classList.add("show"),this._config.focus&&this._enforceFocus(),this._queueCallback(()=>{this._config.focus&&this._element.focus(),this._isTransitioning=!1,EventHandler.trigger(this._element,EVENT_SHOWN$3,{relatedTarget:e})},this._dialog,t)}_enforceFocus(){EventHandler.off(document,EVENT_FOCUSIN$2),EventHandler.on(document,EVENT_FOCUSIN$2,e=>{document===e.target||this._element===e.target||this._element.contains(e.target)||this._element.focus()})}_setEscapeEvent(){this._isShown?EventHandler.on(this._element,EVENT_KEYDOWN_DISMISS$1,e=>{this._config.keyboard&&"Escape"===e.key?(e.preventDefault(),this.hide()):this._config.keyboard||"Escape"!==e.key||this._triggerBackdropTransition()}):EventHandler.off(this._element,EVENT_KEYDOWN_DISMISS$1)}_setResizeEvent(){this._isShown?EventHandler.on(window,EVENT_RESIZE,()=>this._adjustDialog()):EventHandler.off(window,EVENT_RESIZE)}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide(()=>{document.body.classList.remove("modal-open"),this._resetAdjustments(),this._scrollBar.reset(),EventHandler.trigger(this._element,EVENT_HIDDEN$3)})}_showBackdrop(e){EventHandler.on(this._element,EVENT_CLICK_DISMISS$2,e=>{this._ignoreBackdropClick?this._ignoreBackdropClick=!1:e.target===e.currentTarget&&(!0===this._config.backdrop?this.hide():"static"===this._config.backdrop&&this._triggerBackdropTransition())}),this._backdrop.show(e)}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(EventHandler.trigger(this._element,EVENT_HIDE_PREVENTED).defaultPrevented)return;const{classList:e,scrollHeight:t,style:n}=this._element,i=t>document.documentElement.clientHeight;!i&&"hidden"===n.overflowY||e.contains("modal-static")||(i||(n.overflowY="hidden"),e.add("modal-static"),this._queueCallback(()=>{e.remove("modal-static"),i||this._queueCallback(()=>{n.overflowY=""},this._dialog)},this._dialog),this._element.focus())}_adjustDialog(){const e=this._element.scrollHeight>document.documentElement.clientHeight,t=this._scrollBar.getWidth(),n=t>0;(!n&&e&&!isRTL()||n&&!e&&isRTL())&&(this._element.style.paddingLeft=t+"px"),(n&&!e&&!isRTL()||!n&&e&&isRTL())&&(this._element.style.paddingRight=t+"px")}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(e,t){return this.each((function(){const n=Modal.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===n[e])throw new TypeError(`No method named "${e}"`);n[e](t)}}))}}EventHandler.on(document,EVENT_CLICK_DATA_API$2,SELECTOR_DATA_TOGGLE$2,(function(e){const t=getElementFromSelector(this);["A","AREA"].includes(this.tagName)&&e.preventDefault(),EventHandler.one(t,EVENT_SHOW$3,e=>{e.defaultPrevented||EventHandler.one(t,EVENT_HIDDEN$3,()=>{isVisible(this)&&this.focus()})}),Modal.getOrCreateInstance(t).toggle(this)})),defineJQueryPlugin(Modal);const 
NAME$5="offcanvas",DATA_KEY$5="bs.offcanvas",EVENT_KEY$5=".bs.offcanvas",DATA_API_KEY$2=".data-api",EVENT_LOAD_DATA_API$1="load.bs.offcanvas.data-api",ESCAPE_KEY="Escape",Default$4={backdrop:!0,keyboard:!0,scroll:!1},DefaultType$4={backdrop:"boolean",keyboard:"boolean",scroll:"boolean"},CLASS_NAME_SHOW$4="show",OPEN_SELECTOR=".offcanvas.show",EVENT_SHOW$2="show.bs.offcanvas",EVENT_SHOWN$2="shown.bs.offcanvas",EVENT_HIDE$2="hide.bs.offcanvas",EVENT_HIDDEN$2="hidden.bs.offcanvas",EVENT_FOCUSIN$1="focusin.bs.offcanvas",EVENT_CLICK_DATA_API$1="click.bs.offcanvas.data-api",EVENT_CLICK_DISMISS$1="click.dismiss.bs.offcanvas",EVENT_KEYDOWN_DISMISS="keydown.dismiss.bs.offcanvas",SELECTOR_DATA_DISMISS$1='[data-bs-dismiss="offcanvas"]',SELECTOR_DATA_TOGGLE$1='[data-bs-toggle="offcanvas"]';class Offcanvas extends BaseComponent{constructor(e,t){super(e),this._config=this._getConfig(t),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._addEventListeners()}static get NAME(){return NAME$5}static get Default(){return Default$4}toggle(e){return this._isShown?this.hide():this.show(e)}show(e){this._isShown||EventHandler.trigger(this._element,EVENT_SHOW$2,{relatedTarget:e}).defaultPrevented||(this._isShown=!0,this._element.style.visibility="visible",this._backdrop.show(),this._config.scroll||((new ScrollBarHelper).hide(),this._enforceFocusOnElement(this._element)),this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add("show"),this._queueCallback(()=>{EventHandler.trigger(this._element,EVENT_SHOWN$2,{relatedTarget:e})},this._element,!0))}hide(){this._isShown&&(EventHandler.trigger(this._element,EVENT_HIDE$2).defaultPrevented||(EventHandler.off(document,EVENT_FOCUSIN$1),this._element.blur(),this._isShown=!1,this._element.classList.remove("show"),this._backdrop.hide(),this._queueCallback(()=>{this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._element.style.visibility="hidden",this._config.scroll||(new ScrollBarHelper).reset(),EventHandler.trigger(this._element,EVENT_HIDDEN$2)},this._element,!0)))}dispose(){this._backdrop.dispose(),super.dispose(),EventHandler.off(document,EVENT_FOCUSIN$1)}_getConfig(e){return e={...Default$4,...Manipulator.getDataAttributes(this._element),..."object"==typeof e?e:{}},typeCheckConfig(NAME$5,e,DefaultType$4),e}_initializeBackDrop(){return new Backdrop({isVisible:this._config.backdrop,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:()=>this.hide()})}_enforceFocusOnElement(e){EventHandler.off(document,EVENT_FOCUSIN$1),EventHandler.on(document,EVENT_FOCUSIN$1,t=>{document===t.target||e===t.target||e.contains(t.target)||e.focus()}),e.focus()}_addEventListeners(){EventHandler.on(this._element,EVENT_CLICK_DISMISS$1,SELECTOR_DATA_DISMISS$1,()=>this.hide()),EventHandler.on(this._element,EVENT_KEYDOWN_DISMISS,e=>{this._config.keyboard&&"Escape"===e.key&&this.hide()})}static jQueryInterface(e){return this.each((function(){const t=Offcanvas.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError(`No method named "${e}"`);t[e](this)}}))}}EventHandler.on(document,EVENT_CLICK_DATA_API$1,SELECTOR_DATA_TOGGLE$1,(function(e){const 
t=getElementFromSelector(this);if(["A","AREA"].includes(this.tagName)&&e.preventDefault(),isDisabled(this))return;EventHandler.one(t,EVENT_HIDDEN$2,()=>{isVisible(this)&&this.focus()});const n=SelectorEngine.findOne(OPEN_SELECTOR);n&&n!==t&&Offcanvas.getInstance(n).hide(),Offcanvas.getOrCreateInstance(t).toggle(this)})),EventHandler.on(window,EVENT_LOAD_DATA_API$1,()=>SelectorEngine.find(OPEN_SELECTOR).forEach(e=>Offcanvas.getOrCreateInstance(e).show())),defineJQueryPlugin(Offcanvas);const uriAttrs=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),ARIA_ATTRIBUTE_PATTERN=/^aria-[\w-]*$/i,SAFE_URL_PATTERN=/^(?:(?:https?|mailto|ftp|tel|file):|[^#&/:?]*(?:[#/?]|$))/i,DATA_URL_PATTERN=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,allowedAttribute=(e,t)=>{const n=e.nodeName.toLowerCase();if(t.includes(n))return!uriAttrs.has(n)||Boolean(SAFE_URL_PATTERN.test(e.nodeValue)||DATA_URL_PATTERN.test(e.nodeValue));const i=t.filter(e=>e instanceof RegExp);for(let e=0,t=i.length;e<t;e++)if(i[e].test(n))return!0;return!1},DefaultAllowlist={"*":["class","dir","id","lang","role",ARIA_ATTRIBUTE_PATTERN],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]};function sanitizeHtml(e,t,n){if(!e.length)return e;if(n&&"function"==typeof n)return n(e);const i=(new window.DOMParser).parseFromString(e,"text/html"),s=Object.keys(t),o=[].concat(...i.body.querySelectorAll("*"));for(let e=0,n=o.length;e<n;e++){const n=o[e],i=n.nodeName.toLowerCase();if(!s.includes(i)){n.remove();continue}const r=[].concat(...n.attributes),a=[].concat(t["*"]||[],t[i]||[]);r.forEach(e=>{allowedAttribute(e,a)||n.removeAttribute(e.nodeName)})}return i.body.innerHTML}const NAME$4="tooltip",DATA_KEY$4="bs.tooltip",EVENT_KEY$4=".bs.tooltip",CLASS_PREFIX$1="bs-tooltip",BSCLS_PREFIX_REGEX$1=new RegExp("(^|\\s)bs-tooltip\\S+","g"),DISALLOWED_ATTRIBUTES=new Set(["sanitize","allowList","sanitizeFn"]),DefaultType$3={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(array|string|function)",container:"(string|element|boolean)",fallbackPlacements:"array",boundary:"(string|element)",customClass:"(string|function)",sanitize:"boolean",sanitizeFn:"(null|function)",allowList:"object",popperConfig:"(null|object|function)"},AttachmentMap={AUTO:"auto",TOP:"top",RIGHT:isRTL()?"left":"right",BOTTOM:"bottom",LEFT:isRTL()?"right":"left"},Default$3={animation:!0,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover 
focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:[0,0],container:!1,fallbackPlacements:["top","right","bottom","left"],boundary:"clippingParents",customClass:"",sanitize:!0,sanitizeFn:null,allowList:DefaultAllowlist,popperConfig:null},Event$2={HIDE:"hide.bs.tooltip",HIDDEN:"hidden.bs.tooltip",SHOW:"show.bs.tooltip",SHOWN:"shown.bs.tooltip",INSERTED:"inserted.bs.tooltip",CLICK:"click.bs.tooltip",FOCUSIN:"focusin.bs.tooltip",FOCUSOUT:"focusout.bs.tooltip",MOUSEENTER:"mouseenter.bs.tooltip",MOUSELEAVE:"mouseleave.bs.tooltip"},CLASS_NAME_FADE$3="fade",CLASS_NAME_MODAL="modal",CLASS_NAME_SHOW$3="show",HOVER_STATE_SHOW="show",HOVER_STATE_OUT="out",SELECTOR_TOOLTIP_INNER=".tooltip-inner",TRIGGER_HOVER="hover",TRIGGER_FOCUS="focus",TRIGGER_CLICK="click",TRIGGER_MANUAL="manual";class Tooltip extends BaseComponent{constructor(e,t){if(void 0===Popper)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(e),this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this._config=this._getConfig(t),this.tip=null,this._setListeners()}static get Default(){return Default$3}static get NAME(){return NAME$4}static get Event(){return Event$2}static get DefaultType(){return DefaultType$3}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(e){if(this._isEnabled)if(e){const t=this._initializeOnDelegatedTarget(e);t._activeTrigger.click=!t._activeTrigger.click,t._isWithActiveTrigger()?t._enter(null,t):t._leave(null,t)}else{if(this.getTipElement().classList.contains("show"))return void this._leave(null,this);this._enter(null,this)}}dispose(){clearTimeout(this._timeout),EventHandler.off(this._element.closest(".modal"),"hide.bs.modal",this._hideModalHandler),this.tip&&this.tip.remove(),this._popper&&this._popper.destroy(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this.isWithContent()||!this._isEnabled)return;const e=EventHandler.trigger(this._element,this.constructor.Event.SHOW),t=findShadowRoot(this._element),n=null===t?this._element.ownerDocument.documentElement.contains(this._element):t.contains(this._element);if(e.defaultPrevented||!n)return;const i=this.getTipElement(),s=getUID(this.constructor.NAME);i.setAttribute("id",s),this._element.setAttribute("aria-describedby",s),this.setContent(),this._config.animation&&i.classList.add("fade");const o="function"==typeof this._config.placement?this._config.placement.call(this,i,this._element):this._config.placement,r=this._getAttachment(o);this._addAttachmentClass(r);const{container:a}=this._config;Data.set(i,this.constructor.DATA_KEY,this),this._element.ownerDocument.documentElement.contains(this.tip)||(a.appendChild(i),EventHandler.trigger(this._element,this.constructor.Event.INSERTED)),this._popper?this._popper.update():this._popper=Popper.createPopper(this._element,i,this._getPopperConfig(r)),i.classList.add("show");const l="function"==typeof this._config.customClass?this._config.customClass():this._config.customClass;l&&i.classList.add(...l.split(" ")),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach(e=>{EventHandler.on(e,"mouseover",noop)});const c=this.tip.classList.contains("fade");this._queueCallback(()=>{const e=this._hoverState;this._hoverState=null,EventHandler.trigger(this._element,this.constructor.Event.SHOWN),"out"===e&&this._leave(null,this)},this.tip,c)}hide(){if(!this._popper)return;const 
e=this.getTipElement();if(EventHandler.trigger(this._element,this.constructor.Event.HIDE).defaultPrevented)return;e.classList.remove("show"),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach(e=>EventHandler.off(e,"mouseover",noop)),this._activeTrigger.click=!1,this._activeTrigger.focus=!1,this._activeTrigger.hover=!1;const t=this.tip.classList.contains("fade");this._queueCallback(()=>{this._isWithActiveTrigger()||("show"!==this._hoverState&&e.remove(),this._cleanTipClass(),this._element.removeAttribute("aria-describedby"),EventHandler.trigger(this._element,this.constructor.Event.HIDDEN),this._popper&&(this._popper.destroy(),this._popper=null))},this.tip,t),this._hoverState=""}update(){null!==this._popper&&this._popper.update()}isWithContent(){return Boolean(this.getTitle())}getTipElement(){if(this.tip)return this.tip;const e=document.createElement("div");return e.innerHTML=this._config.template,this.tip=e.children[0],this.tip}setContent(){const e=this.getTipElement();this.setElementContent(SelectorEngine.findOne(".tooltip-inner",e),this.getTitle()),e.classList.remove("fade","show")}setElementContent(e,t){if(null!==e)return isElement(t)?(t=getElement(t),void(this._config.html?t.parentNode!==e&&(e.innerHTML="",e.appendChild(t)):e.textContent=t.textContent)):void(this._config.html?(this._config.sanitize&&(t=sanitizeHtml(t,this._config.allowList,this._config.sanitizeFn)),e.innerHTML=t):e.textContent=t)}getTitle(){let e=this._element.getAttribute("data-bs-original-title");return e||(e="function"==typeof this._config.title?this._config.title.call(this._element):this._config.title),e}updateAttachment(e){return"right"===e?"end":"left"===e?"start":e}_initializeOnDelegatedTarget(e,t){const n=this.constructor.DATA_KEY;return(t=t||Data.get(e.delegateTarget,n))||(t=new this.constructor(e.delegateTarget,this._getDelegateConfig()),Data.set(e.delegateTarget,n,t)),t}_getOffset(){const{offset:e}=this._config;return"string"==typeof e?e.split(",").map(e=>Number.parseInt(e,10)):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(e){const t={placement:e,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"onChange",enabled:!0,phase:"afterWrite",fn:e=>this._handlePopperPlacementChange(e)}],onFirstUpdate:e=>{e.options.placement!==e.placement&&this._handlePopperPlacementChange(e)}};return{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_addAttachmentClass(e){this.getTipElement().classList.add("bs-tooltip-"+this.updateAttachment(e))}_getAttachment(e){return AttachmentMap[e.toUpperCase()]}_setListeners(){this._config.trigger.split(" ").forEach(e=>{if("click"===e)EventHandler.on(this._element,this.constructor.Event.CLICK,this._config.selector,e=>this.toggle(e));else if("manual"!==e){const 
t="hover"===e?this.constructor.Event.MOUSEENTER:this.constructor.Event.FOCUSIN,n="hover"===e?this.constructor.Event.MOUSELEAVE:this.constructor.Event.FOCUSOUT;EventHandler.on(this._element,t,this._config.selector,e=>this._enter(e)),EventHandler.on(this._element,n,this._config.selector,e=>this._leave(e))}}),this._hideModalHandler=()=>{this._element&&this.hide()},EventHandler.on(this._element.closest(".modal"),"hide.bs.modal",this._hideModalHandler),this._config.selector?this._config={...this._config,trigger:"manual",selector:""}:this._fixTitle()}_fixTitle(){const e=this._element.getAttribute("title"),t=typeof this._element.getAttribute("data-bs-original-title");(e||"string"!==t)&&(this._element.setAttribute("data-bs-original-title",e||""),!e||this._element.getAttribute("aria-label")||this._element.textContent||this._element.setAttribute("aria-label",e),this._element.setAttribute("title",""))}_enter(e,t){t=this._initializeOnDelegatedTarget(e,t),e&&(t._activeTrigger["focusin"===e.type?"focus":"hover"]=!0),t.getTipElement().classList.contains("show")||"show"===t._hoverState?t._hoverState="show":(clearTimeout(t._timeout),t._hoverState="show",t._config.delay&&t._config.delay.show?t._timeout=setTimeout(()=>{"show"===t._hoverState&&t.show()},t._config.delay.show):t.show())}_leave(e,t){t=this._initializeOnDelegatedTarget(e,t),e&&(t._activeTrigger["focusout"===e.type?"focus":"hover"]=t._element.contains(e.relatedTarget)),t._isWithActiveTrigger()||(clearTimeout(t._timeout),t._hoverState="out",t._config.delay&&t._config.delay.hide?t._timeout=setTimeout(()=>{"out"===t._hoverState&&t.hide()},t._config.delay.hide):t.hide())}_isWithActiveTrigger(){for(const e in this._activeTrigger)if(this._activeTrigger[e])return!0;return!1}_getConfig(e){const t=Manipulator.getDataAttributes(this._element);return Object.keys(t).forEach(e=>{DISALLOWED_ATTRIBUTES.has(e)&&delete t[e]}),(e={...this.constructor.Default,...t,..."object"==typeof e&&e?e:{}}).container=!1===e.container?document.body:getElement(e.container),"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),"number"==typeof e.title&&(e.title=e.title.toString()),"number"==typeof e.content&&(e.content=e.content.toString()),typeCheckConfig(NAME$4,e,this.constructor.DefaultType),e.sanitize&&(e.template=sanitizeHtml(e.template,e.allowList,e.sanitizeFn)),e}_getDelegateConfig(){const e={};if(this._config)for(const t in this._config)this.constructor.Default[t]!==this._config[t]&&(e[t]=this._config[t]);return e}_cleanTipClass(){const e=this.getTipElement(),t=e.getAttribute("class").match(BSCLS_PREFIX_REGEX$1);null!==t&&t.length>0&&t.map(e=>e.trim()).forEach(t=>e.classList.remove(t))}_handlePopperPlacementChange(e){const{state:t}=e;t&&(this.tip=t.elements.popper,this._cleanTipClass(),this._addAttachmentClass(this._getAttachment(t.placement)))}static jQueryInterface(e){return this.each((function(){const t=Tooltip.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}defineJQueryPlugin(Tooltip);const NAME$3="popover",DATA_KEY$3="bs.popover",EVENT_KEY$3=".bs.popover",CLASS_PREFIX="bs-popover",BSCLS_PREFIX_REGEX=new RegExp("(^|\\s)bs-popover\\S+","g"),Default$2={...Tooltip.Default,placement:"right",offset:[0,8],trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="popover-arrow"></div><h3 class="popover-header"></h3><div 
class="popover-body"></div></div>'},DefaultType$2={...Tooltip.DefaultType,content:"(string|element|function)"},Event$1={HIDE:"hide.bs.popover",HIDDEN:"hidden.bs.popover",SHOW:"show.bs.popover",SHOWN:"shown.bs.popover",INSERTED:"inserted.bs.popover",CLICK:"click.bs.popover",FOCUSIN:"focusin.bs.popover",FOCUSOUT:"focusout.bs.popover",MOUSEENTER:"mouseenter.bs.popover",MOUSELEAVE:"mouseleave.bs.popover"},CLASS_NAME_FADE$2="fade",CLASS_NAME_SHOW$2="show",SELECTOR_TITLE=".popover-header",SELECTOR_CONTENT=".popover-body";class Popover extends Tooltip{static get Default(){return Default$2}static get NAME(){return NAME$3}static get Event(){return Event$1}static get DefaultType(){return DefaultType$2}isWithContent(){return this.getTitle()||this._getContent()}getTipElement(){return this.tip||(this.tip=super.getTipElement(),this.getTitle()||SelectorEngine.findOne(SELECTOR_TITLE,this.tip).remove(),this._getContent()||SelectorEngine.findOne(".popover-body",this.tip).remove()),this.tip}setContent(){const e=this.getTipElement();this.setElementContent(SelectorEngine.findOne(SELECTOR_TITLE,e),this.getTitle());let t=this._getContent();"function"==typeof t&&(t=t.call(this._element)),this.setElementContent(SelectorEngine.findOne(".popover-body",e),t),e.classList.remove("fade","show")}_addAttachmentClass(e){this.getTipElement().classList.add("bs-popover-"+this.updateAttachment(e))}_getContent(){return this._element.getAttribute("data-bs-content")||this._config.content}_cleanTipClass(){const e=this.getTipElement(),t=e.getAttribute("class").match(BSCLS_PREFIX_REGEX);null!==t&&t.length>0&&t.map(e=>e.trim()).forEach(t=>e.classList.remove(t))}static jQueryInterface(e){return this.each((function(){const t=Popover.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}defineJQueryPlugin(Popover);const NAME$2="scrollspy",DATA_KEY$2="bs.scrollspy",EVENT_KEY$2=".bs.scrollspy",DATA_API_KEY$1=".data-api",Default$1={offset:10,method:"auto",target:""},DefaultType$1={offset:"number",method:"string",target:"(string|element)"},EVENT_ACTIVATE="activate.bs.scrollspy",EVENT_SCROLL="scroll.bs.scrollspy",EVENT_LOAD_DATA_API="load.bs.scrollspy.data-api",CLASS_NAME_DROPDOWN_ITEM="dropdown-item",CLASS_NAME_ACTIVE$1="active",SELECTOR_DATA_SPY='[data-bs-spy="scroll"]',SELECTOR_NAV_LIST_GROUP$1=".nav, .list-group",SELECTOR_NAV_LINKS=".nav-link",SELECTOR_NAV_ITEMS=".nav-item",SELECTOR_LIST_ITEMS=".list-group-item",SELECTOR_DROPDOWN$1=".dropdown",SELECTOR_DROPDOWN_TOGGLE$1=".dropdown-toggle",METHOD_OFFSET="offset",METHOD_POSITION="position";class ScrollSpy extends BaseComponent{constructor(e,t){super(e),this._scrollElement="BODY"===this._element.tagName?window:this._element,this._config=this._getConfig(t),this._selector=`${this._config.target} .nav-link, ${this._config.target} .list-group-item, ${this._config.target} .dropdown-item`,this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,EventHandler.on(this._scrollElement,EVENT_SCROLL,()=>this._process()),this.refresh(),this._process()}static get Default(){return Default$1}static get NAME(){return NAME$2}refresh(){const e=this._scrollElement===this._scrollElement.window?"offset":"position",t="auto"===this._config.method?e:this._config.method,n="position"===t?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),SelectorEngine.find(this._selector).map(e=>{const i=getSelectorFromElement(e),s=i?SelectorEngine.findOne(i):null;if(s){const 
e=s.getBoundingClientRect();if(e.width||e.height)return[Manipulator[t](s).top+n,i]}return null}).filter(e=>e).sort((e,t)=>e[0]-t[0]).forEach(e=>{this._offsets.push(e[0]),this._targets.push(e[1])})}dispose(){EventHandler.off(this._scrollElement,EVENT_KEY$2),super.dispose()}_getConfig(e){if("string"!=typeof(e={...Default$1,...Manipulator.getDataAttributes(this._element),..."object"==typeof e&&e?e:{}}).target&&isElement(e.target)){let{id:t}=e.target;t||(t=getUID(NAME$2),e.target.id=t),e.target="#"+t}return typeCheckConfig(NAME$2,e,DefaultType$1),e}_getScrollTop(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop}_getScrollHeight(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)}_getOffsetHeight(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height}_process(){const e=this._getScrollTop()+this._config.offset,t=this._getScrollHeight(),n=this._config.offset+t-this._getOffsetHeight();if(this._scrollHeight!==t&&this.refresh(),e>=n){const e=this._targets[this._targets.length-1];this._activeTarget!==e&&this._activate(e)}else{if(this._activeTarget&&e<this._offsets[0]&&this._offsets[0]>0)return this._activeTarget=null,void this._clear();for(let t=this._offsets.length;t--;)this._activeTarget!==this._targets[t]&&e>=this._offsets[t]&&(void 0===this._offsets[t+1]||e<this._offsets[t+1])&&this._activate(this._targets[t])}}_activate(e){this._activeTarget=e,this._clear();const t=this._selector.split(",").map(t=>`${t}[data-bs-target="${e}"],${t}[href="${e}"]`),n=SelectorEngine.findOne(t.join(","));n.classList.contains("dropdown-item")?(SelectorEngine.findOne(".dropdown-toggle",n.closest(".dropdown")).classList.add("active"),n.classList.add("active")):(n.classList.add("active"),SelectorEngine.parents(n,".nav, .list-group").forEach(e=>{SelectorEngine.prev(e,".nav-link, .list-group-item").forEach(e=>e.classList.add("active")),SelectorEngine.prev(e,".nav-item").forEach(e=>{SelectorEngine.children(e,".nav-link").forEach(e=>e.classList.add("active"))})})),EventHandler.trigger(this._scrollElement,EVENT_ACTIVATE,{relatedTarget:e})}_clear(){SelectorEngine.find(this._selector).filter(e=>e.classList.contains("active")).forEach(e=>e.classList.remove("active"))}static jQueryInterface(e){return this.each((function(){const t=ScrollSpy.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}EventHandler.on(window,EVENT_LOAD_DATA_API,()=>{SelectorEngine.find(SELECTOR_DATA_SPY).forEach(e=>new ScrollSpy(e))}),defineJQueryPlugin(ScrollSpy);const NAME$1="tab",DATA_KEY$1="bs.tab",EVENT_KEY$1=".bs.tab",DATA_API_KEY=".data-api",EVENT_HIDE$1="hide.bs.tab",EVENT_HIDDEN$1="hidden.bs.tab",EVENT_SHOW$1="show.bs.tab",EVENT_SHOWN$1="shown.bs.tab",EVENT_CLICK_DATA_API="click.bs.tab.data-api",CLASS_NAME_DROPDOWN_MENU="dropdown-menu",CLASS_NAME_ACTIVE="active",CLASS_NAME_FADE$1="fade",CLASS_NAME_SHOW$1="show",SELECTOR_DROPDOWN=".dropdown",SELECTOR_NAV_LIST_GROUP=".nav, .list-group",SELECTOR_ACTIVE=".active",SELECTOR_ACTIVE_UL=":scope > li > .active",SELECTOR_DATA_TOGGLE='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',SELECTOR_DROPDOWN_TOGGLE=".dropdown-toggle",SELECTOR_DROPDOWN_ACTIVE_CHILD=":scope > .dropdown-menu .active";class Tab extends BaseComponent{static get 
NAME(){return"tab"}show(){if(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&this._element.classList.contains("active"))return;let e;const t=getElementFromSelector(this._element),n=this._element.closest(".nav, .list-group");if(n){const t="UL"===n.nodeName||"OL"===n.nodeName?SELECTOR_ACTIVE_UL:".active";e=SelectorEngine.find(t,n),e=e[e.length-1]}const i=e?EventHandler.trigger(e,EVENT_HIDE$1,{relatedTarget:this._element}):null;if(EventHandler.trigger(this._element,EVENT_SHOW$1,{relatedTarget:e}).defaultPrevented||null!==i&&i.defaultPrevented)return;this._activate(this._element,n);const s=()=>{EventHandler.trigger(e,EVENT_HIDDEN$1,{relatedTarget:this._element}),EventHandler.trigger(this._element,EVENT_SHOWN$1,{relatedTarget:e})};t?this._activate(t,t.parentNode,s):s()}_activate(e,t,n){const i=(!t||"UL"!==t.nodeName&&"OL"!==t.nodeName?SelectorEngine.children(t,".active"):SelectorEngine.find(SELECTOR_ACTIVE_UL,t))[0],s=n&&i&&i.classList.contains("fade"),o=()=>this._transitionComplete(e,i,n);i&&s?(i.classList.remove("show"),this._queueCallback(o,e,!0)):o()}_transitionComplete(e,t,n){if(t){t.classList.remove("active");const e=SelectorEngine.findOne(SELECTOR_DROPDOWN_ACTIVE_CHILD,t.parentNode);e&&e.classList.remove("active"),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!1)}e.classList.add("active"),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!0),reflow(e),e.classList.contains("fade")&&e.classList.add("show");let i=e.parentNode;if(i&&"LI"===i.nodeName&&(i=i.parentNode),i&&i.classList.contains("dropdown-menu")){const t=e.closest(".dropdown");t&&SelectorEngine.find(".dropdown-toggle",t).forEach(e=>e.classList.add("active")),e.setAttribute("aria-expanded",!0)}n&&n()}static jQueryInterface(e){return this.each((function(){const t=Tab.getOrCreateInstance(this);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}EventHandler.on(document,EVENT_CLICK_DATA_API,SELECTOR_DATA_TOGGLE,(function(e){["A","AREA"].includes(this.tagName)&&e.preventDefault(),isDisabled(this)||Tab.getOrCreateInstance(this).show()})),defineJQueryPlugin(Tab);const NAME="toast",DATA_KEY="bs.toast",EVENT_KEY=".bs.toast",EVENT_CLICK_DISMISS="click.dismiss.bs.toast",EVENT_MOUSEOVER="mouseover.bs.toast",EVENT_MOUSEOUT="mouseout.bs.toast",EVENT_FOCUSIN="focusin.bs.toast",EVENT_FOCUSOUT="focusout.bs.toast",EVENT_HIDE="hide.bs.toast",EVENT_HIDDEN="hidden.bs.toast",EVENT_SHOW="show.bs.toast",EVENT_SHOWN="shown.bs.toast",CLASS_NAME_FADE="fade",CLASS_NAME_HIDE="hide",CLASS_NAME_SHOW="show",CLASS_NAME_SHOWING="showing",DefaultType={animation:"boolean",autohide:"boolean",delay:"number"},Default={animation:!0,autohide:!0,delay:5e3},SELECTOR_DATA_DISMISS='[data-bs-dismiss="toast"]';class Toast extends BaseComponent{constructor(e,t){super(e),this._config=this._getConfig(t),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get DefaultType(){return DefaultType}static get Default(){return Default}static get NAME(){return 
NAME}show(){EventHandler.trigger(this._element,EVENT_SHOW).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove("hide"),reflow(this._element),this._element.classList.add("showing"),this._queueCallback(()=>{this._element.classList.remove("showing"),this._element.classList.add("show"),EventHandler.trigger(this._element,EVENT_SHOWN),this._maybeScheduleHide()},this._element,this._config.animation))}hide(){this._element.classList.contains("show")&&(EventHandler.trigger(this._element,EVENT_HIDE).defaultPrevented||(this._element.classList.remove("show"),this._queueCallback(()=>{this._element.classList.add("hide"),EventHandler.trigger(this._element,EVENT_HIDDEN)},this._element,this._config.animation)))}dispose(){this._clearTimeout(),this._element.classList.contains("show")&&this._element.classList.remove("show"),super.dispose()}_getConfig(e){return e={...Default,...Manipulator.getDataAttributes(this._element),..."object"==typeof e&&e?e:{}},typeCheckConfig(NAME,e,this.constructor.DefaultType),e}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout(()=>{this.hide()},this._config.delay)))}_onInteraction(e,t){switch(e.type){case"mouseover":case"mouseout":this._hasMouseInteraction=t;break;case"focusin":case"focusout":this._hasKeyboardInteraction=t}if(t)return void this._clearTimeout();const n=e.relatedTarget;this._element===n||this._element.contains(n)||this._maybeScheduleHide()}_setListeners(){EventHandler.on(this._element,EVENT_CLICK_DISMISS,SELECTOR_DATA_DISMISS,()=>this.hide()),EventHandler.on(this._element,EVENT_MOUSEOVER,e=>this._onInteraction(e,!0)),EventHandler.on(this._element,EVENT_MOUSEOUT,e=>this._onInteraction(e,!1)),EventHandler.on(this._element,EVENT_FOCUSIN,e=>this._onInteraction(e,!0)),EventHandler.on(this._element,EVENT_FOCUSOUT,e=>this._onInteraction(e,!1))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(e){return this.each((function(){const t=Toast.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e](this)}}))}}defineJQueryPlugin(Toast);export{Alert,Button,Carousel,Collapse,Dropdown,Modal,Offcanvas,Popover,ScrollSpy,Tab,Toast,Tooltip}; //# sourceMappingURL=bootstrap.esm.min.js.map
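// ---------------------------------------------------------------------------
// Hedged usage sketch (editorial addition; not part of the minified bundle
// above). The module above is Bootstrap 5.0.2's ESM build, whose final
// statement exports the twelve component classes. Below is a minimal assumed
// consumer module: the import path and the element IDs are hypothetical, and
// Tooltip additionally requires Popper to be present, as the bundle's own
// constructor check enforces.
// ---------------------------------------------------------------------------

import { Modal, Toast, Tooltip } from './bootstrap.esm.min.js';

// Each component extends BaseComponent, which caches one instance per element
// in the internal Data map; getOrCreateInstance() reuses that instance instead
// of tripping Data.set's "more than one instance per element" guard.
const modal = Modal.getOrCreateInstance(document.querySelector('#demo-modal'));
modal.show(); // fires show.bs.modal, then shown.bs.modal once the fade ends

// Toasts follow the same per-element pattern; autohide defaults to true with
// a 5000 ms delay, overridable through the config object.
Toast.getOrCreateInstance(document.querySelector('#demo-toast'), { delay: 3000 }).show();

// Tooltip options are merged over its Default object and type-checked before use.
const tip = new Tooltip(document.querySelector('#demo-link'), { placement: 'bottom' });
tip.show();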
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/typeahead/typeahead.bundle.js
(function($) { var _ = function() { "use strict"; return { isMsie: function() { return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; }, isBlankString: function(str) { return !str || /^\s*$/.test(str); }, escapeRegExChars: function(str) { return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); }, isString: function(obj) { return typeof obj === "string"; }, isNumber: function(obj) { return typeof obj === "number"; }, isArray: $.isArray, isFunction: $.isFunction, isObject: $.isPlainObject, isUndefined: function(obj) { return typeof obj === "undefined"; }, toStr: function toStr(s) { return _.isUndefined(s) || s === null ? "" : s + ""; }, bind: $.proxy, each: function(collection, cb) { $.each(collection, reverseArgs); function reverseArgs(index, value) { return cb(value, index); } }, map: $.map, filter: $.grep, every: function(obj, test) { var result = true; if (!obj) { return result; } $.each(obj, function(key, val) { if (!(result = test.call(null, val, key, obj))) { return false; } }); return !!result; }, some: function(obj, test) { var result = false; if (!obj) { return result; } $.each(obj, function(key, val) { if (result = test.call(null, val, key, obj)) { return false; } }); return !!result; }, mixin: $.extend, getUniqueId: function() { var counter = 0; return function() { return counter++; }; }(), templatify: function templatify(obj) { return $.isFunction(obj) ? obj : template; function template() { return String(obj); } }, defer: function(fn) { setTimeout(fn, 0); }, debounce: function(func, wait, immediate) { var timeout, result; return function() { var context = this, args = arguments, later, callNow; later = function() { timeout = null; if (!immediate) { result = func.apply(context, args); } }; callNow = immediate && !timeout; clearTimeout(timeout); timeout = setTimeout(later, wait); if (callNow) { result = func.apply(context, args); } return result; }; }, throttle: function(func, wait) { var context, args, timeout, result, previous, later; previous = 0; later = function() { previous = new Date(); timeout = null; result = func.apply(context, args); }; return function() { var now = new Date(), remaining = wait - (now - previous); context = this; args = arguments; if (remaining <= 0) { clearTimeout(timeout); timeout = null; previous = now; result = func.apply(context, args); } else if (!timeout) { timeout = setTimeout(later, remaining); } return result; }; }, noop: function() {} }; }(); var VERSION = "0.10.5"; var tokenizers = function() { "use strict"; return { nonword: nonword, whitespace: whitespace, obj: { nonword: getObjTokenizer(nonword), whitespace: getObjTokenizer(whitespace) } }; function whitespace(str) { str = _.toStr(str); return str ? str.split(/\s+/) : []; } function nonword(str) { str = _.toStr(str); return str ? str.split(/\W+/) : []; } function getObjTokenizer(tokenizer) { return function setKey() { var args = [].slice.call(arguments, 0); return function tokenize(o) { var tokens = []; _.each(args, function(k) { tokens = tokens.concat(tokenizer(_.toStr(o[k]))); }); return tokens; }; }; } }(); var LruCache = function() { "use strict"; function LruCache(maxSize) { this.maxSize = _.isNumber(maxSize) ? 
maxSize : 100; this.reset(); if (this.maxSize <= 0) { this.set = this.get = $.noop; } } _.mixin(LruCache.prototype, { set: function set(key, val) { var tailItem = this.list.tail, node; if (this.size >= this.maxSize) { this.list.remove(tailItem); delete this.hash[tailItem.key]; } if (node = this.hash[key]) { node.val = val; this.list.moveToFront(node); } else { node = new Node(key, val); this.list.add(node); this.hash[key] = node; this.size++; } }, get: function get(key) { var node = this.hash[key]; if (node) { this.list.moveToFront(node); return node.val; } }, reset: function reset() { this.size = 0; this.hash = {}; this.list = new List(); } }); function List() { this.head = this.tail = null; } _.mixin(List.prototype, { add: function add(node) { if (this.head) { node.next = this.head; this.head.prev = node; } this.head = node; this.tail = this.tail || node; }, remove: function remove(node) { node.prev ? node.prev.next = node.next : this.head = node.next; node.next ? node.next.prev = node.prev : this.tail = node.prev; }, moveToFront: function(node) { this.remove(node); this.add(node); } }); function Node(key, val) { this.key = key; this.val = val; this.prev = this.next = null; } return LruCache; }(); var PersistentStorage = function() { "use strict"; var ls, methods; try { ls = window.localStorage; ls.setItem("~~~", "!"); ls.removeItem("~~~"); } catch (err) { ls = null; } function PersistentStorage(namespace) { this.prefix = [ "__", namespace, "__" ].join(""); this.ttlKey = "__ttl__"; this.keyMatcher = new RegExp("^" + _.escapeRegExChars(this.prefix)); } if (ls && window.JSON) { methods = { _prefix: function(key) { return this.prefix + key; }, _ttlKey: function(key) { return this._prefix(key) + this.ttlKey; }, get: function(key) { if (this.isExpired(key)) { this.remove(key); } return decode(ls.getItem(this._prefix(key))); }, set: function(key, val, ttl) { if (_.isNumber(ttl)) { ls.setItem(this._ttlKey(key), encode(now() + ttl)); } else { ls.removeItem(this._ttlKey(key)); } return ls.setItem(this._prefix(key), encode(val)); }, remove: function(key) { ls.removeItem(this._ttlKey(key)); ls.removeItem(this._prefix(key)); return this; }, clear: function() { var i, key, keys = [], len = ls.length; for (i = 0; i < len; i++) { if ((key = ls.key(i)).match(this.keyMatcher)) { keys.push(key.replace(this.keyMatcher, "")); } } for (i = keys.length; i--; ) { this.remove(keys[i]); } return this; }, isExpired: function(key) { var ttl = decode(ls.getItem(this._ttlKey(key))); return _.isNumber(ttl) && now() > ttl ? true : false; } }; } else { methods = { get: _.noop, set: _.noop, remove: _.noop, clear: _.noop, isExpired: _.noop }; } _.mixin(PersistentStorage.prototype, methods); return PersistentStorage; function now() { return new Date().getTime(); } function encode(val) { return JSON.stringify(_.isUndefined(val) ? null : val); } function decode(val) { return JSON.parse(val); } }(); var Transport = function() { "use strict"; var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10); function Transport(o) { o = o || {}; this.cancelled = false; this.lastUrl = null; this._send = o.transport ? callbackToDeferred(o.transport) : $.ajax; this._get = o.rateLimiter ? o.rateLimiter(this._get) : this._get; this._cache = o.cache === false ? 
new LruCache(0) : sharedCache; } Transport.setMaxPendingRequests = function setMaxPendingRequests(num) { maxPendingRequests = num; }; Transport.resetCache = function resetCache() { sharedCache.reset(); }; _.mixin(Transport.prototype, { _get: function(url, o, cb) { var that = this, jqXhr; if (this.cancelled || url !== this.lastUrl) { return; } if (jqXhr = pendingRequests[url]) { jqXhr.done(done).fail(fail); } else if (pendingRequestsCount < maxPendingRequests) { pendingRequestsCount++; pendingRequests[url] = this._send(url, o).done(done).fail(fail).always(always); } else { this.onDeckRequestArgs = [].slice.call(arguments, 0); } function done(resp) { cb && cb(null, resp); that._cache.set(url, resp); } function fail() { cb && cb(true); } function always() { pendingRequestsCount--; delete pendingRequests[url]; if (that.onDeckRequestArgs) { that._get.apply(that, that.onDeckRequestArgs); that.onDeckRequestArgs = null; } } }, get: function(url, o, cb) { var resp; if (_.isFunction(o)) { cb = o; o = {}; } this.cancelled = false; this.lastUrl = url; if (resp = this._cache.get(url)) { _.defer(function() { cb && cb(null, resp); }); } else { this._get(url, o, cb); } return !!resp; }, cancel: function() { this.cancelled = true; } }); return Transport; function callbackToDeferred(fn) { return function customSendWrapper(url, o) { var deferred = $.Deferred(); fn(url, o, onSuccess, onError); return deferred; function onSuccess(resp) { _.defer(function() { deferred.resolve(resp); }); } function onError(err) { _.defer(function() { deferred.reject(err); }); } }; } }(); var SearchIndex = function() { "use strict"; function SearchIndex(o) { o = o || {}; if (!o.datumTokenizer || !o.queryTokenizer) { $.error("datumTokenizer and queryTokenizer are both required"); } this.datumTokenizer = o.datumTokenizer; this.queryTokenizer = o.queryTokenizer; this.reset(); } _.mixin(SearchIndex.prototype, { bootstrap: function bootstrap(o) { this.datums = o.datums; this.trie = o.trie; }, add: function(data) { var that = this; data = _.isArray(data) ? data : [ data ]; _.each(data, function(datum) { var id, tokens; id = that.datums.push(datum) - 1; tokens = normalizeTokens(that.datumTokenizer(datum)); _.each(tokens, function(token) { var node, chars, ch; node = that.trie; chars = token.split(""); while (ch = chars.shift()) { node = node.children[ch] || (node.children[ch] = newNode()); node.ids.push(id); } }); }); }, get: function get(query) { var that = this, tokens, matches; tokens = normalizeTokens(this.queryTokenizer(query)); _.each(tokens, function(token) { var node, chars, ch, ids; if (matches && matches.length === 0) { return false; } node = that.trie; chars = token.split(""); while (node && (ch = chars.shift())) { node = node.children[ch]; } if (node && chars.length === 0) { ids = node.ids.slice(0); matches = matches ? getIntersection(matches, ids) : ids; } else { matches = []; return false; } }); return matches ? 
_.map(unique(matches), function(id) { return that.datums[id]; }) : []; }, reset: function reset() { this.datums = []; this.trie = newNode(); }, serialize: function serialize() { return { datums: this.datums, trie: this.trie }; } }); return SearchIndex; function normalizeTokens(tokens) { tokens = _.filter(tokens, function(token) { return !!token; }); tokens = _.map(tokens, function(token) { return token.toLowerCase(); }); return tokens; } function newNode() { return { ids: [], children: {} }; } function unique(array) { var seen = {}, uniques = []; for (var i = 0, len = array.length; i < len; i++) { if (!seen[array[i]]) { seen[array[i]] = true; uniques.push(array[i]); } } return uniques; } function getIntersection(arrayA, arrayB) { var ai = 0, bi = 0, intersection = []; arrayA = arrayA.sort(compare); arrayB = arrayB.sort(compare); var lenArrayA = arrayA.length, lenArrayB = arrayB.length; while (ai < lenArrayA && bi < lenArrayB) { if (arrayA[ai] < arrayB[bi]) { ai++; } else if (arrayA[ai] > arrayB[bi]) { bi++; } else { intersection.push(arrayA[ai]); ai++; bi++; } } return intersection; function compare(a, b) { return a - b; } } }(); var oParser = function() { "use strict"; return { local: getLocal, prefetch: getPrefetch, remote: getRemote }; function getLocal(o) { return o.local || null; } function getPrefetch(o) { var prefetch, defaults; defaults = { url: null, thumbprint: "", ttl: 24 * 60 * 60 * 1e3, filter: null, ajax: {} }; if (prefetch = o.prefetch || null) { prefetch = _.isString(prefetch) ? { url: prefetch } : prefetch; prefetch = _.mixin(defaults, prefetch); prefetch.thumbprint = VERSION + prefetch.thumbprint; prefetch.ajax.type = prefetch.ajax.type || "GET"; prefetch.ajax.dataType = prefetch.ajax.dataType || "json"; !prefetch.url && $.error("prefetch requires url to be set"); } return prefetch; } function getRemote(o) { var remote, defaults; defaults = { url: null, cache: true, wildcard: "%QUERY", replace: null, rateLimitBy: "debounce", rateLimitWait: 300, send: null, filter: null, ajax: {} }; if (remote = o.remote || null) { remote = _.isString(remote) ? { url: remote } : remote; remote = _.mixin(defaults, remote); remote.rateLimiter = /^throttle$/i.test(remote.rateLimitBy) ? byThrottle(remote.rateLimitWait) : byDebounce(remote.rateLimitWait); remote.ajax.type = remote.ajax.type || "GET"; remote.ajax.dataType = remote.ajax.dataType || "json"; delete remote.rateLimitBy; delete remote.rateLimitWait; !remote.url && $.error("remote requires url to be set"); } return remote; function byDebounce(wait) { return function(fn) { return _.debounce(fn, wait); }; } function byThrottle(wait) { return function(fn) { return _.throttle(fn, wait); }; } } }(); (function(root) { "use strict"; var old, keys; old = root.Bloodhound; keys = { data: "data", protocol: "protocol", thumbprint: "thumbprint" }; root.Bloodhound = Bloodhound; function Bloodhound(o) { if (!o || !o.local && !o.prefetch && !o.remote) { $.error("one of local, prefetch, or remote is required"); } this.limit = o.limit || 5; this.sorter = getSorter(o.sorter); this.dupDetector = o.dupDetector || ignoreDuplicates; this.local = oParser.local(o); this.prefetch = oParser.prefetch(o); this.remote = oParser.remote(o); this.cacheKey = this.prefetch ? this.prefetch.cacheKey || this.prefetch.url : null; this.index = new SearchIndex({ datumTokenizer: o.datumTokenizer, queryTokenizer: o.queryTokenizer }); this.storage = this.cacheKey ? 
new PersistentStorage(this.cacheKey) : null; } Bloodhound.noConflict = function noConflict() { root.Bloodhound = old; return Bloodhound; }; Bloodhound.tokenizers = tokenizers; _.mixin(Bloodhound.prototype, { _loadPrefetch: function loadPrefetch(o) { var that = this, serialized, deferred; if (serialized = this._readFromStorage(o.thumbprint)) { this.index.bootstrap(serialized); deferred = $.Deferred().resolve(); } else { deferred = $.ajax(o.url, o.ajax).done(handlePrefetchResponse); } return deferred; function handlePrefetchResponse(resp) { that.clear(); that.add(o.filter ? o.filter(resp) : resp); that._saveToStorage(that.index.serialize(), o.thumbprint, o.ttl); } }, _getFromRemote: function getFromRemote(query, cb) { var that = this, url, uriEncodedQuery; if (!this.transport) { return; } query = query || ""; uriEncodedQuery = encodeURIComponent(query); url = this.remote.replace ? this.remote.replace(this.remote.url, query) : this.remote.url.replace(this.remote.wildcard, uriEncodedQuery); return this.transport.get(url, this.remote.ajax, handleRemoteResponse); function handleRemoteResponse(err, resp) { err ? cb([]) : cb(that.remote.filter ? that.remote.filter(resp) : resp); } }, _cancelLastRemoteRequest: function cancelLastRemoteRequest() { this.transport && this.transport.cancel(); }, _saveToStorage: function saveToStorage(data, thumbprint, ttl) { if (this.storage) { this.storage.set(keys.data, data, ttl); this.storage.set(keys.protocol, location.protocol, ttl); this.storage.set(keys.thumbprint, thumbprint, ttl); } }, _readFromStorage: function readFromStorage(thumbprint) { var stored = {}, isExpired; if (this.storage) { stored.data = this.storage.get(keys.data); stored.protocol = this.storage.get(keys.protocol); stored.thumbprint = this.storage.get(keys.thumbprint); } isExpired = stored.thumbprint !== thumbprint || stored.protocol !== location.protocol; return stored.data && !isExpired ? stored.data : null; }, _initialize: function initialize() { var that = this, local = this.local, deferred; deferred = this.prefetch ? this._loadPrefetch(this.prefetch) : $.Deferred().resolve(); local && deferred.done(addLocalToIndex); this.transport = this.remote ? new Transport(this.remote) : null; return this.initPromise = deferred.promise(); function addLocalToIndex() { that.add(_.isFunction(local) ? local() : local); } }, initialize: function initialize(force) { return !this.initPromise || force ? this._initialize() : this.initPromise; }, add: function add(data) { this.index.add(data); }, get: function get(query, cb) { var that = this, matches = [], cacheHit = false; matches = this.index.get(query); matches = this.sorter(matches).slice(0, this.limit); matches.length < this.limit ? 
cacheHit = this._getFromRemote(query, returnRemoteMatches) : this._cancelLastRemoteRequest(); if (!cacheHit) { (matches.length > 0 || !this.transport) && cb && cb(matches); } function returnRemoteMatches(remoteMatches) { var matchesWithBackfill = matches.slice(0); _.each(remoteMatches, function(remoteMatch) { var isDuplicate; isDuplicate = _.some(matchesWithBackfill, function(match) { return that.dupDetector(remoteMatch, match); }); !isDuplicate && matchesWithBackfill.push(remoteMatch); return matchesWithBackfill.length < that.limit; }); cb && cb(that.sorter(matchesWithBackfill)); } }, clear: function clear() { this.index.reset(); }, clearPrefetchCache: function clearPrefetchCache() { this.storage && this.storage.clear(); }, clearRemoteCache: function clearRemoteCache() { this.transport && Transport.resetCache(); }, ttAdapter: function ttAdapter() { return _.bind(this.get, this); } }); return Bloodhound; function getSorter(sortFn) { return _.isFunction(sortFn) ? sort : noSort; function sort(array) { return array.sort(sortFn); } function noSort(array) { return array; } } function ignoreDuplicates() { return false; } })(this); var html = function() { return { wrapper: '<span class="twitter-typeahead"></span>', dropdown: '<span class="tt-dropdown-menu"></span>', dataset: '<div class="tt-dataset-%CLASS%"></div>', suggestions: '<span class="tt-suggestions"></span>', suggestion: '<div class="tt-suggestion"></div>' }; }(); var css = function() { "use strict"; var css = { wrapper: { position: "relative", display: "inline-block" }, hint: { position: "absolute", top: "0", left: "0", borderColor: "transparent", boxShadow: "none", opacity: "1" }, input: { position: "relative", verticalAlign: "top", backgroundColor: "transparent" }, inputWithNoHint: { position: "relative", verticalAlign: "top" }, dropdown: { position: "absolute", top: "100%", left: "0", zIndex: "100", display: "none" }, suggestions: { display: "block" }, suggestion: { whiteSpace: "nowrap", cursor: "pointer" }, suggestionChild: { whiteSpace: "normal" }, ltr: { left: "0", right: "auto" }, rtl: { left: "auto", right: " 0" } }; if (_.isMsie()) { _.mixin(css.input, { backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)" }); } if (_.isMsie() && _.isMsie() <= 7) { _.mixin(css.input, { marginTop: "-1px" }); } return css; }(); var EventBus = function() { "use strict"; var namespace = "typeahead:"; function EventBus(o) { if (!o || !o.el) { $.error("EventBus initialized without el"); } this.$el = $(o.el); } _.mixin(EventBus.prototype, { trigger: function(type) { var args = [].slice.call(arguments, 1); this.$el.trigger(namespace + type, args); } }); return EventBus; }(); var EventEmitter = function() { "use strict"; var splitter = /\s+/, nextTick = getNextTick(); return { onSync: onSync, onAsync: onAsync, off: off, trigger: trigger }; function on(method, types, cb, context) { var type; if (!cb) { return this; } types = types.split(splitter); cb = context ? 
bindContext(cb, context) : cb; this._callbacks = this._callbacks || {}; while (type = types.shift()) { this._callbacks[type] = this._callbacks[type] || { sync: [], async: [] }; this._callbacks[type][method].push(cb); } return this; } function onAsync(types, cb, context) { return on.call(this, "async", types, cb, context); } function onSync(types, cb, context) { return on.call(this, "sync", types, cb, context); } function off(types) { var type; if (!this._callbacks) { return this; } types = types.split(splitter); while (type = types.shift()) { delete this._callbacks[type]; } return this; } function trigger(types) { var type, callbacks, args, syncFlush, asyncFlush; if (!this._callbacks) { return this; } types = types.split(splitter); args = [].slice.call(arguments, 1); while ((type = types.shift()) && (callbacks = this._callbacks[type])) { syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args)); asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args)); syncFlush() && nextTick(asyncFlush); } return this; } function getFlush(callbacks, context, args) { return flush; function flush() { var cancelled; for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) { cancelled = callbacks[i].apply(context, args) === false; } return !cancelled; } } function getNextTick() { var nextTickFn; if (window.setImmediate) { nextTickFn = function nextTickSetImmediate(fn) { setImmediate(function() { fn(); }); }; } else { nextTickFn = function nextTickSetTimeout(fn) { setTimeout(function() { fn(); }, 0); }; } return nextTickFn; } function bindContext(fn, context) { return fn.bind ? fn.bind(context) : function() { fn.apply(context, [].slice.call(arguments, 0)); }; } }(); var highlight = function(doc) { "use strict"; var defaults = { node: null, pattern: null, tagName: "strong", className: null, wordsOnly: false, caseSensitive: false }; return function hightlight(o) { var regex; o = _.mixin({}, defaults, o); if (!o.node || !o.pattern) { return; } o.pattern = _.isArray(o.pattern) ? o.pattern : [ o.pattern ]; regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly); traverse(o.node, hightlightTextNode); function hightlightTextNode(textNode) { var match, patternNode, wrapperNode; if (match = regex.exec(textNode.data)) { wrapperNode = doc.createElement(o.tagName); o.className && (wrapperNode.className = o.className); patternNode = textNode.splitText(match.index); patternNode.splitText(match[0].length); wrapperNode.appendChild(patternNode.cloneNode(true)); textNode.parentNode.replaceChild(wrapperNode, patternNode); } return !!match; } function traverse(el, hightlightTextNode) { var childNode, TEXT_NODE_TYPE = 3; for (var i = 0; i < el.childNodes.length; i++) { childNode = el.childNodes[i]; if (childNode.nodeType === TEXT_NODE_TYPE) { i += hightlightTextNode(childNode) ? 1 : 0; } else { traverse(childNode, hightlightTextNode); } } } }; function getRegex(patterns, caseSensitive, wordsOnly) { var escapedPatterns = [], regexStr; for (var i = 0, len = patterns.length; i < len; i++) { escapedPatterns.push(_.escapeRegExChars(patterns[i])); } regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")"; return caseSensitive ? 
new RegExp(regexStr) : new RegExp(regexStr, "i"); } }(window.document); var Input = function() { "use strict"; var specialKeyCodeMap; specialKeyCodeMap = { 9: "tab", 27: "esc", 37: "left", 39: "right", 13: "enter", 38: "up", 40: "down" }; function Input(o) { var that = this, onBlur, onFocus, onKeydown, onInput; o = o || {}; if (!o.input) { $.error("input is missing"); } onBlur = _.bind(this._onBlur, this); onFocus = _.bind(this._onFocus, this); onKeydown = _.bind(this._onKeydown, this); onInput = _.bind(this._onInput, this); this.$hint = $(o.hint); this.$input = $(o.input).on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown); if (this.$hint.length === 0) { this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop; } if (!_.isMsie()) { this.$input.on("input.tt", onInput); } else { this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) { if (specialKeyCodeMap[$e.which || $e.keyCode]) { return; } _.defer(_.bind(that._onInput, that, $e)); }); } this.query = this.$input.val(); this.$overflowHelper = buildOverflowHelper(this.$input); } Input.normalizeQuery = function(str) { return (str || "").replace(/^\s*/g, "").replace(/\s{2,}/g, " "); }; _.mixin(Input.prototype, EventEmitter, { _onBlur: function onBlur() { this.resetInputValue(); this.trigger("blurred"); }, _onFocus: function onFocus() { this.trigger("focused"); }, _onKeydown: function onKeydown($e) { var keyName = specialKeyCodeMap[$e.which || $e.keyCode]; this._managePreventDefault(keyName, $e); if (keyName && this._shouldTrigger(keyName, $e)) { this.trigger(keyName + "Keyed", $e); } }, _onInput: function onInput() { this._checkInputValue(); }, _managePreventDefault: function managePreventDefault(keyName, $e) { var preventDefault, hintValue, inputValue; switch (keyName) { case "tab": hintValue = this.getHint(); inputValue = this.getInputValue(); preventDefault = hintValue && hintValue !== inputValue && !withModifier($e); break; case "up": case "down": preventDefault = !withModifier($e); break; default: preventDefault = false; } preventDefault && $e.preventDefault(); }, _shouldTrigger: function shouldTrigger(keyName, $e) { var trigger; switch (keyName) { case "tab": trigger = !withModifier($e); break; default: trigger = true; } return trigger; }, _checkInputValue: function checkInputValue() { var inputValue, areEquivalent, hasDifferentWhitespace; inputValue = this.getInputValue(); areEquivalent = areQueriesEquivalent(inputValue, this.query); hasDifferentWhitespace = areEquivalent ? this.query.length !== inputValue.length : false; this.query = inputValue; if (!areEquivalent) { this.trigger("queryChanged", this.query); } else if (hasDifferentWhitespace) { this.trigger("whitespaceChanged", this.query); } }, focus: function focus() { this.$input.focus(); }, blur: function blur() { this.$input.blur(); }, getQuery: function getQuery() { return this.query; }, setQuery: function setQuery(query) { this.query = query; }, getInputValue: function getInputValue() { return this.$input.val(); }, setInputValue: function setInputValue(value, silent) { this.$input.val(value); silent ? 
this.clearHint() : this._checkInputValue(); }, resetInputValue: function resetInputValue() { this.setInputValue(this.query, true); }, getHint: function getHint() { return this.$hint.val(); }, setHint: function setHint(value) { this.$hint.val(value); }, clearHint: function clearHint() { this.setHint(""); }, clearHintIfInvalid: function clearHintIfInvalid() { var val, hint, valIsPrefixOfHint, isValid; val = this.getInputValue(); hint = this.getHint(); valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0; isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow(); !isValid && this.clearHint(); }, getLanguageDirection: function getLanguageDirection() { return (this.$input.css("direction") || "ltr").toLowerCase(); }, hasOverflow: function hasOverflow() { var constraint = this.$input.width() - 2; this.$overflowHelper.text(this.getInputValue()); return this.$overflowHelper.width() >= constraint; }, isCursorAtEnd: function() { var valueLength, selectionStart, range; valueLength = this.$input.val().length; selectionStart = this.$input[0].selectionStart; if (_.isNumber(selectionStart)) { return selectionStart === valueLength; } else if (document.selection) { range = document.selection.createRange(); range.moveStart("character", -valueLength); return valueLength === range.text.length; } return true; }, destroy: function destroy() { this.$hint.off(".tt"); this.$input.off(".tt"); this.$hint = this.$input = this.$overflowHelper = null; } }); return Input; function buildOverflowHelper($input) { return $('<pre aria-hidden="true"></pre>').css({ position: "absolute", visibility: "hidden", whiteSpace: "pre", fontFamily: $input.css("font-family"), fontSize: $input.css("font-size"), fontStyle: $input.css("font-style"), fontVariant: $input.css("font-variant"), fontWeight: $input.css("font-weight"), wordSpacing: $input.css("word-spacing"), letterSpacing: $input.css("letter-spacing"), textIndent: $input.css("text-indent"), textRendering: $input.css("text-rendering"), textTransform: $input.css("text-transform") }).insertAfter($input); } function areQueriesEquivalent(a, b) { return Input.normalizeQuery(a) === Input.normalizeQuery(b); } function withModifier($e) { return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey; } }(); var Dataset = function() { "use strict"; var datasetKey = "ttDataset", valueKey = "ttValue", datumKey = "ttDatum"; function Dataset(o) { o = o || {}; o.templates = o.templates || {}; if (!o.source) { $.error("missing source"); } if (o.name && !isValidName(o.name)) { $.error("invalid dataset name: " + o.name); } this.query = null; this.highlight = !!o.highlight; this.name = o.name || _.getUniqueId(); this.source = o.source; this.displayFn = getDisplayFn(o.display || o.displayKey); this.templates = getTemplates(o.templates, this.displayFn); this.$el = $(html.dataset.replace("%CLASS%", this.name)); } Dataset.extractDatasetName = function extractDatasetName(el) { return $(el).data(datasetKey); }; Dataset.extractValue = function extractDatum(el) { return $(el).data(valueKey); }; Dataset.extractDatum = function extractDatum(el) { return $(el).data(datumKey); }; _.mixin(Dataset.prototype, EventEmitter, { _render: function render(query, suggestions) { if (!this.$el) { return; } var that = this, hasSuggestions; this.$el.empty(); hasSuggestions = suggestions && suggestions.length; if (!hasSuggestions && this.templates.empty) { this.$el.html(getEmptyHtml()).prepend(that.templates.header ? getHeaderHtml() : null).append(that.templates.footer ? 
getFooterHtml() : null); } else if (hasSuggestions) { this.$el.html(getSuggestionsHtml()).prepend(that.templates.header ? getHeaderHtml() : null).append(that.templates.footer ? getFooterHtml() : null); } this.trigger("rendered"); function getEmptyHtml() { return that.templates.empty({ query: query, isEmpty: true }); } function getSuggestionsHtml() { var $suggestions, nodes; $suggestions = $(html.suggestions).css(css.suggestions); nodes = _.map(suggestions, getSuggestionNode); $suggestions.append.apply($suggestions, nodes); that.highlight && highlight({ className: "tt-highlight", node: $suggestions[0], pattern: query }); return $suggestions; function getSuggestionNode(suggestion) { var $el; $el = $(html.suggestion).append(that.templates.suggestion(suggestion)).data(datasetKey, that.name).data(valueKey, that.displayFn(suggestion)).data(datumKey, suggestion); $el.children().each(function() { $(this).css(css.suggestionChild); }); return $el; } } function getHeaderHtml() { return that.templates.header({ query: query, isEmpty: !hasSuggestions }); } function getFooterHtml() { return that.templates.footer({ query: query, isEmpty: !hasSuggestions }); } }, getRoot: function getRoot() { return this.$el; }, update: function update(query) { var that = this; this.query = query; this.canceled = false; this.source(query, render); function render(suggestions) { if (!that.canceled && query === that.query) { that._render(query, suggestions); } } }, cancel: function cancel() { this.canceled = true; }, clear: function clear() { this.cancel(); this.$el.empty(); this.trigger("rendered"); }, isEmpty: function isEmpty() { return this.$el.is(":empty"); }, destroy: function destroy() { this.$el = null; } }); return Dataset; function getDisplayFn(display) { display = display || "value"; return _.isFunction(display) ? 
display : displayFn; function displayFn(obj) { return obj[display]; } } function getTemplates(templates, displayFn) { return { empty: templates.empty && _.templatify(templates.empty), header: templates.header && _.templatify(templates.header), footer: templates.footer && _.templatify(templates.footer), suggestion: templates.suggestion || suggestionTemplate }; function suggestionTemplate(context) { return "<p>" + displayFn(context) + "</p>"; } } function isValidName(str) { return /^[_a-zA-Z0-9-]+$/.test(str); } }(); var Dropdown = function() { "use strict"; function Dropdown(o) { var that = this, onSuggestionClick, onSuggestionMouseEnter, onSuggestionMouseLeave; o = o || {}; if (!o.menu) { $.error("menu is required"); } this.isOpen = false; this.isEmpty = true; this.datasets = _.map(o.datasets, initializeDataset); onSuggestionClick = _.bind(this._onSuggestionClick, this); onSuggestionMouseEnter = _.bind(this._onSuggestionMouseEnter, this); onSuggestionMouseLeave = _.bind(this._onSuggestionMouseLeave, this); this.$menu = $(o.menu).on("click.tt", ".tt-suggestion", onSuggestionClick).on("mouseenter.tt", ".tt-suggestion", onSuggestionMouseEnter).on("mouseleave.tt", ".tt-suggestion", onSuggestionMouseLeave); _.each(this.datasets, function(dataset) { that.$menu.append(dataset.getRoot()); dataset.onSync("rendered", that._onRendered, that); }); } _.mixin(Dropdown.prototype, EventEmitter, { _onSuggestionClick: function onSuggestionClick($e) { this.trigger("suggestionClicked", $($e.currentTarget)); }, _onSuggestionMouseEnter: function onSuggestionMouseEnter($e) { this._removeCursor(); this._setCursor($($e.currentTarget), true); }, _onSuggestionMouseLeave: function onSuggestionMouseLeave() { this._removeCursor(); }, _onRendered: function onRendered() { this.isEmpty = _.every(this.datasets, isDatasetEmpty); this.isEmpty ? 
this._hide() : this.isOpen && this._show(); this.trigger("datasetRendered"); function isDatasetEmpty(dataset) { return dataset.isEmpty(); } }, _hide: function() { this.$menu.hide(); }, _show: function() { this.$menu.css("display", "block"); }, _getSuggestions: function getSuggestions() { return this.$menu.find(".tt-suggestion"); }, _getCursor: function getCursor() { return this.$menu.find(".tt-cursor").first(); }, _setCursor: function setCursor($el, silent) { $el.first().addClass("tt-cursor"); !silent && this.trigger("cursorMoved"); }, _removeCursor: function removeCursor() { this._getCursor().removeClass("tt-cursor"); }, _moveCursor: function moveCursor(increment) { var $suggestions, $oldCursor, newCursorIndex, $newCursor; if (!this.isOpen) { return; } $oldCursor = this._getCursor(); $suggestions = this._getSuggestions(); this._removeCursor(); newCursorIndex = $suggestions.index($oldCursor) + increment; newCursorIndex = (newCursorIndex + 1) % ($suggestions.length + 1) - 1; if (newCursorIndex === -1) { this.trigger("cursorRemoved"); return; } else if (newCursorIndex < -1) { newCursorIndex = $suggestions.length - 1; } this._setCursor($newCursor = $suggestions.eq(newCursorIndex)); this._ensureVisible($newCursor); }, _ensureVisible: function ensureVisible($el) { var elTop, elBottom, menuScrollTop, menuHeight; elTop = $el.position().top; elBottom = elTop + $el.outerHeight(true); menuScrollTop = this.$menu.scrollTop(); menuHeight = this.$menu.height() + parseInt(this.$menu.css("paddingTop"), 10) + parseInt(this.$menu.css("paddingBottom"), 10); if (elTop < 0) { this.$menu.scrollTop(menuScrollTop + elTop); } else if (menuHeight < elBottom) { this.$menu.scrollTop(menuScrollTop + (elBottom - menuHeight)); } }, close: function close() { if (this.isOpen) { this.isOpen = false; this._removeCursor(); this._hide(); this.trigger("closed"); } }, open: function open() { if (!this.isOpen) { this.isOpen = true; !this.isEmpty && this._show(); this.trigger("opened"); } }, setLanguageDirection: function setLanguageDirection(dir) { this.$menu.css(dir === "ltr" ? 
css.ltr : css.rtl); }, moveCursorUp: function moveCursorUp() { this._moveCursor(-1); }, moveCursorDown: function moveCursorDown() { this._moveCursor(+1); }, getDatumForSuggestion: function getDatumForSuggestion($el) { var datum = null; if ($el.length) { datum = { raw: Dataset.extractDatum($el), value: Dataset.extractValue($el), datasetName: Dataset.extractDatasetName($el) }; } return datum; }, getDatumForCursor: function getDatumForCursor() { return this.getDatumForSuggestion(this._getCursor().first()); }, getDatumForTopSuggestion: function getDatumForTopSuggestion() { return this.getDatumForSuggestion(this._getSuggestions().first()); }, update: function update(query) { _.each(this.datasets, updateDataset); function updateDataset(dataset) { dataset.update(query); } }, empty: function empty() { _.each(this.datasets, clearDataset); this.isEmpty = true; function clearDataset(dataset) { dataset.clear(); } }, isVisible: function isVisible() { return this.isOpen && !this.isEmpty; }, destroy: function destroy() { this.$menu.off(".tt"); this.$menu = null; _.each(this.datasets, destroyDataset); function destroyDataset(dataset) { dataset.destroy(); } } }); return Dropdown; function initializeDataset(oDataset) { return new Dataset(oDataset); } }(); var Typeahead = function() { "use strict"; var attrsKey = "ttAttrs"; function Typeahead(o) { var $menu, $input, $hint; o = o || {}; if (!o.input) { $.error("missing input"); } this.isActivated = false; this.autoselect = !!o.autoselect; this.minLength = _.isNumber(o.minLength) ? o.minLength : 1; this.$node = buildDom(o.input, o.withHint); $menu = this.$node.find(".tt-dropdown-menu"); $input = this.$node.find(".tt-input"); $hint = this.$node.find(".tt-hint"); $input.on("blur.tt", function($e) { var active, isActive, hasActive; active = document.activeElement; isActive = $menu.is(active); hasActive = $menu.has(active).length > 0; if (_.isMsie() && (isActive || hasActive)) { $e.preventDefault(); $e.stopImmediatePropagation(); _.defer(function() { $input.focus(); }); } }); $menu.on("mousedown.tt", function($e) { $e.preventDefault(); }); this.eventBus = o.eventBus || new EventBus({ el: $input }); this.dropdown = new Dropdown({ menu: $menu, datasets: o.datasets }).onSync("suggestionClicked", this._onSuggestionClicked, this).onSync("cursorMoved", this._onCursorMoved, this).onSync("cursorRemoved", this._onCursorRemoved, this).onSync("opened", this._onOpened, this).onSync("closed", this._onClosed, this).onAsync("datasetRendered", this._onDatasetRendered, this); this.input = new Input({ input: $input, hint: $hint }).onSync("focused", this._onFocused, this).onSync("blurred", this._onBlurred, this).onSync("enterKeyed", this._onEnterKeyed, this).onSync("tabKeyed", this._onTabKeyed, this).onSync("escKeyed", this._onEscKeyed, this).onSync("upKeyed", this._onUpKeyed, this).onSync("downKeyed", this._onDownKeyed, this).onSync("leftKeyed", this._onLeftKeyed, this).onSync("rightKeyed", this._onRightKeyed, this).onSync("queryChanged", this._onQueryChanged, this).onSync("whitespaceChanged", this._onWhitespaceChanged, this); this._setLanguageDirection(); } _.mixin(Typeahead.prototype, { _onSuggestionClicked: function onSuggestionClicked(type, $el) { var datum; if (datum = this.dropdown.getDatumForSuggestion($el)) { this._select(datum); } }, _onCursorMoved: function onCursorMoved() { var datum = this.dropdown.getDatumForCursor(); this.input.setInputValue(datum.value, true); this.eventBus.trigger("cursorchanged", datum.raw, datum.datasetName); }, _onCursorRemoved: function 
onCursorRemoved() { this.input.resetInputValue(); this._updateHint(); }, _onDatasetRendered: function onDatasetRendered() { this._updateHint(); }, _onOpened: function onOpened() { this._updateHint(); this.eventBus.trigger("opened"); }, _onClosed: function onClosed() { this.input.clearHint(); this.eventBus.trigger("closed"); }, _onFocused: function onFocused() { this.isActivated = true; this.dropdown.open(); }, _onBlurred: function onBlurred() { this.isActivated = false; this.dropdown.empty(); this.dropdown.close(); }, _onEnterKeyed: function onEnterKeyed(type, $e) { var cursorDatum, topSuggestionDatum; cursorDatum = this.dropdown.getDatumForCursor(); topSuggestionDatum = this.dropdown.getDatumForTopSuggestion(); if (cursorDatum) { this._select(cursorDatum); $e.preventDefault(); } else if (this.autoselect && topSuggestionDatum) { this._select(topSuggestionDatum); $e.preventDefault(); } }, _onTabKeyed: function onTabKeyed(type, $e) { var datum; if (datum = this.dropdown.getDatumForCursor()) { this._select(datum); $e.preventDefault(); } else { this._autocomplete(true); } }, _onEscKeyed: function onEscKeyed() { this.dropdown.close(); this.input.resetInputValue(); }, _onUpKeyed: function onUpKeyed() { var query = this.input.getQuery(); this.dropdown.isEmpty && query.length >= this.minLength ? this.dropdown.update(query) : this.dropdown.moveCursorUp(); this.dropdown.open(); }, _onDownKeyed: function onDownKeyed() { var query = this.input.getQuery(); this.dropdown.isEmpty && query.length >= this.minLength ? this.dropdown.update(query) : this.dropdown.moveCursorDown(); this.dropdown.open(); }, _onLeftKeyed: function onLeftKeyed() { this.dir === "rtl" && this._autocomplete(); }, _onRightKeyed: function onRightKeyed() { this.dir === "ltr" && this._autocomplete(); }, _onQueryChanged: function onQueryChanged(e, query) { this.input.clearHintIfInvalid(); query.length >= this.minLength ? this.dropdown.update(query) : this.dropdown.empty(); this.dropdown.open(); this._setLanguageDirection(); }, _onWhitespaceChanged: function onWhitespaceChanged() { this._updateHint(); this.dropdown.open(); }, _setLanguageDirection: function setLanguageDirection() { var dir; if (this.dir !== (dir = this.input.getLanguageDirection())) { this.dir = dir; this.$node.css("direction", dir); this.dropdown.setLanguageDirection(dir); } }, _updateHint: function updateHint() { var datum, val, query, escapedQuery, frontMatchRegEx, match; datum = this.dropdown.getDatumForTopSuggestion(); if (datum && this.dropdown.isVisible() && !this.input.hasOverflow()) { val = this.input.getInputValue(); query = Input.normalizeQuery(val); escapedQuery = _.escapeRegExChars(query); frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i"); match = frontMatchRegEx.exec(datum.value); match ? 
this.input.setHint(val + match[1]) : this.input.clearHint(); } else { this.input.clearHint(); } }, _autocomplete: function autocomplete(laxCursor) { var hint, query, isCursorAtEnd, datum; hint = this.input.getHint(); query = this.input.getQuery(); isCursorAtEnd = laxCursor || this.input.isCursorAtEnd(); if (hint && query !== hint && isCursorAtEnd) { datum = this.dropdown.getDatumForTopSuggestion(); datum && this.input.setInputValue(datum.value); this.eventBus.trigger("autocompleted", datum.raw, datum.datasetName); } }, _select: function select(datum) { this.input.setQuery(datum.value); this.input.setInputValue(datum.value, true); this._setLanguageDirection(); this.eventBus.trigger("selected", datum.raw, datum.datasetName); this.dropdown.close(); _.defer(_.bind(this.dropdown.empty, this.dropdown)); }, open: function open() { this.dropdown.open(); }, close: function close() { this.dropdown.close(); }, setVal: function setVal(val) { val = _.toStr(val); if (this.isActivated) { this.input.setInputValue(val); } else { this.input.setQuery(val); this.input.setInputValue(val, true); } this._setLanguageDirection(); }, getVal: function getVal() { return this.input.getQuery(); }, destroy: function destroy() { this.input.destroy(); this.dropdown.destroy(); destroyDomStructure(this.$node); this.$node = null; } }); return Typeahead; function buildDom(input, withHint) { var $input, $wrapper, $dropdown, $hint; $input = $(input); $wrapper = $(html.wrapper).css(css.wrapper); $dropdown = $(html.dropdown).css(css.dropdown); $hint = $input.clone().css(css.hint).css(getBackgroundStyles($input)); $hint.val("").removeData().addClass("tt-hint").removeAttr("id name placeholder required").prop("readonly", true).attr({ autocomplete: "off", spellcheck: "false", tabindex: -1 }); $input.data(attrsKey, { dir: $input.attr("dir"), autocomplete: $input.attr("autocomplete"), spellcheck: $input.attr("spellcheck"), style: $input.attr("style") }); $input.addClass("tt-input").attr({ autocomplete: "off", spellcheck: false }).css(withHint ? css.input : css.inputWithNoHint); try { !$input.attr("dir") && $input.attr("dir", "auto"); } catch (e) {} return $input.wrap($wrapper).parent().prepend(withHint ? $hint : null).append($dropdown); } function getBackgroundStyles($el) { return { backgroundAttachment: $el.css("background-attachment"), backgroundClip: $el.css("background-clip"), backgroundColor: $el.css("background-color"), backgroundImage: $el.css("background-image"), backgroundOrigin: $el.css("background-origin"), backgroundPosition: $el.css("background-position"), backgroundRepeat: $el.css("background-repeat"), backgroundSize: $el.css("background-size") }; } function destroyDomStructure($node) { var $input = $node.find(".tt-input"); _.each($input.data(attrsKey), function(val, key) { _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val); }); $input.detach().removeData(attrsKey).removeClass("tt-input").insertAfter($node); $node.remove(); } }(); (function() { "use strict"; var old, typeaheadKey, methods; old = $.fn.typeahead; typeaheadKey = "ttTypeahead"; methods = { initialize: function initialize(o, datasets) { datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1); o = o || {}; return this.each(attach); function attach() { var $input = $(this), eventBus, typeahead; _.each(datasets, function(d) { d.highlight = !!o.highlight; }); typeahead = new Typeahead({ input: $input, eventBus: eventBus = new EventBus({ el: $input }), withHint: _.isUndefined(o.hint) ? 
true : !!o.hint, minLength: o.minLength, autoselect: o.autoselect, datasets: datasets }); $input.data(typeaheadKey, typeahead); } }, open: function open() { return this.each(openTypeahead); function openTypeahead() { var $input = $(this), typeahead; if (typeahead = $input.data(typeaheadKey)) { typeahead.open(); } } }, close: function close() { return this.each(closeTypeahead); function closeTypeahead() { var $input = $(this), typeahead; if (typeahead = $input.data(typeaheadKey)) { typeahead.close(); } } }, val: function val(newVal) { return !arguments.length ? getVal(this.first()) : this.each(setVal); function setVal() { var $input = $(this), typeahead; if (typeahead = $input.data(typeaheadKey)) { typeahead.setVal(newVal); } } function getVal($input) { var typeahead, query; if (typeahead = $input.data(typeaheadKey)) { query = typeahead.getVal(); } return query; } }, destroy: function destroy() { return this.each(unattach); function unattach() { var $input = $(this), typeahead; if (typeahead = $input.data(typeaheadKey)) { typeahead.destroy(); $input.removeData(typeaheadKey); } } } }; $.fn.typeahead = function(method) { var tts; if (methods[method] && method !== "initialize") { tts = this.filter(function() { return !!$(this).data(typeaheadKey); }); return methods[method].apply(tts, [].slice.call(arguments, 1)); } else { return methods.initialize.apply(this, arguments); } }; $.fn.typeahead.noConflict = function noConflict() { $.fn.typeahead = old; return this; }; })(); })(window.jQuery);
/ModelicaLanguage-0.0.0a6-py3-none-any.whl/modelica_language/parsers/syntax/_keyword.py
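# NOTE: every keyword rule in this module ends with the negative lookahead
# (?!\w), so a keyword only matches when it is not followed by another word
# character (e.g. "model" matches in "model Foo" but not inside "modelica").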
__all__ = (
    "ANY_KEYWORD", "ALGORITHM", "AND", "ANNOTATION", "BLOCK", "BREAK",
    "CLASS", "CONNECT", "CONNECTOR", "CONSTANT", "CONSTRAINEDBY", "DER",
    "DISCRETE", "EACH", "ELSE", "ELSEIF", "ELSEWHEN", "ENCAPSULATED",
    "END", "ENUMERATION", "EQUATION", "EXPANDABLE", "EXTENDS", "EXTERNAL",
    "FALSE", "FINAL", "FLOW", "FOR", "FUNCTION", "IF", "IMPORT", "IMPURE",
    "IN", "INITIAL", "INNER", "INPUT", "LOOP", "MODEL", "NOT", "OPERATOR",
    "OR", "OUTER", "OUTPUT", "PACKAGE", "PARAMETER", "PARTIAL",
    "PROTECTED", "PUBLIC", "PURE", "RECORD", "REDECLARE", "REPLACEABLE",
    "RETURN", "STREAM", "THEN", "TRUE", "TYPE", "WHEN", "WHILE", "WITHIN",
)

from arpeggio import RegExMatch

any_keyword = (
    r'('
    r'algorithm|and|annotation|block|break|class|connect|connector|'
    r'constant|constrainedby|der|discrete|each|else|elseif|elsewhen|'
    r'encapsulated|end|enumeration|equation|expandable|extends|external|'
    r'false|final|flow|for|function|if|import|impure|in|initial|inner|'
    r'input|loop|model|not|operator|or|outer|output|package|parameter|'
    r'partial|protected|public|pure|record|redeclare|replaceable|return|'
    r'stream|then|true|type|when|while|within'
    r')(?!\w)'
)


def ANY_KEYWORD():
    return RegExMatch(any_keyword)


ANY_KEYWORD.__doc__ = f"ANY_KEYWORD = r'{any_keyword}'"


def ALGORITHM():
    r"ALGORITHM = r'algorithm(?!\w)'"
    return RegExMatch(r'algorithm(?!\w)')

def AND():
    r"AND = r'and(?!\w)'"
    return RegExMatch(r'and(?!\w)')

def ANNOTATION():
    r"ANNOTATION = r'annotation(?!\w)'"
    return RegExMatch(r'annotation(?!\w)')

def BLOCK():
    r"BLOCK = r'block(?!\w)'"
    return RegExMatch(r'block(?!\w)')

def BREAK():
    r"BREAK = r'break(?!\w)'"
    return RegExMatch(r'break(?!\w)')

def CLASS():
    r"CLASS = r'class(?!\w)'"
    return RegExMatch(r'class(?!\w)')

def CONNECT():
    r"CONNECT = r'connect(?!\w)'"
    return RegExMatch(r'connect(?!\w)')

def CONNECTOR():
    r"CONNECTOR = r'connector(?!\w)'"
    return RegExMatch(r'connector(?!\w)')

def CONSTANT():
    r"CONSTANT = r'constant(?!\w)'"
    return RegExMatch(r'constant(?!\w)')

def CONSTRAINEDBY():
    r"CONSTRAINEDBY = r'constrainedby(?!\w)'"
    return RegExMatch(r'constrainedby(?!\w)')

def DER():
    r"DER = r'der(?!\w)'"
    return RegExMatch(r'der(?!\w)')

def DISCRETE():
    r"DISCRETE = r'discrete(?!\w)'"
    return RegExMatch(r'discrete(?!\w)')

def EACH():
    r"EACH = r'each(?!\w)'"
    return RegExMatch(r'each(?!\w)')

def ELSE():
    r"ELSE = r'else(?!\w)'"
    return RegExMatch(r'else(?!\w)')

def ELSEIF():
    r"ELSEIF = r'elseif(?!\w)'"
    return RegExMatch(r'elseif(?!\w)')

def ELSEWHEN():
    r"ELSEWHEN = r'elsewhen(?!\w)'"
    return RegExMatch(r'elsewhen(?!\w)')

def ENCAPSULATED():
    r"ENCAPSULATED = r'encapsulated(?!\w)'"
    return RegExMatch(r'encapsulated(?!\w)')

def END():
    r"END = r'end(?!\w)'"
    return RegExMatch(r'end(?!\w)')

def ENUMERATION():
    r"ENUMERATION = r'enumeration(?!\w)'"
    return RegExMatch(r'enumeration(?!\w)')

def EQUATION():
    r"EQUATION = r'equation(?!\w)'"
    return RegExMatch(r'equation(?!\w)')

def EXPANDABLE():
    r"EXPANDABLE = r'expandable(?!\w)'"
    return RegExMatch(r'expandable(?!\w)')

def EXTENDS():
    r"EXTENDS = r'extends(?!\w)'"
    return RegExMatch(r'extends(?!\w)')

def EXTERNAL():
    r"EXTERNAL = r'external(?!\w)'"
    return RegExMatch(r'external(?!\w)')

def FALSE():
    r"FALSE = r'false(?!\w)'"
    return RegExMatch(r'false(?!\w)')

def FINAL():
    r"FINAL = r'final(?!\w)'"
    return RegExMatch(r'final(?!\w)')

def FLOW():
    r"FLOW = r'flow(?!\w)'"
    return RegExMatch(r'flow(?!\w)')

def FOR():
    r"FOR = r'for(?!\w)'"
    return RegExMatch(r'for(?!\w)')

def FUNCTION():
    r"FUNCTION = r'function(?!\w)'"
    return RegExMatch(r'function(?!\w)')

def IF():
    r"IF = r'if(?!\w)'"
    return RegExMatch(r'if(?!\w)')

def IMPORT():
    r"IMPORT = r'import(?!\w)'"
    return RegExMatch(r'import(?!\w)')

def IMPURE():
    r"IMPURE = r'impure(?!\w)'"
    return RegExMatch(r'impure(?!\w)')

def IN():
    r"IN = r'in(?!\w)'"
    return RegExMatch(r'in(?!\w)')

def INITIAL():
    r"INITIAL = r'initial(?!\w)'"
    return RegExMatch(r'initial(?!\w)')

def INNER():
    r"INNER = r'inner(?!\w)'"
    return RegExMatch(r'inner(?!\w)')

def INPUT():
    r"INPUT = r'input(?!\w)'"
    return RegExMatch(r'input(?!\w)')

def LOOP():
    r"LOOP = r'loop(?!\w)'"
    return RegExMatch(r'loop(?!\w)')

def MODEL():
    r"MODEL = r'model(?!\w)'"
    return RegExMatch(r'model(?!\w)')

def NOT():
    r"NOT = r'not(?!\w)'"
    return RegExMatch(r'not(?!\w)')

def OPERATOR():
    r"OPERATOR = r'operator(?!\w)'"
    return RegExMatch(r'operator(?!\w)')

def OR():
    r"OR = r'or(?!\w)'"
    return RegExMatch(r'or(?!\w)')

def OUTER():
    r"OUTER = r'outer(?!\w)'"
    return RegExMatch(r'outer(?!\w)')

def OUTPUT():
    r"OUTPUT = r'output(?!\w)'"
    return RegExMatch(r'output(?!\w)')

def PACKAGE():
    r"PACKAGE = r'package(?!\w)'"
    return RegExMatch(r'package(?!\w)')

def PARAMETER():
    r"PARAMETER = r'parameter(?!\w)'"
    return RegExMatch(r'parameter(?!\w)')

def PARTIAL():
    r"PARTIAL = r'partial(?!\w)'"
    return RegExMatch(r'partial(?!\w)')

def PROTECTED():
    r"PROTECTED = r'protected(?!\w)'"
    return RegExMatch(r'protected(?!\w)')

def PUBLIC():
    r"PUBLIC = r'public(?!\w)'"
    return RegExMatch(r'public(?!\w)')

def PURE():
    r"PURE = r'pure(?!\w)'"
    return RegExMatch(r'pure(?!\w)')

def RECORD():
    r"RECORD = r'record(?!\w)'"
    return RegExMatch(r'record(?!\w)')

def REDECLARE():
    r"REDECLARE = r'redeclare(?!\w)'"
    return RegExMatch(r'redeclare(?!\w)')

def REPLACEABLE():
    r"REPLACEABLE = r'replaceable(?!\w)'"
    return RegExMatch(r'replaceable(?!\w)')

def RETURN():
    r"RETURN = r'return(?!\w)'"
    return RegExMatch(r'return(?!\w)')

def STREAM():
    r"STREAM = r'stream(?!\w)'"
    return RegExMatch(r'stream(?!\w)')

def THEN():
    r"THEN = r'then(?!\w)'"
    return RegExMatch(r'then(?!\w)')

def TRUE():
    r"TRUE = r'true(?!\w)'"
    return RegExMatch(r'true(?!\w)')

def TYPE():
    r"TYPE = r'type(?!\w)'"
    return RegExMatch(r'type(?!\w)')

def WHEN():
    r"WHEN = r'when(?!\w)'"
    return RegExMatch(r'when(?!\w)')

def WHILE():
    r"WHILE = r'while(?!\w)'"
    return RegExMatch(r'while(?!\w)')

def WITHIN():
    r"WITHIN = r'within(?!\w)'"
    return RegExMatch(r'within(?!\w)')
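A minimal usage sketch (separate from the module above, assuming the arpeggio package is installed): the IDENT and model_header rules below are illustrative examples, not part of the package.

from arpeggio import EOF, ParserPython, RegExMatch
from modelica_language.parsers.syntax._keyword import MODEL

def IDENT():
    # crude stand-in for a Modelica identifier (illustrative only)
    return RegExMatch(r"[A-Za-z_]\w*")

def model_header():
    # hypothetical rule matching e.g. "model Pendulum"
    return MODEL, IDENT, EOF

parser = ParserPython(model_header)
parser.parse("model Pendulum")   # parses: "model" is followed by a non-word char
# parser.parse("modelPendulum")  # raises NoMatch: the (?!\w) lookahead keeps
#                                # the keyword from matching a mere prefix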
/EOmaps-7.0-py3-none-any.whl/eomaps/qtcompanion/widgets/editor.py
import logging

from PyQt5 import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import Qt, pyqtSignal, pyqtSlot, QPointF
from PyQt5.QtGui import QFont

from matplotlib.colors import to_rgba_array

from ...inset_maps import InsetMaps
from ..common import iconpath
from ..base import BasicCheckableToolButton
from .wms import AddWMSMenuButton
from .utils import ColorWithSlidersWidget, GetColorWidget, AlphaSlider
from .annotate import AddAnnotationWidget
from .draw import DrawerTabs
from .files import OpenDataStartTab
from .layer import AutoUpdateLayerMenuButton

_log = logging.getLogger(__name__)


class AddFeaturesMenuButton(QtWidgets.QPushButton):
    FeatureAdded = pyqtSignal(str)

    def __init__(self, *args, m=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.m = m
        self._menu_fetched = False

        # the layer to which features are added
        self.layer = None

        self.props = dict(
            # alpha = 1,
            facecolor="r",
            edgecolor="g",
            linewidth=1,
            zorder=0,
        )

        self.setText("Add Feature")
        # self.setMaximumWidth(200)
        width = self.fontMetrics().boundingRect(self.text()).width()
        self.setFixedWidth(width + 30)

        self.feature_menu = QtWidgets.QMenu()
        self.feature_menu.setStyleSheet("QMenu { menu-scrollable: 1;}")
        self.feature_menu.aboutToShow.connect(self.fetch_menu)

        self.setMenu(self.feature_menu)
        self.clicked.connect(self.show_menu)

    def fetch_menu(self):
        if self._menu_fetched:
            return

        feature_types = [i for i in dir(self.m.add_feature) if not i.startswith("_")]

        for featuretype in feature_types:
            try:
                sub_menu = self.feature_menu.addMenu(featuretype)
                sub_features = [
                    i
                    for i in dir(getattr(self.m.add_feature, featuretype))
                    if not i.startswith("_")
                ]
                for feature in sub_features:
                    action = sub_menu.addAction(str(feature))
                    action.triggered.connect(
                        self.menu_callback_factory(featuretype, feature)
                    )
            except Exception:
                _log.warning(
                    f"There was a problem with the NaturalEarth feature: {featuretype}",
                    exc_info=_log.getEffectiveLevel() <= logging.DEBUG,
                )
                continue

        self._menu_fetched = True

    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>NaturalEarth Features</h3>"
                "Add NaturalEarth features to the map."
                "<p>"
                "The feature will be added to the "
                "<b><font color=#c80000>currently selected tab</font></b> "
                "in the tab-bar below."
                "<p>"
                "NOTE: this is not necessarily the visible layer!",
            )
        super().enterEvent(e)

    @pyqtSlot()
    def show_menu(self):
        self.feature_menu.popup(self.mapToGlobal(self.menu_button.pos()))

    def set_layer(self, layer):
        self.layer = layer

    def menu_callback_factory(self, featuretype, feature):
        @pyqtSlot()
        def cb():
            # TODO set the layer !!!!
            if self.layer is None:
                layer = self.m.BM.bg_layer
            else:
                layer = self.layer

            if layer.startswith("_") and "|" in layer:
                self.window().statusBar().showMessage(
                    "Adding features to temporary multi-layers is not supported!",
                    5000,
                )
                return

            try:
                f = getattr(getattr(self.m.add_feature, featuretype), feature)
                if featuretype == "preset":
                    f(layer=layer, **f.kwargs)
                else:
                    f(layer=layer, **self.props)

                self.m.f.canvas.draw_idle()
                self.FeatureAdded.emit(str(layer))
            except Exception:
                _log.error(
                    f"---- adding the feature {featuretype} {feature} "
                    "did not work----",
                    exc_info=_log.getEffectiveLevel() <= logging.DEBUG,
                )

        return cb


class ZorderInput(QtWidgets.QLineEdit):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Zorder</h3>"
                "Set the zorder of the artist (e.g. the vertical stacking "
                "order with respect to other artists on the same layer)",
            )


class RemoveArtistToolButton(QtWidgets.QToolButton):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Remove Artist</h3>"
                "Remove the artist from the axis. (This <b>can not</b> be undone!)",
            )


class ShowHideToolButton(QtWidgets.QToolButton):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Show/Hide Artist</h3>"
                "Make the corresponding artist visible (eye open) "
                "or invisible (eye closed).",
            )


class LineWidthInput(QtWidgets.QLineEdit):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Linewidth</h3>"
                "Set the linewidth of the corresponding artist.",
            )


class AlphaInput(QtWidgets.QLineEdit):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Transparency</h3>"
                "Set the alpha-transparency of the artist.",
            )


class AddFeatureWidget(QtWidgets.QFrame):
    def __init__(self, m=None):
        super().__init__()
        # self.setFrameStyle(QtWidgets.QFrame.StyledPanel | QtWidgets.QFrame.Plain)

        self.m = m

        self.selector = AddFeaturesMenuButton(m=self.m)
        self.selector.clicked.connect(self.update_props)
        self.selector.menu().aboutToShow.connect(self.update_props)

        self.colorselector = ColorWithSlidersWidget(facecolor="#aaaa7f")

        self.zorder = ZorderInput("0")
        validator = QtGui.QIntValidator()
        self.zorder.setValidator(validator)
        self.zorder.setMaximumWidth(30)
        self.zorder.setMaximumHeight(20)
        self.zorder.setSizePolicy(
            QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum
        )
        self.zorder.textChanged.connect(self.update_props)

        zorder_label = QtWidgets.QLabel("zorder: ")
        zorder_label.setSizePolicy(
            QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum
        )

        zorder_layout = QtWidgets.QHBoxLayout()
        zorder_layout.addWidget(zorder_label)
        zorder_layout.addWidget(self.zorder, 0)
        zorder_label.setAlignment(Qt.AlignRight | Qt.AlignCenter)

        layout_buttons = QtWidgets.QVBoxLayout()
        layout_buttons.addWidget(self.selector)
        layout_buttons.addLayout(zorder_layout)

        layout = QtWidgets.QHBoxLayout()
        layout.addLayout(layout_buttons)
        layout.addWidget(self.colorselector)
        layout.setAlignment(Qt.AlignLeft | Qt.AlignCenter)

        layout_tight = QtWidgets.QVBoxLayout()
        layout_tight.addStretch(1)
        layout_tight.addLayout(layout)
        layout_tight.addStretch(1)

        self.setLayout(layout_tight)

    @pyqtSlot()
    def update_props(self):
        # don't specify alpha! it interferes with the alpha of the colors!
        self.selector.props.update(
            dict(
                facecolor=self.colorselector.facecolor.getRgbF(),
                edgecolor=self.colorselector.edgecolor.getRgbF(),
                linewidth=self.colorselector.linewidth,
                zorder=int(self.zorder.text()),
                # alpha = self.colorselector.alpha,
            )
        )

    def set_linewidth_slider_stylesheet(self):
        self.linewidthslider.setStyleSheet(
            """
            QSlider::handle:horizontal {
                background-color: black;
                border: none;
                border-radius: 0px;
                height: 10px;
                width: 5px;
                margin: -10px 0;
                padding: -10px 0px;
            }
            QSlider::groove:horizontal {
                border-radius: 1px;
                height: 1px;
                margin: 5px;
                background-color: rgba(0,0,0,50);
            }
            QSlider::groove:horizontal:hover {
                background-color: rgba(0,0,0,255);
            }
            """
        )

    def set_alpha_slider_stylesheet(self):
        a = self.alphaslider.alpha * 255
        s = 12
        self.alphaslider.setStyleSheet(
            f"""
            QSlider::handle:horizontal {{
                background-color: rgba(0,0,0,{a});
                border: 1px solid black;
                border-radius: {s//2}px;
                height: {s}px;
                width: {s}px;
                margin: -{s//2}px 0px;
                padding: -{s//2}px 0px;
            }}
            QSlider::groove:horizontal {{
                border-radius: 1px;
                height: 1px;
                margin: 5px;
                background-color: rgba(0,0,0,50);
            }}
            QSlider::groove:horizontal:hover {{
                background-color: rgba(0,0,0,255);
            }}
            """
        )

    def update_alphaslider(self):
        # to always round up to closest int use -(-x//1)
        self.alphaslider.setValue(int(-(-self.colorselector.alpha * 100 // 1)))


class OpenFileButton(QtWidgets.QPushButton):
    def enterEvent(self, e):
        OpenDataStartTab.enterEvent(self, e)


class PlusButton(BasicCheckableToolButton):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.set_icons(
            normal_icon=str(iconpath / "plus.png"),
            hoover_icon=str(iconpath / "plus_hoover.png"),
        )
        self.setFixedSize(30, 30)
        self.setCheckable(False)
        self.setStyleSheet("PlusButton {border: 0}")


class LayerArtistTabs(QtWidgets.QTabWidget):
    plusClicked = pyqtSignal()

    def __init__(self, *args, m=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.m = m

        self.margin_left = 25
        self.margin_right = 60

        # Plus Button
        self.plus_button = PlusButton(self)
        self.plus_button.clicked.connect(self.plusClicked.emit)

        self.layer_button = AutoUpdateLayerMenuButton(self, m=self.m)
        self.layer_button.setFixedWidth(30)

        self.move_plus_button()  # Move to the correct location
        self.move_layer_button()  # Move to the correct location

    def move_plus_button(self, *args, **kwargs):
        """Move the plus button to the correct location."""
        # Set the plus button location in a visible area
        h = self.geometry().top()
        w = self.window().width()
        self.plus_button.move(w - self.margin_right, -3)

    def move_layer_button(self, *args, **kwargs):
        """Move the plus button to the correct location."""
        # Set the plus button location in a visible area
        h = self.geometry().top()
        self.layer_button.move(-5, 2)

    def paintEvent(self, *args, **kwargs):
        # make some space for the + button
        self.tabBar().setFixedWidth(
            self.window().width() - self.margin_left - self.margin_right
        )
        self.tabBar().move(self.margin_left, 0)
        self.move_plus_button()
        self.move_layer_button()
        super().paintEvent(*args, **kwargs)

    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Background Layers and Artists</h3>"
                "Each tab represents a layer of the map."
                "<ul>"
                "<li>The tab-order represents the stacking order of the layers.</li>"
                "<li><b>drag</b> tabs to change the layer ordering!</li>"
                "</ul>"
                "<ul>"
                "<li><b>click</b> on a tab to select it (to add/remove features)</li>"
                "<li><b>control + click</b> on a tab to make it the visible layer.</li>"
                "<li><b>shift + click</b> on tabs to make multiple layers visible.</li>"
                "</ul>"
                "The tab-entries show all individual <b>background</b> artists of the "
                "selected layer. (background artists are static map-elements that are "
                "only re-drawn on pan/zoom or resize events)"
                "<br>"
                "Features and WebMaps created with the controls above are always "
                "added to the <b>currently selected tab</b>!<br>"
                "(indicated by a <b><font color=#c80000>red border</font></b>)",
            )


class OptionTabs(QtWidgets.QTabWidget):
    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Add Features / Add Annotations / Draw Shapes</h3>"
                "The tabs provide a set of convenience-functionalities to add basic "
                "features to the map."
                "<ul>"
                "<li><b>Add Features:</b> Add NaturalEarth features to the map.</li>"
                "<li><b>Add Annotations:</b> Add an arrow with a text-annotation "
                "to the map.</li>"
                "<li><b>Draw Shapes:</b> Draw basic shapes on the map and optionally "
                "save the shapes as geo-coded shapefiles.</li>"
                "</ul>",
            )


class LayerTransparencySlider(AlphaSlider):
    _alphas = dict()

    def enterEvent(self, e):
        if self.window().showhelp is True:
            QtWidgets.QToolTip.showText(
                e.globalPos(),
                "<h3>Layer Transparency</h3> Set the global layer transparency.",
            )


class LayerTabBar(QtWidgets.QTabBar):
    _number_of_min_tabs_for_size = 6
    _n_layer_msg_shown = False

    def __init__(self, m=None, populate=False, *args, **kwargs):
        """
        Parameters
        ----------
        m : eomaps.Maps
            the Maps object to use
        populate : bool, optional
            Indicator if the layer-tabs are automatically created or not.

            - Use True if ONLY tabs should be shown
            - Use False if tabs should contain widgets...
              (then the TabWidget will take care of creating tabs)

            The default is False.
        """
        super().__init__(*args, **kwargs)
        self.m = m

        # remove strange line on top of tabs
        # (see https://stackoverflow.com/a/33941638/9703451)
        self.setDrawBase(False)
        self.setExpanding(False)
        self.setElideMode(Qt.ElideRight)

        self._current_tab_idx = None
        self._current_tab_name = None

        self.setMovable(True)
        self.setUsesScrollButtons(True)

        self.tabBarClicked.connect(self.tabchanged)

        self.setTabsClosable(True)
        self.tabCloseRequested.connect(self.close_handler)

        self.tabMoved.connect(self.tab_moved)

        if populate:
            # re-populate tabs if a new layer is created
            # NOTE this is done by the TabWidget if tabs have content!!
            self.populate()

            # re-populate on show to make sure currently active layers are shown
            self.m.BM.on_layer(self.populate_on_layer, persistent=True)
            self.m._after_add_child.append(self.populate)
            self.m._on_show_companion_widget.append(self.populate)

        # set font properties before the stylesheet to avoid clipping of bold text!
font = QFont("sans seriv", 8, QFont.Bold, False) self.setFont(font) self.setStyleSheet( """ QTabWidget::pane { border: 0px; top:0px; background: rgb(200, 200, 200); border-radius: 10px; } QTabBar::tab { background: rgb(245, 245, 245); border: 1px solid black; padding: 3px; margin-left: 2px; margin-bottom: 0px; border-radius: 4px; } QTabBar::tab:selected { background: rgb(245, 245, 245); border: 1px solid black; margin-bottom: 0px; } """ ) def event(self, event): # don't show normal tooltips while showhelp is active # (they would cause the help-popups to disappear after ~ 1 sec) if event.type() == QtCore.QEvent.ToolTip and self.window().showhelp: return return super().event(event) def mousePressEvent(self, event): # TODO a more clean implementation of this would be nice # explicitly handle control+click and shift+click events # to avoid activating the currently clicked tab # (we want to activate the currently active tab which is shifted to the # start-position!) modifiers = QtWidgets.QApplication.keyboardModifiers() if ( modifiers == Qt.ControlModifier and event.button() == Qt.MouseButton.LeftButton ): idx = self.tabAt(event.pos()) self.tabchanged(idx) elif ( modifiers == Qt.ShiftModifier and event.button() == Qt.MouseButton.LeftButton ): idx = self.tabAt(event.pos()) self.tabchanged(idx) else: super().mousePressEvent(event) @pyqtSlot() def get_tab_icon(self, color="red"): if isinstance(color, str): color = QtGui.QColor(color) elif isinstance(color, (list, tuple)): color = QtGui.QColor(*color) canvas = QtGui.QPixmap(20, 20) canvas.fill(Qt.transparent) painter = QtGui.QPainter(canvas) painter.setRenderHints(QtGui.QPainter.HighQualityAntialiasing) pencolor = QtGui.QColor(color) pencolor.setAlpha(100) painter.setPen(QtGui.QPen(pencolor, 2, Qt.SolidLine)) painter.setBrush(QtGui.QBrush(color, Qt.SolidPattern)) painter.drawEllipse(QPointF(10, 12), 7, 7) painter.end() icon = QtGui.QIcon(canvas) return icon # def sizeHint(self): # # make sure the TabBar does not expand the window width # hint = super().sizeHint() # width = self.window().width() # hint.setWidth(width) # return hint def minimumTabSizeHint(self, index): # the minimum width of the tabs is determined such that at least # "_number_of_min_tabs_for_size" tabs are visible. # (e.g. for the elide of long tab-names) hint = super().tabSizeHint(index) w = int(self.sizeHint().width() / self._number_of_min_tabs_for_size) hint.setWidth(w) return hint def enterEvent(self, e): if self.window().showhelp is True: QtWidgets.QToolTip.showText( e.globalPos(), "<h3>Layer Tabs</h3>" "Select, combine and re-arrange layers of the map. " "<ul>" "<li><b>ctrl + click:</b> make selected layer visible</li>" "<li><b>shift + click:</b> select multiple layers </li>" "<li><b>drag:</b> change the layer stacking-order. 
" "</ul>", ) def repopulate_and_activate_current(self, *args, **kwargs): self.populate() # activate the currently visible layer tab try: idx = next( i for i in range(self.count()) if self.tabText(i) == self.m.BM._bg_layer ) self.setCurrentIndex(idx) except StopIteration: pass @pyqtSlot() def tab_moved(self): # get currently active layers active_layers, alphas = self.m.BM._get_layers_alphas() # get the name of the layer that was moved layer = self.tabText(self.currentIndex()) if layer not in active_layers: return # get the current ordering of visible layers ntabs = self.count() layer_order = [] for i in range(ntabs): txt = self.tabText(i) if txt in active_layers: layer_order.append(txt) # set the new layer-order if active_layers != layer_order: # avoid recursions alpha_order = [alphas[active_layers.index(i)] for i in layer_order] self.m.show_layer(*zip(layer_order, alpha_order)) @pyqtSlot(int) def close_handler(self, index): layer = self.tabText(index) self._msg = QtWidgets.QMessageBox(self) self._msg.setIcon(QtWidgets.QMessageBox.Question) self._msg.setWindowIcon(QtGui.QIcon(str(iconpath / "info.png"))) self._msg.setText(f"Do you really want to delete the layer '{layer}'") self._msg.setWindowTitle(f"Delete layer: '{layer}'?") self._msg.setStandardButtons( QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No ) self._msg.buttonClicked.connect(self.get_close_tab_cb(index)) _ = self._msg.show() def get_close_tab_cb(self, index): @pyqtSlot() def cb(): self._do_close_tab(index) return cb def _do_close_tab(self, index): if self._msg.standardButton(self._msg.clickedButton()) != self._msg.Yes: return layer = self.tabText(index) if self.m.layer == layer: _log.error("EOmaps: The base-layer cannot be deleted!") return # get currently active layers active_layers, alphas = self.m.BM._get_layers_alphas() # cleanup the layer and remove any artists etc. for m in list(self.m._children): if layer == m.layer: m.cleanup() m.BM._bg_layers.pop(layer, None) # in case the layer was visible, try to activate a suitable replacement if layer in active_layers: # if possible, show the currently active multi-layer but without # the deleted layer layer_idx = active_layers.index(layer) active_layers.pop(layer_idx) alphas.pop(layer_idx) if len(active_layers) > 0: try: self.m.show_layer(*zip(active_layers, alphas)) except Exception: pass else: # otherwise switch to the first available layer try: switchlayer = next( (i for i in self.m.BM._bg_artists if layer not in i.split("|")) ) self.m.show_layer(switchlayer) except StopIteration: # don't allow deletion of last layer _log.error("EOmaps: Unable to delete the last available layer!") return if layer in list(self.m.BM._bg_artists): for a in self.m.BM._bg_artists[layer]: self.m.BM.remove_bg_artist(a) a.remove() del self.m.BM._bg_artists[layer] if layer in self.m.BM._bg_layers: del self.m.BM._bg_layers[layer] # also remove the layer from any layer-change/layer-activation triggers # (e.g. 
to deal with not-yet-fetched WMS services) for permanent, d in self.m.BM._on_layer_activation.items(): if layer in d: del d[layer] for permanent, d in self.m.BM._on_layer_change.items(): if layer in d: del d[layer] self.populate() def color_active_tab(self, m=None, layer=None, adjust_order=True): # defaultcolor = self.palette().color(self.foregroundRole()) defaultcolor = QtGui.QColor(100, 100, 100) activecolor = QtGui.QColor(50, 150, 50) # QtGui.QColor(0, 128, 0) multicolor = QtGui.QColor(50, 150, 50) # QtGui.QColor(0, 128, 0) # get currently active layers active_layers, alphas = self.m.BM._get_layers_alphas() for i in range(self.count()): selected_layer = self.tabText(i) color = activecolor if len(active_layers) == 1 else multicolor if selected_layer in active_layers: idx = active_layers.index(selected_layer) self.setTabTextColor(i, color) if alphas[idx] < 1: color = QtGui.QColor(color) color.setAlpha(int(alphas[idx] * 100)) self.setTabIcon(i, self.get_tab_icon(color)) else: self.setTabTextColor(i, defaultcolor) self.setTabIcon(i, QtGui.QIcon()) if layer == selected_layer: self.setTabTextColor(i, activecolor) if adjust_order: # --- adjust the sort-order of the tabs to the order of the visible layers # disconnect tab_moved callback to avoid recursions self.tabMoved.disconnect(self.tab_moved) # to avoid issues with non-existent and private layers (e.g. the background # layer on savefig etc.) use the following strategy: # - go through the layers in reverse # - move each found layer to the position 0 for cl in active_layers[::-1]: for i in range(self.count()): layer = self.tabText(i) if layer == cl: self.moveTab(i, 0) # re-connect tab_moved callback self.tabMoved.connect(self.tab_moved) @pyqtSlot() def populate_on_layer(self, *args, **kwargs): lastlayer = getattr(self, "_last_populated_layer", "") currlayer = self.m.BM.bg_layer # only populate if the current layer is not part of the last set of layers # (e.g. to allow show/hide of selected layers without removing the tabs) if not set(lastlayer.split("|")).issuperset(set(currlayer.split("|"))): self.populate(*args, **kwargs) self._last_populated_layer = currlayer else: # still update tab colors (e.g. if layers are removed from multi) self.color_active_tab() @pyqtSlot() def populate(self, *args, **kwargs): if not self.isVisible(): return self._current_tab_idx = self.currentIndex() self._current_tab_name = self.tabText(self._current_tab_idx) alllayers = set(self.m._get_layers()) nlayers = len(alllayers) max_n_layers = self.m._companion_widget_n_layer_tabs if nlayers > max_n_layers: if not LayerTabBar._n_layer_msg_shown: _log.info( "EOmaps-companion: The map has more than " f"{max_n_layers} layers... only last active layers " "are shown in the layer-tabs!" 
) LayerTabBar._n_layer_msg_shown = True # if more than 200 layers are available, show only active tabs to # avoid performance issues when too many tabs are created alllayers = [i for i in self.m.BM._bg_layer.split("|") if i in alllayers] for i in range(self.count(), -1, -1): self.removeTab(i) else: # go through the layers in reverse and remove any no longer existing layers existing_layers = set() for i in range(self.count(), -1, -1): layer = self.tabText(i) # remove all tabs that do not represent existing layers of the map if layer not in alllayers: self.removeTab(i) else: existing_layers.add(layer) # pop all existing layers from the alllayers set (no need to re-create them) alllayers.difference_update(existing_layers) for i, layer in enumerate(sorted(alllayers)): layout = QtWidgets.QGridLayout() layout.setAlignment(Qt.AlignTop | Qt.AlignLeft) if layer.startswith("_"): # or "|" in layer: # make sure the currently opened tab is always added (even if empty) if layer != self._current_tab_name: # don't show empty layers continue self.addTab(layer) self.setTabToolTip(i, layer) if layer == "all" or layer == self.m.layer: # don't show the close button for this tab self.setTabButton(self.count() - 1, self.RightSide, None) self.color_active_tab() # try to restore the previously opened tab self.set_current_tab_by_name(self._current_tab_name) @pyqtSlot(str) def set_current_tab_by_name(self, layer): if layer is None: layer = self.m.BM.bg_layer found = False ntabs = self.count() if ntabs > 0 and layer != "": for i in range(ntabs): if self.tabText(i) == layer: self.setCurrentIndex(i) found = True break if found is False: self.setCurrentIndex(0) @pyqtSlot(int) def tabchanged(self, index): # TODO # modifiers are only released if the canvas has focus while the event happens!! # (e.g. button is released but event is not fired on the canvas) # see https://stackoverflow.com/q/60978379/9703451 # simply calling canvas.setFocus() does not work! # for w in QtWidgets.QApplication.topLevelWidgets(): # if w.inherits('QMainWindow'): # w.canvas.setFocusPolicy(QtCore.Qt.FocusPolicy.StrongFocus) # w.canvas.setFocus() # w.raise_() # _log.debug("raising", w, w.canvas) layer = self.tabText(index) if len(layer) == 0: return modifiers = QtWidgets.QApplication.keyboardModifiers() if modifiers == Qt.ControlModifier: if layer != "": self.m.show_layer( (layer, LayerTransparencySlider._alphas.get(layer, 1)) ) # TODO this is a workaround since modifier-releases are not # forwarded to the canvas if it is not in focus self.m.f.canvas.key_release_event("control") elif modifiers == Qt.ShiftModifier: # The all layer should not be combined with other layers... 
# (it is already visible anyways) if layer == "all": return # get currently active layers active_layers, alphas = self.m.BM._get_layers_alphas() for x in (i for i in layer.split("|") if i != "_"): if x not in active_layers: active_layers.append(x) alphas.append(LayerTransparencySlider._alphas.get(x, 1)) else: idx = active_layers.index(x) active_layers.pop(idx) alphas.pop(idx) if len(active_layers) >= 1: self.m.show_layer(*zip(active_layers, alphas)) else: self.m.show_layer( (layer, LayerTransparencySlider._alphas.get(layer, 1)) ) # TODO this is a workaround since modifier-releases are not # forwarded to the canvas if it is not in focus self.m.f.canvas.key_release_event("shift") # make sure to reflect the layer-changes in the tab-colors (and positions) self.color_active_tab() self.set_current_tab_by_name(layer) class ArtistEditorTabs(LayerArtistTabs): def __init__(self, m=None): super().__init__(m=m) self.setTabBar(LayerTabBar(m=self.m)) # re-populate tabs if a new layer is created self.populate() self.m._after_add_child.append(self.populate) self.m.BM.on_layer(self.populate_on_layer, persistent=True) self.currentChanged.connect(self.populate_layer) self.m.BM._on_add_bg_artist.append(self.populate) self.m.BM._on_remove_bg_artist.append(self.populate) self.m._on_show_companion_widget.append(self.populate) self.m._on_show_companion_widget.append(self.populate_layer) self.plusClicked.connect(self.new_layer_button_clicked) self.setStyleSheet( """ QTabWidget::pane { border: 0px; top:0px; background: rgb(240, 240, 240); border-radius: 10px; } QScrollArea {border:0px} """ ) def new_layer_button_clicked(self, *args, **kwargs): inp = QtWidgets.QInputDialog(self) inp.setWindowIcon(QtGui.QIcon(str(iconpath / "plus.png"))) inp.setWindowFlags(inp.windowFlags() & ~Qt.WindowContextHelpButtonHint) inp.setInputMode(QtWidgets.QInputDialog.TextInput) inp.setFixedSize(200, 100) inp.setWindowTitle("New Layer") inp.setLabelText("Name:") if inp.exec_() == QtWidgets.QDialog.Accepted: # use .strip to remove any trailing spaces layer = inp.textValue().strip() # only create layers if at least 1 character has been provided if len(layer) > 0: self.m.new_layer(layer) inp.deleteLater() def repopulate_and_activate_current(self, *args, **kwargs): self.populate() # activate the currently visible layer tab try: idx = next( i for i in range(self.count()) if self.tabText(i) == self.m.BM._bg_layer ) self.setCurrentIndex(idx) except StopIteration: pass self.populate_layer() def _get_artist_layout(self, a, layer): # label try: name = a.get_label() if len(name) == 0: name = str(a) except Exception: name = str(a) # for artists that should not show up in the editor if name.startswith("__EOmaps_exclude"): return [(None, None)] elif name.startswith("__EOmaps_deactivate"): name = name[20:].strip() deactivated = True else: deactivated = False if len(name) > 80: label = QtWidgets.QLabel(name[:76] + "... 
>") label.setToolTip(name) else: label = QtWidgets.QLabel(name) label.setStyleSheet( "border-radius: 5px;" "border-style: solid;" "border-width: 1px;" "border-color: rgba(0, 0, 0,100);" "padding-left: 10px;" ) label.setAlignment(Qt.AlignLeft) label.setMaximumHeight(25) # remove b_r = RemoveArtistToolButton() b_r.setText("🞪") b_r.setAutoRaise(True) b_r.setStyleSheet("QToolButton {color: red;}") b_r.clicked.connect(self.remove(artist=a, layer=layer)) # show / hide b_sh = ShowHideToolButton() b_sh.setAutoRaise(True) if a in self.m.BM._hidden_artists: b_sh.setIcon(QtGui.QIcon(str(iconpath / "eye_closed.png"))) else: b_sh.setIcon(QtGui.QIcon(str(iconpath / "eye_open.png"))) b_sh.clicked.connect(self.show_hide(artist=a, layer=layer)) # zorder b_z = ZorderInput() b_z.setMinimumWidth(30) b_z.setMaximumWidth(30) validator = QtGui.QIntValidator() b_z.setValidator(validator) b_z.setText(str(a.get_zorder())) b_z.returnPressed.connect(self.set_zorder(artist=a, layer=layer, widget=b_z)) # alpha alpha = a.get_alpha() if alpha is not None: b_a = AlphaInput() b_a.setMinimumWidth(25) b_a.setMaximumWidth(50) validator = QtGui.QDoubleValidator(0.0, 1.0, 3) validator.setLocale(QtCore.QLocale("en_US")) b_a.setValidator(validator) b_a.setText(str(alpha)) b_a.returnPressed.connect(self.set_alpha(artist=a, layer=layer, widget=b_a)) else: b_a = None # linewidth try: lw = a.get_linewidth() if isinstance(lw, list) and len(lw) > 1: pass else: lw = lw[0] if lw is not None: b_lw = LineWidthInput() b_lw.setMinimumWidth(25) b_lw.setMaximumWidth(50) validator = QtGui.QDoubleValidator(0, 100, 3) validator.setLocale(QtCore.QLocale("en_US")) b_lw.setValidator(validator) b_lw.setText(str(lw)) b_lw.returnPressed.connect( self.set_linewidth(artist=a, layer=layer, widget=b_lw) ) else: b_lw = None except Exception: b_lw = None # color try: facecolor = to_rgba_array(a.get_facecolor()) edgecolor = to_rgba_array(a.get_edgecolor()) if facecolor.shape[0] != 1: facecolor = (0, 0, 0, 0) use_cmap = True else: facecolor = (facecolor.squeeze() * 255).astype(int).tolist() use_cmap = False if edgecolor.shape[0] != 1: edgecolor = (0, 0, 0, 0) else: edgecolor = (edgecolor.squeeze() * 255).astype(int).tolist() b_c = GetColorWidget(facecolor=facecolor, edgecolor=edgecolor) b_c.cb_colorselected = self.set_color( artist=a, layer=layer, colorwidget=b_c ) b_c.setFrameStyle(QtWidgets.QFrame.StyledPanel | QtWidgets.QFrame.Plain) b_c.setSizePolicy( QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum ) b_c.setMaximumWidth(25) except Exception: b_c = None use_cmap = True pass # cmap from .utils import CmapDropdown if use_cmap is True: try: cmap = a.get_cmap() b_cmap = CmapDropdown(startcmap=cmap.name) b_cmap.activated.connect( self.set_cmap(artist=a, layer=layer, widget=b_cmap) ) except Exception: b_cmap = None pass else: b_cmap = None layout = [] if not deactivated: layout.append((b_sh, 0)) # show hide # if b_c is not None: # layout.append((b_c, 1)) # color layout.append((b_z, 2)) # zorder layout.append((label, 3)) # title # if b_lw is not None: # layout.append((b_lw, 4)) # linewidth # if b_a is not None: # layout.append((b_a, 5)) # alpha # if b_cmap is not None: # layout.append((b_cmap, 6)) # cmap if not deactivated: layout.append((b_r, 7)) # remove if deactivated: for w in layout: w[0].setEnabled(False) return layout @pyqtSlot() def populate_on_layer(self, *args, **kwargs): lastlayer = getattr(self, "_last_populated_layer", "") currlayer = self.m.BM.bg_layer # ignore global layer transparencies (no need to re-populate if global) # transparency 
changes. # NOTE: This is necessary to avoid recursions for multi-layers! last_layers = set(self.m.BM._get_layers_alphas(lastlayer)[0]) curr_layers = set(self.m.BM._get_layers_alphas(currlayer)[0]) # only populate if the current layer is not part of the last set of layers # (e.g. to allow show/hide of selected layers without removing the tabs) if not last_layers.issuperset(curr_layers): self.populate(*args, **kwargs) self._last_populated_layer = currlayer else: # TODO check why adjusting the tab-order causes recursions if multiple # layers are selected (and the transparency of a sub-layer is changed) self.tabBar().color_active_tab(adjust_order=False) @pyqtSlot() def populate(self, *args, **kwargs): if not self.isVisible(): return tabbar = self.tabBar() self._current_tab_idx = self.currentIndex() self._current_tab_name = self.tabText(self._current_tab_idx) # go through the layers in reverse and remove any no longer existing layers alllayers = set(self.m._get_layers()) nlayers = len(alllayers) max_n_layers = self.m._companion_widget_n_layer_tabs if nlayers > max_n_layers: if not LayerTabBar._n_layer_msg_shown: _log.info( "EOmaps-companion: The map has more than " f"{max_n_layers} layers... only last active layers " "are shown in the layer-tabs!" ) LayerTabBar._n_layer_msg_shown = True # if more than max_n_layers layers are available, show only active tabs to # avoid performance issues when too many tabs are created alllayers = [i for i in self.m.BM._bg_layer.split("|") if i in alllayers] for i in range(self.count(), -1, -1): self.removeTab(i) else: existing_layers = set() for i in range(self.count(), -1, -1): layer = self.tabText(i) # remove all tabs that do not represent existing layers of the map if layer not in alllayers: self.removeTab(i) else: existing_layers.add(layer) # pop all existing layers from the alllayers set (no need to re-create them) alllayers.difference_update(existing_layers) for i, layer in enumerate(sorted(alllayers)): layout = QtWidgets.QGridLayout() layout.setAlignment(Qt.AlignTop | Qt.AlignLeft) if layer.startswith("_"): # or "|" in layer: # make sure the currently opened tab is always added (even if empty) if layer != self._current_tab_name: # don't show empty layers continue scroll = QtWidgets.QScrollArea() scroll.setWidgetResizable(True) self.addTab(scroll, layer) self.setTabToolTip(i, layer) if layer == "all" or layer == self.m.layer: # don't show the close button for this tab tabbar.setTabButton(self.count() - 1, tabbar.RightSide, None) tabbar.color_active_tab() # try to restore the previously opened tab tabbar.set_current_tab_by_name(self._current_tab_name) def get_layer_alpha(self, layer): layers, alphas = self.m.BM._get_layers_alphas() if layer in layers: idx = layers.index(layer) alpha = alphas[idx] LayerTransparencySlider._alphas[layer] = alpha elif layer in LayerTransparencySlider._alphas: # use last set alpha value for the layer alpha = LayerTransparencySlider._alphas[layer] else: alpha = 1 return alpha @pyqtSlot() def populate_layer(self, layer=None): if not self.isVisible(): return if layer is None: layer = self.tabText(self.currentIndex()) # make sure we fetch artists of inset-maps from the layer with # the "__inset_" prefix if isinstance(self.m, InsetMaps) and not layer.startswith("__inset_"): layer = "__inset_" + layer widget = self.currentWidget() if widget is None: # ignore events without tabs (they happen on re-population of the tabs) return edit_layout = QtWidgets.QGridLayout() edit_layout.setAlignment(Qt.AlignTop | Qt.AlignLeft) # make sure that 
we don't create an empty entry ! # TODO the None check is to address possible race-conditions # with Maps objects that have no axes defined. if layer in self.m.BM._bg_artists and self.m.ax is not None: artists = [ a for a in self.m.BM.get_bg_artists(layer) if a.axes is self.m.ax ] else: artists = [] for i, a in enumerate(artists): for art, pos in self._get_artist_layout(a, layer): if art is not None: edit_layout.addWidget(art, i, pos) # ------------------------ layer-actions menu # button to add WebMap services to the currently selected layer try: self.addwms = AddWMSMenuButton(m=self.m, new_layer=False, layer=layer) self.addwms.wmsLayerCreated.connect(self.populate_layer) except Exception: self.addwms = None # slider to set the global layer transparency self.layer_transparency_slider = LayerTransparencySlider(Qt.Horizontal) self.layer_transparency_slider.set_alpha_stylesheet() self.layer_transparency_slider.setValue(int(self.get_layer_alpha(layer) * 100)) layer_transparency_label = QtWidgets.QLabel("<b>Layer Transparency:</b>") def update_layerslider(alpha): self.set_layer_alpha(layer, alpha / 100) LayerTransparencySlider._alphas[layer] = alpha / 100 self.layer_transparency_slider.valueChanged.connect(update_layerslider) layer_actions_layout = QtWidgets.QHBoxLayout() if self.addwms is not None: layer_actions_layout.addWidget(self.addwms) spacer = QtWidgets.QSpacerItem(50, 1) layer_actions_layout.addItem(spacer) layer_actions_layout.addWidget(layer_transparency_label) layer_actions_layout.addWidget(self.layer_transparency_slider, 1) # ------------------------ # a separator line separator = QtWidgets.QFrame() separator.setFrameShape(QtWidgets.QFrame.HLine) separator.setFixedHeight(1) separator.setStyleSheet("background-color: rgb(150,150,150)") layout = QtWidgets.QVBoxLayout() layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(layer_actions_layout) # layout.addWidget(separator) for text in self.m.BM._pending_webmaps.get(layer, []): layout.addWidget(QtWidgets.QLabel(f"<b>PENDING WebMap</b>: {text}")) layout.addLayout(edit_layout) layout.addStretch(1) tabwidget = QtWidgets.QWidget() tabwidget.setLayout(layout) widget.setWidget(tabwidget) # -------- def set_color(self, artist, layer, colorwidget): def cb(): artist.set_fc(colorwidget.facecolor.getRgbF()) artist.set_edgecolor(colorwidget.edgecolor.getRgbF()) self.m.BM._refetch_layer(layer) self.m.BM.update() return cb def _do_remove(self, artist, layer): if self._msg.standardButton(self._msg.clickedButton()) != self._msg.Yes: return self.m.BM.remove_bg_artist(artist, layer) try: artist.remove() except Exception: _log.error( "EOmaps: There was an error while trying to remove the artist", exc_info=_log.getEffectiveLevel() <= logging.DEBUG, ) # explicit treatment for gridlines grids = self.m.parent._grid._gridlines for g in grids: if artist == g._coll: g.remove() self.populate_layer(layer) self.m.redraw(layer) def remove(self, artist, layer): @pyqtSlot() def cb(): self._msg = QtWidgets.QMessageBox(self) self._msg.setIcon(QtWidgets.QMessageBox.Question) self._msg.setWindowTitle("Delete artist?") self._msg.setText( "Do you really want to delete the following artist " + f"from the layer '{layer}'?\n\n" + f" '{artist.get_label()}'" ) self._msg.setStandardButtons( QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No ) self._msg.buttonClicked.connect(lambda: self._do_remove(artist, layer)) self._msg.show() return cb def show_hide(self, artist, layer): @pyqtSlot() def cb(): if artist in self.m.BM._hidden_artists: 
self.m.BM._hidden_artists.remove(artist) artist.set_visible(True) else: self.m.BM._hidden_artists.add(artist) artist.set_visible(False) self.m.redraw(layer) self.populate_layer(layer) return cb def set_zorder(self, artist, layer, widget): @pyqtSlot() def cb(): val = widget.text() if len(val) > 0: artist.set_zorder(int(val)) self.m.redraw(layer) return cb def set_alpha(self, artist, layer, widget): @pyqtSlot() def cb(): val = widget.text() if len(val) > 0: artist.set_alpha(float(val.replace(",", "."))) self.m.redraw(layer) return cb def set_linewidth(self, artist, layer, widget): @pyqtSlot() def cb(): val = widget.text() if len(val) > 0: artist.set_linewidth(float(val.replace(",", "."))) self.m.redraw(layer) return cb def set_cmap(self, artist, layer, widget): @pyqtSlot() def cb(): val = widget.currentText() if len(val) > 0: artist.set_cmap(val) self.m.redraw(layer) return cb @pyqtSlot() def set_layer_alpha(self, layer, alpha): layers, alphas = self.m.BM._get_layers_alphas() if layer in layers: idx = layers.index(layer) alphas[idx] = alpha self.m.show_layer(*zip(layers, alphas)) class ArtistEditor(QtWidgets.QWidget): def __init__(self, *args, m=None, show_editor=False, **kwargs): super().__init__() self.m = m self.artist_tabs = ArtistEditorTabs(m=self.m) self.artist_tabs.tabBar().setStyleSheet( """ QTabBar::tab { background: rgb(220, 220, 220); border: 0px solid black; padding: 1px; padding-bottom: 6px; margin: 0px; margin-left: 2px; margin-bottom: -3px; border-radius: 4px; } QTabBar::tab:selected { background: rgb(150, 150, 150); border: 2px solid darkred; margin-bottom: -3px; } """ ) self.addfeature = AddFeatureWidget(m=self.m) self.addannotation = AddAnnotationWidget(m=self.m) self.draw = DrawerTabs(m=self.m) # add a margin to the top of the drawer widget d = QtWidgets.QWidget() layout = QtWidgets.QVBoxLayout() layout.addWidget(self.draw) layout.setContentsMargins(0, 5, 0, 0) d.setLayout(layout) # make sure the layer is properly set self.set_layer() self.option_tabs = OptionTabs() self.option_tabs.addTab(self.addfeature, "Add Features") self.option_tabs.addTab(self.addannotation, "Add Annotations") self.option_tabs.addTab(d, "Draw Shapes") # set font properties before the stylesheet to avoid clipping of bold text! 
font = QFont("sans seriv", 8, QFont.Bold, False) self.option_tabs.setFont(font) self.option_tabs.setStyleSheet( """ QTabWidget::pane { border: 0px; top:0px; background: rgb(200, 200, 200); border-radius: 10px; } QTabBar::tab { background: rgb(220, 220, 220); border: 0px; padding: 5px; padding-bottom: 6px; margin-left: 10px; margin-bottom: -2px; border-radius: 4px; font-weight: normal; } QTabBar::tab:selected { background: rgb(200, 200, 200); border: 0px; margin-bottom: -2px; font-weight: bold; } """ ) # repopulate the layer if features or webmaps are added self.addfeature.selector.FeatureAdded.connect(self.artist_tabs.populate_layer) option_widget = QtWidgets.QWidget() option_layout = QtWidgets.QVBoxLayout() option_layout.addWidget(self.option_tabs) option_widget.setLayout(option_layout) splitter = QtWidgets.QSplitter(Qt.Vertical) splitter.addWidget(option_widget) splitter.addWidget(self.artist_tabs) splitter.setStretchFactor(0, 0) splitter.setStretchFactor(1, 1) splitter.setStyleSheet( """ QSplitter::handle { background: rgb(220,220,220); margin: 1px; margin-left: 20px; margin-right: 20px; height:1px; } QSplitter::handle:pressed { background: rgb(180,180,180); } """ ) layout = QtWidgets.QVBoxLayout() layout.addWidget(splitter) self.setLayout(layout) # connect a callback to update the layer of the feature-button # with respect to the currently selected layer-tab self.artist_tabs.tabBar().currentChanged.connect(self.set_layer) @pyqtSlot() def set_layer(self): layer = self.artist_tabs.tabText(self.artist_tabs.currentIndex()) self.addfeature.selector.set_layer(layer) if self.draw is not None: self.draw.set_layer(layer) self.addannotation.set_layer(layer)
PypiClean
/NESTML-5.3.0-py3-none-any.whl/pynestml/meta_model/ast_kernel.py
from pynestml.meta_model.ast_node import ASTNode class ASTKernel(ASTNode): """ This class is used to store kernels. Grammar: kernel : KERNEL_KEYWORD variable EQUALS expression (COMMA variable EQUALS expression)* (SEMICOLON)?; """ def __init__(self, variables, expressions, *args, **kwargs): """ Standard constructor. Parameters for superclass (ASTNode) can be passed through :python:`*args` and :python:`**kwargs`. :param variables: the variable corresponding to the kernel :type variables: ASTVariable :param expressions: the right-hand side :type expressions: Union[ASTExpression, ASTSimpleExpression] """ super(ASTKernel, self).__init__(*args, **kwargs) self.variables = variables self.expressions = expressions def clone(self): """ Return a clone ("deep copy") of this node. :return: new AST node instance :rtype: ASTKernel """ variables_dup = None if self.variables: variables_dup = [var.clone() for var in self.variables] expressions_dup = None if self.expressions: expressions_dup = [expr.clone() for expr in self.expressions] dup = ASTKernel(variables=variables_dup, expressions=expressions_dup, # ASTNode common attributes: source_position=self.source_position, scope=self.scope, comment=self.comment, pre_comments=[s for s in self.pre_comments], in_comment=self.in_comment, implicit_conversion_factor=self.implicit_conversion_factor) return dup def get_variables(self): """ Returns the variable of the left-hand side. :return: the variable :rtype: ast_variable """ return self.variables def get_variable_names(self): """ Returns the variable names of the left-hand side. :return: the list of variable names :rtype: list(str) """ return [var.get_complete_name() for var in self.variables] def get_expressions(self): """ Returns the right-hand side rhs. :return: the rhs :rtype: ast_expression """ return self.expressions def get_parent(self, ast): """ Indicates whether this node contains the handed over node. :param ast: an arbitrary meta_model node. :type ast: ASTNode :return: AST if this or one of the child nodes contains the handed over element. :rtype: ASTNode or None """ for var in self.get_variables(): if var is ast: return self if var.get_parent(ast) is not None: return var.get_parent(ast) for expr in self.get_expressions(): if expr is ast: return self if expr.get_parent(ast) is not None: return expr.get_parent(ast) return None def equals(self, other): """ The equals method. :param other: a different object. :type other: object :return: True if equal, otherwise False. :rtype: bool """ if not isinstance(other, ASTKernel): return False for var in self.get_variables(): if not var in other.get_variables(): return False for var in other.get_variables(): if not var in self.get_variables(): return False for expr in self.get_expressions(): if not expr in other.get_expressions(): return False for expr in other.get_expressions(): if not expr in self.get_expressions(): return False return True
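# ---------------------------------------------------------------------------
# Editor-added sketch (not part of the original file): the clone()/equals()
# contract implemented by ASTKernel above, reproduced on a tiny self-contained
# stand-in so it runs without the rest of pynestml. `MiniKernel` is a
# hypothetical illustration, not a pynestml class; real code would pass
# ASTVariable / ASTSimpleExpression instances to ASTKernel instead.
class MiniKernel:
    def __init__(self, variables, expressions):
        self.variables = list(variables)      # left-hand-side variable names
        self.expressions = list(expressions)  # right-hand-side expressions

    def clone(self):
        # deep copy: children are duplicated, mirroring ASTKernel.clone()
        return MiniKernel(self.variables[:], self.expressions[:])

    def equals(self, other):
        # bidirectional membership test, mirroring ASTKernel.equals()
        return (isinstance(other, MiniKernel)
                and all(v in other.variables for v in self.variables)
                and all(v in self.variables for v in other.variables)
                and all(e in other.expressions for e in self.expressions)
                and all(e in self.expressions for e in other.expressions))

if __name__ == "__main__":
    k = MiniKernel(["g"], ["-g / tau"])
    k2 = k.clone()
    assert k.equals(k2) and k2 is not k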
PypiClean
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/65611.ec32da0a6ccb5c5f2b11.min.js
"use strict";(self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[65611],{11210:function(n,e,t){var i=t(87537),o=t.n(i),r=t(23645),s=t.n(r)()(o());s.push([n.id,'.slick-slider{position:relative;display:block;box-sizing:border-box;-webkit-touch-callout:none;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-ms-touch-action:pan-y;touch-action:pan-y;-webkit-tap-highlight-color:rgba(0,0,0,0)}.slick-list{position:relative;overflow:hidden;display:block;margin:0;padding:0}.slick-list:focus{outline:none}.slick-list.dragging{cursor:pointer;cursor:hand}.slick-slider .slick-track,.slick-slider .slick-list{-webkit-transform:translate3d(0, 0, 0);-moz-transform:translate3d(0, 0, 0);-ms-transform:translate3d(0, 0, 0);-o-transform:translate3d(0, 0, 0);transform:translate3d(0, 0, 0)}.slick-track{position:relative;left:0;top:0;display:block;margin-left:auto;margin-right:auto}.slick-track:before,.slick-track:after{content:"";display:table}.slick-track:after{clear:both}.slick-loading .slick-track{visibility:hidden}.slick-slide{float:left;height:100%;min-height:1px;display:none}[dir=rtl] .slick-slide{float:right}.slick-slide img{display:block}.slick-slide.slick-loading img{display:none}.slick-slide.dragging img{pointer-events:none}.slick-initialized .slick-slide{display:block}.slick-loading .slick-slide{visibility:hidden}.slick-vertical .slick-slide{display:block;height:auto;border:1px solid rgba(0,0,0,0)}.slick-arrow.slick-hidden{display:none}',"",{version:3,sources:["webpack://./node_modules/slick-carousel/slick/slick.scss"],names:[],mappings:"AAEA,cACI,iBAAA,CACA,aAAA,CACA,qBAAA,CACA,0BAAA,CACA,wBAAA,CACA,uBAAA,CACA,qBAAA,CACA,oBAAA,CACA,gBAAA,CACA,sBAAA,CACA,kBAAA,CACA,yCAAA,CAEJ,YACI,iBAAA,CACA,eAAA,CACA,aAAA,CACA,QAAA,CACA,SAAA,CAEA,kBACI,YAAA,CAGJ,qBACI,cAAA,CACA,WAAA,CAGR,qDAEI,sCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,8BAAA,CAGJ,aACI,iBAAA,CACA,MAAA,CACA,KAAA,CACA,aAAA,CACA,gBAAA,CACA,iBAAA,CAEA,uCAEI,UAAA,CACA,aAAA,CAGJ,mBACI,UAAA,CAGJ,4BACI,iBAAA,CAGR,aACI,UAAA,CACA,WAAA,CACA,cAAA,CAWA,YAAA,CAVA,uBACI,WAAA,CAEJ,iBACI,aAAA,CAEJ,+BACI,YAAA,CAKJ,0BACI,mBAAA,CAGJ,gCACI,aAAA,CAGJ,4BACI,iBAAA,CAGJ,6BACI,aAAA,CACA,WAAA,CACA,8BAAA,CAGR,0BACI,YAAA",sourcesContent:['/* Slider */\n\n.slick-slider {\n position: relative;\n display: block;\n box-sizing: border-box;\n -webkit-touch-callout: none;\n -webkit-user-select: none;\n -khtml-user-select: none;\n -moz-user-select: none;\n -ms-user-select: none;\n user-select: none;\n -ms-touch-action: pan-y;\n touch-action: pan-y;\n -webkit-tap-highlight-color: transparent;\n}\n.slick-list {\n position: relative;\n overflow: hidden;\n display: block;\n margin: 0;\n padding: 0;\n\n &:focus {\n outline: none;\n }\n\n &.dragging {\n cursor: pointer;\n cursor: hand;\n }\n}\n.slick-slider .slick-track,\n.slick-slider .slick-list {\n -webkit-transform: translate3d(0, 0, 0);\n -moz-transform: translate3d(0, 0, 0);\n -ms-transform: translate3d(0, 0, 0);\n -o-transform: translate3d(0, 0, 0);\n transform: translate3d(0, 0, 0);\n}\n\n.slick-track {\n position: relative;\n left: 0;\n top: 0;\n display: block;\n margin-left: auto;\n margin-right: auto;\n\n &:before,\n &:after {\n content: "";\n display: table;\n }\n\n &:after {\n clear: both;\n }\n\n .slick-loading & {\n visibility: hidden;\n }\n}\n.slick-slide {\n float: left;\n height: 100%;\n min-height: 1px;\n [dir="rtl"] & {\n float: right;\n }\n img {\n display: block;\n }\n &.slick-loading img {\n display: none;\n }\n\n 
display: none;\n\n &.dragging img {\n pointer-events: none;\n }\n\n .slick-initialized & {\n display: block;\n }\n\n .slick-loading & {\n visibility: hidden;\n }\n\n .slick-vertical & {\n display: block;\n height: auto;\n border: 1px solid transparent;\n }\n}\n.slick-arrow.slick-hidden {\n display: none;\n}\n'],sourceRoot:""}]),e.Z=s},23645:function(n){n.exports=function(n){var e=[];return e.toString=function(){return this.map((function(e){var t="",i=void 0!==e[5];return e[4]&&(t+="@supports (".concat(e[4],") {")),e[2]&&(t+="@media ".concat(e[2]," {")),i&&(t+="@layer".concat(e[5].length>0?" ".concat(e[5]):""," {")),t+=n(e),i&&(t+="}"),e[2]&&(t+="}"),e[4]&&(t+="}"),t})).join("")},e.i=function(n,t,i,o,r){"string"==typeof n&&(n=[[null,n,void 0]]);var s={};if(i)for(var a=0;a<this.length;a++){var A=this[a][0];null!=A&&(s[A]=!0)}for(var l=0;l<n.length;l++){var c=[].concat(n[l]);i&&s[c[0]]||(void 0!==r&&(void 0===c[5]||(c[1]="@layer".concat(c[5].length>0?" ".concat(c[5]):""," {").concat(c[1],"}")),c[5]=r),t&&(c[2]?(c[1]="@media ".concat(c[2]," {").concat(c[1],"}"),c[2]=t):c[2]=t),o&&(c[4]?(c[1]="@supports (".concat(c[4],") {").concat(c[1],"}"),c[4]=o):c[4]="".concat(o)),e.push(c))}},e}},87537:function(n){n.exports=function(n){var e=n[1],t=n[3];if(!t)return e;if("function"==typeof btoa){var i=btoa(unescape(encodeURIComponent(JSON.stringify(t)))),o="sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(i),r="/*# ".concat(o," */");return[e].concat([r]).join("\n")}return[e].join("\n")}},65611:function(n,e,t){t.r(e);var i=t(93379),o=t.n(i),r=t(7795),s=t.n(r),a=t(3565),A=t.n(a),l=t(19216),c=t.n(l),d=t(44589),u=t.n(d),p=t(11210),C={};C.styleTagTransform=u(),C.setAttributes=A(),C.insert=function(n){var e=document.head.querySelectorAll("*")[0];e?document.head.insertBefore(n,e):document.head.append(n)},C.domAPI=s(),C.insertStyleElement=c();o()(p.Z,C);e.default=p.Z&&p.Z.locals?p.Z.locals:void 0},93379:function(n){var e=[];function t(n){for(var t=-1,i=0;i<e.length;i++)if(e[i].identifier===n){t=i;break}return t}function i(n,i){for(var r={},s=[],a=0;a<n.length;a++){var A=n[a],l=i.base?A[0]+i.base:A[0],c=r[l]||0,d="".concat(l," ").concat(c);r[l]=c+1;var u=t(d),p={css:A[1],media:A[2],sourceMap:A[3],supports:A[4],layer:A[5]};if(-1!==u)e[u].references++,e[u].updater(p);else{var C=o(p,i);i.byIndex=a,e.splice(a,0,{identifier:d,updater:C,references:1})}s.push(d)}return s}function o(n,e){var t=e.domAPI(e);t.update(n);return function(e){if(e){if(e.css===n.css&&e.media===n.media&&e.sourceMap===n.sourceMap&&e.supports===n.supports&&e.layer===n.layer)return;t.update(n=e)}else t.remove()}}n.exports=function(n,o){var r=i(n=n||[],o=o||{});return function(n){n=n||[];for(var s=0;s<r.length;s++){var a=t(r[s]);e[a].references--}for(var A=i(n,o),l=0;l<r.length;l++){var c=t(r[l]);0===e[c].references&&(e[c].updater(),e.splice(c,1))}r=A}}},19216:function(n){n.exports=function(n){var e=document.createElement("style");return n.setAttributes(e,n.attributes),n.insert(e,n.options),e}},3565:function(n,e,t){n.exports=function(n){var e=t.nc;e&&n.setAttribute("nonce",e)}},7795:function(n){n.exports=function(n){if("undefined"==typeof document)return{update:function(){},remove:function(){}};var e=n.insertStyleElement(n);return{update:function(t){!function(n,e,t){var i="";t.supports&&(i+="@supports (".concat(t.supports,") {")),t.media&&(i+="@media ".concat(t.media," {"));var o=void 0!==t.layer;o&&(i+="@layer".concat(t.layer.length>0?" 
".concat(t.layer):""," {")),i+=t.css,o&&(i+="}"),t.media&&(i+="}"),t.supports&&(i+="}");var r=t.sourceMap;r&&"undefined"!=typeof btoa&&(i+="\n/*# sourceMappingURL=data:application/json;base64,".concat(btoa(unescape(encodeURIComponent(JSON.stringify(r))))," */")),e.styleTagTransform(i,n,e.options)}(e,n,t)},remove:function(){!function(n){if(null===n.parentNode)return!1;n.parentNode.removeChild(n)}(e)}}}},44589:function(n){n.exports=function(n,e){if(e.styleSheet)e.styleSheet.cssText=n;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(n))}}}}]); //# sourceMappingURL=65611.ec32da0a6ccb5c5f2b11.min.js.map
PypiClean
/FlexGet-3.9.6-py3-none-any.whl/flexget/plugins/input/filmweb_watchlist.py
from loguru import logger from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached try: from filmweb.exceptions import RequestFailed from filmweb.filmweb import Filmweb as FilmwebAPI from filmweb.items import LoggedUser except ImportError: # Errors are handled later pass logger = logger.bind(name='filmweb_watchlist') def translate_type(type): return {'shows': 'serial', 'movies': 'film'}[type] class FilmwebWatchlist: """Creates an entry for each movie in your Filmweb list.""" schema = { 'type': 'object', 'properties': { 'login': {'type': 'string', 'description': 'Can be username or email address'}, 'password': {'type': 'string'}, 'type': {'type': 'string', 'enum': ['shows', 'movies'], 'default': 'movies'}, 'min_star': { 'type': 'integer', 'default': 0, 'description': 'Items will be processed with at least this level of "How much I want to see"', }, }, 'additionalProperties': False, 'required': ['login', 'password'], } def on_task_start(self, task, config): """Raise a DependencyError if our dependencies aren't available""" try: from filmweb.filmweb import Filmweb as FilmwebAPI # noqa except ImportError as e: logger.debug('Error importing pyfilmweb: {}', e) raise plugin.DependencyError( 'filmweb_watchlist', 'pyfilmweb', 'pyfilmweb==0.1.1.1 module required. ImportError: %s' % e, logger, ) @cached('filmweb_watchlist', persist='2 hours') def on_task_input(self, task, config): type = translate_type(config['type']) logger.verbose('Retrieving filmweb watch list for user: {}', config['login']) fw = FilmwebAPI() logger.verbose('Logging as {}', config['login']) try: fw.login(str(config['login']), str(config['password'])) except RequestFailed as error: raise plugin.PluginError('Authentication request failed, reason %s' % str(error)) user = LoggedUser(fw) try: watch_list = user.get_want_to_see() except RequestFailed as error: raise plugin.PluginError('Fetching watch list failed, reason %s' % str(error)) logger.verbose('Filmweb list contains {} items', len(watch_list)) entries = [] for item in watch_list: if item['level'] < config['min_star']: continue if item['film'].type != type: continue item_info = item['film'].get_info() entry = Entry() entry['title'] = item_info['name_org'] or item_info['name'] entry['title'] += ' (%s)' % item_info['year'] entry['year'] = item_info['year'] entry['url'] = item['film'].url entry['filmweb_type'] = item_info['type'] entry['filmweb_id'] = item['film'].uid logger.debug('Created entry {}', entry) entries.append(entry) return entries @event('plugin.register') def register_plugin(): plugin.register(FilmwebWatchlist, 'filmweb_watchlist', api_ver=2)
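# ---------------------------------------------------------------------------
# Editor-added usage sketch (not part of the original file): a FlexGet task
# configuration exercising the schema declared above. The task name and the
# credentials are placeholders.
#
#   tasks:
#     filmweb-movies:
#       filmweb_watchlist:
#         login: [email protected]
#         password: secret
#         type: movies
#         min_star: 3
#       accept_all: yes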
PypiClean
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/nodes/AsyncgenNodes.py
from .ChildrenHavingMixins import ChildHavingAsyncgenRefMixin from .ExpressionBases import ExpressionBase, ExpressionNoSideEffectsMixin from .FunctionNodes import ExpressionFunctionEntryPointBase class ExpressionMakeAsyncgenObject( ExpressionNoSideEffectsMixin, ChildHavingAsyncgenRefMixin, ExpressionBase ): kind = "EXPRESSION_MAKE_ASYNCGEN_OBJECT" named_children = ("asyncgen_ref",) __slots__ = ("variable_closure_traces",) def __init__(self, asyncgen_ref, source_ref): assert asyncgen_ref.getFunctionBody().isExpressionAsyncgenObjectBody() ChildHavingAsyncgenRefMixin.__init__(self, asyncgen_ref=asyncgen_ref) ExpressionBase.__init__(self, source_ref) self.variable_closure_traces = [] def getDetailsForDisplay(self): return {"asyncgen": self.subnode_asyncgen_ref.getFunctionBody().getCodeName()} def computeExpression(self, trace_collection): self.variable_closure_traces = [] for ( closure_variable ) in self.subnode_asyncgen_ref.getFunctionBody().getClosureVariables(): trace = trace_collection.getVariableCurrentTrace(closure_variable) trace.addNameUsage() self.variable_closure_traces.append((closure_variable, trace)) # TODO: Asyncgen body may know something too. return self, None, None def getClosureVariableVersions(self): return self.variable_closure_traces class ExpressionAsyncgenObjectBody(ExpressionFunctionEntryPointBase): kind = "EXPRESSION_ASYNCGEN_OBJECT_BODY" __slots__ = ("qualname_setup", "needs_generator_return_exit") def __init__(self, provider, name, code_object, flags, auto_release, source_ref): ExpressionFunctionEntryPointBase.__init__( self, provider=provider, name=name, code_object=code_object, code_prefix="asyncgen", flags=flags, auto_release=auto_release, source_ref=source_ref, ) self.needs_generator_return_exit = False self.qualname_setup = None def getFunctionName(self): return self.name def markAsNeedsGeneratorReturnHandling(self, value): self.needs_generator_return_exit = max(self.needs_generator_return_exit, value) def needsGeneratorReturnHandling(self): return self.needs_generator_return_exit == 2 def needsGeneratorReturnExit(self): return bool(self.needs_generator_return_exit) @staticmethod def needsCreation(): return False @staticmethod def isUnoptimized(): return False
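# ---------------------------------------------------------------------------
# Editor-added note (not part of the original file): the node classes above
# model the compiled form of a Python async generator. For reference, a
# minimal example of the kind of user code an EXPRESSION_MAKE_ASYNCGEN_OBJECT
# node stands for:
if __name__ == "__main__":
    import asyncio

    async def agen():
        # the body corresponds to an ExpressionAsyncgenObjectBody entry point
        yield 1
        yield 2

    async def main():
        print([x async for x in agen()])  # -> [1, 2]

    asyncio.run(main())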
PypiClean
/MCsniperPY-3.5-py3-none-any.whl/mcsniperpy/cli.py
import asyncio import typer from mcsniperpy import Sniper from mcsniperpy.util import logs_manager as log from mcsniperpy.util import ping_tester from mcsniperpy.util.name_system import next_name app = typer.Typer() def startup(): title = f"""{log.Color.cyan} ███╗ ███╗ ██████╗███████╗███╗ ██╗██╗██████╗ ███████╗██████╗ \ {log.Color.blue}██████╗ {log.Color.blue}██{log.Color.blue}╗ {log.Color.blue}\ ██╗ ████╗ ████║██╔════╝██╔════╝████╗ ██║██║██╔══██╗██╔════╝██╔══██╗\ {log.Color.blue}██╔══{log.Color.blue}██╗╚{log.Color.blue}██{log.Color.blue}╗\ {log.Color.blue}██╔╝ ██╔████╔██║██║ ███████╗██╔██╗ ██║██║██████╔╝█████╗ ██████╔╝\ {log.Color.blue}██{log.Color.blue}████╔╝ ╚{log.Color.blue}████╔╝ ██║╚██╔╝██║██║ ╚════██║██║╚██╗██║██║██╔═══╝ ██╔══╝ ██╔══██╗\ {log.Color.blue}█{log.Color.blue}█╔═══╝ ╚{log.Color.blue}██╔╝ ██║ ╚═╝ ██║╚██████╗███████║██║ ╚████║██║██║ ███████╗██║ ██║\ {log.Color.blue}██║ {log.Color.blue}██║ ╚═╝ ╚═╝ ╚═════╝╚══════╝╚═╝ ╚═══╝╚═╝╚═╝ ╚══════╝╚═╝ ╚═╝\ ╚═╝ ╚═╝ """ lines = "╗║╔═╝╚" for line_type in lines: title = title.replace( line_type, f"{log.Color.white}%s{log.Color.cyan}" % line_type) print(title) print(f"{log.Color.cyan}Created by Kqzz#0001") print( "Git: github.com/MCSniperPY/MCsniperPY | " f"Discord: https://mcsniperpy.com/discord{log.Color.white}" ) sniper = Sniper(log.Color, log.Logger) @app.command() def snipe( username: str = typer.Option( None, help="The username to attempt a snipe on"), offset: int = typer.Option( None, help="The offset you want to use for the target username." ), debug: bool = typer.Option(False, help="Enable debug mode."), color: bool = typer.Option(True, help="Colored terminal output"), next_with_searches: int = typer.Option( None, "--next", help="Snipe the next available username." " It is not recommended to use this!", ), ): """ Snipe a minecraft username! """ if debug: sniper.log.debug_enabled = True if not color: sniper.color.disable() if next_with_searches is not None: username = next_name(searches=next_with_searches) startup() asyncio.get_event_loop().run_until_complete(sniper.run(username, offset)) sniper.on_shutdown() @app.command() def ping( iterations: int = typer.Option( 5, help="How many times to ping Mojang's servers.") ): """ Test your ping to Mojang's servers """ asyncio.get_event_loop().run_until_complete(ping_tester.ping_test(iterations)) sniper.on_shutdown() @app.command() def offset_test( ): print("offset test was removed due to inaccuracy") print("it will not be re-added") print("change your offset around until 403 requests land around .01 through .04, and do not rely on an offset test of any kind for best results") print("better instructions will be added later") @app.command() def init(no_confirm: bool = typer.Option(False, help="remove confirmation message")): """ Initialize MCsniperPY to be able to snipe names. This is an essential step\ before sniping. Please read the docs for more info. https://docs.mcsniperpy.com """ sniper.init(no_confirm=no_confirm) def cli(): # try: # app() # # pylint: disable=broad-except # except Exception as ex: # traceback = ex.__traceback__ # sniper.log.error(f"type: {type(ex).__name__}") # sniper.log.error(f"message: {str(ex)}") # while traceback is not None: # # pylint: disable=no-member # sniper.log.error( # f"{traceback.tb_frame.f_code.co_filename}:{traceback.tb_lineno}" # ) # traceback = traceback.tb_next # finally: # sniper.on_shutdown() print("THIS SNIPER IS DONE") print("USE MCSNIPERGO") print("github.com/Kqzz/MCsniperGO") print("Made by https://kqzz.me") if __name__ == "__main__": cli()
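# ---------------------------------------------------------------------------
# Editor-added usage sketch (not part of the original file): shell invocations
# of the Typer commands defined above, assuming the package installs a console
# script named `mcsniperpy` (an assumption -- check the package metadata).
# Note that cli() now only prints the deprecation notice pointing to
# MCsniperGO instead of dispatching to these commands.
#
#   mcsniperpy init
#   mcsniperpy ping --iterations 10
#   mcsniperpy snipe --username Notch --offset 20 --debug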
PypiClean
/NeodroidAgent-0.4.8-py36-none-any.whl/neodroidagent/common/architectures/experimental/recurrent.py
from neodroidagent.common.architectures.mlp import MLP __author__ = "Christian Heider Nielsen" __doc__ = "" import torch from torch import nn from torch.nn import functional as F class RecurrentCategoricalMLP(MLP): def __init__(self, r_hidden_layers=10, **kwargs): super().__init__(**kwargs) self._r_hidden_layers = r_hidden_layers self._r_input_shape = self._output_shape + r_hidden_layers self.hidden = nn.Linear( self._r_input_shape, r_hidden_layers, bias=self._use_bias ) self.out = nn.Linear(self._r_input_shape, r_hidden_layers, bias=self._use_bias) self._prev_hidden_x = torch.zeros(r_hidden_layers) def forward(self, x, **kwargs): x = super().forward(x, **kwargs) combined = torch.cat((x, self._prev_hidden_x), 1) out_x = self.out(combined) hidden_x = self.hidden(combined) self._prev_hidden_x = hidden_x return F.log_softmax(out_x, dim=-1) class ExposedRecurrentCategoricalMLP(RecurrentCategoricalMLP): def forward(self, x, hidden_x, **kwargs): self._prev_hidden_x = hidden_x out_x = super().forward(x, **kwargs) return F.log_softmax(out_x, dim=-1), self._prev_hidden_x class RecurrentBase(nn.Module): def __init__(self, recurrent, recurrent_input_size, hidden_size): super().__init__() self._hidden_size = hidden_size self._recurrent = recurrent if recurrent: self.gru = nn.GRUCell(recurrent_input_size, hidden_size) nn.init.orthogonal_(self.gru.weight_ih.data) nn.init.orthogonal_(self.gru.weight_hh.data) self.gru.bias_ih.data.fill_(0) self.gru.bias_hh.data.fill_(0) def _forward_gru(self, x, hxs, masks): if x.size(0) == hxs.size(0): x = hxs = self.gru(x, hxs * masks) else: # x is a (T, N, -1) tensor that has been flatten to (T * N, -1) N = hxs.size(0) T = int(x.size(0) / N) # unflatten x = x.view(T, N, x.size(1)) # Same deal with masks masks = masks.view(T, N, 1) outputs = [] for i in range(T): hx = hxs = self.gru(x[i], hxs * masks[i]) outputs.append(hx) # assert len(outputs) == T # x is a (T, N, -1) tensor x = torch.stack(outputs, dim=0) # flatten x = x.view(T * N, -1) return x, hxs
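# ---------------------------------------------------------------------------
# Editor-added sketch (not part of the original file): exercising the two
# input shapes RecurrentBase._forward_gru supports -- a single step of shape
# (N, input_size) and a flattened rollout of shape (T * N, input_size). The
# sizes are arbitrary placeholders; only the module-level torch import above
# is needed.
if __name__ == "__main__":
    base = RecurrentBase(recurrent=True, recurrent_input_size=4, hidden_size=8)

    # single step: x and hxs share the batch dimension N
    x = torch.randn(3, 4)
    hxs = torch.zeros(3, 8)
    masks = torch.ones(3, 1)
    out, hxs = base._forward_gru(x, hxs, masks)
    print(out.shape, hxs.shape)  # torch.Size([3, 8]) torch.Size([3, 8])

    # rollout: T time steps flattened into the batch dimension
    T, N = 5, 3
    x = torch.randn(T * N, 4)
    masks = torch.ones(T * N, 1)
    out, hxs = base._forward_gru(x, hxs, masks)
    print(out.shape, hxs.shape)  # torch.Size([15, 8]) torch.Size([3, 8])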
PypiClean
/Graphy-1.0.0.tar.gz/Graphy-1.0.0/graphy/bar_chart.py
import copy
import warnings

from graphy import common
from graphy import util


class BarsStyle(object):
  """Style of a series of bars in a BarChart

  Attributes:
    color:  Hex string, like '00ff00' for green
  """
  def __init__(self, color):
    self.color = color


class BarChartStyle(object):
  """Represents the style for bars on a BarChart.

  Any of the object attributes may be set to None, in which case the
  value will be auto-calculated.

  Object Attributes:
    bar_thickness: The thickness of a bar, in pixels.
    bar_gap: The gap between bars, in pixels, or as a fraction of bar thickness
        if use_fractional_gap_spacing is True.
    group_gap: The gap between groups of bars, in pixels, or as a fraction of
        bar thickness if use_fractional_gap_spacing is True.
    use_fractional_gap_spacing: if True, bar_gap and group_gap specify gap
        sizes as a fraction of bar width. Default is False.
  """

  _DEFAULT_GROUP_GAP = 8
  _DEFAULT_BAR_GAP = 4

  def __init__(self, bar_thickness=None,
               bar_gap=_DEFAULT_BAR_GAP, group_gap=_DEFAULT_GROUP_GAP,
               use_fractional_gap_spacing=False):
    """Create a new BarChartStyle.

    Args:
      bar_thickness: The thickness of a bar, in pixels. Set this to None if
        you want the bar thickness to be auto-calculated (this is the default
        behaviour).
      bar_gap: The gap between bars, in pixels. Default is 4.
      group_gap: The gap between groups of bars, in pixels. Default is 8.
    """
    self.bar_thickness = bar_thickness
    self.bar_gap = bar_gap
    self.group_gap = group_gap
    self.use_fractional_gap_spacing = use_fractional_gap_spacing


class BarStyle(BarChartStyle):

  def __init__(self, *args, **kwargs):
    warnings.warn('BarStyle is deprecated. Use BarChartStyle.',
                  DeprecationWarning, stacklevel=2)
    super(BarStyle, self).__init__(*args, **kwargs)


class BarChart(common.BaseChart):
  """Represents a bar chart.

  Object attributes:
    vertical: if True, the bars will be vertical. Default is True.
    stacked: if True, the bars will be stacked. Default is False.
    style: The BarChartStyle for all bars on this chart, specifying bar
      thickness and gaps between bars.
  """

  def __init__(self, points=None):
    """Constructor for BarChart objects."""
    super(BarChart, self).__init__()
    if points is not None:
      self.AddBars(points)
    self.vertical = True
    self.stacked = False
    self.style = BarChartStyle(None, None, None)  # full auto

  def AddBars(self, points, label=None, color=None):
    """Add a series of bars to the chart.

      points: List of y-values for the bars in this series
      label:  Name of the series (used in the legend)
      color:  Hex string, like '00ff00' for green

    This is a convenience method which constructs & appends the DataSeries for
    you.
    """
    if label is not None and util._IsColor(label):
      warnings.warn('Your code may be broken! '
                    'Label is a hex triplet. Maybe it is a color? The '
                    'old argument order (color before label) is deprecated.',
                    DeprecationWarning, stacklevel=2)
    style = BarsStyle(color)
    series = common.DataSeries(points, label=label, style=style)
    self.data.append(series)
    return series

  def GetDependentAxes(self):
    """Get the dependent axes, which depend on orientation."""
    if self.vertical:
      return (self._axes[common.AxisPosition.LEFT] +
              self._axes[common.AxisPosition.RIGHT])
    else:
      return (self._axes[common.AxisPosition.TOP] +
              self._axes[common.AxisPosition.BOTTOM])

  def GetIndependentAxes(self):
    """Get the independent axes, which depend on orientation."""
    if self.vertical:
      return (self._axes[common.AxisPosition.TOP] +
              self._axes[common.AxisPosition.BOTTOM])
    else:
      return (self._axes[common.AxisPosition.LEFT] +
              self._axes[common.AxisPosition.RIGHT])

  def GetDependentAxis(self):
    """Get the main dependent axis, which depends on orientation."""
    if self.vertical:
      return self.left
    else:
      return self.bottom

  def GetIndependentAxis(self):
    """Get the main independent axis, which depends on orientation."""
    if self.vertical:
      return self.bottom
    else:
      return self.left

  def GetMinMaxValues(self):
    """Get the largest & smallest bar values as (min_value, max_value)."""
    if not self.stacked:
      return super(BarChart, self).GetMinMaxValues()

    if not self.data:
      return None, None  # No data, nothing to do.

    num_bars = max(len(series.data) for series in self.data)
    positives = [0 for i in range(0, num_bars)]
    negatives = list(positives)
    for series in self.data:
      for i, point in enumerate(series.data):
        if point:
          if point > 0:
            positives[i] += point
          else:
            negatives[i] += point

    min_value = min(min(positives), min(negatives))
    max_value = max(max(positives), max(negatives))
    return min_value, max_value
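
# A minimal usage sketch (assumes only the public API shown above plus the
# graphy.common.BaseChart internals it relies on): build a stacked BarChart
# and query its stacked value range.
if __name__ == '__main__':
  chart = BarChart()
  chart.AddBars([10, -5, 20], label='series A', color='00ff00')
  chart.AddBars([5, 15, -10], label='series B', color='0000ff')
  chart.stacked = True
  # per-bar positive sums are (15, 15, 20); negative sums are (0, -5, -10)
  print(chart.GetMinMaxValues())  # -> (-10, 20)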
/FoilMesh-0.0.8.tar.gz/FoilMesh-0.0.8/foilmesh/meshio/ansys/_ansys.py
import re

import numpy as np

from ..__about__ import __version__
from .._common import warn
from .._exceptions import ReadError, WriteError
from .._files import open_file
from .._helpers import register_format
from .._mesh import Mesh


def _skip_to(f, char):
    c = None
    while c != char:
        c = f.read(1).decode()


def _skip_close(f, num_open_brackets):
    while num_open_brackets > 0:
        char = f.read(1).decode()
        if char == "(":
            num_open_brackets += 1
        elif char == ")":
            num_open_brackets -= 1


def _read_points(f, line, first_point_index_overall, last_point_index):
    # If the line is self-contained, it is merely a declaration
    # of the total number of points.
    if line.count("(") == line.count(")"):
        return None, None, None

    # (3010 (zone-id first-index last-index type ND)
    out = re.match("\\s*\\(\\s*(|20|30)10\\s*\\(([^\\)]*)\\).*", line)
    assert out is not None
    a = [int(num, 16) for num in out.group(2).split()]

    if len(a) <= 4:
        raise ReadError()

    first_point_index = a[1]
    # store the very first point index
    if first_point_index_overall is None:
        first_point_index_overall = first_point_index
    # make sure that point arrays are subsequent
    if last_point_index is not None:
        if last_point_index + 1 != first_point_index:
            raise ReadError()
    last_point_index = a[2]
    num_points = last_point_index - first_point_index + 1
    dim = a[4]

    # Skip ahead to the byte that opens the data block (might
    # be the current line already).
    last_char = line.strip()[-1]
    while last_char != "(":
        last_char = f.read(1).decode()

    if out.group(1) == "":
        # ASCII data
        pts = np.empty((num_points, dim))
        for k in range(num_points):
            # skip ahead to the first line with data
            line = ""
            while line.strip() == "":
                line = f.readline().decode()
            dat = line.split()
            if len(dat) != dim:
                raise ReadError()
            for d in range(dim):
                pts[k][d] = float(dat[d])
    else:
        # binary data
        if out.group(1) == "20":
            dtype = np.float32
        else:
            if out.group(1) != "30":
                raise ReadError(f"Expected keys '20' or '30', got {out.group(1)}.")
            dtype = np.float64
        # read point data
        pts = np.fromfile(f, count=dim * num_points, dtype=dtype).reshape(
            (num_points, dim)
        )

    # make sure that the data set is properly closed
    _skip_close(f, 2)
    return pts, first_point_index_overall, last_point_index


def _read_cells(f, line):
    # If the line is self-contained, it is merely a declaration of the total number of
    # points.
    if line.count("(") == line.count(")"):
        return None, None

    out = re.match("\\s*\\(\\s*(|20|30)12\\s*\\(([^\\)]+)\\).*", line)
    assert out is not None
    a = [int(num, 16) for num in out.group(2).split()]
    if len(a) <= 4:
        raise ReadError()
    first_index = a[1]
    last_index = a[2]
    num_cells = last_index - first_index + 1
    zone_type = a[3]
    element_type = a[4]

    if zone_type == 0:
        # dead zone
        return None, None

    key, num_nodes_per_cell = {
        0: ("mixed", None),
        1: ("triangle", 3),
        2: ("tetra", 4),
        3: ("quad", 4),
        4: ("hexahedron", 8),
        5: ("pyramid", 5),
        6: ("wedge", 6),
    }[element_type]

    # Skip to the opening `(` and make sure that there's no non-whitespace character
    # between the last closing bracket and the `(`.
    if line.strip()[-1] != "(":
        c = None
        while True:
            c = f.read(1).decode()
            if c == "(":
                break
            if not re.match("\\s", c):
                # Found a non-whitespace character before `(`.
                # Assume this is just a declaration line then and
                # skip to the closing bracket.
                _skip_to(f, ")")
                return None, None

    if key == "mixed":
        # From
        # <https://www.afs.enea.it/project/neptunius/docs/fluent/html/ug/node1470.htm>:
        #
        # > If a zone is of mixed type (element-type=0), it will have a body that
        # > lists the element type of each cell.
        #
        # No idea where the information other than the element types is stored
        # though. Skip for now.
        data = None
    else:
        # read cell data
        if out.group(1) == "":
            # ASCII cells
            data = np.empty((num_cells, num_nodes_per_cell), dtype=int)
            for k in range(num_cells):
                line = f.readline().decode()
                dat = line.split()
                if len(dat) != num_nodes_per_cell:
                    raise ReadError()
                data[k] = [int(d, 16) for d in dat]
        else:
            if key == "mixed":
                raise ReadError("Cannot read mixed cells in binary mode yet")
            # binary cells
            if out.group(1) == "20":
                dtype = np.int32
            else:
                if out.group(1) != "30":
                    raise ReadError(f"Expected keys '20' or '30', got {out.group(1)}.")
                dtype = np.int64
            shape = (num_cells, num_nodes_per_cell)
            count = shape[0] * shape[1]
            data = np.fromfile(f, count=count, dtype=dtype).reshape(shape)

    # make sure that the data set is properly closed
    _skip_close(f, 2)
    return key, data


def _read_faces(f, line):
    # faces
    # (13 (zone-id first-index last-index type element-type))

    # If the line is self-contained, it is merely a declaration of
    # the total number of points.
    if line.count("(") == line.count(")"):
        return {}

    out = re.match("\\s*\\(\\s*(|20|30)13\\s*\\(([^\\)]+)\\).*", line)
    assert out is not None
    a = [int(num, 16) for num in out.group(2).split()]

    if len(a) <= 4:
        raise ReadError()
    first_index = a[1]
    last_index = a[2]
    num_cells = last_index - first_index + 1
    element_type = a[4]

    element_type_to_key_num_nodes = {
        0: ("mixed", None),
        2: ("line", 2),
        3: ("triangle", 3),
        4: ("quad", 4),
    }

    key, num_nodes_per_cell = element_type_to_key_num_nodes[element_type]

    # Skip ahead to the line that opens the data block (might be
    # the current line already).
    if line.strip()[-1] != "(":
        _skip_to(f, "(")

    data = {}
    if out.group(1) == "":
        # ASCII
        if key == "mixed":
            # From
            # <https://www.afs.enea.it/project/neptunius/docs/fluent/html/ug/node1471.htm>:
            #
            # > If the face zone is of mixed type (element-type = > 0), the body of the
            # > section will include the face type and will appear as follows
            # >
            # > type v0 v1 v2 c0 c1
            # >
            for k in range(num_cells):
                line = ""
                while line.strip() == "":
                    line = f.readline().decode()
                dat = line.split()
                type_index = int(dat[0], 16)
                if type_index == 0:
                    raise ReadError()
                type_string, num_nodes_per_cell = element_type_to_key_num_nodes[
                    type_index
                ]
                if len(dat) != num_nodes_per_cell + 3:
                    raise ReadError()

                if type_string not in data:
                    data[type_string] = []

                data[type_string].append(
                    [int(d, 16) for d in dat[1 : num_nodes_per_cell + 1]]
                )
            data = {key: np.array(data[key]) for key in data}
        else:
            # read cell data
            data = np.empty((num_cells, num_nodes_per_cell), dtype=int)
            for k in range(num_cells):
                line = f.readline().decode()
                dat = line.split()
                # The body of a regular face section contains the grid connectivity,
                # and each line appears as follows:
                #   n0 n1 n2 cr cl
                # where n* are the defining nodes (vertices) of the face, and c* are
                # the adjacent cells.
                if len(dat) != num_nodes_per_cell + 2:
                    raise ReadError()
                data[k] = [int(d, 16) for d in dat[:num_nodes_per_cell]]
            data = {key: data}
    else:
        # binary
        if out.group(1) == "20":
            dtype = np.int32
        else:
            if out.group(1) != "30":
                raise ReadError(f"Expected keys '20' or '30', got {out.group(1)}.")
            dtype = np.int64

        if key == "mixed":
            raise ReadError("Mixed element type for binary faces not supported yet")

        # Read cell data.
        # The body of a regular face section contains the grid
        # connectivity, and each line appears as follows:
        #   n0 n1 n2 cr cl
        # where n* are the defining nodes (vertices) of the face,
        # and c* are the adjacent cells.
        shape = (num_cells, num_nodes_per_cell + 2)
        count = shape[0] * shape[1]
        data = np.fromfile(f, count=count, dtype=dtype).reshape(shape)
        # Cut off the adjacent cell data.
        data = data[:, :num_nodes_per_cell]
        data = {key: data}

    # make sure that the data set is properly closed
    _skip_close(f, 2)
    return data


def read(filename):  # noqa: C901
    # Initialize the optional data fields
    field_data = {}
    cell_data = {}
    point_data = {}

    points = []
    cells = []

    first_point_index_overall = None
    last_point_index = None

    # read file in binary mode since some data might be binary
    with open_file(filename, "rb") as f:
        while True:
            line = f.readline().decode()
            if not line:
                break

            if line.strip() == "":
                continue

            # expect the line to have the form
            #  (<index> [...]
            out = re.match("\\s*\\(\\s*([0-9]+).*", line)
            if not out:
                raise ReadError()
            index = out.group(1)

            if index == "0":
                # Comment.
                _skip_close(f, line.count("(") - line.count(")"))
            elif index == "1":
                # header
                # (1 "<text>")
                _skip_close(f, line.count("(") - line.count(")"))
            elif index == "2":
                # dimensionality
                # (2 3)
                _skip_close(f, line.count("(") - line.count(")"))
            elif re.match("(|20|30)10", index):
                # points
                pts, first_point_index_overall, last_point_index = _read_points(
                    f, line, first_point_index_overall, last_point_index
                )

                if pts is not None:
                    points.append(pts)

            elif re.match("(|20|30)12", index):
                # cells
                # (2012 (zone-id first-index last-index type element-type))
                key, data = _read_cells(f, line)
                if data is not None:
                    cells.append((key, data))

            elif re.match("(|20|30)13", index):
                data = _read_faces(f, line)

                for key in data:
                    cells.append((key, data[key]))

            elif index == "39":
                warn("Zone specification not supported yet. Skipping.")
                _skip_close(f, line.count("(") - line.count(")"))

            elif index == "45":
                # (45 (2 fluid solid)())
                obj = re.match("\\(45 \\([0-9]+ ([\\S]+) ([\\S]+)\\)\\(\\)\\)", line)
                if obj:
                    warn(
                        f"Zone specification not supported yet ({obj.group(1)}, {obj.group(2)}). "
                        + "Skipping.",
                    )
                else:
                    warn("Zone specification not supported yet.")

            else:
                warn(f"Unknown index {index}. Skipping.")
                # Skipping ahead to the next line with two closing brackets.
                _skip_close(f, line.count("(") - line.count(")"))

    points = np.concatenate(points)

    # Gauge the cells with the first point_index.
    for k, c in enumerate(cells):
        cells[k] = (c[0], c[1] - first_point_index_overall)

    return Mesh(
        points, cells, point_data=point_data, cell_data=cell_data, field_data=field_data
    )


def write(filename, mesh, binary=True):
    with open_file(filename, "wb") as fh:
        # header
        fh.write(f'(1 "meshio {__version__}")\n'.encode())

        # dimension
        num_points, dim = mesh.points.shape
        if dim not in [2, 3]:
            raise WriteError(f"Can only write dimension 2, 3, got {dim}.")
        fh.write((f"(2 {dim})\n").encode())

        # total number of nodes
        first_node_index = 1
        fh.write((f"(10 (0 {first_node_index:x} {num_points:x} 0))\n").encode())

        # total number of cells
        total_num_cells = sum(len(c) for c in mesh.cells)
        fh.write((f"(12 (0 1 {total_num_cells:x} 0))\n").encode())

        # Write nodes
        key = "3010" if binary else "10"
        fh.write(
            f"({key} (1 {first_node_index:x} {num_points:x} 1 {dim:x})(\n".encode()
        )
        if binary:
            mesh.points.tofile(fh)
            fh.write(b"\n)")
            fh.write(b"End of Binary Section 3010)\n")
        else:
            np.savetxt(fh, mesh.points, fmt="%.16e")
            fh.write(b"))\n")

        # Write cells
        meshio_to_ansys_type = {
            # "mixed": 0,
            "triangle": 1,
            "tetra": 2,
            "quad": 3,
            "hexahedron": 4,
            "pyramid": 5,
            "wedge": 6,
            # "polyhedral": 7,
        }
        first_index = 0
        binary_dtypes = {
            # np.int16 is not allowed
            np.dtype("int32"): "2012",
            np.dtype("int64"): "3012",
        }
        for cell_block in mesh.cells:
            cell_type = cell_block.type
            values = cell_block.data
            key = binary_dtypes[values.dtype] if binary else "12"
            last_index = first_index + len(values) - 1
            try:
                ansys_cell_type = meshio_to_ansys_type[cell_type]
            except KeyError:
                legal_keys = ", ".join(meshio_to_ansys_type.keys())
                raise KeyError(
                    f"Illegal ANSYS cell type '{cell_type}'. (legal: {legal_keys})"
                )
            fh.write(
                f"({key} (1 {first_index:x} {last_index:x} 1 {ansys_cell_type})(\n".encode()
            )
            if binary:
                (values + first_node_index).tofile(fh)
                fh.write(b"\n)")
                fh.write((f"End of Binary Section {key})\n").encode())
            else:
                np.savetxt(fh, values + first_node_index, fmt="%x")
                fh.write(b"))\n")
            first_index = last_index + 1


register_format("ansys", [".msh"], read, {"ansys": write})
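
# A minimal round-trip sketch. The module above uses relative imports, so it has
# to be exercised through the package; it is an assumption here that this
# vendored copy mirrors meshio's public API (foilmesh.meshio re-exporting Mesh
# plus the read/write helpers, with extra kwargs forwarded to the writer):
#
#   import numpy as np
#   import foilmesh.meshio as meshio
#
#   points = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
#   cells = [("triangle", np.array([[0, 1, 2], [1, 3, 2]]))]
#   meshio.write("out.msh", meshio.Mesh(points, cells),
#                file_format="ansys", binary=False)
#   mesh = meshio.read("out.msh")
#   print(mesh.points.shape)  # (4, 2)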
/FuzzyClassificator-1.3.84-py3-none-any.whl/pybrain/rl/learners/valuebased/sarsa.py
__author__ = 'Thomas Rueckstiess, [email protected]'

from pybrain.rl.learners.valuebased.valuebased import ValueBasedLearner


class SARSA(ValueBasedLearner):
    """ State-Action-Reward-State-Action (SARSA) algorithm.

        In batchMode, the algorithm goes through all the samples in the
        history and performs an update on each of them. If batchMode is
        False, only the last data sample is considered. The user has to
        make sure to keep the dataset consistent with the agent's history.
    """

    offPolicy = False
    batchMode = True

    def __init__(self, alpha=0.5, gamma=0.99):
        ValueBasedLearner.__init__(self)

        self.alpha = alpha
        self.gamma = gamma

        self.laststate = None
        self.lastaction = None

    def learn(self):
        if self.batchMode:
            samples = self.dataset
        else:
            samples = [[self.dataset.getSample()]]

        for seq in samples:
            # information from the previous episode (sequence)
            # should not influence the training on this episode
            self.laststate = None
            self.lastaction = None
            self.lastreward = None

            for state, action, reward in seq:

                state = int(state)
                action = int(action)

                # first learning call has no last state: skip
                if self.laststate is None:
                    self.lastaction = action
                    self.laststate = state
                    self.lastreward = reward
                    continue

                qvalue = self.module.getValue(self.laststate, self.lastaction)
                qnext = self.module.getValue(state, action)
                self.module.updateValue(
                    self.laststate, self.lastaction,
                    qvalue + self.alpha * (self.lastreward + self.gamma * qnext - qvalue))

                # move state to oldstate
                self.laststate = state
                self.lastaction = action
                self.lastreward = reward
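
# A worked instance of the tabular update performed in learn() (all values
# assumed purely for illustration): with alpha=0.5, gamma=0.99, Q(s,a)=1.0,
# r=2.0 and Q(s',a')=0.5, the new estimate is
#
#     Q(s,a) <- Q(s,a) + alpha * (r + gamma * Q(s',a') - Q(s,a))
#             = 1.0 + 0.5 * (2.0 + 0.99 * 0.5 - 1.0)
#             = 1.7475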
/BacGenomePipeline-1.0.9.tar.gz/BacGenomePipeline-1.0.9/README.txt
BacGenomePipeline

Complete Bacterial Genome Assembly and Annotation Pipeline

Program developed by Stephen Fordham

General Description

BacGenomePipeline is a complete convenience bacterial genome assembly pipeline. Assembled and annotated bacterial genomes can be created with only raw reads as input! BacGenomePipeline can accept either fastq or gzipped fastq files. Relax and grab a coffee while BacGenomePipeline does the genomic heavy lifting.

This pipeline filters raw reads to keep the best 500 Mb of reads. The filtering process also places weight on read quality, to ensure that small, high-quality reads are not discarded. This is considered vital to aid the recovery of small plasmids present within bacterial strains. Optionally, the user can run NanoStat to assess read quality metrics.

The best reads are then assembled using the Flye genome assembler with settings adjusted to help the recovery of plasmids with an imbalanced distribution. Optionally, the assembly is then polished with one round of medaka-consensus polishing. The polished assembly is annotated using staramr, which scans bacterial genome contigs against the ResFinder, PointFinder, and PlasmidFinder databases (used by the ResFinder webservice and other webservices offered by the Center for Genomic Epidemiology) and abricate, and compiles a summary report of detected antimicrobial resistance and virulence genes.

Additionally, BacGenomePipeline can be run in 4 modes. These modes include:

Running the entire pipeline workflow                                     --pipeline
Running the pipeline using reduced memory, by setting parameters for
genome size and coverage for the initial disjointigs                    --pipe_red_mem
Running a genome-only assembly                                           --assembly
Running the annotation step on a pre-existing genome assembly in
FASTA format                                                             --annotation

For usage instructions, run:

BacGenomePipeline --help

Currently BacGenomePipeline has been tested and runs on Linux OS.

To run BacGenomePipeline, you must also install the following programs by running the following commands:

conda install -c bioconda filtlong==0.2.0
conda install -c bioconda flye==2.8.1
conda install -c bioconda abricate==1.0.1
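Example invocations (a sketch only; exactly how the input read or assembly file is supplied is an assumption here, so check BacGenomePipeline --help for the real argument syntax):

BacGenomePipeline --pipeline raw_reads.fastq.gz
BacGenomePipeline --assembly raw_reads.fastq
BacGenomePipeline --annotation existing_assembly.fasta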
/CRIkit2-0.4.4.tar.gz/CRIkit2-0.4.4/docs/source/crikit.io.lazy5.ui.rst
crikit.io.lazy5.ui package
==========================

Submodules
----------

crikit.io.lazy5.ui.QtHdfLoad module
-----------------------------------

.. automodule:: crikit.io.lazy5.ui.QtHdfLoad
   :members:
   :undoc-members:
   :show-inheritance:

crikit.io.lazy5.ui.qt\_HdfLoad module
-------------------------------------

.. automodule:: crikit.io.lazy5.ui.qt_HdfLoad
   :members:
   :undoc-members:
   :show-inheritance:

Module contents
---------------

.. automodule:: crikit.io.lazy5.ui
   :members:
   :undoc-members:
   :show-inheritance:
/Django-Pizza-16.10.1.tar.gz/Django-Pizza-16.10.1/pizza/kitchen_sink/static/ks/ckeditor/plugins/a11yhelp/dialogs/lang/bg.js
/* Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or http://ckeditor.com/license */ CKEDITOR.plugins.setLang("a11yhelp","bg",{title:"Accessibility Instructions",contents:"Help Contents. To close this dialog press ESC.",legend:[{name:"Общо",items:[{name:"Editor Toolbar",legend:"Press ${toolbarFocus} to navigate to the toolbar. Move to the next and previous toolbar group with TAB and SHIFT-TAB. Move to the next and previous toolbar button with RIGHT ARROW or LEFT ARROW. Press SPACE or ENTER to activate the toolbar button."},{name:"Editor Dialog",legend:"Inside a dialog, press TAB to navigate to next dialog field, press SHIFT + TAB to move to previous field, press ENTER to submit dialog, press ESC to cancel dialog. For dialogs that have multiple tab pages, press ALT + F10 to navigate to tab-list. Then move to next tab with TAB OR RIGTH ARROW. Move to previous tab with SHIFT + TAB or LEFT ARROW. Press SPACE or ENTER to select the tab page."}, {name:"Editor Context Menu",legend:"Press ${contextMenu} or APPLICATION KEY to open context-menu. Then move to next menu option with TAB or DOWN ARROW. Move to previous option with SHIFT+TAB or UP ARROW. Press SPACE or ENTER to select the menu option. Open sub-menu of current option with SPACE or ENTER or RIGHT ARROW. Go back to parent menu item with ESC or LEFT ARROW. Close context menu with ESC."},{name:"Editor List Box",legend:"Inside a list-box, move to next list item with TAB OR DOWN ARROW. Move to previous list item with SHIFT + TAB or UP ARROW. Press SPACE or ENTER to select the list option. Press ESC to close the list-box."}, {name:"Editor Element Path Bar",legend:"Press ${elementsPathFocus} to navigate to the elements path bar. Move to next element button with TAB or RIGHT ARROW. Move to previous button with SHIFT+TAB or LEFT ARROW. Press SPACE or ENTER to select the element in editor."}]},{name:"Commands",items:[{name:" Undo command",legend:"Press ${undo}"},{name:" Redo command",legend:"Press ${redo}"},{name:" Bold command",legend:"Press ${bold}"},{name:" Italic command",legend:"Press ${italic}"},{name:" Underline command", legend:"Press ${underline}"},{name:" Link command",legend:"Press ${link}"},{name:" Toolbar Collapse command",legend:"Press ${toolbarCollapse}"},{name:" Access previous focus space command",legend:"Press ${accessPreviousSpace} to access the closest unreachable focus space before the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."},{name:" Access next focus space command",legend:"Press ${accessNextSpace} to access the closest unreachable focus space after the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."}, {name:" Accessibility Help",legend:"Press ${a11yHelp}"}]}]});
/Flask-AutoFixture-0.2.3.tar.gz/Flask-AutoFixture-0.2.3/README.rst
*****************
Flask-AutoFixture
*****************

|version| |license| |travis|

Flask-AutoFixture is an extension that automatically records JSON fixtures right from the test suite by hooking into the request callbacks of your Flask application.

Installation
============

::

    pip install flask-autofixture

Quickstart
==========

To get started, simply wrap your ``Flask`` application under test in the setup method of your testing framework like this:

.. code-block:: python

    import unittest
    from app import create_app
    from flask.ext.autofixture import AutoFixture

    autofixture = AutoFixture()

    class APITestCase(unittest.TestCase):
        def setUp(self):
            self.app = create_app('testing')
            # Register the app for recording
            autofixture.init_app(self.app)
            self.app_context = self.app.app_context()
            self.app_context.push()
            self.client = self.app.test_client()

        def tearDown(self):
            self.app_context.pop()

Instead of passing the Flask instance directly to the ``AutoFixture`` constructor, you can use ``init_app`` to initialize Flask afterwards. If you are using a factory to create your Flask instance or want to configure the recording of your fixtures (see below), this is the recommended approach.

Then simply run your test suite. Fixtures for every request executed by the ``test_client`` will magically appear in your instance folder.

Configuration
=============

Recording
---------

Flask-AutoFixture provides parametrized decorators to configure fixture generation on individual test methods.

To provide a descriptive name for the generated fixture, simply annotate the test method with the ``record`` decorator like so:

.. code-block:: python

    from app import create_app
    from flask.ext.autofixture import AutoFixture

    app = create_app('testing')
    autofixture = AutoFixture(app)

    @autofixture.record(request_name="missing_email_request",
                        response_name="missing_email_response")
    def test_missing_email_returns_bad_request(self):
        response = self.client.post(
            url_for('api.new_user'),
            data=json.dumps({'name': 'john'}))
        self.assertTrue(response.status_code == 400)

By default, ``AutoFixture`` will record all requests and responses automatically. If you want to record requests only in a specific set of test methods, you can disable this behaviour in the ``AutoFixture`` constructor by means of the ``explicit_recording`` argument:

.. code-block:: python

    from app import create_app
    from flask.ext.autofixture import AutoFixture

    app = create_app('testing')
    autofixture = AutoFixture(app, explicit_recording=True)

If ``explicit_recording`` is enabled, you must declare individual requests to be recorded using the ``record`` decorator. Alternatively, if a test method performs multiple requests, you can apply the ``record_all`` decorator to avoid nested ``record`` decorators, as sketched below.
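
A sketch of such a ``record_all`` usage, assuming the decorator takes no arguments (its signature is not documented here) and reusing the route from the earlier example:

.. code-block:: python

    @autofixture.record_all
    def test_create_two_users(self):
        self.client.post(url_for('api.new_user'),
                         data=json.dumps({'name': 'john'}))
        self.client.post(url_for('api.new_user'),
                         data=json.dumps({'name': 'jane'}))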
Example directory structure: /autofixture (the name of the extension) /app (the name of the app) /GET (the request method) /api-posts (the request path) response.json /POST /api-posts request.json (the request payload) response.json (the response data) request_2.json response_2.json """ (1) http://flask.pocoo.org/docs/0.10/config/#instance-folders Roadmap ======= - Support further mime types - Support request context manager (trigger preprocess_request) - Get listed in the Flask extension registry .. |version| image:: http://img.shields.io/pypi/v/flask-autofixture.svg?style=flat :target: https://pypi.python.org/pypi/Flask-AutoFixture/ .. |license| image:: http://img.shields.io/pypi/l/flask-autofixture.svg?style=flat :target: https://pypi.python.org/pypi/Flask-AutoFixture/ .. |travis| image:: https://api.travis-ci.org/janukobytsch/flask-autofixture.svg?branch=master :target: https://travis-ci.org/janukobytsch/flask-autofixture
/MongoAlchemy-0.21.tar.gz/MongoAlchemy-0.21/mongoalchemy/fields/sequence.py
from __future__ import print_function
from mongoalchemy.py3compat import *

from mongoalchemy.fields.base import *


class SequenceField(Field):
    ''' Base class for Fields which are an iterable collection of objects in
        which every child element is of the same type'''

    is_sequence_field = True
    valid_modifiers = LIST_MODIFIERS

    def __init__(self, item_type, min_capacity=None, max_capacity=None,
                 default_empty=False, **kwargs):
        ''' :param item_type: :class:`Field` instance used for validation and
                (un)wrapping
            :param min_capacity: minimum number of items contained in values
            :param max_capacity: maximum number of items contained in values
            :param default_empty: the default is an empty sequence.
        '''
        super(SequenceField, self).__init__(**kwargs)
        self.item_type = item_type
        self.min = min_capacity
        self.max = max_capacity
        self.default_empty = default_empty
        if not isinstance(item_type, Field):
            raise BadFieldSpecification("List item_type is not a field!")

    def schema_json(self):
        super_schema = super(SequenceField, self).schema_json()
        return dict(item_type=self.item_type.schema_json(),
                    min_capacity=self.min,
                    max_capacity=self.max,
                    default_empty=self.default_empty, **super_schema)

    @property
    def has_subfields(self):
        ''' Returns True if the sequence's value type has subfields. '''
        return self.item_type.has_subfields

    @property
    def has_autoload(self):
        return self.item_type.has_autoload

    def set_parent_on_subtypes(self, parent):
        self.item_type._set_parent(parent)

    def subfields(self):
        ''' Returns the names of the value type's sub-fields'''
        return self.item_type.subfields()

    def _dereference(self, session, ref, allow_none=False):
        return self.item_type.dereference(session, ref, allow_none=allow_none)

    def wrap_value(self, value):
        ''' A function used to wrap a value used in a comparison.  It will
            first try to wrap as the sequence's sub-type, and then as the
            sequence itself'''
        try:
            return self.item_type.wrap_value(value)
        except BadValueException:
            pass
        try:
            return self.wrap(value)
        except BadValueException:
            pass
        self._fail_validation(value, 'Could not wrap value as the correct type. '
                              'Tried %s and %s' % (self.item_type, self))

    def child_type(self):
        ''' Returns the :class:`Field` instance used for items in the
            sequence'''
        return self.item_type

    def _validate_child_wrap(self, value):
        self.item_type.validate_wrap(value)

    def _validate_child_unwrap(self, value, session=None):
        if self.has_autoload:
            self.item_type.validate_unwrap(value, session=session)
        else:
            self.item_type.validate_unwrap(value)

    def _length_valid(self, value):
        if self.min is not None and len(value) < self.min:
            self._fail_validation(value, 'Value has too few elements')
        if self.max is not None and len(value) > self.max:
            self._fail_validation(value, 'Value has too many elements')

    def validate_wrap(self, value):
        ''' Checks that the type of ``value`` is correct as well as validating
            the elements of value'''
        self._validate_wrap_type(value)
        self._length_valid(value)
        for v in value:
            self._validate_child_wrap(v)

    def validate_unwrap(self, value, session=None):
        ''' Checks that the type of ``value`` is correct as well as validating
            the elements of value'''
        self._validate_unwrap_type(value)
        self._length_valid(value)
        for v in value:
            if self.has_autoload:
                self._validate_child_unwrap(v, session=session)
            else:
                self._validate_child_unwrap(v)

    def set_value(self, instance, value):
        super(SequenceField, self).set_value(instance, value)
        # TODO:2012
        # value_obj = instance._values[self._name]
        # if from_db:
        #     # loaded from db, stash it
        #     if 'orig_values' not in instance.__dict__:
        #         instance.__dict__['orig_values'] = {}
        #     instance.__dict__['orig_values'][self._name] = deepcopy(value)

    def dirty_ops(self, instance):
        obj_value = instance._values[self._name]
        ops = super(SequenceField, self).dirty_ops(instance)
        if len(ops) == 0 and obj_value.set:
            ops = {'$set': {
                self.db_field: self.wrap(obj_value.value)
            }}
        return ops


class ListField(SequenceField):
    ''' Field representing a python list. '''

    def __init__(self, item_type, **kwargs):
        ''' :param item_type: :class:`Field` instance used for validation and
                (un)wrapping
            :param min_capacity: minimum number of items contained in values
            :param max_capacity: maximum number of items contained in values
            :param default_empty: the default is an empty sequence.
        '''
        if kwargs.get('default_empty'):
            kwargs['default_f'] = list
        super(ListField, self).__init__(item_type, **kwargs)

    # def set_default(self, value):
    #     return super(ListField, self).set_default(value)
    # def get_default(self):
    #     if self.default_empty:
    #         return []
    #     return super(ListField, self).get_default()
    # default = property(get_default, set_default)

    def rel(self, ignore_missing=False):
        from mongoalchemy.fields import RefBase
        assert isinstance(self.item_type, RefBase)
        return ListProxy(self, ignore_missing=ignore_missing)

    def _validate_wrap_type(self, value):
        import types
        if not any([isinstance(value, list), isinstance(value, tuple),
                    isinstance(value, types.GeneratorType)]):
            self._fail_validation_type(value, list, tuple)
    _validate_unwrap_type = _validate_wrap_type

    def wrap(self, value):
        ''' Wraps the elements of ``value`` using ``ListField.item_type`` and
            returns them in a list'''
        self.validate_wrap(value)
        return [self.item_type.wrap(v) for v in value]

    def unwrap(self, value, session=None):
        ''' Unwraps the elements of ``value`` using ``ListField.item_type`` and
            returns them in a list'''
        kwargs = {}
        if self.has_autoload:
            kwargs['session'] = session
        self.validate_unwrap(value, **kwargs)
        return [self.item_type.unwrap(v, **kwargs) for v in value]


class SetField(SequenceField):
    ''' Field representing a python set. '''

    def __init__(self, item_type, **kwargs):
        ''' :param item_type: :class:`Field` instance used for validation and
                (un)wrapping
            :param min_capacity: minimum number of items contained in values
            :param max_capacity: maximum number of items contained in values
            :param default_empty: the default is an empty sequence.
        '''
        if kwargs.get('default_empty'):
            kwargs['default_f'] = set
        super(SetField, self).__init__(item_type, **kwargs)

    # def set_default(self, value):
    #     return super(SetField, self).set_default(value)
    # def get_default(self):
    #     if self.default_empty:
    #         return set()
    #     return super(SetField, self).get_default()
    # default = property(get_default, set_default)

    def rel(self, ignore_missing=False):
        return ListProxy(self, ignore_missing=ignore_missing)

    def _validate_wrap_type(self, value):
        if not isinstance(value, set):
            self._fail_validation_type(value, set)

    def _validate_unwrap_type(self, value):
        if not isinstance(value, list):
            self._fail_validation_type(value, list)

    def wrap(self, value):
        ''' Wraps the elements of ``value`` using ``SetField.item_type`` and
            returns them in a list'''
        self.validate_wrap(value)
        return [self.item_type.wrap(v) for v in value]

    def unwrap(self, value, session=None):
        ''' Unwraps the elements of ``value`` using ``SetField.item_type`` and
            returns them in a set'''
        self.validate_unwrap(value)
        return set([self.item_type.unwrap(v, session=session) for v in value])


class ListProxy(object):
    def __init__(self, field, ignore_missing=False):
        self.field = field
        self.ignore_missing = ignore_missing

    def __get__(self, instance, owner):
        if instance is None:
            return getattr(owner, self.field._name)
        session = instance._get_session()

        def iterator():
            for v in getattr(instance, self.field._name):
                if v is None:
                    yield v
                    continue
                value = self.field._dereference(session, v,
                                                allow_none=self.ignore_missing)
                if value is None and self.ignore_missing:
                    continue
                yield value

        return iterator()
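
# A minimal usage sketch (Document and StringField are assumed to be importable
# from mongoalchemy as shown; they are not defined in this module):
if __name__ == '__main__':
    from mongoalchemy.document import Document
    from mongoalchemy.fields import StringField

    class BlogPost(Document):
        title = StringField()
        tags = SetField(StringField(), default_empty=True)
        comments = ListField(StringField(), max_capacity=100)

    post = BlogPost(title='hello', tags={'a', 'b'}, comments=['first!'])
    print(post.wrap())  # the document wrapped for storage in MongoDB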
/AwesomeTkinter-2021.11.8-py3-none-any.whl/awesometkinter/utils.py
import base64
import math
import platform
import tkinter as tk
from tkinter import ttk
import PIL
from PIL import Image, ImageTk, ImageColor, ImageDraw, ImageFilter
import hashlib
import io


def identify_operating_system():
    """identify current operating system

    Returns:
        (str): 'Windows', 'Linux', or 'Darwin' for mac
    """
    return platform.system()


def calc_md5(binary_data):
    return hashlib.md5(binary_data).hexdigest()


def generate_unique_name(*args):
    """get md5 encoding for any arguments that have a string representation

    Returns:
        md5 string
    """
    name = ''.join([str(x) for x in args])

    try:
        name = calc_md5(name.encode())
    except:
        pass

    return name


def invert_color(color):
    """return inverted hex color"""
    color = color_to_rgba(color)
    r, g, b, a = color
    inverted_color = rgb2hex(255 - r, 255 - g, 255 - b)
    return inverted_color


def rgb2hex(r, g, b):
    return '#{:02x}{:02x}{:02x}'.format(r, g, b)


def change_img_color(img, new_color, old_color=None):
    """Change image color

    Args:
        img: pillow image
        new_color (str): new image color, ex: 'red', '#ff00ff', (255, 0, 0), (255, 0, 0, 255)
        old_color (str): color to be replaced, if omitted, all colors will be replaced
            with new color keeping alpha channel.

    Returns:
        pillow image
    """
    # convert image to RGBA color scheme
    img = img.convert('RGBA')

    # load pixels data
    pixdata = img.load()

    # handle color
    new_color = color_to_rgba(new_color)
    old_color = color_to_rgba(old_color)

    for y in range(img.size[1]):
        for x in range(img.size[0]):
            alpha = pixdata[x, y][-1]
            if old_color:
                if pixdata[x, y] == old_color:
                    r, g, b, _ = new_color
                    pixdata[x, y] = (r, g, b, alpha)
            else:
                r, g, b, _ = new_color
                pixdata[x, y] = (r, g, b, alpha)

    return img


def resize_img(img, size, keep_aspect_ratio=True):
    """resize image using pillow

    Args:
        img (PIL.Image): pillow image object
        size (int or tuple(int, int)): width of image or tuple of (width, height)
        keep_aspect_ratio (bool): maintain aspect ratio relative to width

    Returns:
        (PIL.Image): pillow image
    """
    if isinstance(size, int):
        size = (size, size)

    # get ratio
    width, height = img.size
    requested_width = size[0]

    if keep_aspect_ratio:
        ratio = width / requested_width
        requested_height = height / ratio
    else:
        requested_height = size[1]

    size = (int(requested_width), int(requested_height))

    img = img.resize(size, resample=PIL.Image.LANCZOS)

    return img


def mix_images(background_img, foreground_img):
    """paste an image on top of another image

    Args:
        background_img: pillow image in background
        foreground_img: pillow image in foreground

    Returns:
        pillow image
    """
    background_img = background_img.convert('RGBA')
    foreground_img = foreground_img.convert('RGBA')

    img_w, img_h = foreground_img.size
    bg_w, bg_h = background_img.size
    offset = ((bg_w - img_w) // 2, (bg_h - img_h) // 2)
    background_img.paste(foreground_img, offset, mask=foreground_img)
    return background_img


def color_to_rgba(color):
    """Convert color names or hex notation to RGBA

    Args:
        color (str): color e.g. 'white' or '#333' or formats like #rgb or #rrggbb

    Returns:
        (4-tuple): tuple of format (r, g, b, a) e.g. it will return
        (255, 0, 0, 255) for solid red
    """
    if color is None:
        return None

    if isinstance(color, (tuple, list)):
        if len(color) == 3:
            r, g, b = color
            color = (r, g, b, 255)
        return color
    else:
        return ImageColor.getcolor(color, 'RGBA')


def is_dark(color):
    """rough check if color is dark or light

    Returns:
        (bool): True if color is dark, False if light
    """
    r, g, b, a = color_to_rgba(color)

    # calculate lumina, reference https://stackoverflow.com/a/1855903
    lumina = (0.299 * r + 0.587 * g + 0.114 * b) / 255

    return True if lumina < 0.6 else False


def calc_font_color(bg):
    """calculate font color based on given background

    Args:
        bg (str): background color

    Returns:
        (str): color name, e.g. "white" for dark background and "black" for light background
    """
    return 'white' if is_dark(bg) else 'black'


def calc_contrast_color(color, offset):
    """calculate a contrast color

    for darker colors will get a slightly lighter color depending on "offset",
    and for light colors will get a darker color

    Args:
        color (str): color
        offset (int): 1 to 254

    Returns:
        (str): color
    """
    r, g, b, a = color_to_rgba(color)
    if is_dark(color):
        new_color = [x + offset if x + offset <= 255 else 255 for x in (r, g, b)]
    else:
        new_color = [x - offset if x - offset >= 0 else 0 for x in (r, g, b)]

    return rgb2hex(*new_color)


def text_to_image(text, text_color, bg_color, size):
    """Not implemented"""
    pass
    # img = Image.new('RGBA', size, color_to_rgba(text_color))
    # draw = ImageDraw.Draw(img)
    # font = ImageFont.truetype(current_path + "s.ttf", size - int(0.15 * width))
    # draw.text((pad, -pad), str(num), font=font, fill=color_to_rgba(bg_color))


def create_pil_image(fp=None, color=None, size=None, b64=None):
    """create pillow Image object

    Args:
        fp: A filename (string), pathlib.Path object or a file object. The file object
            must implement read(), seek(), and tell() methods, and be opened in binary mode.
        color (str): color in tkinter format, e.g. 'red', '#3300ff', also color can be
            a tuple or a list of RGB, e.g. (255, 0, 255)
        size (int or 2-tuple(int, int)): an image required size in a (width, height) tuple
        b64 (str): base64 hex representation of an image, if "fp" is given this
            parameter will be ignored

    Returns:
        pillow image object
    """
    if not fp and b64:
        fp = io.BytesIO(base64.b64decode(b64))

    img = Image.open(fp)

    # change color
    if color:
        img = change_img_color(img, color)

    # resize
    if size:
        if isinstance(size, int):
            size = (size, size)
        img = resize_img(img, size)

    return img


def create_image(fp=None, img=None, color=None, size=None, b64=None):
    """create tkinter PhotoImage object

    it can modify size and color of original image

    Args:
        fp: A filename (string), pathlib.Path object or a file object. The file object
            must implement read(), seek(), and tell() methods, and be opened in binary mode.
        img (pillow image): if exist, fp or b64 arguments will be ignored
        color (str): color in tkinter format, e.g. 'red', '#3300ff', also color can be
            a tuple or a list of RGB, e.g. (255, 0, 255)
        size (int or 2-tuple(int, int)): an image required size in a (width, height) tuple
        b64 (str): base64 hex representation of an image, if "fp" is given this
            parameter will be ignored

    Returns:
        tkinter PhotoImage object
    """
    # create pillow image
    if not img:
        img = create_pil_image(fp, color, size, b64)

    # create tkinter images using pillow ImageTk
    img = ImageTk.PhotoImage(img)

    return img


def create_circle(size=100, thickness=None, color='black', fill=None, antialias=4, offset=0):
    """create high quality circle

    the idea to smooth the circle line is to draw a bigger size circle and then
    resize it to the requested size

    inspired from https://stackoverflow.com/a/34926008

    Args:
        size (tuple or list, or int): outer diameter of the circle or width of bounding box
        thickness (int): outer line thickness in pixels
        color (str): outer line color
        fill (str): fill color, default is a transparent fill
        antialias (int): used to enhance outer line quality and make it smoother
        offset (int): correct cut edges of circle outline

    Returns:
        PIL image: a circle on a transparent image
    """
    if isinstance(size, int):
        size = (size, size)

    fill_color = color_to_rgba(fill) or '#0000'

    requested_size = size

    # calculate thickness to be 2% of circle diameter
    thickness = thickness or max(size[0] * 2 // 100, 2)

    offset = offset or thickness // 2

    # make things bigger
    size = [x * antialias for x in requested_size]
    thickness *= antialias

    # create a transparent image with a big size
    img = Image.new(size=size, mode='RGBA', color='#0000')

    draw = ImageDraw.Draw(img)

    # draw circle with a required color
    draw.ellipse([offset, offset, size[0] - offset, size[1] - offset],
                 outline=color, fill=fill_color, width=thickness)

    img = img.filter(ImageFilter.BLUR)

    # resize image back to the requested size
    img = img.resize(requested_size, Image.LANCZOS)

    # change color again will enhance quality (weird)
    if fill:
        img = change_img_color(img, color, old_color=color)
        img = change_img_color(img, fill, old_color=fill)
    else:
        img = change_img_color(img, color)

    return img


def apply_gradient(img, gradient='vertical', colors=None, keep_transparency=True):
    """apply gradient color for pillow image

    Args:
        img: pillow image
        gradient (str): vertical, horizontal, diagonal, radial
        colors (iterable): 2-colors for the gradient
        keep_transparency (bool): keep original transparency
    """
    size = img.size
    colors = colors or ['black', 'white']
    color1 = color_to_rgba(colors[0])
    color2 = color_to_rgba(colors[1])

    # load pixels data
    pixdata = img.load()

    if gradient in ('horizontal', 'vertical', 'diagonal'):
        for x in range(0, size[0]):
            for y in range(0, size[1]):
                if gradient == 'horizontal':
                    ratio1 = x / size[1]
                elif gradient == 'vertical':
                    ratio1 = y / size[1]
                elif gradient == 'diagonal':
                    ratio1 = (y + x) / size[1]

                ratio2 = 1 - ratio1

                r = ratio1 * color2[0] + ratio2 * color1[0]
                g = ratio1 * color2[1] + ratio2 * color1[1]
                b = ratio1 * color2[2] + ratio2 * color1[2]

                if keep_transparency:
                    a = pixdata[x, y][-1]
                else:
                    a = ratio1 * color2[3] + ratio2 * color1[3]

                r, g, b, a = (int(x) for x in (r, g, b, a))

                # Place the pixel
                img.putpixel((x, y), (r, g, b, a))

    elif gradient == 'radial':
        # inspired by https://stackoverflow.com/a/30669765
        d = min(size)
        radius = d // 2

        for x in range(0, size[0]):
            for y in range(0, size[1]):
                # Find the distance to the center
                distance_to_center = math.sqrt((x - size[0] / 2) ** 2 + (y - size[1] / 2) ** 2)

                ratio1 = distance_to_center / radius
                ratio2 = 1 - ratio1

                r = ratio1 * color2[0] + ratio2 * color1[0]
                g = ratio1 * color2[1] + ratio2 * color1[1]
                b = ratio1 * color2[2] + ratio2 * color1[2]

                if keep_transparency:
                    a = pixdata[x, y][-1]
                else:
                    a = ratio1 * color2[3] + ratio2 * color1[3]

                r, g, b, a = (int(x) for x in (r, g, b, a))

                # Place the pixel
                img.putpixel((x, y), (r, g, b, a))

    return img


def scroll_with_mousewheel(widget, target=None, modifier='Shift', apply_to_children=False):
    """scroll a widget with mouse wheel

    Args:
        widget: tkinter widget
        target: scrollable tkinter widget, in case you need "widget" to catch
            mousewheel event and make another widget to scroll, useful for child
            widget in a scrollable frame
        modifier (str): Modifier to use with mousewheel to scroll horizontally,
            default is shift key
        apply_to_children (bool): bind all children

    Examples:
        scroll_with_mousewheel(my_text_widget, target='my_scrollable_frame')

        to make a scrollable canvas:
            for w in my_canvas:
                scroll_with_mousewheel(w, target=my_canvas)
    """
    def _scroll_with_mousewheel(widget):
        target_widget = target if target else widget

        def scroll_vertically(event):
            # scroll vertically ----------------------------------
            if event.num == 4 or event.delta > 0:
                target_widget.yview_scroll(-1, "unit")
            elif event.num == 5 or event.delta < 0:
                target_widget.yview_scroll(1, "unit")

            return 'break'

        # bind events for vertical scroll ----------------------------------------------
        if hasattr(target_widget, 'yview_scroll'):
            # linux
            widget.bind("<Button-4>", scroll_vertically, add='+')
            widget.bind("<Button-5>", scroll_vertically, add='+')

            # windows and mac
            widget.bind("<MouseWheel>", scroll_vertically, add='+')

        # scroll horizontally ---------------------------------------
        def scroll_horizontally(event):
            # scroll horizontally
            if event.num == 4 or event.delta > 0:
                target_widget.xview_scroll(-10, "unit")
            elif event.num == 5 or event.delta < 0:
                target_widget.xview_scroll(10, "unit")

            return 'break'

        # bind events for horizontal scroll ----------------------------------------------
        if hasattr(target_widget, 'xview_scroll'):
            # linux
            widget.bind(f"<{modifier}-Button-4>", scroll_horizontally, add='+')
            widget.bind(f"<{modifier}-Button-5>", scroll_horizontally, add='+')

            # windows and mac
            widget.bind(f"<{modifier}-MouseWheel>", scroll_horizontally, add='+')

    _scroll_with_mousewheel(widget)

    def handle_children(w):
        for child in w.winfo_children():
            _scroll_with_mousewheel(child)

            # recursive call
            if child.winfo_children():
                handle_children(child)

    if apply_to_children:
        handle_children(widget)


def unbind_mousewheel(widget):
    """unbind mousewheel for a specific widget, e.g. combobox which has
    mousewheel scroll by default"""
    # linux
    widget.unbind("<Button-4>")
    widget.unbind("<Button-5>")

    # windows and mac
    widget.unbind("<MouseWheel>")


def get_widget_attribute(widget, attr):
    """get an attribute of a widget

    Args:
        widget: tkinter widget "tk or ttk"
        attr (str): attribute or property e.g. 'background'

    Returns:
        attribute value, e.g. '#ffffff' for a background color
    """
    # if it is ttk based will get style applied, it will raise an error if the widget not a ttk
    try:
        style_name = widget.cget('style') or widget.winfo_class()
        s = ttk.Style()
        value = s.lookup(style_name, attr)
        return value
    except:
        pass

    try:
        # if it's a tk widget will use cget
        return widget.cget(attr)
    except:
        pass

    return None


def configure_widget(widget, **kwargs):
    """configure widget's attributes"""
    for k, v in kwargs.items():
        # set widget attribute
        try:
            # treat as a "tk" widget, it will raise if widget is a "ttk"
            widget.config(**{k: v})
            continue
        except:
            pass

        try:
            # in case above failed, it might be a ttk widget
            style_name = widget.cget('style') or widget.winfo_class()
            s = ttk.Style()
            s.configure(style_name, **{k: v})
        except:
            pass


def set_default_theme():
    # select tkinter theme required for things to be right on windows,
    # only 'alt', 'default', or 'classic' can work fine on windows 10
    s = ttk.Style()
    s.theme_use('default')


def theme_compatibility_check(print_warning=False):
    """check if current theme is compatible

    Return:
        bool: True or False
    """
    compatible_themes = ['alt', 'default', 'classic']
    s = ttk.Style()
    current_theme = s.theme_use()
    if current_theme not in compatible_themes:
        if print_warning:
            print(f'AwesomeTkinter Warning: Widgets might not work properly under current theme ({current_theme})\n'
                  f"compatible_themes are ['alt', 'default', 'classic']\n"
                  f"you can set default theme using atk.set_default_theme() or style.theme_use('default')")
        return False

    return True


def center_window(window, width=None, height=None, set_geometry_wh=True, reference=None):
    """center a tkinter window on screen's center and set its geometry if width and height given

    Args:
        window (tk.root or tk.Toplevel): a window to be centered
        width (int): window's width
        height (int): window's height
        set_geometry_wh (bool): include width and height in geometry
        reference: tk window, e.g. parent window as a reference
    """
    # update_idletasks will cause a window to show early at the top left corner
    # then change position to center in a non-professional way
    # window.update_idletasks()

    if width and height:
        if reference:
            refx = reference.winfo_x() + reference.winfo_width() // 2
            refy = reference.winfo_y() + reference.winfo_height() // 2
        else:
            refx = window.winfo_screenwidth() // 2
            refy = window.winfo_screenheight() // 2

        x = refx - width // 2
        y = refy - height // 2

        if set_geometry_wh:
            window.geometry(f'{width}x{height}+{x}+{y}')
        else:
            window.geometry(f'+{x}+{y}')

    else:
        window.eval('tk::PlaceWindow . center')


__all__ = ['identify_operating_system', 'calc_md5', 'generate_unique_name', 'invert_color', 'rgb2hex',
           'change_img_color', 'resize_img', 'mix_images', 'color_to_rgba', 'is_dark', 'calc_font_color',
           'calc_contrast_color', 'text_to_image', 'create_pil_image', 'create_image', 'create_circle',
           'scroll_with_mousewheel', 'unbind_mousewheel', 'get_widget_attribute', 'ImageTk', 'set_default_theme',
           'theme_compatibility_check', 'configure_widget', 'center_window']
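
# A minimal usage sketch of the image helpers above (the window content is
# purely illustrative):
if __name__ == '__main__':
    root = tk.Tk()
    set_default_theme()
    center_window(root, width=200, height=200)
    # a red ring with a lighter, contrasting fill, rendered to a tkinter PhotoImage
    circle = create_circle(size=64, color='red', fill=calc_contrast_color('red', 100))
    photo = create_image(img=circle)
    tk.Label(root, image=photo).pack(expand=True)
    root.mainloop()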
/MXFusion-0.3.1.tar.gz/MXFusion-0.3.1/mxfusion/components/functions/operators/operator_impl.py
import mxnet as mx

from . import MXNetOperatorDecorator
from .operators import Operator
from ...variables import Variable
from ....util.inference import realize_shape
from ....common.exceptions import InferenceError

""" Basic Arithmetic """


@MXNetOperatorDecorator(name='add', args=['x', 'y'], inputs=['x', 'y'])
def add(F, x, y):
    return F.add(x, y)


@MXNetOperatorDecorator(name='subtract', args=['x', 'y'], inputs=['x', 'y'])
def subtract(F, x, y):
    return F.subtract(x, y)


@MXNetOperatorDecorator(name='multiply', args=['x', 'y'], inputs=['x', 'y'])
def multiply(F, x, y):
    return F.multiply(x, y)


@MXNetOperatorDecorator(name='divide', args=['x', 'y'], inputs=['x', 'y'])
def divide(F, x, y):
    return F.divide(x, y)


@MXNetOperatorDecorator(name='power', args=['x', 'y'], inputs=['x', 'y'])
def power(F, x, y):
    return F.power(x, y)


""" Elementwise Operations """


@MXNetOperatorDecorator(name='square', args=['data'], inputs=['data'])
def square(F, data):
    return F.square(data)


@MXNetOperatorDecorator(name='exp', args=['data'], inputs=['data'])
def exp(F, data):
    return F.exp(data)


@MXNetOperatorDecorator(name='log', args=['data'], inputs=['data'])
def log(F, data):
    return F.log(data)


""" Aggregation """


@MXNetOperatorDecorator(name='sum', args=['data', 'axis'], inputs=['data'])
def sum(F, data, axis=None):
    return F.sum(data, axis)


@MXNetOperatorDecorator(name='mean', args=['data', 'axis'], inputs=['data'])
def mean(F, data, axis=None):
    return F.mean(data, axis)


@MXNetOperatorDecorator(name='prod', args=['data', 'axis'], inputs=['data'])
def prod(F, data, axis=None):
    return F.prod(data, axis)


""" Matrix Operations """


@MXNetOperatorDecorator(name='dot', args=['x', 'y'], inputs=['x', 'y'])
def dot(F, x, y):
    return F.linalg.gemm2(x, y)


# TODO Bring in the axis arguments once it's in the release version of MXNet
@MXNetOperatorDecorator(name='diag', args=['data', 'k', 'axis1', 'axis2'], inputs=['data'])
def diag(F, data, k=0, axis1=None, axis2=None):
    if axis1 is not None or axis2 is not None:
        raise Exception("axis1 and axis2 are not implemented yet.")
    return F.diag(data, k)


""" Matrix Manipulations """


@MXNetOperatorDecorator(name='reshape', args=['data', 'shape', 'reverse'], inputs=['data'])
def reshape(F, data, shape, reverse=False):
    return F.reshape(data=data, shape=shape, reverse=reverse)


@MXNetOperatorDecorator(name='transpose', args=['data', 'axes'], inputs=['data'])
def transpose(F, data, axes=None):
    axes = axes if axes is not None else []
    return F.transpose(data=data, axes=axes)


"""Special Operators"""


def broadcast_to(data, shape):
    """
    This operator broadcasts a variable to a target shape. The broadcasting rule is
    the same as [the numpy broadcasting rule](https://docs.scipy.org/doc/numpy-1.13.0/user/basics.broadcasting.html).

    See the following example:
    ```python
    m.x = Gaussian.define_variable(mean=broadcast_to(array([0]), (2,)),
                                   variance=broadcast_to(array([1]), (2,)),
                                   shape=(2,))
    ```

    :param data: the variable to be broadcasted
    :type data: Variable
    :param shape: the shape of which the variable will be broadcasted to
    :type shape: tuple of int or Variable
    """
    class BroadcastToOperator(Operator):
        def __init__(self, data, shape):
            super(BroadcastToOperator, self).__init__(
                inputs=[('data', data)],
                outputs=[('output_0', Variable(shape=None))],
                operator_name='broadcast_to',
                properties={'shape': shape},
                broadcastable=True)

        def eval(self, F, variables, always_return_tuple=False):
            target_shape = realize_shape(self.properties['shape'], variables)
            data = variables[self.inputs[0][1].uuid]
            if F is mx.ndarray:
                source_shape = data.shape
            elif F is mx.symbol:
                raise NotImplementedError('Symbolic mode to be supported!')
            else:
                raise InferenceError('Unknown MXNet Mode ' + str(F))
            n_target_dim = len(target_shape)
            n_source_dim = len(source_shape)
            if n_target_dim + 1 - n_source_dim > 0:
                t_shape = (source_shape[0],) + \
                    (1,) * (n_target_dim + 1 - n_source_dim) + source_shape[1:]
                data = F.reshape(data, shape=t_shape)
            shape = (source_shape[0],) + target_shape
            res = F.broadcast_to(data, shape=shape)
            if always_return_tuple:
                res = (res,)
            return res

    op = BroadcastToOperator(data=data, shape=shape)
    return op.outputs[0][1]
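
# A minimal sketch of composing these operators into a model graph. The
# top-level re-exports and the operators import path below are assumptions
# (mirroring MXFusion's documented usage); evaluation happens later inside an
# inference run, so this only wires up the symbolic graph:
#
#   from mxfusion import Model, Variable
#   from mxfusion.components.functions.operators import add, multiply, broadcast_to
#
#   m = Model()
#   m.x = Variable(shape=(2,))
#   m.y = Variable(shape=(2,))
#   m.z = add(m.x, multiply(m.y, m.y))   # elementwise x + y * y
#   m.b = broadcast_to(m.x, (3, 2))      # numpy-style broadcast to (3, 2)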
/Flask-KQMaps-0.4.2.tar.gz/Flask-KQMaps-0.4.2/flask_kqmaps/static/kqwebclient/leaflet/3rd_libs/Leaflet.hotline/leaflet.hotline.min.js
!function(t,i){"function"==typeof define&&define.amd?define(["leaflet"],i):"object"==typeof exports?module.exports=i:i(t.L)}(this,function(t){if(t.Hotline)return t;var i=function(t){if(!(this instanceof i))return new i(t);var e={0:"green",.5:"yellow",1:"red"};this._canvas=t="string"==typeof t?document.getElementById(t):t,this._ctx=t.getContext("2d"),this._width=t.width,this._height=t.height,this._weight=5,this._outlineWidth=1,this._outlineColor="black",this._min=0,this._max=1,this._data=[],this.palette(e)};i.prototype={width:function(t){return this._width=t,this},height:function(t){return this._height=t,this},weight:function(t){return this._weight=t,this},outlineWidth:function(t){return this._outlineWidth=t,this},outlineColor:function(t){return this._outlineColor=t,this},palette:function(t){var i=document.createElement("canvas"),e=i.getContext("2d"),n=e.createLinearGradient(0,0,0,256);i.width=1,i.height=256;for(var o in t)n.addColorStop(o,t[o]);return e.fillStyle=n,e.fillRect(0,0,1,256),this._palette=e.getImageData(0,0,1,256).data,this},min:function(t){return this._min=t,this},max:function(t){return this._max=t,this},data:function(t){return this._data=t,this},add:function(t){return this._data.push(t),this},draw:function(){var t=this._ctx;return t.globalCompositeOperation="source-over",t.lineCap="round",this._drawOutline(t),this._drawHotline(t),this},getRGBForValue:function(t){var i=Math.min(Math.max((t-this._min)/(this._max-this._min),0),.999),e=4*Math.floor(256*i);return[this._palette[e],this._palette[e+1],this._palette[e+2]]},_drawOutline:function(t){var i,e,n,o,h,r,s;if(this._outlineWidth)for(i=0,n=this._data.length;i<n;i++)for(o=this._data[i],t.lineWidth=this._weight+2*this._outlineWidth,e=1,h=o.length;e<h;e++)r=o[e-1],s=o[e],t.strokeStyle=this._outlineColor,t.beginPath(),t.moveTo(r.x,r.y),t.lineTo(s.x,s.y),t.stroke()},_drawHotline:function(t){var i,e,n,o,h,r,s,l,a,u;for(t.lineWidth=this._weight,i=0,n=this._data.length;i<n;i++)for(o=this._data[i],e=1,h=o.length;e<h;e++)r=o[e-1],s=o[e],l=t.createLinearGradient(r.x,r.y,s.x,s.y),a=this.getRGBForValue(r.z),u=this.getRGBForValue(s.z),l.addColorStop(0,"rgb("+a.join(",")+")"),l.addColorStop(1,"rgb("+u.join(",")+")"),t.strokeStyle=l,t.beginPath(),t.moveTo(r.x,r.y),t.lineTo(s.x,s.y),t.stroke()}};var e=t.Canvas.extend({_initContainer:function(){t.Canvas.prototype._initContainer.call(this),this._hotline=new i(this._container)},_update:function(){t.Canvas.prototype._update.call(this),this._hotline.width(this._container.width),this._hotline.height(this._container.height)},_updatePoly:function(t){if(this._drawing){var i=t._parts;i.length&&(this._updateOptions(t),this._hotline.data(i).draw())}},_updateOptions:function(t){null!=t.options.min&&this._hotline.min(t.options.min),null!=t.options.max&&this._hotline.max(t.options.max),null!=t.options.weight&&this._hotline.weight(t.options.weight),null!=t.options.outlineWidth&&this._hotline.outlineWidth(t.options.outlineWidth),null!=t.options.outlineColor&&this._hotline.outlineColor(t.options.outlineColor),t.options.palette&&this._hotline.palette(t.options.palette)}}),n=function(i){return t.Browser.canvas?new e(i):null},o={clipSegment:function(i,e,n,o,h){var r,s,l,a=o?this._lastCode:t.LineUtil._getBitCode(i,n),u=t.LineUtil._getBitCode(e,n);for(this._lastCode=u;;){if(!(a|u))return[i,e];if(a&u)return!1;r=a||u,s=t.LineUtil._getEdgeIntersection(i,e,r,n,h),l=t.LineUtil._getBitCode(s,n),r===a?(s.z=i.z,i=s,a=l):(s.z=e.z,e=s,u=l)}}};return 
t.Hotline=t.Polyline.extend({statics:{Renderer:e,renderer:n},options:{renderer:n(),min:0,max:1,palette:{0:"green",.5:"yellow",1:"red"},weight:5,outlineColor:"black",outlineWidth:1},getRGBForValue:function(t){return this._renderer._hotline.getRGBForValue(t)},_projectLatlngs:function(i,e,n){var o,h,r=i[0]instanceof t.LatLng,s=i.length;if(r){for(h=[],o=0;o<s;o++)h[o]=this._map.latLngToLayerPoint(i[o]),h[o].z=i[o].alt,n.extend(h[o]);e.push(h)}else for(o=0;o<s;o++)this._projectLatlngs(i[o],e,n)},_clipPoints:function(){if(this.options.noClip)return void(this._parts=this._rings);this._parts=[];var t,i,e,n,h,r,s,l=this._parts,a=this._renderer._bounds;for(t=0,e=0,n=this._rings.length;t<n;t++)for(s=this._rings[t],i=0,h=s.length;i<h-1;i++)r=o.clipSegment(s[i],s[i+1],a,i,!0),r&&(l[e]=l[e]||[],l[e].push(r[0]),r[1]===s[i+1]&&i!==h-2||(l[e].push(r[1]),e++))},_clickTolerance:function(){return this.options.weight/2+this.options.outlineWidth+(t.Browser.touch?10:0)}}),t.hotline=function(i,e){return new t.Hotline(i,e)},t});
/Actflow-0.2.3.1-py3-none-any.whl/connectivity_estimation/pc_multregconn.py
from sklearn.linear_model import LinearRegression
from sklearn.decomposition import PCA
from sklearn.model_selection import cross_val_predict
from sklearn.metrics import mean_squared_error, r2_score
import numpy as np


def pc_multregconn(activity_matrix, target_ts=None, n_components=None, n_comp_search=False,
                   n_components_min=1, n_components_max=None):
    """
    activity_matrix: Activity matrix should be nodes X time
    target_ts: Optional, used when only a single target time series (returns 1 X nnodes matrix)
    n_components: Optional. Number of PCA components to use. If None, the smaller of the number of
        nodes or the number of time points (minus 1) will be selected.
    n_comp_search: Optional. Boolean indicating whether to search for the best number of components
        based on cross-validation generalization (to reduce overfitting).
    n_components_min: Optional. The smallest number to test in the n_comp_search.
    n_components_max: Optional. The largest number to test in the n_comp_search.

    Output: connectivity_mat (formatted targets X sources), n_components
    """

    nnodes = activity_matrix.shape[0]
    timepoints = activity_matrix.shape[1]

    if n_components is None:
        n_components = np.min([nnodes - 1, timepoints - 1])
    else:
        if nnodes < n_components or timepoints < n_components:
            print('activity_matrix shape: ', np.shape(activity_matrix))
            raise Exception('More components than nodes and/or timepoints! Use fewer components')

    # De-mean time series
    activity_matrix_mean = np.mean(activity_matrix, axis=1)
    activity_matrix = activity_matrix - activity_matrix_mean[:, np.newaxis]

    if target_ts is None:

        # Cross-validation to find optimal number of components (based on mean MSE across all nodes)
        if n_comp_search:
            if n_components_max is None:
                n_components_max = np.min([nnodes - 1, timepoints - 1])
            componentnum_set = np.arange(n_components_min, n_components_max + 1)
            mse_regionbycomp = np.zeros([np.shape(componentnum_set)[0], nnodes])
            for targetnode in range(nnodes):
                othernodes = list(range(nnodes))
                othernodes.remove(targetnode)  # Remove target node from 'other nodes'
                X = activity_matrix[othernodes, :].T
                y = activity_matrix[targetnode, :]
                # Run PCA
                pca = PCA()
                Xreg_allPCs = pca.fit_transform(X)
                mscv_vals = np.zeros(np.shape(componentnum_set)[0])
                comp_count = 0
                for comp_num in componentnum_set:
                    regr = LinearRegression()
                    Xreg = Xreg_allPCs[:, :comp_num]
                    regr.fit(Xreg, y)
                    # Cross-validation
                    y_cv = cross_val_predict(regr, Xreg, y, cv=10)
                    mscv_vals[comp_count] = mean_squared_error(y, y_cv)
                    comp_count = comp_count + 1
                mse_regionbycomp[:, targetnode] = mscv_vals
            min_comps_means = np.mean(mse_regionbycomp, axis=1)
            n_components = componentnum_set[np.where(min_comps_means == np.min(min_comps_means))[0][0]]
            print('n_components = ' + str(n_components))

        connectivity_mat = np.zeros((nnodes, nnodes))
        for targetnode in range(nnodes):
            othernodes = list(range(nnodes))
            othernodes.remove(targetnode)  # Remove target node from 'other nodes'
            X = activity_matrix[othernodes, :].T
            y = activity_matrix[targetnode, :]
            # Run PCA on source time series
            pca = PCA(n_components)
            reduced_mat = pca.fit_transform(X)  # Time X Features
            components = pca.components_
            # Note: LinearRegression fits the intercept by default (the intercept beta is not
            # included in the coef_ output)
            regrmodel = LinearRegression()
            reg = regrmodel.fit(reduced_mat, y)
            # Convert regression betas from component space to node space
            betasPCR = pca.inverse_transform(reg.coef_)
            connectivity_mat[targetnode, othernodes] = betasPCR

    else:
        # Remove time series mean
        target_ts = target_ts - np.mean(target_ts)
        # Computing values for a single target node
        connectivity_mat = np.zeros((nnodes, 1))
        X = activity_matrix.T
        y = target_ts
        # Cross-validation to find optimal number of components
        if n_comp_search:
            componentnum_set = np.arange(n_components_min, n_components_max + 1)
            mscv_vals = np.zeros(np.shape(componentnum_set)[0])
            comp_count = 0
            for comp_num in componentnum_set:
                mscv_vals[comp_count] = pcr_cvtest(X, y, pc=comp_num, cv=10)
                comp_count = comp_count + 1
            n_components = componentnum_set[np.where(mscv_vals == np.min(mscv_vals))[0][0]]
        # Run PCA on source time series
        pca = PCA(n_components)
        reduced_mat = pca.fit_transform(X)  # Time X Features
        components = pca.components_
        # Note: LinearRegression fits the intercept by default (the intercept beta is not
        # included in the coef_ output)
        reg = LinearRegression().fit(reduced_mat, y)
        # Convert regression betas from component space to node space
        betasPCR = pca.inverse_transform(reg.coef_)
        connectivity_mat = betasPCR

    return connectivity_mat


def pcr_cvtest(X, y, pc, cv):
    '''Principal Component Regression in Python

    Based on code from here:
    https://nirpyresearch.com/principal-component-regression-python/
    '''
    # Step 1: PCA on input data
    # Define the PCA object
    pca = PCA()
    # Run PCA producing the reduced variable Xreg and select the first pc components
    Xreg = pca.fit_transform(X)[:, :pc]

    # Step 2: regression on selected principal components
    # Create linear regression object
    regr = LinearRegression()
    # Fit
    regr.fit(Xreg, y)
    # Calibration
    #y_c = regr.predict(Xreg)
    # Cross-validation
    y_cv = cross_val_predict(regr, Xreg, y, cv=cv)
    # Calculate mean square error for calibration and cross validation
    #mse_c = mean_squared_error(y, y_c)
    mse_cv = mean_squared_error(y, y_cv)

    #return(y_cv, mse_c, mse_cv)
    return(mse_cv)
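
# Usage sketch: a self-contained call on synthetic data. The node and
# time-point counts and the fixed seed are illustrative assumptions, not
# package defaults; shapes follow the docstring (activity_matrix is nodes X time).
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    activity = rng.randn(10, 200)                  # 10 nodes, 200 time points
    fc = pc_multregconn(activity, n_components=5)
    print(fc.shape)                                # -> (10, 10), targets X sources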
/Flask-ReqArg-0.1.5.tar.gz/Flask-ReqArg-0.1.5/flask_reqarg/base.py
from sys import version_info
from abc import ABCMeta, abstractproperty
from functools import wraps
from inspect import isfunction, getargspec

if version_info[0] == 2:
    from itertools import izip as zip

__all__ = (
    'get', 'post', 'args', 'files', 'cookies', 'collection', 'RequestWrapperBase'
)


def _extract_opt_source(kwargs):
    result = kwargs.pop('_source', 'args')
    if result not in ('post', 'get', 'args', 'files', 'cookies'):
        result = 'args'
    return result


def _extract_opt_storage_type(kwargs):
    return kwargs.pop('_storage', dict)


def _fetch_from_dict(d, name, default, type):
    result = d.get(name, default)
    if type is not None:
        try:
            result = type(result)
        except ValueError:
            result = default
    return result


def get(name=None, default=None, type=None, getlist=False):
    def fetch_one(request, arg_name):
        return request.from_get(name or arg_name, default, type)

    def fetch_all(request, arg_name):
        return request.list_from_get(name or arg_name)

    return fetch_all if getlist else fetch_one


def post(name=None, default=None, type=None, getlist=False):
    def fetch_one(request, arg_name):
        return request.from_post(name or arg_name, default, type)

    def fetch_all(request, arg_name):
        return request.list_from_post(name or arg_name)

    return fetch_all if getlist else fetch_one


def args(name=None, default=None, type=None, getlist=False):
    def fetch_one(request, arg_name):
        return request.from_get_or_post(name or arg_name, default, type)

    def fetch_all(request, arg_name):
        return request.list_from_get_or_post(name or arg_name)

    return fetch_all if getlist else fetch_one


def files(name=None, getlist=False):
    def fetch_one(request, arg_name):
        return request.from_files(name or arg_name)

    def fetch_all(request, arg_name):
        return request.list_from_files(name or arg_name)

    return fetch_all if getlist else fetch_one


def cookies(name=None, default=None, type=None):
    def fetch(request, arg_name):
        return request.from_cookies(name or arg_name, default, type)

    return fetch


def collection(*args, **kwargs):
    source = _extract_opt_source(kwargs)
    storage_type = _extract_opt_storage_type(kwargs)

    def getter(request, arg_name):
        values = {}
        for arg in args:
            values[arg] = request.from_source(source, arg)
        for arg, arg_getter in kwargs.items():
            values[arg] = arg_getter(request, arg)
        return storage_type(**values)

    return getter


class RequestWrapperBase(object):
    __metaclass__ = ABCMeta

    def __init__(self, request):
        self._request = request
        self._storage_dict_map = {
            '_request': self._request,
            '_get': self.get_dict,
            '_post': self.post_dict,
            '_args': self.args_dict,
            '_cookies': self.cookies_dict,
            '_files': self.files_dict
        }

    def from_source(self, source, name):
        return self._storage_dict_map['_' + source].get(name)

    def from_get(self, name, default, type):
        return _fetch_from_dict(self.get_dict, name, default, type)

    def from_post(self, name, default, type):
        return _fetch_from_dict(self.post_dict, name, default, type)

    def from_get_or_post(self, name, default, type):
        return _fetch_from_dict(self.args_dict, name, default, type)

    def from_cookies(self, name, default, type):
        return _fetch_from_dict(self.cookies_dict, name, default, type)

    def from_files(self, name):
        return self.files_dict.get(name)

    def list_from_get(self, name):
        return self.get_dict.getlist(name)

    def list_from_post(self, name):
        return self.post_dict.getlist(name)

    def list_from_get_or_post(self, name):
        return self.args_dict.getlist(name)

    def list_from_files(self, name):
        return self.files_dict.getlist(name)

    @property
    def request(self):
        return self._request

    @abstractproperty
    def get_dict(self):
        pass

    @abstractproperty
    def post_dict(self):
        pass

    @abstractproperty
    def args_dict(self):
        pass

    @abstractproperty
    def cookies_dict(self):
        pass

    @abstractproperty
    def files_dict(self):
        pass

    @classmethod
    def create(cls, *args, **kwargs):
        pass

    @classmethod
    def request_args(cls, *args, **kwargs):
        source = _extract_opt_source(kwargs)

        def decorator(func, spec=True):
            func_arg_names = getargspec(func)[0]
            if spec:
                kwargs.update(zip(func_arg_names, args))

            @wraps(func)
            def wrapper(*func_args, **func_kwargs):
                request = cls.create(*func_args, **func_kwargs)
                values = func_kwargs.copy()
                for arg_name in func_arg_names:
                    if arg_name in kwargs:
                        values[arg_name] = kwargs[arg_name](request, arg_name)
                    elif arg_name not in values:
                        values[arg_name] = request.from_source(source, arg_name)
                return func(**values)

            return wrapper

        if len(args) == 1 and len(kwargs) == 0 and isfunction(args[0]):
            return decorator(args[0], False)
        return decorator
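
# Usage sketch: the getter factories above are plain callables, so they can be
# exercised without Flask against any object exposing the wrapper interface.
# _FakeRequest is a hypothetical stand-in for illustration, not part of the package.
class _FakeRequest(object):
    def from_get(self, name, default, type):
        # delegate to the module's own coercion helper
        return _fetch_from_dict({'page': '2'}, name, default, type)


def _fake_request_sketch():
    page_getter = get('page', default=1, type=int)   # fetch_one closure from get()
    return page_getter(_FakeRequest(), 'page')       # -> 2 ('2' coerced to int)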
/GraphLab_Create-2.1-cp27-none-macosx_10_5_x86_64.macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.macosx_10_11_intel.macosx_10_11_x86_64.whl/graphlab/_beta/views/platform/wrappers/_matplotlib.py
try:
    import io as _StringIO
except ImportError:
    import StringIO as _StringIO

from .. import _view
from ._plotly import PlotlyView as _PlotlyView


class MatplotlibView(_view.View):
    def __init__(self, figure, backend='plotly'):
        """
        Wraps a matplotlib figure in a View. The resulting View can be used
        like any other view (i.e. shown, composed in a layout, etc.).

        Parameters
        ----------
        figure : matplotlib.figure.Figure
            A matplotlib figure.

        backend : str
            Can be one of the following values: 'plotly' or 'svg'. The default
            is 'plotly'. If 'plotly' is selected, the plotly wrapper for
            matplotlib is used (in combination with
            `graphlab.beta.views.wrappers.PlotlyView`). The plotly wrapper
            allows for more user interaction with the plot but has some
            limitations and incompatibilities. If 'svg' is selected, the plot
            will be serialized to non-interactive SVG format and that
            representation will be displayed in the view.

        Returns
        -------
        out : View
            A View representation of the figure.
        """
        super(MatplotlibView, self).__init__(
            name='Matplotlib Wrapper View',
            label='Plot',
            tag_name='gl-matplotlib-wrapper',
            description='Matplotlib Plot inside the Turi Views platform'
        )
        self.__backend = backend
        if backend == 'plotly':
            import plotly
            self.__plotly_view = _PlotlyView(plotly.tools.mpl_to_plotly(figure))
        elif backend == 'svg':
            buf = _StringIO.StringIO()
            figure.savefig(buf, format='svg')
            self.__svg = buf.getvalue()
        else:
            raise ValueError('Expected backend to be one of: "plotly", "svg"')

    def get_plot(self):
        backend = self.__backend
        plot = None
        if backend == 'plotly':
            plot = self.__plotly_view.get_plot()
        else:
            assert backend == 'svg'
            plot = self.__svg
        return {
            "backend": backend,
            "plot": plot
        }
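
# Usage sketch (hedged): the 'svg' backend only needs matplotlib, while
# instantiating the view still assumes the surrounding GraphLab views platform
# is importable; outside that platform this is illustrative only. The Agg
# backend choice is an assumption to keep the sketch headless.
def _svg_view_sketch():
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    ax.plot([0, 1, 2], [0, 1, 4])
    view = MatplotlibView(fig, backend='svg')
    return view.get_plot()   # {'backend': 'svg', 'plot': '<svg ...'}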
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/lib/mnemonic.py
import os
import hmac
import math
import hashlib
import unicodedata
import string

import ecdsa
import pbkdf2

from util import print_error
from bitcoin import is_old_seed, is_new_seed
import version
import i18n

# http://www.asahi-net.or.jp/~ax2s-kmtn/ref/unicode/e_asia.html
CJK_INTERVALS = [
    (0x4E00, 0x9FFF, 'CJK Unified Ideographs'),
    (0x3400, 0x4DBF, 'CJK Unified Ideographs Extension A'),
    (0x20000, 0x2A6DF, 'CJK Unified Ideographs Extension B'),
    (0x2A700, 0x2B73F, 'CJK Unified Ideographs Extension C'),
    (0x2B740, 0x2B81F, 'CJK Unified Ideographs Extension D'),
    (0xF900, 0xFAFF, 'CJK Compatibility Ideographs'),
    (0x2F800, 0x2FA1D, 'CJK Compatibility Ideographs Supplement'),
    (0x3190, 0x319F, 'Kanbun'),
    (0x2E80, 0x2EFF, 'CJK Radicals Supplement'),
    (0x2F00, 0x2FDF, 'CJK Radicals'),
    (0x31C0, 0x31EF, 'CJK Strokes'),
    (0x2FF0, 0x2FFF, 'Ideographic Description Characters'),
    (0xE0100, 0xE01EF, 'Variation Selectors Supplement'),
    (0x3100, 0x312F, 'Bopomofo'),
    (0x31A0, 0x31BF, 'Bopomofo Extended'),
    (0xFF00, 0xFFEF, 'Halfwidth and Fullwidth Forms'),
    (0x3040, 0x309F, 'Hiragana'),
    (0x30A0, 0x30FF, 'Katakana'),
    (0x31F0, 0x31FF, 'Katakana Phonetic Extensions'),
    (0x1B000, 0x1B0FF, 'Kana Supplement'),
    (0xAC00, 0xD7AF, 'Hangul Syllables'),
    (0x1100, 0x11FF, 'Hangul Jamo'),
    (0xA960, 0xA97F, 'Hangul Jamo Extended A'),
    (0xD7B0, 0xD7FF, 'Hangul Jamo Extended B'),
    (0x3130, 0x318F, 'Hangul Compatibility Jamo'),
    (0xA4D0, 0xA4FF, 'Lisu'),
    (0x16F00, 0x16F9F, 'Miao'),
    (0xA000, 0xA48F, 'Yi Syllables'),
    (0xA490, 0xA4CF, 'Yi Radicals'),
]


def is_CJK(c):
    n = ord(c)
    for imin, imax, name in CJK_INTERVALS:
        if n >= imin and n <= imax:
            return True
    return False


def normalize_text(seed):
    # normalize
    seed = unicodedata.normalize('NFKD', unicode(seed))
    # lower
    seed = seed.lower()
    # remove accents
    seed = u''.join([c for c in seed if not unicodedata.combining(c)])
    # normalize whitespaces
    seed = u' '.join(seed.split())
    # remove whitespaces between CJK
    seed = u''.join([seed[i] for i in range(len(seed))
                     if not (seed[i] in string.whitespace and is_CJK(seed[i-1]) and is_CJK(seed[i+1]))])
    return seed


def load_wordlist(filename):
    path = os.path.join(os.path.dirname(__file__), 'wordlist', filename)
    s = open(path, 'r').read().strip()
    s = unicodedata.normalize('NFKD', s.decode('utf8'))
    lines = s.split('\n')
    wordlist = []
    for line in lines:
        line = line.split('#')[0]
        line = line.strip(' \r')
        assert ' ' not in line
        if line:
            wordlist.append(line)
    return wordlist


filenames = {
    'en': 'english.txt',
    'es': 'spanish.txt',
    'ja': 'japanese.txt',
    'pt': 'portuguese.txt',
    'zh': 'chinese_simplified.txt'
}


class Mnemonic(object):
    # Seed derivation no longer follows BIP39
    # Mnemonic phrase uses a hash based checksum, instead of a wordlist-dependent checksum

    def __init__(self, lang=None):
        lang = lang or 'en'
        print_error('language', lang)
        filename = filenames.get(lang[0:2], 'english.txt')
        self.wordlist = load_wordlist(filename)
        print_error("wordlist has %d words" % len(self.wordlist))

    @classmethod
    def mnemonic_to_seed(self, mnemonic, passphrase):
        PBKDF2_ROUNDS = 2048
        mnemonic = normalize_text(mnemonic)
        passphrase = normalize_text(passphrase)
        return pbkdf2.PBKDF2(mnemonic, 'electrum' + passphrase,
                             iterations=PBKDF2_ROUNDS, macmodule=hmac,
                             digestmodule=hashlib.sha512).read(64)

    def mnemonic_encode(self, i):
        n = len(self.wordlist)
        words = []
        while i:
            x = i % n
            i = i / n
            words.append(self.wordlist[x])
        return ' '.join(words)

    def get_suggestions(self, prefix):
        for w in self.wordlist:
            if w.startswith(prefix):
                yield w

    def mnemonic_decode(self, seed):
        n = len(self.wordlist)
        words = seed.split()
        i = 0
        while words:
            w = words.pop()
            k = self.wordlist.index(w)
            i = i * n + k
        return i

    def check_seed(self, seed, custom_entropy):
        assert is_new_seed(seed)
        i = self.mnemonic_decode(seed)
        return i % custom_entropy == 0

    def make_seed(self, seed_type='standard', num_bits=132, custom_entropy=1):
        import version
        prefix = version.seed_prefix(seed_type)
        # increase num_bits in order to obtain a uniform distribution for the last word
        bpw = math.log(len(self.wordlist), 2)
        num_bits = int(math.ceil(num_bits / bpw)) * bpw
        # handle custom entropy; make sure we add at least 16 bits
        n_custom = int(math.ceil(math.log(custom_entropy, 2)))
        n = max(16, num_bits - n_custom)
        print_error("make_seed", prefix, "adding %d bits" % n)
        my_entropy = ecdsa.util.randrange(pow(2, n))
        nonce = 0
        while True:
            nonce += 1
            i = custom_entropy * (my_entropy + nonce)
            seed = self.mnemonic_encode(i)
            assert i == self.mnemonic_decode(seed)
            if is_old_seed(seed):
                continue
            if is_new_seed(seed, prefix):
                break
        print_error('%d words' % len(seed.split()))
        return seed
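
# Usage sketch (Python 2, like this module, and assuming the surrounding
# Electrum package so that util/bitcoin/version import cleanly; outside that
# context this is illustrative only). The empty passphrase is an assumption.
def _seed_sketch():
    m = Mnemonic('en')
    phrase = m.make_seed('standard', num_bits=132)
    # stretch the phrase into wallet entropy
    return Mnemonic.mnemonic_to_seed(phrase, u'')   # 64-byte PBKDF2-HMAC-SHA512 output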
/Djaloha-0.4.2.tar.gz/Djaloha-0.4.2/djaloha/static/aloha.0.20.20/plugins/common/contenthandler/lib/genericcontenthandler.js
define(
    ['aloha', 'aloha/jquery', 'aloha/contenthandlermanager'],
    function(Aloha, jQuery, ContentHandlerManager) {
        var GENTICS = window.GENTICS;

        /**
         * Register the generic content handler
         */
        var GenericContentHandler = ContentHandlerManager.createHandler({

            /**
             * Handle the pasting. Remove all unwanted stuff.
             * @param content
             */
            handleContent: function( content ) {
                if ( typeof content === 'string' ) {
                    content = jQuery( '<div>' + content + '</div>' );
                } else if ( content instanceof jQuery ) {
                    content = jQuery( '<div>' ).append(content);
                }

                // If we find an aloha-block inside the pasted content,
                // we do not modify the pasted stuff, as it most probably
                // comes from Aloha and not from other sources, and does
                // not need to be cleaned up.
                if (content.find('.aloha-block').length > 0) {
                    return;
                }

                // clean lists
                this.cleanLists(content);

                // transform tables
                this.transformTables(content);

                // remove comments
                this.removeComments(content);

                // unwrap font and span tags
                this.unwrapTags(content);

                // remove styles
                this.removeStyles(content);

                // remove namespaced elements
                this.removeNamespacedElements(content);

                // transform formattings
                this.transformFormattings(content);

                // transform links
                //this.transformLinks(content);

                return content.html();
            },

            /**
             * Clean lists: The only allowed children of ol or ul elements are li's. Everything else will be removed.
             * @param content
             */
            cleanLists: function(content) {
                content.find('ul,ol').each(function() {
                    var $list = jQuery(this);
                    $list.contents(':not(li,ul,ol)').each(function() {
                        jQuery(this).remove();
                    });
                });
            },

            /**
             * Transform tables which were pasted
             * @param content
             */
            transformTables: function(content) {
                // remove border, cellspacing, cellpadding from all tables
                // @todo what about width, height?
                content.find('table').each(function() {
                    jQuery(this).removeAttr('border').removeAttr('cellspacing').removeAttr('cellpadding');
                });

                // remove unwanted attributes and cleanup single empty p-tags
                content.find('td').each(function() {
                    // remove width, height and valign from all table cells
                    jQuery(this).removeAttr('width').removeAttr('height').removeAttr('valign');
                    if ( this.innerHTML.replace(/[\s\xA0]+/g,'') === '<p><br></p>' ) {
                        this.innerHTML = '&nbsp;';
                    }
                    if ( jQuery(this).find('p').length == 1 ) {
                        jQuery(this).find('p').contents().unwrap();
                    }
                });

                // remove unwanted attributes from tr also? (tested with paste from open/libre office)
                // @todo or do this all via sanitize.js
                content.find('tr').each(function() {
                    // remove width, height and valign from all table cells
                    jQuery(this).removeAttr('width').removeAttr('height').removeAttr('valign');
                });

                // completely remove colgroup tags
                // @TODO should we remove colgroup? use sanitize for that?
                content.find('colgroup').remove();
            },

            /**
             * Transform formattings
             * @param content
             */
            transformFormattings: function( content ) {
                // find all formattings we will transform
                content.find('strong,em,s,u,strike').each(function() {
                    if (this.nodeName.toLowerCase() == 'strong') {
                        // transform strong to b
                        Aloha.Markup.transformDomObject(jQuery(this), 'b');
                    } else if (this.nodeName.toLowerCase() == 'em') {
                        // transform em to i
                        Aloha.Markup.transformDomObject(jQuery(this), 'i');
                    } else if (this.nodeName.toLowerCase() == 's' || this.nodeName.toLowerCase() == 'strike') {
                        // transform s and strike to del
                        Aloha.Markup.transformDomObject(jQuery(this), 'del');
                    } else if (this.nodeName.toLowerCase() == 'u') {
                        // transform u?
                        jQuery(this).contents().unwrap();
                    }
                });
            },

            /**
             * Transform links
             * @param content
             */
            transformLinks: function( content ) {
                // find all links and remove the links without href (will be destination anchors from word table of contents)
                // aloha is not supporting anchors at the moment -- maybe rewrite anchors in headings to "invisible"
                // in the test document there are anchors for whole paragraphs --> the whole P appear as link
                content.find('a').each(function() {
                    if ( typeof jQuery(this).attr('href') === 'undefined' ) {
                        jQuery(this).contents().unwrap();
                    }
                });
            },

            /**
             * Remove all comments
             * @param content
             */
            removeComments: function( content ) {
                var that = this;

                // ok, remove all comments
                content.contents().each(function() {
                    if (this.nodeType == 8) {
                        jQuery(this).remove();
                    } else {
                        // do recursion
                        that.removeComments(jQuery(this));
                    }
                });
            },

            /**
             * Remove some unwanted tags from content pasted
             * @param content
             */
            unwrapTags: function( content ) {
                var that = this;

                // Note: we exclude all elements (they will be spans) here, that have the class aloha-wai-lang
                // TODO find a better solution for this (e.g. invent a more generic aloha class for all elements, that are
                // somehow maintained by aloha, and are therefore allowed)
                content.find('span,font,div').not('.aloha-wai-lang').each(function() {
                    if (this.nodeName == 'DIV') {
                        // safari and chrome cleanup for plain text paste with working linebreaks
                        if (this.innerHTML == '<br>') {
                            jQuery(this).contents().unwrap();
                        } else {
                            jQuery( Aloha.Markup.transformDomObject(jQuery(this), 'p').append('<br>') ).contents().unwrap();
                        }
                    } else {
                        jQuery(this).contents().unwrap();
                    }
                });
            },

            /**
             * Remove styles
             * @param content
             */
            removeStyles: function( content ) {
                var that = this;

                // completely remove style tags
                content.children('style').filter(function() {
                    return this.contentEditable != 'false';
                }).remove();

                // remove style attributes and classes
                content.children().filter(function() {
                    return this.contentEditable != 'false';
                }).each(function() {
                    jQuery(this).removeAttr('style').removeClass();
                    that.removeStyles(jQuery(this));
                });
            },

            /**
             * Remove all elements which are in different namespaces
             * @param content
             */
            removeNamespacedElements: function( content ) {
                // get all elements
                content.find('*').each(function() {
                    // try to determine the namespace prefix ('prefix' works for W3C
                    // compliant browsers, 'scopeName' for IE)
                    var nsPrefix = this.prefix ? this.prefix
                            : (this.scopeName ? this.scopeName : undefined);
                    // when the prefix is set (and different from 'HTML'), we remove the element
                    if ((nsPrefix && nsPrefix != 'HTML') || this.nodeName.indexOf(':') >= 0 ) {
                        var $this = jQuery(this), $contents = $this.contents();
                        if ($contents.length) {
                            // the element has contents, so unwrap the contents
                            $contents.unwrap();
                        } else {
                            // the element is empty, so remove it
                            $this.remove();
                        }
                    }
                });
            }
        });

        return GenericContentHandler;
    });
/EpistaSim_Linux-1.1.0.tar.gz/EpistaSim_Linux-1.1.0/BackWard.py
import random import math from collections import defaultdict import sys,os,time from optparse import OptionParser from tempfile import gettempdir try: import numpy except ImportError: print >>sys.stderr, "No numpy module" def Hapftrac(hapfre,Shap,haplotype,deltfre,N): trac={} trac.setdefault(0,{})["00"]=hapfre[0] trac.setdefault(0,{})["01"]=hapfre[1] trac.setdefault(0,{})["10"]=hapfre[2] trac.setdefault(0,{})["11"]=hapfre[3] T=1 k=haplotype.index(Shap) frequency=hapfre temphap=[] for ele in haplotype: if ele != Shap: temphap.append(ele) while 1: trac.setdefault(T,{})[Shap]=frequency[k]-deltfre[k]+random.normalvariate(0, 1)*math.sqrt(frequency[k]*(1-frequency[k])/N) i=2 while i >= 0: if i==2: if 1-frequency[k] == 0: y=deltfre[haplotype.index(temphap[i])] else: x=frequency[haplotype.index(temphap[i])]/(1-frequency[k]) if int(N-frequency[k]*N) == 0 or x >= 1: y=x+deltfre[haplotype.index(temphap[i])] else: y=x+deltfre[haplotype.index(temphap[i])]+random.normalvariate(0, 1)*math.sqrt(x*(1-x)/int(N-frequency[k]*N)) trac.setdefault(T,{})[temphap[i]]=y*(1-frequency[k]) elif i ==1: allp=1-frequency[k]-frequency[haplotype.index(temphap[2])] if allp == 0: y=deltfre[haplotype.index(temphap[i])] else: num=int(N-frequency[k]*N-frequency[haplotype.index(temphap[2])]*N) x=frequency[haplotype.index(temphap[i])]/allp if num == 0 or x >= 1: y=x+deltfre[haplotype.index(temphap[i])] else: y=x+deltfre[haplotype.index(temphap[i])]+random.normalvariate(0, 1)*math.sqrt(x*(1-x)/num) trac.setdefault(T,{})[temphap[i]]=y*allp else: fre=1 for key in trac[T].keys(): fre=fre-trac[T][key] trac.setdefault(T,{})[temphap[i]]=fre i=i-1 if trac[T][Shap] <= 1/float(N): trac[T][Shap]=0 valuesum=sum(trac[T].values()) for ele in trac[T].keys(): if trac[T][ele] == max(trac[T].values()): trac[T][ele]= trac[T][ele]-valuesum+1 break elif min(trac[T].values()) < 0: for ele in trac[T].keys(): if trac[T][ele] < 0: trac[T][ele] = 0 valuesum=sum(trac[T].values()) for ele in trac[T].keys(): trac[T][ele]=trac[T][ele]/float(valuesum) frequency=[trac[T]["00"],trac[T]["01"],trac[T]["10"],trac[T]["11"]] elif max(trac[T].values()) == 1: break else: frequency=[trac[T]["00"],trac[T]["01"],trac[T]["10"],trac[T]["11"]] T=T+1 return trac ##coalescent probability def Pca(n,N): Coa_pro=(n*(n-1))/float(4*N) return Coa_pro ## selective model: Two Locus Coalescent with selective ##coalescent probability def Hapcoalescet(n,N,deltt,T,Haptrac,theta,haplotype,Shap,r,region): #colascent in 00,01,10,11 #n=[int(nsam*Haptrac[t]["00"]),int(nsam*Haptrac[t]["01"]),int(nsam*Haptrac[t]["10"]),int(nsam*Haptrac[t]["11"])] if n[haplotype.index(Shap)]==0: fre=[] for ele in n: if ele !=0: fre.append(ele/float(sum(n))) else: fre.append(float(0)) else: fre=[Haptrac[deltt+T]["00"],Haptrac[deltt+T]["01"],Haptrac[deltt+T]["10"],Haptrac[deltt+T]["11"]] hapcoa=[] i=0 for i in range(len(n)): if n[i] > 1: hapcoa.append((1-r*len(region))*n[i]*(n[i]-1)/float(4*N*fre[i])) else: hapcoa.append(0) return hapcoa def Haprecombiantion(n,N,deltt,T,Haptrac,theta,haplotype,Shap): #recombination result in 00,01,10,11 #n=[int(nsam*Haptrac[t]["00"]),int(nsam*Haptrac[t]["01"]),int(nsam*Haptrac[t]["10"]),int(nsam*Haptrac[t]["11"])] if n[haplotype.index(Shap)]==0: fre=[] for ele in n: if ele !=0: fre.append(ele/float(sum(n))) else: fre.append(float(0)) Num=[int(round(N*fre[0])),int(round(N*fre[1])),int(round(N*fre[2])),int(round(N*fre[3]))] else: fre=[Haptrac[deltt+T]["00"],Haptrac[deltt+T]["01"],Haptrac[deltt+T]["10"],Haptrac[deltt+T]["11"]] 
Num=[int(round(N*Haptrac[deltt+T]["00"])),int(round(N*Haptrac[deltt+T]["01"])),int(round(N*Haptrac[deltt+T]["10"])),int(round(N*Haptrac[deltt+T]["11"]))] r0000=n[0]*theta*(Num[0]-n[0])*(Num[0]-n[0]-1)/float(2)/float(2*N*(2*N-1)) r0001=n[0]*theta*(Num[0]-n[0])*(Num[1]-n[1])/float(2*N*(2*N-1)) r0010=n[0]*theta*(Num[0]-n[0])*(Num[2]-n[2])/float(2*N*(2*N-1)) r0110=n[0]*theta*(Num[1]-n[1])*(Num[2]-n[2])/float(2*N*(2*N-1)) r00=[r0000,r0001,r0010,r0110] r0101=n[1]*theta*(Num[1]-n[1])*(Num[1]-n[1]-1)/float(2)/float(2*N*(2*N-1)) r0001=n[1]*theta*(Num[0]-n[0])*(Num[1]-n[1])/float(2*N*(2*N-1)) r0011=n[1]*theta*(Num[0]-n[0])*(Num[3]-n[3])/float(2*N*(2*N-1)) r0111=n[1]*theta*(Num[1]-n[1])*(Num[3]-n[3])/float(2*N*(2*N-1)) r01=[r0101,r0001,r0011,r0111] r1010=n[2]*theta*(Num[2]-n[2])*(Num[2]-n[2]-1)/float(2)/float(2*N*(2*N-1)) r0010=n[2]*theta*(Num[0]-n[0])*(Num[2]-n[2])/float(2*N*(2*N-1)) r0011=n[2]*theta*(Num[0]-n[0])*(Num[3]-n[3])/float(2*N*(2*N-1)) r1011=n[2]*theta*(Num[2]-n[2])*(Num[3]-n[3])/float(2*N*(2*N-1)) r10=[r1010,r0010,r0011,r1011] r1111=n[3]*theta*(Num[3]-n[3])*(Num[3]-n[3]-1)/float(2)/float(2*N*(2*N-1)) r0110=n[3]*theta*(Num[1]-n[1])*(Num[2]-n[2])/float(2*N*(2*N-1)) r0111=n[3]*theta*(Num[1]-n[1])*(Num[3]-n[3])/float(2*N*(2*N-1)) r1011=n[3]*theta*(Num[2]-n[2])*(Num[3]-n[3])/float(2*N*(2*N-1)) r11=[r1111,r0110,r0111,r1011] hapreco=[sum(r00),sum(r01),sum(r10),sum(r11)] return hapreco,r00,r01,r10,r11 def Hapmutation(n,Haptrac,mu,deltt,T,haplotype,Shap,N): #mutation result in 01,10,11 #n=[int(nsam*Haptrac[t]["00"]),int(nsam*Haptrac[t]["01"]),int(nsam*Haptrac[t]["10"]),int(nsam*Haptrac[t]["11"])] if n[haplotype.index(Shap)]==0: fre=[] for ele in n: if ele !=0: fre.append(ele/float(sum(n))) else: fre.append(float(0)) Num=[int(round(N*fre[0])),int(round(N*fre[1])),int(round(N*fre[2])),int(round(N*fre[3]))] else: Num=[int(round(N*Haptrac[deltt+T]["00"])),int(round(N*Haptrac[deltt+T]["01"])),int(round(N*Haptrac[deltt+T]["10"])),int(round(N*Haptrac[deltt+T]["11"]))] hapmut=[] if Num[0] > 0: m0100=mu*Num[1]*n[0]/float(Num[0]) m1000=mu*Num[2]*n[0]/float(Num[0]) m1100=mu*mu*Num[3]*n[0]/float(Num[0]) else: m0100=0 m1000=0 m1100=0 m00=[m0100,m1000,m1100] if Num[1] > 0: m0001=mu*n[1]*Num[0]/float(Num[1]) m1001=mu*mu*n[1]*Num[2]/float(Num[1]) m1101=mu*n[1]*Num[3]/float(Num[1]) else: m0001=0 m1001=0 m1101=0 m01=[m0001,m1001,m1101] if Num[2] > 0: m0010=mu*n[2]*Num[0]/float(Num[2]) m0110=mu*mu*n[2]*Num[1]/float(Num[2]) m1110=mu*n[2]*Num[3]/float(Num[2]) else: m0010=0 m0110=0 m1110=0 m10=[m0010,m0110,m1110] if Num[3] > 0: m0011=mu**2*Num[0]*n[3]/float(Num[3]) m0111=mu*Num[1]*n[3]/float(Num[3]) m1011=mu*Num[2]*n[3]/float(Num[3]) else: m0011=0 m0111=0 m1011=0 m11=[m0011,m0111,m1011] hapmut=[m0100,m1000,m1100,m0001,m1001,m1101,m0010,m0110,m1110,m0011,m0111,m1011] return hapmut,m00,m01,m10,m11 def Hapbackward(nsam,N,mu,Haptrac,theta,hapfre,Shap,locus1,locus2,haplotype,region,Shapfrequency,r,logout): sample={} n=[int(round(nsam*hapfre[0])),int(round(nsam*hapfre[1])),int(round(nsam*hapfre[2])),int(round(nsam*hapfre[3]))] if sum(n) > nsam: n[n.index(max(n))]=n[n.index(max(n))]-1 active00=range(1,n[0]+1) for ele in active00: sample[ele]="00" active01=range(n[0]+1,sum(n[0:2])+1) for ele in active01: sample[ele]="01" active10=range(sum(n[0:2])+1,sum(n[0:3])+1) for ele in active10: sample[ele]="10" active11=range(sum(n[0:3])+1,sum(n)+1) for ele in active11: sample[ele]="11" shapfre=n[haplotype.index(Shap)]/float(sum(n)) T=0 time=0 #the right hand locus rightlengths={} rightoffsprings={} rightparent={} #the left hand locus 
leftlengths={} leftoffsprings={} leftparent={} for i in range(1,sum(n)+1): rightlengths[i]=0 rightoffsprings[i]=1 rightparent[i]=0 leftlengths[i]=0 leftoffsprings[i]=1 leftparent[i]=0 #offspring nodes for each node activePairs=range(1,sum(n)+1) activeLeft=[] activeRight=[] nextNode=sum(n)+1 recon=[0,0,0,0] n=[0,0,0,0] for ele in activePairs: if sample[ele] == "00": recon[0]=recon[0]+1 n[0]=n[0]+1 elif sample[ele] == "01": recon[1] = recon[1]+1 n[1]=n[1]+1 elif sample[ele] == "10": recon[2] = recon[2]+1 n[2]=n[2]+1 else: recon[3] = recon[3]+1 n[3]=n[3]+1 for ele in activeLeft: if sample[ele] == "00": n[0]=n[0]+1 elif sample[ele] == "01": n[1]=n[1]+1 elif sample[ele] == "10": n[2]=n[2]+1 else: n[3]=n[3]+1 for ele in activeRight: if sample[ele] == "00": n[0]=n[0]+1 elif sample[ele] == "01": n[1]=n[1]+1 elif sample[ele] == "10": n[2]=n[2]+1 else: n[3]=n[3]+1 while len(activePairs)+max(len(activeLeft),len(activeRight)) > 1: # compute the probability of recombination activen=len(activePairs)+len(activeLeft)+len(activeRight) if T == max(Haptrac.keys()): deltt = 0 else : deltt = 1 (hapreco,r00,r01,r10,r11)=Haprecombiantion(recon,N,deltt,T,Haptrac,theta,haplotype,Shap) recombination=sum(hapreco)*len(activePairs) # compute the probability of coalescent hapcoa=Hapcoalescet(n,N,deltt,T,Haptrac,theta,haplotype,Shap,r,region) coalescent=sum(hapcoa) # probability of mutation (hapmut,m00,m01,m10,m11)=Hapmutation(n,Haptrac,mu,deltt,T,haplotype,Shap,N) mutation=sum(hapmut)*activen prob=recombination+coalescent+mutation t=random.expovariate(prob) timet=1 time=time+timet log=str(len(activePairs))+' '+str(len(activeLeft))+' '+str(len(activeRight))+" After "+str(t)+" times:" print >>logout,log #update times for all nodes indexRight=[] if len(activeRight) > 0: indexRight.extend(activeRight) if len(activePairs) > 0: indexRight.extend(activePairs) for ele in indexRight: rightlengths[ele]=rightlengths[ele]+timet indexLeft=[] if len(activeLeft) > 0: indexLeft.extend(activeLeft) if len(activePairs) > 0: indexLeft.extend(activePairs) for ele in indexLeft: leftlengths[ele]=leftlengths[ele]+timet #determine the events was coalescent or recombination event? 
event=list(numpy.random.multinomial(1,[recombination/prob,coalescent/prob,mutation/prob])) #coalescent event if event.index(1)==1: active00=[] active01=[] active10=[] active11=[] for ele in activePairs: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) for ele in activeRight: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) for ele in activeLeft: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) #pick two nodes at random coalescent #active=[] #if len(activeLeft) > 0: # active.extend(activeLeft) #if len(activeRight) > 0: # active.extend(activeRight) #if len(activePairs) > 0: # active.extend(activePairs) coaprob=[hapcoa[0]/sum(hapcoa),hapcoa[1]/sum(hapcoa),hapcoa[2]/sum(hapcoa),hapcoa[3]/sum(hapcoa)] coalescenttype=list(numpy.random.multinomial(1,coaprob)) if coalescenttype.index(1)==0: coasample=random.sample(active00,2) sample[nextNode]="00" # n[0]=n[0]+1 for ele in coasample: if ele in active00: active00.remove(ele) # n[0]=n[0]-1 elif coalescenttype.index(1)==1: coasample=random.sample(active01,2) sample[nextNode]="01" # n[1]=n[1]+1 for ele in coasample: if ele in active01: active01.remove(ele) # n[1]=n[1]-1 elif coalescenttype.index(1)==2: coasample=random.sample(active10,2) sample[nextNode]="10" # n[2]=n[2]+1 for ele in coasample: if ele in active10: active10.remove(ele) # n[2]=n[2]-1 else: coasample=random.sample(active11,2) sample[nextNode]="11" # n[3]=n[3]+1 for ele in coasample: if ele in active11: active11.remove(ele) # n[3]=n[3]-1 log="Nodes "+str(coasample[0])+" and "+str(coasample[1])+' coalescent into '+str(nextNode) print >>logout,log #assign parent node for ele in coasample: rightparent[ele]=nextNode leftparent[ele]=nextNode #creat ancestor node rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 leftoffsprings[nextNode]=0 for ele in coasample: rightoffsprings[nextNode]=rightoffsprings[nextNode]+rightoffsprings[ele] leftoffsprings[nextNode]=leftoffsprings[nextNode]+leftoffsprings[ele] #delete coalescent node in list activePairs=list(set(activePairs).difference(set(coasample))) activeLeft=list(set(activeLeft).difference(set(coasample))) activeRight=list(set(activeRight).difference(set(coasample))) #add new ancestor node to list if rightoffsprings[nextNode]==0: activeLeft.append(nextNode) elif leftoffsprings[nextNode]==0: activeRight.append(nextNode) else: activePairs.append(nextNode) ##check for special situation where one locus finishes coalescent nextNode=nextNode+1 #recombination event elif event.index(1)==0: #pick one node at random to recombine active00=[] active01=[] active10=[] active11=[] for ele in activePairs: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) recprob=[hapreco[0]/sum(hapreco),hapreco[1]/sum(hapreco),hapreco[2]/sum(hapreco),hapreco[3]/sum(hapreco)] recomtype=list(numpy.random.multinomial(1,recprob)) if recomtype.index(1)==2: recomsample=random.sample(active10,1)[0] recomparent=list(numpy.random.multinomial(1,[r10[0]/sum(r10),r10[1]/sum(r10),r10[2]/sum(r10),r10[3]/sum(r10)])) active10.remove(recomsample) # n[2]=n[2]-1 if recomparent.index(1)==0: 
sample[nextNode]="10" sample[nextNode+1]="10" # n[2]=n[2]+2 elif recomparent.index(1)==1: sample[nextNode]="00" sample[nextNode+1]="10" # n[2]=n[2]+1 # n[0]=n[0]+1 elif recomparent.index(1)==2: sample[nextNode]="00" sample[nextNode+1]="11" # n[0]=n[0]+1 # n[3]=n[3]+1 else: sample[nextNode]="10" sample[nextNode+1]="11" # n[2]=n[2]+1 # n[3]=n[3]+1 elif recomtype.index(1)==0: recomsample=random.sample(active00,1)[0] recomparent=list(numpy.random.multinomial(1,[r00[0]/sum(r00),r00[1]/sum(r00),r00[2]/sum(r00),r00[3]/(sum(r00))])) active00.remove(recomsample) # n[0]=n[0]-1 if recomparent.index(1)==0: sample[nextNode]="00" sample[nextNode+1]="00" # n[0]=n[0]+2 elif recomparent.index(1)==1: sample[nextNode]="00" sample[nextNode+1]="01" # n[0]=n[0]+1 # n[1]=n[1]+1 elif recomparent.index(1)==2: sample[nextNode]="00" sample[nextNode+1]="10" # n[0]=n[0]+1 # n[2]=n[2]+1 else: sample[nextNode]="01" sample[nextNode+1]="10" # n[1]=n[1]+1 # n[2]=n[2]+1 elif recomtype.index(1)==1: recomsample=random.sample(active01,1)[0] recomparent=list(numpy.random.multinomial(1,[r01[0]/sum(r01),r01[1]/sum(r01),r01[2]/sum(r01),r01[3]/(sum(r01))])) active01.remove(recomsample) # n[1]=n[1]-1 if recomparent.index(1)==0: sample[nextNode]="01" sample[nextNode+1]="01" # n[1]=n[1]+2 elif recomparent.index(1)==1: sample[nextNode]="00" sample[nextNode+1]="01" # n[0]=n[0]+1 # n[1]=n[1]+1 elif recomparent.index(1)==2: sample[nextNode]="00" sample[nextNode+1]="11" # n[0]=n[0]+1 # n[3]=n[3]+1 else: sample[nextNode]="01" sample[nextNode+1]="11" # n[1]=n[1]+1 # n[3]=n[3]+1 else: recomsample=random.sample(active11,1)[0] recomparent=list(numpy.random.multinomial(1,[r11[0]/sum(r11),r11[1]/sum(r11),r11[2]/sum(r11),r11[3]/(sum(r11))])) active11.remove(recomsample) # n[3]=n[3]-1 if recomparent.index(1)==0: sample[nextNode]="11" sample[nextNode+1]="11" # n[3]=n[3]+2 elif recomparent.index(1)==1: sample[nextNode]="01" sample[nextNode+1]="10" # n[1]=n[1]+1 # n[2]=n[2]+1 elif recomparent.index(1)==2: sample[nextNode]="01" sample[nextNode+1]="11" # n[1]=n[1]+1 # n[3]=n[3]+1 else: sample[nextNode]="10" sample[nextNode+1]="11" # n[2]=n[2]+1 # n[3]=n[3]+1 log="Node "+str(recomsample)+' splits into '+str(nextNode)+" and "+str(nextNode+1) print >>logout, log #assign different ancestor to recombinant rightparent[recomsample]=nextNode leftparent[recomsample]=nextNode+1 activeRight.append(nextNode) activeLeft.append(nextNode+1) #create ancestor for right portion rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=rightoffsprings[recomsample] leftoffsprings[nextNode]=0 nextNode=nextNode+1 #create ancestor for left portion rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 leftoffsprings[nextNode]=leftoffsprings[recomsample] nextNode=nextNode+1 #remove recombinant node from active list activePairs.remove(recomsample) ##mutation event else: active00=[] active01=[] active10=[] active11=[] for ele in activePairs: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) for ele in activeRight: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: active11.append(ele) for ele in activeLeft: if sample[ele]=="00": active00.append(ele) elif sample[ele]=="01": active01.append(ele) elif sample[ele]=="10": active10.append(ele) else: 
active11.append(ele) mutprob=[sum(m00)/sum(hapmut),sum(m01)/sum(hapmut),sum(m10)/sum(hapmut),sum(m11)/sum(hapmut)] muttype=list(numpy.random.multinomial(1,mutprob)) if muttype.index(1)==0: mprob=[m00[0]/sum(m00),m00[1]/sum(m00),m00[2]/sum(m00)] type=list(numpy.random.multinomial(1,mprob)) mutsample=random.sample(active00,1)[0] active00.remove(mutsample) if type.index(1) == 0: active01.append(nextNode) sample[nextNode]="01" elif type.index(1) == 1: active10.append(nextNode) sample[nextNode]="10" else: active11.append(nextNode) sample[nextNode]="11" # n[0]=n[0]+1 # n[1]=n[1]-1 elif muttype.index(1)==1: mprob=[m01[0]/sum(m01),m01[1]/sum(m01),m01[2]/sum(m01)] type=list(numpy.random.multinomial(1,mprob)) mutsample=random.sample(active01,1)[0] active01.remove(mutsample) if type.index(1) == 0: active00.append(nextNode) sample[nextNode]="00" elif type.index(1) == 1: active10.append(nextNode) sample[nextNode]="10" else: active11.append(nextNode) sample[nextNode]="11" # n[0]=n[0]+1 # n[2]=n[2]-1 elif muttype.index(1)==2: mprob=[m10[0]/sum(m10),m10[1]/sum(m10),m10[2]/sum(m10)] type=list(numpy.random.multinomial(1,mprob)) mutsample=random.sample(active10,1)[0] active10.remove(mutsample) if type.index(1) == 0: active00.append(nextNode) sample[nextNode]="00" elif type.index(1) == 1: active01.append(nextNode) sample[nextNode]="01" else: active11.append(nextNode) sample[nextNode]="11" # n[0]=n[0]+1 # n[3]=n[3]-1 else: mprob=[m11[0]/sum(m11),m11[1]/sum(m11),m11[2]/sum(m11)] type=list(numpy.random.multinomial(1,mprob)) mutsample=random.sample(active11,1)[0] active11.remove(mutsample) if type.index(1) == 0: active00.append(nextNode) sample[nextNode]="00" elif type.index(1) == 1: active01.append(nextNode) sample[nextNode]="01" else: active10.append(nextNode) sample[nextNode]="10" # n[2]=n[2]+1 # n[3]=n[3]-1 log="Node "+str(mutsample)+' was mutated by '+str(nextNode) print >> logout, log rightparent[mutsample]=nextNode leftparent[mutsample]=nextNode #creat ancestor node rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 rightoffsprings[nextNode]=rightoffsprings[nextNode]+rightoffsprings[mutsample] leftoffsprings[nextNode]=0 leftoffsprings[nextNode]=leftoffsprings[nextNode]+leftoffsprings[mutsample] #delete mutation node activePairs=list(set(activePairs).difference(set([mutsample]))) activeLeft=list(set(activeLeft).difference(set([mutsample]))) activeRight=list(set(activeRight).difference(set([mutsample]))) #add new ancestor node to list if rightoffsprings[nextNode]==0: activeLeft.append(nextNode) elif leftoffsprings[nextNode]==0: activeRight.append(nextNode) else: activePairs.append(nextNode) nextNode=nextNode+1 if len(activePairs)==0: if len(activeRight)==1: activeRight=[] if len(activeLeft)==1: activeLeft=[] if len(activePairs)==1: if len(activeRight) !=0: activeRight.extend(activePairs) activePairs=[] if len(activeLeft)!=0: activeLeft.extend(activePairs) activePairs=[] activen=len(activePairs)+len(activeLeft)+len(activeRight) if activen > 0: recon=[0,0,0,0] n=[0,0,0,0] for ele in activePairs: if sample[ele] == "00": recon[0]=recon[0]+1 n[0]=n[0]+1 elif sample[ele] == "01": recon[1] = recon[1]+1 n[1]=n[1]+1 elif sample[ele] == "10": recon[2] = recon[2]+1 n[2]=n[2]+1 else: recon[3] = recon[3]+1 n[3]=n[3]+1 for ele in activeLeft: if sample[ele] == "00": n[0]=n[0]+1 elif sample[ele] == "01": n[1]=n[1]+1 elif sample[ele] == "10": n[2]=n[2]+1 else: n[3]=n[3]+1 for ele in activeRight: if sample[ele] == "00": n[0]=n[0]+1 elif sample[ele] == 
"01": n[1]=n[1]+1 elif sample[ele] == "10": n[2]=n[2]+1 else: n[3]=n[3]+1 shapfre=n[haplotype.index(Shap)]/float(sum(n)) diff=[] for ele in Shapfrequency: diff.append(abs(ele-shapfre)) value=min(diff) T=diff.index(value) return leftparent,leftlengths,rightparent,rightlengths,sample,time def Twolocusneutral(nsam,N,r,mu,region,hapfre,logout): sample={} n=[int(round(nsam*hapfre[0])),int(round(nsam*hapfre[1])),int(round(nsam*hapfre[2])),int(round(nsam*hapfre[3]))] if sum(n) > nsam: n[n.index(max(n))]=n[n.index(max(n))]-1 active00=range(1,n[0]+1) for ele in active00: sample[ele]="00" active01=range(n[0]+1,sum(n[0:2])+1) for ele in active01: sample[ele]="01" active10=range(sum(n[0:2])+1,sum(n[0:3])+1) for ele in active10: sample[ele]="10" active11=range(sum(n[0:3])+1,sum(n)+1) for ele in active11: sample[ele]="11" #the right hand locus time=0 rightlengths={} rightoffsprings={} rightparent={} #the left hand locus leftlengths={} leftoffsprings={} leftparent={} for i in range(1,nsam+1): rightlengths[i]=0 rightoffsprings[i]=1 rightparent[i]=0 leftlengths[i]=0 leftoffsprings[i]=1 leftparent[i]=0 #offspring nodes for each node activePairs=range(1,nsam+1) activeLeft=[] activeRight=[] nextNode=nsam+1 while len(activePairs)+max(len(activeLeft),len(activeRight)) > 1: # compute the probability of recombination activen=len(activePairs)+len(activeLeft)+len(activeRight) Rec_pro=r*len(region)*len(activePairs) mut_pro=mu*len(region)*activen # compute the probability of coalescent pca=Pca(activen,N) # calculate the time to next event (recombination or coalescent) prob=Rec_pro+pca+mut_pro t=random.expovariate(prob) timet=1 time=time+timet log=str(len(activePairs))+' '+str(len(activeLeft))+' '+str(len(activeRight))+" After "+str(t)+" generations:" print >>logout,log #update times for all nodes indexRight=[] if len(activeRight) > 0: indexRight.extend(activeRight) if len(activePairs) > 0: indexRight.extend(activePairs) for ele in indexRight: rightlengths[ele]=rightlengths[ele]+timet indexLeft=[] if len(activeLeft) > 0: indexLeft.extend(activeLeft) if len(activePairs) > 0: indexLeft.extend(activePairs) for ele in indexLeft: leftlengths[ele]=leftlengths[ele]+timet #determine the events was coalescent or recombination event? 
event=list(numpy.random.multinomial(1,[Rec_pro/prob,pca/prob,mut_pro/prob])) if event.index(1)==1: #coalescent event #pick two nodes at random coalescent active=[] if len(activeLeft) > 0: active.extend(activeLeft) if len(activeRight) > 0: active.extend(activeRight) if len(activePairs) > 0: active.extend(activePairs) coalescent=random.sample(active,2) log="Nodes "+str(coalescent[0])+" and "+str(coalescent[1])+' coalescent into '+str(nextNode) print >>logout,log #assign parent node overlapRight=list(set(coalescent).intersection(set(indexRight))) for ele in overlapRight: rightparent[ele]=nextNode overlapLeft=list(set(coalescent).intersection(set(indexLeft))) for ele in overlapLeft: leftparent[ele]=nextNode #creat ancestor node rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 for ele in overlapRight: rightoffsprings[nextNode]=rightoffsprings[nextNode]+rightoffsprings[ele] leftoffsprings[nextNode]=0 for ele in overlapLeft: leftoffsprings[nextNode]=leftoffsprings[nextNode]+leftoffsprings[ele] #delete coalescent node in list activePairs=list(set(activePairs).difference(set(coalescent))) activeLeft=list(set(activeLeft).difference(set(coalescent))) activeRight=list(set(activeRight).difference(set(coalescent))) #add new ancestor node to list if rightoffsprings[nextNode]==0: activeLeft.append(nextNode) elif leftoffsprings[nextNode]==0: activeRight.append(nextNode) else: activePairs.append(nextNode) nextNode=nextNode+1 elif event.index(1) == 0: #recombination event #pick one node at random to recombine if len(activePairs)==1: recombinant=activePairs[0] else: recombinant=random.sample(activePairs,1)[0] log="Node "+str(recombinant)+' splits into '+str(nextNode)+" and "+str(nextNode+1) print >>logout,log #assign different ancestor to recombinant rightparent[recombinant]=nextNode leftparent[recombinant]=nextNode+1 activeRight.append(nextNode) activeLeft.append(nextNode+1) #create ancestor for right portion rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=rightoffsprings[recombinant] leftoffsprings[nextNode]=0 nextNode=nextNode+1 #create ancestor for left portion rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 leftoffsprings[nextNode]=leftoffsprings[recombinant] nextNode=nextNode+1 #remove recombinant node from active list activePairs.remove(recombinant) else: active=[] if len(activeLeft) > 0: active.extend(activeLeft) if len(activeRight) > 0: active.extend(activeRight) if len(activePairs) > 0: active.extend(activePairs) mutationsample=random.sample(active,1)[0] log="Nodes "+str(mutationsample)+' mutated by '+str(nextNode) print >>logout,log #assign parent node rightparent[mutationsample]=nextNode leftparent[mutationsample]=nextNode #creat ancestor node rightparent[nextNode]=0 leftparent[nextNode]=0 rightlengths[nextNode]=0 leftlengths[nextNode]=0 rightoffsprings[nextNode]=0 rightoffsprings[nextNode]=rightoffsprings[nextNode]+rightoffsprings[mutationsample] leftoffsprings[nextNode]=0 leftoffsprings[nextNode]=leftoffsprings[nextNode]+leftoffsprings[mutationsample] #delete coalescent node in list activePairs=list(set(activePairs).difference(set([mutationsample]))) activeLeft=list(set(activeLeft).difference(set([mutationsample]))) activeRight=list(set(activeRight).difference(set([mutationsample]))) activePairs.append(nextNode) nextNode=nextNode+1 ##check for special situation where one 
locus finishes coalescent if len(activePairs)==0: if len(activeRight)==1: activeRight=[] if len(activeLeft)==1: activeLeft=[] if len(activePairs)==1: if len(activeRight) !=0: activeRight.extend(activePairs) activePairs=[] if len(activeLeft)!=0: activeLeft.extend(activePairs) activePairs=[] return leftparent,leftlengths,rightparent,rightlengths,sample,time def Tree(lengths,parent,nsam): #determine parent nodes Tree={} Nodes=sorted(parent.keys(),reverse=True) parentNodes=range(Nodes[0],nsam-1,-1) for ele in parentNodes: #determin sun nodes and length of each parent node for key, value in parent.iteritems(): if value == ele: Tree.setdefault(ele,{})[key]=lengths[key] return Tree def SepMutation(time,position,leftTree,rightTree,locus1,locus2): left=sorted(leftTree.keys(),reverse=True) right=sorted(rightTree.keys(),reverse=True) parent=sorted(list(set(left).union(set(right))),reverse=True) cutoff=locus1+abs(locus1-locus2)/float(2) i=0 offspring={} while i < len(parent): Start=parent[i] if Start in left: activeLeft=sorted(leftTree[Start].keys(),reverse=True) else: activeLeft=[] if Start in right: activeRight=sorted(rightTree[Start].keys(),reverse=True) else: activeRight=[] if activeLeft == activeRight: for ele in activeLeft: delttime=max(leftTree[Start][ele],rightTree[Start][ele]) nseg=numpy.random.binomial(len(position),delttime/float(time)) position1 = random.sample(position,nseg) if offspring.has_key(Start): for key in offspring[Start].keys(): offspring.setdefault(ele,{})[key]=offspring[Start][key] for pos in position1: if offspring.has_key(Start): if pos in offspring[Start].keys(): offspring.setdefault(ele,{})[pos]=offspring[ele][pos]+1 else: offspring.setdefault(ele,{})[pos]=1 else: offspring.setdefault(ele,{})[pos]=1 else: for ele in activeLeft: delttime=leftTree[Start][ele] nseg=numpy.random.binomial(len(position),delttime/float(time)) position1 = random.sample(position,nseg) position2=[] if offspring.has_key(Start): for pos in offspring[Start].keys(): if pos < cutoff: offspring.setdefault(ele,{})[pos]=offspring[Start][pos] for pos in position1: if ele < cutoff: position2.append(pos) for pos in position2: if offspring.has_key(Start): if pos in offspring[Start].keys(): offspring.setdefault(ele,{})[pos]=offspring[Start][pos]+1 else: offspring.setdefault(ele,{})[pos]=1 else: offspring.setdefault(ele,{})[pos]=1 for ele in activeRight: delttime=rightTree[Start][ele] nseg=numpy.random.binomial(len(position),delttime/float(time)) position1 = random.sample(position,nseg) position2=[] if offspring.has_key(Start): for pos in offspring[Start].keys(): if pos > cutoff: offspring.setdefault(ele,{})[pos]=offspring[Start][pos] for pos in position1: if ele > cutoff: position2.append(pos) for pos in position2: if offspring.has_key(Start): if pos in offspring[Start].keys(): offspring.setdefault(ele,{})[pos]=offspring[Start][pos]+1 else: offspring.setdefault(ele,{})[pos]=1 else: offspring.setdefault(ele,{})[pos]=1 i=i+1 return offspring def hapSequence(position,offmutation,nsam,locus1,locus2,sample,res): sampleId=range(1,nsam+1) out="//" print >> res,out out="Segsites: "+str(len(position)) print >>res,out out="Positions: " for ele in position: out=out+str(ele)+' ' print >>res, out for ID in sampleId: out="" #output sequence 0: ancestor, 1:derived for segsite in position: if segsite in offmutation[ID].keys(): if segsite == locus1: if sample[ID] == "00" or sample[ID] == "01" or offmutation[ID][segsite]%2 == 0: out=out+str(0) else: out=out+str(1) elif segsite == locus2: if sample[ID] == "00" or sample[ID] == 
"10" or offmutation[ID][segsite]%2 == 0: out=out+str(0) else: out=out+str(1) else: if offmutation[ID][segsite]%2 == 0: out=out+str(0) else: out=out+str(1) else: if segsite == locus1: if sample[ID] == "10" or sample[ID] == "11" : out=out+str(1) else: out=out+str(0) elif segsite == locus2: if sample[ID] == "01" or sample[ID] == "11" : out=out+str(1) else: out=out+str(0) else: out=out+str(0) print >>res, out def main(): usage = "usage: python %prog <-n sample_number> <-r replication_number> [...]" description = "two-SNPs locus evolution backward simulation of selection model. The order of haplotypes should follow 00,01,10 and 11. For example: python %prog -H 01, -s 0.01. If not input selected haplotype (-H) and selective coefficient (-s), neutral model was default." op = OptionParser(version="%prog 0.1",description=description,usage=usage,add_help_option=False) op.add_option("-h","--help",action="help", help="Show this help message and exit.") op.add_option("-n","--sample number",dest="nsam",type="int",default="30", help=" please input the number of simulated samples, default is 30 samples") op.add_option("-d","--duplication number",dest="nrep",type="int", default="1", help=" please input the replication number of simulated samples, default is 1") op.add_option("-l","--region length",dest="region",type="int", default="10000", help="please input the length of simulated region (bp), default is 10000") op.add_option("-t","--two locus",dest="two_locus",type="int", nargs=2, help="please input the position of selected two_locus,seprated by space. It would be random selected in the region if not specified ") op.add_option("-p","--haplotype frequency",dest="hapf",type="float", nargs=4, help="please input the frequency of haplotype of selected two_locus seprated by space. It followed the order 00, 01, 10 and 11 which 0 represented ancestor and 1 represented derived , the sum should be 1") op.add_option("-R","--recombination rate",dest="R",type="float", default="0.000000003", help="please input the recombination rate per site r where in 4Nr,defaule is 3*10**(-8)") op.add_option("-u","--mutation rate", dest="mu",type="float",default="0.000000003", help="please input the mutation rate per site u where in 4Nu,defaule is 3*10**(-8) ") op.add_option("-e","--segsites",dest="segsites",type="int", help="please input the number of segsites in the region, it would be randomly generated according to the length of region if not input ") op.add_option("-H","--selective advantage haplotype" , dest="H",type="str", help="please specified the haplotype which was selected in evolution,for example 10;") op.add_option("-S","--selection coefficient",dest="s",type="float", help="please input the select coefficient of haplotype which specified previous, default was 0 represent neutral model. ") op.add_option("-o","--outfilename",dest="outfilename",type="str",default="simulation.out", help="The file name of output file showing the simulation data including number of segsites, position of each segsite, positions of selected two-locus, allele of each sample") op.add_option("-f","--frequencyfilename",dest="frequencyfilename",type="str",default="Hapfre.trac", help="The file name of haplotype frequency") (options,args) = op.parse_args() if not options.hapf: print "Please input the haplotype frequency according to 00, 01, 10 and 11 which 0 represented ancestral and 1 was derived." 
op.print_help() sys.exit(1) haplotype=["00","01","10","11"] hapfre=[] for ele in list(options.hapf): hapfre.append(float(ele)) if float('%.4f'%sum(hapfre)) != 1: print "The sum of haplotype frequency is not equal 1!" sys.exit(1) if not options.H and not options.s: print "You did not specify the selected haplotype!" Shap=haplotype[hapfre.index(max(hapfre))] S=0 elif not options.H: print "Please input the haplotype corresponding to the selective coefficient, one from 00,01,10 and 11." sys.exit(1) elif not options.s: print "You did not input the selected coefficient of specificed haplotype." S=0 else: Shap=options.H S=options.s if Shap not in haplotype: print "The type of -H was error!" sys.exit(1) nsam = options.nsam nrep=options.nrep region=range(0,options.region) N=17469 mu=options.mu r=options.R outputfilename=options.outfilename if os.path.exists(outputfilename): os.remove(outputfilename) if nsam >=N: N = nsam outputfilename=options.outfilename if os.path.exists(outputfilename): os.remove(outputfilename) #------step 1: Backward tracjory of haplotype frequency in time----------- res=open(os.path.join(os.getcwd(),outputfilename),'wb') frequencyfilename=options.frequencyfilename trac=open(os.path.join(os.getcwd(),frequencyfilename),'wb') logout=open(os.path.join(os.getcwd(),"log.out"),'wb') for t in range(nrep): if not options.two_locus: two_locus=sorted(random.sample(region,2)) else: two_locus=[] for ele in list(options.two_locus): two_locus.append(int(ele)) two_locus=sorted(two_locus) theta=r*abs(two_locus[1]-two_locus[0]) v=mu/(len(hapfre)-1) hapfit=((1+S)-(1+S*hapfre[haplotype.index(Shap)]))/(1+S*hapfre[haplotype.index(Shap)]) deltfre=[] for ele in haplotype: if ele == Shap: mean=v-(mu+v)*hapfre[haplotype.index(ele)]+hapfit else: mean=v-(mu+v)*hapfre[haplotype.index(ele)] deltfre.append(mean) Haptrac=Hapftrac(hapfre,Shap,haplotype,deltfre,N) Shapfrequency=[] for key in sorted(Haptrac.keys()): Shapfrequency.append(Haptrac[key][Shap]) print "Print the track file of haplotype frequency" out="T"+"\t"+"00"+"\t"+"01"+"\t"+"10"+"\t"+"11" print >> trac,out for ele in Haptrac.keys(): out=str(ele)+"\t" for key in sorted(Haptrac[ele].keys()): out=out+str(Haptrac[ele][key])+"\t" print >> trac,out #-----------step 2: Coalescent process ---------------- locus1=two_locus[0] locus2=two_locus[1] print "Simulation the offspring" if S!=0: (leftparent, leftlengths,rightparent,rightlengths,sample,time)=Hapbackward(nsam,N,mu,Haptrac,theta,hapfre,Shap,locus1,locus2,haplotype,region,Shapfrequency,r,logout) else: (leftparent, leftlengths,rightparent,rightlengths,sample,time)=Twolocusneutral(nsam,N,r,mu,region,hapfre,logout) leftTree=Tree(leftlengths,leftparent,nsam) rightTree=Tree(rightlengths,rightparent,nsam) print "simulation the "+str(t)+"th replication" if not options.segsites: segsites = numpy.random.poisson(len(region)/100, 1)[0]+2 position=random.sample(region,segsites) if locus1 not in position: position.append(locus1) if locus2 not in position: position.append(locus2) else: segsites = options.segsites position=random.sample(region,segsites) if locus1 not in position: position.append(locus1) if locus2 not in position: position.append(locus2) l1=position.index(locus1) l2=position.index(locus2) positionindex=range(0,len(position)) positionindex.remove(l1) positionindex.remove(l2) if len(position) > segsites: rs=random.sample(positionindex,(len(position)-segsites)) for ele in rs: position.remove(position[ele]) position=sorted(position) 
offmutation=SepMutation(time,position,leftTree,rightTree,locus1,locus2) hapSequence(position,offmutation,nsam,locus1,locus2,sample,res) print " A region of "+str(options.region)+'bp include '+str(len(position))+" segsites were simulated with sample size "+str(nsam)+' for '+str(t+1)+' replication.' if __name__ == "__main__": try: main() except KeyboardInterrupt: sys.stderr.write("User interrupt me, see you!\n") sys.exit(0)
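# Hedged usage sketch (not part of the original script): a typical invocation
# of the CLI defined above; "simulate.py" stands in for this module's actual
# file name, which is not shown here.
#
#   python simulate.py -n 30 -d 1 -l 10000 -p 0.25 0.25 0.25 0.25 -H 10 -S 0.01 \
#       -o simulation.out -f Hapfre.trac
#
# -p gives the 00/01/10/11 haplotype frequencies (they must sum to 1); -H and -S
# select a haplotype and its selection coefficient, and omitting both runs the
# neutral model.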
PypiClean
/AutoDiff_jnrw-0.0.2-py3-none-any.whl/autodiff/forward/function.py
import numpy as np from .sym import DualNumber, Symbol, symbol, symbolic NumberTypes = (int, float, complex) # concatenation operator for vector functions def sum(v: 'List[Symbol]') -> Symbol: """ Summing elements of a list of Symbols :param v: list of Symbol objects :type v: list of Symbols :return: summation of elements :rtype: Symbol """ if len(v) == 1: return v[0] result = v[0] for num in v[1:]: result = result + num return result def prod(v: 'List[Symbol]') -> Symbol: """ Multiplying elements of a list of Symbols :param v: list of Symbol objects :type v: list of Symbols :return: multiplication of elements :rtype: Symbol """ if len(v) == 1: return v[0] if len(v) == 2: return v[0] * v[1] else: return v[0] * prod(v[1:]) # For one or two input functions # trigonometric functions @symbolic def sin(a: DualNumber) -> DualNumber: """ Sine operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after sine operation :rtype: Symbol """ real = np.sin(a.real) dual = np.cos(a.real) * a.dual return DualNumber(real, dual) @symbolic def cos(a: DualNumber) -> DualNumber: """ Cosine operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after cosine operation :rtype: Symbol """ real = np.cos(a.real) dual = -np.sin(a.real) * a.dual return DualNumber(real, dual) @symbolic def tan(a: DualNumber) -> DualNumber: """ Tangent operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after tangent operation :rtype: Symbol """ real = np.tan(a.real) dual = a.dual / (np.cos(a.real)) ** 2 return DualNumber(real, dual) @symbolic def arcsin(a: DualNumber) -> DualNumber: """ Arcsine operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after arcsine operation :rtype: Symbol """ real = np.arcsin(a.real) dual = a.dual / (1 - a.real ** 2) ** 0.5 return DualNumber(real, dual) @symbolic def arccos(a: DualNumber) -> DualNumber: """ Arccosine operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after arccosine operation :rtype: Symbol """ real = np.arccos(a.real) dual = -a.dual / (1 - a.real ** 2) ** 0.5 return DualNumber(real, dual) @symbolic def arctan(a: DualNumber) -> DualNumber: """ Arctangent operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after arctangent operation :rtype: Symbol """ real = np.arctan(a.real) dual = a.dual / (1 + a.real ** 2) return DualNumber(real, dual) # Hyperbolic functions @symbolic def sinh(a: DualNumber) -> DualNumber: """ Sinh operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after sinh operation :rtype: Symbol """ real = np.sinh(a.real) dual = a.dual * np.cosh(a.real) return DualNumber(real, dual) @symbolic def cosh(a: DualNumber) -> DualNumber: """ Cosh operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after cosh operation :rtype: Symbol """ real = np.cosh(a.real) dual = a.dual * np.sinh(a.real) return DualNumber(real, dual) @symbolic def tanh(a: DualNumber) -> DualNumber: """ Tanh operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after tanh operation :rtype: Symbol """ real = np.tanh(a.real) dual = a.dual * (1 - real ** 2) return DualNumber(real, dual) # Natural functions @symbolic def sqrt(a: DualNumber) -> DualNumber: """ Square root operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after square root 
operation :rtype: Symbol """ real = np.sqrt(a.real) dual = a.dual / (2 * np.sqrt(a.real)) return DualNumber(real, dual) @symbolic def exp(a: DualNumber) -> DualNumber: """ Exponential operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after exponential operation :rtype: Symbol """ real = np.exp(a.real) dual = real * a.dual return DualNumber(real, dual) @symbolic def log(a: DualNumber) -> DualNumber: """ Natural logarithm operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after natural logarithm operation :rtype: Symbol """ real = np.log(a.real) dual = a.dual / a.real return DualNumber(real, dual) @symbolic def log10(a: DualNumber) -> DualNumber: """ Logarithm base 10 operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after logarithm base 10 operation :rtype: Symbol """ real = np.log10(a.real) dual = a.dual / (np.log(10) * a.real) return DualNumber(real, dual) # define a log function with user defined base def log_base(a: Symbol, base): """ Logarithm base 'base' operation on Symbol object :param float base: base number of type float :param a: input dual number :type a: Symbol :return: dual number after logarithm base 'base' operation :rtype: Symbol """ if isinstance(base, Symbol): return log(a) / log(base) elif isinstance(base, NumberTypes): if base <= 0: raise ValueError('Base cannot be lower than 0') return log(a) / np.log(base) else: return NotImplemented # Activation functions @symbolic def sigmoid(a: DualNumber) -> DualNumber: """ Sigmoid operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after sigmoid operation :rtype: Symbol """ real = 1 / (1 + np.exp(- a.real)) dual = a.dual * real * (1 - real) return DualNumber(real, dual) @symbolic def ReLU(a: DualNumber) -> DualNumber: """ ReLU operation on Symbol object :param a: input dual number :type a: Symbol :return: dual number after ReLU operation :rtype: Symbol """ if a.real > 0: # create a copy of a return DualNumber(a.real, a.dual) if a.real <= 0: return DualNumber(0, 0)
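# Hedged usage sketch (not part of the original module). A DualNumber carries a
# (real, dual) pair, so seeding dual=1.0 makes the dual part of the result the
# derivative by the chain rule. This assumes the @symbolic-wrapped functions
# above also accept plain DualNumber inputs; check sym.py for the exact API.
if __name__ == "__main__":
    x = DualNumber(0.5, 1.0)   # value 0.5, derivative seed 1.0
    y = sin(x)
    print(y.real, y.dual)      # expected: sin(0.5), cos(0.5)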
PypiClean
/OLCTools-1.3.5.tar.gz/OLCTools-1.3.5/olctools/databasesetup/enterobase_api_download_assemblies.py
from olctools.accessoryFunctions.accessoryFunctions import make_path, SetupLogging import olctools.databasesetup.settings try: from olctools.databasesetup.settings import ECOLI except (NameError, ImportError): ECOLI = str() try: from olctools.databasesetup.settings import SENTERICA except (NameError, ImportError): SENTERICA = str() try: from olctools.databasesetup.settings import YERSINIA except (NameError, ImportError): YERSINIA = str() from urllib3.exceptions import HTTPError from argparse import ArgumentParser import pandas as pd import urllib3 import json import os class DownloadScheme(object): def api_keys(self): if self.organism == 'ecoli': self.api_key = ECOLI if not self.api_key: # Use the user input to set the verifier code self.api_key = input('Enter API token from https://enterobase.warwick.ac.uk/species/index/ecoli ') with open(olctools.databasesetup.settings.__file__, 'a+') as env: env.write("ECOLI = '{api}'\n".format(api=self.api_key)) elif self.organism == 'senterica': self.api_key = SENTERICA if not self.api_key: # Use the user input to set the verifier code self.api_key = input('Enter API token from https://enterobase.warwick.ac.uk/species/index/senterica ') with open(olctools.databasesetup.settings.__file__, 'a+') as env: env.write("SENTERICA = '{api}'\n".format(api=self.api_key)) else: self.api_key = YERSINIA if not self.api_key: # Use the user input to set the verifier code self.api_key = input('Enter API token from https://enterobase.warwick.ac.uk/species/index/yersinia ') with open(olctools.databasesetup.settings.__file__, 'a+') as env: env.write("YERSINIA = '{api}'\n".format(api=self.api_key)) def create_request(self, request_str): http = urllib3.PoolManager() headers = urllib3.util.make_headers(basic_auth='{token}:'.format(token=self.api_key)) request = http.request(method='GET', url=request_str, headers=headers, preload_content=False) return request def download_assemblies(self): self.api_keys() if self.filter_file: self.assembly_names = self.create_filter_list(filter_file=self.filter_file) try: response = self.create_request(self.server_address) try: data = json.loads(response.data.decode('utf-8')) except json.decoder.JSONDecodeError: print('Decoder Error') print(response.data.decode('utf-8')) raise SystemExit print(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))) response.release_conn() print(len(data['straindata'])) for record in data['straindata']: # if not self.assembly_names or (record in self.assembly_names): # if record_values = data['straindata'][record] # if record_values['sts'][0]['st_id'] == 99: assembly_response = self.create_request(record_values['download_fasta_link']) assembly_file = os.path.join(self.outputpath, '{sn}.fasta'.format(sn=record_values['assembly_barcode'])) if not os.path.isfile(assembly_file): with open(assembly_file, 'wb') as out_assembly: out_assembly.write(assembly_response.read()) except HTTPError as Response_error: error_string = str() for key, value in vars(Response_error).items(): error_string += '{key}: {value}\n'.format(key=key, value=value) print('HTTPError: {error}'.format(error=error_string)) quit() @staticmethod def create_filter_list(filter_file): # Read in the filter file using pandas.read_csv. 
# Use tabs as the separator.
        # Transpose the data, and convert the dataframe to a dictionary
        assemblies = list()
        filter_dict = pd.read_csv(
            filter_file,
            sep='\t',
        ).transpose().to_dict()
        for key, arg_dict in filter_dict.items():
            assemblies.append(arg_dict['Uberstrain'])
        return assemblies

    def __init__(self, serovar, organism, outputpath, filter_file=None):
        self.serovar = serovar
        self.organism = organism
        # serotype={sero}& &limit={limit} 1407678 &limit=50
        self.server_address = 'https://enterobase.warwick.ac.uk/api/v2.0/{organism}/straindata?' \
                              '&assembly_status=Assembled&only_fields=strain_name,download_fasta_link' \
            .format(organism=self.organism, sero=self.serovar, limit=1000)
        if outputpath.startswith('~'):
            self.outputpath = os.path.expanduser(os.path.abspath(os.path.join(outputpath)))
        else:
            self.outputpath = os.path.abspath(os.path.join(outputpath))
        self.api_key = str()
        make_path(self.outputpath)
        self.filter_file = filter_file
        if self.filter_file:
            assert os.path.isfile(self.filter_file), f'Cannot locate the supplied filter file: {self.filter_file}. ' \
                                                     f'Please ensure that you entered the name and path correctly.'
        self.assembly_names = list()


def cli():
    # Parser for arguments
    parser = ArgumentParser(description='Download typing schemes and alleles from Enterobase')
    parser.add_argument('-o', '--outputpath',
                        # required=True,
                        default='/mnt/nas2/processed_sequence_data/enterobase_assemblies/Litchfield',
                        help='The path to the folder in which the typing scheme is to be installed. The program will '
                             'create sub-folders as necessary. So, if you specify '
                             '/mnt/nas2/databases/assemblydatabases/0.5.0.0, that will be used as the root for the '
                             'SCHEME/ORGANISM subfolder, e.g. cgMLST/Escherichia')
    parser.add_argument('-g', '--genus',
                        default='senterica',
                        choices=['ecoli', 'senterica', 'yersinia'])
    parser.add_argument('-s', '--serovar',
                        # required=True,
                        default='Litchfield',
                        choices=['Litchfield'])
    parser.add_argument('-v', '--verbose',
                        action='store_true',
                        help='Print debug level messages')
    parser.add_argument('-f', '--filter_file',
                        type=str,
                        help='Filter the download based on Uberstrain. You must use the "Search strains" functionality '
                             'on Enterobase to filter the strains of interest. Use "Save to Local File". Provide the '
                             'name and path of the file')
    # Get the arguments into an object
    arguments = parser.parse_args()
    # Setup logging
    SetupLogging(debug=arguments.verbose)
    download = DownloadScheme(outputpath=arguments.outputpath,
                              organism=arguments.genus,
                              serovar=arguments.serovar,
                              filter_file=arguments.filter_file)
    download.download_assemblies()


# If the script is called from the command line, then call the argument parser
if __name__ == '__main__':
    cli()
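# Hedged usage sketch (not part of the original module): cli() above wires the
# options together, so a typical shell invocation looks like
#
#   python enterobase_api_download_assemblies.py \
#       -o ./enterobase_assemblies -g senterica -s Litchfield \
#       -f strains.tsv -v
#
# where strains.tsv is a hypothetical tab-separated "Search strains" export
# from Enterobase containing an Uberstrain column, as the -f help text
# describes. An API token is prompted for on first use and cached in
# olctools' settings module by api_keys().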
PypiClean
/Mopidy-Material-Webclient-0.2.1.tar.gz/Mopidy-Material-Webclient-0.2.1/README.rst
****************************
Mopidy-Material-Webclient
****************************

.. image:: https://img.shields.io/pypi/v/Mopidy-Material-Webclient.svg?style=flat
    :target: https://pypi.python.org/pypi/Mopidy-Material-Webclient/
    :alt: Latest PyPI version

.. image:: https://img.shields.io/pypi/dm/Mopidy-Material-Webclient.svg?style=flat
    :target: https://pypi.python.org/pypi/Mopidy-Material-Webclient/
    :alt: Number of PyPI downloads

A Mopidy web client with an Android Material design feel.

This is a stripped-down and slightly tarted-up web front end for my own personal use and for the few people I turn retro radios into streaming music players for. You won't see the wealth of settings that the brilliant `Mopidy Websettings <https://github.com/woutervanwijk/mopidy-websettings>`_ gives you, because this was created for people with pre-installed systems who won't be making that level of change. Maybe I'll add some meta settings in a later update to say which settings are accessible!

Installation
============

Install by running::

    pip install Mopidy-Material-Webclient

Configuration
=============

Before starting Mopidy, you must add configuration for Mopidy-Material-Webclient to your Mopidy configuration file::

    [material-webclient]
    enabled = true
    config_file = /etc/mopidy/mopidy.conf

Project resources
=================

- `Source code <https://github.com/matgallacher/mopidy-material-webclient>`_
- `Issue tracker <https://github.com/matgallacher/mopidy-material-webclient/issues>`_
- `Development branch tarball <https://github.com/matgallacher/mopidy-material-webclient/archive/master.tar.gz#egg=Mopidy-Material-Webclient-dev>`_

Thanks
======

- The `Mopidy music server <http://mopidy.com>`_, which this is only a web front end for
- A lot of the original code for settings came from the brilliant `Pi Musicbox <http://pimusicbox.com>`_, as did the original SD image I worked from

Changelog
=========

v0.2.1
----------------------------------------

- Added missing files to the PyPI distribution

v0.2.0
----------------------------------------

- Reworked searching from the library
- Playing a track now inserts it into the current queue and then plays it
- Clear-queue function
- Lots of cleanup and linting of scripts

v0.1.0
----------------------------------------

- Initial release.
- Lots of unnecessary files still in there from Bower; I'll add Grunt at some point to strip out the minified versions.
- About and restart don't work at all from the System menu.
- Search produces results, but you can't do anything with them. I'm not happy with any of the search mechanics at the moment, so expect to see that entirely revamped.
- Playlist support is really basic: just saving the current queue as a new playlist.
PypiClean
/DirectFolderBrowser-22.1.tar.gz/DirectFolderBrowser-22.1/README.md
# DirectFolderBrowser
A file and folder browser for Panda3D using DirectGUI

## Features
This is a simple fullscreen file and folder browser with a basic feature set. Currently implemented are:
- Browsing files and folders
- Display content as symbols or in a detailed list
- Show/hide hidden files (using a unix-like leading dot)
- Create new folders
- Filter by file extension
- Resizes with window size changes
- Makes use of the <a href="https://github.com/fireclawthefox/DirectTooltip">Tooltip class</a>

## Install
Install the DirectFolderBrowser via pip
```bash
pip install DirectFolderBrowser
```

## How to use
To add a browser instance to your running Panda3D application, just instantiate it as shown here:
```python3
from DirectFolderBrowser.DirectFolderBrowser import DirectFolderBrowser

# this command will be called by the browser
def callbackCommand(ok):
    if ok == 1:
        print("User Clicked OK")

        # print the selected file
        print(browser.get())

        browser.hide()
        # Destroy the browser if it's not needed anymore
        #browser.destroy()
    elif ok == 0:
        print("User Clicked Cancel")
        browser.hide()
        browser.destroy()

# show the browser as file browser
browser = DirectFolderBrowser(callbackCommand, fileBrowser=True)
```

### Parameters
The DirectFolderBrowser accepts a few arguments.
- <b>command:</b> The command that will be called on closing the browser
- <b>fileBrowser:</b> If set to True the browser will show files, otherwise it will only show folders
- <b>defaultPath:</b> The initial path the browser will be set to show
- <b>defaultFilename:</b> The filename that will be set by default, <i>only useful if fileBrowser is True</i>
- <b>fileExtensions:</b> A list of extensions. Only files with those extensions will be shown. <i>Only useful if fileBrowser is True</i>
- <b>tooltip:</b> An instance of the <a href="https://github.com/fireclawthefox/DirectTooltip">Tooltip class</a> to display tooltips for certain parts of the editor
- <b>iconDir:</b> A directory path that contains replacement images. It must contain all required images, which are:<br />
  File.png<br />
  Folder.png<br />
  FolderNew.png<br />
  FolderShowHidden.png<br />
  FolderUp.png<br />
  Reload.png<br />
- <b>parent:</b> Another DirectGUI element which has pixel2d as root parent.<br />
  The browser frame is placed centered, so a frame for example should have equal sizes in the horizontal and vertical directions,<br />
  e.g. frameSize=(-250,250,-200,200)<br />
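A folder-only picker can reuse the same pattern. This is a minimal sketch based on the parameters documented above; `folderSelected` and the start path are placeholders:
```python3
from DirectFolderBrowser.DirectFolderBrowser import DirectFolderBrowser

def folderSelected(ok):
    if ok == 1:
        # the selected folder path
        print(browser.get())
    browser.hide()

# fileBrowser=False hides files; defaultPath sets the folder shown first
browser = DirectFolderBrowser(
    folderSelected,
    fileBrowser=False,
    defaultPath="~")
```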
PypiClean
/Hikka_TL-1.24.14-py3-none-any.whl/telethon/tl/functions/stickers.py
from ...tl.tlobject import TLObject from ...tl.tlobject import TLRequest from typing import Optional, List, Union, TYPE_CHECKING import os import struct from datetime import datetime if TYPE_CHECKING: from ...tl.types import TypeInputDocument, TypeInputStickerSet, TypeInputStickerSetItem, TypeInputUser class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset: 'TypeInputStickerSet', sticker: 'TypeInputStickerSetItem'): """ :returns messages.StickerSet: Instance of either StickerSet, StickerSetNotModified. """ self.stickerset = stickerset self.sticker = sticker def to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset': self.stickerset.to_dict() if isinstance(self.stickerset, TLObject) else self.stickerset, 'sticker': self.sticker.to_dict() if isinstance(self.sticker, TLObject) else self.sticker } def _bytes(self): return b''.join(( b'\xbe\xfeS\x86', self.stickerset._bytes(), self.sticker._bytes(), )) @classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker: 'TypeInputDocument', position: int): """ :returns messages.StickerSet: Instance of either StickerSet, StickerSetNotModified. """ self.sticker = sticker self.position = position async def resolve(self, client, utils): self.sticker = utils.get_input_document(self.sticker) def to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker': self.sticker.to_dict() if isinstance(self.sticker, TLObject) else self.sticker, 'position': self.position } def _bytes(self): return b''.join(( b'\xca\xd4\xb6\xff', self.sticker._bytes(), struct.pack('<i', self.position), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker, position=_position) class CheckShortNameRequest(TLRequest): CONSTRUCTOR_ID = 0x284b3639 SUBCLASS_OF_ID = 0xf5b399ac def __init__(self, short_name: str): """ :returns Bool: This type has no constructors. """ self.short_name = short_name def to_dict(self): return { '_': 'CheckShortNameRequest', 'short_name': self.short_name } def _bytes(self): return b''.join(( b'96K(', self.serialize_bytes(self.short_name), )) @classmethod def from_reader(cls, reader): _short_name = reader.tgread_string() return cls(short_name=_short_name) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9021ab67 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id: 'TypeInputUser', title: str, short_name: str, stickers: List['TypeInputStickerSetItem'], masks: Optional[bool]=None, animated: Optional[bool]=None, videos: Optional[bool]=None, thumb: Optional['TypeInputDocument']=None, software: Optional[str]=None): """ :returns messages.StickerSet: Instance of either StickerSet, StickerSetNotModified. 
""" self.user_id = user_id self.title = title self.short_name = short_name self.stickers = stickers self.masks = masks self.animated = animated self.videos = videos self.thumb = thumb self.software = software async def resolve(self, client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) if self.thumb: self.thumb = utils.get_input_document(self.thumb) def to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id': self.user_id.to_dict() if isinstance(self.user_id, TLObject) else self.user_id, 'title': self.title, 'short_name': self.short_name, 'stickers': [] if self.stickers is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.stickers], 'masks': self.masks, 'animated': self.animated, 'videos': self.videos, 'thumb': self.thumb.to_dict() if isinstance(self.thumb, TLObject) else self.thumb, 'software': self.software } def _bytes(self): return b''.join(( b'g\xab!\x90', struct.pack('<I', (0 if self.masks is None or self.masks is False else 1) | (0 if self.animated is None or self.animated is False else 2) | (0 if self.videos is None or self.videos is False else 16) | (0 if self.thumb is None or self.thumb is False else 4) | (0 if self.software is None or self.software is False else 8)), self.user_id._bytes(), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'' if self.thumb is None or self.thumb is False else (self.thumb._bytes()), b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.stickers)),b''.join(x._bytes() for x in self.stickers), b'' if self.software is None or self.software is False else (self.serialize_bytes(self.software)), )) @classmethod def from_reader(cls, reader): flags = reader.read_int() _masks = bool(flags & 1) _animated = bool(flags & 2) _videos = bool(flags & 16) _user_id = reader.tgread_object() _title = reader.tgread_string() _short_name = reader.tgread_string() if flags & 4: _thumb = reader.tgread_object() else: _thumb = None reader.read_int() _stickers = [] for _ in range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) if flags & 8: _software = reader.tgread_string() else: _software = None return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks, animated=_animated, videos=_videos, thumb=_thumb, software=_software) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker: 'TypeInputDocument'): """ :returns messages.StickerSet: Instance of either StickerSet, StickerSetNotModified. """ self.sticker = sticker async def resolve(self, client, utils): self.sticker = utils.get_input_document(self.sticker) def to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker': self.sticker.to_dict() if isinstance(self.sticker, TLObject) else self.sticker } def _bytes(self): return b''.join(( b'Q\x0fv\xf7', self.sticker._bytes(), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() return cls(sticker=_sticker) class SetStickerSetThumbRequest(TLRequest): CONSTRUCTOR_ID = 0x9a364e30 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset: 'TypeInputStickerSet', thumb: 'TypeInputDocument'): """ :returns messages.StickerSet: Instance of either StickerSet, StickerSetNotModified. 
""" self.stickerset = stickerset self.thumb = thumb async def resolve(self, client, utils): self.thumb = utils.get_input_document(self.thumb) def to_dict(self): return { '_': 'SetStickerSetThumbRequest', 'stickerset': self.stickerset.to_dict() if isinstance(self.stickerset, TLObject) else self.stickerset, 'thumb': self.thumb.to_dict() if isinstance(self.thumb, TLObject) else self.thumb } def _bytes(self): return b''.join(( b'0N6\x9a', self.stickerset._bytes(), self.thumb._bytes(), )) @classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object() _thumb = reader.tgread_object() return cls(stickerset=_stickerset, thumb=_thumb) class SuggestShortNameRequest(TLRequest): CONSTRUCTOR_ID = 0x4dafc503 SUBCLASS_OF_ID = 0xc44a4b21 def __init__(self, title: str): """ :returns stickers.SuggestedShortName: Instance of SuggestedShortName. """ self.title = title def to_dict(self): return { '_': 'SuggestShortNameRequest', 'title': self.title } def _bytes(self): return b''.join(( b'\x03\xc5\xafM', self.serialize_bytes(self.title), )) @classmethod def from_reader(cls, reader): _title = reader.tgread_string() return cls(title=_title)
PypiClean
/EOxServer-1.2.12-py3-none-any.whl/eoxserver/core/util/multiparttools.py
from django.utils.six import b


def capitalize(header_name):
    """ Capitalize header field name. E.g., 'content-type' is capitalized to
    'Content-Type'.

    .. deprecated:: 0.4
    """
    return "-".join([f.capitalize() for f in header_name.split("-")])

# local alias to prevent conflict with local variable
__capitalize = capitalize


def getMimeType(content_type):
    """ Extract MIME-type from Content-Type string and convert it to
    lower-case.

    .. deprecated:: 0.4
    """
    return content_type.partition(";")[0].strip().lower()


def getMultipartBoundary(content_type):
    """ Extract boundary string from multipart Content-Type string.

    .. deprecated:: 0.4
    """
    for opt in content_type.split(";")[1:]:
        key, _, val = opt.partition("=")
        if key.strip().lower() == "boundary":
            return val.strip()
    raise ValueError(
        "failed to extract the multipart boundary string! content-type: %s"
        % content_type
    )


def mpPack(parts, boundary):
    """ Low-level memory-friendly MIME multipart packing.

    Note: The data payload is passed untouched and no transport encoding
    of the payload is performed.

    Inputs:

    - parts - list of part-tuples; each tuple shall have two elements:
      the header list and the (string) payload. The header itself should
      be a sequence of key-value pairs (tuples).
    - boundary - boundary string

    Output:

    - list of strings (which can be directly passed as Django response
      content)

    .. deprecated:: 0.4
    """
    # empty multipart package
    pack = ["--%s" % boundary]
    for header, data in parts:
        # pack header
        for key, value in header:
            pack.append("\r\n%s: %s" % (key, value))
        # terminate header
        pack.append("\r\n\r\n")
        # append data
        pack.append(data)
        # terminate partition
        pack.append("\r\n--%s" % boundary)
    # terminate package
    pack.append("--")
    # return package
    return pack


def mpUnpack(cbuffer, boundary, capitalize=False):
    """ Low-level memory-friendly MIME multipart unpacking.

    Note: The payload of the multipart package data is neither modified nor
    copied. No decoding of the transport encoded payload is performed.

    Note: The subroutine does not unpack any nested multipart content.

    Inputs:

    - ``cbuffer`` - character buffer (string) containing the packed
      multipart message.
    - ``boundary`` - boundary string
    - ``capitalize`` - by default the header keys are converted to lower-case
      (e.g., 'content-type'). To capitalize the names (e.g., 'Content-Type')
      set this option to true.

    Output:

    - list of parts - each part is a tuple of the header dictionary, payload
      ``cbuffer`` offset and payload size.

    .. deprecated:: 0.4
    """

    def findBorder(offset=0):
        delim = "--%s" % boundary if offset == 0 else "\n--%s" % boundary
        # boundary offset (end of last data)
        idx0 = cbuffer.find(delim, offset)
        if idx0 < 0:
            raise ValueError("Boundary cannot be found!")
        # header offset
        idx1 = idx0 + len(delim)
        # necessary check to be able to safely check two following characters
        if len(cbuffer[idx1:]) < 2:
            raise ValueError("Buffer too short!")
        # check the leading CR character
        if idx0 > 0 and cbuffer[idx0-1] == "\r":
            idx0 -= 1
        # check the terminating sequence
        if cbuffer[idx1:(idx1+2)] == "--":
            return idx0, idx1+2, -1
        # look up the double end-of-line (data offset)
        tmp = idx1
        while True:
            tmp = 1 + cbuffer.find("\n", tmp)
            if tmp < 1:
                raise ValueError(
                    "Cannot find the payload's double new-line separator!"
                )
            # is it followed by a new line?
            elif cbuffer[tmp:(tmp+2)] == "\r\n":
                idx2 = tmp + 2
                break
            elif cbuffer[tmp:(tmp+1)] == "\n":
                idx2 = tmp + 1
                break
            # otherwise continue the lookup
            continue
        # adjust the data offset (separator must be followed by new-line)
        if cbuffer[idx1:(idx1+2)] == "\r\n":
            idx1 += 2
        elif cbuffer[idx1:(idx1+1)] == "\n":
            idx1 += 1
        else:
            raise ValueError("Boundary is not followed by a new-line!")
        return idx0, idx1, idx2

    #--------------------------------------------------------------------------
    # auxiliary nested functions formatting header names

    # capitalize header name
    def unpackCC(v):
        key, _, val = v.partition(b(":"))
        return __capitalize(key.strip()), val.strip()

    # header name all lower
    def unpackLC(v):
        key, _, val = v.partition(b(":"))
        return key.strip().lower(), val.strip()

    # filter function rejecting entries with blank keys
    def noblank(tup):
        (k, v) = tup
        return bool(k)

    #--------------------------------------------------------------------------

    # get the offsets
    # off = (<last payload end>,<header start>,<payload start>)
    # negative <payload start> means terminating boundary
    try:
        off = findBorder()
        offsets = [off]
        while off[1] < off[2]:
            off = findBorder(off[2])
            offsets.append(off)
    except ValueError as e:
        raise Exception(
            "The buffer is not a valid MIME multi-part message! Reason: %s"
            % e
        )

    # process the parts
    parts = []
    for of0, of1 in zip(offsets[:-1], offsets[1:]):
        # get the http header with <LF> line ending
        tmp = cbuffer[of0[1]:of0[2]].replace("\r\n", "\n")[:-2].split("\n")
        # unpack header
        header = dict(
            filter(noblank, map((unpackLC, unpackCC)[capitalize], tmp))
        )
        # get the header and payload offset and size
        parts.append((header, of0[2], of1[0]-of0[2]))

    return parts


CRLF = b"\r\n"
CRLFCRLF = b"\r\n\r\n"


def get_substring(data, boundary, offset, end):
    """ Retrieves the substring of ``data`` until the next ``boundary``,
    from the given ``offset`` until ``end``.
    """
    index = data.find(boundary, offset, end)
    if index == -1:
        return None, None
    return data[offset:index], index + len(boundary)


def parse_parametrized_option(string):
    """ Parses a parametrized options string like
    'base;option=value;otheroption=othervalue'.

    :returns: the base string and a :class:`dict` with all parameters
    """
    parts = string.split(b";")
    params = dict(
        param.strip().split(b"=", 1) for param in parts[1:]
    )
    return parts[0], params


def capitalize_header(key):
    """ Returns a capitalized version of the header line such as
    'content-type' -> 'Content-Type'.
    """
    return b"-".join([
        item if item.decode()[0].isupper()
        else (item.decode()[0].upper() + item.decode()[1:]).encode('ascii')
        for item in key.split(b"-")
    ])


def iterate(data, offset=0, end=None, headers=None):
    """ Efficient generator function to iterate over a single- or multipart
    message. It yields tuples in the shape (``headers``, ``data``), where
    headers is a ``dict`` and data a buffer object, referencing the subset of
    the original content. In case of multipart messages, the multipart headers
    are yielded beforehand, with an empty string as data.

    The `offset` parameter specifies the offset index to the start of the
    data. This is mostly used in the recursive call. The same applies to the
    `end` parameter.

    The `headers` parameter specifies that the header section of the response
    was already read, and the headers are now entailed in the given dict. If
    this parameter is omitted, the headers are read from the stream.
    """
    # check if the headers need to be parsed.
    if not headers:
        # read the header bytes from the string and get the new offset.
        header_bytes, offset = get_substring(data, CRLFCRLF, offset, end)

        # check if no data could be extracted
        if (header_bytes, offset) == (None, None):
            return

        # parse the headers into a dict
        headers = {}
        for line in header_bytes.split(CRLF):
            key, _, value = line.partition(b":")
            headers[capitalize_header(key.strip())] = value.strip()

    # get the content type
    content_type, params = parse_parametrized_option(
        headers.get(b"Content-Type", b"")
    )

    # check if this is a multipart
    if content_type.startswith(b"multipart"):
        # if this is a multipart, yield only its headers and an empty string
        yield headers, memoryview(b"")

        # parse the boundary and find the final index of all multiparts
        boundary = b"%s--%s" % (CRLF, params[b"boundary"])
        end_boundary = b"%s--" % boundary

        sub_end = data.find(end_boundary)
        if sub_end == -1:
            raise ValueError("Could not find multipart end.")

        # get the first part of this multipart
        sub_offset = data.find(boundary, offset, sub_end)
        if sub_offset == -1:
            raise ValueError("Could not find boundary.")

        # iterate over all parts until we reach the end of the multipart
        while sub_offset < sub_end:
            sub_offset += len(boundary) + 1
            sub_stop = data.find(boundary, sub_offset, sub_end)
            sub_stop = sub_stop if sub_stop > -1 else sub_end

            # recursive function call
            for sub_headers, sub_data in iterate(data, sub_offset, sub_stop):
                yield sub_headers, sub_data

            sub_offset = sub_stop

    else:
        # in case we have a single part, just yield the headers and a buffer
        # pointing to a substring of the original data stream.
        if end is not None:
            yield headers, memoryview(data)[offset:end]
        else:
            yield headers, memoryview(data)[offset:]
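# Hedged usage sketch (not part of the original module): parse a small
# two-part message with iterate(). The part scanner looks for
# b"\r\n--<boundary>", so the body below puts a CRLF ahead of the first
# boundary line.
if __name__ == "__main__":
    message = (
        b"Content-Type: multipart/mixed; boundary=frontier\r\n\r\n"
        b"\r\n--frontier\r\n"
        b"Content-Type: text/plain\r\n\r\n"
        b"hello\r\n"
        b"--frontier\r\n"
        b"Content-Type: application/json\r\n\r\n"
        b'{"a": 1}\r\n'
        b"--frontier--"
    )
    for hdrs, payload in iterate(message):
        # the multipart envelope itself is yielded first with an empty payload
        print(hdrs.get(b"Content-Type"), bytes(payload))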
PypiClean
/Mathics3-6.0.2.tar.gz/Mathics3-6.0.2/mathics/builtin/numbers/trig.py
import math from collections import namedtuple from contextlib import contextmanager from itertools import chain import mpmath from mathics.builtin.arithmetic import _MPMathFunction from mathics.builtin.base import Builtin from mathics.core.atoms import Integer, Integer0, IntegerM1, Real from mathics.core.convert.python import from_python from mathics.core.exceptions import IllegalStepSpecification from mathics.core.expression import Expression from mathics.core.list import ListExpression from mathics.core.symbols import SymbolPower from mathics.core.systemsymbols import ( SymbolArcCos, SymbolArcSin, SymbolArcTan, SymbolCos, SymbolSin, ) class Fold: # allows inherited classes to specify a single algorithm implementation that # can be called with machine precision, arbitrary precision or symbolically. ComputationFunctions = namedtuple("ComputationFunctions", ("sin", "cos")) FLOAT = 0 MPMATH = 1 SYMBOLIC = 2 math = { FLOAT: ComputationFunctions( cos=math.cos, sin=math.sin, ), MPMATH: ComputationFunctions( cos=mpmath.cos, sin=mpmath.sin, ), SYMBOLIC: ComputationFunctions( cos=lambda x: Expression(SymbolCos, x), sin=lambda x: Expression(SymbolSin, x), ), } operands = { FLOAT: lambda x: None if x is None else x.round_to_float(), MPMATH: lambda x: None if x is None else x.to_mpmath(), SYMBOLIC: lambda x: x, } def _operands(self, state, steps): raise NotImplementedError def _fold(self, state, steps, math): raise NotImplementedError def _spans(self, operands): spans = {} k = 0 j = 0 for mode in (self.FLOAT, self.MPMATH): for i, operand in enumerate(operands[k:]): if operand[0] > mode: break j = i + k + 1 if k == 0 and j == 1: # only init state? then ignore. j = 0 spans[mode] = slice(k, j) k = j spans[self.SYMBOLIC] = slice(k, len(operands)) return spans def fold(self, x, ll): # computes fold(x, ll) with the internal _fold function. will start # its evaluation machine precision, and will escalate to arbitrary # precision if or symbolical evaluation only if necessary. folded # items already computed are carried over to new evaluation modes. yield x # initial state init = None operands = list(self._operands(x, ll)) spans = self._spans(operands) for mode in (self.FLOAT, self.MPMATH, self.SYMBOLIC): s_operands = [y[1:] for y in operands[spans[mode]]] if not s_operands: continue if mode == self.MPMATH: from mathics.core.number import min_prec precision = min_prec(*[t for t in chain(*s_operands) if t is not None]) working_precision = mpmath.workprec else: @contextmanager def working_precision(_): yield precision = None if mode == self.FLOAT: def out(z): return Real(z) elif mode == self.MPMATH: def out(z): return Real(z, precision) else: def out(z): return z as_operand = self.operands.get(mode) def converted_operands(): for y in s_operands: yield tuple(as_operand(t) for t in y) with working_precision(precision): c_operands = converted_operands() if init is not None: c_init = tuple( (None if t is None else as_operand(from_python(t))) for t in init ) else: c_init = next(c_operands) init = tuple((None if t is None else out(t)) for t in c_init) generator = self._fold(c_init, c_operands, self.math.get(mode)) for y in generator: y = tuple(out(t) for t in y) yield y init = y class AnglePath(Builtin): """ <url> :WMA link: https://reference.wolfram.com/language/ref/AnglePath.html</url> <dl> <dt>'AnglePath[{$phi1$, $phi2$, ...}]' <dd>returns the points formed by a turtle starting at {0, 0} and angled \ at 0 degrees going through the turns given by angles $phi1$, $phi2$, ... and using distance 1 \ for each step. 
<dt>'AnglePath[{{$r1$, $phi1$}, {$r2$, $phi2$}, ...}]' <dd>instead of using 1 as distance, use $r1$, $r2$, ... as distances for \ the respective steps. <dt>'AnglePath[$phi0$, {$phi1$, $phi2$, ...}]' <dd>starts with direction $phi0$ instead of 0. <dt>'AnglePath[{$x$, $y$}, {$phi1$, $phi2$, ...}]' <dd>starts at {$x, $y} instead of {0, 0}. <dt>'AnglePath[{{$x$, $y$}, $phi0$}, {$phi1$, $phi2$, ...}]' <dd>specifies initial position {$x$, $y$} and initial direction $phi0$. <dt>'AnglePath[{{$x$, $y$}, {$dx$, $dy$}}, {$phi1$, $phi2$, ...}]' <dd>specifies initial position {$x$, $y$} and a slope {$dx$, $dy$} that is \ understood to be the initial direction of the turtle. </dl> >> AnglePath[{90 Degree, 90 Degree, 90 Degree, 90 Degree}] = {{0, 0}, {0, 1}, {-1, 1}, {-1, 0}, {0, 0}} >> AnglePath[{{1, 1}, 90 Degree}, {{1, 90 Degree}, {2, 90 Degree}, {1, 90 Degree}, {2, 90 Degree}}] = {{1, 1}, {0, 1}, {0, -1}, {1, -1}, {1, 1}} >> AnglePath[{a, b}] = {{0, 0}, {Cos[a], Sin[a]}, {Cos[a] + Cos[a + b], Sin[a] + Sin[a + b]}} >> Precision[Part[AnglePath[{N[1/3, 100], N[2/3, 100]}], 2, 1]] = 100. >> Graphics[Line[AnglePath[Table[1.7, {50}]]]] = -Graphics- >> Graphics[Line[AnglePath[RandomReal[{-1, 1}, {100}]]]] = -Graphics- """ summary_text = 'form a path from a sequence of "turtle-like" turns and motions' messages = {"steps": "`1` is not a valid description of steps."} @staticmethod def _compute(x0, y0, phi0, steps, evaluation): if not steps: return ListExpression() if steps[0].get_head_name() == "System`List": def parse(step): if step.get_head_name() != "System`List": raise IllegalStepSpecification arguments = step.elements if len(arguments) != 2: raise IllegalStepSpecification return arguments else: def parse(step): if step.get_head_name() == "System`List": raise IllegalStepSpecification return None, step try: fold = AnglePathFold(parse) elements = [ ListExpression(x, y) for x, y, _ in fold.fold((x0, y0, phi0), steps) ] return ListExpression(*elements) except IllegalStepSpecification: evaluation.message("AnglePath", "steps", ListExpression(*steps)) def eval(self, steps, evaluation): "AnglePath[{steps___}]" return AnglePath._compute( Integer0, Integer0, None, steps.get_sequence(), evaluation ) def eval_phi0(self, phi0, steps, evaluation): "AnglePath[phi0_, {steps___}]" return AnglePath._compute( Integer0, Integer0, phi0, steps.get_sequence(), evaluation ) def eval_xy(self, x, y, steps, evaluation): "AnglePath[{x_, y_}, {steps___}]" return AnglePath._compute(x, y, None, steps.get_sequence(), evaluation) def eval_xy_phi0(self, x, y, phi0, steps, evaluation): "AnglePath[{{x_, y_}, phi0_}, {steps___}]" return AnglePath._compute(x, y, phi0, steps.get_sequence(), evaluation) def eval_xy_dx(self, x, y, dx, dy, steps, evaluation): "AnglePath[{{x_, y_}, {dx_, dy_}}, {steps___}]" phi0 = Expression(SymbolArcTan, dx, dy) return AnglePath._compute(x, y, phi0, steps.get_sequence(), evaluation) class AnglePathFold(Fold): def __init__(self, parse): self._parse = parse def _operands(self, state, steps): SYMBOLIC = self.SYMBOLIC MPMATH = self.MPMATH FLOAT = self.FLOAT def check_pos_operand(x): if x is not None: if isinstance(x, Integer) and x.get_int_value() in (0, 1): pass elif not isinstance(x, Real): return SYMBOLIC elif not x.is_machine_precision(): return MPMATH return FLOAT def check_angle_operand(phi): if phi is not None: if not isinstance(phi, Real): return SYMBOLIC elif not phi.is_machine_precision(): return MPMATH return FLOAT parse = self._parse x, y, phi = state mode = max(check_pos_operand(x), check_pos_operand(y), 
check_angle_operand(phi)) yield mode, x, y, phi for step in steps: distance, delta_phi = parse(step) mode = max(check_angle_operand(delta_phi), check_pos_operand(distance)) yield mode, distance, delta_phi def _fold(self, state, steps, math): sin = math.sin cos = math.cos x, y, phi = state for distance, delta_phi in steps: if phi is None: phi = delta_phi else: phi += delta_phi dx = cos(phi) dy = sin(phi) if distance is not None: dx *= distance dy *= distance x += dx y += dy yield x, y, phi class ArcCos(_MPMathFunction): """ Inverse cosine, <url> :arccosine: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#acot</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#acos</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcCos.html</url>) <dl> <dt>'ArcCos[$z$]' <dd>returns the inverse cosine of $z$. </dl> >> ArcCos[1] = 0 >> ArcCos[0] = Pi / 2 >> Integrate[ArcCos[x], {x, -1, 1}] = Pi """ mpmath_name = "acos" rules = { "ArcCos[0]": "Pi / 2", "ArcCos[1]": "0", "ArcCos[Undefined]": "Undefined", "Derivative[1][ArcCos]": "-1/Sqrt[1-#^2]&", } summary_text = "inverse cosine function" sympy_name = "acos" class ArcCot(_MPMathFunction): """ Inverse cotangent, <url> :arccotangent: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#acot</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#acot</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcCot.html</url>) <dl> <dt>'ArcCot[$z$]' <dd>returns the inverse cotangent of $z$. </dl> >> ArcCot[0] = Pi / 2 >> ArcCot[1] = Pi / 4 """ mpmath_name = "acot" rules = { "ArcCot[0]": "Pi / 2", "ArcCot[1]": "Pi / 4", "ArcCot[Undefined]": "Undefined", "Derivative[1][ArcCot]": "-1/(1+#^2)&", } summary_text = "inverse cotangent function" sympy_name = "acot" class ArcCsc(_MPMathFunction): """ Inverse cosecant, <url> :arccosecant: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#acsc</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#acsc</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcCsc.html</url>) <dl> <dt>'ArcCsc[$z$]' <dd>returns the inverse cosecant of $z$. </dl> >> ArcCsc[1] = Pi / 2 >> ArcCsc[-1] = -Pi / 2 """ mpmath_name = "acsc" rules = { "ArcCsc[Undefined]": "Undefined", "ArcCsc[0]": "ComplexInfinity", "ArcCsc[1]": "Pi / 2", "Derivative[1][ArcCsc]": "-1 / (Sqrt[1 - 1/#^2] * #^2)&", } summary_text = "inverse cosecant function" sympy_name = "acsc" def to_sympy(self, expr, **kwargs): if len(expr.elements) == 1: return Expression( SymbolArcSin, Expression(SymbolPower, expr.elements[0], Integer(-1)) ).to_sympy() class ArcSec(_MPMathFunction): """ Inverse secant, <url> :arcsecant: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#sympy.functions.elementary.trigonometric.asec</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#asec</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcSec.html</url>) <dl> <dt>'ArcSec[$z$]' <dd>returns the inverse secant of $z$. 
</dl> >> ArcSec[1] = 0 >> ArcSec[-1] = Pi """ mpmath_name = "asec" rules = { "ArcSec[0]": "ComplexInfinity", "ArcSec[1]": "0", "ArcSec[Undefined]": "Undefined", "Derivative[1][ArcSec]": "1 / (Sqrt[1 - 1/#^2] * #^2)&", } summary_text = "inverse secant function" sympy_name = "asec" def to_sympy(self, expr, **kwargs): if len(expr.elements) == 1: return Expression( SymbolArcCos, Expression(SymbolPower, expr.elements[0], IntegerM1) ).to_sympy() class ArcSin(_MPMathFunction): """ Inverse sine, <url> :arcsine: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#asin</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#asin</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcSin.html</url>) <dl> <dt>'ArcSin[$z$]' <dd>returns the inverse sine of $z$. </dl> >> ArcSin[0] = 0 >> ArcSin[1] = Pi / 2 """ mpmath_name = "asin" rules = { "ArcSin[0]": "0", "ArcSin[1]": "Pi / 2", "ArcSin[Undefined]": "Undefined", "Derivative[1][ArcSin]": "1/Sqrt[1-#^2]&", } summary_text = "inverse sine function" sympy_name = "asin" class ArcTan(_MPMathFunction): """ Inverse tangent, <url> :arctangent: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Principal_values</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#atan</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#atan</url>, <url> :WMA: https://reference.wolfram.com/language/ref/ArcTan.html</url>) <dl> <dt>'ArcTan[$z$]' <dd>returns the inverse tangent of $z$. </dl> >> ArcTan[1] = Pi / 4 >> ArcTan[1.0] = 0.785398 >> ArcTan[-1.0] = -0.785398 >> ArcTan[1, 1] = Pi / 4 #> ArcTan[-1, 1] = 3 Pi / 4 #> ArcTan[1, -1] = -Pi / 4 #> ArcTan[-1, -1] = -3 Pi / 4 #> ArcTan[1, 0] = 0 #> ArcTan[-1, 0] = Pi #> ArcTan[0, 1] = Pi / 2 #> ArcTan[0, -1] = -Pi / 2 """ mpmath_name = "atan" rules = { "ArcTan[0]": "0", "ArcTan[1]": "Pi/4", "ArcTan[Undefined]": "Undefined", "ArcTan[Undefined, x_]": "Undefined", "ArcTan[y_, Undefined]": "Undefined", "ArcTan[x_?RealNumberQ, y_?RealNumberQ]": """If[x == 0, If[y == 0, 0, If[y > 0, Pi/2, -Pi/2]], If[x > 0, ArcTan[y/x], If[y >= 0, ArcTan[y/x] + Pi, ArcTan[y/x] - Pi]]]""", "Derivative[1][ArcTan]": "1/(1+#^2)&", } summary_text = "inverse tangent function" sympy_name = "atan" class Cos(_MPMathFunction): """ <url> :Cosine: https://en.wikipedia.org/wiki/Sine_and_cosine</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#cos</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#cos</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Cos.html</url>) <dl> <dt>'Cos[$z$]' <dd>returns the cosine of $z$. </dl> >> Cos[3 Pi] = -1 #> Cos[1.5 Pi] = -1.83697×10^-16 """ mpmath_name = "cos" rules = { "Cos[(1/2) * Pi]": "0", "Cos[0]": "1", "Cos[Pi]": "-1", "Cos[Undefined]": "Undefined", "Cos[n_Integer * Pi]": "(-1)^n", "Derivative[1][Cos]": "-Sin[#]&", } summary_text = "cosine function" sympy_name = "cos" class Cot(_MPMathFunction): """ <url> :Cotangent: https://en.wikipedia.org/wiki/Trigonometric_functions</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#cot</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#cot</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Cot.html</url>) <dl> <dt>'Cot[$z$]' <dd>returns the cotangent of $z$. </dl> >> Cot[0] = ComplexInfinity >> Cot[1.] 
= 0.642093 """ mpmath_name = "cot" rules = { "Cot[0]": "ComplexInfinity", "Cot[Undefined]": "Undefined", "Derivative[1][Cot]": "-Csc[#]^2&", } summary_text = "cotangent function" sympy_name = "cot" class Csc(_MPMathFunction): """ <url> :Cosecant: https://en.wikipedia.org/wiki/Trigonometric_functions</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#csc</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#csc</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Csc.html</url>) <dl> <dt>'Csc[$z$]' <dd>returns the cosecant of $z$. </dl> >> Csc[0] = ComplexInfinity >> Csc[1] (* Csc[1] in Mathematica *) = 1 / Sin[1] >> Csc[1.] = 1.1884 """ mpmath_name = "csc" rules = { "Csc[0]": "ComplexInfinity", "Csc[Undefined]": "Undefined", "Derivative[1][Csc]": "-Cot[#] Csc[#]&", } summary_text = "cosecant function" sympy_name = "csc" def to_sympy(self, expr, **kwargs): if len(expr.elements) == 1: return Expression( SymbolPower, Expression(SymbolSin, expr.elements[0]), Integer(-1) ).to_sympy() class Haversine(_MPMathFunction): """ <url> :WMA link: https://reference.wolfram.com/language/ref/Haversine.html</url> <dl> <dt>'Haversine[$z$]' <dd>returns the haversine function of $z$. </dl> >> Haversine[1.5] = 0.464631 >> Haversine[0.5 + 2I] = -1.15082 + 0.869405 I """ rules = {"Haversine[z_]": "Power[Sin[z/2], 2]"} summary_text = "Haversine function" class InverseHaversine(_MPMathFunction): """ <url> :WMA link: https://reference.wolfram.com/language/ref/InverseHaversine.html</url> <dl> <dt>'InverseHaversine[$z$]' <dd>returns the inverse haversine function of $z$. </dl> >> InverseHaversine[0.5] = 1.5708 >> InverseHaversine[1 + 2.5 I] = 1.76459 + 2.33097 I """ rules = {"InverseHaversine[z_]": "2 * ArcSin[Sqrt[z]]"} summary_text = "inverse Haversine function" class Sec(_MPMathFunction): """ <url> :Secant: https://en.wikipedia.org/wiki/Trigonometric_functions</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#sec</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#sec</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Sec.html</url>) <dl> <dt>'Sec[$z$]' <dd>returns the secant of $z$. </dl> >> Sec[0] = 1 >> Sec[1] (* Sec[1] in Mathematica *) = 1 / Cos[1] >> Sec[1.] = 1.85082 """ mpmath_name = "sec" rules = { "Derivative[1][Sec]": "Sec[#] Tan[#]&", "Sec[0]": "1", } summary_text = "secant function" sympy_name = "sec" def to_sympy(self, expr, **kwargs): if len(expr.elements) == 1: return Expression( SymbolPower, Expression(SymbolCos, expr.elements[0]), Integer(-1) ).to_sympy() class Sin(_MPMathFunction): """ <url> :Sine: https://en.wikipedia.org/wiki/Sine_and_cosine</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#sin</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#sin</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Sin.html</url>) <dl> <dt>'Sin[$z$]' <dd>returns the sine of $z$. 
</dl> >> Sin[0] = 0 >> Sin[0.5] = 0.479426 >> Sin[3 Pi] = 0 >> Sin[1.0 + I] = 1.29846 + 0.634964 I >> Plot[Sin[x], {x, -Pi, Pi}] = -Graphics- #> N[Sin[1], 40] = 0.8414709848078965066525023216302989996226 """ mpmath_name = "sin" rules = { "Derivative[1][Sin]": "Cos[#]&", "Sin[Pi]": "0", "Sin[n_Integer*Pi]": "0", "Sin[(1/2) * Pi]": "1", "Sin[0]": "0", "Sin[Undefined]": "Undefined", } summary_text = "sine function" sympy_name = "sin" class Tan(_MPMathFunction): """ <url> :Tangent: https://en.wikipedia.org/wiki/Tangent</url> (<url> :SymPy: https://docs.sympy.org/latest/modules/functions/elementary.html#tan</url>, <url> :mpmath: https://mpmath.org/doc/current/functions/trigonometric.html#tan</url>, <url> :WMA: https://reference.wolfram.com/language/ref/Tan.html</url>) <dl> <dt>'Tan[$z$]' <dd>returns the tangent of $z$. </dl> >> Tan[0] = 0 >> Tan[Pi / 2] = ComplexInfinity #> Tan[0.5 Pi] = 1.63312×10^16 """ mpmath_name = "tan" rules = { "Derivative[1][Tan]": "Sec[#]^2&", "Tan[(1/2) * Pi]": "ComplexInfinity", "Tan[0]": "0", "Tan[Undefined]": "Undefined", } summary_text = "tangent function" sympy_name = "tan"
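# Hedged sketch (not part of this module): a new builtin can follow the same
# pattern as Haversine above, delegating to existing builtins through `rules`.
# Versine is hypothetical here; it is not an actual Mathics builtin.
class Versine(_MPMathFunction):
    """
    <dl>
      <dt>'Versine[$z$]'
      <dd>returns the versine of $z$, i.e. 1 - Cos[$z$].
    </dl>
    """

    rules = {"Versine[z_]": "1 - Cos[z]"}
    summary_text = "versine function"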
PypiClean
/GeneClust-0.0.1-py3-none-any.whl/scGeneClust/tl/cluster.py
from functools import partial
from itertools import combinations
from multiprocessing import cpu_count
from multiprocessing.pool import Pool
from typing import Optional

import anndata as ad
import networkx as nx
import numpy as np
from loguru import logger
from scipy.spatial.distance import squareform
from sklearn.cluster import MiniBatchKMeans
from sklearn.feature_selection import mutual_info_regression

import scGeneClust.tl as tl


def gene_clustering_mbkmeans(
        adata: ad.AnnData,
        n_gene_clusters: int,
        random_stat: Optional[int]
):
    """
    Cluster genes based on mini-batch k-means.

    :param adata: The annotated matrix.
    :param n_gene_clusters: The number of gene clusters. Only used in GeneClust-fast.
    :param random_stat: Change to use different initial states for the optimization
    """
    logger.info("Clustering genes...")
    km = MiniBatchKMeans(n_clusters=n_gene_clusters, batch_size=1024, random_state=random_stat)
    adata.var['cluster'] = km.fit_predict(adata.varm['pca'])  # gene clustering
    adata.var['score'] = tl.compute_gene_closeness(adata, km.cluster_centers_)
    logger.info("Gene clustering finished!")


def gene_clustering_graph(
        adata: ad.AnnData,
        scale: int,
        random_stat: Optional[int]
):
    """
    Cluster genes based on graph.

    :param adata: The annotated matrix.
    :param scale: The scale factor used in the partition of MST.
    :param random_stat: Change to use different initial states for the optimization.
    """
    logger.info("Clustering genes...")
    # calculate mutual information between genes to get an upper triangular matrix
    pool = Pool(processes=cpu_count() - 1)
    partial_cal_mi = partial(cal_mi, data=adata.layers['X_gene_log'], random_stat=random_stat)
    adata.varp['redundancy'] = squareform(pool.starmap(partial_cal_mi, combinations(range(adata.n_vars), 2)))

    # construct MST
    G = nx.Graph(adata.varp['redundancy'])
    MST = nx.minimum_spanning_tree(G, weight="weight", algorithm="prim")
    logger.debug("MST constructed!")

    # prune MST: mi < max{comp(node_1, node_2), min{rlv(node_1), rlv(node_2)}}
    partial_prune = partial(prune, data=adata.layers['X_gene_log'], clusters=adata.obs['cluster'],
                            rlv=adata.var['score'], scale=scale, random_stat=random_stat)
    node_pairs = pool.starmap(partial_prune, MST.edges(data=True))
    for node_pair in node_pairs:
        if node_pair is not None:
            MST.remove_edge(node_pair[0], node_pair[1])
    logger.debug("MST pruned!")

    # cluster genes by finding subtrees: label each gene by the connected
    # component (subtree) its node index belongs to
    clusters = list(nx.connected_components(MST))
    labels = np.empty(adata.n_vars, dtype=int)
    for i, cluster in enumerate(clusters):
        labels[list(cluster)] = i
    adata.var['cluster'] = labels
    logger.info("Gene clustering finished!")


def cal_mi(i: int, j: int, data: np.ndarray, random_stat: Optional[int]):
    return mutual_info_regression(
        data[:, i].reshape(-1, 1), data[:, j], discrete_features=False, random_state=random_stat
    )[0]


def prune(node1: int, node2: int, w, data, clusters, rlv, scale, random_stat):
    class_rlv = min(rlv[node1], rlv[node2])
    complm = cal_complementarity(data, clusters, node1, node2, random_stat)
    return (node1, node2) if w['weight'] * scale < max(class_rlv, complm) else None


def cal_complementarity(data, clusters, n1: int, n2: int, random_stat: Optional[int]):
    cmi = 0
    for clus in np.unique(clusters):
        clus_mask = clusters == clus
        rlv = mutual_info_regression(
            data[clus_mask, n1].reshape(-1, 1), data[clus_mask, n2],
            discrete_features=False, random_state=random_stat
        )
        cmi += rlv * clus_mask.sum() / data.shape[0]
    return cmi
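# Hedged usage sketch (not part of the original module): run the fast
# (mini-batch k-means) variant on a toy AnnData object. The random values
# stand in for a real gene-space PCA, which the package's preprocessing
# normally writes into adata.varm['pca']; tl.compute_gene_closeness is
# assumed to be provided by the installed package, as imported above.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    toy = ad.AnnData(rng.random((50, 100)))  # 50 cells x 100 genes
    toy.varm['pca'] = rng.random((100, 20))  # per-gene embedding
    gene_clustering_mbkmeans(toy, n_gene_clusters=5, random_stat=0)
    print(toy.var['cluster'].value_counts())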
PypiClean
/COMPAS-1.17.5.tar.gz/COMPAS-1.17.5/src/compas_ghpython/artists/frameartist.py
from __future__ import print_function from __future__ import absolute_import from __future__ import division import compas_ghpython from compas.artists import PrimitiveArtist from compas.colors import Color from .artist import GHArtist class FrameArtist(GHArtist, PrimitiveArtist): """Artist for drawing frames. Parameters ---------- frame : :class:`~compas.geometry.Frame` A COMPAS frame. scale : float, optional The scale of the vectors representing the axes of the frame. **kwargs : dict, optional Additional keyword arguments. See :class:`~compas_ghpython.artists.GHArtist` and :class:`~compas.artists.PrimitiveArtist` for more info. Attributes ---------- scale : float Scale factor that controls the length of the axes. color_origin : :class:`~compas.colors.Color` Default is ``Color.black()``. color_xaxis : :class:`~compas.colors.Color` Default is ``Color.red()``. color_yaxis : :class:`~compas.colors.Color` Default is ``Color.green()``. color_zaxis : :class:`~compas.colors.Color` Default is ``Color.blue()``. """ def __init__(self, frame, scale=1.0, **kwargs): super(FrameArtist, self).__init__(primitive=frame, **kwargs) self.scale = scale self.color_origin = Color.black() self.color_xaxis = Color.red() self.color_yaxis = Color.green() self.color_zaxis = Color.blue() def draw(self): """Draw the frame. Returns ------- :rhino:`Rhino.Geometry.Plane` """ return compas_ghpython.draw_frame(self.primitive) def draw_origin(self): """Draw the frame's origin. Returns ------- :rhino:`Rhino.Geometry.Point` """ point = {"pos": list(self.primitive.point), "color": self.color_origin.rgb255} return compas_ghpython.draw_points([point])[0] def draw_axes(self): """Draw the frame's axes. Returns ------- list[:rhino:`Rhino.Geometry.Line`] """ origin = list(self.primitive.point) x = list(self.primitive.point + self.primitive.xaxis.scaled(self.scale)) y = list(self.primitive.point + self.primitive.yaxis.scaled(self.scale)) z = list(self.primitive.point + self.primitive.zaxis.scaled(self.scale)) lines = [ { "start": origin, "end": x, "color": self.color_xaxis.rgb255, "arrow": "end", }, { "start": origin, "end": y, "color": self.color_yaxis.rgb255, "arrow": "end", }, { "start": origin, "end": z, "color": self.color_zaxis.rgb255, "arrow": "end", }, ] return compas_ghpython.draw_lines(lines)
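# Hedged usage sketch (not part of the original module), meant to run inside a
# GhPython component where the Rhino geometry types are available.
# Frame.worldXY() is the standard COMPAS world frame.
#
# from compas.geometry import Frame
# from compas_ghpython.artists import FrameArtist
#
# artist = FrameArtist(Frame.worldXY(), scale=2.0)
# plane = artist.draw()          # Rhino.Geometry.Plane
# origin = artist.draw_origin()  # Rhino.Geometry.Point
# axes = artist.draw_axes()      # list of Rhino.Geometry.Line with arrowheads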
/Argonaut-0.3.4.tar.gz/Argonaut-0.3.4/argonaut/public/ckeditor/_source/plugins/panelbutton/plugin.js
/* Copyright (c) 2003-2010, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.html or http://ckeditor.com/license */ CKEDITOR.plugins.add( 'panelbutton', { requires : [ 'button' ], beforeInit : function( editor ) { editor.ui.addHandler( CKEDITOR.UI_PANELBUTTON, CKEDITOR.ui.panelButton.handler ); } }); /** * Button UI element. * @constant * @example */ CKEDITOR.UI_PANELBUTTON = 4; (function() { var clickFn = function( editor ) { var _ = this._; if ( _.state == CKEDITOR.TRISTATE_DISABLED ) return; this.createPanel( editor ); if ( _.on ) { _.panel.hide(); return; } _.panel.showBlock( this._.id, this.document.getById( this._.id ), 4 ); }; CKEDITOR.ui.panelButton = CKEDITOR.tools.createClass( { base : CKEDITOR.ui.button, $ : function( definition ) { // We don't want the panel definition in this object. var panelDefinition = definition.panel; delete definition.panel; this.base( definition ); this.document = ( panelDefinition && panelDefinition.parent && panelDefinition.parent.getDocument() ) || CKEDITOR.document; panelDefinition.block = { attributes : panelDefinition.attributes }; this.hasArrow = true; this.click = clickFn; this._ = { panelDefinition : panelDefinition }; }, statics : { handler : { create : function( definition ) { return new CKEDITOR.ui.panelButton( definition ); } } }, proto : { createPanel : function( editor ) { var _ = this._; if ( _.panel ) return; var panelDefinition = this._.panelDefinition || {}, panelBlockDefinition = this._.panelDefinition.block, panelParentElement = panelDefinition.parent || CKEDITOR.document.getBody(), panel = this._.panel = new CKEDITOR.ui.floatPanel( editor, panelParentElement, panelDefinition ), block = panel.addBlock( _.id, panelBlockDefinition ), me = this; panel.onShow = function() { if ( me.className ) this.element.getFirst().addClass( me.className + '_panel' ); _.oldState = me._.state; me.setState( CKEDITOR.TRISTATE_ON ); _.on = 1; if ( me.onOpen ) me.onOpen(); }; panel.onHide = function() { if ( me.className ) this.element.getFirst().removeClass( me.className + '_panel' ); me.setState( _.oldState ); _.on = 0; if ( me.onClose ) me.onClose(); }; panel.onEscape = function() { panel.hide(); me.document.getById( _.id ).focus(); }; if ( this.onBlock ) this.onBlock( panel, block ); block.onHide = function() { _.on = 0; me.setState( CKEDITOR.TRISTATE_OFF ); }; } } }); })();
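/*
 * Editor's usage sketch (not part of the original plugin): registering a panel
 * button from another plugin via the UI handler installed above. The button
 * name, labels, and onBlock body are illustrative assumptions, not CKEditor
 * source.
 *
 *   editor.ui.add( 'MyPanelButton', CKEDITOR.UI_PANELBUTTON,
 *       {
 *           label : 'My Panel',
 *           title : 'My Panel',
 *           className : 'cke_button_mypanel',
 *           panel : { css : editor.skin.editor.css },
 *           onBlock : function( panel, block )
 *           {
 *               block.element.setHtml( '<div>panel content</div>' );
 *           }
 *       });
 */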
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/fx/Shadow.js
if(!dojo._hasResource["dojox.fx.Shadow"]){ dojo._hasResource["dojox.fx.Shadow"]=true; dojo.provide("dojox.fx.Shadow"); dojo.experimental("dojox.fx.Shadow"); dojo.require("dijit._Widget"); dojo.require("dojo.NodeList-fx"); dojo.declare("dojox.fx.Shadow",dijit._Widget,{shadowPng:dojo.moduleUrl("dojox.fx","resources/shadow"),shadowThickness:7,shadowOffset:3,opacity:0.75,animate:false,node:null,startup:function(){ this.inherited(arguments); this.node.style.position="relative"; this.pieces={}; var x1=-1*this.shadowThickness; var y0=this.shadowOffset; var y1=this.shadowOffset+this.shadowThickness; this._makePiece("tl","top",y0,"left",x1); this._makePiece("l","top",y1,"left",x1,"scale"); this._makePiece("tr","top",y0,"left",0); this._makePiece("r","top",y1,"left",0,"scale"); this._makePiece("bl","top",0,"left",x1); this._makePiece("b","top",0,"left",0,"crop"); this._makePiece("br","top",0,"left",0); this.nodeList=dojo.query(".shadowPiece",this.node); this.setOpacity(this.opacity); this.resize(); },_makePiece:function(_1,_2,_3,_4,_5,_6){ var _7; var _8=this.shadowPng+_1.toUpperCase()+".png"; if(dojo.isIE<7){ _7=dojo.create("div"); _7.style.filter="progid:DXImageTransform.Microsoft.AlphaImageLoader(src='"+_8+"'"+(_6?", sizingMethod='"+_6+"'":"")+")"; }else{ _7=dojo.create("img",{src:_8}); } _7.style.position="absolute"; _7.style[_2]=_3+"px"; _7.style[_4]=_5+"px"; _7.style.width=this.shadowThickness+"px"; _7.style.height=this.shadowThickness+"px"; dojo.addClass(_7,"shadowPiece"); this.pieces[_1]=_7; this.node.appendChild(_7); },setOpacity:function(n,_9){ if(dojo.isIE){ return; } if(!_9){ _9={}; } if(this.animate){ var _a=[]; this.nodeList.forEach(function(_b){ _a.push(dojo._fade(dojo.mixin(_9,{node:_b,end:n}))); }); dojo.fx.combine(_a).play(); }else{ this.nodeList.style("opacity",n); } },setDisabled:function(_c){ if(_c){ if(this.disabled){ return; } if(this.animate){ this.nodeList.fadeOut().play(); }else{ this.nodeList.style("visibility","hidden"); } this.disabled=true; }else{ if(!this.disabled){ return; } if(this.animate){ this.nodeList.fadeIn().play(); }else{ this.nodeList.style("visibility","visible"); } this.disabled=false; } },resize:function(_d){ var x; var y; if(_d){ x=_d.x; y=_d.y; }else{ var co=dojo._getBorderBox(this.node); x=co.w; y=co.h; } var _e=y-(this.shadowOffset+this.shadowThickness); if(_e<0){ _e=0; } if(y<1){ y=1; } if(x<1){ x=1; } with(this.pieces){ l.style.height=_e+"px"; r.style.height=_e+"px"; b.style.width=x+"px"; bl.style.top=y+"px"; b.style.top=y+"px"; br.style.top=y+"px"; tr.style.left=x+"px"; r.style.left=x+"px"; br.style.left=x+"px"; } }}); }
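/*
 * Editor's usage sketch (not part of the original file): attaching a shadow to
 * an existing node once the page has loaded. The node id and sizes are
 * illustrative assumptions.
 *
 *   dojo.require("dojox.fx.Shadow");
 *   dojo.addOnLoad(function(){
 *       var shadow = new dojox.fx.Shadow({ node: dojo.byId("box"), animate: true });
 *       shadow.startup();
 *       shadow.resize({ x: 200, y: 120 });
 *   });
 */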
/Auxjad-1.0.0.tar.gz/Auxjad-1.0.0/auxjad/mutate/merge_partial_tuplets.py
import abjad def merge_partial_tuplets(selection: abjad.Selection, *, merge_across_barlines: bool = False, ) -> None: r"""Mutates an input |abjad.Selection| in place and has no return value; this function merges all consecutive partial tuplets with the same ratio and which sum up to an assignable duration. Partial tuplets can result from algorithmic manipulations such as phasing or looping, which can slice through a tuplet. Basic usage: Usage is simple: >>> staff = abjad.Staff(r"\times 2/3 {c'1} \times 2/3 {d'2}") >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { c'1 } \times 2/3 { d'2 } } .. figure:: ../_images/merge_partial_tuplets-ilr68s15kqb.png >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { c'1 d'2 } } .. figure:: ../_images/merge_partial_tuplets-qe29etsedx.png .. note:: Auxjad automatically adds this function as an extension function to |abjad.mutate|. It can thus be used from either |auxjad.mutate|_ or |abjad.mutate| namespaces. Therefore, the two lines below are equivalent: >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.mutate.merge_partial_tuplets(staff[:]) Multiple consecutive partial tuplets: This function can also handle several consecutive partial tuplets: >>> staff = abjad.Staff( ... r"\times 2/3 {c'2} \times 2/3 {d'2} \times 2/3 {e'2}" ... ) >>> abjad.show(staff) .. docs:: { \times 2/3 { c'2 } \times 2/3 { d'2 } \times 2/3 { e'2 } } .. figure:: ../_images/merge_partial_tuplets-9rh7vpu208j.png >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { c'2 d'2 e'2 } } .. figure:: ../_images/merge_partial_tuplets-oy1imqisx2.png ``merge_across_barlines``: By default, partial tuplets are not merged across barlines. >>> staff = abjad.Staff( ... r"\time 3/4 c'2. " ... r"\times 2/3 {d'4} r4 \times 2/3 {e'2} " ... r"\times 2/3 {f'4} r4 \times 2/3 {g'2}" ... ) >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.show(staff) .. docs:: \new Staff { \time 3/4 c'2. \tweak edge-height #'(0.7 . 0) \times 2/3 { d'4 } r4 \tweak edge-height #'(0.7 . 0) \times 2/3 { e'2 } \tweak edge-height #'(0.7 . 0) \times 2/3 { f'4 } r4 \tweak edge-height #'(0.7 . 0) \times 2/3 { g'2 } } .. figure:: ../_images/merge_partial_tuplets-3rjib7pctml.png To change this behaviour, set ``merge_across_barlines`` to ``True``. >>> staff = abjad.Staff( ... r"\time 3/4 c'2. " ... r"\times 2/3 {d'4} r4 \times 2/3 {e'2} " ... r"\times 2/3 {f'4} r4 \times 2/3 {g'2}" ... ) >>> auxjad.mutate.merge_partial_tuplets( ... staff[:], ... merge_across_barlines=True, ... ) >>> abjad.show(staff) .. docs:: \new Staff { \time 3/4 c'2. \tweak edge-height #'(0.7 . 0) \times 2/3 { d'4 } r4 \times 2/3 { e'2 f'4 } r4 \tweak edge-height #'(0.7 . 0) \times 2/3 { g'2 } } .. figure:: ../_images/merge_partial_tuplets-icud1ejcmzc.png Tied partial tuplets: Tied partial tuplets are also handled by this function. >>> staff = abjad.Staff( ... r"\times 2/3 {r4} \times 2/3 {c'2} " ... r"\times 4/5 {d'2~} \times 4/5{d'8}" ... ) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { r4 } \times 2/3 { c'2 } \times 4/5 { d'2 ~ } \times 4/5 { d'8 } } .. figure:: ../_images/merge_partial_tuplets-st4zw38qfce.png >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { r4 c'2 } \times 4/5 { d'2 ~ d'8 } } .. figure:: ../_images/merge_partial_tuplets-1pky5fsh2nl.png Indicators: Indicators stay the same in the merged tuplet. >>> staff = abjad.Staff( ... 
r"\times 2/3 {c'2\p\< d'2} \times 2/3 {e'2\ff}" ... ) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { c'2 \p \< d'2 } \times 2/3 { e'2 \ff } } .. figure:: ../_images/merge_partial_tuplets-7cdtafl348h.png >>> auxjad.mutate.merge_partial_tuplets(staff[:]) >>> abjad.show(staff) .. docs:: \new Staff { \times 2/3 { c'2 \p \< d'2 e'2 \ff } } .. figure:: ../_images/merge_partial_tuplets-j9rmdfbawce.png .. tip:: The method |auxjad.mutate.extract_trivial_tuplets()| can be used after merging partial tuplets to further clean the output. The method |auxjad.mutate.auto_rewrite_meter()| can also be used for this purpose, as it will not only rewrite the metric notation of a staff but also apply both |auxjad.mutate.merge_partial_tuplets()| and |auxjad.mutate.extract_trivial_tuplets()| to the output. .. note:: When using |abjad.Container|'s, all time signatures in the output will be commented out with ``%%%.`` This is because Abjad only applies time signatures to containers that belong to a |abjad.Staff|. The present function works with either |abjad.Container| and |abjad.Staff|. >>> container = abjad.Container(r"\time 3/4 c'4 d'4 e'4") >>> abjad.show(container) .. docs:: { %%% \time 3/4 %%% c'4 d'4 e'4 } .. figure:: ../_images/merge_partial_tuplets-945s36mfdn.png >>> staff = abjad.Staff([container]) >>> abjad.show(container) .. docs:: { \time 3/4 c'4 d'4 e'4 } .. figure:: ../_images/merge_partial_tuplets-3b4tyqrnttw.png .. warning:: The input selection must be a contiguous logical voice. When dealing with a container with multiple subcontainers (e.g. a score containing multiple staves), the best approach is to cycle through these subcontainers, applying this function to them individually. """ if not isinstance(selection, abjad.Selection): raise TypeError("argument must be 'abjad.Selection'") if not isinstance(merge_across_barlines, bool): raise TypeError("'merge_across_barlines' must be 'bool'") tuplets = selection.tuplets() if len(tuplets) <= 1: return def _process_tuplets(tuplets): for index in range(len(tuplets[:-1])): for upper_index in range(index, len(tuplets)): if (tuplets[index].multiplier == tuplets[upper_index].multiplier): tuplet_group = tuplets[index:upper_index + 1] else: break if tuplet_group.are_contiguous_logical_voice(): durations = [abjad.get.duration(tuplet) for tuplet in tuplet_group] sum_durations = sum(durations) if (all(duration.implied_prolation != 1 for duration in durations) and sum_durations.implied_prolation == 1): for tuplet in tuplet_group[1:]: tuplet_group[0].extend(tuplet) abjad.mutate.extract(tuplet) if not merge_across_barlines: measures = selection.group_by_measure() for measure in measures: tuplets = abjad.select(measure).tuplets() if len(tuplets) <= 1: continue _process_tuplets(tuplets) else: _process_tuplets(tuplets)
/functions/structural_holes/MaxD.py
__all__ = [ "get_structural_holes_MaxD" ] def get_community_kernel(G, C: [frozenset]): ''' To get community kernels with most degrees. Parameters ---------- G : graph An undirected graph. C : int #communities Returns ------- kernels ''' area = [] for i in range(len(G)): area.append(0) for i, cc in enumerate(C): for each_node in cc: area[each_node-1] += 1 << i # node_id from 1 to n. # print(area) kernels = [] cnt = 0 for i in range(len(C)): mask = 1<<i cnt+=1 # print(cnt, ":", mask) q = [] p = [] for i in range(len(G)): if (area[i] & mask) == mask: q.append((G.degree[i+1], i+1)) q.sort() q.reverse() # print(q) for i in range(max(int(len(q)/100), min(2, len(q)))): # latter of min for test. p.append(q[i][1]) # if len(p) > 0: # kernels.append(p) kernels.append(p) # print(kernels) if len(kernels) < 2: print("ERROR: WE should have at least 2 communities.") for i in range(len(kernels)): if len(kernels[i]) == 0: print("Comunity %d is too small." % i) return None # print(kernels) return kernels def get_structural_holes_MaxD (G, k_size, C: [frozenset]): ''' To calc the strucutral hole spanners using MaxD. Parameters ---------- G : graph An undirected graph. k_size : int top-k SHS C : int #communities Returns ------- A list of top-k spanners. ''' # for i, cc in enumerate(C): # for aq in cc: # print(i, ":", aq) # print() # print(len(C)) kernels = get_community_kernel(G, C) # print(kernels) c = len(kernels) # print(c) save = [] for i in range(len(G)): save.append(False) build_network(kernels, c, G) n = len(G) sflow = [] save = [] for i in range(n): save.append(True) q = [] ans_list = [] for step in range(k_size): q.clear() sflow.clear() for i in range(n): sflow.append(0) max_flow(n, kernels, save) for i in range(n*(c-1)): k = head[i] while k >= 0: if flow[k] > 0: sflow[i % n] += flow[k] k = nex[k] for i in range(n): if save[i] == False: q.append((-1, i)) else: q.append((sflow[i]+G.degree[i+1], i)) q.sort() q.reverse() # print(q) candidates = [] for i in range(n): if save[q[i][1]] == True and len(candidates) < k_size: candidates.append(q[i][1]) ret = pick_candidates(n, candidates, kernels, save) #print(ret[1]+1) ans_list.append(ret[1]+1) # global head # head.append(233) del sflow del q return ans_list def pick_candidates(n, candidates, kernels, save): ''' detect candidates. Parameters ---------- n : #nodes candidates : A list of candidates. kernels : A list of kernels save : A bool list of visited candidates for max_flow. Returns ------- A tuple of min_cut, best_candidate of this round. ''' for i in range(len(candidates)): save[candidates[i]] = False old_flow = max_flow(n, kernels, save) global prev_flow prev_flow.clear() for i in range(nedge): prev_flow.append(flow[i]) mcut = 100000000 best_key = -1 for i in range(len(candidates)): key = candidates[i] for j in range(len(candidates)): save[candidates[j]] = True save[key] = False tp = max_flow(n, kernels, save, prev_flow) if tp < mcut: mcut = tp best_key = key for i in range(len(candidates)): save[candidates[i]] = True save[best_key] = False return (old_flow+mcut, best_key) head = [] point = [] nex = [] flow = [] capa = [] dist = [] work = [] dsave = [] src = 0 dest = 0 node = 0 nedge = 0 prev_flow = [] oo = 1000000000 def dinic_bfs(): ''' using BFS to find augmenting path. Returns ------- A bool, whether found a augmenting path or not. 
''' global dist, dest, src, node dist.clear() for i in range(node): dist.append(-1) dist[src] = 0 Q = [] Q.append(src) cl = 0 while cl < len(Q): # print(cl, len(Q)) k = Q[cl] i = head[k] while i >= 0: if flow[i] < capa[i] and dsave[point[i]]==True and dist[point[i]]<0: dist[point[i]] = dist[k]+1 Q.append(point[i]) i = nex[i] cl += 1 return dist[dest] >= 0 def dinic_dfs(x, exp): ''' using DFS to calc the augmenting path and refresh network. Parameters ---------- x : current node. exp : current flow. Returns ------- current flow. ''' if x == dest: return exp res = 0 i = work[x] global flow while i >= 0: v = point[i] tmp = 0 if flow[i] < capa[i] and dist[v]==dist[x]+1: tmp = dinic_dfs(v, min(exp, capa[i] - flow[i])) if tmp>0: flow[i] += tmp flow[i^1] -= tmp res += tmp exp -= tmp if exp == 0: break i = nex[i] return res def dinic_flow(): ''' Dinic algorithm to calc max_flow. Returns ------- max_flow. ''' result = 0 global work # print(dinic_bfs()) while dinic_bfs(): work.clear() for i in range(node): work.append(head[i]) result += dinic_dfs(src, oo) # print(result) return result def max_flow(n,kernels, save, prev_flow = None): ''' Calculate max_flow. Parameters ---------- n : #nodes kernels : A list of kernels. save : A bool list of visited nodes. prev_flow : A list of previous flows. Returns ------- max_flow ''' global dsave, node dsave.clear() for i in range(node): dsave.append(True) # print(nedge) if prev_flow != None: for i in range(nedge): flow.append(prev_flow[i]) else: for i in range(nedge): flow.append(0) c = len(kernels) for i in range(n): for k in range(c-1): dsave[k*n+i] = save[i] ret = dinic_flow() return ret def init_MaxD(_node, _src, _dest): ''' Initialize a network. Parameters ---------- _node : #nodes _src : the source node _dest : the destiny node Returns ------- void ''' global node, src, dest node = _node src = _src dest = _dest global point, capa, flow, nex, head head.clear() # print(node) for i in range(node): head.append(-1) nedge = 0 point.clear() capa.clear() flow.clear() nex.clear() # print(head) # print(node) # print(src) # print(dest) return def addedge(u, v, c1, c2): ''' Add an edge(u,v) with capacity c1 and inverse capacity c2. Parameters ---------- u : node u v : node v c1 : capacity c1 c2 : capacity c2 Returns ------- void ''' # print(u, v) global nedge global point, capa, flow, nex, head point.append(v) capa.append(c1) flow.append(0) nex.append(head[u]) head[u] = nedge nedge += 1 # print(u, point[nedge-1]) point.append(u) capa.append(c2) flow.append(0) nex.append(head[v]) head[v] = nedge nedge +=1 return def build_network(kernels, c, G): ''' build a network. Parameters ---------- kernels : A list of kernels. c : #communities. G : graph An undirected graph. 
Returns ------- void ''' n = len(G) init_MaxD(n*(c-1)+2, n*(c-1), n*(c-1)+1) # print(kernels) base = 0 for k_iter in range(c): S1 = set() S2 = set() for i in range(c): for j in range(len(kernels[i])): if i == k_iter: S1.add(kernels[i][j]) elif i < k_iter: S2.add(kernels[i][j]) if len(S1) == 0 or len(S2) == 0: continue for edges in G.edges: addedge(base + edges[0] - 1, base + edges[1] - 1, 1, 1) addedge(base + edges[1] - 1, base + edges[0] - 1, 1, 1) # print(nedge) for i in S1: if i not in S2: # print(i) addedge(src, base + i - 1, n, 0) for i in S2: if i not in S1: addedge(base + i - 1, dest, n, 0) base += n # print(dest) return if __name__ == "__main__": import sys sys.path.append('../../../') import ONAL as og g = og.classes.Graph() edges1 = [(1, 2), (2, 3), (1, 3), (3, 4), (4, 5), (4, 6), (5, 6)] edges2 = [(3, 7), (4, 7), (10, 7), (11, 7)] edges3 = [(8, 9), (8, 10), (9, 10), (10, 11), (11, 12), (11, 13), (12, 13)] g.add_edges(edges1) g.add_edges(edges2) g.add_edges(edges3) cmnts = [frozenset([1, 2, 3]), frozenset([4, 5, 6]), frozenset([3, 4, 7, 10, 11]), frozenset([8, 9, 10]), frozenset([11, 12, 13])] # for edge in g.edges: # print(edge[0],edge[1]) k = 5 # top-k spanners k_top = get_structural_holes_MaxD(g, k, cmnts) print(k_top)
/AmFast-0.5.3-r541.tar.gz/AmFast-0.5.3-r541/amfast/remoting/sa_subscription_manager.py
import time import cPickle as pickle import sqlalchemy as sa from sqlalchemy.sql import func, and_ if sa.__version__.startswith('0.5'): # 0.5 is lowest supported version BINARY_TYPE = sa.Binary else: BINARY_TYPE = sa.LargeBinary from subscription_manager import Subscription, SubscriptionManager import flex_messages as messaging class SaSubscriptionManager(SubscriptionManager): """Manages subscriptions in a database, uses SqlAlchemy to talk to the DB.""" def __init__(self, engine, metadata, secure=False, ttl=30000, table_prefix=''): SubscriptionManager.__init__(self, secure=secure, ttl=ttl) self.engine = engine self.metadata = metadata self.table_prefix = table_prefix and "%s_" % table_prefix.rstrip('_') or table_prefix self.mapTables() def reset(self): db = self.getDb() db.execute(self.subscriptions.delete()) db.execute(self.messages.delete()) db.close() def mapTables(self): self.subscriptions = sa.Table('%ssubscriptions' % self.table_prefix, self.metadata, sa.Column('connection_id', sa.String(36), primary_key=True), sa.Column('client_id', sa.String(36), primary_key=True), sa.Column('topic', sa.String(128), primary_key=True) ) self.messages = sa.Table('%smessages' % self.table_prefix, self.metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('topic', sa.String(256), index=True), sa.Column('clientId', sa.String(128), nullable=True), sa.Column('messageId', sa.String(128), nullable=True), sa.Column('correlationId', sa.String(128), nullable=True), sa.Column('destination', sa.String(128), nullable=True), sa.Column('timestamp', sa.Float(), nullable=True), sa.Column('timeToLive', sa.Float(), nullable=True), sa.Column('headers', BINARY_TYPE(), nullable=True), sa.Column('body', BINARY_TYPE(), nullable=False) ) def createTables(self): db = self.getDb() self.subscriptions.create(db, checkfirst=True) self.messages.create(db, checkfirst=True) db.close() def getDb(self): return self.engine.connect() def subscribe(self, connection_id, client_id, topic, sub_topic=None, selector=None): """Subscribe a client to a topic. arguments ========== * connection_id - string, id of Flash client that is subscribing. * client_id - string, id of messaging client that is subscribing. * topic - string, Topic to subscribe to. * sub_topic - string, Sub-Topic to subscribe to. Default = None. """ topic = self.getTopicKey(topic, sub_topic) ins = self.subscriptions.insert().values( connection_id=connection_id, client_id=client_id, topic=topic ) db = self.getDb() db.execute(ins) db.close() def unSubscribe(self, connection_id, client_id, topic, sub_topic=None): """Un-Subscribe a client from a topic. arguments ========== * connection_id - string, id of Flash client that is subscribing. * client_id - string, id of messaging client that is subscribing. * topic - string, Topic to un-subscribe from. * sub_topic - string, Sub-Topic to un-subscribe from. Default = None. """ topic = self.getTopicKey(topic, sub_topic) d = self.subscriptions.delete().\ where(and_(self.subscriptions.c.connection_id==connection_id, self.subscriptions.c.client_id==client_id, self.subscriptions.c.topic==topic)) db = self.getDb() db.execute(d) db.close() def deleteConnection(self, connection): """Remove all subscriptions for this connection. arguments ========== * connection_id - string, id of Flash client that is subscribing. 
""" d = self.subscriptions.delete().\ where(self.subscriptions.c.connection_id==connection.id) db = self.getDb() db.execute(d) db.close() def iterSubscribers(self, topic, sub_topic=None): """Iterate through Flash client ids subscribed to a specific topic.""" topic = self.getTopicKey(topic, sub_topic) s = sa.select([self.subscriptions.c.connection_id], self.subscriptions.c.topic==topic, distinct=True) db = self.getDb() results = db.execute(s) for row in results: yield row[self.subscriptions.c.connection_id] def iterConnectionSubscriptions(self, connection): """Iterate through all Subscriptions that belong to a specific connection.""" s = sa.select([self.subscriptions.c.connection_id, self.subscriptions.c.client_id, self.subscriptions.c.topic], self.subscriptions.c.connection_id==connection.id) db = self.getDb() results = db.execute(s) for row in results: yield Subscription(row[self.subscriptions.c.connection_id], row[self.subscriptions.c.client_id], row[self.subscriptions.c.topic]) def persistMessage(self, msg): """Store a message.""" if hasattr(msg, 'headers') and (msg.headers is not None): enc_headers = pickle.dumps(msg.headers) else: enc_headers = None if hasattr(msg, 'correlationId'): correlation_id = msg.correlationId else: correlation_id = None ins = self.messages.insert().values( topic=self.getMessageTopicKey(msg), clientId=msg.clientId, messageId=msg.messageId, correlationId=correlation_id, destination=msg.destination, timestamp=msg.timestamp, timeToLive=msg.timeToLive, headers=enc_headers, body=pickle.dumps(msg.body) ) db = self.getDb() db.execute(ins) db.close() def deleteExpiredMessages(self, cutoff_time): """Deletes expired messages.""" d = self.messages.delete().\ where(self.messages.c.timestamp + self.messages.c.timeToLive < cutoff_time) db = self.getDb() db.execute(d) db.close() def pollMessages(self, topic, cutoff_time, current_time): """Retrieves all queued messages, and discards expired messages. arguments: =========== * topic - string, Topic to find messages for. * cutoff_time - float, epoch time, only messages published after this time will be returned. * current_time - float, epoch time, used to determine if a message is expired. """ # Poll for new messages s = sa.select((self.messages,), and_(self.messages.c.topic == topic, self.messages.c.timestamp > cutoff_time)).\ order_by(self.messages.c.timestamp) db = self.getDb() results = db.execute(s) for row in results: if row['headers'] is None: headers = None else: headers = pickle.loads(str(row['headers'])) yield messaging.AsyncMessage(body=pickle.loads(str(row['body'])), clientId=row['clientId'], destination=row['destination'], headers=headers, timeToLive=row['timeToLive'], timestamp=row['timestamp'], messageId=row['messageId']) db.close()
/0-orchestrator-1.1.0a7.tar.gz/0-orchestrator-1.1.0a7/zeroos/orchestrator/sal/healthchecks/networkstability.py
import re

from ..healthcheck import HealthCheckRun

descr = """
Monitors if a network bond (if there is one) has both (or more) interfaces properly active.
"""


class NetworkStability(HealthCheckRun):
    def __init__(self, node):
        resource = '/nodes/{}'.format(node.name)
        super().__init__('networkstability', 'Network Stability Check', 'Network', resource)
        self.node = node

    def run(self, nodes):
        nic = self.node.get_nic_by_ip(self.node.addr)
        if nic is None:
            raise LookupError("Couldn't get the management nic")

        jobs = []
        for node in nodes:
            other_nic = node.get_nic_by_ip(node.addr)
            if other_nic is not None:
                if nic['mtu'] != other_nic['mtu']:
                    self.add_message(
                        '{}_mtu'.format(node.name), 'ERROR',
                        'The management interface has mtu {} which is different than node {} which is {}'.format(
                            nic['mtu'], node.name, other_nic['mtu']))
                else:
                    self.add_message(
                        '{}_mtu'.format(node.name), 'OK',
                        'The management interface has mtu {} which is the same as node {}'.format(
                            nic['mtu'], node.name))
            else:
                self.add_message('{}_mtu'.format(node.name), 'ERROR',
                                 "Couldn't get the management nic for node {}".format(node.name))
            jobs.append(self.node.client.system(
                'ping -I {} -c 10 -W 1 -q {}'.format(self.node.addr, node.addr), max_time=20))

        for node, job in zip(nodes, jobs):
            res = job.get().stdout.split('\n')
            # reachability percentage = 100 - packet loss reported by ping
            perc = 100 - int(res[2].split(',')[-1].strip().split()[0][:-1])
            if perc < 70:
                self.add_message('{}_ping_perc'.format(node.name), 'ERROR',
                                 "Can reach node {} with percentage {}".format(node.name, perc))
            elif perc < 90:
                self.add_message('{}_ping_perc'.format(node.name), 'WARNING',
                                 "Can reach node {} with percentage {}".format(node.name, perc))
            else:
                self.add_message('{}_ping_perc'.format(node.name), 'OK',
                                 "Can reach node {} with percentage {}".format(node.name, perc))
            if perc == 0:
                self.add_message('{}_ping_rt'.format(node.name), 'ERROR',
                                 "Can't reach node {}".format(node.name))
            else:
                rt = float(res[3].split('/')[3])
                if rt > 200:
                    self.add_message('{}_ping_rt'.format(node.name), 'ERROR',
                                     "Round-trip time to node {} is {}".format(node.name, rt))
                elif rt > 10:
                    self.add_message('{}_ping_rt'.format(node.name), 'WARNING',
                                     "Round-trip time to node {} is {}".format(node.name, rt))
                else:
                    self.add_message('{}_ping_rt'.format(node.name), 'OK',
                                     "Round-trip time to node {} is {}".format(node.name, rt))
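# Editor's note (not part of the original module): the percentages above are
# parsed out of a ping summary. A self-contained illustration of that parsing,
# on a canned transcript shaped the way run() indexes it (header line,
# statistics banner, packet-loss line, round-trip line):
if __name__ == "__main__":
    stdout = ("PING 10.0.0.2 (10.0.0.2): 56 data bytes\n"
              "--- 10.0.0.2 ping statistics ---\n"
              "10 packets transmitted, 9 packets received, 10% packet loss\n"
              "round-trip min/avg/max = 0.420/0.480/0.610 ms\n")
    res = stdout.split('\n')
    perc = 100 - int(res[2].split(',')[-1].strip().split()[0][:-1])  # 90
    rt = float(res[3].split('/')[3])  # 0.48
    print(perc, rt)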
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/nodes/ContainerMakingNodes.py
import functools from abc import abstractmethod from nuitka.PythonVersions import needsSetLiteralReverseInsertion from .ChildrenHavingMixins import ChildHavingElementsTupleMixin from .ConstantRefNodes import ( ExpressionConstantListEmptyRef, ExpressionConstantSetEmptyRef, ExpressionConstantTupleEmptyRef, makeConstantRefNode, ) from .ExpressionBases import ExpressionBase from .ExpressionShapeMixins import ( ExpressionListShapeExactMixin, ExpressionSetShapeExactMixin, ExpressionTupleShapeExactMixin, ) from .IterationHandles import ListAndTupleContainerMakingIterationHandle from .NodeBases import SideEffectsFromChildrenMixin from .NodeMakingHelpers import makeStatementOnlyNodesFromExpressions class ExpressionMakeSequenceMixin(object): __slots__ = () def isKnownToBeIterable(self, count): return count is None or count == len(self.subnode_elements) def isKnownToBeIterableAtMin(self, count): return count <= len(self.subnode_elements) def getIterationValue(self, count): return self.subnode_elements[count] def getIterationValueRange(self, start, stop): return self.subnode_elements[start:stop] @staticmethod def canPredictIterationValues(): return True def getIterationValues(self): return self.subnode_elements def getIterationHandle(self): return ListAndTupleContainerMakingIterationHandle(self.subnode_elements) @staticmethod def getTruthValue(): return True def mayRaiseException(self, exception_type): for element in self.subnode_elements: if element.mayRaiseException(exception_type): return True return False # TODO: Make this happen from auto-compute, children only side effects def computeExpressionDrop(self, statement, trace_collection): # Virtual method overload, pylint: disable=unused-argument result = makeStatementOnlyNodesFromExpressions( expressions=self.subnode_elements ) del self.parent return ( result, "new_statements", """\ Removed %s creation for unused sequence.""" % self.getSequenceName(), ) def onContentEscapes(self, trace_collection): for element in self.subnode_elements: element.onContentEscapes(trace_collection) @abstractmethod def getSequenceName(self): """Get name for use in traces""" class ExpressionMakeSequenceBase( SideEffectsFromChildrenMixin, ExpressionMakeSequenceMixin, ChildHavingElementsTupleMixin, ExpressionBase, ): named_children = ("elements|tuple",) def __init__(self, elements, source_ref): assert elements ChildHavingElementsTupleMixin.__init__( self, elements=elements, ) ExpressionBase.__init__(self, source_ref) def getSequenceName(self): """Get name for use in traces""" simulator = self.getSimulator() return simulator.__name__.capitalize() @staticmethod def isExpressionMakeSequence(): return True @abstractmethod def getSimulator(self): """The simulator for the container making, for overload.""" def computeExpression(self, trace_collection): for element in self.subnode_elements: if not element.isCompileTimeConstant(): return self, None, None simulator = self.getSimulator() assert simulator is not None return trace_collection.getCompileTimeComputationResult( node=self, computation=lambda: simulator( element.getCompileTimeConstant() for element in self.subnode_elements ), description="%s with constant arguments." % simulator.__name__.capitalize(), user_provided=True, ) def makeExpressionMakeTuple(elements, source_ref): if elements: return ExpressionMakeTuple(elements, source_ref) else: # TODO: Get rid of user provided for empty tuple refs, makes no sense. 
return ExpressionConstantTupleEmptyRef( user_provided=False, source_ref=source_ref ) def makeExpressionMakeTupleOrConstant(elements, user_provided, source_ref): for element in elements: # TODO: Compile time constant ought to be the criterion. if not element.isExpressionConstantRef(): result = makeExpressionMakeTuple(elements, source_ref) break else: result = makeConstantRefNode( constant=tuple(element.getCompileTimeConstant() for element in elements), user_provided=user_provided, source_ref=source_ref, ) if elements: result.setCompatibleSourceReference( source_ref=elements[-1].getCompatibleSourceReference() ) return result class ExpressionMakeTuple(ExpressionTupleShapeExactMixin, ExpressionMakeSequenceBase): kind = "EXPRESSION_MAKE_TUPLE" def __init__(self, elements, source_ref): ExpressionMakeSequenceBase.__init__( self, elements=elements, source_ref=source_ref ) @staticmethod def getSimulator(): return tuple def getIterationLength(self): return len(self.subnode_elements) def makeExpressionMakeList(elements, source_ref): if elements: return ExpressionMakeList(elements, source_ref) else: # TODO: Get rid of user provided for empty list refs, makes no sense. return ExpressionConstantListEmptyRef( user_provided=False, source_ref=source_ref ) def makeExpressionMakeListOrConstant(elements, user_provided, source_ref): for element in elements: # TODO: Compile time constant ought to be the criterion. if not element.isExpressionConstantRef(): result = makeExpressionMakeList(elements, source_ref) break else: result = makeConstantRefNode( constant=[element.getCompileTimeConstant() for element in elements], user_provided=user_provided, source_ref=source_ref, ) if elements: result.setCompatibleSourceReference( source_ref=elements[-1].getCompatibleSourceReference() ) return result class ExpressionMakeListMixin(object): __slots__ = () def computeExpressionIter1(self, iter_node, trace_collection): result = ExpressionMakeTuple( elements=self.subnode_elements, source_ref=self.source_ref ) self.parent.replaceChild(self, result) del self.parent return ( iter_node, "new_expression", """\ Iteration over list lowered to iteration over tuple.""", ) class ExpressionMakeList( ExpressionListShapeExactMixin, ExpressionMakeListMixin, ExpressionMakeSequenceBase ): kind = "EXPRESSION_MAKE_LIST" def __init__(self, elements, source_ref): ExpressionMakeSequenceBase.__init__( self, elements=elements, source_ref=source_ref ) @staticmethod def getSimulator(): return list def getIterationLength(self): return len(self.subnode_elements) class ExpressionMakeSet(ExpressionSetShapeExactMixin, ExpressionMakeSequenceBase): kind = "EXPRESSION_MAKE_SET" def __init__(self, elements, source_ref): ExpressionMakeSequenceBase.__init__( self, elements=elements, source_ref=source_ref ) @staticmethod def getSimulator(): return set def getIterationLength(self): element_count = len(self.subnode_elements) # Hashing and equality may consume elements of the produced set. if element_count >= 2: return None else: return element_count @staticmethod def getIterationMinLength(): # Hashing and equality may consume elements of the produced set. 
return 1 def computeExpression(self, trace_collection): # For sets, we need to consider hashing are_constants = True are_hashable = True for element in self.subnode_elements: if are_constants and not element.isCompileTimeConstant(): are_constants = False if are_hashable and not element.isKnownToBeHashable(): are_hashable = False if not are_hashable and not are_constants: break if not are_constants: if not are_hashable: trace_collection.onExceptionRaiseExit(BaseException) return self, None, None simulator = self.getSimulator() assert simulator is not None return trace_collection.getCompileTimeComputationResult( node=self, computation=lambda: simulator( element.getCompileTimeConstant() for element in self.subnode_elements ), description="%s with constant arguments." % simulator.__name__.capitalize(), user_provided=True, ) def mayRaiseException(self, exception_type): for element in self.subnode_elements: if not element.isKnownToBeHashable(): return True if element.mayRaiseException(exception_type): return True return False def computeExpressionIter1(self, iter_node, trace_collection): result = ExpressionMakeTuple( elements=self.subnode_elements, source_ref=self.source_ref ) self.parent.replaceChild(self, result) del self.parent return ( iter_node, "new_expression", """\ Iteration over set lowered to iteration over tuple.""", ) needs_set_literal_reverse = needsSetLiteralReverseInsertion() def makeExpressionMakeSetLiteral(elements, source_ref): if elements: if needs_set_literal_reverse: return ExpressionMakeSetLiteral(elements, source_ref) else: return ExpressionMakeSet(elements, source_ref) else: # TODO: Get rid of user provided for empty set refs, makes no sense. return ExpressionConstantSetEmptyRef(user_provided=False, source_ref=source_ref) @functools.wraps(set) def reversed_set(value): return set(reversed(tuple(value))) def makeExpressionMakeSetLiteralOrConstant(elements, user_provided, source_ref): for element in elements: # TODO: Compile time constant ought to be the criterion. if not element.isExpressionConstantRef(): result = makeExpressionMakeSetLiteral(elements, source_ref) break else: # Need to reverse now if needed. if needs_set_literal_reverse: elements = tuple(reversed(elements)) result = makeConstantRefNode( constant=set(element.getCompileTimeConstant() for element in elements), user_provided=user_provided, source_ref=source_ref, ) if elements: result.setCompatibleSourceReference( source_ref=elements[-1].getCompatibleSourceReference() ) return result class ExpressionMakeSetLiteral(ExpressionMakeSet): kind = "EXPRESSION_MAKE_SET_LITERAL" @staticmethod def getSimulator(): return reversed_set
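# Editor's note (not part of the original module): reversed_set above exists
# because, for equal-but-distinct elements (e.g. 1 and 1.0), which value
# survives in a set depends on insertion order, and set literals are not built
# in plain left-to-right order on every Python version. A builtins-only
# illustration:
if __name__ == "__main__":
    print(set([1, 1.0]))           # forward insertion keeps the first: {1}
    print(reversed_set([1, 1.0]))  # reversed insertion keeps the last: {1.0}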
/Moberg_Analytics_HDF5-1.0.1-py3-none-any.whl/moberg_analytics_hdf5/hdf5_tools.py
import h5py import numpy as np import pandas as pd import os from . import hdf5_exceptions class HDF5Helper: """ DESCRIPTION ----------- This class contains methods for argument, group, dataset, and duplicate checks as well as other methods that add functionality to HDF5Content and HDF5Components. """ #----------Group Functions---------- def check_group_name(self, group_names_list, group_name): """ DESCRIPTION ----------- Returns True if "group_name" is a group name in the HDF5 file. Otherwise returns False and raises a GroupNameError. PARAMETERS ---------- group_names_list: list list of group names in the HDF5 file group_name: str name of the group to check RETURNS ------- True -or- False """ if group_name not in group_names_list: valid_group_name = False raise hdf5_exceptions.GroupNameError(group_name=group_name) else: valid_group_name = True return valid_group_name def is_group(self, hdf5_obj): """ DESCRIPTION ----------- Returns True if the "hdf5_obj" object a HDF5 group object. Otherwise returns False. PARAMETERS ---------- hdf5_obj: HDF5 class object HDF5 class object to check RETURNS ------- True -or- False """ if isinstance(hdf5_obj, h5py._hl.group.Group): # if hdf5_obj is a group object return True else: return False #----------Dataset Functions---------- def check_dataset_name(self, dataset_names_list, dataset_name): """ DESCRIPTION ----------- Returns True if "dataset_name" is a dataset name in the HDF5 file. Otherwise returns False and raises a DatasetNameError. PARAMETERS ---------- dataset_names_list: list list of group names in the HDF5 file dataset_name: str name of the dataset to check RETURNS ------- True -or- False """ if dataset_name not in dataset_names_list: valid_dataset_name = False raise hdf5_exceptions.DatasetNameError(dataset_name=dataset_name) else: valid_dataset_name = True return valid_dataset_name def dup_dataset_check(self, dataset_name): """ DESCRIPTION ----------- Returns True if "dataset_name" is a duplicate dataset in the HDF5 file. Otherwise returns False. PARAMETERS ---------- dataset_name: str name of the dataset to check RETURNS ------- True -or- False """ dup_dataset_names_list = self.get_dup_dataset_names() # list of all duplicate dataset names if dataset_name in dup_dataset_names_list: # if duplicate dataset name return True else: return False def dup_dataset_prompt(self, dataset_name): """ DESCRIPTION ----------- Prompts the user to choose a dataset path to explicitly select a dataset when a duplicate "dataset_name" is passed. Returns the HDF5 file path of the selected dataset. PARAMETERS ---------- dataset_name: str name of the dataset to check RETURNS ------- sel_dataset_path: str path of the user-selected dataset """ print(str(dataset_name) + " is a duplicate dataset name. Please select a dataset path:") dup_dataset_path_list = self.all_dataset_names_dict[dataset_name]["dataset_paths"] # list of duplicate dataset paths for i, dataset_path in enumerate(dup_dataset_path_list): # for each dataset path print(str(i) + ": " + str(dataset_path)) sel_num = int(input("Enter the value of the corresponding path: ")) # prompt user to select a dataset path sel_dataset_path = dup_dataset_path_list[sel_num] # dataset path selected by the user return sel_dataset_path def zip_datasets(self): """ DESCRIPTION ----------- Returns a zip object of the dataset_path_list, dataset_name_list, and dataset_list. 
PARAMETERS ---------- none RETURNS ------- zipped_datasets: zip object iterator of a tuple of dataset_path_list, dataset_name_list, and dataset_list """ dataset_path_list = self.get_all_dataset_paths() dataset_name_list = self.get_all_dataset_names() dataset_obj_list = self.get_all_dataset_objs() zipped_datasets = zip(dataset_path_list, dataset_name_list, dataset_obj_list) return zipped_datasets def is_dataset(self, hdf5_obj): """ DESCRIPTION ----------- Returns True if the "hdf5_obj" object a HDF5 group object. Otherwise returns False. PARAMETERS ---------- hdf5_obj: HDF5 class object HDF5 class object to check RETURNS ------- True -or- False """ if isinstance(hdf5_obj, h5py._hl.dataset.Dataset): # if hdf5_obj is a dataset object return True else: return False #----------Misc. Functions---------- def check_args(self, arg_list, req_num_args): """ DESCRIPTION ----------- Raises an ArgumentError if zero or more than req_num_args number of arguments are passed to a function. PARAMETERS ---------- arg_list: list list of arguments passed to a function req_num_args: int number of required arguments RETURNS ------- none """ # the first arg in arg_list is always self, so the slice taken from [1:] # total number of args that are not None if sum(arg is not None for arg in arg_list[1:]) == 0: # if zero arguments are passed raise hdf5_exceptions.ArgumentError(error_message='Arguments are required for this function.') elif sum(arg is not None for arg in arg_list[1:]) > req_num_args: # if more than req_num_args number of args are passed raise hdf5_exceptions.ArgumentError(error_message='Too many or invalid arguments were passed to this function.') def check_path(self, path_list, path_to_check): """ DESCRIPTION ----------- Returns True if "path_to_check" is a path in the HDF5 file. Otherwise returns False and raises a PathError. PARAMETERS ---------- path_list: list list of group and dataset paths in the HDF5 file path_to_check: str name of the path to check RETURNS ------- True -or- False """ if path_to_check not in path_list: valid_path = False raise hdf5_exceptions.PathError(path=path_to_check) else: valid_path = True return valid_path class HDF5Content(HDF5Helper): """ DESCRIPTION ----------- This class contains methods that organize the content of the HDF5 file into lists and dictionaries. PARAMETERS ---------- hdf5_filepath: HDF5 file path path to the user-selected HDF5 file ATTRIBUTES ---------- hdf5file: HDF5 file user-selected HDF5 file all_group_names_dict: dict dictionary of all groups and their associated info group names are keys all_dataset_names_dict: dict dictionary of all datasets and their associated info datasetset names are keys all_dataset_paths_dict: dict dictionary of all datasets and their associated info datasetset paths are keys """ def __init__(self, hdf5_filepath): super() hdf5file = h5py.File(hdf5_filepath, "r") self.hdf5file = hdf5file self.all_group_names_dict = self.get_all_group_names_dict() self.all_dataset_names_dict = self.get_all_dataset_names_dict() self.all_dataset_paths_dict = self.get_all_dataset_paths_dict() # ----------Group Functions---------- def get_all_group_paths(self): """ DESCRIPTION ----------- Returns a list of all group paths in the HDF5 file (including the Root group and subgroups). 
RETURNS ------- all_group_paths_list: list list of all group paths """ all_group_paths_list = [] all_group_paths_list.append("/") # add Root group to list def visit_group(path): if isinstance(self.hdf5file[path], h5py._hl.group.Group): # if group all_group_paths_list.append(path) # add group to list self.hdf5file.visit(visit_group) # visit all objects in the HDF5 file return all_group_paths_list def get_all_group_objs(self): """ DESCRIPTION ----------- Returns a list of all group class objects in the HDF5 file (including the Root group and subgroups). RETURNS ------- all_group_objs_list: list list of all HDF5 group class objects """ all_group_objs_list = [] all_group_paths_list = self.get_all_group_paths() # all group paths for group_path in all_group_paths_list: # for every group path all_group_objs_list.append(self.hdf5file[group_path]) # add group object to list return all_group_objs_list def get_all_group_names(self): """ DESCRIPTION ----------- Returns a list of all group names in the HDF5 file (including the Root group and subgroups). RETURNS ------- all_group_names_list: list list of all group names """ all_group_names_list = [] def visit_group(path): if isinstance(self.hdf5file[path], h5py._hl.group.Group): # if group all_group_names_list.append(path.split("/")[-1]) # add group name to list self.hdf5file.visit(visit_group) # visit all objects in the HDF5 file all_group_names_list.insert(0, "/") # add Root group to start of list return all_group_names_list def get_all_group_names_dict(self): """ DESCRIPTION ----------- Returns a dictionary of all group names (including the Root group and subgroups) and their associated info from the HDF5 file. Key - group name Value - group info RETURNS ---------- all_group_names_dict: dict dictionary of all group names and their associated info """ all_group_names_list = self.get_all_group_names() all_group_paths_list = self.get_all_group_paths() all_group_objs_list = self.get_all_group_objs() all_group_names_dict = {} for i, group_name in enumerate(all_group_names_list): # for each group name in the list group_obj = all_group_objs_list[i] # group object group_path = all_group_paths_list[i] # group path subgroup_list = self.get_subgroups_list(group_path=group_path) # list of subgroups in the group_path group subgroup_dict = {} for subgroup_name in subgroup_list: # for each subgroup name in the list subgroup_path = group_path + "/" + subgroup_name # subgroup path subgroup_obj = self.hdf5file.get(subgroup_path) # subgroup object subgroup_dict.update({subgroup_name: subgroup_obj}) dataset_name_list = self.get_group_dataset_names(group_path=group_path) # list of dataset names in the group_path group dataset_dict = {} for dataset_name in dataset_name_list: # for each dataset name in the list dataset_path = group_path + "/" + dataset_name # dataset path dataset_obj = self.hdf5file.get(dataset_path) # dataset object dataset_dict.update({dataset_name: dataset_obj}) group_and_path_dict = { "group_obj": group_obj, "group_path": group_path, "subgroups": subgroup_dict, "datasets": dataset_dict, } all_group_names_dict.update({group_name: group_and_path_dict}) return all_group_names_dict def get_all_group_objs_dict(self): """ DESCRIPTION ----------- Returns a dictionary of all group objects (including the Root group and subgroups) and their group name from the HDF5 file. 
Key - group class object Value - group name RETURNS ------- all_group_objs_dict: dict dictionary of all group objects and their associated info """ all_group_objs_list = self.get_all_group_objs() all_group_names_list = self.get_all_group_names() all_group_paths_list = self.get_all_group_paths() all_group_objs_dict = {} for i, group_obj in enumerate(all_group_objs_list): # for each group object in the list group_name = all_group_names_list[i] # group name group_path = all_group_paths_list[i] # group path group_and_path_dict = { "group_name": group_name, "group_path": group_path } all_group_objs_dict.update({group_obj: group_and_path_dict}) return all_group_objs_dict def get_subgroups_list(self, group_path): """ DESCRIPTION ----------- Returns a list of subgroup names within the group at the "group_path" location in the HDF5 file. PARAMETERS ---------- group_path: str path of the group in the HDF5 file RETURNS ------- subgroup_list: list list of subgroups within a group """ try: valid_path = self.check_path(path_list=self.get_all_group_paths(), path_to_check=group_path) if valid_path: subgroup_list = [] for name, obj in self.hdf5file[group_path].items(): # for each name and object in the group_path group if self.is_group(hdf5_obj=obj): # if group (subgroup) object subgroup_name = name # subgroup name subgroup_list.append(subgroup_name) # add subgroup name to list return subgroup_list except hdf5_exceptions.PathError as e: print(e) # ----------Dataset Functions---------- def get_all_dataset_paths(self): """ DESCRIPTION ----------- Returns a list of all dataset paths in the HDF5 file. RETURNS ------- all_dataset_paths_list: list list of all dataset paths """ all_dataset_paths_list = [] def visit_dataset(path): if isinstance(self.hdf5file[path], h5py._hl.dataset.Dataset): # if dataset all_dataset_paths_list.append(path) # add dataset path to list self.hdf5file.visit(visit_dataset) # visit all objects in the HDF5 file return all_dataset_paths_list def get_all_dataset_objs(self): """ DESCRIPTION ----------- Returns a list of all dataset class objects in the HDF5 file. RETURNS ------- all_dataset_objs_list: list list of all dataset class objects """ all_dataset_objs_list = [] all_dataset_paths_list = self.get_all_dataset_paths() for dataset_path in all_dataset_paths_list: # for each dataset path in the list all_dataset_objs_list.append(self.hdf5file[dataset_path]) # add dataset object to list return all_dataset_objs_list def get_all_dataset_names(self): """ DESCRIPTION ----------- Returns a list of all dataset names in the HDF5 file. RETURNS ------- all_dataset_names_list: list list of all dataset names """ all_dataset_names_list = [] def visit_dataset(path): if isinstance(self.hdf5file[path], h5py._hl.dataset.Dataset): # if dataset all_dataset_names_list.append(path.split("/")[-1]) # add dataset name to list self.hdf5file.visit(visit_dataset) # visit all objects in the HDF5 file return all_dataset_names_list def get_all_dataset_paths_dict(self): """ DESCRIPTION ----------- Returns a dictionary of every dataset path and its associated info from the HDF5 file. 
Key - dataset path Value - dataset info RETURNS ------- all_dataset_paths_dict: dict dictionary of every dataset path and its associated info """ zipped_datasets = self.zip_datasets() all_dataset_paths_dict = {} for dataset_path, dataset_name, dataset_obj in zipped_datasets: # for tuple of dataset_path_list, dataset_name_list, and dataset_list all_dataset_paths_dict.update({dataset_path: {"dataset_name": dataset_name, "dataset_obj": dataset_obj}}) return all_dataset_paths_dict def get_all_dataset_names_dict(self): """ DESCRIPTION ----------- Returns a dictionary of every dataset name and its associated info from the HDF5 file. Key - dataset name Value - dataset info RETURNS ------- all_dataset_names_dict: dict dictionary of every dataset name and its associated info """ zipped_datasets = self.zip_datasets() all_dataset_names_dict = {} for dataset_path, dataset_name, dataset_obj in zipped_datasets: # for tuple of dataset_path_list, dataset_name_list, and dataset_list all_dataset_names_dict.update({dataset_name: {"dataset_path": dataset_path, "dataset_obj": dataset_obj}}) dup_dataset_dict = self.get_dup_dataset_dict() # dict of duplicate datasets for dataset_name, path_and_obj_dict in dup_dataset_dict.items(): # for dataset_name and path_and_obj_dict in the duplicate dataset dict all_dataset_names_dict.update({dataset_name: path_and_obj_dict}) return all_dataset_names_dict def get_group_dataset_names(self, group_name=None, group_path=None): """ DESCRIPTION ----------- Returns a list of dataset names within the group at the "group_path" location in the HDF5 file. Does not include datasets within subgroups. PARAMETERS ---------- group_name: str name of the group in the HDF5 file group_path: str path of the group in the HDF5 file RETURNS ------- dataset_name_list: list list of all dataset names within the group """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: group_path = self.get_path(group_name=group_name) elif group_path: # if group_path passed in self.check_path(path_list=self.get_all_group_paths(), path_to_check=group_path) dataset_name_list = [] for name, obj in self.hdf5file[group_path].items(): # for each name and object in the group_path group if self.is_dataset(hdf5_obj=obj): # if dataset object dataset_name = name # dataset name dataset_name_list.append(dataset_name) # add dataset name to list return dataset_name_list except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.PathError as e3: print(e3) def get_dup_dataset_names(self): """ DESCRIPTION ----------- Returns a list of all duplicate dataset names in the HDF5 file. 
RETURNS ------- dup_dataset_names_list: list list of all duplicate dataset names """ seen_dict = {} dup_dataset_names_list = [] all_dataset_names_list = self.get_all_dataset_names() # list of all dataset names for dataset_name in all_dataset_names_list: # for each dataset name if dataset_name not in seen_dict: # if first occurance of dataset name seen_dict.update({dataset_name: 1}) # key - dataset_name, value - 1 else: # if not first occurance of dataset_name if seen_dict[dataset_name] == 1: # if first time seeing duplicate dataset name dup_dataset_names_list.append(dataset_name) # add dataset name to list of duplicates seen_dict[dataset_name] += 1 # prevents duplicates of dataset names in the list return dup_dataset_names_list def get_dup_dataset_dict(self): """ DESCRIPTION ----------- Returns a dictionary of every duplicate dataset name and its associated info from the HDF5 file. Key - duplicate dataset name Value - dataset info RETURNS ------- dup_dataset_dict: dict dictionary of every duplicate dataset name and its associated info """ dup_dataset_dict = {} all_dataset_paths = self.get_all_dataset_paths() # list of all dataset paths dup_dataset_names_list = self.get_dup_dataset_names() # list of all duplicate dataset names for dataset_name in dup_dataset_names_list: # for each duplicate dataset name path_and_obj_dict = {} dup_dataset_path_list = [] dup_dataset_list = [] for dataset_path in all_dataset_paths: # for each dataset path split_path = self.split_hdf5_path(hdf5_path=dataset_path) # list of HDF5 path components if dataset_name == split_path[-1]: # if dataset path dup_dataset_path_list.append(dataset_path) # add duplicate dataset path to list dataset_obj = self.hdf5file.get(dataset_path) # duplicate dataset object dup_dataset_list.append(dataset_obj) # add duplicate dataset object to list path_and_obj_dict.update({"dataset_path": dup_dataset_path_list, "dataset_obj": dup_dataset_list}) dup_dataset_dict.update({dataset_name: path_and_obj_dict}) return dup_dataset_dict # ----------Misc. Functions---------- def get_path(self, group_name=None, dataset_name=None): """ DESCRIPTION ----------- Returns the HDF5 file path to a group or dataset. 
PARAMETERS ---------- group_name: str group name to get the path to dataset_name: str dataset name to get the path to RETURNS ------- hdf5_path: str path to the group or dataset location in the HDF5 file """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: hdf5_path = self.all_group_names_dict[group_name]["group_path"] elif dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: if self.dup_dataset_check(dataset_name=dataset_name): # True if dataset_name is a duplicate hdf5_path = self.dup_dataset_prompt(dataset_name=dataset_name) # prompt user to select a dataset path else: # if dataset_name is not a duplicate hdf5_path = self.all_dataset_names_dict[dataset_name]["dataset_path"] return hdf5_path except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.DatasetNameError as e3: print(e3) def get_metadata(self, group_name=None, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns a dictionary of metadata attributes. Key - attribute name Value - attribute value PARAMETERS ---------- group_name: str group name to get the metadata of -or- dataset_name: str dataset name to get the metadata of -or- dataset_path: str dataset path to get the metadata of RETURNS ------- metadata_dict: dict dictionary of metadata attributes """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: group_path = self.get_path(group_name=group_name) metadata_dict = dict(self.hdf5file[group_path].attrs) elif dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) metadata_dict = dict(self.hdf5file[dataset_path].attrs) elif dataset_path: # if dataset_path passed in valid_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_path: metadata_dict = dict(self.hdf5file[dataset_path].attrs) return metadata_dict except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.DatasetNameError as e3: print(e3) except hdf5_exceptions.PathError as e4: print(e4) def get_hdf5_filename(self, hdf5_filepath): """ DESCRIPTION ----------- Parses the HDF5 file path and returns the name of the HDF5 file. PARAMETERS ---------- hdf5_filepath: HDF5 file path path to the user-selected HDF5 file RETURNS ------- hdf5_filename: str name of the HDF5 file """ hdf5_filename = os.path.basename(hdf5_filepath) # works on Windows and Linux return hdf5_filename def get_name(self, hdf5_path): """ DESCRIPTION ----------- Returns the group or dataset name from a HDF5 file path. 
PARAMETERS ---------- hdf5_path: str HDF5 path to the group or dataset RETURNS ------- name: str name of the group or dataset """ split_path_list = self.split_hdf5_path(hdf5_path=hdf5_path) # list of HDF5 path components name = split_path_list[-1] # the last index is the name of the group or dataset return name def split_hdf5_path(self, hdf5_path): """ DESCRIPTION ----------- Parses an HDF5 group or dataset path and creates a list of the path components. PARAMETERS ---------- hdf5_path: str HDF5 path to the group or dataset RETURNS ------- split_path: list list of path components """ if "/" in hdf5_path: # if components in path split_path = hdf5_path.split("/") # create list of components if split_path[0] == "": split_path = split_path[1:] else: # if no components in path split_path = [hdf5_path] # create list of the components return split_path class HDF5Components(HDF5Content): """ DESCRIPTION ----------- This class contains methods that return various components of the HDF5 file such as groups, datasets, dataset values, NumPy/Pandas matrices of dataset values, metadata, and structured dictionaries. PARAMETERS ---------- hdf5_filepath: HDF5 file path path to the user-selected HDF5 file ATTRIBUTES ---------- all_group_names_dict: dict dictionary of every group and its associated info group names are keys all_dataset_names_dict: dict dictionary of all datasets and their associated info datasetset names are keys all_dataset_paths_dict: dict dictionary of all datasets and their associated info datasetset paths are keys """ def __init__(self, hdf5_filepath): super().__init__(hdf5_filepath=hdf5_filepath) # ----------Group Functions---------- def get_group_info(self, group_name): """ DESCRIPTION ----------- Returns a dictionary of the group info. Key - group item name Value - group item value PARAMETERS ---------- group_name: str name of the group to get the info of RETURNS ------- group_info_dict: dict dictionary of group info """ try: valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: group_path = self.get_path(group_name=group_name) group_metadata = self.get_metadata(group_name=group_name) subgroup_dict = self.all_group_names_dict[group_name]["subgroups"] dataset_dict = self.all_group_names_dict[group_name]["datasets"] group_info_dict = { "group_name": group_name, "group_path": group_path, "group_metadata": group_metadata, "subgroups": subgroup_dict, "datasets": dataset_dict } return group_info_dict except hdf5_exceptions.GroupNameError as e: print(e) def get_group_obj(self, group_name): """ DESCRIPTION ----------- Returns the HDF5 group class object for the "group_name" group. PARAMETERS ---------- group_name: str name of the group to get HDF5 class object of RETURNS ------- group_obj: HDF5 group class object instance of the HDF5 group class """ try: valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: group_obj = self.all_group_names_dict[group_name]["group_obj"] return group_obj except hdf5_exceptions.GroupNameError as e: print(e) def get_group_dict(self, group_name): """ DESCRIPTION ----------- Returns a dictionary of the group info and its subgroups, and datasets. 
Key - group item name Value - group item value PARAMETERS ---------- group_name: str name of the group to get RETURNS ------- group_dict: dict dictionary of group info, subgroups, and datasets """ try: valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: group_path = self.get_path(group_name=group_name) group_info_dict = self.get_group_info(group_name=group_name) subgroup_dict = self.get_subgroup_dict(group_path=group_path) group_dict = group_info_dict group_dict.update({"subgroup_dict": subgroup_dict}) full_dataset_dict = {} for dataset_name, dataset_obj in group_info_dict["datasets"].items(): # for each dataset name and dataset object dataset_path = dataset_obj.name[1:] # removes the "/" at the start of the path string dataset_dict = self.get_dataset_dict(dataset_path=dataset_path) full_dataset_dict.update({dataset_name: dataset_dict}) group_dict.update({"dataset_dict": full_dataset_dict}) return group_dict except hdf5_exceptions.GroupNameError as e: print(e) def get_subgroup_dict(self, group_path): """ DESCRIPTION ----------- Returns a dictionary of subgroup info for each subgroup in the group at the group_path location in the HDF5 file. Key - subgroup name Value - subgroup info PARAMETERS ---------- group_path: str path of the subgroup in the HDF5 file RETURNS ------- subgroup_dict: dict dictionary of subgroup info for each subgroup in the group """ try: valid_path = self.check_path(path_list=self.get_all_group_paths(), path_to_check=group_path) if valid_path: subgroups_list = self.get_subgroups_list(group_path=group_path) # list of subgroups in the group_path group subgroup_dict = {} for subgroup_name in subgroups_list: # for each subgroup name subgroup_info_dict = self.get_group_info(group_name=subgroup_name) subgroup_dict.update({subgroup_name: subgroup_info_dict}) return subgroup_dict except hdf5_exceptions.PathError as e: print(e) def get_parent_group_obj(self, group_name=None, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns the parent group class object of the dataset or group. 
PARAMETERS ---------- group_name: str name of the group to get the parent group of -or- dataset_name: str name of the dataset to get the parent group of -or- dataset_path: str path of the dataset to get the parent group of RETURNS ------- parent_group_obj: HDF5 group class object instance of the class object of the parent group """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: if group_name != "/": # if not the Root group group_obj = self.get_group_obj(group_name=group_name) parent_group_obj = group_obj.parent else: parent_group_obj = None # root group "/" has no parent group return parent_group_obj elif dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) parent_group_obj = dataset_obj.parent return parent_group_obj elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) parent_group_obj = dataset_obj.parent return parent_group_obj except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.DatasetNameError as e3: print(e3) except hdf5_exceptions.PathError as e4: print(e4) def get_parent_group_path(self, group_name=None, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns the path of the parent group of the dataset or group. 
PARAMETERS ---------- group_name: str name of the group to get the parent group of -or- dataset_name: str name of the dataset to get the parent group of -or- dataset_path: str path of the dataset to get the parent group of RETURNS ------- parent_group_path: str path in HDF5 file to the parent group """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: if group_name != "/": # if not the Root group group_obj = self.get_group_obj(group_name=group_name) parent_group_path = group_obj.parent.name else: parent_group_path = None # root group "/" has no parent group path return parent_group_path elif dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) parent_group_path = dataset_obj.parent.name return parent_group_path elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) parent_group_path = dataset_obj.parent.name return parent_group_path except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.DatasetNameError as e3: print(e3) except hdf5_exceptions.PathError as e4: print(e4) def get_parent_group_name(self, group_name=None, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns the name of the parent group of the dataset or group. 
PARAMETERS ---------- group_name: str name of the group to get the parent group of -or- dataset_name: str name of the dataset to get the parent group of -or- dataset_path: str path of the dataset to get the parent group of RETURNS ------- parent_group_name: str name of the parent group """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed all_group_objs_dict = self.get_all_group_objs_dict() if group_name: # if group_name passed in valid_group_name = self.check_group_name(group_names_list=self.get_all_group_names(), group_name=group_name) if valid_group_name: if group_name != "/": # if not the Root group parent_group_obj = self.get_parent_group_obj(group_name=group_name) parent_group_name = all_group_objs_dict[parent_group_obj]["group_name"] else: parent_group_name = None # Root group "/" has no parent group name return parent_group_name elif dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) parent_group_obj = self.get_parent_group_obj(dataset_path=dataset_path) parent_group_name = all_group_objs_dict[parent_group_obj]["group_name"] return parent_group_name elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: parent_group_obj = self.get_parent_group_obj(dataset_path=dataset_path) parent_group_name = all_group_objs_dict[parent_group_obj]["group_name"] return parent_group_name except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.GroupNameError as e2: print(e2) except hdf5_exceptions.DatasetNameError as e3: print(e3) except hdf5_exceptions.PathError as e4: print(e4) def get_eeg_matrix(self, group_name, matrix_type="pandas", num_points=None): """ DESCRIPTION ----------- Combines all CNS EEG datasets (EEG channels) within a group into one 2D matrix. 
Returns a matrix of all EEG channel values for the following CNS groups: Impedance, NeonatalParamas, SampleSeries PARAMETERS ---------- group_name: str name of the CNS group containing the datasets to be converted matrix_type: str matrix type to convert the dataset values into - "pandas" or "numpy" default value: "pandas" RETURNS ------- eeg_matrix: Pandas DataFrame or NumPy Array matrix of all EEG channel values of a group """ try: cns_eeg_group_list = ["Impedance", "NeonatalParams", "SampleSeries"] # list of valid CNS EEG group names if group_name not in cns_eeg_group_list: # if group_name is not in the list of valid CNS EEG group names raise hdf5_exceptions.EEGGroupNameError(group_name=group_name) else: group_path = self.get_path(group_name=group_name) eeg_metadata = self.get_metadata(group_name=group_name) channel_names_list = eeg_metadata["channel_names"] # list of EEG channel names if matrix_type.lower() == "pandas": # if the user-selected matrix_type is pandas eeg_matrix = pd.DataFrame(columns=channel_names_list) # create DataFrame for channel_name in channel_names_list: # for each channel name if num_points: channel_values = self.hdf5file[group_path].get(channel_name)[0:num_points] eeg_matrix[f"{channel_name}"] = channel_values # add values to the channel_name col else: channel_values = self.hdf5file[group_path].get(channel_name) eeg_matrix[f"{channel_name}"] = channel_values # add values to the channel_name col elif matrix_type.lower() == "numpy": # if the user-selected matrix_type is numpy if num_points: num_rows = num_points num_cols = eeg_metadata["num_channels"] # num of matrix cols is num of channels (each channel is a col) eeg_matrix = np.empty(shape=(num_rows, num_cols), dtype=float) # create Array for i, channel_name in enumerate(channel_names_list): # for each channel name eeg_matrix[:, i] = self.hdf5file[group_path].get(channel_name)[0:num_points] # add values to the channel_name col else: num_rows = eeg_metadata["num_time_slices"] # num of matrix rows is num of time slices attribute num_cols = eeg_metadata["num_channels"] # num of matrix cols is num of channels (each channel is a col) eeg_matrix = np.empty(shape=(num_rows, num_cols), dtype=float) # create Array for i, channel_name in enumerate(channel_names_list): # for each channel name eeg_matrix[:, i] = self.hdf5file[group_path].get(channel_name) # add values to the channel_name col else: raise hdf5_exceptions.MatrixTypeError(matrix_type=matrix_type) return eeg_matrix except hdf5_exceptions.EEGGroupNameError as e: print(e) except hdf5_exceptions.MatrixTypeError as e2: print(e2) # ----------Dataset Functions---------- def get_dataset_info(self, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns a dictionary of the dataset info. 
Key - info name Value - info value PARAMETERS ---------- dataset_name: str name of the dataset to get the info of -or- dataset_path: str path of the dataset to get the info of RETURNS ------- dataset_info_dict: dict dictionary of dataset info """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_metadata = self.get_metadata(dataset_path=dataset_path) column_names_list = self.get_column_names(dataset_path=dataset_path) # list of dataset column names dataset_info_dict = { "dataset_name": dataset_name, "dataset_path": dataset_path, "dataset_metadata": dataset_metadata, "column_names": column_names_list } return dataset_info_dict elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_metadata = self.get_metadata(dataset_path=dataset_path) column_names_list = self.get_column_names(dataset_path=dataset_path) # list of dataset column names dataset_info_dict = { "dataset_name": dataset_name, "dataset_path": dataset_path, "dataset_metadata": dataset_metadata, "column_names": column_names_list } return dataset_info_dict except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.DatasetNameError as e2: print(e2) except hdf5_exceptions.PathError as e3: print(e3) def get_dataset_obj(self, dataset_name=None, dataset_path=None): # returns the HDF5 dataset object """ DESCRIPTION ----------- Returns the HDF5 dataset class object for the "dataset_name" dataset. PARAMETERS ---------- dataset_name: str name of the dataset to get HDF5 class object of -or- dataset_path: str path of the dataset to get HDF5 class object of RETURNS ------- dataset_obj: HDF5 dataset class object instance of the HDF5 dataset class can be indexed like a normal array """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_obj = self.hdf5file.get(dataset_path) return dataset_obj elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_obj = self.hdf5file.get(dataset_path) return dataset_obj except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.DatasetNameError as e2: print(e2) except hdf5_exceptions.PathError as e3: print(e3) def get_dataset_dict(self, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns a dictionary of the dataset info and values. 
Key - item name Value - item value PARAMETERS ---------- dataset_name: str name of the dataset to get -or- dataset_path: str path of the dataset to get RETURNS ------- dataset_dict: dict dictionary of dataset info and values """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_info_dict = self.get_dataset_info(dataset_path=dataset_path) dataset = self.get_dataset_obj(dataset_path=dataset_path) dataset_dict = dataset_info_dict dataset_dict.update({"dataset": dataset}) return dataset_dict elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_info_dict = self.get_dataset_info(dataset_path=dataset_path) dataset = self.get_dataset_obj(dataset_path=dataset_path) dataset_dict = dataset_info_dict dataset_dict.update({"dataset": dataset}) return dataset_dict except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.DatasetNameError as e2: print(e2) except hdf5_exceptions.PathError as e3: print(e3) def get_column_names(self, dataset_name=None, dataset_path=None): """ DESCRIPTION ----------- Returns a list of dataset column names. PARAMETERS ---------- dataset_name: str name of the dataset to get the column names from -or- dataset_path: str path of the dataset to get the column names from RETURNS ------- column_names_list: list list of column names """ try: arg_list = list(locals().values()) # list of arguments self.check_args(arg_list=arg_list, req_num_args=1) # returns error if 0 or > 1 arguments passed if dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) column_names = self.get_dataset_obj(dataset_path=dataset_path).dtype.names if column_names: # if column names (not None) column_names_list = list(column_names) # list of dataset column names else: # if no column names column_names_list = [] return column_names_list elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: column_names = self.get_dataset_obj(dataset_path=dataset_path).dtype.names if column_names: # if column names (not None) column_names_list = list(column_names) # list of dataset column names else: # if no column names column_names_list = [] return column_names_list except hdf5_exceptions.ArgumentError as e: print(e) except hdf5_exceptions.DatasetNameError as e2: print(e2) except hdf5_exceptions.PathError as e3: print(e3) def get_values(self, dataset_name=None, dataset_path=None, matrix_type="pandas"): """ DESCRIPTION ----------- Returns a matrix the values in the dataset. 
PARAMETERS ---------- dataset_name: str name of the dataset to get the NumPy Array from -or- dataset_path: str path of the dataset to get the NumPy Array from matrix_type: str matrix type get the dataset values in - "pandas" or "numpy" default value: "pandas" RETURNS ------- dataset_values: Pandas DataFrame or NumPy Array matrix of dataset values """ try: if dataset_name: # if dataset_name passed in valid_dataset_name = self.check_dataset_name(dataset_names_list=self.get_all_dataset_names(), dataset_name=dataset_name) if valid_dataset_name: dataset_path = self.get_path(dataset_name=dataset_name) dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) if matrix_type == "pandas": column_names_list = self.get_column_names(dataset_path=dataset_path) # list of dataset column names if column_names_list: # if there are column names dataset_values = pd.DataFrame(data=dataset_obj[:], columns=column_names_list) # Pandas DataFrame of dataset values else: # if there are no column names dataset_values = pd.DataFrame(data=dataset_obj[:]) # Pandas DataFrame of dataset values return dataset_values elif matrix_type == "numpy": dataset_values = np.array(dataset_obj) # NumPy Array of dataset values return dataset_values else: raise hdf5_exceptions.MatrixTypeError(matrix_type=matrix_type) elif dataset_path: # if dataset_path passed in valid_dataset_path = self.check_path(path_list=self.get_all_dataset_paths(), path_to_check=dataset_path) if valid_dataset_path: dataset_obj = self.get_dataset_obj(dataset_path=dataset_path) if matrix_type == "pandas": column_names_list = self.get_column_names(dataset_path=dataset_path) # list of dataset column names if column_names_list: # if there are column names dataset_values = pd.DataFrame(data=dataset_obj[:], columns=column_names_list) # Pandas DataFrame of dataset values else: # if there are no column names dataset_values = pd.DataFrame(data=dataset_obj[:]) # Pandas DataFrame of dataset values return dataset_values elif matrix_type == "numpy": dataset_values = np.array(dataset_obj) # NumPy Array of dataset values return dataset_values else: raise hdf5_exceptions.MatrixTypeError(matrix_type=matrix_type) except hdf5_exceptions.DatasetNameError as e: print(e) except hdf5_exceptions.PathError as e2: print(e2) except hdf5_exceptions.MatrixTypeError as e3: print(e3)
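For orientation, a minimal usage sketch for the reader classes above. This is a hedged example: the import module name, the file name 'example.h5', and the dataset name 'Fp1' are placeholders, not part of the library; only the constructor and method signatures come from the source.

from hdf5_components import HDF5Components  # assumed module name

reader = HDF5Components(hdf5_filepath='example.h5')  # placeholder file

# Resolve a dataset by name; duplicate names trigger the interactive prompt.
dataset_path = reader.get_path(dataset_name='Fp1')   # placeholder dataset name
print(reader.get_metadata(dataset_path=dataset_path))

# Read the values as a Pandas DataFrame (pass matrix_type='numpy' for an Array).
values = reader.get_values(dataset_path=dataset_path, matrix_type='pandas')
print(values.head())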
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/tools/write.py
import json


def write_anki_table(conn, table_name, new_records, do_commit=True):
    """
    :param sqlite3.Connection conn:
    :param 'notes'|'cards' table_name:
    :param iter of OrderedDict new_records:
    :param bool do_commit:
    :return:
    """
    for new_record in new_records:
        conn.execute('INSERT INTO {} ({}) VALUES ({})'
                     .format(table_name,
                             ','.join(new_record.keys()),
                             ','.join(['?' for _ in range(len(new_record))])),
                     tuple(new_record.values()))

    if do_commit:
        conn.commit()


def write_anki_json(conn, json_name, new_dicts, do_commit=True):
    """
    :param sqlite3.Connection conn:
    :param 'models'|'decks' json_name:
    :param iter of dict new_dicts:
    :param bool do_commit:
    :return:
    """
    cursor = conn.execute('SELECT {} FROM col'.format(json_name))
    json_item = json.loads(cursor.fetchone()[0])

    for new_dict in new_dicts:
        json_item[new_dict['id']] = new_dict

    conn.execute('UPDATE col SET {}=?'.format(json_name), (json.dumps(json_item),))

    if do_commit:
        conn.commit()


def write_anki_schema(conn):
    """
    :param sqlite3.Connection conn:
    :return:
    """
    conn.executescript("""
-- Cards are what you review.
-- There can be multiple cards for each note, as determined by the Template.
CREATE TABLE cards (
    id     integer primary key,  -- the epoch milliseconds of when the card was created
    nid    integer not null,     -- notes.id
    did    integer not null,     -- deck id (available in col table)
    ord    integer not null,     -- ordinal: identifies which of the card templates it corresponds to
                                 -- valid values are from 0 to num templates - 1
    mod    integer not null,     -- modification time as epoch seconds
    usn    integer not null,     -- update sequence number: used to figure out diffs when syncing.
                                 -- value of -1 indicates changes that need to be pushed to server.
                                 -- usn < server usn indicates changes that need to be pulled from server.
    type   integer not null,     -- 0=new, 1=learning, 2=due, 3=filtered
    queue  integer not null,     -- -3=sched buried, -2=user buried, -1=suspended,
                                 -- 0=new, 1=learning, 2=due (as for type)
                                 -- 3=in learning, next rev in at least a day after the previous review
    due    integer not null,     -- Due is used differently for different card types:
                                 --   new: note id or random int
                                 --   due: integer day, relative to the collection's creation time
                                 --   learning: integer timestamp
    ivl    integer not null,     -- interval (used in SRS algorithm). Negative = seconds, positive = days
    factor integer not null,     -- factor (used in SRS algorithm)
    reps   integer not null,     -- number of reviews
    lapses integer not null,     -- the number of times the card went from a "was answered correctly"
                                 -- to "was answered incorrectly" state
    left   integer not null,     -- reps left till graduation
    odue   integer not null,     -- original due: only used when the card is currently in filtered deck
    odid   integer not null,     -- original did: only used when the card is currently in filtered deck
    flags  integer not null,     -- currently unused
    data   text not null         -- currently unused
);

-- col contains a single row that holds various information about the collection
CREATE TABLE col (
    id     integer primary key,  -- arbitrary number since there is only one row
    crt    integer not null,     -- created timestamp
    mod    integer not null,     -- last modified in milliseconds
    scm    integer not null,     -- schema mod time: time when "schema" was modified.
                                 -- If server scm is different from the client scm a full-sync is required
    ver    integer not null,     -- version
    dty    integer not null,     -- dirty: unused, set to 0
    usn    integer not null,     -- update sequence number: used for finding diffs when syncing.
                                 -- See usn in cards table for more details.
    ls     integer not null,     -- "last sync time"
    conf   text not null,        -- json object containing configuration options that are synced
    models text not null,        -- json array of json objects containing the models (aka Note types)
    decks  text not null,        -- json array of json objects containing the decks
    dconf  text not null,        -- json array of json objects containing the deck options
    tags   text not null         -- a cache of tags used in the collection (this list is displayed in the browser, and potentially elsewhere)
);

-- Contains deleted cards, notes, and decks that need to be synced.
-- usn should be set to -1,
-- oid is the original id.
-- type: 0 for a card, 1 for a note and 2 for a deck
CREATE TABLE graves (
    usn  integer not null,
    oid  integer not null,
    type integer not null
);

-- Notes contain the raw information that is formatted into a number of cards
-- according to the models
CREATE TABLE notes (
    id    integer primary key,  -- epoch seconds of when the note was created
    guid  text not null,        -- globally unique id, almost certainly used for syncing
    mid   integer not null,     -- model id
    mod   integer not null,     -- modification timestamp, epoch seconds
    usn   integer not null,     -- update sequence number: for finding diffs when syncing.
                                -- See the description in the cards table for more info
    tags  text not null,        -- space-separated string of tags.
                                -- includes space at the beginning and end, for LIKE "% tag %" queries
    flds  text not null,        -- the values of the fields in this note. separated by 0x1f (31) character.
    sfld  text not null,        -- sort field: used for quick sorting and duplicate check
    csum  integer not null,     -- field checksum used for duplicate check.
                                -- integer representation of first 8 digits of sha1 hash of the first field
    flags integer not null,     -- unused
    data  text not null         -- unused
);

-- revlog is a review history; it has a row for every review you've ever done!
CREATE TABLE revlog (
    id      integer primary key,  -- epoch-milliseconds timestamp of when you did the review
    cid     integer not null,     -- cards.id
    usn     integer not null,     -- update sequence number: for finding diffs when syncing.
                                  -- See the description in the cards table for more info
    ease    integer not null,     -- which button you pushed to score your recall.
                                  -- review:        1(wrong), 2(hard), 3(ok), 4(easy)
                                  -- learn/relearn: 1(wrong), 2(ok), 3(easy)
    ivl     integer not null,     -- interval
    lastIvl integer not null,     -- last interval
    factor  integer not null,     -- factor
    time    integer not null,     -- how many milliseconds your review took, up to 60000 (60s)
    type    integer not null      -- 0=learn, 1=review, 2=relearn, 3=cram
);

CREATE INDEX ix_cards_nid on cards (nid);
CREATE INDEX ix_cards_sched on cards (did, queue, due);
CREATE INDEX ix_cards_usn on cards (usn);
CREATE INDEX ix_notes_csum on notes (csum);
CREATE INDEX ix_notes_usn on notes (usn);
CREATE INDEX ix_revlog_cid on revlog (cid);
CREATE INDEX ix_revlog_usn on revlog (usn);
""")

    conn.commit()
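The call sequence for the writers above is: create the schema once, then insert OrderedDict records whose keys match the table columns. A sketch against an in-memory SQLite database; the field values are dummies, and the import path follows the wheel layout shown above.

import sqlite3
from collections import OrderedDict

from ankitools.tools.write import write_anki_schema, write_anki_table

conn = sqlite3.connect(':memory:')
write_anki_schema(conn)  # creates the cards/col/graves/notes/revlog tables

note = OrderedDict([
    ('id', 1), ('guid', 'abc123'), ('mid', 1), ('mod', 0), ('usn', -1),
    ('tags', ''), ('flds', u'front\x1fback'), ('sfld', 'front'),
    ('csum', 0), ('flags', 0), ('data', ''),
])
write_anki_table(conn, 'notes', [note])  # the dict keys become the column list
print(conn.execute('SELECT id, sfld FROM notes').fetchall())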
/Flask-APIForm-1.0.tar.gz/flaskext/apiform.py
from __future__ import absolute_import
import re


class Form(object):
    def __init__(self, request=None, url=None):
        self._fields = self.switch_fields()
        self._data = {
            'args':  request.args.to_dict() if request else None,
            'form':  request.form.to_dict() if request else None,
            'files': request.files.to_dict() if request else None,
            'url':   url
        }
        # populated by validate(); initialised here so get()/errors are safe
        # to touch before validation
        self.data = {}
        self.errors = []

    def switch_fields(self):
        fields = []
        for attr in dir(self):
            value = getattr(self, attr)
            if isinstance(value, Field):
                setattr(self, attr, None)
                setattr(self, '_' + attr, value)
                fields.append(attr)
        return fields

    def get(self, key):
        if self.data:
            return self.data.get(key)
        else:
            return False

    def validate(self):
        self.data = {}
        self.errors = []
        self.valid = True
        for attr in self._fields:
            field = getattr(self, '_' + attr)
            if not self._data[field.source] or attr not in self._data[field.source]:
                if field.default:
                    self.data[attr] = field.default
                    setattr(self, attr, field.default)
                elif field.required:
                    self.valid = False
                    self.errors.append({'field': attr, 'code': 'missing_field'})
            else:
                value = self._data[field.source].get(attr)
                value = field.validate(attr, value)
                if value is False:
                    self.valid = False
                    self.errors.extend(field.errors)
                else:
                    self.data[attr] = value
                    setattr(self, attr, value)
        return self.valid


class Field(object):
    def __init__(self, required=True, allowed=None, default=None, needs=None, source='args'):
        self.errors = []
        self.error = False
        self.required = required
        self.allowed = allowed
        self.default = default
        self.source = source
        self.needs = needs

    def validate(self, field, value):
        self.errors = []
        if self.required is True:
            if not value:
                if not self.default:
                    self.add_error(field, 'empty_field')
                    return False
                else:
                    value = self.default
        elif self.default and not value:
            value = self.default
        if self.allowed and value not in self.allowed:
            self.add_error(field, 'invalid_value', '%s are the allowed values' % str(self.allowed))
            return False
        return value

    def add_error(self, field, code, tip=''):
        self.errors.append({'field': field, 'code': code, 'tip': tip})
        self.error = True


class FileField(Field):
    def __init__(self, extensions=None, source='files', required=True):
        self.extensions = extensions
        super(FileField, self).__init__(source=source, required=required)

    def validate(self, field, value):
        value = super(FileField, self).validate(field, value)
        if not value:
            return False
        if self.extensions:
            filename = value.filename
            if '.' not in filename or filename.rsplit('.', 1)[1] not in self.extensions:
                self.add_error(field, 'invalid_file_type', 'allowed file types are: %s' % str(self.extensions))
                return False
        return value


class StringField(Field):
    def __init__(self, minlength=None, maxlength=None, regex=None, **kwargs):
        self.minlength = minlength
        self.maxlength = maxlength
        self.regex = regex
        super(StringField, self).__init__(**kwargs)

    def validate(self, field, value):
        value = super(StringField, self).validate(field, value)
        if not value:
            return False
        if self.is_string(value):
            value = str(value)
        else:
            self.add_error(field, 'invalid_type', 'should be a string')
            return False
        if self.minlength and len(value) < self.minlength:
            self.add_error(field, 'out_of_min_length', '%s is the minimum length' % str(self.minlength))
            return False
        if self.maxlength and len(value) > self.maxlength:
            self.add_error(field, 'out_of_max_length', '%s is the maximum length' % str(self.maxlength))
            return False
        if self.regex:
            if isinstance(self.regex, (list, tuple)):
                rgx_str = self.regex[0]
                tip = self.regex[1]
            else:
                rgx_str = self.regex
                tip = 'the value contains invalid characters'
            regex = re.compile(rgx_str)
            if not regex.match(value):
                self.add_error(field, 'invalid_value', tip)
                return False
        return value

    def is_string(self, value):
        if isinstance(value, (basestring, str)):
            return True
        else:
            try:
                str(value)
            except (ValueError, TypeError):
                return False
            else:
                return True


class EmailField(StringField):
    def validate(self, field, value):
        value = super(StringField, self).validate(field, value)
        if not value:
            return False
        regex = re.compile(r'^.+@[^.].*\.[a-z]{2,10}$', re.IGNORECASE)
        if not regex.match(value):
            self.add_error(field, 'invalid_type', 'this is not a valid email address')
            return False
        return value


class NumField(Field):
    def __init__(self, min=None, max=None, **kwargs):
        self.min = min
        self.max = max
        super(NumField, self).__init__(**kwargs)

    def validate(self, field, value):
        value = super(NumField, self).validate(field, value)
        if not value:
            return False
        if self.is_number(value):
            value = float(value)
        else:
            self.add_error(field, 'invalid_type', 'should be a number')
            return False
        if self.min and value < self.min:
            self.add_error(field, 'out_of_min_range', '%s is the minimum range' % str(self.min))
            return False
        if self.max and value > self.max:
            self.add_error(field, 'out_of_max_range', '%s is the maximum range' % str(self.max))
            return False
        return value

    def is_number(self, value):
        try:
            float(value)
        except (ValueError, TypeError):
            return False
        else:
            return True


class IntField(NumField):
    def __init__(self, base=10, **kwargs):
        self.base = base
        super(IntField, self).__init__(**kwargs)

    def validate(self, field, value):
        # note: super(NumField, ...) deliberately skips NumField's float
        # conversion and runs the base Field validation only
        value = super(NumField, self).validate(field, value)
        if not value:
            return False
        if self.is_integer(value):
            value = int(value, self.base)
        else:
            self.add_error(field, 'invalid_type', 'should be an integer of base %s' % str(self.base))
            return False
        return value

    def is_integer(self, value):
        if isinstance(value, (int, long)):
            return True
        else:
            try:
                int(value, self.base)
            except (ValueError, TypeError):
                return False
            else:
                return True


class HexField(Field):
    def __init__(self, length=None, filter=None, **kwargs):
        self.length = length
        self.filter = filter
        super(HexField, self).__init__(**kwargs)

    def validate(self, field, value):
        value = super(HexField, self).validate(field, value)
        if not value:
            return False
        try:
            int(value, 16)
        except (ValueError, TypeError):
            self.add_error(field, 'invalid_type', 'should be a hexadecimal string')
            return False
        if self.length and len(value) != self.length:
            self.add_error(field, 'invalid_length', 'should have a length of %s' % int(self.length))
            return False
        if self.filter:
            value = self.filter(value)
        return value
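A hedged example of how these fields compose into a form. The field names and limits below are invented for illustration, and the commented-out lines show the intended call pattern inside a Flask view; the module is Python 2 era code, matching the basestring/long idioms above.

from flaskext.apiform import Form, StringField, EmailField, IntField

class SignupForm(Form):
    username = StringField(minlength=3, maxlength=20, source='form')
    email = EmailField(source='form')
    age = IntField(min=13, max=120, required=False, source='form')

# Inside a Flask view (sketch):
#     form = SignupForm(request)
#     if form.validate():
#         create_user(form.username, form.email, form.age)
#     else:
#         return json.dumps({'errors': form.errors}), 422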
/360monitoringcli-1.0.19-py3-none-any.whl/cli360monitoring/lib/sitenotifications.py
import json
from datetime import datetime

from prettytable import PrettyTable

from .api import apiGet
from .config import Config
from .functions import printError, printWarn


class SiteNotifications(object):

    def __init__(self, config: Config, format: str = 'table'):
        self.config = config
        self.format = format
        self.notifications = None

        self.table = PrettyTable(field_names=['Start', 'End', 'Status', 'Summary'])
        self.table.align['Start'] = 'c'
        self.table.align['End'] = 'c'
        self.table.align['Status'] = 'c'
        self.table.align['Summary'] = 'l'

    def fetchData(self, siteId: str, startTimestamp: float, endTimestamp: float):
        """Retrieve a list of all alerts of a specified site in the specified time period"""

        # if data is already downloaded, use cached data
        if self.notifications is not None:
            return True

        params = self.config.params()
        params['start'] = int(startTimestamp)
        params['end'] = int(endTimestamp)
        response_json = apiGet('monitor/' + siteId + '/notifications', 200, self.config, params)
        if response_json:
            if 'data' in response_json:
                self.notifications = response_json['data']
                return True
            else:
                printWarn('No notifications found for site', siteId)
                self.notifications = None
                return False
        else:
            self.notifications = None
            return False

    def list(self, siteId: str, startTimestamp: float, endTimestamp: float, sort: str = '', reverse: bool = False, limit: int = 0):
        """Iterate through list of site notifications and print details"""

        if self.fetchData(siteId, startTimestamp, endTimestamp):

            # if JSON was requested and no filters, then just print it without iterating through
            if self.format == 'json':
                print(json.dumps(self.notifications, indent=4))
                return

            # iterate through the list of notifications and print each one
            for notification in self.notifications:
                self.print(notification)

            self.printFooter(sort=sort, reverse=reverse, limit=limit)

    def printFooter(self, sort: str = '', reverse: bool = False, limit: int = 0):
        """Print table if table format requested"""

        if (self.format == 'table'):
            # if self.config.hide_ids:
            #     self.table.del_column('ID')

            if sort:
                # if sort contains the column index instead of the column name, get the column name instead
                if sort.isdecimal():
                    sort = self.table.get_csv_string().split(',')[int(sort) - 1]
            else:
                sort = None

            if limit > 0:
                print(self.table.get_string(sortby=sort, reversesort=reverse, start=0, end=limit))
            else:
                print(self.table.get_string(sortby=sort, reversesort=reverse))

        elif (self.format == 'csv'):
            print(self.table.get_csv_string(delimiter=self.config.delimiter))

    def print(self, notification):
        """Print the data of the specified notification"""

        if (self.format == 'json'):
            print(json.dumps(notification, indent=4))
            return

        startTimestamp = datetime.fromtimestamp(float(notification['start']))
        endTimestamp = datetime.fromtimestamp(float(notification['end']))
        status = notification['status']
        summary = notification['summary']

        self.table.add_row([startTimestamp.strftime('%Y-%m-%d %H:%M:%S'), endTimestamp.strftime('%Y-%m-%d %H:%M:%S'), status, summary])
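A short driving example for the class above. The Config construction is an assumption (in the real CLI it is wired up with the 360 Monitoring API credentials), and 'my-site-id' is a placeholder; only the SiteNotifications interface comes from the source.

from datetime import datetime, timedelta

from cli360monitoring.lib.config import Config            # assumed import path
from cli360monitoring.lib.sitenotifications import SiteNotifications

config = Config()  # assumed: picks up the API key from the CLI's config file
notifications = SiteNotifications(config, format='table')

end = datetime.now().timestamp()
start = (datetime.now() - timedelta(days=7)).timestamp()
notifications.list('my-site-id', start, end, sort='Start', limit=10)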
/CocoPy-1.1.0rc.zip/CocoPy-1.1.0rc/testSuite/TestCasing_Scanner.py
import sys class Token( object ): def __init__( self ): self.kind = 0 # token kind self.pos = 0 # token position in the source text (starting at 0) self.col = 0 # token column (starting at 0) self.line = 0 # token line (starting at 1) self.val = u'' # token value self.next = None # AW 2003-03-07 Tokens are kept in linked list class Position( object ): # position of source code stretch (e.g. semantic action, resolver expressions) def __init__( self, buf, beg, len, col ): assert isinstance( buf, Buffer ) assert isinstance( beg, int ) assert isinstance( len, int ) assert isinstance( col, int ) self.buf = buf self.beg = beg # start relative to the beginning of the file self.len = len # length of stretch self.col = col # column number of start position def getSubstring( self ): return self.buf.readPosition( self ) class Buffer( object ): EOF = u'\u0100' # 256 def __init__( self, s ): self.buf = s self.bufLen = len(s) self.pos = 0 self.lines = s.splitlines( True ) def Read( self ): if self.pos < self.bufLen: result = unichr(ord(self.buf[self.pos]) & 0xff) # mask out sign bits self.pos += 1 return result else: return Buffer.EOF def ReadChars( self, numBytes=1 ): result = self.buf[ self.pos : self.pos + numBytes ] self.pos += numBytes return result def Peek( self ): if self.pos < self.bufLen: return unichr(ord(self.buf[self.pos]) & 0xff) # mask out sign bits else: return Scanner.buffer.EOF def getString( self, beg, end ): s = '' oldPos = self.getPos( ) self.setPos( beg ) while beg < end: s += self.Read( ) beg += 1 self.setPos( oldPos ) return s def getPos( self ): return self.pos def setPos( self, value ): if value < 0: self.pos = 0 elif value >= self.bufLen: self.pos = self.bufLen else: self.pos = value def readPosition( self, pos ): assert isinstance( pos, Position ) self.setPos( pos.beg ) return self.ReadChars( pos.len ) def __iter__( self ): return iter(self.lines) class Scanner(object): EOL = u'\n' eofSym = 0 charSetSize = 256 maxT = 6 noSym = 6 start = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1] valCh = u'' # current input character (for token.val) def __init__( self, s ): self.buffer = Buffer( unicode(s) ) # the buffer instance self.ch = u'\0' # current input character self.pos = -1 # column number of current character self.line = 1 # line number of current character self.lineStart = 0 # start position of current line self.oldEols = 0 # EOLs that appeared in a comment; self.NextCh( ) self.ignore = set( ) # set of characters to be ignored by the scanner self.ignore.add( ord(' ') ) # blanks are always white space # fill token list self.tokens = Token( ) # the complete input token stream node = self.tokens node.next = self.NextToken( ) node = node.next while node.kind != Scanner.eofSym: node.next = self.NextToken( ) node = node.next node.next = node node.val = u'EOF' self.t = 
self.tokens # current token self.pt = self.tokens # current peek token def NextCh( self ): if self.oldEols > 0: self.ch = Scanner.EOL self.oldEols -= 1 else: self.ch = self.buffer.Read( ) self.pos += 1 # replace isolated '\r' by '\n' in order to make # eol handling uniform across Windows, Unix and Mac if (self.ch == u'\r') and (self.buffer.Peek() != u'\n'): self.ch = Scanner.EOL if self.ch == Scanner.EOL: self.line += 1 self.lineStart = self.pos + 1 valCh = self.ch if self.ch != Buffer.EOF: self.ch = self.ch.lower() def CheckLiteral( self ): lit = self.t.val.lower() if lit == "aaa": self.t.kind = 4 elif lit == "bbb": self.t.kind = 5 def NextToken( self ): while ord(self.ch) in self.ignore: self.NextCh( ) self.t = Token( ) self.t.pos = self.pos self.t.col = self.pos - self.lineStart + 1 self.t.line = self.line state = self.start[ord(self.ch)] buf = u'' buf += unicode(self.ch) self.NextCh() done = False while not done: if state == -1: self.t.kind = Scanner.eofSym # NextCh already done done = True elif state == 0: self.t.kind = Scanner.noSym # NextCh already done done = True elif state == 1: if (self.ch >= 'a' and self.ch <= 'z'): buf += unicode(self.ch) self.NextCh() state = 1 else: self.t.kind = 1 self.t.val = buf self.CheckLiteral() return self.t elif state == 2: self.t.kind = 2 done = True elif state == 3: if (self.ch >= '0' and self.ch <= '9'): buf += unicode(self.ch) self.NextCh() state = 4 else: self.t.kind = Scanner.noSym done = True elif state == 4: if (self.ch >= '0' and self.ch <= '9'): buf += unicode(self.ch) self.NextCh() state = 4 else: self.t.kind = 3 done = True elif state == 5: if (self.ch == 'f'): buf += unicode(self.ch) self.NextCh() state = 2 elif (self.ch >= '0' and self.ch <= '9'): buf += unicode(self.ch) self.NextCh() state = 5 elif self.ch == 'e': buf += unicode(self.ch) self.NextCh() state = 3 else: self.t.kind = Scanner.noSym done = True self.t.val = buf return self.t def Scan( self ): self.t = self.t.next self.pt = self.t.next return self.t def Peek( self ): self.pt = self.pt.next while self.pt.kind > self.maxT: self.pt = self.pt.next return self.pt def ResetPeek( self ): self.pt = self.t
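A quick smoke test for the generated scanner (the module is Python 2 era code, using unichr/unicode). Per the state machine above, the case-insensitive literals "aaa" and "bbb" resolve to keyword kinds 4 and 5 via CheckLiteral, a digit run ending in 'f' yields kind 2, and an exponent-style number yields kind 3. The import assumes the file is on sys.path.

from TestCasing_Scanner import Scanner  # assumed to be importable as-is

scanner = Scanner(u"aaa 12f bbb 12e34")
token = scanner.Scan()
while token.kind != Scanner.eofSym:
    print("%d %s" % (token.kind, token.val))
    token = scanner.Scan()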
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/plugins/pace-progress/pace.js
(function() { var AjaxMonitor, Bar, DocumentMonitor, ElementMonitor, ElementTracker, EventLagMonitor, Evented, Events, NoTargetError, Pace, RequestIntercept, SOURCE_KEYS, Scaler, SocketRequestTracker, XHRRequestTracker, _WebSocket, _XDomainRequest, _XMLHttpRequest, _intercept, _pushState, _replaceState, animation, avgAmplitude, bar, cancelAnimation, cancelAnimationFrame, defaultOptions, extend, extendNative, getFromDOM, getIntercept, handlePushState, ignoreStack, init, k, len, now, options, ref, requestAnimationFrame, result, runAnimation, scalers, shouldIgnoreURL, shouldTrack, source, sources, uniScaler, slice = [].slice, hasProp = {}.hasOwnProperty, extend1 = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; }; defaultOptions = { catchupTime: 100, initialRate: .03, minTime: 250, ghostTime: 100, maxProgressPerFrame: 20, easeFactor: 1.25, startOnPageLoad: true, restartOnPushState: true, restartOnRequestAfter: 500, target: 'body', elements: { checkInterval: 100, selectors: ['body'] }, eventLag: { minSamples: 10, sampleCount: 3, lagThreshold: 3 }, ajax: { trackMethods: ['GET'], trackWebSockets: true, ignoreURLs: [] } }; now = function() { var ref; return (ref = typeof performance !== "undefined" && performance !== null ? typeof performance.now === "function" ? performance.now() : void 0 : void 0) != null ? ref : +(new Date); }; requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame; cancelAnimationFrame = window.cancelAnimationFrame || window.mozCancelAnimationFrame; if (requestAnimationFrame == null) { requestAnimationFrame = function(fn) { return setTimeout(fn, 50); }; cancelAnimationFrame = function(id) { return clearTimeout(id); }; } runAnimation = function(fn) { var last, tick; last = now(); tick = function() { var diff; diff = now() - last; if (diff >= 33) { last = now(); return fn(diff, function() { return requestAnimationFrame(tick); }); } else { return setTimeout(tick, 33 - diff); } }; return tick(); }; result = function() { var args, key, obj; obj = arguments[0], key = arguments[1], args = 3 <= arguments.length ? slice.call(arguments, 2) : []; if (typeof obj[key] === 'function') { return obj[key].apply(obj, args); } else { return obj[key]; } }; extend = function() { var k, key, len, out, source, sources, val; out = arguments[0], sources = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; for (k = 0, len = sources.length; k < len; k++) { source = sources[k]; if (source) { for (key in source) { if (!hasProp.call(source, key)) continue; val = source[key]; if ((out[key] != null) && typeof out[key] === 'object' && (val != null) && typeof val === 'object') { extend(out[key], val); } else { out[key] = val; } } } } return out; }; avgAmplitude = function(arr) { var count, k, len, sum, v; sum = count = 0; for (k = 0, len = arr.length; k < len; k++) { v = arr[k]; sum += Math.abs(v); count++; } return sum / count; }; getFromDOM = function(key, json) { var data, e, el, error; if (key == null) { key = 'options'; } if (json == null) { json = true; } el = document.querySelector("[data-pace-" + key + "]"); if (!el) { return; } data = el.getAttribute("data-pace-" + key); if (!json) { return data; } try { return JSON.parse(data); } catch (error) { e = error; return typeof console !== "undefined" && console !== null ? console.error("Error parsing inline pace options", e) : void 0; } }; Evented = (function() { function Evented() {} Evented.prototype.on = function(event, handler, ctx, once) { var base; if (once == null) { once = false; } if (this.bindings == null) { this.bindings = {}; } if ((base = this.bindings)[event] == null) { base[event] = []; } return this.bindings[event].push({ handler: handler, ctx: ctx, once: once }); }; Evented.prototype.once = function(event, handler, ctx) { return this.on(event, handler, ctx, true); }; Evented.prototype.off = function(event, handler) { var i, ref, results; if (((ref = this.bindings) != null ? ref[event] : void 0) == null) { return; } if (handler == null) { return delete this.bindings[event]; } else { i = 0; results = []; while (i < this.bindings[event].length) { if (this.bindings[event][i].handler === handler) { results.push(this.bindings[event].splice(i, 1)); } else { results.push(i++); } } return results; } }; Evented.prototype.trigger = function() { var args, ctx, event, handler, i, once, ref, ref1, results; event = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; if ((ref = this.bindings) != null ? ref[event] : void 0) { i = 0; results = []; while (i < this.bindings[event].length) { ref1 = this.bindings[event][i], handler = ref1.handler, ctx = ref1.ctx, once = ref1.once; handler.apply(ctx != null ? 
ctx : this, args); if (once) { results.push(this.bindings[event].splice(i, 1)); } else { results.push(i++); } } return results; } }; return Evented; })(); Pace = window.Pace || {}; window.Pace = Pace; extend(Pace, Evented.prototype); options = Pace.options = extend({}, defaultOptions, window.paceOptions, getFromDOM()); ref = ['ajax', 'document', 'eventLag', 'elements']; for (k = 0, len = ref.length; k < len; k++) { source = ref[k]; if (options[source] === true) { options[source] = defaultOptions[source]; } } NoTargetError = (function(superClass) { extend1(NoTargetError, superClass); function NoTargetError() { return NoTargetError.__super__.constructor.apply(this, arguments); } return NoTargetError; })(Error); Bar = (function() { function Bar() { this.progress = 0; } Bar.prototype.getElement = function() { var targetElement; if (this.el == null) { targetElement = document.querySelector(options.target); if (!targetElement) { throw new NoTargetError; } this.el = document.createElement('div'); this.el.classList.add('pace'); this.el.classList.add('pace-active'); document.body.classList.remove('pace-done'); document.body.classList.add('pace-running'); this.el.innerHTML = '<div class="pace-progress">\n <div class="pace-progress-inner"></div>\n</div>\n<div class="pace-activity"></div>'; if (targetElement.firstChild != null) { targetElement.insertBefore(this.el, targetElement.firstChild); } else { targetElement.appendChild(this.el); } } return this.el; }; Bar.prototype.finish = function() { var el; el = this.getElement(); el.classList.remove('pace-active'); el.classList.add('pace-inactive'); document.body.classList.remove('pace-running'); return document.body.classList.add('pace-done'); }; Bar.prototype.update = function(prog) { this.progress = prog; return this.render(); }; Bar.prototype.destroy = function() { var error; try { this.getElement().parentNode.removeChild(this.getElement()); } catch (error) { NoTargetError = error; } return this.el = void 0; }; Bar.prototype.render = function() { var el, key, l, len1, progressStr, ref1, transform; if (document.querySelector(options.target) == null) { return false; } el = this.getElement(); transform = "translate3d(" + this.progress + "%, 0, 0)"; ref1 = ['webkitTransform', 'msTransform', 'transform']; for (l = 0, len1 = ref1.length; l < len1; l++) { key = ref1[l]; el.children[0].style[key] = transform; } if (!this.lastRenderedProgress || this.lastRenderedProgress | 0 !== this.progress | 0) { el.children[0].setAttribute('data-progress-text', (this.progress | 0) + "%"); if (this.progress >= 100) { progressStr = '99'; } else { progressStr = this.progress < 10 ? 
"0" : ""; progressStr += this.progress | 0; } el.children[0].setAttribute('data-progress', "" + progressStr); } return this.lastRenderedProgress = this.progress; }; Bar.prototype.done = function() { return this.progress >= 100; }; return Bar; })(); Events = (function() { function Events() { this.bindings = {}; } Events.prototype.trigger = function(name, val) { var binding, l, len1, ref1, results; if (this.bindings[name] != null) { ref1 = this.bindings[name]; results = []; for (l = 0, len1 = ref1.length; l < len1; l++) { binding = ref1[l]; results.push(binding.call(this, val)); } return results; } }; Events.prototype.on = function(name, fn) { var base; if ((base = this.bindings)[name] == null) { base[name] = []; } return this.bindings[name].push(fn); }; return Events; })(); _XMLHttpRequest = window.XMLHttpRequest; _XDomainRequest = window.XDomainRequest; _WebSocket = window.WebSocket; extendNative = function(to, from) { var e, error, key, results; results = []; for (key in from.prototype) { try { if ((to[key] == null) && typeof from[key] !== 'function') { if (typeof Object.defineProperty === 'function') { results.push(Object.defineProperty(to, key, { get: function() { return from.prototype[key]; }, configurable: true, enumerable: true })); } else { results.push(to[key] = from.prototype[key]); } } else { results.push(void 0); } } catch (error) { e = error; } } return results; }; ignoreStack = []; Pace.ignore = function() { var args, fn, ret; fn = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; ignoreStack.unshift('ignore'); ret = fn.apply(null, args); ignoreStack.shift(); return ret; }; Pace.track = function() { var args, fn, ret; fn = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; ignoreStack.unshift('track'); ret = fn.apply(null, args); ignoreStack.shift(); return ret; }; shouldTrack = function(method) { var ref1; if (method == null) { method = 'GET'; } if (ignoreStack[0] === 'track') { return 'force'; } if (!ignoreStack.length && options.ajax) { if (method === 'socket' && options.ajax.trackWebSockets) { return true; } else if (ref1 = method.toUpperCase(), indexOf.call(options.ajax.trackMethods, ref1) >= 0) { return true; } } return false; }; RequestIntercept = (function(superClass) { extend1(RequestIntercept, superClass); function RequestIntercept() { var monitorXHR; RequestIntercept.__super__.constructor.apply(this, arguments); monitorXHR = (function(_this) { return function(req) { var _open; _open = req.open; return req.open = function(type, url, async) { if (shouldTrack(type)) { _this.trigger('request', { type: type, url: url, request: req }); } return _open.apply(req, arguments); }; }; })(this); window.XMLHttpRequest = function(flags) { var req; req = new _XMLHttpRequest(flags); monitorXHR(req); return req; }; try { extendNative(window.XMLHttpRequest, _XMLHttpRequest); } catch (undefined) {} if (_XDomainRequest != null) { window.XDomainRequest = function() { var req; req = new _XDomainRequest; monitorXHR(req); return req; }; try { extendNative(window.XDomainRequest, _XDomainRequest); } catch (undefined) {} } if ((_WebSocket != null) && options.ajax.trackWebSockets) { window.WebSocket = (function(_this) { return function(url, protocols) { var req; if (protocols != null) { req = new _WebSocket(url, protocols); } else { req = new _WebSocket(url); } if (shouldTrack('socket')) { _this.trigger('request', { type: 'socket', url: url, protocols: protocols, request: req }); } return req; }; })(this); try { extendNative(window.WebSocket, 
_WebSocket); } catch (undefined) {} } } return RequestIntercept; })(Events); _intercept = null; getIntercept = function() { if (_intercept == null) { _intercept = new RequestIntercept; } return _intercept; }; shouldIgnoreURL = function(url) { var l, len1, pattern, ref1; ref1 = options.ajax.ignoreURLs; for (l = 0, len1 = ref1.length; l < len1; l++) { pattern = ref1[l]; if (typeof pattern === 'string') { if (url.indexOf(pattern) !== -1) { return true; } } else { if (pattern.test(url)) { return true; } } } return false; }; getIntercept().on('request', function(arg) { var after, args, request, type, url; type = arg.type, request = arg.request, url = arg.url; if (shouldIgnoreURL(url)) { return; } if (!Pace.running && (options.restartOnRequestAfter !== false || shouldTrack(type) === 'force')) { args = arguments; after = options.restartOnRequestAfter || 0; if (typeof after === 'boolean') { after = 0; } return setTimeout(function() { var l, len1, ref1, ref2, results, stillActive; if (type === 'socket') { stillActive = request.readyState < 2; } else { stillActive = (0 < (ref1 = request.readyState) && ref1 < 4); } if (stillActive) { Pace.restart(); ref2 = Pace.sources; results = []; for (l = 0, len1 = ref2.length; l < len1; l++) { source = ref2[l]; if (source instanceof AjaxMonitor) { source.watch.apply(source, args); break; } else { results.push(void 0); } } return results; } }, after); } }); AjaxMonitor = (function() { function AjaxMonitor() { this.elements = []; getIntercept().on('request', (function(_this) { return function() { return _this.watch.apply(_this, arguments); }; })(this)); } AjaxMonitor.prototype.watch = function(arg) { var request, tracker, type, url; type = arg.type, request = arg.request, url = arg.url; if (shouldIgnoreURL(url)) { return; } if (type === 'socket') { tracker = new SocketRequestTracker(request); } else { tracker = new XHRRequestTracker(request); } return this.elements.push(tracker); }; return AjaxMonitor; })(); XHRRequestTracker = (function() { function XHRRequestTracker(request) { var _onreadystatechange, event, l, len1, ref1, size; this.progress = 0; if (window.ProgressEvent != null) { size = null; request.addEventListener('progress', (function(_this) { return function(evt) { if (evt.lengthComputable) { return _this.progress = 100 * evt.loaded / evt.total; } else { return _this.progress = _this.progress + (100 - _this.progress) / 2; } }; })(this), false); ref1 = ['load', 'abort', 'timeout', 'error']; for (l = 0, len1 = ref1.length; l < len1; l++) { event = ref1[l]; request.addEventListener(event, (function(_this) { return function() { return _this.progress = 100; }; })(this), false); } } else { _onreadystatechange = request.onreadystatechange; request.onreadystatechange = (function(_this) { return function() { var ref2; if ((ref2 = request.readyState) === 0 || ref2 === 4) { _this.progress = 100; } else if (request.readyState === 3) { _this.progress = 50; } return typeof _onreadystatechange === "function" ? 
_onreadystatechange.apply(null, arguments) : void 0; }; })(this); } } return XHRRequestTracker; })(); SocketRequestTracker = (function() { function SocketRequestTracker(request) { var event, l, len1, ref1; this.progress = 0; ref1 = ['error', 'open']; for (l = 0, len1 = ref1.length; l < len1; l++) { event = ref1[l]; request.addEventListener(event, (function(_this) { return function() { return _this.progress = 100; }; })(this), false); } } return SocketRequestTracker; })(); ElementMonitor = (function() { function ElementMonitor(options) { var l, len1, ref1, selector; if (options == null) { options = {}; } this.elements = []; if (options.selectors == null) { options.selectors = []; } ref1 = options.selectors; for (l = 0, len1 = ref1.length; l < len1; l++) { selector = ref1[l]; this.elements.push(new ElementTracker(selector)); } } return ElementMonitor; })(); ElementTracker = (function() { function ElementTracker(selector1) { this.selector = selector1; this.progress = 0; this.check(); } ElementTracker.prototype.check = function() { if (document.querySelector(this.selector)) { return this.done(); } else { return setTimeout(((function(_this) { return function() { return _this.check(); }; })(this)), options.elements.checkInterval); } }; ElementTracker.prototype.done = function() { return this.progress = 100; }; return ElementTracker; })(); DocumentMonitor = (function() { DocumentMonitor.prototype.states = { loading: 0, interactive: 50, complete: 100 }; function DocumentMonitor() { var _onreadystatechange, ref1; this.progress = (ref1 = this.states[document.readyState]) != null ? ref1 : 100; _onreadystatechange = document.onreadystatechange; document.onreadystatechange = (function(_this) { return function() { if (_this.states[document.readyState] != null) { _this.progress = _this.states[document.readyState]; } return typeof _onreadystatechange === "function" ? 
_onreadystatechange.apply(null, arguments) : void 0; }; })(this); } return DocumentMonitor; })(); EventLagMonitor = (function() { function EventLagMonitor() { var avg, interval, last, points, samples; this.progress = 0; avg = 0; samples = []; points = 0; last = now(); interval = setInterval((function(_this) { return function() { var diff; diff = now() - last - 50; last = now(); samples.push(diff); if (samples.length > options.eventLag.sampleCount) { samples.shift(); } avg = avgAmplitude(samples); if (++points >= options.eventLag.minSamples && avg < options.eventLag.lagThreshold) { _this.progress = 100; return clearInterval(interval); } else { return _this.progress = 100 * (3 / (avg + 3)); } }; })(this), 50); } return EventLagMonitor; })(); Scaler = (function() { function Scaler(source1) { this.source = source1; this.last = this.sinceLastUpdate = 0; this.rate = options.initialRate; this.catchup = 0; this.progress = this.lastProgress = 0; if (this.source != null) { this.progress = result(this.source, 'progress'); } } Scaler.prototype.tick = function(frameTime, val) { var scaling; if (val == null) { val = result(this.source, 'progress'); } if (val >= 100) { this.done = true; } if (val === this.last) { this.sinceLastUpdate += frameTime; } else { if (this.sinceLastUpdate) { this.rate = (val - this.last) / this.sinceLastUpdate; } this.catchup = (val - this.progress) / options.catchupTime; this.sinceLastUpdate = 0; this.last = val; } if (val > this.progress) { this.progress += this.catchup * frameTime; } scaling = 1 - Math.pow(this.progress / 100, options.easeFactor); this.progress += scaling * this.rate * frameTime; this.progress = Math.min(this.lastProgress + options.maxProgressPerFrame, this.progress); this.progress = Math.max(0, this.progress); this.progress = Math.min(100, this.progress); this.lastProgress = this.progress; return this.progress; }; return Scaler; })(); sources = null; scalers = null; bar = null; uniScaler = null; animation = null; cancelAnimation = null; Pace.running = false; handlePushState = function() { if (options.restartOnPushState) { return Pace.restart(); } }; if (window.history.pushState != null) { _pushState = window.history.pushState; window.history.pushState = function() { handlePushState(); return _pushState.apply(window.history, arguments); }; } if (window.history.replaceState != null) { _replaceState = window.history.replaceState; window.history.replaceState = function() { handlePushState(); return _replaceState.apply(window.history, arguments); }; } SOURCE_KEYS = { ajax: AjaxMonitor, elements: ElementMonitor, document: DocumentMonitor, eventLag: EventLagMonitor }; (init = function() { var l, len1, len2, m, ref1, ref2, ref3, type; Pace.sources = sources = []; ref1 = ['ajax', 'elements', 'document', 'eventLag']; for (l = 0, len1 = ref1.length; l < len1; l++) { type = ref1[l]; if (options[type] !== false) { sources.push(new SOURCE_KEYS[type](options[type])); } } ref3 = (ref2 = options.extraSources) != null ? 
ref2 : []; for (m = 0, len2 = ref3.length; m < len2; m++) { source = ref3[m]; sources.push(new source(options)); } Pace.bar = bar = new Bar; scalers = []; return uniScaler = new Scaler; })(); Pace.stop = function() { Pace.trigger('stop'); Pace.running = false; bar.destroy(); cancelAnimation = true; if (animation != null) { if (typeof cancelAnimationFrame === "function") { cancelAnimationFrame(animation); } animation = null; } return init(); }; Pace.restart = function() { Pace.trigger('restart'); Pace.stop(); return Pace.start(); }; Pace.go = function() { var start; Pace.running = true; bar.render(); start = now(); cancelAnimation = false; return animation = runAnimation(function(frameTime, enqueueNextFrame) { var avg, count, done, element, elements, i, j, l, len1, len2, m, ref1, remaining, scaler, scalerList, sum; remaining = 100 - bar.progress; count = sum = 0; done = true; for (i = l = 0, len1 = sources.length; l < len1; i = ++l) { source = sources[i]; scalerList = scalers[i] != null ? scalers[i] : scalers[i] = []; elements = (ref1 = source.elements) != null ? ref1 : [source]; for (j = m = 0, len2 = elements.length; m < len2; j = ++m) { element = elements[j]; scaler = scalerList[j] != null ? scalerList[j] : scalerList[j] = new Scaler(element); done &= scaler.done; if (scaler.done) { continue; } count++; sum += scaler.tick(frameTime); } } avg = sum / count; bar.update(uniScaler.tick(frameTime, avg)); if (bar.done() || done || cancelAnimation) { bar.update(100); Pace.trigger('done'); return setTimeout(function() { bar.finish(); Pace.running = false; return Pace.trigger('hide'); }, Math.max(options.ghostTime, Math.max(options.minTime - (now() - start), 0))); } else { return enqueueNextFrame(); } }); }; Pace.start = function(_options) { var error; extend(options, _options); Pace.running = true; try { bar.render(); } catch (error) { NoTargetError = error; } if (!document.querySelector('.pace')) { return setTimeout(Pace.start, 50); } else { Pace.trigger('start'); return Pace.go(); } }; if (typeof exports === 'object') { module.exports = Pace; } if (options.startOnPageLoad) { Pace.start(); } }).call(this);
PypiClean
/CLIP-Crawler-0.7.0.tar.gz/CLIP-Crawler-0.7.0/CLIPy/interface.py
import os
from datetime import datetime

from . import database as db, processors, crawler
from .session import Session
from .utils import populate


class CacheStorage:
    def __init__(self, registry: db.SessionRegistry, database_cache=False):
        self.registry = registry
        self.controller = db.Controller(registry, cache=database_cache)

    @staticmethod
    def postgresql(username, password, schema, host=None):
        if host is None:  # Let it default to localhost
            engine = db.create_engine('postgresql', username=username, password=password, schema=schema)
        else:
            engine = db.create_engine('postgresql', username=username, password=password, schema=schema, host=host)
        return CacheStorage(db.SessionRegistry(engine))

    @staticmethod
    def sqlite(file):
        if not os.path.isfile(file):
            raise RuntimeError("Database file doesn't exist. Please create it manually.")
        engine = db.create_engine('sqlite', file=file)
        return CacheStorage(db.SessionRegistry(engine))


class Clip:
    def __init__(self, cache: CacheStorage, username, password):
        self.cache: CacheStorage = cache
        self.username = username
        self.password = password
        self.session = Session(username, password)

    def find_student(self, name, course_filter=None):
        return self.cache.controller.find_student(name, course=course_filter)

    def find_course(self, abbreviation, year=datetime.now().year):
        return self.cache.controller.get_course(abbreviation=abbreviation, year=year)

    def fetch_library_individual_room_availability(self, date: datetime.date = datetime.now().date()):
        return crawler.crawl_library_individual_room_availability(self.session, date)

    def fetch_library_group_room_availability(self, date: datetime.date = datetime.now().date()):
        return crawler.crawl_library_group_room_availability(self.session, date)

    def update_admissions(self):
        processors.institution_task(self.session, self.cache.registry, crawler.crawl_admissions)

    def update_teachers(self):
        processors.department_task(self.session, self.cache.registry, crawler.crawl_teachers)

    def update_classes(self):
        processors.department_task(self.session, self.cache.registry, crawler.crawl_classes)

    def update_class_info(self, year: int, period_part, period_parts):
        period = self.cache.controller.get_period(period_part, period_parts)
        if period is None:
            raise ValueError("Invalid period")
        processors.class_task(self.session, self.cache.registry, crawler.crawl_class_info,
                              year=year, period=period)

    def update_class_enrollments(self, year: int, period_part, period_parts):
        period = self.cache.controller.get_period(period_part, period_parts)
        if period is None:
            raise ValueError("Invalid period")
        processors.class_task(self.session, self.cache.registry, crawler.crawl_class_info,
                              year=year, period=period)

    def update_turns(self, year, period_part, period_parts):
        period = self.cache.controller.get_period(period_part, period_parts)
        if period is None:
            raise ValueError("Invalid period")
        processors.class_task(self.session, self.cache.registry, crawler.crawl_class_turns,
                              year=year, period=period)

    def update_class_files(self, year, period_part, period_parts):
        period = self.cache.controller.get_period(period_part, period_parts)
        if period is None:
            raise ValueError("Invalid period")
        processors.class_task(self.session, self.cache.registry, crawler.crawl_files,
                              year=year, period=period)
        processors.class_task(self.session, self.cache.registry, crawler.download_files,
                              year=year, period=period)

    @staticmethod
    def populate(username, password, storage: CacheStorage, year: int = None, period: int = None):
        populate.bootstrap_database(Session(username, password), storage.registry, year, period)
        return Clip(storage, username, password)
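A minimal usage sketch of the interface above, assuming these names are importable from CLIPy.interface; the credentials, database path, year, and period values are placeholders, not real values:

# Hypothetical usage; 'clip.db', the credentials, and the period are placeholders.
from CLIPy.interface import CacheStorage, Clip

# CacheStorage.sqlite() refuses to create the file, so it must exist already.
storage = CacheStorage.sqlite('clip.db')

# populate() bootstraps the database and hands back a ready Clip instance.
clip = Clip.populate('student123', 'secret', storage)

# Incremental updates for one academic period (e.g. semester 2 of 2 parts).
clip.update_class_info(year=2018, period_part=2, period_parts=2)
clip.update_turns(year=2018, period_part=2, period_parts=2)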
PypiClean
/Cohen-0.7.4.tar.gz/Cohen-0.7.4/CHANGELOG.rst
0.7.3 - Fixes and improvements
------------------------------

0.7.2 - Minor bugfixes
----------------------

Fixes:

- Fix issue when Cohen fails to be discovered by xbox 360 dlna client
- Fix issue when using Lazy Container on Samsung AllShare on 2012 Samsung TV fails

0.7.0 - Cohen project started
-----------------------------

General:

- lots of refactoring
- removed lots of Coherence stuff
- moved to lxml instead of (c)ElementTree
- Twisted >= 14.0 is now required
- livestreamer is now required
- cleanups and fixes

Backends:

- twitch.tv backend added

0.0.1 - 0.7.0 - Coherence project
---------------------------------

Changelog skipped
PypiClean
/JoeLiu-RF%20Refactoring-1.0.9.tar.gz/JoeLiu-RF Refactoring-1.0.9/rfrefactoring/newRfrefactoring/common/utility.py
import os
import platform


def normalize(text):
    return text.lower().replace(' ', '').replace('_', '')


def recovery_models(models):
    for model in models:
        if isinstance(model, list):
            recovery_models(model)
        else:
            model.save()


def is_KeywordCall(node):
    return node.__class__.__name__ == 'KeywordCall'


def is_ForLoop(node):
    return node.__class__.__name__ == 'ForLoop'


def is_Keyword_tag(node):
    return node.__class__.__name__ in (
        'SuiteSetup', 'SuiteTeardown', 'TestSetup',
        'TestTeardown', 'Setup', 'Teardown'
    )


def get_file_name_from_path(_path):
    return os.path.split(_path)[1]


def get_file_extension_from_path(_path):
    return os.path.splitext(_path)[1]


def get_keywords_for_run_keywords(tokens):
    keywordToken = []
    KeywordsToken = []
    keywordsList = []
    for token in tokens:
        if token.value != 'AND':
            keywordToken.append(token)
        else:
            KeywordsToken.append(list(keywordToken))
            keywordToken = []
    if keywordToken != []:
        KeywordsToken.append(list(keywordToken))
        keywordToken = []
    for keywordTokenList in KeywordsToken:
        keywordDict = {'keywordName': None, 'arguments': []}
        for index, keyword in enumerate(keywordTokenList):
            if index == 0:
                keywordDict['keywordName'] = keyword
            else:
                keywordDict['arguments'].append(keyword)
        keywordsList.append(keywordDict)
    return keywordsList


def clear_screen():
    """Clear the terminal screen."""
    command = 'cls' if platform.system().lower() == 'windows' else 'clear'
    os.system(command)


def get_number_from_user(text):
    number = ''
    while not number.isdigit():
        number = input(text)
    return number


def get_folder_path_from_user(text):
    projectPath = ''
    while not os.path.isdir(projectPath):
        clear_screen()
        projectPath = input(text)
    return projectPath


def get_file_path_from_user(text):
    filePath = ''
    while not os.path.isfile(filePath):
        filePath = input(text)
    return filePath


def save_model_and_update_old_models(model, oldModels):
    def update_model(model, allModels):
        for index, oldModel in enumerate(allModels):
            if isinstance(oldModel, list):
                update_model(model, oldModel)
            elif model.source == oldModel.source:
                allModels[index] = model

    model.save()
    update_model(model, oldModels)
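A small illustrative run of get_keywords_for_run_keywords. The Token namedtuple below is a hypothetical stand-in that only mimics the .value attribute the function reads; real Robot Framework tokens carry more state:

from collections import namedtuple

# Hypothetical stand-in for a Robot Framework token; only .value is used here.
Token = namedtuple('Token', ['value'])

tokens = [Token('Log'), Token('hello'), Token('AND'),
          Token('Should Be Equal'), Token('1'), Token('1')]

for kw in get_keywords_for_run_keywords(tokens):
    print(kw['keywordName'].value, [arg.value for arg in kw['arguments']])
# Log ['hello']
# Should Be Equal ['1', '1']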
PypiClean
/Indomielibs-2.0.106.tar.gz/Indomielibs-2.0.106/pyrogram/sync.py
import asyncio
import functools
import inspect
import threading

from pyrogram import types
from pyrogram.methods import Methods
from pyrogram.methods.utilities import idle as idle_module, compose as compose_module


def async_to_sync(obj, name):
    function = getattr(obj, name)
    main_loop = asyncio.get_event_loop()

    def async_to_sync_gen(agen, loop, is_main_thread):
        async def anext(agen):
            try:
                return await agen.__anext__(), False
            except StopAsyncIteration:
                return None, True

        while True:
            if is_main_thread:
                item, done = loop.run_until_complete(anext(agen))
            else:
                item, done = asyncio.run_coroutine_threadsafe(anext(agen), loop).result()

            if done:
                break

            yield item

    @functools.wraps(function)
    def async_to_sync_wrap(*args, **kwargs):
        coroutine = function(*args, **kwargs)

        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)

        if threading.current_thread() is threading.main_thread() or not main_loop.is_running():
            if loop.is_running():
                return coroutine
            else:
                if inspect.iscoroutine(coroutine):
                    return loop.run_until_complete(coroutine)

                if inspect.isasyncgen(coroutine):
                    return async_to_sync_gen(coroutine, loop, True)
        else:
            if inspect.iscoroutine(coroutine):
                if loop.is_running():
                    async def coro_wrapper():
                        return await asyncio.wrap_future(
                            asyncio.run_coroutine_threadsafe(coroutine, main_loop))

                    return coro_wrapper()
                else:
                    return asyncio.run_coroutine_threadsafe(coroutine, main_loop).result()

            if inspect.isasyncgen(coroutine):
                if loop.is_running():
                    return coroutine
                else:
                    return async_to_sync_gen(coroutine, main_loop, False)

    setattr(obj, name, async_to_sync_wrap)


def wrap(source):
    for name in dir(source):
        method = getattr(source, name)

        if not name.startswith("_"):
            if inspect.iscoroutinefunction(method) or inspect.isasyncgenfunction(method):
                async_to_sync(source, name)


# Wrap all Client's relevant methods
wrap(Methods)

# Wrap types' bound methods
for class_name in dir(types):
    cls = getattr(types, class_name)

    if inspect.isclass(cls):
        wrap(cls)

# Special case for idle and compose, because they are not inside Methods
async_to_sync(idle_module, "idle")
idle = getattr(idle_module, "idle")

async_to_sync(compose_module, "compose")
compose = getattr(compose_module, "compose")
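The wrapping trick above generalizes beyond this module; here is a minimal self-contained sketch of the same idea on a toy class. This is not pyrogram's API, just the pattern reduced to its core (the Toy class and names are invented):

import asyncio
import inspect

class Toy:
    async def greet(self, name):
        await asyncio.sleep(0)  # pretend to do I/O
        return f"hello {name}"

def simple_async_to_sync(obj, name):
    # Reduced version of the pattern above: if a call produces a coroutine
    # and we are in plain synchronous code, drive it on an event loop.
    function = getattr(obj, name)

    def wrapper(*args, **kwargs):
        coro = function(*args, **kwargs)
        if inspect.iscoroutine(coro):
            return asyncio.run(coro)
        return coro

    setattr(obj, name, wrapper)

simple_async_to_sync(Toy, 'greet')
print(Toy().greet('world'))  # works without await: "hello world"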
PypiClean
/BIT_framework-0.0.2-py3-none-any.whl/BIT_DL/pytorch/utils/average_recorder.py
from collections import deque
from typing import Deque, Dict, List, Optional, Union, no_type_check

from texar.torch.utils.types import MaybeList, MaybeSeq

__all__ = [
    '_SingleAverageRecorder',
    'AverageRecorder',
]

Scalar = Union[int, float]
ID = Union[int, str]
Record = Union[Dict[ID, Scalar], MaybeSeq[Scalar]]


class _SingleAverageRecorder:
    r"""Maintains the moving average (i.e., the average of the latest N
    records) of a single metric.

    Args:
        size (int, optional): The window size of moving average. If `None`,
            the average of all added records is maintained.
        name (str, optional): name of the recorder. Used when printing.
    """

    def __init__(self, size: Optional[int] = None, name: Optional[str] = None):
        if size is not None and size <= 0:
            raise ValueError("`size` must be > 0 or `None`.")
        self._size = size
        self._q: Deque[Scalar] = deque([])
        self._w: Deque[Scalar] = deque([])
        self._sum = 0.
        self._w_sum: Scalar = 0
        self._name = name

    def add(self, record: Scalar, weight: Optional[Scalar] = None):
        r"""Appends a new record.

        Args:
            record: A scalar; the new record to append.
            weight (optional): A scalar, weight of the new record for
                calculating a weighted average. If `None`, weight is set to
                ``1``. For example, :attr:`weight` can be set to batch size
                and :attr:`record` the average value of certain metric on the
                batch in order to calculate the average metric value on a
                whole dataset.

        Returns:
            The (moving) average after appending the record.
        """
        w = weight if weight is not None else 1
        self._w_sum += w
        self._sum += record * w
        if self._size is not None:
            if len(self._q) == self._size:
                w_pop = self._w.popleft()
                self._sum -= self._q.popleft() * w_pop
                self._w_sum -= w_pop
            self._q.append(record)
            self._w.append(w)
        return self.avg()

    def avg(self) -> float:
        r"""Returns the (moving) average.
        """
        if self._w_sum == 0:
            return 0.
        return self._sum / self._w_sum

    def reset(self) -> None:
        r"""Cleans all records.
        """
        self._q.clear()
        self._w.clear()
        self._sum = 0.
        self._w_sum = 0

    def to_str(self, precision: Optional[int] = None) -> str:
        r"""Returns a string of the average value.

        Args:
            precision (int, optional): The number of decimal places to keep in
                the returned string. For example, for an average value of
                ``0.1234``, :python:`precision = 2` leads to ``"0.12"``.

        Returns:
            A string of the average value. If :meth:`name` is given, the
            string is of the format like ``"name: 0.1234"``, otherwise the
            string is of the format like ``"0.1234"``.
        """
        prec_str = "{}"
        if precision is not None:
            prec_str = "{:.%df}" % precision
        avg_str = prec_str.format(self.avg())
        if self._name is not None:
            avg_str = "{}: {}".format(self._name, avg_str)
        return avg_str

    @property
    def name(self) -> str:
        r"""The name of the recorder.
        """
        return self._name


class AverageRecorder:
    r"""Maintains the moving averages (i.e., the average of the latest N
    records) of (possibly multiple) fields.

    Fields are determined by the first call of :meth:`add`.

    Args:
        size (int, optional): The window size of moving average. If `None`,
            the average of all added records is maintained.

    Example:
        .. code-block:: python

            ## Use to maintain moving average of training loss
            avg_rec = AverageRecorder(size=10) # average over latest 10 records
            while training:
                loss_0, loss_1 = ...
                avg_rec.add([loss_0, loss_1])
                # avg_rec.avg() == [0.12343452, 0.567800323]
                # avg_rec.avg(0) == 0.12343452
                # avg_rec.to_str(precision=2, ) == '0.12 0.57'

            ## Use to maintain average of test metrics on the whole test set
            avg_rec = AverageRecorder() # average over ALL records
            while test:
                metric_0, metric_1 = ...
                avg_rec.add({'m0': metric_0, 'm1': metric_1}) # dict is allowed
            print(avg_rec.to_str(precision=4, delimiter=' , '))
            # 'm0: 0.1234 , m1: 0.5678'
            #
            # avg_rec.avg() == {'m0': 0.12343452, 'm1': 0.567800323}
            # avg_rec.avg(0) == 0.12343452
    """

    _recorders: Dict[ID, _SingleAverageRecorder]
    _record_type: type

    def __init__(self, size: Optional[int] = None):
        if size is not None and size <= 0:
            raise ValueError("`size` must be > 0 or `None`.")
        self._size = size
        self._recorders = None  # type: ignore
        self._default_metric_name = "metric"
        self._record_type = None  # type: ignore

    @no_type_check
    def _to_dict(self, record: Record) -> Dict[ID, Scalar]:
        if isinstance(record, dict):
            record_dict = record
        elif isinstance(record, (list, tuple)):
            record_dict = dict(enumerate(record))
        else:
            record_dict = {self._default_metric_name: record}
        return record_dict

    def add(self, record: Record, weight: Optional[Scalar] = None):
        r"""Appends a new record.

        :attr:`record` can be a ``list``, ``dict``, or a single scalar. The
        record type is determined at the first time :meth:`add` is called.
        All subsequent calls to :meth:`add` must have the same type of
        :attr:`record`.

        :attr:`record` in subsequent calls to :meth:`add` can contain only
        a subset of fields than the first call to :meth:`add`.

        Example:
            .. code-block:: python

                recorder.add({'1': 0.2, '2': 0.2}) # 1st call to `add`
                x = recorder.add({'1': 0.4}) # 2nd call to `add`
                # x == {'1': 0.3, '2': 0.2}

        Args:
            record: A single scalar, a list of scalars, or a dict of scalars.
            weight (optional): A scalar, weight of the new record for
                calculating a weighted average. If `None`, weight is set to
                ``1``. For example, :attr:`weight` can be set to batch size
                and :attr:`record` the average value of certain metrics on the
                batch in order to calculate the average metric values on a
                whole dataset.

        Returns:
            The (moving) average after appending the record, with the same
            type as :attr:`record`.
        """
        if self._record_type is None:
            self._record_type = type(record)
        elif self._record_type != type(record):
            raise ValueError('The type of `record` is not consistent. '
                             'Expect type `{}`'.format(self._record_type))

        record_dict = self._to_dict(record)
        if self._recorders is None:
            self._recorders = {
                name: _SingleAverageRecorder(
                    self._size, name if self._record_type == dict else None)
                for name in record_dict.keys()
            }

        for name, val in record_dict.items():
            self._recorders[name].add(val, weight=weight)

        return self.avg()

    def avg(self, id_or_name: Optional[MaybeList[ID]] = None) -> Record:
        r"""Returns the (moving) average.

        Args:
            id_or_name (optional): A list of or a single element. Each element
                is the index (if the record type is ``list``) or name (if the
                record type is ``dict``) of the field for which the average is
                calculated. If not given, the average of all fields are
                returned.

        Returns:
            The average value(s). If :attr:`id_or_name` is a single element
            (not a list), then returns the average value of the corresponding
            field. Otherwise, if :attr:`id_or_name` is a list of element(s),
            then returns average value(s) in the same type as :attr:`record`
            of :meth:`add`.
        """
        if self._recorders is None:
            return 0.

        keys = id_or_name
        if keys is None:
            keys = list(self._recorders.keys())
        if not isinstance(keys, (list, tuple)):
            return self._recorders[keys].avg()

        avg = {key: self._recorders[key].avg() for key in keys}
        if self._record_type in {list, tuple}:
            ret_avg = []
            for k, v in avg.items():
                if k in keys:
                    ret_avg.append(v)
            return self._record_type(ret_avg)
        elif self._record_type == dict:
            return avg
        else:
            return avg[self._default_metric_name]

    def reset(self, id_or_name: Optional[MaybeList[ID]] = None):
        r"""Resets the record.

        Args:
            id_or_name (optional): A list or a single element. Each element is
                the index (if the record type is ``list``) or name (if the
                record type is ``dict``) of the field to reset. If `None`, all
                fields are reset.
        """
        keys = id_or_name
        if keys is None:
            keys = list(self._recorders.keys())
        elif not isinstance(keys, (list, tuple)):
            keys = [keys]
        for key in keys:
            self._recorders[key].reset()

    def to_str(self, precision: Optional[int] = None,
               delimiter: str = ' ') -> str:
        r"""Returns a string of the average values of the records.

        Args:
            precision (int, optional): The number of decimal places to keep in
                the returned string. For example, for an average value of
                ``0.1234``, :python:`precision = 2` leads to ``"0.12"``.
            delimiter (str): The delimiter string that separates between
                fields.

        Returns:
            A string of the average values.

            If record is of type ``dict``, the string is a concatenation of
            ``"field_name: average_value"``, delimited with :attr:`delimiter`.
            For example, ``"field_name_1: 0.1234 field_name_2: 0.5678 ..."``.

            Otherwise, the string is of a concatenation of 'average_value'.
            For example, ``"0.1234 0.5678 ..."``
        """
        strs = {name: rec.to_str(precision=precision)
                for name, rec in self._recorders.items()}
        str_list: List[str] = []

        if self._record_type in [list, tuple]:
            # Enumerates the keys in order, which are the indexes
            str_list.extend(strs[i] for i in range(len(strs)))
        elif self._record_type == dict:
            str_list = list(strs.values())
        else:
            str_list = [strs[self._default_metric_name]]

        avg_str = delimiter.join(str_list)
        return avg_str
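A quick numeric check of the windowed arithmetic above (assuming the module is importable; the numbers are illustrative):

rec = _SingleAverageRecorder(size=2, name='loss')
rec.add(1.0)        # window = [1.0]            -> avg 1.0
rec.add(3.0)        # window = [1.0, 3.0]       -> avg 2.0
avg = rec.add(5.0)  # 1.0 drops out: [3.0, 5.0] -> avg 4.0
print(rec.to_str(precision=2))  # "loss: 4.00"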
PypiClean
/Authlib-1.2.1.tar.gz/Authlib-1.2.1/authlib/oauth2/rfc6749/grants/base.py
from authlib.consts import default_json_headers
from ..requests import OAuth2Request
from ..errors import InvalidRequestError


class BaseGrant(object):
    #: Allowed client auth methods for token endpoint
    TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic']

    #: Designed for which "grant_type"
    GRANT_TYPE = None

    # NOTE: there is no charset for application/json, since
    # application/json should always in UTF-8.
    # The example on RFC is incorrect.
    # https://tools.ietf.org/html/rfc4627
    TOKEN_RESPONSE_HEADER = default_json_headers

    def __init__(self, request: OAuth2Request, server):
        self.prompt = None
        self.redirect_uri = None
        self.request = request
        self.server = server
        self._hooks = {
            'after_validate_authorization_request': set(),
            'after_validate_consent_request': set(),
            'after_validate_token_request': set(),
            'process_token': set(),
        }

    @property
    def client(self):
        return self.request.client

    def generate_token(self, user=None, scope=None, grant_type=None,
                       expires_in=None, include_refresh_token=True):
        if grant_type is None:
            grant_type = self.GRANT_TYPE
        return self.server.generate_token(
            client=self.request.client,
            grant_type=grant_type,
            user=user,
            scope=scope,
            expires_in=expires_in,
            include_refresh_token=include_refresh_token,
        )

    def authenticate_token_endpoint_client(self):
        """Authenticate client with the given methods for token endpoint.

        For example, the client makes the following HTTP request using TLS:

        .. code-block:: http

            POST /token HTTP/1.1
            Host: server.example.com
            Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW
            Content-Type: application/x-www-form-urlencoded

            grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA
            &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb

        Default available methods are: "none", "client_secret_basic" and
        "client_secret_post".

        :return: client
        """
        client = self.server.authenticate_client(
            self.request, self.TOKEN_ENDPOINT_AUTH_METHODS)
        self.server.send_signal(
            'after_authenticate_client',
            client=client, grant=self)
        return client

    def save_token(self, token):
        """A method to save token into database."""
        return self.server.save_token(token, self.request)

    def validate_requested_scope(self):
        """Validate if requested scope is supported by Authorization Server."""
        scope = self.request.scope
        state = self.request.state
        return self.server.validate_requested_scope(scope, state)

    def register_hook(self, hook_type, hook):
        if hook_type not in self._hooks:
            raise ValueError(
                'Hook type %s is not in %s.' % (hook_type, self._hooks))
        self._hooks[hook_type].add(hook)

    def execute_hook(self, hook_type, *args, **kwargs):
        for hook in self._hooks[hook_type]:
            hook(self, *args, **kwargs)


class TokenEndpointMixin(object):
    #: Allowed HTTP methods of this token endpoint
    TOKEN_ENDPOINT_HTTP_METHODS = ['POST']

    #: Designed for which "grant_type"
    GRANT_TYPE = None

    @classmethod
    def check_token_endpoint(cls, request: OAuth2Request):
        return request.grant_type == cls.GRANT_TYPE and \
            request.method in cls.TOKEN_ENDPOINT_HTTP_METHODS

    def validate_token_request(self):
        raise NotImplementedError()

    def create_token_response(self):
        raise NotImplementedError()


class AuthorizationEndpointMixin(object):
    RESPONSE_TYPES = set()
    ERROR_RESPONSE_FRAGMENT = False

    @classmethod
    def check_authorization_endpoint(cls, request: OAuth2Request):
        return request.response_type in cls.RESPONSE_TYPES

    @staticmethod
    def validate_authorization_redirect_uri(request: OAuth2Request, client):
        if request.redirect_uri:
            if not client.check_redirect_uri(request.redirect_uri):
                raise InvalidRequestError(
                    f'Redirect URI {request.redirect_uri} is not supported by client.',
                    state=request.state)
            return request.redirect_uri
        else:
            redirect_uri = client.get_default_redirect_uri()
            if not redirect_uri:
                raise InvalidRequestError(
                    'Missing "redirect_uri" in request.',
                    state=request.state)
            return redirect_uri

    def validate_consent_request(self):
        redirect_uri = self.validate_authorization_request()
        self.execute_hook('after_validate_consent_request', redirect_uri)
        self.redirect_uri = redirect_uri

    def validate_authorization_request(self):
        raise NotImplementedError()

    def create_authorization_response(self, redirect_uri: str, grant_user):
        raise NotImplementedError()
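A sketch of how a concrete grant could build on these pieces; the grant class and the hook below are illustrative, not Authlib's shipped implementations:

# Illustrative only: a skeletal grant combining the base class and the
# token-endpoint mixin, plus a hook registered per instance.
class ClientCredentialsLikeGrant(BaseGrant, TokenEndpointMixin):
    GRANT_TYPE = 'client_credentials'

    def validate_token_request(self):
        self.authenticate_token_endpoint_client()
        self.validate_requested_scope()

    def create_token_response(self):
        token = self.generate_token(scope=self.request.scope)
        self.save_token(token)
        self.execute_hook('process_token', token=token)
        return 200, token, self.TOKEN_RESPONSE_HEADER


def audit_token(grant, token=None):
    # execute_hook passes the grant first, then the hook's keyword arguments.
    print('issued token for client', grant.client)

# Wiring (the request/server objects come from the framework):
# grant = ClientCredentialsLikeGrant(request, server)
# grant.register_hook('process_token', audit_token)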
PypiClean
/DJModels-0.0.6-py3-none-any.whl/djmodels/dispatch/dispatcher.py
import threading
import weakref

from djmodels.utils.inspect import func_accepts_kwargs


def _make_id(target):
    if hasattr(target, '__func__'):
        return (id(target.__self__), id(target.__func__))
    return id(target)


NONE_ID = _make_id(None)

# A marker for caching
NO_RECEIVERS = object()


class Signal:
    """
    Base class for all signals

    Internal attributes:

        receivers
            { receiverkey (id) : weakref(receiver) }
    """

    def __init__(self, providing_args=None, use_caching=False):
        """
        Create a new signal.

        providing_args
            A list of the arguments this signal can pass along in a send()
            call.
        """
        self.receivers = []
        if providing_args is None:
            providing_args = []
        self.providing_args = set(providing_args)
        self.lock = threading.Lock()
        self.use_caching = use_caching
        # For convenience we create empty caches even if they are not used.
        # A note about caching: if use_caching is defined, then for each
        # distinct sender we cache the receivers that sender has in
        # 'sender_receivers_cache'. The cache is cleaned when .connect() or
        # .disconnect() is called and populated on send().
        self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {}
        self._dead_receivers = False

    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Connect receiver to sender for signal.

        Arguments:

            receiver
                A function or an instance method which is to receive signals.
                Receivers must be hashable objects.

                If weak is True, then receiver must be weak referenceable.

                Receivers must be able to accept keyword arguments.

                If a receiver is connected with a dispatch_uid argument, it
                will not be added if another receiver was already connected
                with that dispatch_uid.

            sender
                The sender to which the receiver should respond. Must either
                be a Python object, or None to receive events from any sender.

            weak
                Whether to use weak references to the receiver. By default,
                the module will attempt to use weak references to the
                receiver objects. If this parameter is false, then strong
                references will be used.

            dispatch_uid
                An identifier used to uniquely identify a particular instance
                of a receiver. This will usually be a string, though it may be
                anything hashable.
        """
        from djmodels.conf import settings

        # If DEBUG is on, check that we got a good receiver
        if settings.configured and settings.DEBUG:
            assert callable(receiver), "Signal receivers must be callable."

            # Check for **kwargs
            if not func_accepts_kwargs(receiver):
                raise ValueError("Signal receivers must accept keyword arguments (**kwargs).")

        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            ref = weakref.ref
            receiver_object = receiver
            # Check for bound methods
            if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
                ref = weakref.WeakMethod
                receiver_object = receiver.__self__
            receiver = ref(receiver)
            weakref.finalize(receiver_object, self._remove_receiver)

        with self.lock:
            self._clear_dead_receivers()
            if not any(r_key == lookup_key for r_key, _ in self.receivers):
                self.receivers.append((lookup_key, receiver))
            self.sender_receivers_cache.clear()

    def disconnect(self, receiver=None, sender=None, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The
        receiver will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be none if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        disconnected = False
        with self.lock:
            self._clear_dead_receivers()
            for index in range(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    disconnected = True
                    del self.receivers[index]
                    break
            self.sender_receivers_cache.clear()
        return disconnected

    def has_listeners(self, sender=None):
        return bool(self._live_receivers(sender))

    def send(self, sender, **named):
        """
        Send signal from sender to all connected receivers.

        If any receiver raises an error, the error propagates back through
        send, terminating the dispatch loop. So it's possible that all
        receivers won't be called if an error is raised.

        Arguments:

            sender
                The sender of the signal. Either a specific object or None.

            named
                Named arguments which will be passed to receivers.

        Return a list of tuple pairs [(receiver, response), ... ].
        """
        if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
            return []

        return [
            (receiver, receiver(signal=self, sender=sender, **named))
            for receiver in self._live_receivers(sender)
        ]

    def send_robust(self, sender, **named):
        """
        Send signal from sender to all connected receivers catching errors.

        Arguments:

            sender
                The sender of the signal. Can be any python object (normally
                one registered with a connect if you actually want something
                to occur).

            named
                Named arguments which will be passed to receivers. These
                arguments must be a subset of the argument names defined in
                providing_args.

        Return a list of tuple pairs [(receiver, response), ... ].

        If any receiver raises an error (specifically any subclass of
        Exception), return the error instance as the result for that receiver.
        """
        if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
            return []

        # Call each receiver with whatever arguments it can accept.
        # Return a list of tuple pairs [(receiver, response), ... ].
        responses = []
        for receiver in self._live_receivers(sender):
            try:
                response = receiver(signal=self, sender=sender, **named)
            except Exception as err:
                responses.append((receiver, err))
            else:
                responses.append((receiver, response))
        return responses

    def _clear_dead_receivers(self):
        # Note: caller is assumed to hold self.lock.
        if self._dead_receivers:
            self._dead_receivers = False
            self.receivers = [
                r for r in self.receivers
                if not (isinstance(r[1], weakref.ReferenceType) and r[1]() is None)
            ]

    def _live_receivers(self, sender):
        """
        Filter sequence of receivers to get resolved, live receivers.

        This checks for weak references and resolves them, then returning only
        live receivers.
        """
        receivers = None
        if self.use_caching and not self._dead_receivers:
            receivers = self.sender_receivers_cache.get(sender)
            # We could end up here with NO_RECEIVERS even if we do check this
            # case in .send() prior to calling _live_receivers() due to
            # concurrent .send() call.
            if receivers is NO_RECEIVERS:
                return []
        if receivers is None:
            with self.lock:
                self._clear_dead_receivers()
                senderkey = _make_id(sender)
                receivers = []
                for (receiverkey, r_senderkey), receiver in self.receivers:
                    if r_senderkey == NONE_ID or r_senderkey == senderkey:
                        receivers.append(receiver)
                if self.use_caching:
                    if not receivers:
                        self.sender_receivers_cache[sender] = NO_RECEIVERS
                    else:
                        # Note, we must cache the weakref versions.
                        self.sender_receivers_cache[sender] = receivers
        non_weak_receivers = []
        for receiver in receivers:
            if isinstance(receiver, weakref.ReferenceType):
                # Dereference the weak reference.
                receiver = receiver()
                if receiver is not None:
                    non_weak_receivers.append(receiver)
            else:
                non_weak_receivers.append(receiver)
        return non_weak_receivers

    def _remove_receiver(self, receiver=None):
        # Mark that the self.receivers list has dead weakrefs. If so, we will
        # clean those up in connect, disconnect and _live_receivers while
        # holding self.lock. Note that doing the cleanup here isn't a good
        # idea, _remove_receiver() will be called as side effect of garbage
        # collection, and so the call can happen while we are already holding
        # self.lock.
        self._dead_receivers = True


def receiver(signal, **kwargs):
    """
    A decorator for connecting receivers to signals. Used by passing in the
    signal (or list of signals) and keyword arguments to connect::

        @receiver(post_save, sender=MyModel)
        def signal_receiver(sender, **kwargs):
            ...

        @receiver([post_save, post_delete], sender=MyModel)
        def signals_receiver(sender, **kwargs):
            ...
    """
    def _decorator(func):
        if isinstance(signal, (list, tuple)):
            for s in signal:
                s.connect(func, **kwargs)
        else:
            signal.connect(func, **kwargs)
        return func
    return _decorator
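A minimal run of the Signal/receiver pair above; the signal name and the receiver are invented for illustration, and it assumes the djmodels settings object behaves like Django's (unconfigured settings skip the DEBUG check in connect):

# Illustrative usage; 'my_signal' and 'on_event' are made-up names.
my_signal = Signal(providing_args=['value'])

@receiver(my_signal)
def on_event(sender, **kwargs):
    return 'got %s' % kwargs.get('value')

responses = my_signal.send(sender=None, value=42)
print(responses)  # [(<function on_event ...>, 'got 42')]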
PypiClean
/MOM-Tapyr-1.6.2.tar.gz/MOM-Tapyr-1.6.2/Legacy_Lifter.py
from __future__ import absolute_import, division, print_function, unicode_literals

from _MOM import MOM
from _TFL import TFL
from _TFL.pyk import pyk

import _TFL._Meta.Object
import _TFL.import_module
import _TFL.sos as os


class _MOM_Legacy_Lifter_ \
        (TFL.Meta.BaM (object, metaclass = TFL.Meta.M_Auto_Update_Combined)) :
    """Base class for project specific legacy lifters"""

    _real_name = "Legacy_Lifter"

    _attrs_to_update_combine = \
        ( "Type_Name_Renaming", "Type_Name_Lifter", "E_Type_Lifter")

    Type_Name_Renaming = \
        { "Auth.Account_P" : "Auth.Account"
        }

    Type_Name_Lifter = \
        { "Auth.Account" : "_account_lifter"
        }

    E_Type_Lifter = {}

    def __init__ (self, db_man) :
        self.db_man         = db_man
        self._et_lifter_map = {}
    # end def __init__

    def lift_change (self, chg_cls, chg_dct, children_pc) :
        type_name  = chg_dct ["type_name"]
        tn_changed = self.Type_Name_Renaming.get (type_name)
        if tn_changed :
            chg_dct ["type_name"] = tn_changed
            ### we changed the type name -> need to change it in the
            ### pickle cargo and the epk_pid, epk attributes as well
            for attr in "epk", "epk_pid" :
                chg_dct [attr] = chg_dct [attr] [:-1] + (tn_changed, )
            chg_dct ["pickle_cargo"] = \
                (tn_changed, ) + chg_dct ["pickle_cargo"] [1:]
        return chg_cls, chg_dct, children_pc
    # end def lift_change

    def lift_entity (self, type_name, pc, pid) :
        tn_changed = self.Type_Name_Renaming.get (type_name)
        if tn_changed :
            type_name = tn_changed
            if "Type_Name" in pc :
                pc ["Type_Name"] = [type_name]
        tn_lifter_name = self.Type_Name_Lifter.get (type_name)
        if tn_lifter_name :
            tn_lifter = getattr (self, tn_lifter_name)
            type_name, pc, pid = tn_lifter (type_name, pc, pid)
        if type_name and self.E_Type_Lifter :
            for et_lifter in self._et_lifters (type_name) :
                type_name, pc, pid = et_lifter (type_name, pc, pid)
        return type_name, pc, pid
    # end def lift_entity

    def _account_lifter (self, type_name, pc, pid) :
        if ("salt" in pc) and ("ph_name" not in pc) :
            salt = pc.pop ("salt") [0]
            pc ["password"] = ["%s::%s" % (salt, pc ["password"] [0])]
            pc ["ph_name"]  = ["sha224"]
        return type_name, pc, pid
    # end def _account_lifter

    def _et_lifters (self, type_name) :
        map = self._et_lifter_map
        try :
            result = map [type_name]
        except KeyError :
            result = map [type_name] = []
            apt    = self.db_man.app_type
            pc_et  = apt [type_name]
            for tn, et_lifter_name in pyk.iteritems (self.E_Type_Lifter) :
                et = apt [tn]
                if issubclass (pc_et, et) :
                    result.append (getattr (self, et_lifter_name))
            result.sort (key = TFL.Getter.i_rank)
        return result
    # end def _et_lifters

Legacy_Lifter = _MOM_Legacy_Lifter_ # end class


class Legacy_Lifter_Wrapper (TFL.Meta.Object) :
    """Wrapper around a `db_man` and a project specific legacy lifter"""

    def __init__ (self, db_man, module_name) :
        LL_Class           = TFL.import_module (module_name).Legacy_Lifter
        self.legacy_lifter = LL_Class (db_man)
    # end def __init__

    def entity_iter (self, db_iter) :
        lifter = self.legacy_lifter
        for type_name, pc, pid in db_iter :
            type_name, pc, pid = lifter.lift_entity (type_name, pc, pid)
            if type_name is not None :
                yield type_name, pc, pid
    # end def entity_iter

    def change_iter (self, db_iter) :
        lifter = self.legacy_lifter
        for chg_cls, chg_dct, children_pc in db_iter :
            yield lifter.lift_change (chg_cls, chg_dct, children_pc)
    # end def change_iter

# end class Legacy_Lifter_Wrapper

if __name__ != "__main__" :
    MOM._Export ("*")
### __END__ MOM.Legacy_Lifter
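A sketch of a project-specific lifter using the extension points above; the type names and the dropped attribute are placeholders, and it relies on the metaclass combining the class dictionaries with the base class as _attrs_to_update_combine suggests:

### Illustrative subclass; "PAP.Person_Old" and "obsolete_field" are made up.
class My_Legacy_Lifter (Legacy_Lifter) :

    Type_Name_Renaming = \
        { "PAP.Person_Old" : "PAP.Person"
        }

    Type_Name_Lifter = \
        { "PAP.Person" : "_person_lifter"
        }

    def _person_lifter (self, type_name, pc, pid) :
        ### drop an attribute that no longer exists in the new schema
        pc.pop ("obsolete_field", None)
        return type_name, pc, pid
    # end def _person_lifter

# end class My_Legacy_Lifter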
PypiClean
/KD_Lib-0.0.32.tar.gz/KD_Lib-0.0.32/KD_Lib/KD/vision/mean_teacher/mean_teacher.py
import torch
import torch.nn.functional as F
from torch import nn

from KD_Lib.KD.common import BaseClass


def symmetric_mse_loss(input1, input2):
    return torch.sum((input1 - input2) ** 2)


class MeanTeacher(BaseClass):
    """
    Implementation of Knowledge distillation using a mean teacher from the
    paper "Mean teachers are better role models: Weight-averaged consistency
    targets improve semi-supervised deep learning results"
    https://arxiv.org/abs/1703.01780

    :param teacher_model (torch.nn.Module): Teacher model
    :param student_model (torch.nn.Module): Student model
    :param train_loader (torch.utils.data.DataLoader): Dataloader for training
    :param val_loader (torch.utils.data.DataLoader): Dataloader for validation
    :param optimizer_teacher (torch.optim.*): Optimizer for training teacher
    :param optimizer_student (torch.optim.*): Optimizer for training student
    :param loss_fn (torch.nn.Module): Consistency criterion for loss
    :param class_loss (torch.nn.Module): Class Criterion for loss
    :param res_loss: Residual Logit Criterion for loss
    :param temp (float): Temperature parameter for distillation
    :param distil_weight (float): Weight parameter for distillation loss
    :param device (str): Device for training; 'cpu' for cpu and 'cuda' for gpu
    :param log (bool): True if logging required
    :param logdir (str): Directory for storing logs
    """

    def __init__(
        self,
        teacher_model,
        student_model,
        train_loader,
        val_loader,
        optimizer_teacher,
        optimizer_student,
        loss_fn=nn.MSELoss(),
        class_loss=nn.CrossEntropyLoss(),
        res_loss=symmetric_mse_loss,
        temp=20.0,
        distil_weight=0.5,
        device="cpu",
        log=False,
        logdir="./Experiments",
    ):
        super(MeanTeacher, self).__init__(
            teacher_model,
            student_model,
            train_loader,
            val_loader,
            optimizer_teacher,
            optimizer_student,
            loss_fn,
            temp,
            distil_weight,
            device,
            log,
            logdir,
        )

        self.class_loss = class_loss.to(self.device)
        try:
            self.res_loss = res_loss.to(self.device)
        except AttributeError:
            # res_loss may be a plain function (like symmetric_mse_loss)
            # rather than an nn.Module, in which case it has no .to().
            self.res_loss = res_loss
        self.loss_fn = loss_fn.to(self.device)
        self.log_softmax = nn.LogSoftmax(dim=1).to(self.device)

    def calculate_kd_loss(self, y_pred_student, y_pred_teacher, y_true):
        """
        Function used for calculating the KD loss during distillation

        :param y_pred_student (torch.FloatTensor): Prediction made by the student model
        :param y_pred_teacher (torch.FloatTensor): Prediction made by the teacher model
        :param y_true (torch.FloatTensor): Original label
        """
        class_logit, consis_logit = y_pred_student
        class_loss = self.class_loss(class_logit, y_true)

        num_classes = consis_logit.size()[1]
        res_loss = self.res_loss(class_logit, consis_logit) / num_classes

        # nn.LogSoftmax takes its dim at construction time, so the module is
        # applied without extra arguments here.
        student_softmax = self.log_softmax(consis_logit)
        teacher_softmax = self.log_softmax(y_pred_teacher[0])

        consis_loss = self.loss_fn(student_softmax, teacher_softmax) / num_classes

        return class_loss + res_loss + consis_loss

    def post_epoch_call(self, epoch):
        """
        Exponentially updates the weights of teacher model.

        :param epoch (int): current epoch
        """
        alpha = min(1e-3, epoch / (epoch + 1))
        param_zip = zip(
            self.teacher_model.parameters(), self.student_model.parameters()
        )
        for teacher_param, param in param_zip:
            teacher_param.data.mul_(alpha).add_(1 - alpha, param.data)
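A training-setup sketch under stated assumptions: the two-headed student matches what calculate_kd_loss above expects, the data is random filler, and the commented entry points assume KD_Lib's BaseClass exposes train_teacher/train_student as its other distillers do:

import torch
from torch import nn, optim
from torch.utils.data import DataLoader, TensorDataset

# Dummy data; real use would plug in an actual dataset.
xs, ys = torch.randn(64, 784), torch.randint(0, 10, (64,))
train_loader = val_loader = DataLoader(TensorDataset(xs, ys), batch_size=16)

class TwoHeadNet(nn.Module):
    """Returns (class_logit, consistency_logit), the pair that
    calculate_kd_loss unpacks; the teacher's output is indexed as
    y_pred_teacher[0], so the teacher uses the same shape here."""
    def __init__(self):
        super().__init__()
        self.backbone = nn.Linear(784, 128)
        self.class_head = nn.Linear(128, 10)
        self.consis_head = nn.Linear(128, 10)

    def forward(self, x):
        h = torch.relu(self.backbone(x))
        return self.class_head(h), self.consis_head(h)

teacher, student = TwoHeadNet(), TwoHeadNet()

distiller = MeanTeacher(
    teacher, student, train_loader, val_loader,
    optimizer_teacher=optim.SGD(teacher.parameters(), lr=0.01),
    optimizer_student=optim.SGD(student.parameters(), lr=0.01),
)
# Assumed BaseClass entry points (as in KD_Lib's other distillers):
# distiller.train_teacher(epochs=1)
# distiller.train_student(epochs=1)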
PypiClean
/Cubane-1.0.11.tar.gz/Cubane-1.0.11/cubane/cms/nav.py
from __future__ import unicode_literals

from django.conf import settings
from django.db.models import Q

from cubane.lib.module import register_class_extensions
from cubane.cms import get_page_model
from cubane.cms.models import PageBase


class CMSNavigationBuilder(object):
    @classmethod
    def register_extension(cls, *args):
        """
        Register a new extension(s) for the CMS navigation builder class.
        """
        return register_class_extensions('ExtendedCMSNavigationBuilder', cls, args)

    def __init__(self, cms, page_context, active_page=None,
                 current_page_or_child_page=None, cache_context=None):
        """
        Create a new navigation builder for the given page context.
        """
        self.cms = cms
        self.page_context = page_context
        self.active_page = active_page
        self.active_page_id = self.active_page.id if self.active_page else None
        self.current_page_or_child_page = current_page_or_child_page
        self.cache_context = cache_context
        self.child_page_cache = {}
        self.pages = self.cache_context.cached('PAGES', self.get_pages)

    def get_objects(self, objects):
        """
        Return a queryset that returns all pages on which bases navigation
        items are constructed.
        """
        return objects

    def get_pages(self):
        """
        Return a list of navigate-able content pages
        """
        page_model = get_page_model()

        related_fields = []
        if hasattr(settings, 'CMS_NAVIGATION_RELATED_FIELDS'):
            related_fields = related_fields + settings.CMS_NAVIGATION_RELATED_FIELDS

        related_fields = filter(
            lambda field: hasattr(page_model, field),
            related_fields
        )

        pages = list(
            page_model.filter_visibility(
                self.get_objects(
                    page_model.objects.select_related(*related_fields).filter(
                        Q(_nav__isnull=False) |       # appears in at least one
                                                      # navigation section
                        Q(identifier__isnull=False),  # OR has an identifier
                        disabled=False                # not disabled
                    ).order_by(
                        'seq', 'title'
                    )
                )
            )
        )
        return pages

    def get_title(self, page):
        """
        Return the navigation title for the given page.
        """
        return page.navigation_title if hasattr(page, 'navigation_title') and page.navigation_title else page.title

    def has_active_child(self, items):
        """
        Return True, if any child or sub-child of the given navigation items
        is active.
        """
        for item in items:
            if item.get('active'):
                return True

            if self.has_active_child(item.get('children')):
                return True

        return False

    def has_active_child_page(self, page, children):
        """
        Return True, if the current page is a child page of the navigation
        item page or any children thereof.
        """
        if not hasattr(self.current_page_or_child_page, 'page_id'):
            return False

        if self.current_is_child_page_of(page):
            return True

        if children:
            for child in children:
                if self.current_is_child_page_of(child):
                    return True

        return False

    def current_is_child_page_of(self, page):
        """
        Return True, if the current page is a child page of the given page.
        """
        pk = page.get('id') if isinstance(page, dict) else getattr(page, 'pk', None)
        return self.current_page_or_child_page.page_id == pk

    def child_page_model_excluded_from_nav(self, child_page_model):
        """
        Return True, if the given child page model is excluded from
        navigation.
        """
        if not hasattr(child_page_model, 'exclude_from_navigation'):
            return False
        else:
            return child_page_model.exclude_from_navigation

    def get_nav_child_pages(self, page, nav_name=None):
        """
        Return a list of navigation items for all child pages of the given
        page, if this feature is enabled via
        CMS_NAVIGATION_INCLUDE_CHILD_PAGES.
        """
        # empty list if feature is not enabled
        if not settings.CMS_NAVIGATION_INCLUDE_CHILD_PAGES:
            return []

        # empty list if the given page is not a page that has child pages
        if not isinstance(page, get_page_model()):
            return []

        # valid child page model that is not excluded for navigation
        child_page_model = page.get_entity_model()
        if not child_page_model or self.child_page_model_excluded_from_nav(child_page_model):
            return []

        # get cached list of child pages for the given page
        key = unicode(page.pk)
        if key not in self.child_page_cache:
            self.child_page_cache[key] = []

            # get child pages for this page
            child_pages = child_page_model.filter_visibility(
                child_page_model.objects.filter(page=page).exclude(disabled=True).order_by('seq')
            )
            self.child_page_cache[key] = list([self.get_nav_item(p, nav_name) for p in child_pages])

        # receive list of child pages
        return self.child_page_cache.get(key, [])

    def get_url_getter(self, page):
        """
        Return a getter method for receiving the URL for the given page.
        """
        def _get_url():
            return '/' if self.page_context.page_is_homepage(page) else page.get_absolute_url()
        return _get_url

    def get_nav_item(self, page, nav_name=None):
        """
        Return a navigation item for the given page.
        """
        # get children
        children = self.get_nav_children(page)
        nav_children = self.get_nav_children(page, nav_name)

        # related fields
        item_fields = {}
        if hasattr(settings, 'CMS_NAVIGATION_RELATED_FIELDS'):
            for field in settings.CMS_NAVIGATION_RELATED_FIELDS:
                if hasattr(page, field):
                    item_fields[field] = getattr(page, field)

        # child pages / posts
        child_pages = self.get_nav_child_pages(page, nav_name)

        item_fields.update({
            'id': page.id,
            'identifier': page.identifier if hasattr(page, 'identifier') else None,
            'title': self.get_title(page),
            'slug': page.slug,
            'page_title': page.title,
            'nav_title': page.navigation_title if hasattr(page, 'navigation_title') else page.title,
            'url': self.get_url_getter(page),
            'active': (
                (type(page) == type(self.current_page_or_child_page) and page.id == self.active_page_id) or
                (self.current_page_or_child_page != None and
                 page.id == self.current_page_or_child_page.id and
                 page.__class__ == self.current_page_or_child_page.__class__)
            ),
            'active_child': self.has_active_child(children),
            'active_child_page': self.has_active_child_page(page, children),
            'excerpt': page.excerpt,
            'entity_type': page.entity_type if hasattr(page, 'entity_type') else None,
            'children': children,
            'nav_children': nav_children,
            'child_pages': child_pages,

            # legacy
            'posts': child_pages,

            'nav_image': page.nav_image if hasattr(page, 'nav_image_id') else None,
            'updated_on': getattr(page, 'nav_updated_on', None)
        })
        return item_fields

    def get_nav_children(self, parent, nav_name=None):
        """
        Return the child pages of the given parent page based on the given
        list of all pages (cached).
        """
        # get children
        if isinstance(parent, PageBase) and settings.PAGE_HIERARCHY:
            children = filter(lambda p: isinstance(p, PageBase) and p.parent_id == parent.id, self.pages)
            if nav_name:
                children = filter(lambda p: nav_name in p.nav, children)
        else:
            children = []

        return [self.get_nav_item(p, nav_name) for p in children]

    def is_root_page(self, page):
        """
        Return True, if the given page is a root page (has no parent pages).
        """
        return page.parent_id == None

    def get_navigation(self):
        """
        Return the website-wide navigation objects which contains a list of
        all pages that are at least in one navigation bar and/or are navigable
        because a page defines a unique identifier.
        """
        # construct navigation structure
        _nav = {}
        _pages = {}
        _active_nav = None
        for p in self.pages:
            # add to list of navigatable pages if there is an identifier
            if hasattr(p, 'identifier') and p.identifier:
                _pages[p.identifier] = self.get_nav_item(p)

            # skip if enquiry template
            if self.page_context.page_is_enquiry_template(p):
                continue

            # skip if not root page
            if not self.is_root_page(p):
                continue

            # skip if no navigation is defined
            if not p._nav:
                continue

            # attach to navigation structure
            for name in p.nav:
                if name not in _nav:
                    _nav[name] = []

                item = self.get_nav_item(p, name)
                _nav[name].append(item)

                # return active navigation item separately
                if not _active_nav and (item.get('active') or item.get('active_child')):
                    _active_nav = item

        return (_nav, _active_nav, _pages)
PypiClean
/GraphLab_Create-2.1-cp27-none-macosx_10_5_x86_64.macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.macosx_10_11_intel.macosx_10_11_x86_64.whl/graphlab/mxnet/recordio.py
"""Python interface for DLMC RecrodIO data format""" from __future__ import absolute_import from collections import namedtuple import ctypes from .base import _LIB from .base import RecordIOHandle from .base import check_call import struct import numpy as np try: import cv2 opencv_available = True except ImportError: opencv_available = False class MXRecordIO(object): """Python interface for read/write RecordIO data formmat Parameters ---------- uri : string uri path to recordIO file. flag : string "r" for reading or "w" writing. """ def __init__(self, uri, flag): self.uri = ctypes.c_char_p(uri) self.handle = RecordIOHandle() self.flag = flag self.is_open = False self.open() def open(self): """Open record file""" if self.flag == "w": check_call(_LIB.MXRecordIOWriterCreate(self.uri, ctypes.byref(self.handle))) self.writable = True elif self.flag == "r": check_call(_LIB.MXRecordIOReaderCreate(self.uri, ctypes.byref(self.handle))) self.writable = False else: raise ValueError("Invalid flag %s"%self.flag) self.is_open = True def __del__(self): self.close() def close(self): """close record file""" if not self.is_open: return if self.writable: check_call(_LIB.MXRecordIOWriterFree(self.handle)) else: check_call(_LIB.MXRecordIOReaderFree(self.handle)) def reset(self): """Reset pointer to first item. If record is opened with 'w', this will truncate the file to empty""" self.close() self.open() def write(self, buf): """Write a string buffer as a record Parameters ---------- buf : string buffer to write. """ assert self.writable check_call(_LIB.MXRecordIOWriterWriteRecord(self.handle, ctypes.c_char_p(buf), ctypes.c_size_t(len(buf)))) def read(self): """Read a record as string Returns ---------- buf : string buffer read. """ assert not self.writable buf = ctypes.c_char_p() size = ctypes.c_size_t() check_call(_LIB.MXRecordIOReaderReadRecord(self.handle, ctypes.byref(buf), ctypes.byref(size))) if buf: buf = ctypes.cast(buf, ctypes.POINTER(ctypes.c_char*size.value)) return buf.contents.raw else: return None IRHeader = namedtuple('HEADER', ['flag', 'label', 'id', 'id2']) _IRFormat = 'IfQQ' _IRSize = struct.calcsize(_IRFormat) def pack(header, s): """pack an string into MXImageRecord Parameters ---------- header : IRHeader header of the image record s : str string to pack """ header = IRHeader(*header) s = struct.pack(_IRFormat, *header) + s return s def unpack(s): """unpack a MXImageRecord to string Parameters ---------- s : str string buffer from MXRecordIO.read Returns ------- header : IRHeader header of the image record s : str unpacked string """ header = IRHeader(*struct.unpack(_IRFormat, s[:_IRSize])) return header, s[_IRSize:] def unpack_img(s, iscolor=-1): """unpack a MXImageRecord to image Parameters ---------- s : str string buffer from MXRecordIO.read iscolor : int image format option for cv2.imdecode Returns ------- header : IRHeader header of the image record img : numpy.ndarray unpacked image """ header, s = unpack(s) img = np.fromstring(s, dtype=np.uint8) assert opencv_available img = cv2.imdecode(img, iscolor) return header, img def pack_img(header, img, quality=80, img_fmt='.jpg'): """pack an image into MXImageRecord Parameters ---------- header : IRHeader header of the image record img : numpy.ndarray image to pack quality : int quality for JPEG encoding. 1-100, or compression for PNG encoding. 1-9. img_fmt : str Encoding of the image. .jpg for JPEG, .png for PNG. 
Returns ------- s : str The packed string """ assert opencv_available jpg_formats = set(['.jpg', '.jpeg', '.JPG', '.JPEG']) png_formats = set(['.png', '.PNG']) encode_params = None if img_fmt in jpg_formats: encode_params = [cv2.IMWRITE_JPEG_QUALITY, quality] elif img_fmt in png_formats: encode_params = [cv2.IMWRITE_PNG_COMPRESSION, quality] ret, buf = cv2.imencode(img_fmt, img, encode_params) assert ret, 'failed encoding image' return pack(header, buf.tostring())
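A round-trip sketch of the record helpers above; 'tmp.rec' is a placeholder path, and the plain string literals match this Python 2 module (bytes and str coincide there):

# Write two values into a record file, then read them back.
record = MXRecordIO('tmp.rec', 'w')
record.write(pack(IRHeader(0, 1.0, 42, 0), 'payload bytes'))
record.close()

record = MXRecordIO('tmp.rec', 'r')
header, payload = unpack(record.read())
print(header.label, header.id, payload)  # 1.0 42 payload bytes
record.close()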
PypiClean
/MeshLabXML-2018.3.tar.gz/MeshLabXML-2018.3/meshlabxml/select.py
from . import util


def all(script, face=True, vert=True):
    """ Select all the faces of the current mesh

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        face (bool): If True the filter will select all the faces.
        vert (bool): If True the filter will select all the vertices.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Select All">\n',
        '    <Param name="allFaces" ',
        'value="{}" '.format(str(face).lower()),
        'description="Select all Faces" ',
        'type="RichBool" ',
        '/>\n',
        '    <Param name="allVerts" ',
        'value="{}" '.format(str(vert).lower()),
        'description="Select all Vertices" ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def none(script, face=True, vert=True):
    """ Clear the current set of selected faces

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        face (bool): If True the filter will deselect all the faces.
        vert (bool): If True the filter will deselect all the vertices.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Select None">\n',
        '    <Param name="allFaces" ',
        'value="{}" '.format(str(face).lower()),
        'description="De-select all Faces" ',
        'type="RichBool" ',
        '/>\n',
        '    <Param name="allVerts" ',
        'value="{}" '.format(str(vert).lower()),
        'description="De-select all Vertices" ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def invert(script, face=True, vert=True):
    """ Invert the current set of selected faces

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        face (bool): If True the filter will invert the selected faces.
        vert (bool): If True the filter will invert the selected vertices.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Invert Selection">\n',
        '    <Param name="InvFaces" ',
        'value="{}" '.format(str(face).lower()),
        'description="Invert Faces" ',
        'type="RichBool" ',
        '/>\n',
        '    <Param name="InvVerts" ',
        'value="{}" '.format(str(vert).lower()),
        'description="Invert Vertices" ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def border(script):
    """ Select vertices and faces on the boundary

    Args:
        script: the FilterScript object or script filename to write
            the filter to.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Select Border"/>\n'
    util.write_filter(script, filter_xml)
    return None


def grow(script, iterations=1):
    """ Grow (dilate, expand) the current set of selected faces

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        iterations (int): the number of times to grow the selection.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Dilate Selection"/>\n'
    for _ in range(iterations):
        util.write_filter(script, filter_xml)
    return None


def shrink(script, iterations=1):
    """ Shrink (erode, reduce) the current set of selected faces

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        iterations (int): the number of times to shrink the selection.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Erode Selection"/>\n'
    for _ in range(iterations):
        util.write_filter(script, filter_xml)
    return None


def self_intersecting_face(script):
    """ Select only self intersecting faces

    Args:
        script: the FilterScript object or script filename to write
            the filter to.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Select Self Intersecting Faces"/>\n'
    util.write_filter(script, filter_xml)
    return None


def nonmanifold_vert(script):
    """ Select the non manifold vertices that do not belong to non manifold
    edges.

    For example, two cones connected by their apex. Vertices incident on
    non manifold edges are ignored.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Select non Manifold Vertices"/>\n'
    util.write_filter(script, filter_xml)
    return None


def nonmanifold_edge(script):
    """ Select the faces and the vertices incident on non manifold edges
    (e.g. edges where more than two faces are incident).

    Note that this function selects the components that are related to
    non manifold edges. The case of non manifold vertices is specifically
    managed by nonmanifold_vert.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = '  <filter name="Select non Manifold Edges "/>\n'
    util.write_filter(script, filter_xml)
    return None


def small_parts(script, ratio=0.2, non_closed_only=False):
    """ Select the small disconnected parts (components) of a mesh.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        ratio (float): This ratio (between 0 and 1) defines the meaning of
            'small' as the threshold ratio between the number of faces of
            the largest component and the other ones. A larger value will
            select more components.
        non_closed_only (bool): Select only non-closed components.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Small component selection">\n',
        '    <Param name="NbFaceRatio" ',
        'value="{}" '.format(ratio),
        'description="Small component ratio" ',
        'type="RichFloat" ',
        '/>\n',
        '    <Param name="NonClosedOnly" ',
        'value="{}" '.format(str(non_closed_only).lower()),
        'description="Select only non closed components" ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def vert_quality(script, min_quality=0.0, max_quality=0.05, inclusive=True):
    """ Select all the faces and vertices within the specified vertex
    quality range.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        min_quality (float): Minimum acceptable quality value.
        max_quality (float): Maximum acceptable quality value.
        inclusive (bool): If True only the faces with ALL the vertices
            within the specified range are selected. Otherwise any face
            with at least one vertex within the range is selected.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Select by Vertex Quality">\n',
        '    <Param name="minQ" ',
        'value="{}" '.format(min_quality),
        'description="Min Quality" ',
        'min="0" ',
        'max="{}" '.format(2 * max_quality),
        'type="RichDynamicFloat" ',
        '/>\n',
        '    <Param name="maxQ" ',
        'value="{}" '.format(max_quality),
        'description="Max Quality" ',
        'min="0" ',
        'max="{}" '.format(2 * max_quality),
        'type="RichDynamicFloat" ',
        '/>\n',
        '    <Param name="Inclusive" ',
        'value="{}" '.format(str(inclusive).lower()),
        'description="Inclusive Sel." ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def face_function(script, function='(fi == 0)'):
    """Boolean function using muparser lib to perform face selection over
    current mesh.

    See help(mlx.muparser_ref) for muparser reference documentation.

    It's possible to use parentheses, per-vertex variables and boolean
    operator: (, ), and, or, <, >, =

    It's possible to use per-face variables like attributes associated to
    the three vertices of every face.

    Variables (per face):
        x0, y0, z0 for first vertex; x1,y1,z1 for second vertex; x2,y2,z2
            for third vertex
        nx0, ny0, nz0, nx1, ny1, nz1, etc. for vertex normals
        r0, g0, b0, a0, etc. for vertex color
        q0, q1, q2 for quality
        wtu0, wtv0, wtu1, wtv1, wtu2, wtv2 (per wedge texture coordinates)
        ti for face texture index (>= ML2016.12)
        vsel0, vsel1, vsel2 for vertex selection (1 yes, 0 no)
            (>= ML2016.12)
        fr, fg, fb, fa for face color (>= ML2016.12)
        fq for face quality (>= ML2016.12)
        fnx, fny, fnz for face normal (>= ML2016.12)
        fsel face selection (1 yes, 0 no) (>= ML2016.12)

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        function (str): a boolean function that will be evaluated in order
            to select a subset of faces.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="Conditional Face Selection">\n',
        '    <Param name="condSelect" ',
        'value="{}" '.format(str(function).replace('&', '&amp;').replace('<', '&lt;')),
        'description="boolean function" ',
        'type="RichString" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def vert_function(script, function='(q < 0)', strict_face_select=True):
    """Boolean function using muparser lib to perform vertex selection over
    current mesh.

    See help(mlx.muparser_ref) for muparser reference documentation.

    It's possible to use parentheses, per-vertex variables and boolean
    operator: (, ), and, or, <, >, =

    It's possible to use the following per-vertex variables in the
    expression:

    Variables:
        x, y, z (coordinates)
        nx, ny, nz (normal)
        r, g, b, a (color)
        q (quality)
        rad
        vi (vertex index)
        vtu, vtv (texture coordinates)
        ti (texture index)
        vsel (is the vertex selected? 1 yes, 0 no)
        and all custom vertex attributes already defined by user.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        function (str): a boolean function that will be evaluated in order
            to select a subset of vertices. Example: (y > 0) and (ny > 0)
        strict_face_select (bool): if True a face is selected if ALL its
            vertices are selected. If False a face is selected if at least
            one of its vertices is selected. ML v1.3.4BETA only; this is
            ignored in 2016.12. In 2016.12 only vertices are selected.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    if script.ml_version == '1.3.4BETA':
        strict_select = ''.join([
            '    <Param name="strictSelect" ',
            'value="{}" '.format(str(strict_face_select).lower()),
            'description="Strict face selection" ',
            'type="RichBool" ',
            '/>\n',
        ])
    else:
        strict_select = ''
    filter_xml = ''.join([
        '  <filter name="Conditional Vertex Selection">\n',
        '    <Param name="condSelect" ',
        'value="{}" '.format(str(function).replace('&', '&amp;').replace('<', '&lt;')),
        'description="boolean function" ',
        'type="RichString" ',
        '/>\n',
        strict_select,
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None


def cylindrical_vert(script, radius=1.0, inside=True):
    """Select all vertices within a cylindrical radius

    The cylinder is oriented along the z axis and centered on the origin.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        radius (float): radius of the cylinder
        inside (bool): if True the vertices inside the cylinder are
            selected, otherwise the vertices outside are selected.

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    if inside:
        function = 'sqrt(x^2+y^2)<={}'.format(radius)
    else:
        function = 'sqrt(x^2+y^2)>={}'.format(radius)
    vert_function(script, function=function)
    return None


def spherical_vert(script, radius=1.0, center_pt=(0.0, 0.0, 0.0)):
    """Select all vertices within a spherical radius

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        radius (float): radius of the sphere
        center_pt (3 coordinate tuple or list): center point of the sphere

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    function = 'sqrt((x-{})^2+(y-{})^2+(z-{})^2)<={}'.format(
        center_pt[0], center_pt[1], center_pt[2], radius)
    vert_function(script, function=function)
    return None
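# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the upstream module). Assumes the
# usual meshlabxml workflow in which filters are queued on a FilterScript
# object and executed afterwards; `mlx.delete.selected` is named here as a
# hypothetical follow-up step from a companion delete module.
if __name__ == '__main__':
    import meshlabxml as mlx

    script = mlx.FilterScript(file_in='mesh.ply', file_out='cleaned.ply')
    none(script)                        # clear any existing selection
    small_parts(script, ratio=0.1)      # select tiny disconnected pieces
    grow(script, iterations=2)          # pad the selection outward
    mlx.delete.selected(script)         # hypothetical cleanup step
    script.run_script()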
PypiClean
/MGP_SDK-1.1.1.tar.gz/MGP_SDK-1.1.1/src/MGP_SDK/tasking_service/tasking.py
import json
import requests
from datetime import datetime
from MGP_SDK import process
from MGP_SDK.auth.auth import Auth


class Tasking:

    def __init__(self, auth: Auth):
        self.auth = auth
        self.api_version = self.auth.api_version
        self.base_url = f'{self.auth.api_base_url}/tasking/{self.api_version}/tasking'
        self.token = self.auth.refresh_token()
        self.authorization = {"Authorization": f"Bearer {self.token}"}

    def create_new_tasking(self, start_datetime: str, end_datetime: str, aoi_geojson: dict, recipe: str,
                           order_templates: dict, validate=False, **kwargs):
        """
        Make a tasking request based on imagery recipes
        Args:
            start_datetime (string) = ISO-8601 formatted date when the tasking should start
            end_datetime (string) = ISO-8601 formatted date when the tasking should end
            aoi_geojson (dict) = GeoJSON polygon of the area to cover with tasking, e.g.
                {"type": "Polygon", "coordinates": [[[...]]]}
            recipe (string) = The name of one of the configured recipes for tasking, e.g. "50cm_Color" or
                "30cm_Color"
            order_templates (dict) = Template for the order to be placed. See ordering_service for examples
            validate (bool) = If True, only validate the tasking request instead of submitting it. Defaults to
                False
        Kwargs:
            max_cloud_cover (string) = Maximum cloud cover.
            max_off_nadir_angle (string) = Maximum off nadir angle.
            min_sun_elevation_angle (string) = Minimum sun elevation angle.
        Returns:
            Dictionary of the submitted tasking request's details
        """
        time_check = [start_datetime, end_datetime]
        for time in time_check:
            try:
                datetime.fromisoformat(time.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                raise ValueError(f'{time} is not in a proper format. Example: 2020-07-10T15:00:00+00:00')
        # Recipe names are case-sensitive; string identity checks ("is not")
        # are unreliable here, so use a membership test instead.
        if recipe not in ('50cm_Color', '30cm_Color'):
            raise Exception('Recipe must be one of 50cm_Color or 30cm_Color')
        payload = {
            'start_datetime': start_datetime,
            'end_datetime': end_datetime,
            'aoi_geojson': aoi_geojson,
            'recipe': recipe,
            'order_templates': order_templates
        }
        optional_parameters = ['max_cloud_cover', 'max_off_nadir_angle', 'min_sun_elevation_angle']
        data = {**{k: v for k, v in kwargs.items() if k in optional_parameters}, **payload}
        if validate:
            url = self.base_url + "?validation_only=true"
        else:
            url = self.base_url
        response = requests.post(url, data=json.dumps(data), headers=self.authorization, verify=self.auth.SSL)
        process._response_handler(response)
        return response.json()

    def cancel_tasking(self, tasking_id: str, reason: str = None):
        """
        Cancels a tasking request
        Args:
            tasking_id (string) = ID of the requested tasking
            reason (string) = Reason for canceling the tasking
        Returns:
            Dictionary of the cancelled tasking request's details
        """
        payload = {}
        if reason:
            payload['reason'] = reason
        url = f'{self.base_url}/{tasking_id}/cancel'
        response = requests.post(url, json=payload, headers=self.authorization, verify=self.auth.SSL)
        process._response_handler(response)
        return response.json()

    def get_tasking_list(self, **kwargs):
        """
        List all tasking requests
        Kwargs:
            limit (int) = How many items to return in the response list. Default 10
            filter (list) = Filter results that match values contained in the given key separated by a colon.
            sort (string) = Indicates sort order, asc (default) for ascending order (alphabetical by name) and
                desc for descending order (reverse alphabetical by name)
        Returns:
            Dictionary of tasking requests and their details
        """
        optional_parameters = ['limit', 'filter', 'sort']
        params = {**{k: v for k, v in kwargs.items() if k in optional_parameters}}
        response = requests.get(self.base_url, headers=self.authorization, params=params, verify=self.auth.SSL)
        # TODO handle pagination
        process._response_handler(response)
        return response.json()

    def tasking_info(self, tasking_id: str):
        """
        Retrieves the details of a tasking request
        Args:
            tasking_id (string) = ID of the requested tasking
        Returns:
            Dictionary of a tasking request and its details
        """
        url = self.base_url + '/' + tasking_id
        response = requests.get(url, headers=self.authorization, verify=self.auth.SSL)
        process._response_handler(response)
        return response.json()
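# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the upstream module). How Auth is
# constructed depends on your MGP_SDK configuration; the no-argument call
# below is an assumption, as are the AOI and the empty order template.
if __name__ == '__main__':
    auth = Auth()  # assumed: credentials come from the MGP config file
    tasking = Tasking(auth)
    aoi = {"type": "Polygon",
           "coordinates": [[[-104.8, 39.7], [-104.7, 39.7], [-104.7, 39.8],
                            [-104.8, 39.8], [-104.8, 39.7]]]}
    # validate=True only checks the request; no tasking order is placed.
    result = tasking.create_new_tasking(
        start_datetime='2024-01-01T00:00:00+00:00',
        end_datetime='2024-02-01T00:00:00+00:00',
        aoi_geojson=aoi,
        recipe='50cm_Color',
        order_templates={},  # hypothetical empty template for a dry run
        validate=True,
        max_cloud_cover='10')
    print(result)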
PypiClean
/dr14-t.meter-1.0.16.tar.gz/dr14-t.meter-1.0.16/dr14tmeter/read_metadata.py
import subprocess
import sys
import os
import re

from dr14tmeter.audio_decoder import AudioDecoder
from dr14tmeter.dr14_global import get_ffmpeg_cmd

# Test example !!!!!
# a = subprocess.check_output( [ "ffprobe" , "-show_format" , "/media/esterno_xfs/data/Musica/Musica/aavv/01-blitzkrieg_bop_160_lame_abr.mp3" ] , stderr=subprocess.STDOUT , shell=False )


class RetirveMetadata:

    def __init__(self):
        self._album = {}
        self._artist = {}
        self._tracks = {}

        if get_ffmpeg_cmd() == "ffmpeg":
            self.__ffprobe_cmd = "ffprobe"
        elif get_ffmpeg_cmd() == "avconv":
            self.__ffprobe_cmd = "avprobe"
        else:
            # Fall back to ffprobe so the attribute is always defined.
            self.__ffprobe_cmd = "ffprobe"

    def scan_dir(self, dir_name, files_list=None):
        self._album = {}
        self._tracks = {}
        self._artist = {}

        if files_list is None:
            dir_name = os.path.abspath(dir_name)
            files_list = sorted(os.listdir(dir_name))

        ad = AudioDecoder()
        for file_name in files_list:
            (fn, ext) = os.path.splitext(file_name)
            full_file = os.path.join(dir_name, file_name)
            if ext in ad.formats:
                self.scan_file(full_file)
        #print( self._tracks )

    def scan_file(self, file_name):
        try:
            data_txt = subprocess.check_output(
                [self.__ffprobe_cmd, "-show_format", file_name],
                stderr=subprocess.STDOUT, shell=False)
        except Exception:
            data_txt = "ffprobe ERROR"

        if data_txt != "ffprobe ERROR":
            try:
                data_txt = data_txt.decode(encoding='UTF-8')
            except UnicodeDecodeError:
                data_txt = data_txt.decode(encoding='ISO-8859-1')

        track = {}
        re_flags = (re.MULTILINE | re.IGNORECASE | re.UNICODE)

        m = re.search(r"\s*track\s*\:\s*(\d+)$", data_txt, re_flags)
        if m is not None:
            track['nr'] = int(m.group(1))

        m = re.search(r"\s*album\s*\:\s*(.*)$", data_txt, re_flags)
        if m is not None:
            self._album.setdefault(m.group(1), 0)
            self._album[m.group(1)] += 1

        m = re.search(r"\s*title\s*\:\s*(.*)$", data_txt, re_flags)
        if m is not None:
            track['title'] = m.group(1)

        m = re.search(r"\s*artist\s*\:\s*(.*)$", data_txt, re_flags)
        if m is not None:
            self._artist.setdefault(m.group(1), 0)
            self._artist[m.group(1)] += 1

        m = re.search(r"\s*genre\s*\:\s*(.*)$", data_txt, re_flags)
        if m is not None:
            track['genre'] = m.group(1)

        ##########################################
        # string examples:
        # Audio: flac, 44100 Hz, stereo, s16
        # Stream #0:0(und): Audio: alac (alac / 0x63616C61), 44100 Hz, 2 channels, s16, 634 kb/s
        # Stream #0:0: Audio: flac, 44100 Hz, stereo, s16
        m = re.search(
            r"\:\s*Audio\s*\:\s*(\w+)[^,]*,\s*(\d*)\s*Hz\s*,\s*([\w\s]*)\s*,\s*s(\d+)",
            data_txt, re_flags)
        if m is not None:
            track['codec'] = m.group(1)
            track['s_rate'] = m.group(2)
            track['channel'] = m.group(3)
            track['bit'] = m.group(4)

        m = re.search(r"\,\s*bitrate\s*\:\s*(\d*)\s*kb", data_txt, re_flags)
        if m is not None:
            track['bitrate'] = m.group(1)

        (foo, f_key) = os.path.split(file_name)
        self._tracks[f_key] = track

    def album_len(self):
        return len(self._tracks)

    def get_album_title(self):
        if len(self._album) > 1:
            return "Various"
        elif len(self._album) == 0:
            return None
        else:
            for k in self._album.keys():
                res = k
            return res

    def get_album_artist(self):
        if len(self._artist) > 1:
            return "Various Artists"
        elif len(self._artist) == 0:
            return None
        else:
            for k in self._artist.keys():
                res = k
            return res

    def get_value(self, file_name, field):
        f = self._tracks.get(file_name, None)
        if f is None:
            return None
        return f.get(field, None)
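# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the upstream module). Requires
# ffprobe/avprobe on the PATH; the directory path is a placeholder.
if __name__ == '__main__':
    meta = RetirveMetadata()
    meta.scan_dir('/path/to/album')    # scans every decodable file found
    print(meta.get_album_title())      # "Various" if the tags disagree
    print(meta.get_album_artist())
    print(meta.album_len(), 'tracks scanned')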
PypiClean
/KratosShallowWaterApplication-9.4-cp311-cp311-win_amd64.whl/KratosMultiphysics/ShallowWaterApplication/set_initial_water_level_process.py
import KratosMultiphysics as KM
import KratosMultiphysics.ShallowWaterApplication as SW


def Factory(settings, Model):
    if not isinstance(settings, KM.Parameters):
        raise Exception("expected input shall be a Parameters object, encapsulating a json string")
    return SetInitialWaterLevelProcess(Model, settings["Parameters"])


## This process sets the value of a scalar variable using the AssignScalarVariableProcess.
class SetInitialWaterLevelProcess(KM.Process):

    def __init__(self, Model, settings):
        KM.Process.__init__(self)

        default_settings = KM.Parameters("""
            {
                "model_part_name"      : "please_specify_model_part_name",
                "variable_name"        : "HEIGHT",
                "constrained"          : false,
                "interval"             : [0.0, 0.0],
                "value"                : 1.0,
                "set_minimum_height"   : true,
                "minimum_height_value" : 1e-4
            }
            """)

        if settings.Has("value"):
            if settings["value"].IsString():
                default_settings["value"].SetString("1.0")
        settings.ValidateAndAssignDefaults(default_settings)

        self.variable = settings["variable_name"].GetString()
        self.model_part = Model[settings["model_part_name"].GetString()]
        self.set_minimum_height = settings["set_minimum_height"].GetBool()
        self.minimum_height = settings["minimum_height_value"].GetDouble()

        settings.RemoveValue("set_minimum_height")
        settings.RemoveValue("minimum_height_value")
        from KratosMultiphysics.assign_scalar_variable_process import AssignScalarVariableProcess
        self.process = AssignScalarVariableProcess(Model, settings)

    def ExecuteInitialize(self):
        self.process.ExecuteInitializeSolutionStep()
        if self.variable == "HEIGHT":
            SW.ShallowWaterUtilities().ComputeFreeSurfaceElevation(self.model_part)
        elif self.variable == "FREE_SURFACE_ELEVATION":
            SW.ShallowWaterUtilities().ComputeHeightFromFreeSurface(self.model_part)
        if self.set_minimum_height:
            SW.ShallowWaterUtilities().SetMinimumValue(self.model_part, SW.HEIGHT, self.minimum_height)
            SW.ShallowWaterUtilities().ComputeFreeSurfaceElevation(self.model_part)
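# ---------------------------------------------------------------------------
# Configuration sketch (illustrative, not part of the upstream file). A
# process entry of roughly this shape in ProjectParameters.json would
# instantiate SetInitialWaterLevelProcess via Factory; the model part name
# and the value expression are placeholders.
#
# {
#     "python_module": "set_initial_water_level_process",
#     "kratos_module": "KratosMultiphysics.ShallowWaterApplication",
#     "Parameters": {
#         "model_part_name": "model_part.initial_water_level",
#         "variable_name": "FREE_SURFACE_ELEVATION",
#         "value": "1.0 + 0.1*x",
#         "set_minimum_height": true,
#         "minimum_height_value": 1e-4
#     }
# }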
PypiClean
/EchoTorch-0.1.1.tar.gz/EchoTorch-0.1.1/echotorch/transforms/text/Character2Gram.py
# Imports
import torch
import numpy as np

from .Transformer import Transformer


# Transform text to character 2-gram
class Character2Gram(Transformer):
    """
    Transform text to character 2-grams
    """

    # Constructor
    def __init__(self, uppercase=False, gram_to_ix=None, start_ix=0, fixed_length=-1, overlapse=True):
        """
        Constructor
        """
        # Gram to ix
        if gram_to_ix is not None:
            self.gram_count = len(gram_to_ix.keys())
            self.gram_to_ix = gram_to_ix
        else:
            self.gram_count = start_ix
            self.gram_to_ix = dict()
        # end if

        # Ix to gram
        self.ix_to_gram = dict()
        if gram_to_ix is not None:
            for gram in gram_to_ix.keys():
                self.ix_to_gram[gram_to_ix[gram]] = gram
            # end for
        # end if

        # Properties
        self.uppercase = uppercase
        self.fixed_length = fixed_length
        self.overlapse = overlapse

        # Super constructor
        super(Character2Gram, self).__init__()
    # end __init__

    ##############################################
    # Public
    ##############################################

    ##############################################
    # Properties
    ##############################################

    # Get the number of inputs
    @property
    def input_dim(self):
        """
        Get the number of inputs.
        :return: The input size.
        """
        return 1
    # end input_dim

    # Vocabulary size
    @property
    def voc_size(self):
        """
        Vocabulary size
        :return:
        """
        return self.gram_count
    # end voc_size

    ##############################################
    # Private
    ##############################################

    # To upper
    def to_upper(self, gram):
        """
        To upper
        :param gram:
        :return:
        """
        if not self.uppercase:
            return gram.lower()
        # end if
        return gram
    # end to_upper

    ##############################################
    # Override
    ##############################################

    # Convert a string
    def __call__(self, text):
        """
        Convert a string to a ESN input
        :param text: Text to convert
        :return: Tensor of word vectors
        """
        # Step between consecutive 2-grams (1 = overlapping, 2 = disjoint)
        if self.overlapse:
            step = 1
        else:
            step = 2
        # end if

        # Add to voc
        for i in np.arange(0, len(text) - 1, step):
            gram = self.to_upper(text[i] + text[i + 1])
            if gram not in self.gram_to_ix.keys():
                self.gram_to_ix[gram] = self.gram_count
                self.ix_to_gram[self.gram_count] = gram
                self.gram_count += 1
            # end if
        # end for

        # List of character 2-grams, using the same step as above so that
        # every gram looked up here was added to the vocabulary.
        text_idxs = [self.gram_to_ix[self.to_upper(text[i] + text[i + 1])]
                     for i in np.arange(0, len(text) - 1, step)]

        # To long tensor
        text_idxs = torch.LongTensor(text_idxs)

        # Check length
        if self.fixed_length != -1:
            if text_idxs.size(0) > self.fixed_length:
                text_idxs = text_idxs[:self.fixed_length]
            elif text_idxs.size(0) < self.fixed_length:
                zero_idxs = torch.LongTensor(self.fixed_length).fill_(0)
                zero_idxs[:text_idxs.size(0)] = text_idxs
                text_idxs = zero_idxs
            # end if
        # end if

        return text_idxs, text_idxs.size(0)
    # end convert

# end Character2Gram
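# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the upstream module). Assumes the
# Transformer base class needs no constructor arguments beyond the defaults.
if __name__ == '__main__':
    transformer = Character2Gram()
    idxs, length = transformer("hello")
    # Overlapping 2-grams of "hello" are he, el, ll, lo -> 4 indices
    print(idxs, length)           # tensor([0, 1, 2, 3]) 4
    print(transformer.voc_size)   # 4 distinct grams seen so far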
PypiClean
/Mopidy-Muse-0.0.27.tar.gz/Mopidy-Muse-0.0.27/mopidy_muse/static/client/legacy/browse.f1ff8b53.js
import"core-js/modules/es.array.iterator.js";import"core-js/modules/es.object.to-string.js";import"core-js/modules/es.promise.js";import"core-js/modules/es.string.iterator.js";import"core-js/modules/web.dom-collections.iterator.js";import"core-js/modules/es.array.map.js";import"core-js/modules/web.url.js";import{_ as n,a as r,b as e,d as s,e as t,f as o,S as a,s as c,g as i,h as l,t as u,j as f,k as m,l as p,r as d,n as h,m as v,o as j,p as g,q as y,w as $,u as b,A as w,B as k,C as x,D as E,G as A,I,F as D,H as _,z as V,X as P,Y as R,N as O,i as B,aC as L,aD as q,aE as C,au as N,L as T,M as z,Q as F,R as H,Z as S,a0 as U,av as G,aA as M,a2 as Q,ad as W,W as X,ae as Y,ac as Z,aF as J,a4 as K,a5 as nn,ak as rn,al as en,af as sn,aR as tn,aS as on,aT as an,aU as cn}from"./client.4760e5ea.js";import"core-js/modules/es.symbol.js";import"core-js/modules/es.symbol.description.js";import"core-js/modules/es.symbol.iterator.js";import"core-js/modules/es.symbol.async-iterator.js";import"core-js/modules/es.symbol.to-string-tag.js";import"core-js/modules/es.json.to-string-tag.js";import"core-js/modules/es.math.to-string-tag.js";import"core-js/modules/es.object.get-prototype-of.js";import"core-js/modules/es.function.name.js";import"core-js/modules/es.object.set-prototype-of.js";import"core-js/modules/web.dom-collections.for-each.js";import"core-js/modules/es.array.slice.js";import"core-js/modules/es.regexp.exec.js";import"core-js/modules/es.string.split.js";import"core-js/modules/es.string.replace.js";import"core-js/modules/es.string.starts-with.js";import"core-js/modules/es.string.search.js";import"core-js/modules/es.object.assign.js";import"core-js/modules/es.string.match.js";import"core-js/modules/es.array.filter.js";import"core-js/modules/es.regexp.to-string.js";import"core-js/modules/es.array.reduce.js";import"core-js/modules/es.reflect.construct.js";import"core-js/modules/es.object.keys.js";import"core-js/modules/es.set.js";import"core-js/modules/es.array.concat.js";import"core-js/modules/es.typed-array.int32-array.js";import"core-js/modules/es.typed-array.copy-within.js";import"core-js/modules/es.typed-array.every.js";import"core-js/modules/es.typed-array.fill.js";import"core-js/modules/es.typed-array.filter.js";import"core-js/modules/es.typed-array.find.js";import"core-js/modules/es.typed-array.find-index.js";import"core-js/modules/es.typed-array.for-each.js";import"core-js/modules/es.typed-array.includes.js";import"core-js/modules/es.typed-array.index-of.js";import"core-js/modules/es.typed-array.iterator.js";import"core-js/modules/es.typed-array.join.js";import"core-js/modules/es.typed-array.last-index-of.js";import"core-js/modules/es.typed-array.map.js";import"core-js/modules/es.typed-array.reduce.js";import"core-js/modules/es.typed-array.reduce-right.js";import"core-js/modules/es.typed-array.reverse.js";import"core-js/modules/es.typed-array.set.js";import"core-js/modules/es.typed-array.slice.js";import"core-js/modules/es.typed-array.some.js";import"core-js/modules/es.typed-array.sort.js";import"core-js/modules/es.typed-array.subarray.js";import"core-js/modules/es.typed-array.to-locale-string.js";import"core-js/modules/es.typed-array.to-string.js";import"core-js/modules/es.array.sort.js";import"core-js/modules/es.array.index-of.js";import"core-js/modules/es.object.get-own-property-descriptors.js";import"core-js/modules/es.array.from.js";import"core-js/modules/es.array.splice.js";import"core-js/modules/es.string.trim.js";import"core-js/modules/es.array.join.js";import"core-js/modules/es.string.an
chor.js";import"core-js/modules/es.map.js";import"core-js/modules/es.array.fill.js";import"core-js/modules/es.parse-float.js";import"core-js/modules/es.string.pad-start.js";import"core-js/modules/es.object.values.js";import"core-js/modules/es.array.find.js";import"core-js/modules/es.object.get-own-property-descriptor.js";import"core-js/modules/es.reflect.own-keys.js";import{c as ln}from"./clickOutside.eb6c57be.js";import"core-js/modules/es.array.flat.js";function un(n){var s=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(n){return!1}}();return function(){var t,o=r(n);if(s){var a=r(this).constructor;t=Reflect.construct(o,arguments,a)}else t=o.apply(this,arguments);return e(this,t)}}function fn(n){i(n,"svelte-6ensgs",'.breadcrumb.svelte-6ensgs.svelte-6ensgs.svelte-6ensgs{font-size:1rem;white-space:nowrap;margin-bottom:1.5rem}.breadcrumb.svelte-6ensgs a.svelte-6ensgs.svelte-6ensgs{align-items:center;display:flex;justify-content:center;padding:0 0.75em}.breadcrumb.svelte-6ensgs a.svelte-6ensgs.svelte-6ensgs:hover{color:#363636}.breadcrumb.svelte-6ensgs li.svelte-6ensgs.svelte-6ensgs{align-items:center;display:flex}.breadcrumb.svelte-6ensgs li:first-child a.svelte-6ensgs.svelte-6ensgs{padding-left:0}.breadcrumb.svelte-6ensgs li.is-active a.svelte-6ensgs.svelte-6ensgs{color:#363636;cursor:default;pointer-events:none}.breadcrumb.svelte-6ensgs li.svelte-6ensgs+li.svelte-6ensgs::before{color:#b5b5b5;content:"\\0002f"}.breadcrumb.svelte-6ensgs ul.svelte-6ensgs.svelte-6ensgs{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}')}function mn(n,r,e){var s=n.slice();return s[26]=r[e],s[28]=e,s}function pn(n,r,e){var s=n.slice();return s[30]=r[e],s[28]=e,s}function dn(n){var r,e,s,t,o,a,c=n[30].name+"";function i(){return n[15](n[30],n[28])}return{c:function(){r=l("li"),e=l("a"),s=u(c),t=f(),this.h()},l:function(n){r=m(n,"LI",{class:!0});var o=p(r);e=m(o,"A",{href:!0,class:!0});var a=p(e);s=d(a,c),a.forEach(h),t=v(o),o.forEach(h),this.h()},h:function(){j(e,"href",null),j(e,"class","svelte-6ensgs"),j(r,"class","svelte-6ensgs")},m:function(n,c){g(n,r,c),y(r,e),y(e,s),y(r,t),o||(a=$(e,"click",i),o=!0)},p:function(r,e){n=r,1&e[0]&&c!==(c=n[30].name+"")&&b(s,c)},d:function(n){n&&h(r),o=!1,a()}}}function hn(n){var r,e,s,t,o,a,c,i,b,P,R,O,B,L,q,C,N,T,z,F,H,W,X=[jn,vn],Y=[];function Z(n,r){return n[4]?0:1}return t=Z(n),o=Y[t]=X[t](n),P=new S({props:{icon:U,class:"icon is-small"}}),L=new S({props:{icon:G,class:"icon is-small"}}),T=new S({props:{icon:M,class:"icon is-small"}}),{c:function(){r=l("div"),e=l("div"),s=l("div"),o.c(),a=f(),c=l("div"),i=l("div"),b=l("a"),w(P.$$.fragment),R=u("  \n Play now"),O=f(),B=l("a"),w(L.$$.fragment),q=u("  \n Play shuffle"),C=f(),N=l("a"),w(T.$$.fragment),z=u("  \n Add to queue"),this.h()},l:function(n){r=m(n,"DIV",{class:!0});var t=p(r);e=m(t,"DIV",{class:!0});var l=p(e);s=m(l,"DIV",{class:!0});var u=p(s);o.l(u),u.forEach(h),a=v(l),c=m(l,"DIV",{class:!0,id:!0,role:!0});var f=p(c);i=m(f,"DIV",{class:!0});var j=p(i);b=m(j,"A",{href:!0,class:!0});var g=p(b);k(P.$$.fragment,g),R=d(g,"  \n Play now"),g.forEach(h),O=v(j),B=m(j,"A",{href:!0,class:!0});var y=p(B);k(L.$$.fragment,y),q=d(y,"  \n Play shuffle"),y.forEach(h),C=v(j),N=m(j,"A",{href:!0,class:!0});var $=p(N);k(T.$$.fragment,$),z=d($,"  \n Add to 
queue"),$.forEach(h),j.forEach(h),f.forEach(h),l.forEach(h),t.forEach(h),this.h()},h:function(){j(s,"class","dropdown-trigger"),j(b,"href",null),j(b,"class","dropdown-item"),j(B,"href",null),j(B,"class","dropdown-item"),j(N,"href",null),j(N,"class","dropdown-item"),j(i,"class","dropdown-content"),j(c,"class","dropdown-menu"),j(c,"id","dropdown-menu"),j(c,"role","menu"),j(e,"class","dropdown is-right"),x(e,"is-active",n[4]),j(r,"class","column is-narrow")},m:function(o,l){g(o,r,l),y(r,e),y(e,s),Y[t].m(s,null),y(e,a),y(e,c),y(c,i),y(i,b),E(P,b,null),y(b,R),y(i,O),y(i,B),E(L,B,null),y(B,q),y(i,C),y(i,N),E(T,N,null),y(N,z),F=!0,H||(W=[$(s,"click",n[16]),$(b,"click",n[17]),$(B,"click",n[18]),$(N,"click",n[19])],H=!0)},p:function(n,r){var a=t;(t=Z(n))===a?Y[t].p(n,r):(Q(),A(Y[a],1,1,function(){Y[a]=null}),I(),(o=Y[t])?o.p(n,r):(o=Y[t]=X[t](n)).c(),D(o,1),o.m(s,null)),16&r[0]&&x(e,"is-active",n[4])},i:function(n){F||(D(o),D(P.$$.fragment,n),D(L.$$.fragment,n),D(T.$$.fragment,n),F=!0)},o:function(n){A(o),A(P.$$.fragment,n),A(L.$$.fragment,n),A(T.$$.fragment,n),F=!1},d:function(n){n&&h(r),Y[t].d(),_(P),_(L),_(T),H=!1,V(W)}}}function vn(n){var r,e,s;return e=new S({props:{icon:W,class:"icon","aria-haspopup":"true","aria-controls":"dropdown-menu"}}),{c:function(){r=l("a"),w(e.$$.fragment),this.h()},l:function(n){r=m(n,"A",{href:!0,class:!0});var s=p(r);k(e.$$.fragment,s),s.forEach(h),this.h()},h:function(){j(r,"href",null),j(r,"class","button")},m:function(n,t){g(n,r,t),E(e,r,null),s=!0},p:X,i:function(n){s||(D(e.$$.fragment,n),s=!0)},o:function(n){A(e.$$.fragment,n),s=!1},d:function(n){n&&h(r),_(e)}}}function jn(n){var r,e,s;return e=new S({props:{icon:Y,class:"icon","aria-haspopup":"true","aria-controls":"dropdown-menu"}}),{c:function(){r=l("a"),w(e.$$.fragment),this.h()},l:function(n){r=m(n,"A",{href:!0,class:!0});var s=p(r);k(e.$$.fragment,s),s.forEach(h),this.h()},h:function(){j(r,"href",null),j(r,"class","button")},m:function(n,t){g(n,r,t),E(e,r,null),s=!0},p:X,i:function(n){s||(D(e.$$.fragment,n),s=!0)},o:function(n){A(e.$$.fragment,n),s=!1},d:function(n){n&&h(r),_(e)}}}function gn(n){var r,e,s,t={ctx:n,current:null,token:null,hasCatch:!0,pending:bn,then:$n,catch:yn,error:29,blocks:[,,,]};return P(e=n[2],t),{c:function(){r=l("div"),t.block.c(),this.h()},l:function(n){r=m(n,"DIV",{class:!0});var e=p(r);t.block.l(e),e.forEach(h),this.h()},h:function(){j(r,"class","list is-hoverable")},m:function(n,e){g(n,r,e),t.block.m(r,t.anchor=null),t.mount=function(){return r},t.anchor=null,s=!0},p:function(r,s){n=r,t.ctx=n,4&s[0]&&e!==(e=n[2])&&P(e,t)||R(t,n,s)},i:function(n){s||(D(t.block),s=!0)},o:function(n){for(var r=0;r<3;r+=1){var e=t.blocks[r];A(e)}s=!1},d:function(n){n&&h(r),t.block.d(),t.token=null,t=null}}}function yn(n){var r,e,s=n[29].message+"";return{c:function(){r=l("p"),e=u(s),this.h()},l:function(n){r=m(n,"P",{class:!0,style:!0});var t=p(r);e=d(t,s),t.forEach(h),this.h()},h:function(){j(r,"class","list-item"),Z(r,"color","red")},m:function(n,s){g(n,r,s),y(r,e)},p:function(n,r){4&r[0]&&s!==(s=n[29].message+"")&&b(e,s)},i:X,o:X,d:function(n){n&&h(r)}}}function $n(n){return{c:X,l:X,m:X,p:X,i:X,o:X,d:X}}function bn(n){var r,e,s,t,o=[kn,wn],a=[];function c(n,r){return n[5]?0:1}return r=c(n),e=a[r]=o[r](n),{c:function(){e.c(),s=T()},l:function(n){e.l(n),s=T()},m:function(n,e){a[r].m(n,e),g(n,s,e),t=!0},p:function(n,t){var 
i=r;(r=c(n))===i?a[r].p(n,t):(Q(),A(a[i],1,1,function(){a[i]=null}),I(),(e=a[r])?e.p(n,t):(e=a[r]=o[r](n)).c(),D(e,1),e.m(s.parentNode,s))},i:function(n){t||(D(e),t=!0)},o:function(n){A(e),t=!1},d:function(n){a[r].d(n),n&&h(s)}}}function wn(n){var r,e,s,t;return s=new S({props:{icon:en,class:"icon is-24",spin:!0}}),{c:function(){r=l("p"),e=u("Connecting to mopidy     \n "),w(s.$$.fragment),this.h()},l:function(n){r=m(n,"P",{class:!0});var t=p(r);e=d(t,"Connecting to mopidy     \n "),k(s.$$.fragment,t),t.forEach(h),this.h()},h:function(){j(r,"class","list-item")},m:function(n,o){g(n,r,o),y(r,e),E(s,r,null),t=!0},p:X,i:function(n){t||(D(s.$$.fragment,n),t=!0)},o:function(n){A(s.$$.fragment,n),t=!1},d:function(n){n&&h(r),_(s)}}}function kn(n){var r,e,s,t;return s=new S({props:{icon:en,spin:!0,class:"icon"}}),{c:function(){r=l("p"),e=u("Loading sources    \n "),w(s.$$.fragment),this.h()},l:function(n){r=m(n,"P",{class:!0});var t=p(r);e=d(t,"Loading sources    \n "),k(s.$$.fragment,t),t.forEach(h),this.h()},h:function(){j(r,"class","list-item")},m:function(n,o){g(n,r,o),y(r,e),E(s,r,null),t=!0},p:X,i:function(n){t||(D(s.$$.fragment,n),t=!0)},o:function(n){A(s.$$.fragment,n),t=!1},d:function(n){n&&h(r),_(s)}}}function xn(n){for(var r,e,s,t,o=n[1],a=[],c=0;c<o.length;c+=1)a[c]=Vn(mn(n,o,c));var i=function(n){return A(a[n],1,1,function(){a[n]=null})},u=null;return o.length||(u=En()),{c:function(){r=l("div");for(var n=0;n<a.length;n+=1)a[n].c();u&&u.c(),this.h()},l:function(n){r=m(n,"DIV",{class:!0});for(var e=p(r),s=0;s<a.length;s+=1)a[s].l(e);u&&u.l(e),e.forEach(h),this.h()},h:function(){j(r,"class","list is-hoverable")},m:function(o,c){g(o,r,c);for(var i=0;i<a.length;i+=1)a[i].m(r,null);u&&u.m(r,null),e=!0,s||(t=[O(ln.call(null,r)),$(r,"click_outside",n[22])],s=!0)},p:function(n,e){if(846&e[0]){var s;for(o=n[1],s=0;s<o.length;s+=1){var t=mn(n,o,s);a[s]?(a[s].p(t,e),D(a[s],1)):(a[s]=Vn(t),a[s].c(),D(a[s],1),a[s].m(r,null))}for(Q(),s=o.length;s<a.length;s+=1)i(s);I(),o.length?u&&(u.d(1),u=null):u||((u=En()).c(),u.m(r,null))}},i:function(n){if(!e){for(var r=0;r<o.length;r+=1)D(a[r]);e=!0}},o:function(n){a=a.filter(Boolean);for(var r=0;r<a.length;r+=1)A(a[r]);e=!1},d:function(n){n&&h(r),N(a,n),u&&u.d(),s=!1,V(t)}}}function En(n){var r,e;return{c:function(){r=l("a"),e=u("no results\n "),this.h()},l:function(n){r=m(n,"A",{href:!0,class:!0});var s=p(r);e=d(s,"no results\n "),s.forEach(h),this.h()},h:function(){j(r,"href",null),j(r,"class","list-item")},m:function(n,s){g(n,r,s),y(r,e)},d:function(n){n&&h(r)}}}function An(n){var r,e,s,t,o,a,c=[Dn,In],i=[];function u(n,r){return n[3]==n[28]?0:1}function f(){return n[21](n[28])}return e=u(n),s=i[e]=c[e](n),{c:function(){r=l("div"),s.c(),this.h()},l:function(n){r=m(n,"DIV",{class:!0});var e=p(r);s.l(e),e.forEach(h),this.h()},h:function(){j(r,"class","column is-narrow")},m:function(n,s){g(n,r,s),i[e].m(r,null),t=!0,o||(a=$(r,"click",f),o=!0)},p:function(t,o){var a=e;(e=u(n=t))===a?i[e].p(n,o):(Q(),A(i[a],1,1,function(){i[a]=null}),I(),(s=i[e])?s.p(n,o):(s=i[e]=c[e](n)).c(),D(s,1),s.m(r,null))},i:function(n){t||(D(s),t=!0)},o:function(n){A(s),t=!1},d:function(n){n&&h(r),i[e].d(),o=!1,a()}}}function In(n){var r,e;return r=new S({props:{icon:W,class:"icon","aria-haspopup":"true","aria-controls":"dropdown-menu"}}),{c:function(){w(r.$$.fragment)},l:function(n){k(r.$$.fragment,n)},m:function(n,s){E(r,n,s),e=!0},p:X,i:function(n){e||(D(r.$$.fragment,n),e=!0)},o:function(n){A(r.$$.fragment,n),e=!1},d:function(n){_(r,n)}}}function Dn(n){var r,e;return r=new 
S({props:{icon:Y,class:"icon","aria-haspopup":"true","aria-controls":"dropdown-menu"}}),{c:function(){w(r.$$.fragment)},l:function(n){k(r.$$.fragment,n)},m:function(n,s){E(r,n,s),e=!0},p:X,i:function(n){e||(D(r.$$.fragment,n),e=!0)},o:function(n){A(r.$$.fragment,n),e=!1},d:function(n){_(r,n)}}}function _n(n){var r,e,s,t,o,a,c,i,b,x,I,P,R,O,N,T,z,F;return t=new S({props:{icon:U,class:"icon is-small"}}),b=new S({props:{icon:J,class:"icon is-small"}}),O=new S({props:{icon:M,class:"icon is-small"}}),{c:function(){r=l("div"),e=l("div"),s=l("a"),w(t.$$.fragment),o=u(" \n Play now"),a=f(),c=l("div"),i=l("a"),w(b.$$.fragment),x=u(" \n Play next"),I=f(),P=l("div"),R=l("a"),w(O.$$.fragment),N=u(" \n Add to queue"),this.h()},l:function(n){r=m(n,"DIV",{class:!0});var l=p(r);e=m(l,"DIV",{class:!0});var u=p(e);s=m(u,"A",{href:!0,class:!0});var f=p(s);k(t.$$.fragment,f),o=d(f," \n Play now"),f.forEach(h),u.forEach(h),a=v(l),c=m(l,"DIV",{class:!0});var j=p(c);i=m(j,"A",{href:!0,class:!0});var g=p(i);k(b.$$.fragment,g),x=d(g," \n Play next"),g.forEach(h),j.forEach(h),I=v(l),P=m(l,"DIV",{class:!0});var y=p(P);R=m(y,"A",{href:!0,class:!0});var $=p(R);k(O.$$.fragment,$),N=d($," \n Add to queue"),$.forEach(h),y.forEach(h),l.forEach(h),this.h()},h:function(){j(s,"href",null),j(s,"class","dropdown-item"),j(e,"class","list-item"),j(i,"href",null),j(i,"class","dropdown-item"),j(c,"class","list-item"),j(R,"href",null),j(R,"class","dropdown-item"),j(P,"class","list-item"),j(r,"class","list is-hoverable")},m:function(l,u){g(l,r,u),y(r,e),y(e,s),E(t,s,null),y(s,o),y(r,a),y(r,c),y(c,i),E(b,i,null),y(i,x),y(r,I),y(r,P),y(P,R),E(O,R,null),y(R,N),T=!0,z||(F=[$(s,"click",function(){B(L(n[26].uri))&&L(n[26].uri).apply(this,arguments)}),$(i,"click",function(){B(q(n[26].uri))&&q(n[26].uri).apply(this,arguments)}),$(R,"click",function(){B(C(n[26].uri))&&C(n[26].uri).apply(this,arguments)})],z=!0)},p:function(r,e){n=r},i:function(n){T||(D(t.$$.fragment,n),D(b.$$.fragment,n),D(O.$$.fragment,n),T=!0)},o:function(n){A(t.$$.fragment,n),A(b.$$.fragment,n),A(O.$$.fragment,n),T=!1},d:function(n){n&&h(r),_(t),_(b),_(O),z=!1,V(F)}}}function Vn(n){var r,e,s,t,o,a,c,i,w,k,x=n[26].name+"",E=n[8](n[26]);function _(){return n[20](n[26],n[28])}var V=E&&An(n),P=n[3]==n[28]&&_n(n);return{c:function(){r=l("a"),e=l("div"),s=l("div"),t=u(x),o=f(),V&&V.c(),a=f(),P&&P.c(),c=f(),this.h()},l:function(n){r=m(n,"A",{href:!0,class:!0});var i=p(r);e=m(i,"DIV",{class:!0});var l=p(e);s=m(l,"DIV",{class:!0});var u=p(s);t=d(u,x),u.forEach(h),o=v(l),V&&V.l(l),l.forEach(h),a=v(i),P&&P.l(i),c=v(i),i.forEach(h),this.h()},h:function(){j(s,"class","column"),j(e,"class","columns is-mobile"),j(r,"href",null),j(r,"class","list-item")},m:function(n,l){g(n,r,l),y(r,e),y(e,s),y(s,t),y(e,o),V&&V.m(e,null),y(r,a),P&&P.m(r,null),y(r,c),i=!0,w||(k=$(s,"click",_),w=!0)},p:function(s,o){n=s,(!i||2&o[0])&&x!==(x=n[26].name+"")&&b(t,x),2&o[0]&&(E=n[8](n[26])),E?V?(V.p(n,o),2&o[0]&&D(V,1)):((V=An(n)).c(),D(V,1),V.m(e,null)):V&&(Q(),A(V,1,1,function(){V=null}),I()),n[3]==n[28]?P?(P.p(n,o),8&o[0]&&D(P,1)):((P=_n(n)).c(),D(P,1),P.m(r,c)):P&&(Q(),A(P,1,1,function(){P=null}),I())},i:function(n){i||(D(V),D(P),i=!0)},o:function(n){A(V),A(P),i=!1},d:function(n){n&&h(r),V&&V.d(),P&&P.d(),w=!1,k()}}}function Pn(n){for(var r,e,s,t,o,a,c,i,b,w,k,x,E,_,V,P,R,O,B,L=n[1].some(n[7]),q=n[0],C=[],F=0;F<q.length;F+=1)C[F]=dn(pn(n,q,F));var 
H=L&&hn(n),S=n[2]&&gn(n),U=n[1].length>0&&xn(n);return{c:function(){r=f(),e=l("h1"),s=u("Browse"),t=f(),o=l("div"),a=l("div"),c=l("nav"),i=l("ul"),b=l("li"),w=l("a"),k=u("Root"),x=f();for(var n=0;n<C.length;n+=1)C[n].c();E=f(),H&&H.c(),_=f(),S&&S.c(),V=f(),U&&U.c(),P=T(),this.h()},l:function(n){z('[data-svelte="svelte-1n5cjn7"]',document.head).forEach(h),r=v(n),e=m(n,"H1",{class:!0});var l=p(e);s=d(l,"Browse"),l.forEach(h),t=v(n),o=m(n,"DIV",{class:!0});var u=p(o);a=m(u,"DIV",{class:!0});var f=p(a);c=m(f,"NAV",{class:!0,"aria-label":!0});var j=p(c);i=m(j,"UL",{class:!0});var g=p(i);b=m(g,"LI",{class:!0});var y=p(b);w=m(y,"A",{href:!0,class:!0});var $=p(w);k=d($,"Root"),$.forEach(h),y.forEach(h),x=v(g);for(var A=0;A<C.length;A+=1)C[A].l(g);g.forEach(h),j.forEach(h),f.forEach(h),E=v(u),H&&H.l(u),u.forEach(h),_=v(n),S&&S.l(n),V=v(n),U&&U.l(n),P=T(),this.h()},h:function(){document.title="Browse",j(e,"class","title"),j(w,"href",null),j(w,"class","svelte-6ensgs"),j(b,"class","svelte-6ensgs"),j(i,"class","svelte-6ensgs"),j(c,"class","breadcrumb svelte-6ensgs"),j(c,"aria-label","breadcrumbs"),j(a,"class","column"),j(o,"class","columns is-mobile")},m:function(l,u){g(l,r,u),g(l,e,u),y(e,s),g(l,t,u),g(l,o,u),y(o,a),y(a,c),y(c,i),y(i,b),y(b,w),y(w,k),y(i,x);for(var f=0;f<C.length;f+=1)C[f].m(i,null);y(o,E),H&&H.m(o,null),g(l,_,u),S&&S.m(l,u),g(l,V,u),U&&U.m(l,u),g(l,P,u),R=!0,O||(B=$(w,"click",n[14]),O=!0)},p:function(n,r){if(69&r[0]){var e;for(q=n[0],e=0;e<q.length;e+=1){var s=pn(n,q,e);C[e]?C[e].p(s,r):(C[e]=dn(s),C[e].c(),C[e].m(i,null))}for(;e<C.length;e+=1)C[e].d(1);C.length=q.length}2&r[0]&&(L=n[1].some(n[7])),L?H?(H.p(n,r),2&r[0]&&D(H,1)):((H=hn(n)).c(),D(H,1),H.m(o,null)):H&&(Q(),A(H,1,1,function(){H=null}),I()),n[2]?S?(S.p(n,r),4&r[0]&&D(S,1)):((S=gn(n)).c(),D(S,1),S.m(V.parentNode,V)):S&&(Q(),A(S,1,1,function(){S=null}),I()),n[1].length>0?U?(U.p(n,r),2&r[0]&&D(U,1)):((U=xn(n)).c(),D(U,1),U.m(P.parentNode,P)):U&&(Q(),A(U,1,1,function(){U=null}),I())},i:function(n){R||(D(H),D(S),D(U),R=!0)},o:function(n){A(H),A(S),A(U),R=!1},d:function(n){n&&h(r),n&&h(e),n&&h(t),n&&h(o),N(C,n),H&&H.d(),n&&h(_),S&&S.d(n),n&&h(V),U&&U.d(n),n&&h(P),O=!1,B()}}}function Rn(n,r,e){var s;F(n,sn,function(n){return e(23,s=n)});var t,o,a,c=[],i=[],l=!1;H(K(nn.mark(function n(){return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:e(2,t=u());case 1:case"end":return n.stop()}},n)})));var u=function(){var n=K(nn.mark(function n(){return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:return n.t0=e,n.next=3,rn();case 3:return n.t1=a=n.sent,(0,n.t0)(5,n.t1),n.t2=e,n.next=8,s.library.browse({uri:null});case 8:n.t3=i=n.sent,(0,n.t2)(1,n.t3);case 10:case"end":return n.stop()}},n)}));return function(){return n.apply(this,arguments)}}(),f=function(){var n=K(nn.mark(function n(r,t,a){var l,u,f,p;return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:if(!(["directory","artist","album","playlist"].indexOf(r.type)>-1)){n.next=16;break}return e(3,o=null),n.next=4,s.library.browse({uri:r.uri});case 4:if(0!=(l=n.sent).length){n.next=12;break}return n.next=8,s.library.lookup({uris:[r.uri]});case 8:u=n.sent,e(1,i=u[r.uri]),n.next=13;break;case 12:e(1,i=l);case 13:"back"===a?(f=c.indexOf(r),p=c.slice(0,f+1),e(0,c=p)):"avance"===a&&e(0,c=[].concat(tn(c),[r])),n.next=17;break;case 16:"track"!==r.type&&"Track"!==r.__model__||m(t);case 17:case"end":return n.stop()}},n)}));return function(r,e,s){return n.apply(this,arguments)}}(),m=function(n){e(3,o=o==n?null:n)},p=function(){var n=K(nn.mark(function n(){return 
nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:return n.t0=e,n.next=3,s.library.browse({uri:null});case 3:n.t1=i=n.sent,(0,n.t0)(1,n.t1),e(0,c=[]);case 6:case"end":return n.stop()}},n)}));return function(){return n.apply(this,arguments)}}(),d=function(){var n=K(nn.mark(function n(r){var e,t;return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:if("track"!==r.type&&"Track"!==r.__model__){n.next=4;break}return n.abrupt("return",r.uri);case 4:if(!("directory"===r.type&&r.uri.indexOf("file://")>-1)){n.next=12;break}return n.next=7,s.library.browse({uri:r.uri});case 7:return e=n.sent,n.next=10,Promise.all(e.map(function(n){return d(n)}));case 10:return t=n.sent,n.abrupt("return",t.flat(1));case 12:case"end":return n.stop()}},n)}));return function(r){return n.apply(this,arguments)}}(),h=function(){var n=K(nn.mark(function n(r){var s;return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,Promise.all(r.map(function(n){return d(n)}));case 2:s=n.sent,on(s.flat(1)),e(4,l=!1);case 5:case"end":return n.stop()}},n)}));return function(r){return n.apply(this,arguments)}}(),v=function(){var n=K(nn.mark(function n(r){var s;return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,Promise.all(r.map(function(n){return d(n)}));case 2:s=n.sent,an(null,s.flat(1)),e(4,l=!1);case 5:case"end":return n.stop()}},n)}));return function(r){return n.apply(this,arguments)}}(),j=function(){var n=K(nn.mark(function n(r){var e;return nn.wrap(function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,Promise.all(r.map(function(n){return d(n)}));case 2:e=n.sent,cn(null,e.flat(1));case 4:case"end":return n.stop()}},n)}));return function(r){return n.apply(this,arguments)}}();return[c,i,t,o,l,a,f,function(n){return["track"].indexOf(n.type)>-1||"Track"===n.__model__||"directory"===n.type&&n.uri.indexOf("file://")>-1},function(n){return["album","track","artist","playlist"].indexOf(n.type)>-1||"Track"===n.__model__},m,p,h,v,j,function(){return e(2,t=p())},function(n,r){return e(2,t=f(n,r,"back"))},function(){return e(4,l=!l)},function(){return h(i)},function(){return v(i)},function(){return j(i)},function(n,r){return e(2,t=f(n,r,"avance"))},function(n){return m(n)},function(){return e(3,o=null)}]}export default(function(r){n(i,a);var e=un(i);function i(n){var r;return s(this,i),r=e.call(this),t(o(r),n,Rn,Pn,c,{},fn,[-1,-1]),r}return i}());
PypiClean
/MetaCSV-0.1.1.tar.gz/MetaCSV-0.1.1/README.rst
======= MetaCSV ======= .. image:: https://img.shields.io/travis/ClimateImpactLab/metacsv/master.svg?style=flat-square :target: https://travis-ci.org/ClimateImpactLab/metacsv .. image:: https://img.shields.io/pypi/v/metacsv.svg?style=flat-square :target: https://pypi.python.org/pypi/MetaCSV .. image:: https://img.shields.io/coveralls/delgadom/metacsv/master.svg?style=flat-square :target: https://coveralls.io/github/delgadom/metacsv?branch=master .. image:: https://img.shields.io/pypi/pyversions/metacsv.svg?style=flat-square :target: https://pypi.python.org/pypi/MetaCSV .. image:: https://anaconda.org/delgadom/metacsv/badges/version.svg :target: https://anaconda.org/delgadom/metacsv .. image:: https://anaconda.org/delgadom/metacsv/badges/downloads.svg :target: https://anaconda.org/delgadom/metacsv .. image:: https://badges.gitter.im/metacsv/Lobby.svg :alt: Join the chat at https://gitter.im/metacsv/Lobby :target: https://gitter.im/metacsv/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge ``metacsv`` - Tools for documentation-aware data reading, writing, and analysis See the full documentation at ReadTheDocs_ .. _ReadTheDocs: http://metacsv.rtfd.org Overview ========= **MetaCSV** provides tools to read in CSV data with a yaml-compliant header directly into a ``pandas`` ``Series``, ``DataFrame``, or ``Panel`` or an ``xarray`` ``DataArray`` or ``Dataset``. Data specification ---------------------------- Data can be specified using a yaml-formatted header, with the YAML *start-mark* string (``---``) above and the YAML *end-mark* string (``...``) below the yaml block. Only one yaml block is allowed. If the doc-separation string is not the first (non-whitespace) line in the file, all of the file's contents will be interpreted by the csv reader. The yaml data can have arbitrary complexity. .. code-block:: python >>> import metacsv, numpy as np >>> import StringIO as io # import io for python 3 >>> doc = io.StringIO(''' ... --- ... author: A Person ... date: 2000-12-31 ... variables: ... pop: ... name: Population ... unit: millions ... gdp: ... name: Product ... unit: 2005 $Bn ... ... ... region,year,pop,gdp ... USA,2010,309.3,13599.3 ... USA,2011,311.7,13817.0 ... CAN,2010,34.0,1240.0 ... CAN,2011,34.3,1276.7 ... ''') Using MetaCSV-formatted files in python -------------------------------------------- Read MetaCSV-formatted data into python using pandas-like syntax: .. code-block:: python >>> df = metacsv.read_csv(doc, index_col=[0,1]) >>> df # doctest: +NORMALIZE_WHITESPACE <metacsv.core.containers.DataFrame (4, 2)> pop gdp region year USA 2010 309.3 13599.3 2011 311.7 13817.0 CAN 2010 34.0 1240.0 2011 34.3 1276.7 <BLANKLINE> Variables gdp: name Product unit 2005 $Bn pop: name Population unit millions Attributes author: A Person date: 2000-12-31 These properties can be transferred from one data container to another: .. code-block:: python >>> np.random.seed(1) >>> s = metacsv.Series(np.random.random(6)) >>> s <metacsv.core.containers.Series (6L,)> 0 0.417022 1 0.720324 2 0.000114 3 0.302333 4 0.146756 5 0.092339 dtype: float64 >>> s.attrs = df.attrs >>> s <metacsv.core.containers.Series (6L,)> 0 0.417022 1 0.720324 2 0.000114 3 0.302333 4 0.146756 5 0.092339 dtype: float64 <BLANKLINE> Attributes author: A Person date: 2000-12-31 All MetaCSV attributes, including the ``attrs`` Attribute object, can be copied, assigned to new objects, and deleted. 
Since these attributes are largely unstable across normal pandas data processing, it is recommended that attributes be copied before data work is attempted and then reassigned before IO conversions. Exporting MetaCSV data to other formats ----------------------------------------------- CSV ~~~~~~~~~ A MetaCSV ``Series`` or ``DataFrame`` can be written as a yaml-prefixed CSV using the same ``to_csv`` syntax as it's ``pandas`` counterpart: .. code-block:: python >>> df.attrs['new attribute'] = 'changed in python!' >>> df.to_csv('my_new_data.csv') The resulting csv will include a yaml-formatted header with the original metadata updated to include attr['new attribute']., pandas ~~~~~~~~~~~~~~~ The coordinates and MetaCSV attributes can be easily stripped from a MetaCSV Container: .. code-block:: python >>> df.to_pandas() # doctest: +NORMALIZE_WHITESPACE pop gdp region year USA 2010 309.3 13599.3 2011 311.7 13817.0 CAN 2010 34.0 1240.0 2011 34.3 1276.7 xarray/netCDF ~~~~~~~~~~~~~~~ `xArray <http://xarray.pydata.org/>`_ provides a pandas-like interface to operating on indexed ``ndarray`` data. It is modeled on the ``netCDF`` data storage format used frequently in climate science, but is useful for many applications with higher-order data. .. code-block:: python >>> ds = df.to_xarray() >>> ds <xarray.Dataset> Dimensions: (region: 2, year: 2) Coordinates: * region (region) object 'USA' 'CAN' * year (year) int64 2010 2011 Data variables: pop (region, year) float64 309.3 311.7 34.0 34.3 gdp (region, year) float64 1.36e+04 1.382e+04 1.24e+03 1.277e+03 Attributes: author: A Person date: 2000-12-31 new attribute: changed in python! >>> ds.to_netcdf('my_netcdf_data.nc') Pickling ~~~~~~~~~ Pickling works just like pandas. .. code-block:: python >>> df.to_pickle('my_metacsv_pickle.pkl') >>> metacsv.read_pickle('my_metacsv_pickle.pkl') <metacsv.core.containers.DataFrame (4, 2)> pop gdp region year USA 2010 309.3 13599.3 2011 311.7 13817.0 CAN 2010 34.0 1240.0 2011 34.3 1276.7 Variables gdp: OrderedDict([('name', 'Product'), ('unit', '2005 $Bn')]) pop: OrderedDict([('name', 'Population'), ('unit', 'millions')]) Attributes author: A Person date: 2000-12-31 new attribute: changed in python! Others ~~~~~~~~~ Currently, MetaCSV only supports conversion to CSV and to netCDF through the ``xarray`` module. However, feel free to suggest additional features and to contribute your own! Conversion to other types on the fly ----------------------------------------------- Special conversion utilities allow you to convert any metacsv, pandas, or xarray container or a CSV filepath into any other type in this group. All of these conversion utilities are also methods on metacsv containers. * to_csv ``to_csv`` allows you to write any container or csv file to a metacsv-formatted csv file. Keyword arguments ``attrs``, ``coords``, and ``variables`` will be attached to the data before it is written. Any conflicts in these attributes will be updated with the arguments to this function .. 
code-block:: python >>> import pandas as pd, numpy as np, xarray as xr, metacsv >>> df = pd.DataFrame(np.random.random((3,4)), columns=list('abcd')) >>> df a b c d 0 0.558083 0.665184 0.226173 0.339905 1 0.541712 0.835804 0.326078 0.179103 2 0.332869 0.435573 0.904612 0.823884 >>> metacsv.to_csv(df, 'mycsv.csv', attrs={'author': 'my name', 'date': '2015-12-31'}) >>> >>> df2 = metacsv.read_csv('mycsv.csv', index_col=[0]) >>> df2 <metacsv.core.containers.DataFrame (3, 4)> a b c d 0 0.558083 0.665184 0.226173 0.339905 1 0.541712 0.835804 0.326078 0.179103 2 0.332869 0.435573 0.904612 0.823884 Attributes author: my name date: 2015-12-31 new attribute: changed in python! >>> metacsv.to_csv(df2, 'mycsv.csv', attrs={'author': 'new name'}) >>> >>> metacsv.read_csv('mycsv.csv', index_col=[0]) <metacsv.core.containers.DataFrame (3, 4)> a b c d 0 0.558083 0.665184 0.226173 0.339905 1 0.541712 0.835804 0.326078 0.179103 2 0.332869 0.435573 0.904612 0.823884 Attributes author: new name date: 2015-12-31 new attribute: changed in python! * to_header ``to_header`` allows you to write the special attributes directly to a metacsv-formatted header file. The special attributes may be individually specified or taken from a metacsv container. The ``header_file`` argument to both ``read_csv`` and ``to_csv`` allow the creation of special header files which allow you to separate the metacsv-formatted header from the data if desired. For example, say you have a table to read into pandas .. code-block:: python >>> import metacsv, pandas as pd >>> pd.DataFrame( [['x',1,2,3],['y',4,5,6],['z',7,8,9]], columns=['index','a','b','c']).to_csv('mycsv.csv', index=None) >>> metacsv.read_csv('mycsv.csv') <metacsv.core.containers.DataFrame (3, 4)> index a b c 0 x 1 2 3 1 y 4 5 6 2 z 7 8 9 A separate header file can be created and used which can then be read in with the data: .. code-block:: python >>> metacsv.to_header('mycsv.header', attrs={'author': 'me'}, coords='index') >>> metacsv.read_csv('mycsv.csv', header_file='mycsv.header') <metacsv.core.containers.DataFrame (3, 3)> a b c index x 1 2 3 y 4 5 6 z 7 8 9 Coordinates * index (index) object x, y, z Attributes author: me * to_xarray ``to_xarray`` returns any container or csv file as an xarray container. Table data (CSV files and DataFrames) will create ``xarray.Dataset`` objects, while Series objects will create ``xarray.DataArray`` objects. Keyword arguments ``attrs``, ``coords``, and ``variables`` will be attached to the data before it is written. Any conflicts in these attributes will be updated with the arguments to this function. * to_dataarray ``to_dataarray`` returns any container or csv file as an ``xarray.DataArray``. Table data (CSV files and DataFrames) will be stacked, with columns re-arranged as new ``xarray.Coordinates``. Keyword arguments ``attrs``, ``coords``, and ``variables`` will be attached to the data before it is written. Any conflicts in these attributes will be updated with the arguments to this function. * to_dataset ``to_dataarray`` returns any container or csv file as an ``xarray.DataArray``. Table data (CSV files and DataFrames) will be stacked, with columns re-arranged as new ``xarray.Coordinates``. Keyword arguments ``attrs``, ``coords``, and ``variables`` will be attached to the data before it is written. Any conflicts in these attributes will be updated with the arguments to this function. * to_pandas ``to_pandas`` strips special attributes and returns an ordinary ``Series`` or ``DataFrame`` object. 
* to_netcdf ``to_netcdf`` first converts a container or csv file to an ``xarray.Dataset`` using the ``to_dataset`` function, then writes the dataset to file with the ``xarray`` ``ds.to_netcdf`` method. .. code-block:: python >>> metacsv.to_netcdf('mycsv.csv', 'mycsv.nc', header_file='mycsv.header') >>> import xarray as xr >>> xr.open_dataset('mycsv.nc') <xarray.Dataset> Dimensions: (index: 3) Coordinates: * index (index) |S1 'x' 'y' 'z' Data variables: a (index) int64 1 4 7 b (index) int64 2 5 8 c (index) int64 3 6 9 Attributes: author: me Special attributes ----------------------------------------------- The ``coords`` and ``variables`` attributes are keywords and are not simply passed to the MetaCSV object's ``attrs`` attribute. Variables ~~~~~~~~~~~~~ Variables are attributes which apply to speicific columns or data variables. In MetaCSV containers, variables are displayed as a separate set of attributes. On conversion to ``xarray``, these attributes are assigned to variable-specific ``attrs``: .. code-block:: python >>> ds = df.to_xarray() >>> ds <xarray.Dataset> Dimensions: (index: 4) Coordinates: * index (index) int64 0 1 2 3 Data variables: region (index) object 'USA' 'USA' 'CAN' 'CAN' year (index) int64 2010 2011 2010 2011 pop (index) float64 309.3 311.7 34.0 34.3 gdp (index) float64 1.36e+04 1.382e+04 1.24e+03 1.277e+03 Attributes: date: 2000-12-31 author: A Person >>> ds.pop <xarray.DataArray 'pop' (index: 4)> array([ 309.3, 311.7, 34. , 34.3]) Coordinates: * index (index) int64 0 1 2 3 Attributes: name: Population unit: millions Note that at present, variables are not persistent across slicing operations. **parse_vars** Variables have a special argument to ``read_csv``: ``parse_vars`` allows parsing of one-line variable definitions in the format ``var: description [unit]``: .. code-block:: python >>> doc = io.StringIO(''' --- author: A Person date: 2000-12-31 variables: pop: Population [millions] gdp: Product [2005 $Bn] ... region,year,pop,gdp USA,2010,309.3,13599.3 USA,2011,311.7,13817.0 CAN,2010,34.0,1240.0 CAN,2011,34.3,1276.7 ''') >>> metacsv.read_csv(doc, index_col=0, parse_vars=True) <metacsv.core.containers.DataFrame (4, 3)> year pop gdp region USA 2010 309.3 13599.3 USA 2011 311.7 13817.0 CAN 2010 34.0 1240.0 CAN 2011 34.3 1276.7 Variables gdp: {u'description': 'Product', u'unit': '2005 $Bn'} pop: {u'description': 'Population', u'unit': 'millions'} Attributes date: 2000-12-31 author: A Person Coordinates ~~~~~~~~~~~~~ The conceptual foundation of coordinates is taken from ``xarray``, where data is treated as an ndarray rather than a table. If you plan to only work with the pandas-like features of ``metacsv``, you do not really need coordinates. That said, specifying the ``coords`` attribute in a csv results in automatic index handling: .. code-block:: python >>> doc = io.StringIO(''' --- author: A Person date: 2000-12-31 variables: pop: name: Population unit: millions gdp: name: Product unit: 2005 $Bn coords: - region - year ... 
region,year,pop,gdp USA,2010,309.3,13599.3 USA,2011,311.7,13817.0 CAN,2010,34.0,1240.0 CAN,2011,34.3,1276.7 ''') >>> df = metacsv.read_csv(doc) >>> df <metacsv.core.containers.DataFrame (4, 2)> pop gdp region year USA 2010 309.3 13599.3 2011 311.7 13817.0 CAN 2010 34.0 1240.0 2011 34.3 1276.7 Coordinates * region (region) object CAN, USA * year (year) int64 2010, 2011 Variables gdp: OrderedDict([('name', 'Product'), ('unit', '2005 $Bn')]) pop: OrderedDict([('name', 'Population'), ('unit', 'millions')]) Attributes date: 2000-12-31 author: A Person Coordinates become especially useful, however, when moving to ``xarray`` objects or ``netCDF`` files. The ``DataFrame`` above will have no trouble, as ``region`` and ``year`` are orthoganal: .. code-block:: python >>> df.to_xarray() <xarray.Dataset> Dimensions: (region: 2, year: 2) Coordinates: * region (region) object 'USA' 'CAN' * year (year) int64 2010 2011 Data variables: pop (region, year) float64 309.3 311.7 34.0 34.3 gdp (region, year) float64 1.36e+04 1.382e+04 1.24e+03 1.277e+03 Attributes: date: 2000-12-31 author: A Person This becomes more complicated when columns in the index are not independent and cannot be thought of as orthogonal. In this case, you can specify ``coords`` as a dict-like attribute either in the CSV header or as an argument to the conversion method: .. code-block:: python doc = io.StringIO(''' --- coords: region: regname: 'region' continent: 'region' year: ... region,regname,continent,year,pop,gdp USA,United States,North America,2010,309.3,13599.3 USA,United States,North America,2011,311.7,13817.0 CAN,Canada,North America,2010,34.0,1240.0 CAN,Canada,North America,2011,34.3,1276.7 ''') >>> metacsv.to_xarray(doc) <xarray.Dataset> Dimensions: (region: 2, year: 2) Coordinates: * region (region) object 'USA' 'CAN' * year (year) int64 2010 2011 regname (region) object 'United States' 'Canada' continent (region) object 'North America' 'North America' Data variables: pop (region, year) float64 309.3 311.7 34.0 34.3 gdp (region, year) float64 1.36e+04 1.382e+04 1.24e+03 1.277e+03 Note that the resulting ``Dataset`` is not indexed by the cartesian product of all four coordinates, but only by the base coordinates, indicated by the ``*``. Without first setting the ``coords`` attribute this way, the resulting data would have ``NaN`` values corresponding to ``(USA, Canada)`` and ``(CAN, United States)``. TODO ============ * Allow automatic coersion of ``xarray.Dataset`` and ``xarray.DataArray`` objects to MetaCSV containers. * Extend metacsv functionality to ``Panel`` objects * Make ``coords`` and ``attrs`` persistent across slicing operations (try ``df['pop'].to_xarray()`` from above example and watch it fail...) * Improve hooks between ``pandas`` and ``metacsv``: - update ``coord`` names on ``df.index.names`` assignment - update ``coords`` on stack/unstack - update ``coords`` on * Improve parser to automatically strip trailing commas and other excel relics * Enable ``read_csv(engine='C')``... this currently does not work. * Handle attributes indexed by coord/variable names --> assign to coord/variable-specific ``attrs`` * Let's start an issue tracker and get rid of this section! * Should we rethink "special attribute," naming e.g. coords? Maybe these should have some special prefix like ``_coords`` when included in yaml headers to avoid confusion with other generic attributes... * Allow attribute assertions (e.g. 
TODO
============

* Allow automatic coercion of ``xarray.Dataset`` and ``xarray.DataArray``
  objects to MetaCSV containers.
* Extend metacsv functionality to ``Panel`` objects
* Make ``coords`` and ``attrs`` persistent across slicing operations (try
  ``df['pop'].to_xarray()`` from the above example and watch it fail...)
* Improve hooks between ``pandas`` and ``metacsv``:

  - update ``coord`` names on ``df.index.names`` assignment
  - update ``coords`` on stack/unstack
  - update ``coords`` on

* Improve parser to automatically strip trailing commas and other excel
  relics
* Enable ``read_csv(engine='C')``... this currently does not work.
* Handle attributes indexed by coord/variable names --> assign to
  coord/variable-specific ``attrs``
* Let's start an issue tracker and get rid of this section!
* Should we rethink "special attribute" naming, e.g. ``coords``? Maybe
  these should have some special prefix like ``_coords`` when included in
  yaml headers to avoid confusion with other generic attributes...
* Allow attribute assertions (e.g. ``version='>1.6.0'``) in ``read_csv``
  call
* Improve test coverage
* Improve documentation & build readthedocs page

Feature Requests
==================

* Create syntax for ``multi-csv`` --> ``Panel`` or combining using filename
  regex
* Eventually? allow for on-disk manipulation of many/large files with
  dask/xarray
* Eventually? add xml, SQL, other structured syntax language conversions

.. _BSD: http://opensource.org/licenses/BSD-3-Clause
.. _Documentation: http://metacsv.readthedocs.org/en/latest/
.. _API: http://metacsv.readthedocs.org/en/latest/api.html
PypiClean
/GraphQL_core_next-1.1.1-py3-none-any.whl/graphql/type/validate.py
from operator import attrgetter, itemgetter from typing import ( Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, Union, cast, ) from ..error import GraphQLError from ..pyutils import inspect from ..language import NamedTypeNode, Node, OperationType, OperationTypeDefinitionNode from .definition import ( GraphQLEnumType, GraphQLInputField, GraphQLInputObjectType, GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType, is_enum_type, is_input_object_type, is_input_type, is_interface_type, is_named_type, is_non_null_type, is_object_type, is_output_type, is_union_type, is_required_argument, ) from ..utilities.assert_valid_name import is_valid_name_error from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .directives import is_directive, GraphQLDirective from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema __all__ = ["validate_schema", "assert_valid_schema"] def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: """Validate a GraphQL schema. Implements the "Type Validation" sub-sections of the specification's "Type System" section. Validation runs synchronously, returning a list of encountered errors, or an empty list if no errors were encountered and the Schema is valid. """ # First check to ensure the provided value is in fact a GraphQLSchema. assert_schema(schema) # If this Schema has already been validated, return the previous results. # noinspection PyProtectedMember errors = schema._validation_errors if errors is None: # Validate the schema, producing a list of errors. context = SchemaValidationContext(schema) context.validate_root_types() context.validate_directives() context.validate_types() # Persist the results of validation before returning to ensure validation does # not run multiple times for this schema. errors = context.errors schema._validation_errors = errors return errors def assert_valid_schema(schema: GraphQLSchema) -> None: """Utility function which asserts a schema is valid. Throws a TypeError if the schema is invalid. 
""" errors = validate_schema(schema) if errors: raise TypeError("\n\n".join(error.message for error in errors)) class SchemaValidationContext: """Utility class providing a context for schema validation.""" errors: List[GraphQLError] schema: GraphQLSchema def __init__(self, schema: GraphQLSchema) -> None: self.errors = [] self.schema = schema def report_error( self, message: str, nodes: Union[Optional[Node], Sequence[Optional[Node]]] = None, ): if nodes and not isinstance(nodes, Node): nodes = [node for node in nodes if node] nodes = cast(Optional[Sequence[Node]], nodes) self.add_error(GraphQLError(message, nodes)) def add_error(self, error: GraphQLError): self.errors.append(error) def validate_root_types(self): schema = self.schema query_type = schema.query_type if not query_type: self.report_error("Query root type must be provided.", schema.ast_node) elif not is_object_type(query_type): self.report_error( f"Query root type must be Object type, it cannot be {query_type}.", get_operation_type_node(schema, query_type, OperationType.QUERY), ) mutation_type = schema.mutation_type if mutation_type and not is_object_type(mutation_type): self.report_error( "Mutation root type must be Object type if provided," f" it cannot be {mutation_type}.", get_operation_type_node(schema, mutation_type, OperationType.MUTATION), ) subscription_type = schema.subscription_type if subscription_type and not is_object_type(subscription_type): self.report_error( "Subscription root type must be Object type if provided," f" it cannot be {subscription_type}.", get_operation_type_node( schema, subscription_type, OperationType.SUBSCRIPTION ), ) def validate_directives(self): directives = self.schema.directives for directive in directives: # Ensure all directives are in fact GraphQL directives. if not is_directive(directive): self.report_error( f"Expected directive but got: {inspect(directive)}.", getattr(directive, "ast_node", None), ) continue # Ensure they are named correctly. self.validate_name(directive) # Ensure the arguments are valid. arg_names = set() for arg_name, arg in directive.args.items(): # Ensure they are named correctly. self.validate_name(arg_name, arg) # Ensure they are unique per directive. if arg_name in arg_names: self.report_error( f"Argument @{directive.name}({arg_name}:)" " can only be defined once.", directive.ast_node and [ arg.ast_node for name, arg in directive.args.items() if name == arg_name ], ) continue arg_names.add(arg_name) # Ensure the type is an input type. if not is_input_type(arg.type): self.report_error( f"The type of @{directive.name}({arg_name}:)" f" must be Input Type but got: {inspect(arg.type)}.", arg.ast_node, ) def validate_name(self, node: Any, name: str = None): # Ensure names are valid, however introspection types opt out. try: if not name: name = node.name name = cast(str, name) ast_node = node.ast_node except AttributeError: pass else: error = is_valid_name_error(name, ast_node) if error: self.add_error(error) def validate_types(self): validate_input_object_circular_refs = InputObjectCircularRefsValidator(self) for type_ in self.schema.type_map.values(): # Ensure all provided types are in fact GraphQL type. if not is_named_type(type_): self.report_error( f"Expected GraphQL named type but got: {inspect(type)}.", type_.ast_node if type_ else None, ) continue # Ensure it is named correctly (excluding introspection types). 
            if not is_introspection_type(type_):
                self.validate_name(type_)

            if is_object_type(type_):
                type_ = cast(GraphQLObjectType, type_)

                # Ensure fields are valid.
                self.validate_fields(type_)

                # Ensure objects implement the interfaces they claim to.
                self.validate_object_interfaces(type_)
            elif is_interface_type(type_):
                type_ = cast(GraphQLInterfaceType, type_)

                # Ensure fields are valid.
                self.validate_fields(type_)
            elif is_union_type(type_):
                type_ = cast(GraphQLUnionType, type_)

                # Ensure Unions include valid member types.
                self.validate_union_members(type_)
            elif is_enum_type(type_):
                type_ = cast(GraphQLEnumType, type_)

                # Ensure Enums have valid values.
                self.validate_enum_values(type_)
            elif is_input_object_type(type_):
                type_ = cast(GraphQLInputObjectType, type_)

                # Ensure Input Object fields are valid.
                self.validate_input_fields(type_)

                # Ensure Input Objects do not contain non-nullable circular references
                validate_input_object_circular_refs(type_)

    def validate_fields(self, type_: Union[GraphQLObjectType, GraphQLInterfaceType]):
        fields = type_.fields

        # Objects and Interfaces both must define one or more fields.
        if not fields:
            self.report_error(
                f"Type {type_.name} must define one or more fields.",
                get_all_nodes(type_),
            )

        for field_name, field in fields.items():
            # Ensure they are named correctly.
            self.validate_name(field, field_name)

            # Ensure the type is an output type.
            if not is_output_type(field.type):
                self.report_error(
                    f"The type of {type_.name}.{field_name}"
                    f" must be Output Type but got: {inspect(field.type)}.",
                    field.ast_node and field.ast_node.type,
                )

            # Ensure the arguments are valid.
            arg_names: Set[str] = set()
            for arg_name, arg in field.args.items():
                # Ensure they are named correctly.
                self.validate_name(arg, arg_name)

                # Ensure they are unique per field.
                if arg_name in arg_names:
                    self.report_error(
                        "Field argument"
                        f" {type_.name}.{field_name}({arg_name}:)"
                        " can only be defined once.",
                        [
                            arg.ast_node
                            for name, arg in field.args.items()
                            if name == arg_name
                        ],
                    )
                    break
                arg_names.add(arg_name)

                # Ensure the type is an input type.
                if not is_input_type(arg.type):
                    self.report_error(
                        "Field argument"
                        f" {type_.name}.{field_name}({arg_name}:)"
                        f" must be Input Type but got: {inspect(arg.type)}.",
                        arg.ast_node and arg.ast_node.type,
                    )

    def validate_object_interfaces(self, obj: GraphQLObjectType):
        implemented_type_names: Set[str] = set()
        for iface in obj.interfaces:
            if not is_interface_type(iface):
                self.report_error(
                    f"Type {obj.name} must only implement Interface"
                    f" types, it cannot implement {inspect(iface)}.",
                    get_all_implements_interface_nodes(obj, iface),
                )
                continue
            if iface.name in implemented_type_names:
                self.report_error(
                    f"Type {obj.name} can only implement {iface.name} once.",
                    get_all_implements_interface_nodes(obj, iface),
                )
                continue
            implemented_type_names.add(iface.name)
            self.validate_object_implements_interface(obj, iface)

    def validate_object_implements_interface(
        self, obj: GraphQLObjectType, iface: GraphQLInterfaceType
    ):
        obj_fields, iface_fields = obj.fields, iface.fields

        # Assert each interface field is implemented.
        for field_name, iface_field in iface_fields.items():
            obj_field = obj_fields.get(field_name)

            # Assert interface field exists on object.
            if not obj_field:
                self.report_error(
                    f"Interface field {iface.name}.{field_name}"
                    f" expected but {obj.name} does not provide it.",
                    [iface_field.ast_node, *get_all_nodes(obj)],
                )
                continue

            # Assert interface field type is satisfied by object field type,
            # by being a valid subtype (covariant).
if not is_type_sub_type_of(self.schema, obj_field.type, iface_field.type): self.report_error( f"Interface field {iface.name}.{field_name}" f" expects type {iface_field.type}" f" but {obj.name}.{field_name}" f" is type {obj_field.type}.", [ iface_field.ast_node and iface_field.ast_node.type, obj_field.ast_node and obj_field.ast_node.type, ], ) # Assert each interface field arg is implemented. for arg_name, iface_arg in iface_field.args.items(): obj_arg = obj_field.args.get(arg_name) # Assert interface field arg exists on object field. if not obj_arg: self.report_error( "Interface field argument" f" {iface.name}.{field_name}({arg_name}:)" f" expected but {obj.name}.{field_name}" " does not provide it.", [iface_arg.ast_node, obj_field.ast_node], ) continue # Assert interface field arg type matches object field arg type # (invariant). if not is_equal_type(iface_arg.type, obj_arg.type): self.report_error( "Interface field argument" f" {iface.name}.{field_name}({arg_name}:)" f" expects type {iface_arg.type}" f" but {obj.name}.{field_name}({arg_name}:)" f" is type {obj_arg.type}.", [ iface_arg.ast_node and iface_arg.ast_node.type, obj_arg.ast_node and obj_arg.ast_node.type, ], ) # Assert additional arguments must not be required. for arg_name, obj_arg in obj_field.args.items(): iface_arg = iface_field.args.get(arg_name) if not iface_arg and is_required_argument(obj_arg): self.report_error( f"Object field {obj.name}.{field_name} includes" f" required argument {arg_name} that is missing from" f" the Interface field {iface.name}.{field_name}.", [obj_arg.ast_node, iface_field.ast_node], ) def validate_union_members(self, union: GraphQLUnionType): member_types = union.types if not member_types: self.report_error( f"Union type {union.name} must define one or more member types.", get_all_nodes(union), ) included_type_names: Set[str] = set() for member_type in member_types: if member_type.name in included_type_names: self.report_error( f"Union type {union.name} can only include type" f" {member_type.name} once.", get_union_member_type_nodes(union, member_type.name), ) continue included_type_names.add(member_type.name) def validate_enum_values(self, enum_type: GraphQLEnumType): enum_values = enum_type.values if not enum_values: self.report_error( f"Enum type {enum_type.name} must define one or more values.", get_all_nodes(enum_type), ) for value_name, enum_value in enum_values.items(): # Ensure valid name. self.validate_name(enum_value, value_name) if value_name in ("true", "false", "null"): self.report_error( f"Enum type {enum_type.name} cannot include value:" f" {value_name}.", enum_value.ast_node, ) def validate_input_fields(self, input_obj: GraphQLInputObjectType): fields = input_obj.fields if not fields: self.report_error( f"Input Object type {input_obj.name}" " must define one or more fields.", get_all_nodes(input_obj), ) # Ensure the arguments are valid for field_name, field in fields.items(): # Ensure they are named correctly. self.validate_name(field, field_name) # Ensure the type is an input type. 
if not is_input_type(field.type): self.report_error( f"The type of {input_obj.name}.{field_name}" f" must be Input Type but got: {inspect(field.type)}.", field.ast_node.type if field.ast_node else None, ) def get_operation_type_node( schema: GraphQLSchema, type_: GraphQLObjectType, operation: OperationType ) -> Optional[Node]: operation_nodes = cast( List[OperationTypeDefinitionNode], get_all_sub_nodes(schema, attrgetter("operation_types")), ) for node in operation_nodes: if node.operation == operation: return node.type return type_.ast_node class InputObjectCircularRefsValidator: """Modified copy of algorithm from validation.rules.NoFragmentCycles""" def __init__(self, context: SchemaValidationContext): self.context = context # Tracks already visited types to maintain O(N) and to ensure that cycles # are not redundantly reported. self.visited_types: Set[str] = set() # Array of input fields used to produce meaningful errors self.field_path: List[Tuple[str, GraphQLInputField]] = [] # Position in the type path self.field_path_index_by_type_name: Dict[str, int] = {} def __call__(self, input_obj: GraphQLInputObjectType): """Detect cycles recursively.""" # This does a straight-forward DFS to find cycles. # It does not terminate when a cycle was found but continues to explore # the graph to find all possible cycles. name = input_obj.name if name in self.visited_types: return self.visited_types.add(name) self.field_path_index_by_type_name[name] = len(self.field_path) for field_name, field in input_obj.fields.items(): if is_non_null_type(field.type) and is_input_object_type( field.type.of_type ): field_type = cast(GraphQLInputObjectType, field.type.of_type) cycle_index = self.field_path_index_by_type_name.get(field_type.name) self.field_path.append((field_name, field)) if cycle_index is None: self(field_type) else: cycle_path = self.field_path[cycle_index:] field_names = map(itemgetter(0), cycle_path) self.context.report_error( f"Cannot reference Input Object '{field_type.name}'" " within itself through a series of non-null fields:" f" '{'.'.join(field_names)}'.", cast( Sequence[Node], map(attrgetter("ast_node"), map(itemgetter(1), cycle_path)), ), ) self.field_path.pop() del self.field_path_index_by_type_name[name] SDLDefinedObject = Union[ GraphQLSchema, GraphQLDirective, GraphQLInterfaceType, GraphQLObjectType, GraphQLInputObjectType, GraphQLUnionType, GraphQLEnumType, ] def get_all_nodes(obj: SDLDefinedObject) -> List[Node]: node = obj.ast_node nodes: List[Node] = [node] if node else [] extension_nodes = getattr(obj, "extension_ast_nodes", None) if extension_nodes: nodes.extend(extension_nodes) return nodes def get_all_sub_nodes( obj: SDLDefinedObject, getter: Callable[[Node], List[Node]] ) -> List[Node]: result: List[Node] = [] for ast_node in get_all_nodes(obj): sub_nodes = getter(ast_node) if sub_nodes: result.extend(sub_nodes) return result def get_all_implements_interface_nodes( type_: GraphQLObjectType, iface: GraphQLInterfaceType ) -> List[NamedTypeNode]: implements_nodes = cast( List[NamedTypeNode], get_all_sub_nodes(type_, attrgetter("interfaces")) ) return [ iface_node for iface_node in implements_nodes if iface_node.name.value == iface.name ] def get_union_member_type_nodes( union: GraphQLUnionType, type_name: str ) -> Optional[List[NamedTypeNode]]: union_nodes = cast( List[NamedTypeNode], get_all_sub_nodes(union, attrgetter("types")) ) return [ union_node for union_node in union_nodes if union_node.name.value == type_name ]
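# A minimal usage sketch (not part of this module). It assumes `GraphQLSchema`
# and `validate_schema` are re-exported by the top-level `graphql` package,
# and that a schema without a query root fails root-type validation as
# implemented in `SchemaValidationContext.validate_root_types` above:
#
#     from graphql import GraphQLSchema, validate_schema
#
#     errors = validate_schema(GraphQLSchema())
#     print([error.message for error in errors])
#     # ['Query root type must be provided.']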
PypiClean
/Chronoclust-0.2.1.2.tar.gz/Chronoclust-0.2.1.2/chronoclust/objects/predecon_mc.py
import numpy as np

from chronoclust.objects.microcluster import Microcluster


class PredeconMC(Microcluster):
    def __init__(self, centroid, id, is_core_cluster, cluster_CF1, cluster_CF2,
                 cluster_cumulative_weight):
        """
        This class implements both Microcluster and Datapoint. It is used in
        the offline clustering step, where PreDeCon is run on every pcore
        cluster, treating each pcore cluster as a datapoint.

        Args:
            centroid (numpy.array): datapoint's value.
            id (:obj:set): id of the datapoint.
            is_core_cluster (bool): is this a core.
            cluster_CF1 (numpy.array): CF1 value. See Microcluster class.
            cluster_CF2 (numpy.array): CF2 value. See Microcluster class.
            cluster_cumulative_weight (float): Cumulative weight of the cluster.
        """
        self.centroid = centroid

        # List of points' id that are within a certain distance from this
        # point. Distance is measured using preferred weighted similarity.
        # See definitions 3, 4, 5 in paper [2].
        self.weighted_neighbour_pts = []

        # List containing all the datapoints who are this datapoint's neighbours.
        self.neighbour_pts = []

        # List representing the subspace preference of a point. See definition 3
        # in paper [2]. A dimension is 1 in the vector if it is preferred by the
        # point, i.e. the variance in the dimension is smaller than the variance
        # threshold (delta in paper [2]), and k otherwise, where k is just a
        # constant.
        self.subspace_preference_vector = np.ones(len(centroid))

        # A status assigned to this data point.
        # 'c' means classified, 'u' means unclassified, 'n' means noise.
        self._classification = 'u'

        self.core_status = is_core_cluster
        self.id = id

        Microcluster.__init__(self, cf1=cluster_CF1, cf2=cluster_CF2,
                              cumulative_weight=cluster_cumulative_weight, id=id)

    def get_centroid(self):
        return self.centroid

    def is_core(self):
        return self.core_status

    def merge_mc(self, other_mc):
        """
        Add this cluster to another cluster (supposedly a core cluster).

        Args:
            other_mc (Microcluster): Cluster to add this cluster into.

        Returns:
            None.
        """
        # me_as_microcluster = Microcluster(cf1=np.copy(self.CF1), cf2=np.copy(self.CF2), id=self.id,
        #                                   cumulative_weight=self.cumulative_weight)
        # other_mc.add_new_cluster(me_as_microcluster)
        other_mc.CF1 += self.CF1
        other_mc.CF2 += self.CF2
        other_mc.cumulative_weight += self.cumulative_weight
        other_mc.id.add(self.id)
        other_mc.set_centroid()

    def get_pdim(self):
        """
        Calculate the preference dimensionality (PDim in paper [2]), i.e. the
        number of dimensions with variance less than delta. See definition 2 in
        paper [2]. The method does so by converting subspace_preference_vector
        into an array of booleans (True where the value is > 1, False
        otherwise), then summing the array (True = 1, False = 0).

        Returns:
            Int: Preference dimensionality.
        """
        return (np.array(self.subspace_preference_vector) > 1).sum()

    def is_classified(self):
        return self._classification == 'c'

    def is_noise(self):
        return self._classification == 'n'

    def is_unclassified(self):
        return self._classification == 'u'

    def set_classified(self):
        self._classification = 'c'

    def set_noise(self):
        self._classification = 'n'
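# --- Illustrative sketch (not part of the package) ---
# `get_pdim` counts the entries of the subspace preference vector that were
# raised above 1 (i.e. set to the constant k for a preferred dimension):
#
#     import numpy as np
#     vec = np.array([1.0, 100.0, 1.0, 100.0])  # hypothetical preference vector
#     pdim = (vec > 1).sum()                    # -> 2 preferred dimensions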
PypiClean
/AirzoneCloudDaikin-0.4.0.tar.gz/AirzoneCloudDaikin-0.4.0/README.md
# Airzone Cloud Daikin

- [Airzone Cloud Daikin](#airzone-cloud-daikin)
  - [Presentation](#presentation)
    - [Abstract](#abstract)
    - [Module classes](#module-classes)
  - [Usage](#usage)
    - [Install](#install)
    - [Start API](#start-api)
    - [Get installations](#get-installations)
    - [Get devices from installations](#get-devices-from-installations)
    - [Get all devices shortcut](#get-all-devices-shortcut)
    - [Control a device](#control-a-device)
    - [HVAC mode](#hvac-mode)
      - [Available modes](#available-modes)
      - [Set HVAC mode on a system (and its sub-zones)](#set-hvac-mode-on-a-system-and-its-sub-zones)
  - [API doc](#api-doc)
    - [Constructor](#constructor)

## Presentation

### Abstract

Allows you to communicate easily with Daikin Airzone Cloud to retrieve information or to send commands (on/off, temperature, HVAC mode, ...)

This API is specific to the Daikin implementation (try to connect to [dkn.airzonecloud.com](https://dkn.airzonecloud.com) to be sure).

If you are looking for the main Airzone Cloud API (try to connect to [www.airzonecloud.com](https://www.airzonecloud.com)), you should use this package : [AirzoneCloud](https://github.com/max13fr/AirzoneCloud)

### Module classes

- **AirzoneCloudDaikin** : represents your Daikin AirzoneCloud account. Contains a list of your **installations** :
  - **Installation** : represents one of your installations (like your home, an office, ...). Contains a list of its **devices** :
    - **Device** : represents your climate equipment to control

## Usage

### Install

```bash
pip3 install AirzoneCloudDaikin
```

### Start API

```python
from AirzoneCloudDaikin import AirzoneCloudDaikin
api = AirzoneCloudDaikin("[email protected]", "password")
```

### Get installations

```python
for installation in api.installations:
    print(
        "Installation(name={}, type={}, scenary={}, id={})".format(
            installation.name, installation.type, installation.scenary, installation.id
        )
    )
```

Output :

<pre>
Installation(name=My home, type=home, scenary=occupied, id=5d592c14646b6d798ccc2aaa)
</pre>

### Get devices from installations

```python
for installation in api.installations:
    for device in installation.devices:
        print(
            "Device(name={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={}, mac={})".format(
                device.name,
                device.is_on,
                device.mode,
                device.current_temperature,
                device.target_temperature,
                device.id,
                device.mac,
            )
        )
```

Output :

<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0, id=5ab1875a651241708814575681, mac=AA:BB:CC:DD:EE:FF)
</pre>

### Get all devices shortcut

```python
for device in api.all_devices:
    print(
        "Device(name={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={}, mac={})".format(
            device.name,
            device.is_on,
            device.mode,
            device.current_temperature,
            device.target_temperature,
            device.id,
            device.mac,
        )
    )
```

Output :

<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0, id=5ab1875a651241708814575681, mac=AA:BB:CC:DD:EE:FF)
</pre>

### Control a device

```python
device = api.all_devices[0]
print(device)

# start device
device.turn_on()

# set temperature
device.set_temperature(26)

print(device)

# stopping device
device.turn_off()

print(device)
```

Output :

<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=30.0)
Device(name=Dknwserver, is_on=True, mode=cool, current_temp=25.0, target_temp=26.0)
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0)
</pre>

### HVAC mode

#### Available modes

- **cool** : Cooling mode
- **heat** : Heating mode
- **ventilate** : Ventilation
- **dehumidify** : Dry
- **heat-cold-auto** : Auto heat/cold mode

#### Set HVAC mode on a system (and its sub-zones)

```python
device = api.all_devices[0]
print(device)

# set mode to heat
device.set_mode("heat")

print(device)
```

Output :

<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0)
Device(name=Dknwserver, is_on=False, mode=heat, current_temp=25.0, target_temp=23.0)
</pre>

> :warning: Daikin climate equipment has 2 setpoints : one for heat & one for cold.
> It's visible in the previous example : the target temperature changed from 26 to 23 just by changing the mode from cool to heat.
> So don't forget to do your set_temperature() AFTER the set_mode() and not before.

## API doc

[API full doc](API.md)

### Constructor

```python
AirzoneCloudDaikin(username, password, user_agent=None, base_url=None)
```

- **username** : your username used to connect on the Daikin Airzone Cloud website or app
- **password** : your password used to connect on the Daikin Airzone Cloud website or app
- **user_agent** : allows to change the default user agent if set
- **base_url** : allows to change the base url of the Daikin Airzone Cloud API if set
  - default value : _https://dkn.airzonecloud.com_
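Putting the warning above into practice, here is a minimal sketch of a safe "heat to a given temperature" helper (hypothetical helper name; it only combines the documented `set_mode`, `set_temperature` and `turn_on` calls in the required order):

```python
def heat_to(device, temperature):
    """Switch a device to heating and apply the target temperature.

    set_mode() is called first because it resets the target temperature
    to the heat setpoint; set_temperature() must come after it.
    """
    device.set_mode("heat")
    device.set_temperature(temperature)
    device.turn_on()

heat_to(api.all_devices[0], 22)
```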
PypiClean
/Dabo-0.9.16.tar.gz/Dabo-0.9.16/dabo/ui/uiwx/dShell.py
import __builtin__ import time import wx import wx.stc as stc import wx.py from wx.py import pseudo import dabo import dabo.dEvents as dEvents from dabo.dLocalize import _ if __name__ == "__main__": dabo.ui.loadUI("wx") from dSplitForm import dSplitForm from dabo.ui import makeDynamicProperty from dabo.ui import dKeys from dControlMixin import dControlMixin class _LookupPanel(dabo.ui.dPanel): """Used for the command history search""" def afterInit(self): self._history = None self._displayedHistory = None self.currentSearch = "" self.needRefilter = False self.lblSearch = dabo.ui.dLabel(self) self.lstMatch = dabo.ui.dListBox(self, ValueMode="string", Choices=[], MultipleSelect=True, OnMouseLeftDoubleClick=self.selectCmd, OnKeyChar=self.onListKey) self.Sizer = dabo.ui.dSizer("v", DefaultBorder=4) self.Sizer.append(self.lblSearch, halign="center") self.Sizer.append(self.lstMatch, "x", 1) self.Width = 400 self.layout() def clear(self): """Reset to original state.""" self.ok = False self.currentSearch = self.lblSearch.Caption = "" self.refilter() def onListKey(self, evt): """Process keypresses in the command list control""" kc = evt.keyCode char = evt.keyChar if kc in (dKeys.key_Return, dKeys.key_Numpad_enter): self.closeDialog(True) return elif kc == dKeys.key_Escape: self.closeDialog(False) if kc in dKeys.arrowKeys.values() or char is None: #ignore return if kc == dKeys.key_Back: self.currentSearch = self.currentSearch[:-1] else: self.currentSearch += char self.lblSearch.Caption = self.currentSearch self.layout() self.needRefilter = True evt.stop() def closeDialog(self, ok): """Hide the dialog, and set the ok/cancel flag""" self.ok = ok self.Form.hide() def getCmd(self): return self.lstMatch.Value def selectCmd(self, evt): self.closeDialog(True) def onIdle(self, evt): """For performance, don't filter on every keypress. 
Wait until idle.""" if self.needRefilter: self.needRefilter = False self.refilter() def refilter(self): """Display only those commands that contain the search string""" self.DisplayedHistory = self.History.filterByExpression(" '%s' in cmd.lower() " % self.currentSearch.lower()) lst = self.lstMatch sel = lst.Value lst.Choices = [rec["cmd"] for rec in self.DisplayedHistory] if sel: try: lst.Value = sel except ValueError: self._selectLast() else: self._selectLast() self._selectLast() def _selectFirst(self): """Select the first item in the list, if available.""" if len(self.lstMatch.Choices): self.lstMatch.PositionValue = 0 def _selectLast(self): """Select the first item in the list, if available.""" num = len(self.lstMatch.Choices) if num: self.lstMatch.PositionValue = num - 1 def _getHistory(self): if self._history is None: self._history = dabo.db.dDataSet() return self._history def _setHistory(self, val): if self._constructed(): self._history = self._displayedHistory = val try: self.lstMatch.Choices = [rec["cmd"] for rec in self.DisplayedHistory] self._selectLast() except AttributeError: pass else: self._properties["History"] = val def _getDisplayedHistory(self): if self._displayedHistory is None: self._displayedHistory = self.History return self._displayedHistory def _setDisplayedHistory(self, val): if self._constructed(): self._displayedHistory = val else: self._properties["DisplayedHistory"] = val DisplayedHistory = property(_getDisplayedHistory, _setDisplayedHistory, None, _("Filtered copy of the History (dDataSet)")) History = property(_getHistory, _setHistory, None, _("Dataset containing the command history (dDataSet)")) class dShell(dControlMixin, wx.py.shell.Shell): def __init__(self, parent, properties=None, attProperties=None, *args, **kwargs): self._isConstructed = False # Set some reasonable font defaults. self.plat = self.Application.Platform if self.plat == "GTK": self._fontFace = "Monospace" self._fontSize = 10 elif self.plat == "Mac": self._fontFace = "Monaco" self._fontSize = 12 elif self.plat == "Win": self._fontFace = "Courier New" self._fontSize = 10 self._baseClass = dShell preClass = wx.py.shell.Shell dControlMixin.__init__(self, preClass, parent, properties=properties, attProperties=attProperties, *args, **kwargs) @dabo.ui.deadCheck def ScrollToLine(self, lnum): """Need to check for the case where the control is released, as the wx-level shell makes a CallAfter for ScrollToLine(). """ super(dShell, self).ScrollToLine(lnum) def processLine(self): """ This is part of the underlying class. We need to add the command that gets processed into our internal stack. 
""" edt = self.CanEdit() super(dShell, self).processLine() if edt: # push the latest command into the stack try: self.Form.addToHistory() except AttributeError: # Not running in dShellForm pass def push(self, command, silent=False): """Need to raise an event when the interpreter executes a command.""" super(dShell, self).push(command, silent=silent) if not self.more: self.raiseEvent(dEvents.ShellCommandRun) def getAutoCompleteList(self, cmd): return self.interp.getAutoCompleteList(cmd, includeMagic=self.autoCompleteIncludeMagic, includeSingle=self.autoCompleteIncludeSingle, includeDouble=self.autoCompleteIncludeDouble) def setDefaultFont(self, fontFace, fontSize): # Global default styles for all languages self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%s,size:%d" % (fontFace, fontSize)) self.StyleClearAll() # Reset all to be like the default # Global default styles for all languages self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%s,size:%d" % (self._fontFace, fontSize)) self.StyleSetSpec(stc.STC_STYLE_LINENUMBER, "back:#C0C0C0,face:%s,size:%d" % (self._fontFace, 8)) self.StyleSetSpec(stc.STC_STYLE_CONTROLCHAR, "face:%s" % fontFace) self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, "fore:#000000,back:#00FF00,bold") self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, "fore:#000000,back:#FF0000,bold") def setPyFont(self, fontFace, fontSize): # Python-specific styles self.StyleSetSpec(stc.STC_P_DEFAULT, "fore:#000000,face:%s,size:%d" % (fontFace, fontSize)) # Comments self.StyleSetSpec(stc.STC_P_COMMENTLINE, "fore:#007F00,face:%s,size:%d,italic" % (fontFace, fontSize)) # Number self.StyleSetSpec(stc.STC_P_NUMBER, "fore:#007F7F,size:%d" % fontSize) # String self.StyleSetSpec(stc.STC_P_STRING, "fore:#7F007F,face:%s,size:%d" % (fontFace, fontSize)) # Single quoted string self.StyleSetSpec(stc.STC_P_CHARACTER, "fore:#7F007F,face:%s,size:%d" % (fontFace, fontSize)) # Keyword self.StyleSetSpec(stc.STC_P_WORD, "fore:#00007F,bold,size:%d" % fontSize) # Triple quotes self.StyleSetSpec(stc.STC_P_TRIPLE, "fore:#7F0000,size:%d,italic" % fontSize) # Triple double quotes self.StyleSetSpec(stc.STC_P_TRIPLEDOUBLE, "fore:#7F0000,size:%d,italic" % fontSize) # Class name definition self.StyleSetSpec(stc.STC_P_CLASSNAME, "fore:#0000FF,bold,underline,size:%d" % fontSize) # Function or method name definition self.StyleSetSpec(stc.STC_P_DEFNAME, "fore:#007F7F,bold,size:%d" % fontSize) # Operators self.StyleSetSpec(stc.STC_P_OPERATOR, "bold,size:%d" % fontSize) # Identifiers self.StyleSetSpec(stc.STC_P_IDENTIFIER, "fore:#000000,face:%s,size:%d" % (fontFace, fontSize)) # Comment-blocks self.StyleSetSpec(stc.STC_P_COMMENTBLOCK, "fore:#7F7F7F,size:%d,italic" % fontSize) # End of line where string is not closed self.StyleSetSpec(stc.STC_P_STRINGEOL, "fore:#000000,face:%s,back:#E0C0E0,eol,size:%d" % (fontFace, fontSize)) def OnKeyDown(self, evt): """Override on the Mac, as the navigation defaults are different than on Win/Lin""" if self.plat != "Mac": return super(dShell, self).OnKeyDown(evt) key = evt.GetKeyCode() # If the auto-complete window is up let it do its thing. if self.AutoCompActive(): evt.Skip() return # Prevent modification of previously submitted # commands/responses. 
controlDown = evt.ControlDown() altDown = evt.AltDown() shiftDown = evt.ShiftDown() cmdDown = evt.CmdDown() currpos = self.GetCurrentPos() endpos = self.GetTextLength() selecting = self.GetSelectionStart() != self.GetSelectionEnd() if cmdDown and (key == wx.WXK_LEFT): # Equivalent to Home home = self.promptPosEnd if currpos > home: self.SetCurrentPos(home) if not selecting and not shiftDown: self.SetAnchor(home) self.EnsureCaretVisible() return if cmdDown and (key == wx.WXK_RIGHT): # Equivalent to End linepos = self.GetLineEndPosition(self.GetCurrentLine()) if shiftDown: start = currpos else: start = linepos self.SetSelection(start, linepos) return elif cmdDown and (key == wx.WXK_UP): # Equivalent to Ctrl-Home if shiftDown: end = currpos else: end = 0 self.SetSelection(0, end) return elif cmdDown and (key == wx.WXK_DOWN): # Equivalent to Ctrl-End if shiftDown: start = currpos else: start = endpos self.SetSelection(start, endpos) return return super(dShell, self).OnKeyDown(evt) def _getFontSize(self): return self._fontSize def _setFontSize(self, val): if self._constructed(): self._fontSize = val self.setDefaultFont(self._fontFace, self._fontSize) self.setPyFont(self._fontFace, self._fontSize) self.Application.setUserSetting("shell.fontsize", self._fontSize) else: self._properties["FontSize"] = val def _getFontFace(self): return self._fontFace def _setFontFace(self, val): if self._constructed(): self._fontFace = val self.setDefaultFont(self._fontFace, self._fontSize) self.setPyFont(self._fontFace, self._fontSize) self.Application.setUserSetting("shell.fontface", self._fontFace) else: self._properties["FontFace"] = val FontFace = property(_getFontFace, _setFontFace, None, _("Name of the font face used in the shell (str)")) FontSize = property(_getFontSize, _setFontSize, None, _("Size of the font used in the shell (int)")) class dShellForm(dSplitForm): def _onDestroy(self, evt): self._clearOldHistory() __builtin__.raw_input = self._oldRawInput def _beforeInit(self, pre): # Set the sash self._sashPct = 0.6 # Class to use for creating the interactive shell self._shellClass = dShell super(dShellForm, self)._beforeInit(pre) def _afterInit(self): super(dShellForm, self)._afterInit() self.cmdHistKey = self.PreferenceManager.command_history self._historyPanel = None self._lastCmd = None # PyShell sets the raw_input function to a function of PyShell, # but doesn't set it back on destroy, resulting in errors later # on if something other than PyShell asks for raw_input (pdb, for # example). 
self._oldRawInput = __builtin__.raw_input self.bindEvent(dEvents.Destroy, self._onDestroy) splt = self.Splitter splt.MinimumPanelSize = 80 splt.unbindEvent() self.Orientation = "H" self.unsplit() self._splitState = False self.MainSplitter.bindEvent(dEvents.SashDoubleClick, self.sashDoubleClick) self.MainSplitter.bindEvent(dEvents.SashPositionChanged, self.sashPosChanged) cp = self.CmdPanel = self.Panel1 op = self.OutPanel = self.Panel2 cp.unbindEvent(dEvents.ContextMenu) op.unbindEvent(dEvents.ContextMenu) cp.Sizer = dabo.ui.dSizer() op.Sizer = dabo.ui.dSizer() pgf = self.pgfCodeShell = dabo.ui.dPageFrame(cp, PageCount=2) self.pgShell = pgf.Pages[0] self.pgCode = pgf.Pages[1] self.pgShell.Caption = _("Shell") self.pgCode.Caption = _("Code") cp.Sizer.append1x(pgf) self.shell = self.ShellClass(self.pgShell, DroppedTextHandler=self, DroppedFileHandler=self) self.pgShell.Sizer.append1x(self.shell, border=4) # Configure the shell's behavior self.shell.AutoCompSetIgnoreCase(True) self.shell.AutoCompSetAutoHide(False) ## don't hide when the typed string no longer matches self.shell.AutoCompStops(" ") ## characters that will stop the autocomplete self.shell.AutoCompSetFillUps(".(") # This lets you go all the way back to the '.' without losing the AutoComplete self.shell.AutoCompSetCancelAtStart(False) self.shell.Bind(wx.EVT_RIGHT_UP, self.onShellRight) self.shell.Bind(wx.wx.EVT_CONTEXT_MENU, self.onShellContext) # Create the Code control codeControl = dabo.ui.dEditor(self.pgCode, RegID="edtCode", Language="python", OnKeyDown=self.onCodeKeyDown, OnMouseRightDown=self.onCodeRightDown, DroppedTextHandler=self, DroppedFileHandler=self) self.pgCode.Sizer.append1x(codeControl, border=4) # This adds the interpreter's local namespace to the editor for code completion, etc. codeControl.locals = self.shell.interp.locals lbl = dabo.ui.dLabel(self.pgCode, ForeColor="blue", WordWrap=True, Caption=_("""Ctrl-Enter to run the code (or click the button to the right). Ctrl-Up/Down to scroll through history.""")) lbl.FontSize -= 3 runButton = dabo.ui.dButton(self.pgCode, Caption=_("Run"), OnHit=self.onRunCode) hsz = dabo.ui.dSizer("h") hsz.appendSpacer(20) hsz.append(lbl) hsz.append1x(dabo.ui.dPanel(self.pgCode)) hsz.append(runButton, valign="middle") hsz.appendSpacer(20) self.pgCode.Sizer.append(hsz, "x") # Stack to hold code history self._codeStack = [] self._codeStackPos = 0 # Restore the history self.restoreHistory() # Bring up history search self.bindKey("Ctrl+R", self.onHistoryPop) # Show/hide the code editing pane self.bindKey("Ctrl+E", self.onToggleCodePane) # Force the focus to the editor when the code page is activated. def _delayedSetFocus(evt): dabo.ui.callAfter(self.edtCode.setFocus) self.pgCode.bindEvent(dEvents.PageEnter, _delayedSetFocus) # create the output control outControl = dabo.ui.dEditBox(op, RegID="edtOut", ReadOnly=True) op.Sizer.append1x(outControl) outControl.bindEvent(dEvents.MouseRightDown, self.onOutputRightDown) self._stdOut = self.shell.interp.stdout self._stdErr = self.shell.interp.stderr self._pseudoOut = pseudo.PseudoFileOut(write=self.appendOut) self._pseudoErr = pseudo.PseudoFileOut(write=self.appendOut) self.SplitState = True # Make 'self' refer to the calling form, or this form if no calling form. # Make 'bo' refer to the primary bizobj of the calling form, if any. 
if self.Parent is None: ns = self else: ns = self.Parent bo = getattr(ns, "PrimaryBizobj", None) if bo: self.shell.interp.locals['bo'] = bo self.shell.interp.locals['self'] = ns self.Caption = _("dShellForm: self is %s") % ns.Name self.setStatusText(_("Use this shell to interact with the runtime environment")) self.fillMenu() self.shell.SetFocus() def appendOut(self, tx): ed = self.edtOut ed.Value += tx endpos = ed.GetLastPosition() # Either of these commands should scroll the edit box # to the bottom, but neither do (at least on OS X) when # called directly or via callAfter(). dabo.ui.callAfter(ed.ShowPosition, endpos) dabo.ui.callAfter(ed.SetSelection, endpos, endpos) def addToHistory(self, cmd=None): if cmd is None: cmd = self.shell.history[0] chk = self.cmdHistKey if cmd == self._lastCmd: # Don't add again return # Delete any old instances of this command chk.deleteByValue(cmd) self._lastCmd = cmd stamp = "%s" % int(round(time.time() * 100, 0)) self.cmdHistKey.setValue(stamp, cmd) def _loadHistory(self): ck = self.cmdHistKey cmds = [] for k in ck.getPrefKeys(): cmds.append({"stamp": k, "cmd": ck.get(k)}) dsu = dabo.db.dDataSet(cmds) if dsu: ds = dsu.sort("stamp", "asc") return ds else: return dsu def onToggleCodePane(self, evt): """Toggle between the Code Pane and the Output Pane""" self.pgfCodeShell.cyclePages(1) def processDroppedFiles(self, filelist): """ This will fire if files are dropped on the code editor. If more than one file is dropped, only open the first, and warn the user. """ if len(filelist) > 1: dabo.ui.exclaim(_("Only one file can be dropped at a time")) if self.pgfCodeShell.SelectedPage == self.pgShell: self.shell.AddText(filelist[0]) else: self.edtCode.Value = file(filelist[0]).read() def processDroppedText(self, txt): """Add the text to the code editor.""" cc = self.edtCode currText = cc.Value selStart, selEnd = cc.SelectionPosition cc.Value = "%s%s%s" % (currText[:selStart], txt, currText[selEnd:]) def onHistoryPop(self, evt): """ Let the user type in part of a command, and retrieve the matching commands from their history. """ ds = self._loadHistory() hp = self._HistoryPanel hp.History = ds fp = self.FloatingPanel # We want it centered, so set Owner to None fp.Owner = None hp.clear() fp.show() if hp.ok: cmds = hp.getCmd() for num, cmd in enumerate(cmds): # For all but the first, we need to process the previous command. if num: self.shell.processLine() try: pos = self.shell.history.index(cmd) except ValueError: # Not in the list return self.shell.replaceFromHistory(pos - self.shell.historyIndex) def restoreHistory(self): """ Get the stored history from previous sessions, and set the shell's internal command history list to it. 
""" ds = self._loadHistory() self.shell.history = [rec["cmd"] for rec in ds] def _clearOldHistory(self): """For performance reasons, only save up to 500 commands.""" numToSave = 500 ck = self.cmdHistKey ds = self._loadHistory() if len(ds) <= numToSave: return cutoff = ds[numToSave]["stamp"] bad = [] for rec in ds: if rec["stamp"] <= cutoff: bad.append(rec["stamp"]) for bs in bad: ck.deletePref(bs) def onRunCode(self, evt, addReturn=True): code = self.edtCode.Value.rstrip() if not code: return # See if this is already in the stack try: self._codeStackPos = self._codeStack.index(code) except ValueError: self._codeStack.append(code) self._codeStackPos = len(self._codeStack) self.edtCode.Value = "" self.shell.Execute(code) # If the last line is indented, run a blank line to complete the block if code.splitlines()[-1][0] in " \t": self.shell.run("", prompt=False) self.addToHistory() self.pgfCodeShell.SelectedPage = self.pgShell def onCodeKeyDown(self, evt): if not evt.controlDown: return keyCode = evt.keyCode if (keyCode == 13): evt.stop() self.onRunCode(None, addReturn=True) elif keyCode in (dKeys.key_Up, dKeys.key_Down): direction = {dKeys.key_Up: -1, dKeys.key_Down: 1}[keyCode] self.moveCodeStack(direction) def moveCodeStack(self, direction): size = len(self._codeStack) pos = self._codeStackPos newpos = max(0, pos + direction) if newpos == size: # at the end; clear the code self._codeStackPos = size - 1 self.edtCode.Value = "" else: code = self._codeStack[newpos] self._codeStackPos = newpos self.edtCode.Value = code def onCodeRightDown(self, evt): dabo.ui.info("Code!") def onOutputRightDown(self, evt): pop = dabo.ui.dMenu() pop.append(_("Clear"), OnHit=self.onClearOutput) if self.edtOut.SelectionLength: pop.append(_("Copy"), OnHit=self.Application.onEditCopy) self.showContextMenu(pop) evt.stop() def onClearOutput(self, evt): self.edtOut.Value = "" def onShellContext(self, evt): pop = dabo.ui.dMenu() if self.SplitState: pmpt = _("Unsplit") else: pmpt = _("Split") pop.append(pmpt, OnHit=self.onSplitContext) self.showContextMenu(pop) evt.StopPropagation() def onShellRight(self, evt): pop = dabo.ui.dMenu() if self.SplitState: pmpt = _("Unsplit") else: pmpt = _("Split") pop.append(pmpt, OnHit=self.onSplitContext) self.showContextMenu(pop) evt.StopPropagation() def onSplitContext(self, evt): self.SplitState = not self.SplitState evt.stop() def onResize(self, evt): self.SashPosition = self._sashPct * self.Height def sashDoubleClick(self, evt): # We don't want the window to unsplit evt.stop() def sashPosChanged(self, evt): self._sashPct = float(self.SashPosition) / self.Height def fillMenu(self): viewMenu = self.MenuBar.getMenu("base_view") if viewMenu.Children: viewMenu.appendSeparator() viewMenu.append(_("Zoom &In"), HotKey="Ctrl+=", OnHit=self.onViewZoomIn, ItemID="view_zoomin", bmp="zoomIn", help=_("Zoom In")) viewMenu.append(_("&Normal Zoom"), HotKey="Ctrl+/", OnHit=self.onViewZoomNormal, ItemID="view_zoomnormal", bmp="zoomNormal", help=_("Normal Zoom")) viewMenu.append(_("Zoom &Out"), HotKey="Ctrl+-", OnHit=self.onViewZoomOut, ItemID="view_zoomout", bmp="zoomOut", help=_("Zoom Out")) viewMenu.append(_("&Toggle Code Pane"), HotKey="Ctrl+E", OnHit=self.onToggleCodePane, ItemID="view_togglecode", bmp="", help=_("Show/hide Code Pane")) editMenu = self.MenuBar.getMenu("base_edit") if editMenu.Children: editMenu.appendSeparator() editMenu.append(_("nClear O&utput"), HotKey="Ctrl+Back", ItemID="edit_clearoutput", OnHit=self.onClearOutput, help=_("Clear Output Window")) def onViewZoomIn(self, evt): 
self.shell.SetZoom(self.shell.GetZoom()+1) def onViewZoomNormal(self, evt): self.shell.SetZoom(0) def onViewZoomOut(self, evt): self.shell.SetZoom(self.shell.GetZoom()-1) @classmethod def getBaseShellClass(cls): return dShell def _getFontSize(self): return self.shell.FontSize def _setFontSize(self, val): if self._constructed(): self.shell.FontSize = val else: self._properties["FontSize"] = val def _getFontFace(self): return self.shell.FontFace def _setFontFace(self, val): if self._constructed(): self.shell.FontFace = val else: self._properties["FontFace"] = val def _getHistoryPanel(self): fp = self.FloatingPanel try: create = self._historyPanel is None except AttributeError: create = True if create: fp.clear() pnl = self._historyPanel = _LookupPanel(fp) pnl.Height = max(200, self.Height-100) fp.Sizer.append(pnl) fp.fitToSizer() return self._historyPanel def _getShellClass(self): return self._shellClass def _setShellClass(self, val): if self._constructed(): self._shellClass = val else: self._properties["ShellClass"] = val def _getSplitState(self): return self._splitState def _setSplitState(self, val): if self._splitState != val: self._splitState = val if val: self.split() self.shell.interp.stdout = self._pseudoOut self.shell.interp.stderr = self._pseudoErr else: self.unsplit() self.shell.interp.stdout = self._stdOut self.shell.interp.stderr = self._stdErr FontFace = property(_getFontFace, _setFontFace, None, _("Name of the font face used in the shell (str)")) FontSize = property(_getFontSize, _setFontSize, None, _("Size of the font used in the shell (int)")) _HistoryPanel = property(_getHistoryPanel, None, None, _("Popup to display the command history (read-only) (dDialog)")) ShellClass = property(_getShellClass, _setShellClass, None, _("Class to use for the interactive shell (dShell)")) SplitState = property(_getSplitState, _setSplitState, None, _("""Controls whether the output is in a separate pane (default) or intermixed with the commands. (bool)""")) DynamicSplitState = makeDynamicProperty(SplitState) def main(): from dabo.dApp import dApp app = dApp(BasePrefKey="dabo.ui.dShellForm") app.MainFormClass = dShellForm app.setup() app.start() if __name__ == "__main__": main()
PypiClean
/Bonsu-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl/bonsu/sequences/algorithms.py
import sys import wx import numpy import vtk from ..interface.render import wxVTKRenderWindowInteractor from vtk.util import numpy_support import threading from ..operations.HIO import HIO from ..operations.HIO import HIOMask from ..operations.HIO import HIOPlus from ..operations.HIO import PCHIO from ..operations.HIO import PGCHIO from ..operations.HIO import HIOMaskPC from ..operations.CSHIO import CSHIO from ..operations.SO2D import SO2D from ..operations.ER import ER from ..operations.ER import ERMask from ..operations.ER import ERMaskPC from ..operations.POER import POER from ..operations.HPR import HPR from ..operations.HPR import HPRMaskPC from ..operations.RAAR import RAAR from ..operations.RAAR import RAARMaskPC from ..operations.wrap import WrapArray from ..operations.compact import CompactArray from ..operations.loadarray import NewArray from ..operations.loadarray import LoadArray from ..interface.common import CNTR_CLIP def PrepareVisualisation(self,pipelineitem): panelvisual = self.ancestor.GetPage(1) panelvisual.data = None panelvisual.widget.SetInteractor(panelvisual.renWin) panelvisual.widget.SetEnabled( 0 ) r = float(panelvisual.r)/255.0 g = float(panelvisual.g)/255.0 b = float(panelvisual.b)/255.0 panelvisual.renderer_amp_real.SetBackground(r, g, b) panelvisual.renderer_amp_recip.SetBackground(r, g, b) contour_real = CNTR_CLIP*numpy.max(numpy.abs(self.seqdata)) contour_recip = contour_real if (self.visual_amp_real is not None) or (self.visual_amp_recip is not None) or (self.visual_support is not None): panelvisual.renderer_amp_real.RemoveAllViewProps() panelvisual.renderer_phase_real .RemoveAllViewProps() panelvisual.renderer_amp_recip.RemoveAllViewProps() panelvisual.renderer_phase_recip.RemoveAllViewProps() panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_amp_real) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_phase_real) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_amp_recip) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_phase_recip) panelvisual.renWin.GetRenderWindow().Modified() panelvisual.renWin.SetInteractorStyle(panelvisual.style3D) panelvisual.SetPhaseVisualButtons() if self.visual_amp_real is not None: self.visual_amp_real[:] = numpy.abs(self.seqdata) panelvisual.flat_data_amp_real = (self.visual_amp_real).transpose(2,1,0).flatten() panelvisual.vtk_data_array_amp_real = numpy_support.numpy_to_vtk(panelvisual.flat_data_amp_real) points_amp_real = panelvisual.image_amp_real.GetPointData() points_amp_real.SetScalars(panelvisual.vtk_data_array_amp_real) panelvisual.image_amp_real.SetDimensions(self.visual_amp_real.shape) panelvisual.image_amp_real.Modified() panelvisual.lut_amp_real.SetNumberOfTableValues(256) panelvisual.lut_amp_real.SetTableRange(panelvisual.image_amp_real.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[0][0]][1] if self.ancestor.GetPage(0).cmls[0][1] == 0: for k in range(256): panelvisual.lut_amp_real.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_amp_real.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_amp_real.SetRamp(0) panelvisual.lut_amp_real.Build() panelvisual.scalebar_amp_real.SetTitle("") panelvisual.scalebar_amp_real.SetLookupTable(panelvisual.lut_amp_real) panelvisual.scalebar_amp_real.Modified() 
panelvisual.filter_amp_real.SetInputData(panelvisual.image_amp_real) panelvisual.filter_amp_real.ComputeNormalsOn() panelvisual.filter_amp_real.ComputeScalarsOn() panelvisual.filter_amp_real.SetNumberOfContours(1) panelvisual.filter_amp_real.SetValue( 0, contour_real) panelvisual.filter_amp_real.Modified() panelvisual.filter_amp_real.Update() panelvisual.smooth_filter_real.SetInputConnection(panelvisual.filter_amp_real.GetOutputPort()) panelvisual.smooth_filter_real.SetNumberOfIterations(15) panelvisual.smooth_filter_real.SetRelaxationFactor(0.1) panelvisual.smooth_filter_real.FeatureEdgeSmoothingOff() panelvisual.smooth_filter_real.BoundarySmoothingOn() panelvisual.normals_amp_real.SetInputConnection(panelvisual.smooth_filter_real.GetOutputPort()) panelvisual.normals_amp_real.SetFeatureAngle(90) panelvisual.normals_amp_real.ConsistencyOff() panelvisual.normals_amp_real.SplittingOff() panelvisual.normals_amp_real.AutoOrientNormalsOff() panelvisual.normals_amp_real.ComputePointNormalsOn() panelvisual.normals_amp_real.ComputeCellNormalsOff() panelvisual.normals_amp_real.NonManifoldTraversalOff() panelvisual.triangles_amp_real.SetInputConnection(panelvisual.normals_amp_real.GetOutputPort()) panelvisual.strips_amp_real.SetInputConnection(panelvisual.triangles_amp_real.GetOutputPort()) panelvisual.mapper_amp_real.SetInputConnection(panelvisual.strips_amp_real.GetOutputPort()) panelvisual.mapper_amp_real.SetLookupTable(panelvisual.lut_amp_real) panelvisual.mapper_amp_real.SetScalarRange(panelvisual.image_amp_real.GetPointData().GetScalars().GetRange()) panelvisual.mapper_amp_real.SetScalarModeToUsePointData() panelvisual.mapper_amp_real.Modified() panelvisual.mapper_amp_real.Update() panelvisual.actor_amp_real.GetProperty().SetOpacity(1.0) panelvisual.actor_amp_real.SetMapper(panelvisual.mapper_amp_real) panelvisual.renderer_amp_real.AddActor(panelvisual.actor_amp_real) panelvisual.renderer_amp_real.AddActor2D(panelvisual.scalebar_amp_real) panelvisual.renderer_amp_real.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_amp_real.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_amp_real.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_phase_real is not None: self.visual_phase_real[:] = numpy.angle(self.seqdata) panelvisual.flat_data_phase_real = (self.visual_phase_real).transpose(2,1,0).flatten() panelvisual.vtk_data_array_phase_real = numpy_support.numpy_to_vtk(panelvisual.flat_data_phase_real) panelvisual.vtk_data_array_phase_real.SetName("mapscalar") points_amp_real = panelvisual.image_amp_real.GetPointData() points_amp_real.AddArray(panelvisual.vtk_data_array_phase_real) panelvisual.image_amp_real.Modified() panelvisual.lut_phase_real.SetNumberOfTableValues(256) panelvisual.lut_phase_real.SetTableRange([-numpy.pi,numpy.pi]) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[1][0]][1] if self.ancestor.GetPage(0).cmls[1][1] == 0: for k in range(256): panelvisual.lut_phase_real.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_phase_real.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_phase_real.SetRamp(0) panelvisual.lut_phase_real.Build() panelvisual.scalebar_amp_real.SetLookupTable(panelvisual.lut_phase_real) panelvisual.scalebar_amp_real.Modified() panelvisual.filter_amp_real.Modified() panelvisual.filter_amp_real.Update() panelvisual.mapper_amp_real.SetScalarRange([-numpy.pi,numpy.pi]) 
panelvisual.mapper_amp_real.SetLookupTable(panelvisual.lut_phase_real) panelvisual.mapper_amp_real.SelectColorArray("mapscalar") panelvisual.mapper_amp_real.SetScalarModeToUsePointFieldData() panelvisual.mapper_amp_real.Modified() panelvisual.mapper_amp_real.Update() if self.visual_amp_recip is not None: self.visual_amp_recip[:] = numpy.abs(self.seqdata) panelvisual.flat_data_amp_recip= (self.visual_amp_recip).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_amp_recip = numpy_support.numpy_to_vtk(panelvisual.flat_data_amp_recip) panelvisual.points_amp_recip = panelvisual.image_amp_recip.GetPointData() panelvisual.points_amp_recip.SetScalars(panelvisual.vtk_data_array_amp_recip) panelvisual.image_amp_recip.SetDimensions(self.visual_amp_recip.shape) panelvisual.image_amp_recip.Modified() panelvisual.lut_amp_recip.SetNumberOfTableValues(256) panelvisual.lut_amp_recip.SetTableRange(panelvisual.image_amp_recip.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[2][0]][1] if self.ancestor.GetPage(0).cmls[2][1] == 0: for k in range(256): panelvisual.lut_amp_recip.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_amp_recip.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_amp_recip.SetRamp(0) panelvisual.lut_amp_recip.Build() panelvisual.scalebar_amp_recip.SetTitle("") panelvisual.scalebar_amp_recip.SetLookupTable(panelvisual.lut_amp_recip) panelvisual.filter_amp_recip.SetInputData(panelvisual.image_amp_recip) panelvisual.filter_amp_recip.ComputeNormalsOn() panelvisual.filter_amp_recip.ComputeScalarsOn() panelvisual.filter_amp_recip.SetNumberOfContours(1) panelvisual.filter_amp_recip.SetValue( 0, contour_recip) panelvisual.filter_amp_recip.Modified() panelvisual.filter_amp_recip.Update() panelvisual.smooth_filter_recip.SetInputConnection(panelvisual.filter_amp_recip.GetOutputPort()) panelvisual.smooth_filter_recip.SetNumberOfIterations(15) panelvisual.smooth_filter_recip.SetRelaxationFactor(0.1) panelvisual.smooth_filter_recip.FeatureEdgeSmoothingOff() panelvisual.smooth_filter_recip.BoundarySmoothingOn() panelvisual.normals_amp_recip.SetInputConnection(panelvisual.smooth_filter_recip.GetOutputPort()) panelvisual.normals_amp_recip.SetFeatureAngle(90) panelvisual.normals_amp_recip.ConsistencyOff() panelvisual.normals_amp_recip.SplittingOff() panelvisual.normals_amp_recip.AutoOrientNormalsOff() panelvisual.normals_amp_recip.ComputePointNormalsOn() panelvisual.normals_amp_recip.ComputeCellNormalsOff() panelvisual.normals_amp_recip.NonManifoldTraversalOff() panelvisual.triangles_amp_recip.SetInputConnection(panelvisual.normals_amp_recip.GetOutputPort()) panelvisual.strips_amp_recip.SetInputConnection(panelvisual.triangles_amp_recip.GetOutputPort()) panelvisual.mapper_amp_recip.SetInputConnection(panelvisual.strips_amp_recip.GetOutputPort()) panelvisual.mapper_amp_recip.SetLookupTable(panelvisual.lut_amp_recip) panelvisual.mapper_amp_recip.SetScalarRange(panelvisual.image_amp_recip.GetPointData().GetScalars().GetRange()) panelvisual.mapper_amp_recip.SetScalarModeToUsePointData() panelvisual.mapper_amp_recip.Modified() panelvisual.mapper_amp_recip.Update() panelvisual.actor_amp_recip.SetMapper(panelvisual.mapper_amp_recip) panelvisual.actor_amp_recip.GetProperty().SetOpacity(1.0) panelvisual.renderer_amp_recip.AddActor(panelvisual.actor_amp_recip) panelvisual.renderer_amp_recip.AddActor2D(panelvisual.scalebar_amp_recip) 
panelvisual.renderer_amp_recip.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_amp_recip.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_amp_recip.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_phase_recip is not None: self.visual_phase_recip[:] = numpy.angle(self.seqdata) panelvisual.flat_data_phase_recip = (self.visual_phase_recip).transpose(2,1,0).flatten() panelvisual.vtk_data_array_phase_recip = numpy_support.numpy_to_vtk(panelvisual.flat_data_phase_recip) panelvisual.vtk_data_array_phase_recip.SetName("mapscalar") points_amp_recip = panelvisual.image_amp_recip.GetPointData() points_amp_recip.AddArray(panelvisual.vtk_data_array_phase_recip) panelvisual.image_amp_recip.Modified() panelvisual.lut_phase_recip.SetNumberOfTableValues(256) panelvisual.lut_phase_recip.SetTableRange([-numpy.pi,numpy.pi]) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[3][0]][1] if self.ancestor.GetPage(0).cmls[3][1] == 0: for k in range(256): panelvisual.lut_phase_recip.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_phase_recip.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_phase_recip.SetRamp(0) panelvisual.lut_phase_recip.Build() panelvisual.scalebar_amp_recip.SetLookupTable(panelvisual.lut_phase_recip) panelvisual.scalebar_amp_recip.Modified() panelvisual.filter_amp_recip.Modified() panelvisual.filter_amp_recip.Update() panelvisual.mapper_amp_recip.SetScalarRange([-numpy.pi,numpy.pi]) panelvisual.mapper_amp_recip.SetLookupTable(panelvisual.lut_phase_recip) panelvisual.mapper_amp_recip.SelectColorArray("mapscalar") panelvisual.mapper_amp_recip.SetScalarModeToUsePointFieldData() panelvisual.mapper_amp_recip.Modified() panelvisual.mapper_amp_recip.Update() if self.visual_support is not None: self.visual_support[:] = numpy.array(self.support.real, copy=False) panelvisual.flat_data_support= (self.visual_support).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_support = numpy_support.numpy_to_vtk(panelvisual.flat_data_support) panelvisual.points_support = panelvisual.image_support.GetPointData() panelvisual.points_support.SetScalars(panelvisual.vtk_data_array_support) panelvisual.image_support.SetDimensions(self.visual_support.shape) panelvisual.image_support.Modified() panelvisual.filter_support.SetInputData(panelvisual.image_support) panelvisual.filter_support.ComputeNormalsOn() panelvisual.filter_support.ComputeScalarsOn() panelvisual.filter_support.SetNumberOfContours(1) panelvisual.filter_support.SetValue( 0, 1.0) panelvisual.filter_support.Update() panelvisual.smooth_filter_support.SetInputConnection(panelvisual.filter_support.GetOutputPort()) panelvisual.smooth_filter_support.SetNumberOfIterations(15) panelvisual.smooth_filter_support.SetRelaxationFactor(0.1) panelvisual.smooth_filter_support.FeatureEdgeSmoothingOff() panelvisual.smooth_filter_support.BoundarySmoothingOn() panelvisual.normals_support.SetInputConnection(panelvisual.smooth_filter_support.GetOutputPort()) panelvisual.normals_support.SetFeatureAngle(90) panelvisual.normals_support.ConsistencyOff() panelvisual.normals_support.SplittingOff() panelvisual.normals_support.AutoOrientNormalsOff() panelvisual.normals_support.ComputePointNormalsOn() panelvisual.normals_support.ComputeCellNormalsOff() panelvisual.normals_support.NonManifoldTraversalOff() panelvisual.triangles_support.SetInputConnection(panelvisual.normals_support.GetOutputPort()) 
panelvisual.strips_support.SetInputConnection(panelvisual.triangles_support.GetOutputPort()) panelvisual.mapper_support.SetInputConnection(panelvisual.strips_support.GetOutputPort()) panelvisual.mapper_support.SetScalarRange(panelvisual.image_support.GetPointData().GetScalars().GetRange()) panelvisual.actor_support.SetMapper(panelvisual.mapper_support) panelvisual.actor_support.GetProperty().SetOpacity(0.05) panelvisual.renderer_amp_real.AddActor( panelvisual.actor_support ) if self.visual_amp_real is not None or self.visual_support is not None: if self.visual_phase_real is not None: panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_phase_real) panelvisual.renderer_phase_real.SetViewport(1,1,1,1) panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_amp_real) panelvisual.renderer_amp_real.ResetCamera() panelvisual.hboxrender.Layout() panelvisual.Layout() panelvisual.Show() if self.visual_amp_recip is not None: if self.visual_phase_recip is not None: panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_phase_recip) panelvisual.renderer_phase_recip.SetViewport(1,1,1,1) panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_amp_recip) panelvisual.renderer_amp_recip.ResetCamera() panelvisual.hboxrender.Layout() panelvisual.Layout() panelvisual.Show() if (self.visual_amp_real is not None or self.visual_support is not None) and self.visual_amp_recip is not None: panelvisual.renderer_amp_real.SetViewport(0,0,0.5,1) panelvisual.renderer_amp_recip.SetViewport(0.5,0,1,1) if (self.visual_amp_real is not None or self.visual_support is not None) and self.visual_amp_recip is None: panelvisual.renderer_amp_real.SetViewport(0,0,1,1) panelvisual.renderer_amp_recip.SetViewport(0,0,0,0) if (self.visual_amp_real is None and self.visual_support is None) and self.visual_amp_recip is not None: panelvisual.renderer_amp_real.SetViewport(0,0,0,0) panelvisual.renderer_amp_recip.SetViewport(0,0,1,1) panelvisual.RefreshSceneFull(gotovisual=True) def PrepareVisualisation2D(self,pipelineitem): panelvisual = self.ancestor.GetPage(1) panelvisual.data = None panelvisual.widget.SetInteractor(panelvisual.renWin) panelvisual.widget.SetEnabled( 0 ) r = float(panelvisual.r)/255.0 g = float(panelvisual.g)/255.0 b = float(panelvisual.b)/255.0 panelvisual.renderer_amp_real.SetBackground(r, g, b) panelvisual.renderer_phase_real.SetBackground(r, g, b) panelvisual.renderer_amp_recip.SetBackground(r, g, b) panelvisual.renderer_phase_recip.SetBackground(r, g, b) if (self.visual_amp_real is not None) or (self.visual_amp_recip is not None) or (self.visual_support is not None): panelvisual.renderer_amp_real.RemoveAllViewProps() panelvisual.renderer_phase_real .RemoveAllViewProps() panelvisual.renderer_amp_recip.RemoveAllViewProps() panelvisual.renderer_phase_recip.RemoveAllViewProps() panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_amp_real) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_phase_real) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_amp_recip) panelvisual.renWin.GetRenderWindow().RemoveRenderer(panelvisual.renderer_phase_recip) panelvisual.renWin.GetRenderWindow().Modified() panelvisual.renWin.SetInteractorStyle(panelvisual.style2D) panelvisual.SetPhaseVisualButtons() if self.visual_amp_real is not None: panelvisual.flat_data_amp_real = (self.visual_amp_real).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_amp_real = numpy_support.numpy_to_vtk(panelvisual.flat_data_amp_real) 
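        # In this 2D code path there is no isosurfacing: each array is passed
        # through an image-to-colors stage (color_*) driven by its lookup table
        # and drawn with a 2D actor (actor2D_*) instead of a surface pipeline.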
points_amp_real = panelvisual.image_amp_real.GetPointData() points_amp_real.SetScalars(panelvisual.vtk_data_array_amp_real) panelvisual.image_amp_real.SetDimensions(self.visual_amp_real.shape) panelvisual.image_amp_real.Modified() panelvisual.lut_amp_real.SetNumberOfTableValues(256) panelvisual.lut_amp_real.SetTableRange(panelvisual.image_amp_real.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[0][0]][1] if self.ancestor.GetPage(0).cmls[0][1] == 0: for k in range(256): panelvisual.lut_amp_real.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_amp_real.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_amp_real.SetRamp(0) panelvisual.lut_amp_real.Build() panelvisual.scalebar_amp_real.SetTitle("") panelvisual.scalebar_amp_real.SetLookupTable(panelvisual.lut_amp_real) panelvisual.color_amp_real.SetLookupTable(panelvisual.lut_amp_real) panelvisual.color_amp_real.SetInputData(panelvisual.image_amp_real) panelvisual.color_amp_real.Update() panelvisual.actor2D_amp_real.GetMapper().SetInputConnection(panelvisual.color_amp_real.GetOutputPort()) panelvisual.renderer_amp_real.AddActor2D(panelvisual.actor2D_amp_real) panelvisual.renderer_amp_real.AddActor2D(panelvisual.scalebar_amp_real) panelvisual.renderer_amp_real.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_amp_real.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_amp_real.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_phase_real is not None: panelvisual.flat_data_phase_real = (self.visual_phase_real).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_phase_real = numpy_support.numpy_to_vtk(panelvisual.flat_data_phase_real) points_phase_real = panelvisual.image_phase_real.GetPointData() points_phase_real.SetScalars(panelvisual.vtk_data_array_phase_real) panelvisual.image_phase_real.SetDimensions(self.visual_phase_real.shape) panelvisual.image_phase_real.Modified() panelvisual.lut_phase_real.SetNumberOfTableValues(256) panelvisual.lut_phase_real.SetTableRange(panelvisual.image_phase_real.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[1][0]][1] if self.ancestor.GetPage(0).cmls[1][1] == 0: for k in range(256): panelvisual.lut_phase_real.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_phase_real.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_phase_real.SetRamp(0) panelvisual.lut_phase_real.Build() panelvisual.scalebar_phase_real.SetTitle("") panelvisual.scalebar_phase_real.SetLookupTable(panelvisual.lut_phase_real) panelvisual.color_phase_real.SetLookupTable(panelvisual.lut_phase_real) panelvisual.color_phase_real.SetInputData(panelvisual.image_phase_real) panelvisual.actor2D_phase_real.GetMapper().SetInputConnection(panelvisual.color_phase_real.GetOutputPort()) panelvisual.renderer_phase_real.AddActor2D(panelvisual.actor2D_phase_real) panelvisual.renderer_phase_real.AddActor2D(panelvisual.scalebar_phase_real) panelvisual.renderer_phase_real.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_phase_real.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_phase_real.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_amp_recip is not None: panelvisual.flat_data_amp_recip= (self.visual_amp_recip).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_amp_recip = 
numpy_support.numpy_to_vtk(panelvisual.flat_data_amp_recip) panelvisual.points_amp_recip = panelvisual.image_amp_recip.GetPointData() panelvisual.points_amp_recip.SetScalars(panelvisual.vtk_data_array_amp_recip) panelvisual.image_amp_recip.SetDimensions(self.visual_amp_recip.shape) panelvisual.image_amp_recip.Modified() panelvisual.lut_amp_recip.SetNumberOfTableValues(256) panelvisual.lut_amp_recip.SetTableRange(panelvisual.image_amp_recip.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[2][0]][1] if self.ancestor.GetPage(0).cmls[2][1] == 0: for k in range(256): panelvisual.lut_amp_recip.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_amp_recip.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_amp_recip.SetRamp(0) panelvisual.lut_amp_recip.Build() panelvisual.scalebar_amp_recip.SetTitle("") panelvisual.scalebar_amp_recip.SetLookupTable(panelvisual.lut_amp_recip) panelvisual.color_amp_recip.SetLookupTable(panelvisual.lut_amp_recip) panelvisual.color_amp_recip.SetInputData(panelvisual.image_amp_recip) panelvisual.actor2D_amp_recip.GetMapper().SetInputConnection(panelvisual.color_amp_recip.GetOutputPort()) panelvisual.renderer_amp_recip.AddActor2D(panelvisual.actor2D_amp_recip) panelvisual.renderer_amp_recip.AddActor2D(panelvisual.scalebar_amp_recip) panelvisual.renderer_amp_recip.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_amp_recip.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_amp_recip.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_phase_recip is not None: panelvisual.flat_data_phase_recip = (self.visual_phase_recip).transpose(2,1,0).flatten(); panelvisual.vtk_data_array_phase_recip = numpy_support.numpy_to_vtk(panelvisual.flat_data_phase_recip) points_phase_recip = panelvisual.image_phase_recip.GetPointData() points_phase_recip.SetScalars(panelvisual.vtk_data_array_phase_recip) panelvisual.image_phase_recip.SetDimensions(self.visual_phase_recip.shape) panelvisual.image_phase_recip.Modified() panelvisual.lut_phase_recip.SetNumberOfTableValues(256) panelvisual.lut_phase_recip.SetTableRange(panelvisual.image_phase_recip.GetPointData().GetScalars().GetRange()) lutsource = self.ancestor.GetPage(0).cms[self.ancestor.GetPage(0).cmls[3][0]][1] if self.ancestor.GetPage(0).cmls[3][1] == 0: for k in range(256): panelvisual.lut_phase_recip.SetTableValue(k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) else: for k in range(256): panelvisual.lut_phase_recip.SetTableValue(255-k, lutsource[k][0], lutsource[k][1], lutsource[k][2], 1) panelvisual.lut_phase_recip.SetRamp(0) panelvisual.lut_phase_recip.Build() panelvisual.scalebar_phase_recip.SetTitle("") panelvisual.scalebar_phase_recip.SetLookupTable(panelvisual.lut_phase_recip) panelvisual.color_phase_recip.SetLookupTable(panelvisual.lut_phase_recip) panelvisual.color_phase_recip.SetInputData(panelvisual.image_phase_recip) panelvisual.actor2D_phase_recip.GetMapper().SetInputConnection(panelvisual.color_phase_recip.GetOutputPort()) panelvisual.renderer_phase_recip.AddActor2D(panelvisual.actor2D_phase_recip) panelvisual.renderer_phase_recip.AddActor2D(panelvisual.scalebar_phase_recip) panelvisual.renderer_phase_recip.GetActiveCamera().SetPosition(0,0,1) panelvisual.renderer_phase_recip.GetActiveCamera().SetViewUp(0,1,0) panelvisual.renderer_phase_recip.GetActiveCamera().SetFocalPoint(0,0,0) if self.visual_amp_real is not None: 
        panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_amp_real)
        panelvisual.renderer_amp_real.ResetCamera()
        panelvisual.hboxrender.Layout()
        panelvisual.Layout()
        panelvisual.Show()
    if self.visual_phase_real is not None:
        panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_phase_real)
        panelvisual.renderer_phase_real.ResetCamera()
        panelvisual.hboxrender.Layout()
        panelvisual.Layout()
        panelvisual.Show()
    if self.visual_amp_recip is not None:
        panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_amp_recip)
        panelvisual.renderer_amp_recip.ResetCamera()
        panelvisual.hboxrender.Layout()
        panelvisual.Layout()
        panelvisual.Show()
    if self.visual_phase_recip is not None:
        panelvisual.renWin.GetRenderWindow().AddRenderer(panelvisual.renderer_phase_recip)
        panelvisual.renderer_phase_recip.ResetCamera()
        panelvisual.hboxrender.Layout()
        panelvisual.Layout()
        panelvisual.Show()
    # Pick a viewport layout for whichever combination of views is active.
    if (self.visual_amp_real is not None) and (self.visual_amp_recip is not None) and (self.visual_phase_real is not None) and (self.visual_phase_recip is not None):
        panelvisual.renderer_amp_real.SetViewport(0, 0.5, 0.5, 1.0)
        panelvisual.renderer_phase_real.SetViewport(0.5, 0.5, 1, 1.0)
        panelvisual.renderer_amp_recip.SetViewport(0, 0, 0.5, 0.5)
        panelvisual.renderer_phase_recip.SetViewport(0.5, 0, 1, 0.5)
    elif (self.visual_amp_real is not None) and (self.visual_phase_real is not None):
        panelvisual.renderer_amp_real.SetViewport(0, 0, 0.5, 1)
        panelvisual.renderer_phase_real.SetViewport(0.5, 0, 1, 1)
    elif (self.visual_amp_recip is not None) and (self.visual_phase_recip is not None):
        panelvisual.renderer_amp_recip.SetViewport(0, 0, 0.5, 1)
        panelvisual.renderer_phase_recip.SetViewport(0.5, 0, 1, 1)
    elif (self.visual_amp_real is not None) and (self.visual_amp_recip is not None):
        panelvisual.renderer_amp_real.SetViewport(0, 0, 0.5, 1)
        panelvisual.renderer_amp_recip.SetViewport(0.5, 0, 1, 1)
    elif self.visual_amp_real is not None:
        panelvisual.renderer_amp_real.SetViewport(0, 0, 1, 1)
    panelvisual.RefreshSceneFull(gotovisual=True)


class SequenceBase():
    """Base class for pipeline phasing sequences: loads the experimental
    amplitudes and the support, verifies shapes and prepares visualisation."""
    def __init__(self, parent, pipelineitem):
        self.parent = parent
        self.pipelineitem = pipelineitem
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.expdata_path = pipelineitem.exp_amps.objectpath.GetValue()
            if self.LoadExpData():
                self.parent.pipeline_started = False
                return
            self.support_path = pipelineitem.support.objectpath.GetValue()
            if self.LoadSupport():
                self.parent.pipeline_started = False
                return
            if self.ShapeCheck():
                self.parent.pipeline_started = False
                return
            self.startiter = int(self.pipelineitem.start_iter)
            self.numiter = int(self.pipelineitem.niter.value.GetValue())
            self.Prepare()

    def LoadExpData(self):
        # Returns True on failure so callers can abort the pipeline.
        try:
            self.parent.expdata = LoadArray(self.parent, self.expdata_path)
            if self.pipelineitem.chkbox_sqrt_expamps.GetValue():
                numpy.sqrt(self.parent.expdata, self.parent.expdata)
            self.parent.expdata[:] = WrapArray(self.parent.expdata).copy()
        except Exception:
            msg = "Could not load array from: \n" + self.expdata_path + "\nPlease check the log."
            self.MsgDlg(msg)
            return True
        else:
            return False

    def LoadSupport(self):
        if self.support_path == "":
            # No path given: an existing in-memory support array is required.
            try:
                assert type(self.parent.support).__module__ == numpy.__name__
            except Exception:
                msg = "No existing Support array found."
                self.MsgDlg(msg)
                return True
        else:
            try:
                self.parent.support = LoadArray(self.parent, self.support_path)
            except Exception:
                msg = "Could not load array from: \n" + self.support_path + "\nPlease check the log."
                self.MsgDlg(msg)
                return True
            else:
                return False

    def ShapeCheck(self):
        if self.parent.expdata.shape != self.parent.support.shape:
            msg = "Array dimensions are inconsistent."
            self.MsgDlg(msg)
            self.parent.seqdata = None
            try:
                self.parent.visual_amp_real = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_recip = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_support = None
            except Exception:
                pass
            return True

    def Prepare(self):
        # Build the visualisation only on a fresh run with at least one view enabled.
        if self.startiter == 0 and (self.parent.visual_amp_real is not None or self.parent.visual_support is not None or self.parent.visual_amp_recip is not None):
            if self.parent.expdata.shape[2] == 1:
                PrepareVisualisation2D(self.parent, self.pipelineitem)
            else:
                PrepareVisualisation(self.parent, self.pipelineitem)

    def MsgDlg(self, msg):
        dlg = wx.MessageDialog(self.parent, msg, "Pipeline Message", wx.OK)
        dlg.ShowModal()
        dlg.Destroy()

    def ThreadAlg(self):
        pass

    def StartPhasing(self):
        # Run the algorithm on a daemon thread so the GUI stays responsive.
        self.thread = threading.Thread(target=self.ThreadAlg)
        self.thread.daemon = True
        self.thread.start()


class SequenceBaseMask(SequenceBase):
    """Sequence base for algorithms that also require a Fourier-space mask."""
    def __init__(self, parent, pipelineitem):
        self.parent = parent
        self.pipelineitem = pipelineitem
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.mask_path = self.pipelineitem.mask.objectpath.GetValue()
            if self.LoadMask():
                self.parent.pipeline_started = False
                return
            SequenceBase.__init__(self, parent, pipelineitem)

    def ShapeCheck(self):
        if not (self.parent.expdata.shape == self.parent.support.shape and self.parent.expdata.shape == self.parent.mask.shape):
            msg = "Array dimensions are inconsistent."
            self.MsgDlg(msg)
            self.parent.seqdata = None
            try:
                self.parent.visual_amp_real = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_recip = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_support = None
            except Exception:
                pass
            return True

    def LoadMask(self):
        try:
            self.parent.mask = LoadArray(self.parent, self.mask_path)
            self.parent.mask[:] = WrapArray(self.parent.mask).copy()
        except Exception:
            msg = "Could not load array from: \n" + self.mask_path + "\nPlease check the log."
            self.MsgDlg(msg)
            return True
        else:
            return False


class SequenceBasePC(SequenceBaseMask):
    """Sequence base for partial-coherence algorithms, which additionally
    maintain a point-spread function (PSF) array."""
    def __init__(self, parent, pipelineitem):
        self.parent = parent
        self.pipelineitem = pipelineitem
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.expdata_path = pipelineitem.exp_amps.objectpath.GetValue()
            if self.LoadExpData():
                self.parent.pipeline_started = False
                return
            self.support_path = pipelineitem.support.objectpath.GetValue()
            if self.LoadSupport():
                self.parent.pipeline_started = False
                return
            self.gammaHWHM = float(self.pipelineitem.gammaHWHM.value.GetValue())
            if self.LoadPSF():
                self.parent.pipeline_started = False
                return
            self.mask_path = self.pipelineitem.mask.objectpath.GetValue()
            if self.LoadMask():
                self.parent.pipeline_started = False
                return
            if self.ShapeCheck():
                self.parent.pipeline_started = False
                return
            self.startiter = int(self.pipelineitem.start_iter)
            self.numiter = int(self.pipelineitem.niter.value.GetValue())
            self.Prepare()
            self.niterrlpre = int(self.pipelineitem.niterrlpre.value.GetValue())
            self.niterrl = int(self.pipelineitem.niterrl.value.GetValue())
            self.niterrlinterval = int(self.pipelineitem.niterrlinterval.value.GetValue())
            self.accel = int(self.pipelineitem.accel.value.GetValue())
            self.zex = int(self.pipelineitem.zedims[0].value.GetValue())
            self.zey = int(self.pipelineitem.zedims[1].value.GetValue())
            self.zez = int(self.pipelineitem.zedims[2].value.GetValue())
            if self.pipelineitem.chkbox_reset_gamma.GetValue():
                self.reset_gamma = 1
            else:
                self.reset_gamma = 0

    def ShapeCheck(self):
        if not (self.parent.expdata.shape == self.parent.support.shape and self.parent.expdata.shape == self.parent.mask.shape and self.parent.expdata.shape == self.parent.psf.shape):
            msg = "Array dimensions are inconsistent."
            self.MsgDlg(msg)
            self.parent.seqdata = None
            try:
                self.parent.visual_amp_real = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_recip = None
            except Exception:
                pass
            try:
                self.parent.visual_amp_support = None
            except Exception:
                pass
            return True

    def LoadPSF(self):
        try:
            from ..lib.prfftw import lorentzftfill
            if self.parent.psf is None:
                self.parent.psf = NewArray(self.parent, *self.parent.seqdata.shape)
                lorentzftfill(self.parent.psf, self.gammaHWHM)
        except MemoryError:
            msg = "Could not load PSF array. Insufficient memory."
            self.MsgDlg(msg)
            return True
        else:
            return False


class Sequence_HIO(SequenceBase):
    def __init__(self, parent, pipelineitem):
        SequenceBase.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting HIO Algorithm...")
        self.parent.thread_register.put(1)
        HIO(self.parent, self.beta, self.startiter, self.numiter)
        self.parent.thread_register.get()


class Sequence_ER(SequenceBase):
    def __init__(self, parent, pipelineitem):
        SequenceBase.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting ER Algorithm...")
        self.parent.thread_register.put(1)
        ER(self.parent, self.startiter, self.numiter)
        self.parent.thread_register.get()


class Sequence_ERMask(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            if pipelineitem.chkbox.GetValue():
                self.numiter_relax = int(pipelineitem.niter_relax.value.GetValue())
            else:
                self.numiter_relax = 0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting ER Mask Algorithm...")
        self.parent.thread_register.put(1)
        ERMask(self.parent, self.startiter, self.numiter, self.numiter_relax)
        self.parent.thread_register.get()


class Sequence_HIOMask(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            if pipelineitem.chkbox.GetValue():
                self.numiter_relax = int(pipelineitem.niter_relax.value.GetValue())
            else:
                self.numiter_relax = 0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting HIO Algorithm...")
        self.parent.thread_register.put(1)
        HIOMask(self.parent, self.beta, self.startiter, self.numiter, self.numiter_relax)
        self.parent.thread_register.get()


class Sequence_HIOPlus(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting HIO+ Algorithm...")
        self.parent.thread_register.put(1)
        HIOPlus(self.parent, self.beta, self.startiter, self.numiter)
        self.parent.thread_register.get()


class Sequence_HPR(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            if pipelineitem.chkbox.GetValue():
                self.numiter_relax = int(pipelineitem.niter_relax.value.GetValue())
            else:
                self.numiter_relax = 0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting HPR Algorithm...")
        self.parent.thread_register.put(1)
        HPR(self.parent, self.beta, self.startiter, self.numiter, self.numiter_relax)
        self.parent.thread_register.get()


class Sequence_RAAR(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            if pipelineitem.chkbox.GetValue():
                self.numiter_relax = int(pipelineitem.niter_relax.value.GetValue())
            else:
                self.numiter_relax = 0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting RAAR Algorithm...")
        self.parent.thread_register.put(1)
        RAAR(self.parent, self.beta, self.startiter, self.numiter, self.numiter_relax)
        self.parent.thread_register.get()


class Sequence_POER(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting PO-ER Algorithm...")
        self.parent.thread_register.put(1)
        POER(self.parent, self.startiter, self.numiter)
        self.parent.thread_register.get()


class Sequence_PCHIO(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.phasemax = float(self.pipelineitem.phasemax.value.GetValue())
            self.phasemin = float(self.pipelineitem.phasemin.value.GetValue())
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting PCHIO Algorithm...")
        self.parent.thread_register.put(1)
        PCHIO(self.parent, self.beta, self.startiter, self.numiter, self.phasemax, self.phasemin)
        self.parent.thread_register.get()


class Sequence_PGCHIO(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.phasemax = float(self.pipelineitem.phasemax.value.GetValue())
            self.phasemin = float(self.pipelineitem.phasemin.value.GetValue())
            self.qx = float(self.pipelineitem.qx.value.GetValue())
            self.qy = float(self.pipelineitem.qy.value.GetValue())
            self.qz = float(self.pipelineitem.qz.value.GetValue())
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting PGCHIO Algorithm...")
        self.parent.thread_register.put(1)
        PGCHIO(self.parent, self.beta, self.startiter, self.numiter, self.phasemax, self.phasemin, self.qx, self.qy, self.qz)
        self.parent.thread_register.get()


class Sequence_CSHIO(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.cs_p = float(self.pipelineitem.cs_p.value.GetValue())
            self.cs_epsilon = float(self.pipelineitem.cs_epsilon.value.GetValue())
            self.cs_epsilon_min = float(self.pipelineitem.cs_epsilon_min.value.GetValue())
            self.cs_d = float(self.pipelineitem.cs_d.value.GetValue())
            self.cs_eta = float(self.pipelineitem.cs_eta.value.GetValue())
            if self.pipelineitem.chkbox_relax.GetValue():
                self.relax = 1
            else:
                self.relax = 0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting CSHIO Algorithm...")
        self.parent.thread_register.put(1)
        CSHIO(self.parent, self.beta, self.startiter, self.numiter, self.cs_p, self.cs_epsilon, self.cs_epsilon_min, self.cs_d, self.cs_eta, self.relax)
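        # Each ThreadAlg pairs thread_register.put(1) with a closing .get() (the
        # .get() for this run follows below), so a non-empty register marks an
        # active phasing worker; this reading is inferred from the usage pattern.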
self.parent.thread_register.get() class Sequence_HIOMaskPC(SequenceBasePC): def __init__(self, parent, pipelineitem): SequenceBasePC.__init__(self, parent, pipelineitem) if parent.pipeline_started == True: if parent.citer_flow[1] == 2: return; self.beta = float(self.pipelineitem.beta.value.GetValue()) self.StartPhasing() def ThreadAlg(self): self.parent.ancestor.GetPage(0).queue_info.put("Starting HIO Mask PC Algorithm...") self.parent.thread_register.put(1) HIOMaskPC(self.parent, self.beta, self.startiter, self.numiter, self.niterrlpre, self.niterrl, self.niterrlinterval, self.gammaHWHM, self.zex, self.zey, self.zez, self.reset_gamma, self.accel) self.parent.thread_register.get() class Sequence_HPRMaskPC(SequenceBasePC): def __init__(self, parent, pipelineitem): SequenceBasePC.__init__(self, parent, pipelineitem) if parent.pipeline_started == True: if parent.citer_flow[1] == 2: return; self.beta = float(self.pipelineitem.beta.value.GetValue()) self.StartPhasing() def ThreadAlg(self): self.parent.ancestor.GetPage(0).queue_info.put("Starting HPR Mask PC Algorithm...") self.parent.thread_register.put(1) HPRMaskPC(self.parent, self.beta, self.startiter, self.numiter, self.niterrlpre, self.niterrl, self.niterrlinterval, self.gammaHWHM, self.zex, self.zey, self.zez, self.reset_gamma, self.accel) self.parent.thread_register.get() class Sequence_RAARMaskPC(SequenceBasePC): def __init__(self, parent, pipelineitem): SequenceBasePC.__init__(self, parent, pipelineitem) if parent.pipeline_started == True: if parent.citer_flow[1] == 2: return; self.beta = float(self.pipelineitem.beta.value.GetValue()) self.StartPhasing() def ThreadAlg(self): self.parent.ancestor.GetPage(0).queue_info.put("Starting RAAR Mask PC Algorithm...") self.parent.thread_register.put(1) RAARMaskPC(self.parent, self.beta, self.startiter, self.numiter, self.niterrlpre, self.niterrl, self.niterrlinterval, self.gammaHWHM, self.zex, self.zey, self.zez, self.reset_gamma, self.accel) self.parent.thread_register.get() class Sequence_ERMaskPC(SequenceBasePC): def __init__(self, parent, pipelineitem): SequenceBasePC.__init__(self, parent, pipelineitem) if parent.pipeline_started == True: if parent.citer_flow[1] == 2: return; self.StartPhasing() def ThreadAlg(self): self.parent.ancestor.GetPage(0).queue_info.put("Starting ER Mask PC Algorithm...") self.parent.thread_register.put(1) ERMaskPC(self.parent, self.startiter, self.numiter, self.niterrlpre, self.niterrl, self.niterrlinterval, self.gammaHWHM, self.zex, self.zey, self.zez, self.reset_gamma, self.accel) self.parent.thread_register.get() class Sequence_ShrinkWrap(SequenceBaseMask): def __init__(self, parent, pipelineitem): SequenceBaseMask.__init__(self, parent, pipelineitem) if parent.pipeline_started == True: if parent.citer_flow[1] == 2: return; from ..lib.prfftw import threshold from ..lib.prfftw import rangereplace from ..lib.prfftw import convolve_sw from ..lib.prfftw import medianfilter from ..lib.prfftw import wrap_nomem from ..lib.prfftw import copy_amp from ..lib.prfftw import copy_abs from ..lib.prfftw import max_value self.threshold = threshold self.rangereplace = rangereplace self.convolve = convolve_sw self.medianfilter = medianfilter self.wrap = wrap_nomem self.copy_abs = copy_abs self.copy_amp = copy_amp self.maxvalue = numpy.zeros((2), dtype=numpy.double) self.max_value = max_value self.beta = float(self.pipelineitem.beta.value.GetValue()) self.startiter = int(self.pipelineitem.start_iter) self.numiter = int(self.pipelineitem.niter.value.GetValue()) self.cycle = 
int(self.pipelineitem.cycle.value.GetValue()) self.phasemax = float(self.pipelineitem.phasemax.value.GetValue()) self.phasemin = float(self.pipelineitem.phasemin.value.GetValue()) self.cs_p = float(self.pipelineitem.cs_p.value.GetValue()) self.cs_epsilon = float(self.pipelineitem.cs_epsilon.value.GetValue()) self.cs_epsilon_min = float(self.pipelineitem.cs_epsilon_min.value.GetValue()) self.cs_d = float(self.pipelineitem.cs_d.value.GetValue()) self.cs_eta = float(self.pipelineitem.cs_eta.value.GetValue()) if self.pipelineitem.chkbox_relax.GetValue() == True: self.cs_relax = 1 else: self.cs_relax = 0 self.gc_phasemax = float(self.pipelineitem.gc_phasemax.value.GetValue()) self.gc_phasemin = float(self.pipelineitem.gc_phasemin.value.GetValue()) self.qx = float(self.pipelineitem.qx.value.GetValue()) self.qy = float(self.pipelineitem.qy.value.GetValue()) self.qz = float(self.pipelineitem.qz.value.GetValue()) self.sigma = float(self.pipelineitem.sigma.value.GetValue()) self.frac = float(self.pipelineitem.frac.value.GetValue()) if self.pipelineitem.chkbox_reweight.GetValue() == True: self.reweightiter = int(self.pipelineitem.reweightiter.value.GetValue()) else: self.reweightiter = -1 self.numsoiter = int(self.pipelineitem.nsoiter.value.GetValue()) self.dtaumax = float(self.pipelineitem.dtaumax.value.GetValue()) self.dtaumin = float(self.pipelineitem.dtaumin.value.GetValue()) self.psiexitratio = float(self.pipelineitem.psiexitratio.value.GetValue()) self.psiexiterror = float(self.pipelineitem.psiexiterror.value.GetValue()) self.psiresetratio = float(self.pipelineitem.psiresetratio.value.GetValue()) self.taumax = float(self.pipelineitem.taumax.value.GetValue()) self.alpha = 1.0 self.RSConst = self.pipelineitem.rbrs.GetStringSelection() if self.LoadTmp(): self.parent.pipeline_started = False return self.StartPhasing() def LoadTmp(self): try: from ..lib.prfftw import gaussian_fill self.parent.temparray = NewArray(self, *self.parent.support.shape) self.parent.temparray2 = NewArray(self, *self.parent.support.shape) gaussian_fill(self.parent.temparray, self.sigma) self.wrap(self.parent.temparray, self.parent.temparray2, 1) except: msg = "Insufficient memory for temporary arrays." self.MsgDlg(msg) return True else: return False def UpdateSupport(self): if self.parent.ancestor.GetPage(0).citer_flow[4] > 0: self.parent.ancestor.GetPage(0).queue_info.put("Updating support ...") self.copy_abs(self.parent.seqdata, self.parent.support) self.max_value(self.parent.support.real, self.maxvalue) self.threshold(self.parent.support, (self.frac*self.maxvalue[0]), self.maxvalue[0], 0.0) self.medianfilter(self.parent.support, self.parent.temparray2, 3,3,3, 0.0) self.wrap(self.parent.support, self.parent.temparray2, 1) self.convolve(self.parent.support, self.parent.temparray) self.wrap(self.parent.support, self.parent.temparray2, -1) self.rangereplace(self.parent.support, (self.frac*self.maxvalue[0]), sys.float_info.max, 0.0, 1.0) self.copy_amp(self.parent.support, self.parent.visual_support) self.parent.ancestor.GetPage(0).queue_info.put("... 
done.") def UpdateVisualSupport(self): if self.parent.ancestor.GetPage(0).citer_flow[4] > 0: wx.CallAfter(self.parent.ancestor.GetPage(1).UpdateSupport,) def GetIterVars(self, fstartiter, fnumiter, ii, fcycle): fsw_startiter = fstartiter + (ii * fcycle) if fnumiter < ((ii+1) * fcycle): fsw_numiter = fnumiter - (ii * fcycle) else: fsw_numiter = fcycle return fsw_startiter, fsw_numiter def ThreadAlg(self): self.parent.ancestor.GetPage(0).queue_info.put("Starting shrink wrap algorithm using "+self.RSConst +"..." ) self.parent.thread_register.put(1) IterLoops = (self.numiter + self.cycle - 1)//self.cycle if self.RSConst == 'HIO': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) HIO(self.parent, self.beta, sw_startiter, sw_numiter) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'PCHIO': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) PCHIO(self.parent, self.beta, sw_startiter, sw_numiter, self.phasemax, self.phasemin) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'PGCHIO': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) PGCHIO(self.parent, self.beta, sw_startiter, sw_numiter, self.gc_phasemax, self.gc_phasemin, self.qx, self.qy, self.qz) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'HIOMask': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) HIOMask(self.parent, self.beta, sw_startiter, sw_numiter, 0) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'HIOPlus': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) HIOPlus(self.parent, self.beta, sw_startiter, sw_numiter) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'ER': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) ER(self.parent, sw_startiter, sw_numiter) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'HPR': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) HPR(self.parent, self.beta, sw_startiter, sw_numiter,0) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'RAAR': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) RAAR(self.parent, self.beta, sw_startiter, sw_numiter,0) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() self.UpdateVisualSupport() if self.RSConst == 'CSHIO': for i in range( IterLoops ): sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle) CSHIO(self.parent, self.beta, sw_startiter, sw_numiter, self.cs_p, self.cs_epsilon, self.cs_epsilon_min, self.cs_d, self.cs_eta, self.cs_relax) if self.parent.ancestor.GetPage(0).citer_flow[1] == 2: break self.UpdateSupport() 
                self.UpdateVisualSupport()
        if self.RSConst == 'SO2D':
            alpha1, beta1 = self.alpha, self.beta
            for i in range(IterLoops):
                sw_startiter, sw_numiter = self.GetIterVars(self.startiter, self.numiter, i, self.cycle)
                alpha1, beta1 = SO2D(self.parent, alpha1, beta1, sw_startiter, sw_numiter, self.numsoiter, self.reweightiter, self.dtaumax, self.dtaumin, self.psiexitratio, self.psiexiterror, self.psiresetratio, self.taumax)
                if self.parent.ancestor.GetPage(0).citer_flow[1] == 2:
                    break
                self.UpdateSupport()
                self.UpdateVisualSupport()
        self.parent.thread_register.get()


class Sequence_SO2D(SequenceBaseMask):
    def __init__(self, parent, pipelineitem):
        SequenceBaseMask.__init__(self, parent, pipelineitem)
        if parent.pipeline_started:
            if parent.citer_flow[1] == 2:
                return
            self.beta = float(self.pipelineitem.beta.value.GetValue())
            self.startiter = int(self.pipelineitem.start_iter)
            self.numiter = int(self.pipelineitem.niter.value.GetValue())
            self.numsoiter = int(self.pipelineitem.nsoiter.value.GetValue())
            if self.pipelineitem.chkbox_reweight.GetValue():
                self.reweightiter = int(self.pipelineitem.reweightiter.value.GetValue())
            else:
                self.reweightiter = -1
            self.dtaumax = float(self.pipelineitem.dtaumax.value.GetValue())
            self.dtaumin = float(self.pipelineitem.dtaumin.value.GetValue())
            self.psiexitratio = float(self.pipelineitem.psiexitratio.value.GetValue())
            self.psiexiterror = float(self.pipelineitem.psiexiterror.value.GetValue())
            self.psiresetratio = float(self.pipelineitem.psiresetratio.value.GetValue())
            self.taumax = float(self.pipelineitem.taumax.value.GetValue())
            self.alpha = 1.0
            self.StartPhasing()

    def ThreadAlg(self):
        self.parent.ancestor.GetPage(0).queue_info.put("Starting SO2D Algorithm...")
        self.parent.thread_register.put(1)
        SO2D(self.parent, self.alpha, self.beta, self.startiter, self.numiter, self.numsoiter, self.reweightiter, self.dtaumax, self.dtaumin, self.psiexitratio, self.psiexiterror, self.psiresetratio, self.taumax)
        self.parent.thread_register.get()
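
# Illustrative sketch (hypothetical helper, not part of the pipeline): it mirrors
# Sequence_ShrinkWrap.GetIterVars above, showing how a run of `numiter` iterations
# is split into support-update cycles of length `cycle`.
def _shrinkwrap_cycles(startiter, numiter, cycle):
    # Yield (cycle_start, cycle_length) pairs exactly as GetIterVars computes them.
    iterloops = (numiter + cycle - 1) // cycle
    for ii in range(iterloops):
        fsw_startiter = startiter + ii * cycle
        if numiter < (ii + 1) * cycle:
            fsw_numiter = numiter - ii * cycle  # final, possibly shorter, cycle
        else:
            fsw_numiter = cycle
        yield fsw_startiter, fsw_numiter

# Example: list(_shrinkwrap_cycles(0, 25, 10)) == [(0, 10), (10, 10), (20, 5)],
# i.e. the support is updated after iterations 10, 20 and 25.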
/*
 * jquery.sparkline.js
 * Shipped with Flask-AdminLTE2 1.0.0 at
 * flask_adminlte2/static/plugins/jquery-sparkline/jquery.sparkline.js
 */
(function(document, Math, undefined) { // performance/minified-size optimization (function(factory) { if(typeof define === 'function' && define.amd) { define(['jquery'], factory); } else if (jQuery && !jQuery.fn.sparkline) { factory(jQuery); } } (function($) { 'use strict'; var UNSET_OPTION = {}, getDefaults, createClass, SPFormat, clipval, quartile, normalizeValue, normalizeValues, remove, isNumber, all, sum, addCSS, ensureArray, formatNumber, RangeMap, MouseHandler, Tooltip, barHighlightMixin, line, bar, tristate, discrete, bullet, pie, box, defaultStyles, initStyles, VShape, VCanvas_base, VCanvas_canvas, VCanvas_vml, pending, shapeCount = 0; /** * Default configuration settings */ getDefaults = function () { return { // Settings common to most/all chart types common: { type: 'line', lineColor: '#00f', fillColor: '#cdf', defaultPixelsPerValue: 3, width: 'auto', height: 'auto', composite: false, tagValuesAttribute: 'values', tagOptionsPrefix: 'spark', enableTagOptions: false, enableHighlight: true, highlightLighten: 1.4, tooltipSkipNull: true, tooltipPrefix: '', tooltipSuffix: '', disableHiddenCheck: false, numberFormatter: false, numberDigitGroupCount: 3, numberDigitGroupSep: ',', numberDecimalMark: '.', disableTooltips: false, disableInteraction: false }, // Defaults for line charts line: { spotColor: '#f80', highlightSpotColor: '#5f5', highlightLineColor: '#f22', spotRadius: 1.5, minSpotColor: '#f80', maxSpotColor: '#f80', lineWidth: 1, normalRangeMin: undefined, normalRangeMax: undefined, normalRangeColor: '#ccc', drawNormalOnTop: false, chartRangeMin: undefined, chartRangeMax: undefined, chartRangeMinX: undefined, chartRangeMaxX: undefined, tooltipFormat: new SPFormat('<span style="color: {{color}}">&#9679;</span> {{prefix}}{{y}}{{suffix}}') }, // Defaults for bar charts bar: { barColor: '#3366cc', negBarColor: '#f44', stackedBarColor: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00', '#dd4477', '#0099c6', '#990099'], zeroColor: undefined, nullColor: undefined, zeroAxis: true, barWidth: 4, barSpacing: 1, chartRangeMax: undefined, chartRangeMin: undefined, chartRangeClip: false, colorMap: undefined, tooltipFormat: new SPFormat('<span style="color: {{color}}">&#9679;</span> {{prefix}}{{value}}{{suffix}}') }, // Defaults for tristate charts tristate: { barWidth: 4, barSpacing: 1, posBarColor: '#6f6', negBarColor: '#f44', zeroBarColor: '#999', colorMap: {}, tooltipFormat: new SPFormat('<span style="color: {{color}}">&#9679;</span> {{value:map}}'), tooltipValueLookups: { map: { '-1': 'Loss', '0': 'Draw', '1': 'Win' } } }, // Defaults for discrete charts discrete: { lineHeight: 'auto', thresholdColor: undefined, thresholdValue: 0, chartRangeMax: undefined, chartRangeMin: undefined, chartRangeClip: false, tooltipFormat: new SPFormat('{{prefix}}{{value}}{{suffix}}') }, // Defaults for bullet charts bullet: { targetColor: '#f33', targetWidth: 3, // width of the target bar in pixels performanceColor: '#33f', rangeColors: ['#d3dafe', '#a8b6ff', '#7f94ff'], base: undefined, // set this to a number to change the base start number tooltipFormat: new SPFormat('{{fieldkey:fields}} - {{value}}'), tooltipValueLookups: { fields: {r: 'Range', p: 'Performance', t: 'Target'} } }, // Defaults for pie charts pie: { offset: 0, sliceColors: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00', '#dd4477', '#0099c6', '#990099'], borderWidth: 0, borderColor: '#000', tooltipFormat: new SPFormat('<span style="color: {{color}}">&#9679;</span> {{value}} ({{percent.1}}%)') }, // Defaults for box plots box: { 
raw: false, boxLineColor: '#000', boxFillColor: '#cdf', whiskerColor: '#000', outlierLineColor: '#333', outlierFillColor: '#fff', medianColor: '#f00', showOutliers: true, outlierIQR: 1.5, spotRadius: 1.5, target: undefined, targetColor: '#4a2', chartRangeMax: undefined, chartRangeMin: undefined, tooltipFormat: new SPFormat('{{field:fields}}: {{value}}'), tooltipFormatFieldlistKey: 'field', tooltipValueLookups: { fields: { lq: 'Lower Quartile', med: 'Median', uq: 'Upper Quartile', lo: 'Left Outlier', ro: 'Right Outlier', lw: 'Left Whisker', rw: 'Right Whisker'} } } }; }; // You can have tooltips use a css class other than jqstooltip by specifying tooltipClassname defaultStyles = '.jqstooltip { ' + 'position: absolute;' + 'left: 0px;' + 'top: 0px;' + 'visibility: hidden;' + 'background: rgb(0, 0, 0) transparent;' + 'background-color: rgba(0,0,0,0.6);' + 'filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000);' + '-ms-filter: "progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000)";' + 'color: white;' + 'font: 10px arial, san serif;' + 'text-align: left;' + 'white-space: nowrap;' + 'padding: 5px;' + 'border: 1px solid white;' + 'box-sizing: content-box;' + 'z-index: 10000;' + '}' + '.jqsfield { ' + 'color: white;' + 'font: 10px arial, san serif;' + 'text-align: left;' + '}'; /** * Utilities */ createClass = function (/* [baseclass, [mixin, ...]], definition */) { var Class, args; Class = function () { this.init.apply(this, arguments); }; if (arguments.length > 1) { if (arguments[0]) { Class.prototype = $.extend(new arguments[0](), arguments[arguments.length - 1]); Class._super = arguments[0].prototype; } else { Class.prototype = arguments[arguments.length - 1]; } if (arguments.length > 2) { args = Array.prototype.slice.call(arguments, 1, -1); args.unshift(Class.prototype); $.extend.apply($, args); } } else { Class.prototype = arguments[0]; } Class.prototype.cls = Class; return Class; }; /** * Wraps a format string for tooltips * {{x}} * {{x.2} * {{x:months}} */ $.SPFormatClass = SPFormat = createClass({ fre: /\{\{([\w.]+?)(:(.+?))?\}\}/g, precre: /(\w+)\.(\d+)/, init: function (format, fclass) { this.format = format; this.fclass = fclass; }, render: function (fieldset, lookups, options) { var self = this, fields = fieldset, match, token, lookupkey, fieldvalue, prec; return this.format.replace(this.fre, function () { var lookup; token = arguments[1]; lookupkey = arguments[3]; match = self.precre.exec(token); if (match) { prec = match[2]; token = match[1]; } else { prec = false; } fieldvalue = fields[token]; if (fieldvalue === undefined) { return ''; } if (lookupkey && lookups && lookups[lookupkey]) { lookup = lookups[lookupkey]; if (lookup.get) { // RangeMap return lookups[lookupkey].get(fieldvalue) || fieldvalue; } else { return lookups[lookupkey][fieldvalue] || fieldvalue; } } if (isNumber(fieldvalue)) { if (options.get('numberFormatter')) { fieldvalue = options.get('numberFormatter')(fieldvalue); } else { fieldvalue = formatNumber(fieldvalue, prec, options.get('numberDigitGroupCount'), options.get('numberDigitGroupSep'), options.get('numberDecimalMark')); } } return fieldvalue; }); } }); // convience method to avoid needing the new operator $.spformat = function(format, fclass) { return new SPFormat(format, fclass); }; clipval = function (val, min, max) { if (val < min) { return min; } if (val > max) { return max; } return val; }; quartile = function (values, q) { var vl; if (q === 2) { vl = 
Math.floor(values.length / 2); return values.length % 2 ? values[vl] : (values[vl-1] + values[vl]) / 2; } else { if (values.length % 2 ) { // odd vl = (values.length * q + q) / 4; return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1]; } else { //even vl = (values.length * q + 2) / 4; return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1]; } } }; normalizeValue = function (val) { var nf; switch (val) { case 'undefined': val = undefined; break; case 'null': val = null; break; case 'true': val = true; break; case 'false': val = false; break; default: nf = parseFloat(val); if (val == nf) { val = nf; } } return val; }; normalizeValues = function (vals) { var i, result = []; for (i = vals.length; i--;) { result[i] = normalizeValue(vals[i]); } return result; }; remove = function (vals, filter) { var i, vl, result = []; for (i = 0, vl = vals.length; i < vl; i++) { if (vals[i] !== filter) { result.push(vals[i]); } } return result; }; isNumber = function (num) { return !isNaN(parseFloat(num)) && isFinite(num); }; formatNumber = function (num, prec, groupsize, groupsep, decsep) { var p, i; num = (prec === false ? parseFloat(num).toString() : num.toFixed(prec)).split(''); p = (p = $.inArray('.', num)) < 0 ? num.length : p; if (p < num.length) { num[p] = decsep; } for (i = p - groupsize; i > 0; i -= groupsize) { num.splice(i, 0, groupsep); } return num.join(''); }; // determine if all values of an array match a value // returns true if the array is empty all = function (val, arr, ignoreNull) { var i; for (i = arr.length; i--; ) { if (ignoreNull && arr[i] === null) continue; if (arr[i] !== val) { return false; } } return true; }; // sums the numeric values in an array, ignoring other values sum = function (vals) { var total = 0, i; for (i = vals.length; i--;) { total += typeof vals[i] === 'number' ? vals[i] : 0; } return total; }; ensureArray = function (val) { return $.isArray(val) ? val : [val]; }; // http://paulirish.com/2008/bookmarklet-inject-new-css-rules/ addCSS = function(css) { var tag, iefail; if (document.createStyleSheet) { try { document.createStyleSheet().cssText = css; return; } catch (e) { // IE <= 9 maxes out at 31 stylesheets; inject into page instead. iefail = true; } } tag = document.createElement('style'); tag.type = 'text/css'; document.getElementsByTagName('head')[0].appendChild(tag); if (iefail) { document.styleSheets[document.styleSheets.length - 1].cssText = css; } else { tag[(typeof document.body.style.WebkitAppearance == 'string') /* webkit only */ ? 
'innerText' : 'innerHTML'] = css; } }; // Provide a cross-browser interface to a few simple drawing primitives $.fn.simpledraw = function (width, height, useExisting, interact) { var target, mhandler; if (useExisting && (target = this.data('_jqs_vcanvas'))) { return target; } if ($.fn.sparkline.canvas === false) { // We've already determined that neither Canvas nor VML are available return false; } else if ($.fn.sparkline.canvas === undefined) { // No function defined yet -- need to see if we support Canvas or VML var el = document.createElement('canvas'); if (!!(el.getContext && el.getContext('2d'))) { // Canvas is available $.fn.sparkline.canvas = function(width, height, target, interact) { return new VCanvas_canvas(width, height, target, interact); }; } else if (document.namespaces && !document.namespaces.v) { // VML is available document.namespaces.add('v', 'urn:schemas-microsoft-com:vml', '#default#VML'); $.fn.sparkline.canvas = function(width, height, target, interact) { return new VCanvas_vml(width, height, target); }; } else { // Neither Canvas nor VML are available $.fn.sparkline.canvas = false; return false; } } if (width === undefined) { width = $(this).innerWidth(); } if (height === undefined) { height = $(this).innerHeight(); } target = $.fn.sparkline.canvas(width, height, this, interact); mhandler = $(this).data('_jqs_mhandler'); if (mhandler) { mhandler.registerCanvas(target); } return target; }; $.fn.cleardraw = function () { var target = this.data('_jqs_vcanvas'); if (target) { target.reset(); } }; $.RangeMapClass = RangeMap = createClass({ init: function (map) { var key, range, rangelist = []; for (key in map) { if (map.hasOwnProperty(key) && typeof key === 'string' && key.indexOf(':') > -1) { range = key.split(':'); range[0] = range[0].length === 0 ? -Infinity : parseFloat(range[0]); range[1] = range[1].length === 0 ? 
Infinity : parseFloat(range[1]); range[2] = map[key]; rangelist.push(range); } } this.map = map; this.rangelist = rangelist || false; }, get: function (value) { var rangelist = this.rangelist, i, range, result; if ((result = this.map[value]) !== undefined) { return result; } if (rangelist) { for (i = rangelist.length; i--;) { range = rangelist[i]; if (range[0] <= value && range[1] >= value) { return range[2]; } } } return undefined; } }); // Convenience function $.range_map = function(map) { return new RangeMap(map); }; MouseHandler = createClass({ init: function (el, options) { var $el = $(el); this.$el = $el; this.options = options; this.currentPageX = 0; this.currentPageY = 0; this.el = el; this.splist = []; this.tooltip = null; this.over = false; this.displayTooltips = !options.get('disableTooltips'); this.highlightEnabled = !options.get('disableHighlight'); }, registerSparkline: function (sp) { this.splist.push(sp); if (this.over) { this.updateDisplay(); } }, registerCanvas: function (canvas) { var $canvas = $(canvas.canvas); this.canvas = canvas; this.$canvas = $canvas; $canvas.mouseenter($.proxy(this.mouseenter, this)); $canvas.mouseleave($.proxy(this.mouseleave, this)); $canvas.click($.proxy(this.mouseclick, this)); }, reset: function (removeTooltip) { this.splist = []; if (this.tooltip && removeTooltip) { this.tooltip.remove(); this.tooltip = undefined; } }, mouseclick: function (e) { var clickEvent = $.Event('sparklineClick'); clickEvent.originalEvent = e; clickEvent.sparklines = this.splist; this.$el.trigger(clickEvent); }, mouseenter: function (e) { $(document.body).unbind('mousemove.jqs'); $(document.body).bind('mousemove.jqs', $.proxy(this.mousemove, this)); this.over = true; this.currentPageX = e.pageX; this.currentPageY = e.pageY; this.currentEl = e.target; if (!this.tooltip && this.displayTooltips) { this.tooltip = new Tooltip(this.options); this.tooltip.updatePosition(e.pageX, e.pageY); } this.updateDisplay(); }, mouseleave: function () { $(document.body).unbind('mousemove.jqs'); var splist = this.splist, spcount = splist.length, needsRefresh = false, sp, i; this.over = false; this.currentEl = null; if (this.tooltip) { this.tooltip.remove(); this.tooltip = null; } for (i = 0; i < spcount; i++) { sp = splist[i]; if (sp.clearRegionHighlight()) { needsRefresh = true; } } if (needsRefresh) { this.canvas.render(); } }, mousemove: function (e) { this.currentPageX = e.pageX; this.currentPageY = e.pageY; this.currentEl = e.target; if (this.tooltip) { this.tooltip.updatePosition(e.pageX, e.pageY); } this.updateDisplay(); }, updateDisplay: function () { var splist = this.splist, spcount = splist.length, needsRefresh = false, offset = this.$canvas.offset(), localX = this.currentPageX - offset.left, localY = this.currentPageY - offset.top, tooltiphtml, sp, i, result, changeEvent; if (!this.over) { return; } for (i = 0; i < spcount; i++) { sp = splist[i]; result = sp.setRegionHighlight(this.currentEl, localX, localY); if (result) { needsRefresh = true; } } if (needsRefresh) { changeEvent = $.Event('sparklineRegionChange'); changeEvent.sparklines = this.splist; this.$el.trigger(changeEvent); if (this.tooltip) { tooltiphtml = ''; for (i = 0; i < spcount; i++) { sp = splist[i]; tooltiphtml += sp.getCurrentRegionTooltip(); } this.tooltip.setContent(tooltiphtml); } if (!this.disableHighlight) { this.canvas.render(); } } if (result === null) { this.mouseleave(); } } }); Tooltip = createClass({ sizeStyle: 'position: static !important;' + 'display: block !important;' + 'visibility: hidden 
!important;' + 'float: left !important;', init: function (options) { var tooltipClassname = options.get('tooltipClassname', 'jqstooltip'), sizetipStyle = this.sizeStyle, offset; this.container = options.get('tooltipContainer') || document.body; this.tooltipOffsetX = options.get('tooltipOffsetX', 10); this.tooltipOffsetY = options.get('tooltipOffsetY', 12); // remove any previous lingering tooltip $('#jqssizetip').remove(); $('#jqstooltip').remove(); this.sizetip = $('<div/>', { id: 'jqssizetip', style: sizetipStyle, 'class': tooltipClassname }); this.tooltip = $('<div/>', { id: 'jqstooltip', 'class': tooltipClassname }).appendTo(this.container); // account for the container's location offset = this.tooltip.offset(); this.offsetLeft = offset.left; this.offsetTop = offset.top; this.hidden = true; $(window).unbind('resize.jqs scroll.jqs'); $(window).bind('resize.jqs scroll.jqs', $.proxy(this.updateWindowDims, this)); this.updateWindowDims(); }, updateWindowDims: function () { this.scrollTop = $(window).scrollTop(); this.scrollLeft = $(window).scrollLeft(); this.scrollRight = this.scrollLeft + $(window).width(); this.updatePosition(); }, getSize: function (content) { this.sizetip.html(content).appendTo(this.container); this.width = this.sizetip.width() + 1; this.height = this.sizetip.height(); this.sizetip.remove(); }, setContent: function (content) { if (!content) { this.tooltip.css('visibility', 'hidden'); this.hidden = true; return; } this.getSize(content); this.tooltip.html(content) .css({ 'width': this.width, 'height': this.height, 'visibility': 'visible' }); if (this.hidden) { this.hidden = false; this.updatePosition(); } }, updatePosition: function (x, y) { if (x === undefined) { if (this.mousex === undefined) { return; } x = this.mousex - this.offsetLeft; y = this.mousey - this.offsetTop; } else { this.mousex = x = x - this.offsetLeft; this.mousey = y = y - this.offsetTop; } if (!this.height || !this.width || this.hidden) { return; } y -= this.height + this.tooltipOffsetY; x += this.tooltipOffsetX; if (y < this.scrollTop) { y = this.scrollTop; } if (x < this.scrollLeft) { x = this.scrollLeft; } else if (x + this.width > this.scrollRight) { x = this.scrollRight - this.width; } this.tooltip.css({ 'left': x, 'top': y }); }, remove: function () { this.tooltip.remove(); this.sizetip.remove(); this.sizetip = this.tooltip = undefined; $(window).unbind('resize.jqs scroll.jqs'); } }); initStyles = function() { addCSS(defaultStyles); }; $(initStyles); pending = []; $.fn.sparkline = function (userValues, userOptions) { return this.each(function () { var options = new $.fn.sparkline.options(this, userOptions), $this = $(this), render, i; render = function () { var values, width, height, tmp, mhandler, sp, vals; if (userValues === 'html' || userValues === undefined) { vals = this.getAttribute(options.get('tagValuesAttribute')); if (vals === undefined || vals === null) { vals = $this.html(); } values = vals.replace(/(^\s*<!--)|(-->\s*$)|\s+/g, '').split(','); } else { values = userValues; } width = options.get('width') === 'auto' ? 
values.length * options.get('defaultPixelsPerValue') : options.get('width'); if (options.get('height') === 'auto') { if (!options.get('composite') || !$.data(this, '_jqs_vcanvas')) { // must be a better way to get the line height tmp = document.createElement('span'); tmp.innerHTML = 'a'; $this.html(tmp); height = $(tmp).innerHeight() || $(tmp).height(); $(tmp).remove(); tmp = null; } } else { height = options.get('height'); } if (!options.get('disableInteraction')) { mhandler = $.data(this, '_jqs_mhandler'); if (!mhandler) { mhandler = new MouseHandler(this, options); $.data(this, '_jqs_mhandler', mhandler); } else if (!options.get('composite')) { mhandler.reset(); } } else { mhandler = false; } if (options.get('composite') && !$.data(this, '_jqs_vcanvas')) { if (!$.data(this, '_jqs_errnotify')) { alert('Attempted to attach a composite sparkline to an element with no existing sparkline'); $.data(this, '_jqs_errnotify', true); } return; } sp = new $.fn.sparkline[options.get('type')](this, values, options, width, height); sp.render(); if (mhandler) { mhandler.registerSparkline(sp); } }; if (($(this).html() && !options.get('disableHiddenCheck') && $(this).is(':hidden')) || !$(this).parents('body').length) { if (!options.get('composite') && $.data(this, '_jqs_pending')) { // remove any existing references to the element for (i = pending.length; i; i--) { if (pending[i - 1][0] == this) { pending.splice(i - 1, 1); } } } pending.push([this, render]); $.data(this, '_jqs_pending', true); } else { render.call(this); } }); }; $.fn.sparkline.defaults = getDefaults(); $.sparkline_display_visible = function () { var el, i, pl; var done = []; for (i = 0, pl = pending.length; i < pl; i++) { el = pending[i][0]; if ($(el).is(':visible') && !$(el).parents().is(':hidden')) { pending[i][1].call(el); $.data(pending[i][0], '_jqs_pending', false); done.push(i); } else if (!$(el).closest('html').length && !$.data(el, '_jqs_pending')) { // element has been inserted and removed from the DOM // If it was not yet inserted into the dom then the .data request // will return true. // removing from the dom causes the data to be removed. 
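                // Either way the entry is cleared below: sparklines queued while
                // their element was hidden are rendered on the first call after
                // they become visible, and entries whose elements have left the
                // DOM are discarded without rendering.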
$.data(pending[i][0], '_jqs_pending', false); done.push(i); } } for (i = done.length; i; i--) { pending.splice(done[i - 1], 1); } }; /** * User option handler */ $.fn.sparkline.options = createClass({ init: function (tag, userOptions) { var extendedOptions, defaults, base, tagOptionType; this.userOptions = userOptions = userOptions || {}; this.tag = tag; this.tagValCache = {}; defaults = $.fn.sparkline.defaults; base = defaults.common; this.tagOptionsPrefix = userOptions.enableTagOptions && (userOptions.tagOptionsPrefix || base.tagOptionsPrefix); tagOptionType = this.getTagSetting('type'); if (tagOptionType === UNSET_OPTION) { extendedOptions = defaults[userOptions.type || base.type]; } else { extendedOptions = defaults[tagOptionType]; } this.mergedOptions = $.extend({}, base, extendedOptions, userOptions); }, getTagSetting: function (key) { var prefix = this.tagOptionsPrefix, val, i, pairs, keyval; if (prefix === false || prefix === undefined) { return UNSET_OPTION; } if (this.tagValCache.hasOwnProperty(key)) { val = this.tagValCache[key]; } else { val = this.tag.getAttribute(prefix + key); if (val === undefined || val === null) { val = UNSET_OPTION; } else if (val.substr(0, 1) === '[') { val = val.substr(1, val.length - 2).split(','); for (i = val.length; i--;) { val[i] = normalizeValue(val[i].replace(/(^\s*)|(\s*$)/g, '')); } } else if (val.substr(0, 1) === '{') { pairs = val.substr(1, val.length - 2).split(','); val = {}; for (i = pairs.length; i--;) { keyval = pairs[i].split(':', 2); val[keyval[0].replace(/(^\s*)|(\s*$)/g, '')] = normalizeValue(keyval[1].replace(/(^\s*)|(\s*$)/g, '')); } } else { val = normalizeValue(val); } this.tagValCache[key] = val; } return val; }, get: function (key, defaultval) { var tagOption = this.getTagSetting(key), result; if (tagOption !== UNSET_OPTION) { return tagOption; } return (result = this.mergedOptions[key]) === undefined ?
defaultval : result; } }); $.fn.sparkline._base = createClass({ disabled: false, init: function (el, values, options, width, height) { this.el = el; this.$el = $(el); this.values = values; this.options = options; this.width = width; this.height = height; this.currentRegion = undefined; }, /** * Setup the canvas */ initTarget: function () { var interactive = !this.options.get('disableInteraction'); if (!(this.target = this.$el.simpledraw(this.width, this.height, this.options.get('composite'), interactive))) { this.disabled = true; } else { this.canvasWidth = this.target.pixelWidth; this.canvasHeight = this.target.pixelHeight; } }, /** * Actually render the chart to the canvas */ render: function () { if (this.disabled) { this.el.innerHTML = ''; return false; } return true; }, /** * Return a region id for a given x/y co-ordinate */ getRegion: function (x, y) { }, /** * Highlight an item based on the moused-over x,y co-ordinate */ setRegionHighlight: function (el, x, y) { var currentRegion = this.currentRegion, highlightEnabled = !this.options.get('disableHighlight'), newRegion; if (x > this.canvasWidth || y > this.canvasHeight || x < 0 || y < 0) { return null; } newRegion = this.getRegion(el, x, y); if (currentRegion !== newRegion) { if (currentRegion !== undefined && highlightEnabled) { this.removeHighlight(); } this.currentRegion = newRegion; if (newRegion !== undefined && highlightEnabled) { this.renderHighlight(); } return true; } return false; }, /** * Reset any currently highlighted item */ clearRegionHighlight: function () { if (this.currentRegion !== undefined) { this.removeHighlight(); this.currentRegion = undefined; return true; } return false; }, renderHighlight: function () { this.changeHighlight(true); }, removeHighlight: function () { this.changeHighlight(false); }, changeHighlight: function (highlight) {}, /** * Fetch the HTML to display as a tooltip */ getCurrentRegionTooltip: function () { var options = this.options, header = '', entries = [], fields, formats, formatlen, fclass, text, i, showFields, showFieldsKey, newFields, fv, formatter, format, fieldlen, j; if (this.currentRegion === undefined) { return ''; } fields = this.getCurrentRegionFields(); formatter = options.get('tooltipFormatter'); if (formatter) { return formatter(this, options, fields); } if (options.get('tooltipChartTitle')) { header += '<div class="jqs jqstitle">' + options.get('tooltipChartTitle') + '</div>\n'; } formats = this.options.get('tooltipFormat'); if (!formats) { return ''; } if (!$.isArray(formats)) { formats = [formats]; } if (!$.isArray(fields)) { fields = [fields]; } showFields = this.options.get('tooltipFormatFieldlist'); showFieldsKey = this.options.get('tooltipFormatFieldlistKey'); if (showFields && showFieldsKey) { // user-selected ordering of fields newFields = []; for (i = fields.length; i--;) { fv = fields[i][showFieldsKey]; if ((j = $.inArray(fv, showFields)) != -1) { newFields[j] = fields[i]; } } fields = newFields; } formatlen = formats.length; fieldlen = fields.length; for (i = 0; i < formatlen; i++) { format = formats[i]; if (typeof format === 'string') { format = new SPFormat(format); } fclass = format.fclass || 'jqsfield'; for (j = 0; j < fieldlen; j++) { if (!fields[j].isNull || !options.get('tooltipSkipNull')) { $.extend(fields[j], { prefix: options.get('tooltipPrefix'), suffix: options.get('tooltipSuffix') }); text = format.render(fields[j], options.get('tooltipValueLookups'), options); entries.push('<div class="' + fclass + '">' + text + '</div>'); } } } if (entries.length) 
{ return header + entries.join('\n'); } return ''; }, getCurrentRegionFields: function () {}, calcHighlightColor: function (color, options) { var highlightColor = options.get('highlightColor'), lighten = options.get('highlightLighten'), parse, mult, rgbnew, i; if (highlightColor) { return highlightColor; } if (lighten) { // extract RGB values parse = /^#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(color) || /^#([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$/i.exec(color); if (parse) { rgbnew = []; mult = color.length === 4 ? 16 : 1; for (i = 0; i < 3; i++) { rgbnew[i] = clipval(Math.round(parseInt(parse[i + 1], 16) * mult * lighten), 0, 255); } return 'rgb(' + rgbnew.join(',') + ')'; } } return color; } }); barHighlightMixin = { changeHighlight: function (highlight) { var currentRegion = this.currentRegion, target = this.target, shapeids = this.regionShapes[currentRegion], newShapes; // will be null if the region value was null if (shapeids) { newShapes = this.renderRegion(currentRegion, highlight); if ($.isArray(newShapes) || $.isArray(shapeids)) { target.replaceWithShapes(shapeids, newShapes); this.regionShapes[currentRegion] = $.map(newShapes, function (newShape) { return newShape.id; }); } else { target.replaceWithShape(shapeids, newShapes); this.regionShapes[currentRegion] = newShapes.id; } } }, render: function () { var values = this.values, target = this.target, regionShapes = this.regionShapes, shapes, ids, i, j; if (!this.cls._super.render.call(this)) { return; } for (i = values.length; i--;) { shapes = this.renderRegion(i); if (shapes) { if ($.isArray(shapes)) { ids = []; for (j = shapes.length; j--;) { shapes[j].append(); ids.push(shapes[j].id); } regionShapes[i] = ids; } else { shapes.append(); regionShapes[i] = shapes.id; // store just the shapeid } } else { // null value regionShapes[i] = null; } } target.render(); } }; /** * Line charts */ $.fn.sparkline.line = line = createClass($.fn.sparkline._base, { type: 'line', init: function (el, values, options, width, height) { line._super.init.call(this, el, values, options, width, height); this.vertices = []; this.regionMap = []; this.xvalues = []; this.yvalues = []; this.yminmax = []; this.highlightSpotId = null; this.lastShapeId = null; this.initTarget(); }, getRegion: function (el, x, y) { var i, regionMap = this.regionMap; // maps regions to value positions for (i = regionMap.length; i--;) { if (regionMap[i] !== null && x >= regionMap[i][0] && x <= regionMap[i][1]) { return regionMap[i][2]; } } return undefined; }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion; return { isNull: this.yvalues[currentRegion] === null, x: this.xvalues[currentRegion], y: this.yvalues[currentRegion], color: this.options.get('lineColor'), fillColor: this.options.get('fillColor'), offset: currentRegion }; }, renderHighlight: function () { var currentRegion = this.currentRegion, target = this.target, vertex = this.vertices[currentRegion], options = this.options, spotRadius = options.get('spotRadius'), highlightSpotColor = options.get('highlightSpotColor'), highlightLineColor = options.get('highlightLineColor'), highlightSpot, highlightLine; if (!vertex) { return; } if (spotRadius && highlightSpotColor) { highlightSpot = target.drawCircle(vertex[0], vertex[1], spotRadius, undefined, highlightSpotColor); this.highlightSpotId = highlightSpot.id; target.insertAfterShape(this.lastShapeId, highlightSpot); } if (highlightLineColor) { highlightLine = target.drawLine(vertex[0], this.canvasTop, vertex[0], this.canvasTop + this.canvasHeight,
highlightLineColor); this.highlightLineId = highlightLine.id; target.insertAfterShape(this.lastShapeId, highlightLine); } }, removeHighlight: function () { var target = this.target; if (this.highlightSpotId) { target.removeShapeId(this.highlightSpotId); this.highlightSpotId = null; } if (this.highlightLineId) { target.removeShapeId(this.highlightLineId); this.highlightLineId = null; } }, scanValues: function () { var values = this.values, valcount = values.length, xvalues = this.xvalues, yvalues = this.yvalues, yminmax = this.yminmax, i, val, isStr, isArray, sp; for (i = 0; i < valcount; i++) { val = values[i]; isStr = typeof(values[i]) === 'string'; isArray = typeof(values[i]) === 'object' && values[i] instanceof Array; sp = isStr && values[i].split(':'); if (isStr && sp.length === 2) { // x:y xvalues.push(Number(sp[0])); yvalues.push(Number(sp[1])); yminmax.push(Number(sp[1])); } else if (isArray) { xvalues.push(val[0]); yvalues.push(val[1]); yminmax.push(val[1]); } else { xvalues.push(i); if (values[i] === null || values[i] === 'null') { yvalues.push(null); } else { yvalues.push(Number(val)); yminmax.push(Number(val)); } } } if (this.options.get('xvalues')) { xvalues = this.options.get('xvalues'); } this.maxy = this.maxyorg = Math.max.apply(Math, yminmax); this.miny = this.minyorg = Math.min.apply(Math, yminmax); this.maxx = Math.max.apply(Math, xvalues); this.minx = Math.min.apply(Math, xvalues); this.xvalues = xvalues; this.yvalues = yvalues; this.yminmax = yminmax; }, processRangeOptions: function () { var options = this.options, normalRangeMin = options.get('normalRangeMin'), normalRangeMax = options.get('normalRangeMax'); if (normalRangeMin !== undefined) { if (normalRangeMin < this.miny) { this.miny = normalRangeMin; } if (normalRangeMax > this.maxy) { this.maxy = normalRangeMax; } } if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.miny)) { this.miny = options.get('chartRangeMin'); } if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.maxy)) { this.maxy = options.get('chartRangeMax'); } if (options.get('chartRangeMinX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMinX') < this.minx)) { this.minx = options.get('chartRangeMinX'); } if (options.get('chartRangeMaxX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMaxX') > this.maxx)) { this.maxx = options.get('chartRangeMaxX'); } }, drawNormalRange: function (canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey) { var normalRangeMin = this.options.get('normalRangeMin'), normalRangeMax = this.options.get('normalRangeMax'), ytop = canvasTop + Math.round(canvasHeight - (canvasHeight * ((normalRangeMax - this.miny) / rangey))), height = Math.round((canvasHeight * (normalRangeMax - normalRangeMin)) / rangey); this.target.drawRect(canvasLeft, ytop, canvasWidth, height, undefined, this.options.get('normalRangeColor')).append(); }, render: function () { var options = this.options, target = this.target, canvasWidth = this.canvasWidth, canvasHeight = this.canvasHeight, vertices = this.vertices, spotRadius = options.get('spotRadius'), regionMap = this.regionMap, rangex, rangey, yvallast, canvasTop, canvasLeft, vertex, path, paths, x, y, xnext, xpos, xposnext, last, next, yvalcount, lineShapes, fillShapes, plen, valueSpots, hlSpotsEnabled, color, xvalues, yvalues, i; if (!line._super.render.call(this)) { return; } this.scanValues(); 
this.processRangeOptions(); xvalues = this.xvalues; yvalues = this.yvalues; if (!this.yminmax.length || this.yvalues.length < 2) { // empty or all null values return; } canvasTop = canvasLeft = 0; rangex = this.maxx - this.minx === 0 ? 1 : this.maxx - this.minx; rangey = this.maxy - this.miny === 0 ? 1 : this.maxy - this.miny; yvallast = this.yvalues.length - 1; if (spotRadius && (canvasWidth < (spotRadius * 4) || canvasHeight < (spotRadius * 4))) { spotRadius = 0; } if (spotRadius) { // adjust the canvas size as required so that spots will fit hlSpotsEnabled = options.get('highlightSpotColor') && !options.get('disableInteraction'); if (hlSpotsEnabled || options.get('minSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.miny)) { canvasHeight -= Math.ceil(spotRadius); } if (hlSpotsEnabled || options.get('maxSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.maxy)) { canvasHeight -= Math.ceil(spotRadius); canvasTop += Math.ceil(spotRadius); } if (hlSpotsEnabled || ((options.get('minSpotColor') || options.get('maxSpotColor')) && (yvalues[0] === this.miny || yvalues[0] === this.maxy))) { canvasLeft += Math.ceil(spotRadius); canvasWidth -= Math.ceil(spotRadius); } if (hlSpotsEnabled || options.get('spotColor') || (options.get('minSpotColor') || options.get('maxSpotColor') && (yvalues[yvallast] === this.miny || yvalues[yvallast] === this.maxy))) { canvasWidth -= Math.ceil(spotRadius); } } canvasHeight--; if (options.get('normalRangeMin') !== undefined && !options.get('drawNormalOnTop')) { this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey); } path = []; paths = [path]; last = next = null; yvalcount = yvalues.length; for (i = 0; i < yvalcount; i++) { x = xvalues[i]; xnext = xvalues[i + 1]; y = yvalues[i]; xpos = canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)); xposnext = i < yvalcount - 1 ?
canvasLeft + Math.round((xnext - this.minx) * (canvasWidth / rangex)) : canvasWidth; next = xpos + ((xposnext - xpos) / 2); regionMap[i] = [last || 0, next, i]; last = next; if (y === null) { if (i) { if (yvalues[i - 1] !== null) { path = []; paths.push(path); } vertices.push(null); } } else { if (y < this.miny) { y = this.miny; } if (y > this.maxy) { y = this.maxy; } if (!path.length) { // previous value was null path.push([xpos, canvasTop + canvasHeight]); } vertex = [xpos, canvasTop + Math.round(canvasHeight - (canvasHeight * ((y - this.miny) / rangey)))]; path.push(vertex); vertices.push(vertex); } } lineShapes = []; fillShapes = []; plen = paths.length; for (i = 0; i < plen; i++) { path = paths[i]; if (path.length) { if (options.get('fillColor')) { path.push([path[path.length - 1][0], (canvasTop + canvasHeight)]); fillShapes.push(path.slice(0)); path.pop(); } // if there's only a single point in this path, then we want to display it // as a vertical line which means we keep path[0] as is if (path.length > 2) { // else we want the first value path[0] = [path[0][0], path[1][1]]; } lineShapes.push(path); } } // draw the fill first, then optionally the normal range, then the line on top of that plen = fillShapes.length; for (i = 0; i < plen; i++) { target.drawShape(fillShapes[i], options.get('fillColor'), options.get('fillColor')).append(); } if (options.get('normalRangeMin') !== undefined && options.get('drawNormalOnTop')) { this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey); } plen = lineShapes.length; for (i = 0; i < plen; i++) { target.drawShape(lineShapes[i], options.get('lineColor'), undefined, options.get('lineWidth')).append(); } if (spotRadius && options.get('valueSpots')) { valueSpots = options.get('valueSpots'); if (valueSpots.get === undefined) { valueSpots = new RangeMap(valueSpots); } for (i = 0; i < yvalcount; i++) { color = valueSpots.get(yvalues[i]); if (color) { target.drawCircle(canvasLeft + Math.round((xvalues[i] - this.minx) * (canvasWidth / rangex)), canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[i] - this.miny) / rangey))), spotRadius, undefined, color).append(); } } } if (spotRadius && options.get('spotColor') && yvalues[yvallast] !== null) { target.drawCircle(canvasLeft + Math.round((xvalues[xvalues.length - 1] - this.minx) * (canvasWidth / rangex)), canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[yvallast] - this.miny) / rangey))), spotRadius, undefined, options.get('spotColor')).append(); } if (this.maxy !== this.minyorg) { if (spotRadius && options.get('minSpotColor')) { x = xvalues[$.inArray(this.minyorg, yvalues)]; target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)), canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.minyorg - this.miny) / rangey))), spotRadius, undefined, options.get('minSpotColor')).append(); } if (spotRadius && options.get('maxSpotColor')) { x = xvalues[$.inArray(this.maxyorg, yvalues)]; target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)), canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.maxyorg - this.miny) / rangey))), spotRadius, undefined, options.get('maxSpotColor')).append(); } } this.lastShapeId = target.getLastShapeId(); this.canvasTop = canvasTop; target.render(); } }); /** * Bar charts */ $.fn.sparkline.bar = bar = createClass($.fn.sparkline._base, barHighlightMixin, { type: 'bar', init: function (el, values, options, width, height) { var barWidth = parseInt(options.get('barWidth'), 
10), barSpacing = parseInt(options.get('barSpacing'), 10), chartRangeMin = options.get('chartRangeMin'), chartRangeMax = options.get('chartRangeMax'), chartRangeClip = options.get('chartRangeClip'), stackMin = Infinity, stackMax = -Infinity, isStackString, groupMin, groupMax, stackRanges, numValues, i, vlen, range, zeroAxis, xaxisOffset, min, max, clipMin, clipMax, stacked, vlist, j, slen, svals, val, yoffset, yMaxCalc, canvasHeightEf; bar._super.init.call(this, el, values, options, width, height); // scan values to determine whether to stack bars for (i = 0, vlen = values.length; i < vlen; i++) { val = values[i]; isStackString = typeof(val) === 'string' && val.indexOf(':') > -1; if (isStackString || $.isArray(val)) { stacked = true; if (isStackString) { val = values[i] = normalizeValues(val.split(':')); } val = remove(val, null); // min/max will treat null as zero groupMin = Math.min.apply(Math, val); groupMax = Math.max.apply(Math, val); if (groupMin < stackMin) { stackMin = groupMin; } if (groupMax > stackMax) { stackMax = groupMax; } } } this.stacked = stacked; this.regionShapes = {}; this.barWidth = barWidth; this.barSpacing = barSpacing; this.totalBarWidth = barWidth + barSpacing; this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing); this.initTarget(); if (chartRangeClip) { clipMin = chartRangeMin === undefined ? -Infinity : chartRangeMin; clipMax = chartRangeMax === undefined ? Infinity : chartRangeMax; } numValues = []; stackRanges = stacked ? [] : numValues; var stackTotals = []; var stackRangesNeg = []; for (i = 0, vlen = values.length; i < vlen; i++) { if (stacked) { vlist = values[i]; values[i] = svals = []; stackTotals[i] = 0; stackRanges[i] = stackRangesNeg[i] = 0; for (j = 0, slen = vlist.length; j < slen; j++) { val = svals[j] = chartRangeClip ? clipval(vlist[j], clipMin, clipMax) : vlist[j]; if (val !== null) { if (val > 0) { stackTotals[i] += val; } if (stackMin < 0 && stackMax > 0) { if (val < 0) { stackRangesNeg[i] += Math.abs(val); } else { stackRanges[i] += val; } } else { stackRanges[i] += Math.abs(val - (val < 0 ? stackMax : stackMin)); } numValues.push(val); } } } else { val = chartRangeClip ? clipval(values[i], clipMin, clipMax) : values[i]; val = values[i] = normalizeValue(val); if (val !== null) { numValues.push(val); } } } this.max = max = Math.max.apply(Math, numValues); this.min = min = Math.min.apply(Math, numValues); this.stackMax = stackMax = stacked ? Math.max.apply(Math, stackTotals) : max; this.stackMin = stackMin = stacked ? Math.min.apply(Math, numValues) : min; if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < min)) { min = options.get('chartRangeMin'); } if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > max)) { max = options.get('chartRangeMax'); } this.zeroAxis = zeroAxis = options.get('zeroAxis', true); if (min <= 0 && max >= 0 && zeroAxis) { xaxisOffset = 0; } else if (zeroAxis == false) { xaxisOffset = min; } else if (min > 0) { xaxisOffset = min; } else { xaxisOffset = max; } this.xaxisOffset = xaxisOffset; range = stacked ? (Math.max.apply(Math, stackRanges) + Math.max.apply(Math, stackRangesNeg)) : max - min; // as we plot zero/min values a single pixel line, we add a pixel to all other // values - Reduce the effective canvas size to suit this.canvasHeightEf = (zeroAxis && min < 0) ? 
this.canvasHeight - 2 : this.canvasHeight - 1; if (min < xaxisOffset) { yMaxCalc = (stacked && max >= 0) ? stackMax : max; yoffset = (yMaxCalc - xaxisOffset) / range * this.canvasHeight; if (yoffset !== Math.ceil(yoffset)) { this.canvasHeightEf -= 2; yoffset = Math.ceil(yoffset); } } else { yoffset = this.canvasHeight; } this.yoffset = yoffset; if ($.isArray(options.get('colorMap'))) { this.colorMapByIndex = options.get('colorMap'); this.colorMapByValue = null; } else { this.colorMapByIndex = null; this.colorMapByValue = options.get('colorMap'); if (this.colorMapByValue && this.colorMapByValue.get === undefined) { this.colorMapByValue = new RangeMap(this.colorMapByValue); } } this.range = range; }, getRegion: function (el, x, y) { var result = Math.floor(x / this.totalBarWidth); return (result < 0 || result >= this.values.length) ? undefined : result; }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion, values = ensureArray(this.values[currentRegion]), result = [], value, i; for (i = values.length; i--;) { value = values[i]; result.push({ isNull: value === null, value: value, color: this.calcColor(i, value, currentRegion), offset: currentRegion }); } return result; }, calcColor: function (stacknum, value, valuenum) { var colorMapByIndex = this.colorMapByIndex, colorMapByValue = this.colorMapByValue, options = this.options, color, newColor; if (this.stacked) { color = options.get('stackedBarColor'); } else { color = (value < 0) ? options.get('negBarColor') : options.get('barColor'); } if (value === 0 && options.get('zeroColor') !== undefined) { color = options.get('zeroColor'); } if (colorMapByValue && (newColor = colorMapByValue.get(value))) { color = newColor; } else if (colorMapByIndex && colorMapByIndex.length > valuenum) { color = colorMapByIndex[valuenum]; } return $.isArray(color) ? color[stacknum % color.length] : color; }, /** * Render bar(s) for a region */ renderRegion: function (valuenum, highlight) { var vals = this.values[valuenum], options = this.options, xaxisOffset = this.xaxisOffset, result = [], range = this.range, stacked = this.stacked, target = this.target, x = valuenum * this.totalBarWidth, canvasHeightEf = this.canvasHeightEf, yoffset = this.yoffset, y, height, color, isNull, yoffsetNeg, i, valcount, val, minPlotted, allMin; vals = $.isArray(vals) ? vals : [vals]; valcount = vals.length; val = vals[0]; isNull = all(null, vals); allMin = all(xaxisOffset, vals, true); if (isNull) { if (options.get('nullColor')) { color = highlight ? this.calcHighlightColor(options.get('nullColor'), options) : options.get('nullColor'); y = (yoffset > 0) ?
yoffset - 1 : yoffset; return target.drawRect(x, y, this.barWidth - 1, 0, color, color); } else { return undefined; } } yoffsetNeg = yoffset; for (i = 0; i < valcount; i++) { val = vals[i]; if (stacked && val === xaxisOffset) { if (!allMin || minPlotted) { continue; } minPlotted = true; } if (range > 0) { height = Math.floor(canvasHeightEf * ((Math.abs(val - xaxisOffset) / range))) + 1; } else { height = 1; } if (val < xaxisOffset || (val === xaxisOffset && yoffset === 0)) { y = yoffsetNeg; yoffsetNeg += height; } else { y = yoffset - height; yoffset -= height; } color = this.calcColor(i, val, valuenum); if (highlight) { color = this.calcHighlightColor(color, options); } result.push(target.drawRect(x, y, this.barWidth - 1, height - 1, color, color)); } if (result.length === 1) { return result[0]; } return result; } }); /** * Tristate charts */ $.fn.sparkline.tristate = tristate = createClass($.fn.sparkline._base, barHighlightMixin, { type: 'tristate', init: function (el, values, options, width, height) { var barWidth = parseInt(options.get('barWidth'), 10), barSpacing = parseInt(options.get('barSpacing'), 10); tristate._super.init.call(this, el, values, options, width, height); this.regionShapes = {}; this.barWidth = barWidth; this.barSpacing = barSpacing; this.totalBarWidth = barWidth + barSpacing; this.values = $.map(values, Number); this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing); if ($.isArray(options.get('colorMap'))) { this.colorMapByIndex = options.get('colorMap'); this.colorMapByValue = null; } else { this.colorMapByIndex = null; this.colorMapByValue = options.get('colorMap'); if (this.colorMapByValue && this.colorMapByValue.get === undefined) { this.colorMapByValue = new RangeMap(this.colorMapByValue); } } this.initTarget(); }, getRegion: function (el, x, y) { return Math.floor(x / this.totalBarWidth); }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion; return { isNull: this.values[currentRegion] === undefined, value: this.values[currentRegion], color: this.calcColor(this.values[currentRegion], currentRegion), offset: currentRegion }; }, calcColor: function (value, valuenum) { var values = this.values, options = this.options, colorMapByIndex = this.colorMapByIndex, colorMapByValue = this.colorMapByValue, color, newColor; if (colorMapByValue && (newColor = colorMapByValue.get(value))) { color = newColor; } else if (colorMapByIndex && colorMapByIndex.length > valuenum) { color = colorMapByIndex[valuenum]; } else if (values[valuenum] < 0) { color = options.get('negBarColor'); } else if (values[valuenum] > 0) { color = options.get('posBarColor'); } else { color = options.get('zeroBarColor'); } return color; }, renderRegion: function (valuenum, highlight) { var values = this.values, options = this.options, target = this.target, canvasHeight, height, halfHeight, x, y, color; canvasHeight = target.pixelHeight; halfHeight = Math.round(canvasHeight / 2); x = valuenum * this.totalBarWidth; if (values[valuenum] < 0) { y = halfHeight; height = halfHeight - 1; } else if (values[valuenum] > 0) { y = 0; height = halfHeight - 1; } else { y = halfHeight - 1; height = 2; } color = this.calcColor(values[valuenum], valuenum); if (color === null) { return; } if (highlight) { color = this.calcHighlightColor(color, options); } return target.drawRect(x, y, this.barWidth - 1, height - 1, color, color); } }); /** * Discrete charts */ $.fn.sparkline.discrete = discrete = createClass($.fn.sparkline._base, barHighlightMixin, { type: 'discrete', 
init: function (el, values, options, width, height) { discrete._super.init.call(this, el, values, options, width, height); this.regionShapes = {}; this.values = values = $.map(values, Number); this.min = Math.min.apply(Math, values); this.max = Math.max.apply(Math, values); this.range = this.max - this.min; this.width = width = options.get('width') === 'auto' ? values.length * 2 : this.width; this.interval = Math.floor(width / values.length); this.itemWidth = width / values.length; if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.min)) { this.min = options.get('chartRangeMin'); } if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.max)) { this.max = options.get('chartRangeMax'); } this.initTarget(); if (this.target) { this.lineHeight = options.get('lineHeight') === 'auto' ? Math.round(this.canvasHeight * 0.3) : options.get('lineHeight'); } }, getRegion: function (el, x, y) { return Math.floor(x / this.itemWidth); }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion; return { isNull: this.values[currentRegion] === undefined, value: this.values[currentRegion], offset: currentRegion }; }, renderRegion: function (valuenum, highlight) { var values = this.values, options = this.options, min = this.min, max = this.max, range = this.range, interval = this.interval, target = this.target, canvasHeight = this.canvasHeight, lineHeight = this.lineHeight, pheight = canvasHeight - lineHeight, ytop, val, color, x; val = clipval(values[valuenum], min, max); x = valuenum * interval; ytop = Math.round(pheight - pheight * ((val - min) / range)); color = (options.get('thresholdColor') && val < options.get('thresholdValue')) ? options.get('thresholdColor') : options.get('lineColor'); if (highlight) { color = this.calcHighlightColor(color, options); } return target.drawLine(x, ytop, x, ytop + lineHeight, color); } }); /** * Bullet charts */ $.fn.sparkline.bullet = bullet = createClass($.fn.sparkline._base, { type: 'bullet', init: function (el, values, options, width, height) { var min, max, vals; bullet._super.init.call(this, el, values, options, width, height); // values: target, performance, range1, range2, range3 this.values = values = normalizeValues(values); // target or performance could be null vals = values.slice(); vals[0] = vals[0] === null ? vals[2] : vals[0]; vals[1] = vals[1] === null ? vals[2] : vals[1]; min = Math.min.apply(Math, vals); max = Math.max.apply(Math, vals); if (options.get('base') === undefined) { min = min < 0 ? min : 0; } else { min = options.get('base'); } this.min = min; this.max = max; this.range = max - min; this.shapes = {}; this.valueShapes = {}; this.regiondata = {}; this.width = width = options.get('width') === 'auto' ? '4.0em' : width; this.target = this.$el.simpledraw(width, height, options.get('composite')); if (!values.length) { this.disabled = true; } this.initTarget(); }, getRegion: function (el, x, y) { var shapeid = this.target.getShapeAt(el, x, y); return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ?
this.shapes[shapeid] : undefined; }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion; return { fieldkey: currentRegion.substr(0, 1), value: this.values[currentRegion.substr(1)], region: currentRegion }; }, changeHighlight: function (highlight) { var currentRegion = this.currentRegion, shapeid = this.valueShapes[currentRegion], shape; delete this.shapes[shapeid]; switch (currentRegion.substr(0, 1)) { case 'r': shape = this.renderRange(currentRegion.substr(1), highlight); break; case 'p': shape = this.renderPerformance(highlight); break; case 't': shape = this.renderTarget(highlight); break; } this.valueShapes[currentRegion] = shape.id; this.shapes[shape.id] = currentRegion; this.target.replaceWithShape(shapeid, shape); }, renderRange: function (rn, highlight) { var rangeval = this.values[rn], rangewidth = Math.round(this.canvasWidth * ((rangeval - this.min) / this.range)), color = this.options.get('rangeColors')[rn - 2]; if (highlight) { color = this.calcHighlightColor(color, this.options); } return this.target.drawRect(0, 0, rangewidth - 1, this.canvasHeight - 1, color, color); }, renderPerformance: function (highlight) { var perfval = this.values[1], perfwidth = Math.round(this.canvasWidth * ((perfval - this.min) / this.range)), color = this.options.get('performanceColor'); if (highlight) { color = this.calcHighlightColor(color, this.options); } return this.target.drawRect(0, Math.round(this.canvasHeight * 0.3), perfwidth - 1, Math.round(this.canvasHeight * 0.4) - 1, color, color); }, renderTarget: function (highlight) { var targetval = this.values[0], x = Math.round(this.canvasWidth * ((targetval - this.min) / this.range) - (this.options.get('targetWidth') / 2)), targettop = Math.round(this.canvasHeight * 0.10), targetheight = this.canvasHeight - (targettop * 2), color = this.options.get('targetColor'); if (highlight) { color = this.calcHighlightColor(color, this.options); } return this.target.drawRect(x, targettop, this.options.get('targetWidth') - 1, targetheight - 1, color, color); }, render: function () { var vlen = this.values.length, target = this.target, i, shape; if (!bullet._super.render.call(this)) { return; } for (i = 2; i < vlen; i++) { shape = this.renderRange(i).append(); this.shapes[shape.id] = 'r' + i; this.valueShapes['r' + i] = shape.id; } if (this.values[1] !== null) { shape = this.renderPerformance().append(); this.shapes[shape.id] = 'p1'; this.valueShapes.p1 = shape.id; } if (this.values[0] !== null) { shape = this.renderTarget().append(); this.shapes[shape.id] = 't0'; this.valueShapes.t0 = shape.id; } target.render(); } }); /** * Pie charts */ $.fn.sparkline.pie = pie = createClass($.fn.sparkline._base, { type: 'pie', init: function (el, values, options, width, height) { var total = 0, i; pie._super.init.call(this, el, values, options, width, height); this.shapes = {}; // map shape ids to value offsets this.valueShapes = {}; // maps value offsets to shape ids this.values = values = $.map(values, Number); if (options.get('width') === 'auto') { this.width = this.height; } if (values.length > 0) { for (i = values.length; i--;) { total += values[i]; } } this.total = total; this.initTarget(); this.radius = Math.floor(Math.min(this.canvasWidth, this.canvasHeight) / 2); }, getRegion: function (el, x, y) { var shapeid = this.target.getShapeAt(el, x, y); return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ? 
this.shapes[shapeid] : undefined; }, getCurrentRegionFields: function () { var currentRegion = this.currentRegion; return { isNull: this.values[currentRegion] === undefined, value: this.values[currentRegion], percent: this.values[currentRegion] / this.total * 100, color: this.options.get('sliceColors')[currentRegion % this.options.get('sliceColors').length], offset: currentRegion }; }, changeHighlight: function (highlight) { var currentRegion = this.currentRegion, newslice = this.renderSlice(currentRegion, highlight), shapeid = this.valueShapes[currentRegion]; delete this.shapes[shapeid]; this.target.replaceWithShape(shapeid, newslice); this.valueShapes[currentRegion] = newslice.id; this.shapes[newslice.id] = currentRegion; }, renderSlice: function (valuenum, highlight) { var target = this.target, options = this.options, radius = this.radius, borderWidth = options.get('borderWidth'), offset = options.get('offset'), circle = 2 * Math.PI, values = this.values, total = this.total, next = offset ? (2*Math.PI)*(offset/360) : 0, start, end, i, vlen, color; vlen = values.length; for (i = 0; i < vlen; i++) { start = next; end = next; if (total > 0) { // avoid divide by zero end = next + (circle * (values[i] / total)); } if (valuenum === i) { color = options.get('sliceColors')[i % options.get('sliceColors').length]; if (highlight) { color = this.calcHighlightColor(color, options); } return target.drawPieSlice(radius, radius, radius - borderWidth, start, end, undefined, color); } next = end; } }, render: function () { var target = this.target, values = this.values, options = this.options, radius = this.radius, borderWidth = options.get('borderWidth'), shape, i; if (!pie._super.render.call(this)) { return; } if (borderWidth) { target.drawCircle(radius, radius, Math.floor(radius - (borderWidth / 2)), options.get('borderColor'), undefined, borderWidth).append(); } for (i = values.length; i--;) { if (values[i]) { // don't render zero values shape = this.renderSlice(i).append(); this.valueShapes[i] = shape.id; // store just the shapeid this.shapes[shape.id] = i; } } target.render(); } }); /** * Box plots */ $.fn.sparkline.box = box = createClass($.fn.sparkline._base, { type: 'box', init: function (el, values, options, width, height) { box._super.init.call(this, el, values, options, width, height); this.values = $.map(values, Number); this.width = options.get('width') === 'auto' ? '4.0em' : width; this.initTarget(); if (!this.values.length) { this.disabled = 1; } }, /** * Simulate a single region */ getRegion: function () { return 1; }, getCurrentRegionFields: function () { var result = [ { field: 'lq', value: this.quartiles[0] }, { field: 'med', value: this.quartiles[1] }, { field: 'uq', value: this.quartiles[2] } ]; if (this.loutlier !== undefined) { result.push({ field: 'lo', value: this.loutlier}); } if (this.routlier !== undefined) { result.push({ field: 'ro', value: this.routlier}); } if (this.lwhisker !== undefined) { result.push({ field: 'lw', value: this.lwhisker}); } if (this.rwhisker !== undefined) { result.push({ field: 'rw', value: this.rwhisker}); } return result; }, render: function () { var target = this.target, values = this.values, vlen = values.length, options = this.options, canvasWidth = this.canvasWidth, canvasHeight = this.canvasHeight, minValue = options.get('chartRangeMin') === undefined ? Math.min.apply(Math, values) : options.get('chartRangeMin'), maxValue = options.get('chartRangeMax') === undefined ? 
Math.max.apply(Math, values) : options.get('chartRangeMax'), canvasLeft = 0, lwhisker, loutlier, iqr, q1, q2, q3, rwhisker, routlier, i, size, unitSize; if (!box._super.render.call(this)) { return; } if (options.get('raw')) { if (options.get('showOutliers') && values.length > 5) { loutlier = values[0]; lwhisker = values[1]; q1 = values[2]; q2 = values[3]; q3 = values[4]; rwhisker = values[5]; routlier = values[6]; } else { lwhisker = values[0]; q1 = values[1]; q2 = values[2]; q3 = values[3]; rwhisker = values[4]; } } else { values.sort(function (a, b) { return a - b; }); q1 = quartile(values, 1); q2 = quartile(values, 2); q3 = quartile(values, 3); iqr = q3 - q1; if (options.get('showOutliers')) { lwhisker = rwhisker = undefined; for (i = 0; i < vlen; i++) { if (lwhisker === undefined && values[i] > q1 - (iqr * options.get('outlierIQR'))) { lwhisker = values[i]; } if (values[i] < q3 + (iqr * options.get('outlierIQR'))) { rwhisker = values[i]; } } loutlier = values[0]; routlier = values[vlen - 1]; } else { lwhisker = values[0]; rwhisker = values[vlen - 1]; } } this.quartiles = [q1, q2, q3]; this.lwhisker = lwhisker; this.rwhisker = rwhisker; this.loutlier = loutlier; this.routlier = routlier; unitSize = canvasWidth / (maxValue - minValue + 1); if (options.get('showOutliers')) { canvasLeft = Math.ceil(options.get('spotRadius')); canvasWidth -= 2 * Math.ceil(options.get('spotRadius')); unitSize = canvasWidth / (maxValue - minValue + 1); if (loutlier < lwhisker) { target.drawCircle((loutlier - minValue) * unitSize + canvasLeft, canvasHeight / 2, options.get('spotRadius'), options.get('outlierLineColor'), options.get('outlierFillColor')).append(); } if (routlier > rwhisker) { target.drawCircle((routlier - minValue) * unitSize + canvasLeft, canvasHeight / 2, options.get('spotRadius'), options.get('outlierLineColor'), options.get('outlierFillColor')).append(); } } // box target.drawRect( Math.round((q1 - minValue) * unitSize + canvasLeft), Math.round(canvasHeight * 0.1), Math.round((q3 - q1) * unitSize), Math.round(canvasHeight * 0.8), options.get('boxLineColor'), options.get('boxFillColor')).append(); // left whisker target.drawLine( Math.round((lwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 2), Math.round((q1 - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 2), options.get('lineColor')).append(); target.drawLine( Math.round((lwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 4), Math.round((lwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight - canvasHeight / 4), options.get('whiskerColor')).append(); // right whisker target.drawLine(Math.round((rwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 2), Math.round((q3 - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 2), options.get('lineColor')).append(); target.drawLine( Math.round((rwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight / 4), Math.round((rwhisker - minValue) * unitSize + canvasLeft), Math.round(canvasHeight - canvasHeight / 4), options.get('whiskerColor')).append(); // median line target.drawLine( Math.round((q2 - minValue) * unitSize + canvasLeft), Math.round(canvasHeight * 0.1), Math.round((q2 - minValue) * unitSize + canvasLeft), Math.round(canvasHeight * 0.9), options.get('medianColor')).append(); if (options.get('target')) { size = Math.ceil(options.get('spotRadius')); target.drawLine( Math.round((options.get('target') - minValue) * unitSize + canvasLeft), Math.round((canvasHeight / 2) - size), 
Math.round((options.get('target') - minValue) * unitSize + canvasLeft), Math.round((canvasHeight / 2) + size), options.get('targetColor')).append(); target.drawLine( Math.round((options.get('target') - minValue) * unitSize + canvasLeft - size), Math.round(canvasHeight / 2), Math.round((options.get('target') - minValue) * unitSize + canvasLeft + size), Math.round(canvasHeight / 2), options.get('targetColor')).append(); } target.render(); } }); // Setup a very simple "virtual canvas" to make drawing the few shapes we need easier // This is accessible as $(foo).simpledraw() VShape = createClass({ init: function (target, id, type, args) { this.target = target; this.id = id; this.type = type; this.args = args; }, append: function () { this.target.appendShape(this); return this; } }); VCanvas_base = createClass({ _pxregex: /(\d+)(px)?\s*$/i, init: function (width, height, target) { if (!width) { return; } this.width = width; this.height = height; this.target = target; this.lastShapeId = null; if (target[0]) { target = target[0]; } $.data(target, '_jqs_vcanvas', this); }, drawLine: function (x1, y1, x2, y2, lineColor, lineWidth) { return this.drawShape([[x1, y1], [x2, y2]], lineColor, lineWidth); }, drawShape: function (path, lineColor, fillColor, lineWidth) { return this._genShape('Shape', [path, lineColor, fillColor, lineWidth]); }, drawCircle: function (x, y, radius, lineColor, fillColor, lineWidth) { return this._genShape('Circle', [x, y, radius, lineColor, fillColor, lineWidth]); }, drawPieSlice: function (x, y, radius, startAngle, endAngle, lineColor, fillColor) { return this._genShape('PieSlice', [x, y, radius, startAngle, endAngle, lineColor, fillColor]); }, drawRect: function (x, y, width, height, lineColor, fillColor) { return this._genShape('Rect', [x, y, width, height, lineColor, fillColor]); }, getElement: function () { return this.canvas; }, /** * Return the most recently inserted shape id */ getLastShapeId: function () { return this.lastShapeId; }, /** * Clear and reset the canvas */ reset: function () { alert('reset not implemented'); }, _insert: function (el, target) { $(target).html(el); }, /** * Calculate the pixel dimensions of the canvas */ _calculatePixelDims: function (width, height, canvas) { // XXX This should probably be a configurable option var match; match = this._pxregex.exec(height); if (match) { this.pixelHeight = match[1]; } else { this.pixelHeight = $(canvas).height(); } match = this._pxregex.exec(width); if (match) { this.pixelWidth = match[1]; } else { this.pixelWidth = $(canvas).width(); } }, /** * Generate a shape object and id for later rendering */ _genShape: function (shapetype, shapeargs) { var id = shapeCount++; shapeargs.unshift(id); return new VShape(this, id, shapetype, shapeargs); }, /** * Add a shape to the end of the render queue */ appendShape: function (shape) { alert('appendShape not implemented'); }, /** * Replace one shape with another */ replaceWithShape: function (shapeid, shape) { alert('replaceWithShape not implemented'); }, /** * Insert one shape after another in the render queue */ insertAfterShape: function (shapeid, shape) { alert('insertAfterShape not implemented'); }, /** * Remove a shape from the queue */ removeShapeId: function (shapeid) { alert('removeShapeId not implemented'); }, /** * Find a shape at the specified x/y co-ordinates */ getShapeAt: function (el, x, y) { alert('getShapeAt not implemented'); }, /** * Render all queued shapes onto the canvas */ render: function () { alert('render not implemented'); } }); 
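The VCanvas_base class above fixes the library's drawing contract: _genShape mints an id for each shape, concrete canvases keep shapes in an ordered queue, and nothing is painted until render() walks that queue, which is what lets a chart swap or delete individual shapes between repaints. The following is a minimal Python sketch of that render-queue idea, for illustration only; the names are hypothetical and nothing here is part of jquery.sparkline.

```python
class ShapeQueue:
    """Toy version of the append/replace/remove-then-render contract."""

    def __init__(self):
        self.shapes = {}    # shape id -> shape object
        self.shapeseq = []  # draw order, oldest first
        self.next_id = 0

    def append(self, shape):
        # mirror of _genShape + appendShape: mint an id, queue the shape
        shape_id = self.next_id
        self.next_id += 1
        self.shapes[shape_id] = shape
        self.shapeseq.append(shape_id)
        return shape_id

    def replace(self, old_id, shape):
        # keep the old position in the draw order, swap the payload
        new_id = self.append(shape)
        self.shapeseq.remove(new_id)
        self.shapeseq = [new_id if sid == old_id else sid for sid in self.shapeseq]
        del self.shapes[old_id]
        return new_id

    def remove(self, shape_id):
        self.shapeseq.remove(shape_id)
        del self.shapes[shape_id]

    def render(self, draw):
        # draw is any callable that paints a single shape
        for sid in self.shapeseq:
            draw(self.shapes[sid])
```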
VCanvas_canvas = createClass(VCanvas_base, { init: function (width, height, target, interact) { VCanvas_canvas._super.init.call(this, width, height, target); this.canvas = document.createElement('canvas'); if (target[0]) { target = target[0]; } $.data(target, '_jqs_vcanvas', this); $(this.canvas).css({ display: 'inline-block', width: width, height: height, verticalAlign: 'top' }); this._insert(this.canvas, target); this._calculatePixelDims(width, height, this.canvas); this.canvas.width = this.pixelWidth; this.canvas.height = this.pixelHeight; this.interact = interact; this.shapes = {}; this.shapeseq = []; this.currentTargetShapeId = undefined; $(this.canvas).css({width: this.pixelWidth, height: this.pixelHeight}); }, _getContext: function (lineColor, fillColor, lineWidth) { var context = this.canvas.getContext('2d'); if (lineColor !== undefined) { context.strokeStyle = lineColor; } context.lineWidth = lineWidth === undefined ? 1 : lineWidth; if (fillColor !== undefined) { context.fillStyle = fillColor; } return context; }, reset: function () { var context = this._getContext(); context.clearRect(0, 0, this.pixelWidth, this.pixelHeight); this.shapes = {}; this.shapeseq = []; this.currentTargetShapeId = undefined; }, _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) { var context = this._getContext(lineColor, fillColor, lineWidth), i, plen; context.beginPath(); context.moveTo(path[0][0] + 0.5, path[0][1] + 0.5); for (i = 1, plen = path.length; i < plen; i++) { context.lineTo(path[i][0] + 0.5, path[i][1] + 0.5); // the 0.5 offset gives us crisp pixel-width lines } if (lineColor !== undefined) { context.stroke(); } if (fillColor !== undefined) { context.fill(); } if (this.targetX !== undefined && this.targetY !== undefined && context.isPointInPath(this.targetX, this.targetY)) { this.currentTargetShapeId = shapeid; } }, _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) { var context = this._getContext(lineColor, fillColor, lineWidth); context.beginPath(); context.arc(x, y, radius, 0, 2 * Math.PI, false); if (this.targetX !== undefined && this.targetY !== undefined && context.isPointInPath(this.targetX, this.targetY)) { this.currentTargetShapeId = shapeid; } if (lineColor !== undefined) { context.stroke(); } if (fillColor !== undefined) { context.fill(); } }, _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) { var context = this._getContext(lineColor, fillColor); context.beginPath(); context.moveTo(x, y); context.arc(x, y, radius, startAngle, endAngle, false); context.lineTo(x, y); context.closePath(); if (lineColor !== undefined) { context.stroke(); } if (fillColor) { context.fill(); } if (this.targetX !== undefined && this.targetY !== undefined && context.isPointInPath(this.targetX, this.targetY)) { this.currentTargetShapeId = shapeid; } }, _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) { return this._drawShape(shapeid, [[x, y], [x + width, y], [x + width, y + height], [x, y + height], [x, y]], lineColor, fillColor); }, appendShape: function (shape) { this.shapes[shape.id] = shape; this.shapeseq.push(shape.id); this.lastShapeId = shape.id; return shape.id; }, replaceWithShape: function (shapeid, shape) { var shapeseq = this.shapeseq, i; this.shapes[shape.id] = shape; for (i = shapeseq.length; i--;) { if (shapeseq[i] == shapeid) { shapeseq[i] = shape.id; } } delete this.shapes[shapeid]; }, replaceWithShapes: function (shapeids, shapes) { var shapeseq = this.shapeseq, shapemap = 
{}, sid, i, first; for (i = shapeids.length; i--;) { shapemap[shapeids[i]] = true; } for (i = shapeseq.length; i--;) { sid = shapeseq[i]; if (shapemap[sid]) { shapeseq.splice(i, 1); delete this.shapes[sid]; first = i; } } for (i = shapes.length; i--;) { shapeseq.splice(first, 0, shapes[i].id); this.shapes[shapes[i].id] = shapes[i]; } }, insertAfterShape: function (shapeid, shape) { var shapeseq = this.shapeseq, i; for (i = shapeseq.length; i--;) { if (shapeseq[i] === shapeid) { shapeseq.splice(i + 1, 0, shape.id); this.shapes[shape.id] = shape; return; } } }, removeShapeId: function (shapeid) { var shapeseq = this.shapeseq, i; for (i = shapeseq.length; i--;) { if (shapeseq[i] === shapeid) { shapeseq.splice(i, 1); break; } } delete this.shapes[shapeid]; }, getShapeAt: function (el, x, y) { this.targetX = x; this.targetY = y; this.render(); return this.currentTargetShapeId; }, render: function () { var shapeseq = this.shapeseq, shapes = this.shapes, shapeCount = shapeseq.length, context = this._getContext(), shapeid, shape, i; context.clearRect(0, 0, this.pixelWidth, this.pixelHeight); for (i = 0; i < shapeCount; i++) { shapeid = shapeseq[i]; shape = shapes[shapeid]; this['_draw' + shape.type].apply(this, shape.args); } if (!this.interact) { // not interactive so no need to keep the shapes array this.shapes = {}; this.shapeseq = []; } } }); VCanvas_vml = createClass(VCanvas_base, { init: function (width, height, target) { var groupel; VCanvas_vml._super.init.call(this, width, height, target); if (target[0]) { target = target[0]; } $.data(target, '_jqs_vcanvas', this); this.canvas = document.createElement('span'); $(this.canvas).css({ display: 'inline-block', position: 'relative', overflow: 'hidden', width: width, height: height, margin: '0px', padding: '0px', verticalAlign: 'top'}); this._insert(this.canvas, target); this._calculatePixelDims(width, height, this.canvas); this.canvas.width = this.pixelWidth; this.canvas.height = this.pixelHeight; groupel = '<v:group coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '"' + ' style="position:absolute;top:0;left:0;width:' + this.pixelWidth + 'px;height:' + this.pixelHeight + 'px;"></v:group>'; this.canvas.insertAdjacentHTML('beforeEnd', groupel); this.group = $(this.canvas).children()[0]; this.rendered = false; this.prerender = ''; }, _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) { var vpath = [], initial, stroke, fill, closed, vel, plen, i; for (i = 0, plen = path.length; i < plen; i++) { vpath[i] = '' + (path[i][0]) + ',' + (path[i][1]); } initial = vpath.splice(0, 1); lineWidth = lineWidth === undefined ? 1 : lineWidth; stroke = lineColor === undefined ? ' stroked="false" ' : ' strokeWeight="' + lineWidth + 'px" strokeColor="' + lineColor + '" '; fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" '; closed = vpath[0] === vpath[vpath.length - 1] ? 'x ' : ''; vel = '<v:shape coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '" ' + ' id="jqsshape' + shapeid + '" ' + stroke + fill + ' style="position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;" ' + ' path="m ' + initial + ' l ' + vpath.join(', ') + ' ' + closed + 'e">' + ' </v:shape>'; return vel; }, _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) { var stroke, fill, vel; x -= radius; y -= radius; stroke = lineColor === undefined ?
' stroked="false" ' : ' strokeWeight="' + lineWidth + 'px" strokeColor="' + lineColor + '" '; fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" '; vel = '<v:oval ' + ' id="jqsshape' + shapeid + '" ' + stroke + fill + ' style="position:absolute;top:' + y + 'px; left:' + x + 'px; width:' + (radius * 2) + 'px; height:' + (radius * 2) + 'px"></v:oval>'; return vel; }, _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) { var vpath, startx, starty, endx, endy, stroke, fill, vel; if (startAngle === endAngle) { return ''; // VML seems to have problem when start angle equals end angle. } if ((endAngle - startAngle) === (2 * Math.PI)) { startAngle = 0.0; // VML seems to have a problem when drawing a full circle that doesn't start 0 endAngle = (2 * Math.PI); } startx = x + Math.round(Math.cos(startAngle) * radius); starty = y + Math.round(Math.sin(startAngle) * radius); endx = x + Math.round(Math.cos(endAngle) * radius); endy = y + Math.round(Math.sin(endAngle) * radius); if (startx === endx && starty === endy) { if ((endAngle - startAngle) < Math.PI) { // Prevent very small slices from being mistaken as a whole pie return ''; } // essentially going to be the entire circle, so ignore startAngle startx = endx = x + radius; starty = endy = y; } if (startx === endx && starty === endy && (endAngle - startAngle) < Math.PI) { return ''; } vpath = [x - radius, y - radius, x + radius, y + radius, startx, starty, endx, endy]; stroke = lineColor === undefined ? ' stroked="false" ' : ' strokeWeight="1px" strokeColor="' + lineColor + '" '; fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" '; vel = '<v:shape coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '" ' + ' id="jqsshape' + shapeid + '" ' + stroke + fill + ' style="position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;" ' + ' path="m ' + x + ',' + y + ' wa ' + vpath.join(', ') + ' x e">' + ' </v:shape>'; return vel; }, _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) { return this._drawShape(shapeid, [[x, y], [x, y + height], [x + width, y + height], [x + width, y], [x, y]], lineColor, fillColor); }, reset: function () { this.group.innerHTML = ''; }, appendShape: function (shape) { var vel = this['_draw' + shape.type].apply(this, shape.args); if (this.rendered) { this.group.insertAdjacentHTML('beforeEnd', vel); } else { this.prerender += vel; } this.lastShapeId = shape.id; return shape.id; }, replaceWithShape: function (shapeid, shape) { var existing = $('#jqsshape' + shapeid), vel = this['_draw' + shape.type].apply(this, shape.args); existing[0].outerHTML = vel; }, replaceWithShapes: function (shapeids, shapes) { // replace the first shapeid with all the new shapes then toast the remaining old shapes var existing = $('#jqsshape' + shapeids[0]), replace = '', slen = shapes.length, i; for (i = 0; i < slen; i++) { replace += this['_draw' + shapes[i].type].apply(this, shapes[i].args); } existing[0].outerHTML = replace; for (i = 1; i < shapeids.length; i++) { $('#jqsshape' + shapeids[i]).remove(); } }, insertAfterShape: function (shapeid, shape) { var existing = $('#jqsshape' + shapeid), vel = this['_draw' + shape.type].apply(this, shape.args); existing[0].insertAdjacentHTML('afterEnd', vel); }, removeShapeId: function (shapeid) { var existing = $('#jqsshape' + shapeid); 
this.group.removeChild(existing[0]); }, getShapeAt: function (el, x, y) { var shapeid = el.id.substr(8); return shapeid; }, render: function () { if (!this.rendered) { // batch the initial render into a single repaint this.group.innerHTML = this.prerender; this.rendered = true; } } }); }))}(document, Math));
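A note on the box chart defined above: stripped of the canvas arithmetic, its non-raw code path sorts the values, takes the three quartiles, and places each whisker on the data point nearest to q1/q3 that still lies within outlierIQR times the interquartile range. The Python sketch below restates that logic for illustration only; sparkline's own quartile() helper is defined earlier in the file (not shown in this excerpt), so the interpolation rule used here is an assumption.

```python
def box_stats(values, outlier_iqr=1.5):
    values = sorted(values)
    n = len(values)

    def quartile(q):
        # simple linear-interpolation quartile; an assumption, since
        # sparkline's own helper is not shown in this excerpt
        pos = (n - 1) * q / 4.0
        lo, frac = int(pos), pos - int(pos)
        hi = min(lo + 1, n - 1)
        return values[lo] + (values[hi] - values[lo]) * frac

    q1, q2, q3 = quartile(1), quartile(2), quartile(3)
    iqr = q3 - q1
    # whiskers: nearest data points inside the q1/q3 +- k*IQR fences
    lwhisker = next((v for v in values if v > q1 - iqr * outlier_iqr), values[0])
    rwhisker = max((v for v in values if v < q3 + iqr * outlier_iqr), default=values[-1])
    return {'loutlier': values[0], 'lwhisker': lwhisker, 'q1': q1,
            'med': q2, 'q3': q3, 'rwhisker': rwhisker, 'routlier': values[-1]}

# 100 is flagged as an outlier; the right whisker stays at 9
print(box_stats([1, 2, 3, 4, 5, 6, 7, 8, 9, 100]))
```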
PypiClean
/FiletransferApp-0.0.2b2.tar.gz/FiletransferApp-0.0.2b2/app/filetransfer/views.py
from django.http import HttpResponse
from django.conf import settings
import os, socket
import logging

logger = logging.getLogger(__name__)
accesslogger = logging.getLogger("access")

# Create your views here.
def postfile(request):
    logip(request)
    if request.method == "POST":  # handle POST requests
        result = ""
        if settings.MEDIA_SIZE is None:
            settings.MEDIA_SIZE = getFolderSize(settings.MEDIA_ROOT)
            logger.info("getFolderSize:" + str(settings.MEDIA_SIZE))
        Files = request.FILES.getlist("file", None)  # the uploaded files; defaults to None if nothing was sent
        if not Files:
            return HttpResponse("no files for upload!")
        for File in Files:
            # check whether the file already exists
            if os.path.exists(os.path.join(settings.MEDIA_ROOT, File.name)):
                logger.info(File.name + " " + str(File.size) + " Fail, File exists")
                result = result + File.name + " " + str(File.size) + " Fail, File exists<br>\n"
            else:
                # check whether the total size limit would be exceeded
                if (File.size + settings.MEDIA_SIZE > settings.MEDIA_LIMIT_SIZE):
                    logger.info(File.name + " " + str(File.size) + " Fail, Total limit exceeded")
                    result = result + File.name + " " + str(File.size) + " Fail, Total limit exceeded<br>\n"
                    continue
                # open the destination file for binary writing
                with open(os.path.join(settings.MEDIA_ROOT, File.name), 'wb+') as destination:
                    for chunk in File.chunks():  # write the upload to disk in chunks
                        destination.write(chunk)
                settings.MEDIA_SIZE += File.size
                logger.info(File.name + " " + str(File.size) + " Success")
                result = result + File.name + " " + str(File.size) + " Success<br>\n"
        return HttpResponse(result)
    else:
        return HttpResponse("")

def upload(request):
    return HttpResponse("""<form enctype="multipart/form-data" action="/postfile/" method="post">
    <input type="file" multiple="multiple" name="file" />
    <input type="submit" value="upload"/>
    </form>
    """)

def index(request):
    ip = get_host_ip()
    return HttpResponse("welcome! <a href=\"http://" + ip + "\">" + ip + "</a>")

def get_host_ip():
    '''Get the local machine's IP address.'''
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('8.8.8.8', 80))
        ip = s.getsockname()[0]
        return ip
    finally:
        s.close()

# compute the total size of a folder
def getFolderSize(filePath, size=0):
    for root, dirs, files in os.walk(filePath):
        for f in files:
            size += os.path.getsize(os.path.join(root, f))
    return size

# log the client's IP address
def logip(request):
    remote_info = ''
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        remote_info = x_forwarded_for.split(',')[0]
    else:
        remote_info = "n"
    remote_addr = request.META.get('REMOTE_ADDR')
    if remote_addr:
        remote_info += '/' + remote_addr
    accesslogger.info("[%s]" % (remote_info))
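These views rely on routes that live outside this file: the form rendered by upload() posts to /postfile/, and index() presumably serves the site root. A minimal urls.py that would wire them up could look like the sketch below; the module path and route layout are assumptions for illustration, not taken from the package.

```python
from django.urls import path

from filetransfer import views

urlpatterns = [
    path('', views.index),
    path('upload/', views.upload),
    path('postfile/', views.postfile),
]
```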
PypiClean
/Generation_Image_Gaston-0.2.2.tar.gz/Generation_Image_Gaston-0.2.2/README.md
# Generation-image_Gaston

## Description

Generation-image_Gaston is a Python package that allows you to generate images from a given prompt using the craiyon.com site. This package uses Selenium to automate the image generation and download process.

## Installation

You can install the Generation-image_Gaston package using pip:

```
pip install Generation-image_Gaston
```

Make sure you have the following dependencies installed:

- undetected_chromedriver
- selenium

They are normally installed automatically.

## Usage

Here is an example of using the package:

```python
from Generation_Image_Gaston import SelectStyle

SelectStyle("Three Crying Cat...", "PHOTO")
```

The `SelectStyle` function takes two parameters:

- Prompt: the text to use as a prompt to generate the image.
- Style: the desired image style. The available options are "PHOTO", "DRAWING" and "ART".

The function performs the following steps:

1. Opens the craiyon.com website.
2. Accepts cookies (if necessary).
3. Selects the specified picture style.
4. Enters the given prompt.
5. Generates the image.
6. Downloads the image to your computer.

Please note that the generation process may take some time. When the download is complete, the message "Download is complete" will be displayed.

Make sure you have Chrome installed on your system, as the package uses Chrome to interact with the website.
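For context, the automation described above boils down to driving a real Chrome session with Selenium. Below is a minimal, illustrative sketch of that idea — it is **not** the package's actual implementation, and the prompt-field selector is a placeholder assumption:

```python
# Illustrative sketch only -- not the package's real code.
import undetected_chromedriver as uc
from selenium.webdriver.common.by import By

driver = uc.Chrome()
driver.get("https://www.craiyon.com")

# The selector below is a placeholder assumption; the real page structure may differ.
prompt_box = driver.find_element(By.CSS_SELECTOR, "#prompt")
prompt_box.send_keys("Three Crying Cat...")
```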
PypiClean
/DeepGraph-0.2.3.tar.gz/DeepGraph-0.2.3/deepgraph/functions.py
from __future__ import print_function, division, absolute_import

# Copyright (C) 2017-2020 by
# Dominik Traxl <[email protected]>
# All rights reserved.
# BSD license.

# py2/3 compatibility
try:
    range = xrange
except NameError:
    pass

import numpy as np

__all__ = ['great_circle_dist',
           'cp_node_intersection',
           'cp_intersection_strength',
           'hypergeometric_p_value',
           ]


# ============================================================================
# CONNECTORS
# ============================================================================

def great_circle_dist(lat_s, lat_t, lon_s, lon_t):
    """Return the great circle distance between nodes.

    The latitude and longitude values in the node table have to be in signed
    decimal degrees without compass direction (the sign indicates
    west/south). The great circle distance is calculated using the spherical
    law of cosines.

    """

    # dtypes
    lat_s = np.array(lat_s, dtype=float)
    lat_t = np.array(lat_t, dtype=float)
    lon_s = np.array(lon_s, dtype=float)
    lon_t = np.array(lon_t, dtype=float)

    # select by event_indices
    phi_i = np.radians(lat_s)
    phi_j = np.radians(lat_t)
    delta_alpha = np.radians(lon_t) - np.radians(lon_s)

    # earth's radius
    R = 6371

    # spatial distance of nodes
    gcd = np.arccos(np.sin(phi_i) * np.sin(phi_j) +
                    np.cos(phi_i) * np.cos(phi_j) *
                    np.cos(delta_alpha)) * R

    # for 0 gcd, there might be nans, convert to 0.
    gcd = np.nan_to_num(gcd)

    return gcd


def cp_node_intersection(supernode_ids, sources, targets):
    """Work in progress!
    """

    nodess = supernode_ids[sources]
    nodest = supernode_ids[targets]

    identical_nodes = (nodess == nodest)

    intsec = np.zeros(len(sources), dtype=object)
    intsec_card = np.zeros(len(sources), dtype=int)  # np.int is removed in modern NumPy; use the builtin

    for i in range(len(sources)):
        intsec[i] = nodess[i].intersection(nodest[i])
        intsec_card[i] = len(intsec[i])

    return intsec, intsec_card, identical_nodes


def cp_intersection_strength(n_unique_nodes, intsec_card, sources, targets):
    """Work in progress!
    """

    us = n_unique_nodes[sources]
    ut = n_unique_nodes[targets]

    # min cardinality
    min_card = np.array(np.vstack((us, ut)).min(axis=0), dtype=np.float64)

    # intersection strength
    intsec_strength = intsec_card / min_card

    return intsec_strength


def hypergeometric_p_value(n_unique_nodes, intsec_card, sources, targets):
    """Work in progress!
    """

    from scipy.stats import hypergeom

    us = n_unique_nodes[sources]
    ut = n_unique_nodes[targets]

    # population size
    M = 220 * 220

    # number of success states in population
    n = np.vstack((us, ut)).max(axis=0)

    # total draws
    N = np.vstack((us, ut)).min(axis=0)

    # successes
    x = intsec_card

    hg_p = np.zeros(len(sources))
    for i in range(len(sources)):
        hg_p[i] = hypergeom.sf(x[i], M, n[i], N[i])

    return hg_p


# ============================================================================
# Selectors
# ============================================================================
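
# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): computes the
# great circle distance Berlin -> Paris. Inputs are signed decimal degrees as
# required by the docstring above; the expected ~878 km result is approximate.
if __name__ == '__main__':
    d = great_circle_dist(52.52, 48.86, 13.40, 2.35)
    print('Berlin -> Paris: %.0f km' % d)  # roughly 878 km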
PypiClean
/HTSQL-2.3.3.tar.gz/HTSQL-2.3.3/src/htsql/tweak/shell/vendor/codemirror-2.13/demo/complete.js
(function () {
  // Minimal event-handling wrapper.
  function stopEvent() {
    if (this.preventDefault) {this.preventDefault(); this.stopPropagation();}
    else {this.returnValue = false; this.cancelBubble = true;}
  }
  function addStop(event) {
    if (!event.stop) event.stop = stopEvent;
    return event;
  }
  function connect(node, type, handler) {
    function wrapHandler(event) {handler(addStop(event || window.event));}
    if (typeof node.addEventListener == "function")
      node.addEventListener(type, wrapHandler, false);
    else
      node.attachEvent("on" + type, wrapHandler);
  }

  function forEach(arr, f) {
    for (var i = 0, e = arr.length; i < e; ++i) f(arr[i]);
  }

  var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
    lineNumbers: true,
    theme: "night",
    onKeyEvent: function(i, e) {
      // Hook into ctrl-space
      if (e.keyCode == 32 && (e.ctrlKey || e.metaKey) && !e.altKey) {
        e.stop();
        return startComplete();
      }
    }
  });

  function startComplete() {
    // We want a single cursor position.
    if (editor.somethingSelected()) return;
    // Find the token at the cursor
    var cur = editor.getCursor(false), token = editor.getTokenAt(cur), tprop = token;
    // If it's not a 'word-style' token, ignore the token.
    if (!/^[\w$_]*$/.test(token.string)) {
      token = tprop = {start: cur.ch, end: cur.ch, string: "", state: token.state,
                       className: token.string == "." ? "js-property" : null};
    }
    // If it is a property, find out what it is a property of.
    while (tprop.className == "js-property") {
      tprop = editor.getTokenAt({line: cur.line, ch: tprop.start});
      if (tprop.string != ".") return;
      tprop = editor.getTokenAt({line: cur.line, ch: tprop.start});
      if (!context) var context = [];
      context.push(tprop);
    }
    var completions = getCompletions(token, context);
    if (!completions.length) return;
    function insert(str) {
      editor.replaceRange(str, {line: cur.line, ch: token.start}, {line: cur.line, ch: token.end});
    }
    // When there is only one completion, use it directly.
    if (completions.length == 1) {insert(completions[0]); return true;}

    // Build the select widget
    var complete = document.createElement("div");
    complete.className = "completions";
    var sel = complete.appendChild(document.createElement("select"));
    sel.multiple = true;
    for (var i = 0; i < completions.length; ++i) {
      var opt = sel.appendChild(document.createElement("option"));
      opt.appendChild(document.createTextNode(completions[i]));
    }
    sel.firstChild.selected = true;
    sel.size = Math.min(10, completions.length);
    var pos = editor.cursorCoords();
    complete.style.left = pos.x + "px";
    complete.style.top = pos.yBot + "px";
    document.body.appendChild(complete);
    // Hack to hide the scrollbar.
    if (completions.length <= 10)
      complete.style.width = (sel.clientWidth - 1) + "px";

    var done = false;
    function close() {
      if (done) return;
      done = true;
      complete.parentNode.removeChild(complete);
    }
    function pick() {
      insert(sel.options[sel.selectedIndex].value);
      close();
      setTimeout(function(){editor.focus();}, 50);
    }
    connect(sel, "blur", close);
    connect(sel, "keydown", function(event) {
      var code = event.keyCode;
      // Enter and space
      if (code == 13 || code == 32) {event.stop(); pick();}
      // Escape
      else if (code == 27) {event.stop(); close(); editor.focus();}
      else if (code != 38 && code != 40) {close(); editor.focus(); setTimeout(startComplete, 50);}
    });
    connect(sel, "dblclick", pick);
    sel.focus();
    // Opera sometimes ignores focusing a freshly created node
    if (window.opera) setTimeout(function(){if (!done) sel.focus();}, 100);
    return true;
  }

  var stringProps = ("charAt charCodeAt indexOf lastIndexOf substring substr slice trim trimLeft trimRight " +
                     "toUpperCase toLowerCase split concat match replace search").split(" ");
  var arrayProps = ("length concat join splice push pop shift unshift slice reverse sort indexOf " +
                    "lastIndexOf every some filter forEach map reduce reduceRight ").split(" ");
  var funcProps = "prototype apply call bind".split(" ");
  var keywords = ("break case catch continue debugger default delete do else false finally for function " +
                  "if in instanceof new null return switch throw true try typeof var void while with").split(" ");

  function getCompletions(token, context) {
    var found = [], start = token.string;
    function maybeAdd(str) {
      if (str.indexOf(start) == 0) found.push(str);
    }
    function gatherCompletions(obj) {
      if (typeof obj == "string") forEach(stringProps, maybeAdd);
      else if (obj instanceof Array) forEach(arrayProps, maybeAdd);
      else if (obj instanceof Function) forEach(funcProps, maybeAdd);
      for (var name in obj) maybeAdd(name);
    }

    if (context) {
      // If this is a property, see if it belongs to some object we can
      // find in the current environment.
      var obj = context.pop(), base;
      if (obj.className == "js-variable") base = window[obj.string];
      else if (obj.className == "js-string") base = "";
      else if (obj.className == "js-atom") base = 1;
      while (base != null && context.length)
        base = base[context.pop().string];
      if (base != null) gatherCompletions(base);
    }
    else {
      // If not, just look in the window object and any local scope
      // (reading into JS mode internals to get at the local variables)
      for (var v = token.state.localVars; v; v = v.next)
        maybeAdd(v.name);
      gatherCompletions(window);
      forEach(keywords, maybeAdd);
    }
    return found;
  }
})();
PypiClean
/InjecToast-1.1.6.tar.gz/InjecToast-1.1.6/README.md
# InjecToast

A tool to inject toasts in Android applications.

![injectoast-logofinal](https://user-images.githubusercontent.com/116693779/219882646-4178fc5e-585f-4196-8076-093af1694a3b.png)

<p align="center">
  <img src="https://img.shields.io/badge/Creator-PatzEdi-brightgreen" height="23">
  <img src="https://img.shields.io/badge/Version-Latest-brightgreen" height="23">
  <img src="https://img.shields.io/badge/Name-InjecToast-brightgreen" height="23">
  <img src="https://img.shields.io/badge/License-MIT-brightgreen" height="23">
</p>

This script uses Android application decompilation tools and the FinderZ library in order to function properly. HUGE credits to the authors of the tools that are able to decompile Android applications.

**This tool is made for educational purposes only!**

## **A command line application that provides you with the ability to inject toasts in Android applications, written in Python**

____________________________________________________________________________

## **CHANGELOG: 1.1.6**

- Release Version 1.1.6
- Fixed compatibility issue with FinderZ V2.

## **Tool Information (IMPORTANT)**

- Before you proceed, make sure to install my FinderZ library at: [FinderZ](https://pypi.org/project/FinderZ/) It is used to manage files properly.
- It is important to understand that a decompilation tool is required. You MUST decompile the .apk file before using the injector; you cannot just give the injector the .apk file directly.
- Concerning which tools to use, HUGE credits to those who made Android app decompilation possible: Apktool and ApkTool M.
- Here are the links:
	- [ApkTool](https://ibotpeaches.github.io/Apktool/)
	- [ApkToolM (For Android Devices only)](https://maximoff.su/apktool/?lang=en)
- FOR ANDROID DEVICES, READ BELOW:
	- If you are on an Android device, ApkTool M is a useful application that lets you decompile applications on your device. In order to use InjecToast, however, you also need [Termux](https://termux.dev/en/)
	- With Termux, you can emulate a terminal, making it very useful. You can run pip commands and run packages, which is why you need it to execute InjecToast on an Android device.

## **Installation**

Install with pip:

```
pip3 install InjecToast
```

## **Usage**

Parameters/Arguments:

```
-t TOASTMESSAGE -d DECOMPILEDAPPDIRECTORY -m METHOD
```

Concerning the arguments: `-t` is the toast message (important: put your toast in between quotation marks!), `-d` is the decompiled application directory, and `-m` is optional, defaulting to onCreate (MainActivity). If you want to inject into a custom method, use `-m` followed by the name of the method. Please note that the toast message and directory arguments are required.

Execute from the command line:

```
InjecToast
```

Execute from the command line with arguments:

```
InjecToast -t "ToastMessage" -d /path/to/decompiledapp/ -m Method #This is optional, default = onCreate / MainActivity.
```

Full steps:

- Decompile the app using apktool (if you are on Android, you can use ApkTool M; more information above under Tool Information).
- Copy the directory of the decompiled app.
- Go to your terminal, and enter:

```
InjecToast -t "YourMessageHere" -d TheAppDirectory #Please note that the toast message NEEDS to be in between quotes!
```

- Once the log and process have finished, you can recompile the app with apktool or ApkTool M (for Android).
- Once it is compiled, make sure the apk is signed, and you will be good to go!
## **Features**

- Inject toasts in Android applications.
- Error catching: the ability to attempt the toast injection even if something fails.
- Inject a toast message into any method of the application.
- Very easy to use.
- Command line interface helps save time, rather than having a menu to choose options from.
- Logs to show at what stage the execution is.
- Cross-platform support: works on Android devices, Mac, PC, Linux, you name it!
- Fast and efficient, no time lost. Just inject, compile, and you are good to go.

____________________________________________________________________________

## **Why?**

- I wanted to showcase how one can easily create a tool that edits other things based off of different code. It was really cool and fun doing this project.
- Toast messages are a way to decorate an Android app, or even debug it. Say, for example, you had your Android phone on you, and you experience a bug. You then decompile your app, inject a toast in any method, and look at what happened.
- Not only that, but many more reasons, one of them being for fun and to showcase how one can easily edit other code by programming in a different language.

____________________________________________________________________________

## **How?**

- Using the FinderZ library created by myself, everything was much easier to do. Dealing with files was no longer painful. 20 to 30 lines of code really only became one, thanks to the FinderZ library.
- Concerning the code, in summary it searches the smali files for the target method and then writes the toast's smali code on the line right after the method's ".locals" directive (a hedged sketch of this step appears below, after the error notes).
- In case you did not know, smali is semi-human-readable code that you get when decompiling Android applications.

____________________________________________________________________________

## **User notice**

- **This tool is made for educational purposes only!**
- Please note that you need to know what you are doing when it comes to decompiling, recompiling, and signing an Android application.
- When dealing with the toast message, -t, put your toast message in between two quotes!
- You MUST have the FinderZ library installed in order for this to function. This provides the application with the needed functions to search for files and file content in the app directory.
- For the application to work, you can use apktool, or, if you are on an Android phone, you can use ApkTool M. More information above under the Tool Information section!
- This project is under MIT license! Enjoy :)

____________________________________________________________________________

## **DEALING WITH ERRORS:**

- If you encounter an error in the compilation of an application, it means that the application is not supported. Overall, the injector works for around 90-95 percent of applications. Sometimes, a compilation tool such as apktool will give you register errors. This means that the application code conflicts with the injector's code.
- If you get other errors, script-wise, such as parsing errors, file finding errors, etc., feel free to create an issue on the GitHub page.
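To make the "How?" section above concrete, here is a minimal, hypothetical sketch of the `.locals` insertion step in plain Python. This is **not** InjecToast's actual implementation (which uses FinderZ); the file path, register choices, and smali snippet are illustrative assumptions:

```python
# Hypothetical sketch of the ".locals" injection idea -- not InjecToast's real code.
smali_path = "decompiledapp/smali/com/example/MainActivity.smali"  # assumed path
method_name = "onCreate"

# Illustrative toast smali; if v0/v1 clash with the method's own registers,
# recompilation fails -- the "register errors" described above.
toast_smali = [
    '    const-string v0, "YourMessageHere"\n',
    '    const/4 v1, 0x0\n',
    '    invoke-static {p0, v0, v1}, Landroid/widget/Toast;->makeText(Landroid/content/Context;Ljava/lang/CharSequence;I)Landroid/widget/Toast;\n',
    '    move-result-object v0\n',
    '    invoke-virtual {v0}, Landroid/widget/Toast;->show()V\n',
]

with open(smali_path) as f:
    lines = f.readlines()

in_method = False
for i, line in enumerate(lines):
    if line.lstrip().startswith(".method") and method_name in line:
        in_method = True                  # found the target method
    elif in_method and line.lstrip().startswith(".locals"):
        lines[i + 1:i + 1] = toast_smali  # inject right after the .locals line
        break

with open(smali_path, "w") as f:
    f.writelines(lines)
```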
## **Services used (Credits):**

- [FinderZ](https://github.com/PatzEdi/FinderZ/)
- [XML](https://docs.python.org/3/library/xml.dom.minidom.html)
- Services used to decompile Android apps:
	- [ApkTool](https://ibotpeaches.github.io/Apktool/)
	- [ApkToolM (For Android Devices only)](https://maximoff.su/apktool/?lang=en)

____________________________________________________________________________

## **Make sure to leave a star!**

- If you like this project, leaving a star is what motivates me in doing more. Thank you, and I hope this is useful to all.
PypiClean