repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Linaro/lava-dispatcher | lava_dispatcher/utils/installers.py | 1 | 2344 | # Copyright (C) 2016 Linaro Limited
#
# Author: Matthew Hart <[email protected]>
#
# This file is part of LAVA Dispatcher.
#
# LAVA Dispatcher is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# LAVA Dispatcher is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along
# with this program; if not, see <http://www.gnu.org/licenses>.
import re
def add_to_kickstart(preseedfile, extra_command):
with open(preseedfile, 'a') as pf:
pf.write('\n')
pf.write('%post\n')
pf.write('exec < /dev/console > /dev/console\n')
pf.write(extra_command + '\n')
pf.write('%end\n')
pf.close()
def add_late_command(preseedfile, extra_command):
added = False
with open(preseedfile, "r") as pf:
lines = pf.readlines()
pf.close()
endstring = '\\\n'
while endsin(lines, endstring):
for linenum, data in enumerate(lines):
if endsin(data, endstring):
lines[linenum] = lines[linenum].replace(endstring, '') + lines[linenum + 1]
del lines[linenum + 1]
for linenum, data in enumerate(lines):
if re.match("d-i preseed/late_command string(.*)", data):
# late_command already exists, append to it
append_line = "; " + extra_command + "\n"
lines[linenum] = lines[linenum].rstrip(' ;\n') + append_line
added = True
if not added:
append_line = extra_command + "\n"
lines.append("d-i preseed/late_command string " + append_line)
with open(preseedfile, "w") as pf:
for line in lines:
pf.write(line)
pf.close()
def endsin(lines, endstring):
match = False
if type(lines) is list:
for line in lines:
if line.endswith(endstring):
match = True
elif type(lines) is str:
if lines.endswith(endstring):
match = True
return match
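# Illustrative example (hypothetical file contents, not taken from this module):
# given a preseed file that contains a backslash-continued late_command such as
#   d-i preseed/late_command string in-target touch /foo ;\
#       in-target touch /bar
# calling add_late_command(path, 'in-target touch /baz') first folds the
# continued lines into one and then rewrites the line roughly as
#   d-i preseed/late_command string in-target touch /foo ; in-target touch /bar; in-target touch /baz
# If no late_command line exists at all, a fresh one is appended instead.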
| gpl-2.0 | -4,192,824,074,895,961,000 | 32.485714 | 91 | 0.627133 | false | 3.697161 | false | false | false |
rjpower/fastnet | test/test_cudaconv2.py | 1 | 1092 | from pycuda import gpuarray, driver
from scipy.signal import convolve2d
import cudaconv2
import numpy as np
import pycuda.driver as cuda
import sys
cudaconv2.init()
def test_convolution():
imgSize = 32
filterSize = 5
padding = 2
color = 1
imgNum = 1
filterNum = 64
stride = 1
modulesX = 1 + int(((2 * padding + imgSize - filterSize) / float(stride)))
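# Illustrative check: this is the usual convolution output-size formula,
# outputSize = 1 + (2 * padding + imgSize - filterSize) / stride.
# With imgSize=32, filterSize=5, padding=2 and stride=1 it gives 1 + 31 = 32,
# i.e. the output keeps the same spatial size as the input image.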
print 'Modules X', modulesX
img = gpuarray.to_gpu(np.ones((imgSize * imgSize * color, imgNum)).astype(np.float32))
filter = gpuarray.to_gpu(np.ones((filterSize * filterSize * color, filterNum)).astype(np.float32))
target = gpuarray.to_gpu(np.ones((modulesX * modulesX * filterNum, imgNum)).astype(np.float32))
print 'standard output for convolution'
print convolve2d(np.ones((imgSize, imgSize)).astype(np.float32), np.ones((filterSize, filterSize)).astype(np.float32),'valid')
cudaconv2.convFilterActs(img, filter, target, imgSize, modulesX, modulesX, -padding, stride, color, 1, 0.0, 1.0)
print 'pycuda output for convolution'
atarget = target.get()
print atarget
| gpl-3.0 | 1,375,027,835,190,148,900 | 28.540541 | 128 | 0.711538 | false | 3.279279 | false | false | false |
pyannote/pyannote-metrics | pyannote/metrics/detection.py | 1 | 20201 | #!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2012-2020 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
# Marvin LAVECHIN
from .base import BaseMetric, f_measure
from .utils import UEMSupportMixin
DER_NAME = 'detection error rate'
DER_TOTAL = 'total'
DER_FALSE_ALARM = 'false alarm'
DER_MISS = 'miss'
class DetectionErrorRate(UEMSupportMixin, BaseMetric):
"""Detection error rate
This metric can be used to evaluate binary classification tasks such as
speech activity detection, for instance. Inputs are expected to only
contain segments corresponding to the positive class (e.g. speech regions).
Gaps in the inputs are considered the negative class (e.g. non-speech
regions).
It is computed as (fa + miss) / total, where fa is the duration of false
alarm (e.g. non-speech classified as speech), miss is the duration of
missed detection (e.g. speech classified as non-speech), and total is the
total duration of the positive class in the reference.
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
"""
@classmethod
def metric_name(cls):
return DER_NAME
@classmethod
def metric_components(cls):
return [DER_TOTAL, DER_FALSE_ALARM, DER_MISS]
def __init__(self, collar=0.0, skip_overlap=False, **kwargs):
super(DetectionErrorRate, self).__init__(**kwargs)
self.collar = collar
self.skip_overlap = skip_overlap
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
reference_ = reference.gaps(support=uem)
hypothesis_ = hypothesis.gaps(support=uem)
false_positive = 0.
for r_, h in reference_.co_iter(hypothesis):
false_positive += (r_ & h).duration
false_negative = 0.
for r, h_ in reference.co_iter(hypothesis_):
false_negative += (r & h_).duration
detail = {}
detail[DER_MISS] = false_negative
detail[DER_FALSE_ALARM] = false_positive
detail[DER_TOTAL] = reference.duration()
return detail
def compute_metric(self, detail):
error = 1. * (detail[DER_FALSE_ALARM] + detail[DER_MISS])
total = 1. * detail[DER_TOTAL]
if total == 0.:
if error == 0:
return 0.
else:
return 1.
else:
return error / total
ACCURACY_NAME = 'detection accuracy'
ACCURACY_TRUE_POSITIVE = 'true positive'
ACCURACY_TRUE_NEGATIVE = 'true negative'
ACCURACY_FALSE_POSITIVE = 'false positive'
ACCURACY_FALSE_NEGATIVE = 'false negative'
class DetectionAccuracy(DetectionErrorRate):
"""Detection accuracy
This metric can be used to evaluate binary classification tasks such as
speech activity detection, for instance. Inputs are expected to only
contain segments corresponding to the positive class (e.g. speech regions).
Gaps in the inputs are considered the negative class (e.g. non-speech
regions).
It is computed as (tp + tn) / total, where tp is the duration of true
positive (e.g. speech classified as speech), tn is the duration of true
negative (e.g. non-speech classified as non-speech), and total is the total
duration of the input signal.
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
"""
@classmethod
def metric_name(cls):
return ACCURACY_NAME
@classmethod
def metric_components(cls):
return [ACCURACY_TRUE_POSITIVE, ACCURACY_TRUE_NEGATIVE,
ACCURACY_FALSE_POSITIVE, ACCURACY_FALSE_NEGATIVE]
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
reference_ = reference.gaps(support=uem)
hypothesis_ = hypothesis.gaps(support=uem)
true_positive = 0.
for r, h in reference.co_iter(hypothesis):
true_positive += (r & h).duration
true_negative = 0.
for r_, h_ in reference_.co_iter(hypothesis_):
true_negative += (r_ & h_).duration
false_positive = 0.
for r_, h in reference_.co_iter(hypothesis):
false_positive += (r_ & h).duration
false_negative = 0.
for r, h_ in reference.co_iter(hypothesis_):
false_negative += (r & h_).duration
detail = {}
detail[ACCURACY_TRUE_NEGATIVE] = true_negative
detail[ACCURACY_TRUE_POSITIVE] = true_positive
detail[ACCURACY_FALSE_NEGATIVE] = false_negative
detail[ACCURACY_FALSE_POSITIVE] = false_positive
return detail
def compute_metric(self, detail):
numerator = 1. * (detail[ACCURACY_TRUE_NEGATIVE] +
detail[ACCURACY_TRUE_POSITIVE])
denominator = 1. * (detail[ACCURACY_TRUE_NEGATIVE] +
detail[ACCURACY_TRUE_POSITIVE] +
detail[ACCURACY_FALSE_NEGATIVE] +
detail[ACCURACY_FALSE_POSITIVE])
if denominator == 0.:
return 1.
else:
return numerator / denominator
PRECISION_NAME = 'detection precision'
PRECISION_RETRIEVED = 'retrieved'
PRECISION_RELEVANT_RETRIEVED = 'relevant retrieved'
class DetectionPrecision(DetectionErrorRate):
"""Detection precision
This metric can be used to evaluate binary classification tasks such as
speech activity detection, for instance. Inputs are expected to only
contain segments corresponding to the positive class (e.g. speech regions).
Gaps in the inputs are considered the negative class (e.g. non-speech
regions).
It is computed as tp / (tp + fp), where tp is the duration of true positive
(e.g. speech classified as speech), and fp is the duration of false
positive (e.g. non-speech classified as speech).
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
"""
@classmethod
def metric_name(cls):
return PRECISION_NAME
@classmethod
def metric_components(cls):
return [PRECISION_RETRIEVED, PRECISION_RELEVANT_RETRIEVED]
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
reference_ = reference.gaps(support=uem)
true_positive = 0.
for r, h in reference.co_iter(hypothesis):
true_positive += (r & h).duration
false_positive = 0.
for r_, h in reference_.co_iter(hypothesis):
false_positive += (r_ & h).duration
detail = {}
detail[PRECISION_RETRIEVED] = true_positive + false_positive
detail[PRECISION_RELEVANT_RETRIEVED] = true_positive
return detail
def compute_metric(self, detail):
relevant_retrieved = 1. * detail[PRECISION_RELEVANT_RETRIEVED]
retrieved = 1. * detail[PRECISION_RETRIEVED]
if retrieved == 0.:
return 1.
else:
return relevant_retrieved / retrieved
RECALL_NAME = 'detection recall'
RECALL_RELEVANT = 'relevant'
RECALL_RELEVANT_RETRIEVED = 'relevant retrieved'
class DetectionRecall(DetectionErrorRate):
"""Detection recall
This metric can be used to evaluate binary classification tasks such as
speech activity detection, for instance. Inputs are expected to only
contain segments corresponding to the positive class (e.g. speech regions).
Gaps in the inputs are considered the negative class (e.g. non-speech
regions).
It is computed as tp / (tp + fn), where tp is the duration of true positive
(e.g. speech classified as speech), and fn is the duration of false
negative (e.g. speech classified as non-speech).
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
"""
@classmethod
def metric_name(cls):
return RECALL_NAME
@classmethod
def metric_components(cls):
return [RECALL_RELEVANT, RECALL_RELEVANT_RETRIEVED]
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
hypothesis_ = hypothesis.gaps(support=uem)
true_positive = 0.
for r, h in reference.co_iter(hypothesis):
true_positive += (r & h).duration
false_negative = 0.
for r, h_ in reference.co_iter(hypothesis_):
false_negative += (r & h_).duration
detail = {}
detail[RECALL_RELEVANT] = true_positive + false_negative
detail[RECALL_RELEVANT_RETRIEVED] = true_positive
return detail
def compute_metric(self, detail):
relevant_retrieved = 1. * detail[RECALL_RELEVANT_RETRIEVED]
relevant = 1. * detail[RECALL_RELEVANT]
if relevant == 0.:
if relevant_retrieved == 0:
return 1.
else:
return 0.
else:
return relevant_retrieved / relevant
DFS_NAME = 'F[precision|recall]'
DFS_PRECISION_RETRIEVED = 'retrieved'
DFS_RECALL_RELEVANT = 'relevant'
DFS_RELEVANT_RETRIEVED = 'relevant retrieved'
class DetectionPrecisionRecallFMeasure(UEMSupportMixin, BaseMetric):
"""Compute detection precision and recall, and return their F-score
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
beta : float, optional
When beta > 1, greater importance is given to recall.
When beta < 1, greater importance is given to precision.
Defaults to 1.
See also
--------
pyannote.metrics.detection.DetectionPrecision
pyannote.metrics.detection.DetectionRecall
pyannote.metrics.base.f_measure
"""
@classmethod
def metric_name(cls):
return DFS_NAME
@classmethod
def metric_components(cls):
return [DFS_PRECISION_RETRIEVED, DFS_RECALL_RELEVANT, DFS_RELEVANT_RETRIEVED]
def __init__(self, collar=0.0, skip_overlap=False,
beta=1., **kwargs):
super(DetectionPrecisionRecallFMeasure, self).__init__(**kwargs)
self.collar = collar
self.skip_overlap = skip_overlap
self.beta = beta
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
reference_ = reference.gaps(support=uem)
hypothesis_ = hypothesis.gaps(support=uem)
# Better to recompute everything from scratch instead of calling the
# DetectionPrecision & DetectionRecall classes (we skip one of the loop
# that computes the amount of true positives).
true_positive = 0.
for r, h in reference.co_iter(hypothesis):
true_positive += (r & h).duration
false_positive = 0.
for r_, h in reference_.co_iter(hypothesis):
false_positive += (r_ & h).duration
false_negative = 0.
for r, h_ in reference.co_iter(hypothesis_):
false_negative += (r & h_).duration
detail = {DFS_PRECISION_RETRIEVED: true_positive + false_positive,
DFS_RECALL_RELEVANT: true_positive + false_negative,
DFS_RELEVANT_RETRIEVED: true_positive}
return detail
def compute_metric(self, detail):
_, _, value = self.compute_metrics(detail=detail)
return value
def compute_metrics(self, detail=None):
detail = self.accumulated_ if detail is None else detail
precision_retrieved = detail[DFS_PRECISION_RETRIEVED]
recall_relevant = detail[DFS_RECALL_RELEVANT]
relevant_retrieved = detail[DFS_RELEVANT_RETRIEVED]
# Special cases : precision
if precision_retrieved == 0.:
precision = 1
else:
precision = relevant_retrieved / precision_retrieved
# Special cases : recall
if recall_relevant == 0.:
if relevant_retrieved == 0:
recall = 1.
else:
recall = 0.
else:
recall = relevant_retrieved / recall_relevant
return precision, recall, f_measure(precision, recall, beta=self.beta)
DCF_NAME = 'detection cost function'
DCF_POS_TOTAL = 'positive class total' # Total duration of positive class.
DCF_NEG_TOTAL = 'negative class total' # Total duration of negative class.
DCF_FALSE_ALARM = 'false alarm' # Total duration of false alarms.
DCF_MISS = 'miss' # Total duration of misses.
class DetectionCostFunction(UEMSupportMixin, BaseMetric):
"""Detection cost function.
This metric can be used to evaluate binary classification tasks such as
speech activity detection. Inputs are expected to only contain segments
corresponding to the positive class (e.g. speech regions). Gaps in the
inputs are considered the negative class (e.g. non-speech regions).
Detection cost function (DCF), as defined by NIST for OpenSAT 2019, is
0.25*far + 0.75*missr, where far is the false alarm rate
(i.e., the proportion of non-speech incorrectly classified as speech)
and missr is the miss rate (i.e., the proportion of speech incorrectly
classified as non-speech).
Parameters
----------
collar : float, optional
Duration (in seconds) of collars removed from evaluation around
boundaries of reference segments (one half before, one half after).
Defaults to 0.0.
skip_overlap : bool, optional
Set to True to not evaluate overlap regions.
Defaults to False (i.e. keep overlap regions).
fa_weight : float, optional
Weight for false alarm rate.
Defaults to 0.25.
miss_weight : float, optional
Weight for miss rate.
Defaults to 0.75.
kwargs
Keyword arguments passed to :class:`pyannote.metrics.base.BaseMetric`.
References
----------
"OpenSAT19 Evaluation Plan v2." https://www.nist.gov/system/files/documents/2018/11/05/opensat19_evaluation_plan_v2_11-5-18.pdf
"""
def __init__(self, collar=0.0, skip_overlap=False, fa_weight=0.25,
miss_weight=0.75, **kwargs):
super(DetectionCostFunction, self).__init__(**kwargs)
self.collar = collar
self.skip_overlap = skip_overlap
self.fa_weight = fa_weight
self.miss_weight = miss_weight
@classmethod
def metric_name(cls):
return DCF_NAME
@classmethod
def metric_components(cls):
return [DCF_POS_TOTAL, DCF_NEG_TOTAL, DCF_FALSE_ALARM, DCF_MISS]
def compute_components(self, reference, hypothesis, uem=None, **kwargs):
reference, hypothesis, uem = self.uemify(
reference, hypothesis, uem=uem,
collar=self.collar, skip_overlap=self.skip_overlap,
returns_uem=True)
# Obtain timelines corresponding to positive class.
reference = reference.get_timeline(copy=False).support()
hypothesis = hypothesis.get_timeline(copy=False).support()
# Obtain timelines corresponding to negative class.
reference_ = reference.gaps(support=uem)
hypothesis_ = hypothesis.gaps(support=uem)
# Compute total positive/negative durations.
pos_dur = reference.duration()
neg_dur = reference_.duration()
# Compute total miss duration.
miss_dur = 0.0
for r, h_ in reference.co_iter(hypothesis_):
miss_dur += (r & h_).duration
# Compute total false alarm duration.
fa_dur = 0.0
for r_, h in reference_.co_iter(hypothesis):
fa_dur += (r_ & h).duration
components = {
DCF_POS_TOTAL : pos_dur,
DCF_NEG_TOTAL : neg_dur,
DCF_MISS : miss_dur,
DCF_FALSE_ALARM : fa_dur}
return components
def compute_metric(self, components):
def _compute_rate(num, denom):
if denom == 0.0:
if num == 0.0:
return 0.0
return 1.0
return num/denom
# Compute false alarm rate.
neg_dur = components[DCF_NEG_TOTAL]
fa_dur = components[DCF_FALSE_ALARM]
fa_rate = _compute_rate(fa_dur, neg_dur)
# Compute miss rate.
pos_dur = components[DCF_POS_TOTAL]
miss_dur = components[DCF_MISS]
miss_rate = _compute_rate(miss_dur, pos_dur)
return self.fa_weight*fa_rate + self.miss_weight*miss_rate
| mit | -7,760,661,166,790,345,000 | 34.069444 | 131 | 0.645941 | false | 3.966228 | false | false | false |
lcpt/xc | python_modules/postprocess/xcVtk/element_property_diagram.py | 1 | 6110 | # -*- coding: utf-8 -*-
''' Diagram to display a property defined over linear elements. '''
__author__= "Luis C. Pérez Tato (LCPT) , Ana Ortega (AO_O) "
__copyright__= "Copyright 2016, LCPT, AO_O"
__license__= "GPL"
__version__= "3.0"
__email__= "[email protected], [email protected] "
from miscUtils import LogMessages as lmsg
import vtk
from postprocess.xcVtk import colored_diagram as cd
from postprocess import extrapolate_elem_attr
class ElementPropertyDiagram(cd.ColoredDiagram):
'''Diagram of element properties'''
envelopes= set(['N+','N-','My+','My-','Mz+','Mz-','Vy+','Vy-','Vz+','Vz-','T+','T-'])
def __init__(self,scaleFactor,fUnitConv,sets,propertyName):
'''Diagram that represents a property value over several sets of elements.
:param scaleFactor: scale factor (size of the diagram).
:param fUnitConv: unit conversion factor (i.e N->kN => fUnitConv= 1e-3).
:param propertyName: name of the element's property to represent.
:param sets: represent the field over those element sets.
'''
super(ElementPropertyDiagram,self).__init__(scaleFactor,fUnitConv)
self.lstSets= sets
self.propertyName= propertyName
def appendDataSetToDiagram(self, eSet,indxDiagrama,defFScale=0.0):
''' Append property values to diagram .
:param eSet: Element set.
:param defFScale: factor to apply to current displacement of nodes
so that the display position of each node equals to
the initial position plus its displacement multiplied
by this factor. (Defaults to 0.0, i.e. display of
initial/undeformed shape)
'''
elems= eSet.getElements
if(self.propertyName=='FCTNCP'):
for e in elems:
self.vDir= e.getJVector3d(True) #initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='FCVCP'):
for e in elems:
self.vDir= e.getJVector3d(True) #initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='N+'):
for e in elems:
self.vDir= e.getJVector3d(True) #initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='N-'):
for e in elems:
self.vDir= e.getJVector3d(True) #initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='My+'):
for e in elems:
self.vDir= e.getKVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='My-'):
for e in elems:
self.vDir= e.getKVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Mz+'):
for e in elems:
self.vDir= e.getJVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Mz-'):
for e in elems:
self.vDir= e.getJVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Vy+'):
for e in elems:
self.vDir= e.getJVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Vy-'):
for e in elems:
self.vDir= e.getJVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Vz+'):
for e in elems:
self.vDir= e.getKVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
elif(self.propertyName=='Vz-'):
for e in elems:
self.vDir= e.getKVector3d(True) # initialGeometry= True
values= e.getProp(self.propertyName) # [back node value, front node value]
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,values[0],values[1],defFScale)
else:
extrapolate_elem_attr.extrapolate_elem_function_attr(elems,self.propertyName,"getProp", self.propertyName)
for e in elems:
self.vDir= e.getJVector3d(True) #initialGeometry= True
v0= e.getNodes[0].getProp(self.propertyName)
v1= e.getNodes[1].getProp(self.propertyName)
indxDiagrama= self.appendDataToDiagram(e,indxDiagrama,v0,v1,defFScale)
def addDiagram(self):
self.creaEstrucDatosDiagrama()
self.creaLookUpTable()
self.creaActorDiagrama()
indxDiagrama= 0
indiceSet= 0
numSetsDiagrama= len(self.lstSets)
for s in self.lstSets:
self.appendDataSetToDiagram(s,indxDiagrama)
self.updateLookUpTable()
self.updateActorDiagrama()
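# Illustrative usage (hedged sketch; `beamSet` is a hypothetical element set):
#
#   diagram= ElementPropertyDiagram(scaleFactor= 1.0, fUnitConv= 1e-3, sets= [beamSet], propertyName= 'My+')
#   diagram.addDiagram()
#
# propertyName should be one of the envelope names listed in `envelopes` or a
# property already stored on the elements; any other property name is
# extrapolated from the elements to the nodes before plotting.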
| gpl-3.0 | 8,623,899,374,031,308,000 | 47.102362 | 112 | 0.678343 | false | 3.206824 | false | false | false |
tcmitchell/geni-tools | src/gcf/geni/util/cred_util.py | 3 | 18496 | #----------------------------------------------------------------------
# Copyright (c) 2010-2016 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
'''
Credential creation and verification utilities.
'''
from __future__ import absolute_import
import os
import logging
import xmlrpclib
import sys
import datetime
import dateutil
from ...sfa.trust import credential as cred
from ...sfa.trust import gid
from ...sfa.trust import rights
from ...sfa.util.xrn import hrn_authfor_hrn
from ...sfa.trust.credential_factory import CredentialFactory
from ...sfa.trust.abac_credential import ABACCredential
from ...sfa.trust.certificate import Certificate
from .speaksfor_util import determine_speaks_for
def naiveUTC(dt):
"""Converts dt to a naive datetime in UTC.
if 'dt' has a timezone then
convert to UTC
strip off timezone (make it "naive" in Python parlance)
"""
if dt.tzinfo:
tz_utc = dateutil.tz.tzutc()
dt = dt.astimezone(tz_utc)
dt = dt.replace(tzinfo=None)
return dt
class CredentialVerifier(object):
"""Utilities to verify signed credentials from a given set of
root certificates. Will compare target and source URNs, and privileges.
See verify and verify_from_strings methods in particular."""
CATEDCERTSFNAME = 'CATedCACerts.pem'
# root_cert_fileordir is a trusted root cert file or directory of
# trusted roots for verifying credentials
def __init__(self, root_cert_fileordir):
self.logger = logging.getLogger('cred-verifier')
if root_cert_fileordir is None:
raise Exception("Missing Root certs argument")
elif os.path.isdir(root_cert_fileordir):
files = os.listdir(root_cert_fileordir)
self.root_cert_files = []
for file in files:
# FIXME: exclude files that aren't cert files?
if file == CredentialVerifier.CATEDCERTSFNAME:
continue
self.root_cert_files.append(os.path.expanduser(os.path.join(root_cert_fileordir, file)))
self.logger.info('Will accept credentials signed by any of %d root certs found in %s: %r' % (len(self.root_cert_files), root_cert_fileordir, self.root_cert_files))
elif os.path.isfile(root_cert_fileordir):
self.logger.info('Will accept credentials signed by the single root cert %s' % root_cert_fileordir)
self.root_cert_files = [root_cert_fileordir]
else:
raise Exception("Couldn't find Root certs in %s" % root_cert_fileordir)
@classmethod
def getCAsFileFromDir(cls, caCerts):
'''Take a directory of CA certificates and concatenate them into a single
file suitable for use by the Python SSL library to validate client
credentials. Existing file is replaced.'''
if caCerts is None:
raise Exception ('Missing caCerts argument')
if os.path.isfile(os.path.expanduser(caCerts)):
return caCerts
if not os.path.isdir(os.path.expanduser(caCerts)):
raise Exception ('caCerts arg Not a file or a dir: %s' % caCerts)
logger = logging.getLogger('cred-verifier')
# Now we have a dir of caCerts files
# For each file in the dir (isfile), concatenate them into a new file
comboFullPath = os.path.join(caCerts, CredentialVerifier.CATEDCERTSFNAME)
caFiles = os.listdir(caCerts)
#logger.debug('Got %d potential caCert files in the dir', len(caFiles))
outfile = open(comboFullPath, "w")
okFileCount = 0
for filename in caFiles:
filepath = os.path.join(caCerts, filename)
# Confirm it's a CA file?
# if not file.endswith('.pem'):
# continue
if not os.path.isfile(os.path.expanduser(filepath)):
logger.debug('Skipping non file %s', filepath)
continue
if filename == CredentialVerifier.CATEDCERTSFNAME:
# logger.debug('Skipping previous cated certs file')
continue
okFileCount += 1
logger.info("Adding trusted cert file %s", filename)
certfile = open(filepath)
for line in certfile:
outfile.write(line)
certfile.close()
outfile.close()
if okFileCount == 0:
sys.exit('Found NO trusted certs in %s!' % caCerts)
else:
logger.info('Combined dir of %d trusted certs %s into file %s for Python SSL support', okFileCount, caCerts, comboFullPath)
return comboFullPath
# Get the GID of the caller, substituting the real user if this is a 'speaks-for' invocation
def get_caller_gid(self, gid_string, cred_strings, options=None):
root_certs = \
[Certificate(filename=root_cert_file) \
for root_cert_file in self.root_cert_files]
caller_gid = gid.GID(string=gid_string)
# Potentially, change gid_string to be the cert of the actual user
# if this is a 'speaks-for' invocation
speaksfor_gid = \
determine_speaks_for(self.logger, \
cred_strings, # May include ABAC speaks_for credential
caller_gid, # Caller cert (may be the tool 'speaking for' user)
options, # May include 'geni_speaking_for' option with user URN
root_certs
)
if caller_gid.get_subject() != speaksfor_gid.get_subject():
speaksfor_urn = speaksfor_gid.get_urn()
self.logger.info("Speaks-for Invocation: %s speaking for %s" % (caller_gid.get_urn(), speaksfor_urn))
caller_gid = speaksfor_gid
return caller_gid
def verify_from_strings(self, gid_string, cred_strings, target_urn,
privileges, options=None):
'''Create Credential and GID objects from the given strings,
and then verify the GID has the right privileges according
to the given credentials on the given target.'''
def make_cred(cred_string):
credO = None
try:
credO = CredentialFactory.createCred(credString=cred_string)
except Exception, e:
self.logger.warn("Skipping unparsable credential. Error: %s. Credential begins: %s...", e, cred_string[:60])
return credO
# Get the GID of the caller, substituting the real user if this is a 'speaks-for' invocation
caller_gid = self.get_caller_gid(gid_string, cred_strings, options)
# Remove the abac credentials
cred_strings = [cred_string for cred_string in cred_strings \
if CredentialFactory.getType(cred_string) == cred.Credential.SFA_CREDENTIAL_TYPE]
return self.verify(caller_gid,
map(make_cred, cred_strings),
target_urn,
privileges)
def verify_source(self, source_gid, credential):
'''Ensure the credential is giving privileges to the caller/client.
Return True iff the given source (client) GID's URN
equals the given credential's Caller (Owner) URN'''
source_urn = source_gid.get_urn()
cred_source_urn = credential.get_gid_caller().get_urn()
#self.logger.debug('Verifying source %r against credential source %r (cred target %s)',
# source_urn, cred_source_urn, credential.get_gid_object().get_urn())
result = (cred_source_urn == source_urn)
if result:
# self.logger.debug('Source URNs match')
pass
else:
self.logger.debug('Source URNs do not match. Source URN %r != credential source URN %r', source_urn, cred_source_urn)
return result
def verify_target(self, target_urn, credential):
'''Ensure the credential is giving privileges on the right subject/target.
Return True if no target is specified, or the target URN
matches the credential's Object's (target's) URN, else return False.
No target is required, for example, to ListResources.'''
if not target_urn:
# self.logger.debug('No target specified, considering it a match.')
return True
else:
cred_target_urn = credential.get_gid_object().get_urn()
# self.logger.debug('Verifying target %r against credential target %r',
# target_urn, cred_target_urn)
result = target_urn == cred_target_urn
if result:
# self.logger.debug('Target URNs match.')
pass
else:
self.logger.debug('Target URNs do NOT match. Target URN %r != Credential URN %r', target_urn, cred_target_urn)
return result
def verify_privileges(self, privileges, credential):
''' Return True iff the given credential gives the privilege
to perform ALL of the privileges (actions) in the given list.
In particular, the given list of 'privileges' is really a list
of names of operations. The privileges in credentials are
each turned in to Rights objects (see sfa/trust/rights.py).
And the SFA rights table is used to map from names of privileges
as specified in credentials, to names of operations.'''
result = True
privs = credential.get_privileges()
for priv in privileges:
if not privs.can_perform(priv):
self.logger.debug('Privilege %s not found on credential %s of %s', priv, credential.get_gid_object().get_urn(), credential.get_gid_caller().get_urn())
result = False
return result
def verify(self, gid, credentials, target_urn, privileges):
'''Verify that the given Source GID supplied at least one credential
in the given list of credentials that has all the privileges required
in the privileges list on the given target.
IE if any of the supplied credentials has a caller that matches gid
and a target that matches target_urn, and has all the privileges in
the given list, then return the list of credentials that were ok.
If no target_urn is supplied, then no credential is required, but any supplied
credential must be valid.
Throw an Exception if we fail to verify any credential.'''
# Note that here we treat a list of credentials as being options
# Alternatively could accumulate privileges for example.
# The semantics of the list of credentials is under specified.
self.logger.debug('Verifying privileges')
result = list()
failure = ""
tried_creds = ""
if len(credentials) == 0:
if (target_urn is None):
self.logger.debug("No credentials, but also no target, so OK")
return result
else:
# EG a slice_urn was supplied but no credentials
failure = "No credentials found"
for cred in credentials:
if cred is None:
failure = "Credential was unparseable"
continue
if cred.get_cred_type() == cred.SFA_CREDENTIAL_TYPE:
cS = cred.get_gid_caller().get_urn()
elif cred.get_cred_type() == ABACCredential.ABAC_CREDENTIAL_TYPE:
cS = cred.get_summary_tostring()
else:
cS = "Unknown credential type %s" % cred.get_cred_type()
if tried_creds != "":
tried_creds = "%s, %s" % (tried_creds, cS)
else:
tried_creds = cS
if cred.get_cred_type() != cred.SFA_CREDENTIAL_TYPE:
failure = "Not an SFA credential: " + cS
continue
if not self.verify_source(gid, cred):
failure = "Cred %s fails: Credential doesn't grant rights to you (%s), but to %s (over object %s)" % (cred.get_gid_caller().get_urn(), gid.get_urn(), cred.get_gid_caller().get_urn(), cred.get_gid_object().get_urn())
continue
if not self.verify_target(target_urn, cred):
failure = "Cred granting rights to %s on %s fails: It grants permissions over a different target, not %s (URNs dont match)" % (cred.get_gid_caller().get_urn(), cred.get_gid_object().get_urn(), target_urn)
continue
if not self.verify_privileges(privileges, cred):
failure = "Cred for %s over %s doesn't provide sufficient privileges" % (cred.get_gid_caller().get_urn(), cred.get_gid_object().get_urn())
continue
try:
if not cred.verify(self.root_cert_files):
failure = "Couldn't validate credential for caller %s with target %s with any of %d known root certs" % (cred.get_gid_caller().get_urn(), cred.get_gid_object().get_urn(), len(self.root_cert_files))
continue
except Exception, exc:
failure = "Couldn't validate credential for caller %s with target %s with any of %d known root certs: %s: %s" % (cred.get_gid_caller().get_urn(), cred.get_gid_object().get_urn(), len(self.root_cert_files), exc.__class__.__name__, exc)
self.logger.info(failure)
continue
# If got here it verified
result.append(cred)
if result and result != list():
# At least one credential verified ok and was added to the list
# return that list
return result
else:
# We did not find any credential with sufficient privileges
# Raise an exception.
fault_code = 'Insufficient privileges'
fault_string = 'No credential was found with appropriate privileges. Tried %s. Last failure: %s' % (tried_creds, failure)
self.logger.error(fault_string)
# GCF ticket #120 - do not raise an xmlrpclib Fault here -
# just an Exception. But the caller may want to turn this
# into one
# raise xmlrpclib.Fault(fault_code, fault_string)
raise Exception(fault_string)
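# Illustrative usage (hedged sketch; the trusted-roots path and privilege name
# are hypothetical):
#
#   verifier = CredentialVerifier('/etc/gcf/trusted_roots')
#   try:
#       creds = verifier.verify_from_strings(client_cert_pem, cred_strings,
#                                            slice_urn, ('listnodes',), options)
#   except Exception:
#       # treat as an authorization failure
#       ...
#
# Every credential returned in `creds` granted all of the requested privileges.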
def create_credential(caller_gid, object_gid, expiration, typename, issuer_keyfile, issuer_certfile, trusted_roots, delegatable=False):
'''Create and Return a Credential object issued by given key/cert for the given caller
and object GID objects, given life in seconds, and given type.
Privileges are determined by type per sfa/trust/rights.py
Privileges are delegatable if requested.'''
# FIXME: Validate args: my gids, >0 life,
# type of cred one I can issue
# and readable key and cert files
if caller_gid is None:
raise ValueError("Missing Caller GID")
if object_gid is None:
raise ValueError("Missing Object GID")
if expiration is None:
raise ValueError("Missing expiration")
naive_expiration = naiveUTC(expiration)
duration = naive_expiration - datetime.datetime.utcnow()
life_secs = duration.seconds + duration.days * 24 * 3600
if life_secs < 1:
raise ValueError("Credential expiration is in the past")
if trusted_roots is None:
raise ValueError("Missing list of trusted roots")
if typename is None or typename.strip() == '':
raise ValueError("Missing credential type")
typename = typename.strip().lower()
if typename not in ("user", "sa", "ma", "authority", "slice", "component"):
raise ValueError("Unknown credential type %s" % typename)
if not os.path.isfile(issuer_keyfile):
raise ValueError("Cant read issuer key file %s" % issuer_keyfile)
if not os.path.isfile(issuer_certfile):
raise ValueError("Cant read issuer cert file %s" % issuer_certfile)
issuer_gid = gid.GID(filename=issuer_certfile)
if not (object_gid.get_urn() == issuer_gid.get_urn() or
(issuer_gid.get_type().find('authority') == 0 and
hrn_authfor_hrn(issuer_gid.get_hrn(), object_gid.get_hrn()))):
raise ValueError("Issuer not authorized to issue credential: Issuer=%s Target=%s" % (issuer_gid.get_urn(), object_gid.get_urn()))
ucred = cred.Credential()
# FIXME: Validate the caller_gid and object_gid
# are my user and slice
# Do get_issuer and compare to the issuer cert?
# Or do gid.is_signed_by_cert(issuer_certfile)?
ucred.set_gid_caller(caller_gid)
ucred.set_gid_object(object_gid)
ucred.set_expiration(expiration)
# Use sfa/trust/rights.py to figure out what privileges
# the credential should have.
# user means refresh, resolve, info
# per the privilege_table that lets users do
# remove, update, resolve, list, getcredential,
# listslices, listnodes, getpolicy
# Note that it does not allow manipulating slivers
# And every right is delegatable if any are delegatable (default False)
privileges = rights.determine_rights(typename, None)
privileges.delegate_all_privileges(delegatable)
ucred.set_privileges(privileges)
ucred.encode()
ucred.set_issuer_keys(issuer_keyfile, issuer_certfile)
ucred.sign()
try:
ucred.verify(trusted_roots)
except Exception, exc:
raise Exception("Create Credential failed to verify new credential from trusted roots: %s" % exc)
return ucred
| mit | -8,162,798,833,670,636,000 | 45.825316 | 250 | 0.629001 | false | 4.164828 | false | false | false |
intel-hpdd/intel-manager-for-lustre | tests/unit/services/http_agent/test_register.py | 1 | 4359 | import json
import mock
from django.test import Client
from chroma_core.models import ManagedHost, ServerProfile, Nid
from chroma_core.models.registration_token import RegistrationToken
from chroma_core.services.crypto import Crypto
from chroma_core.services.job_scheduler.job_scheduler_client import JobSchedulerClient
from chroma_agent_comms.views import ValidatedClientView
from tests.unit.chroma_core.helpers import generate_csr, synthetic_host, load_default_profile
from tests.unit.lib.iml_unit_test_case import IMLUnitTestCase
from tests.utils import patch, timed
import settings
class TestRegistration(IMLUnitTestCase):
"""API unit tests for functionality used only by the agent"""
mock_servers = {
"mynewhost": {
"fqdn": "mynewhost.mycompany.com",
"nodename": "test01.mynewhost.mycompany.com",
"nids": [Nid.Nid("192.168.0.1", "tcp", 0)],
}
}
def setUp(self):
super(TestRegistration, self).setUp()
load_default_profile()
self.old_create_host = JobSchedulerClient.create_host
JobSchedulerClient.create_host = mock.Mock(
side_effect=lambda *args, **kwargs: (
synthetic_host("mynewhost", **self.mock_servers["mynewhost"]),
mock.Mock(id="bar"),
)
)
ValidatedClientView.valid_certs = {}
def tearDown(self):
JobSchedulerClient.create_host = self.old_create_host
def test_version(self):
host_info = self.mock_servers["mynewhost"]
with timed("csr", 10):
data = {
"fqdn": host_info["fqdn"],
"nodename": host_info["nodename"],
"version": "1.0",
"capabilities": ["manage_targets"],
"address": "mynewhost",
"csr": generate_csr(host_info["fqdn"]),
}
with patch(settings, VERSION="2.0"):
# Try with a mis-matched version
token = RegistrationToken.objects.create(profile=ServerProfile.objects.get())
with timed("register fail", 10):
response = Client().post(
"/agent/register/%s/" % token.secret, data=json.dumps(data), content_type="application/json"
)
self.assertEqual(response.status_code, 400)
# Try with a matching version
token = RegistrationToken.objects.create(profile=ServerProfile.objects.get())
settings.VERSION = "1.1"
with timed("register pass", 10):
response = Client().post(
"/agent/register/%s/" % token.secret, data=json.dumps(data), content_type="application/json"
)
self.assertEqual(response.status_code, 201)
content = json.loads(response.content)
# reregistration should fail with unknown serial
data = {"address": "mynewhost", "fqdn": "mynewhost.newcompany.com"}
headers = {"HTTP_X_SSL_CLIENT_NAME": host_info["fqdn"], "HTTP_X_SSL_CLIENT_SERIAL": ""}
response = Client().post(
"/agent/reregister/", data=json.dumps(data), content_type="application/json", **headers
)
self.assertEqual(response.status_code, 403)
# reregistration should update host's domain name
headers["HTTP_X_SSL_CLIENT_SERIAL"] = Crypto().get_serial(content["certificate"])
response = Client().post(
"/agent/reregister/", data=json.dumps(data), content_type="application/json", **headers
)
self.assertEqual(response.status_code, 200)
host = ManagedHost.objects.get(id=content["host_id"])
self.assertEqual(host.fqdn, data["fqdn"])
# TODO: reinstate selinux check, probably within the agent itself (it should fail
# its own registration step without even talking to the manager)
# def test_selinux_detection(self):
# """Test that a host with SELinux enabled fails setup."""
# MockAgentRpc.selinux_enabled = True
# try:
# import time
# host = self._create_host('myaddress')
# self.assertTrue(Command.objects.all().order_by("-id")[0].errored)
# self.assertState(host, 'unconfigured')
# finally:
# MockAgentRpc.selinux_enabled = False
| mit | 4,557,491,256,692,526,600 | 40.514286 | 112 | 0.607938 | false | 4.028651 | true | false | false |
jjbunn/PyGeneticAlgorithm | TravellingSalesmanGA.py | 1 | 4203 | '''
Created on May 5, 2015
@author: julian
'''
import math
import random
import matplotlib.pyplot as plt
from Chromosome import *
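# Hedged note: the Chromosome class comes from the sibling Chromosome module,
# which is not shown here. This script assumes roughly the following interface:
#   Chromosome(genes)           - wraps an ordered list of genes (city names)
#   .chromosome                 - the stored gene list
#   .chromosome_string()        - printable representation of the gene order
#   .mate_no_duplicates(other)  - crossover that avoids duplicate cities,
#                                 returning a pair of offspring
#   .mutate_swap()              - swap two randomly chosen genes in place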
REGION_SIZE = 100.0
NUMBER_OF_CITIES = 10
NUMBER_OF_CHROMOSOMES = 100
NUMBER_OF_EPOCHS = 100
MUTATION_PROBABILITY = 0.25
RANDOM_SEED = 9876
cities = {}
def plot_cities(chromosome=None, epoch=None):
plt.xlim((-REGION_SIZE*0.1,REGION_SIZE*1.1))
plt.ylim((-REGION_SIZE*0.1,REGION_SIZE*1.1))
plt.scatter([p[0] for p in cities.itervalues()], [p[1] for p in cities.itervalues()])
for name,xy in cities.iteritems():
plt.annotate(name,xy=xy,xytext=(xy[0]+1,xy[1]-1))
if chromosome:
plt.plot([cities[c][0] for c in chromosome], [cities[c][1] for c in chromosome])
plt.text(cities[chromosome[0]][0] + 2.0, cities[chromosome[0]][1] + 2.0, 'Start')
plt.text(cities[chromosome[-1]][0] + 2.0, cities[chromosome[-1]][1] + 2.0, 'Finish')
if epoch:
plt.title('EPOCH '+str(epoch))
plt.show()
def main():
# create a set of cities in a spiral in a square region
# choose an angle for the start of the spiral
phi = 0
# set the spiral twist loop number
loops = 1.5
# calculate the change in angle for each city
dphi = math.pi * loops / float(NUMBER_OF_CITIES)
for i in range(NUMBER_OF_CITIES):
# get radius of city centre
r = 0.5*REGION_SIZE*float(i+1)/float(NUMBER_OF_CITIES)
phi += dphi
xcity = 0.5*REGION_SIZE + r*math.cos(phi);
ycity = 0.5*REGION_SIZE + r*math.sin(phi);
city_name = chr(i+65)
# add this city to the dictionary
cities[city_name] = (xcity, ycity)
#plot_cities()
# create a population of chromosomes
# each chromosome will get a random ordering of cities to visit
chromosomes = []
random.seed(RANDOM_SEED)
for i in range(NUMBER_OF_CHROMOSOMES):
city_list = list(cities.keys())
random.shuffle(city_list)
chromosomes.append(Chromosome(city_list))
# we define a function which computes the path length for a given order of cities
def path_length(city_list, cities):
sum = 0.0
for i in range(1,len(city_list)):
(x1,y1) = cities[city_list[i-1]]
(x2,y2) = cities[city_list[i]]
sum += math.sqrt((x1-x2)**2+(y1-y2)**2)
return sum
epoch = 1
while True:
# find the path length for each chromosome
path_lengths = {}
for c in chromosomes:
path_lengths[c] = path_length(c.chromosome, cities)
sorted_chromosomes = sorted(path_lengths, key=path_lengths.get, reverse=False)
print 'Epoch',epoch,'Best chromosome',sorted_chromosomes[0].chromosome_string(), \
path_lengths[sorted_chromosomes[0]]
epoch += 1
if epoch > NUMBER_OF_EPOCHS:
break
# select the mating population
mating_population = sorted_chromosomes[:NUMBER_OF_CHROMOSOMES/2]
# have the population mate in pairs, to produce offspring
offspring_population = []
while len(offspring_population) < NUMBER_OF_CHROMOSOMES/2:
mother = random.choice(mating_population)
father = random.choice(mating_population)
(offspring1, offspring2) = mother.mate_no_duplicates(father)
# mutate the offspring with some probability
if random.random() < MUTATION_PROBABILITY:
offspring1.mutate_swap()
if random.random() < MUTATION_PROBABILITY:
offspring2.mutate_swap()
offspring_population.append(offspring1)
offspring_population.append(offspring2)
# the new population is the mating population plus the offspring
chromosomes = mating_population + offspring_population
# we plot the solution at the stopping condition
plot_cities(chromosomes[0].chromosome, str(epoch-1) + ' Best ' + chromosomes[0].chromosome_string())
if __name__ == '__main__':
main()
| apache-2.0 | 1,180,130,674,434,832,100 | 28.605634 | 104 | 0.595051 | false | 3.450739 | false | false | false |
JiaMingLin/de-identification | prob_models/dep_graph.py | 1 | 3714 | import common.constant as c
import time
import numpy as np
import pandas as pd
from common.base import Base
from itertools import combinations
from itertools import groupby
class DependencyGraph(Base):
# The dependency graph
dep_graph = None
def __init__(
self,
data = None,
edges = None,
noise_flag = True,
white_list = [],
eps1_val = c.EPSILON_1,
cramer = 0.2):
"""
__init__
Input:
1. DataUtils.Data
Procedure:
1. Store the privacy budget (eps1), the Cramer's V threshold and the noise flag.
2. If data is given, compute the noisy dependency edges via _run();
otherwise use the externally supplied edge list.
"""
self.LOG = Base.get_logger("DepGraph")
self.noise_flag = noise_flag
self.eps1_val = eps1_val
self.cramer = cramer
self.data = data
if data is None:
self.edges = edges
else:
self.edges = self._run()
self.white_list = white_list
def get_dep_edges(self, display = True):
pairwise_white_list = reduce(lambda acc, curr: acc+curr
,[list(combinations(cluster, 2)) for cluster in self.white_list]
,[])
if display is False:
return _get_edges_in_r(self.edges + pairwise_white_list)
return self.edges + pairwise_white_list
def set_white_list(self, white_list):
self.white_list = white_list
return self
def _run(self):
# get pandas df
if self.data is None:
raise Exception("The data is not specified.")
pandas_df = self.data.get_df()
# get domain
domains = self.data.get_domains()
self.LOG.info("Starting to compute Dep-Graph with eps1: %.2f..." % self.eps1_val)
start = time.time()
# attributes' name
attr_names = domains.keys()
# combinations of 2
comb = combinations(attr_names, 2)
mi_scale = self.compute_mi_scale()
noise_thresh_cv2 = np.random.laplace(0, mi_scale, 1)
filtered_pairs = []
for attrs_pair in comb:
col1_val = pandas_df[attrs_pair[0]]
col2_val = pandas_df[attrs_pair[1]]
if self.g_test(col1_val, col2_val, mi_scale, noise_thresh_cv2):
filtered_pairs += [attrs_pair]
end = time.time()
self.LOG.info("Compute Dep-Graph complete in %d seconds." % (end-start))
return filtered_pairs
def g_test(self, col1, col2, mi_scale, noise_thresh_cv2):
xmat = self.find_crosstab(col1, col2)
mi = self.get_mi(xmat)
attr1_lvs = sorted(set(col1))
attr2_lvs = sorted(set(col2))
min_length = min(len(attr1_lvs), len(attr2_lvs)) - 1
cv2_lh = mi + np.random.laplace(0, mi_scale, 1)
cv2_rh = (self.cramer ** 2) * min_length/2. + noise_thresh_cv2
return cv2_lh >= cv2_rh
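# Hedged note: the threshold appears to use the approximate relation between
# mutual information and Cramer's V for an r x c contingency table,
# MI ~= V**2 * (min(r, c) - 1) / 2, so `self.cramer` acts as a minimum
# association strength; Laplace noise on both sides keeps the comparison
# differentially private under the eps1 budget.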
def find_crosstab(self, col1, col2):
xtab = pd.crosstab(col1, col2)
return np.asarray(xtab)
def get_expected_sum(self, xmat):
rsums = np.sum(xmat, axis = 0).reshape(-1,1)
csums = np.sum(xmat, axis = 1).reshape(1,-1)
expected_sum = rsums * csums / float(np.sum(csums))
return expected_sum
def get_mi(self, xmat):
xmat = xmat / float(np.sum(xmat))
expected_sum = self.get_expected_sum(xmat)
summand = xmat/expected_sum.T
zeros = np.where(summand == 0)
summand[zeros] = 1
return np.sum(xmat * np.log(summand))
def compute_mi_scale(self):
eps_alpha_1 = self.amplify_epsilon_under_sampling(self.eps1_val)
sensitivity_scale_mi = self.compute_mi_sensitivity_scale(self.data.get_nrows(), False)
scale_mi = 2 * sensitivity_scale_mi / eps_alpha_1
return scale_mi
def amplify_epsilon_under_sampling(self, eps1, beta = 1):
eps_alpha = np.log(np.exp(1) ** (eps1) -1 + beta) - np.log(beta)
return eps_alpha
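# Worked check (illustrative): with the default beta = 1 (no sub-sampling) the
# expression reduces to log(e**eps1 - 1 + 1) - log(1) = eps1, i.e. the budget is
# unchanged; with sampling rate beta < 1 it returns a larger eps_alpha, which is
# the amplification-by-sampling effect.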
def compute_mi_sensitivity_scale(self, N, all_binary):
N = float(N)
if all_binary is True:
sen_scale = (1/N) * np.log(N) + (( N-1 )/N) * np.log(N/(N-1))
else:
sen_scale = (2/N) * np.log((N + 1)/2) + ((N-1)/N) * np.log((N+1)/(N-1))
return sen_scale
| apache-2.0 | 9,219,060,019,872,601,000 | 26.316176 | 88 | 0.660474 | false | 2.652857 | false | false | false |
nagyistoce/edx-analytics-dashboard | analytics_dashboard/core/templatetags/dashboard_extras.py | 3 | 3284 | from django import template
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from opaque_keys.edx.keys import CourseKey
from slugify import slugify
register = template.Library()
@register.simple_tag
def settings_value(name):
"""
Retrieve a value from settings.
If setting is not found, None is returned.
"""
return getattr(settings, name)
@register.filter
def metric_percentage(value):
# Translators: Simply move the percent symbol (%) to the correct location. Do NOT translate the word statistic.
percent_stat = _('{statistic}%')
percent = '0'
if value:
if value < 0.01:
percent = '< 1'
else:
percent = '{0}'.format(round(value, 3) * 100)
# pylint: disable=no-member
return percent_stat.format(statistic=percent)
@register.tag(name='captureas')
def do_captureas(parser, token):
"""
Capture contents of block into context.
Source:
https://djangosnippets.org/snippets/545/
Example:
{% captureas foo %}{{ foo.value }}-suffix{% endcaptureas %}
{% if foo in bar %}{% endif %}
"""
try:
__, args = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("'captureas' node requires a variable name.")
nodelist = parser.parse(('endcaptureas',))
parser.delete_first_token()
return CaptureasNode(nodelist, args)
class CaptureasNode(template.Node):
def __init__(self, nodelist, varname):
self.nodelist = nodelist
self.varname = varname
def render(self, context):
output = mark_safe(self.nodelist.render(context).strip())
context[self.varname] = output
return ''
@register.inclusion_tag('summary_point.html')
def summary_point(value, label, subheading=None, tooltip=None):
return {
'value': value,
'label': label,
'subheading': subheading,
'tooltip': tooltip
}
@register.inclusion_tag('section_error.html')
def show_chart_error(background_class=''):
"""
Returns the error section with default context.
Arguments
background_class -- CSS class to add to the background style
(e.g. 'white-background'). Default background is gray.
"""
return _get_base_error_context('chart', background_class)
@register.inclusion_tag('section_error.html')
def show_table_error():
return _get_base_error_context('table')
@register.inclusion_tag('section_error.html')
def show_metrics_error():
return _get_base_error_context('metrics')
def _get_base_error_context(content_type, background_class=''):
return {
'content_type': content_type,
'load_error_message': settings.DOCUMENTATION_LOAD_ERROR_MESSAGE,
'background_class': background_class
}
@register.filter
def format_course_key(course_key, separator=u'/'):
if isinstance(course_key, basestring):
course_key = CourseKey.from_string(course_key)
return separator.join([course_key.org, course_key.course, course_key.run])
@register.filter(is_safe=True)
@stringfilter
def unicode_slugify(value):
return slugify(value)
| agpl-3.0 | -858,292,709,170,517,100 | 25.918033 | 115 | 0.671133 | false | 3.909524 | false | false | false |
lsdlab/awesome_coffice | coffice/migrations/0001_initial.py | 1 | 2228 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-29 03:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment_message', models.CharField(blank=True, default='', max_length=140, null=True)),
('comment_user_name', models.CharField(blank=True, default='', max_length=70, null=True)),
('comment_date', models.DateTimeField(default=django.utils.timezone.now)),
('comment_mark', models.CharField(blank=True, default='comment', max_length=10, null=True)),
('comment_user_avatarurl', models.URLField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Spot',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(max_length=70)),
('name', models.CharField(max_length=70)),
('longitude', models.FloatField()),
('latitude', models.FloatField()),
('download_speed', models.CharField(blank=True, default='', max_length=70, null=True)),
('upload_speed', models.CharField(blank=True, default='', max_length=70, null=True)),
('speed_test_link', models.URLField(blank=True, default='', max_length=100, null=True)),
('price_indication', models.CharField(blank=True, default='', max_length=70, null=True)),
('bathroom', models.BooleanField(default=False)),
('commit_user_name', models.CharField(blank=True, default='', max_length=70, null=True)),
('commit_message', models.CharField(blank=True, max_length=140, null=True)),
('commit_date', models.DateTimeField(default=django.utils.timezone.now)),
],
),
]
| mit | 4,686,375,144,648,305,000 | 47.434783 | 114 | 0.585727 | false | 4.18797 | false | false | false |
lucacontini/blog | website/views.py | 1 | 1302 | # -*- coding: utf-8 -*-
from django.views.generic.list import ListView
from django.views.generic import TemplateView
from posts.models import Post, PostTag
from posts.views import PostListView
# Home page
class IndexView(PostListView):
template_name = "website/index.html"
# FAQ
class FAQView(TemplateView):
template_name = "website/faq.html"
# For XML serving
class XmlView(ListView):
queryset = Post.objects.all()
def get_context_data(self, *args, **kwargs):
request = self.request
protocol = "https://" if request.is_secure() else "http://"
context = super(ListView, self).get_context_data(*args, **kwargs)
context['posts'] = Post.objects.order_by('-create_date')
return context
# Sitemap (xml)
class SiteMapXML(XmlView):
template_name = "sitemap.xml"
content_type = "application/xml"
def get_context_data(self, *args, **kwargs):
context = super(SiteMapXML, self).get_context_data(*args, **kwargs)
context['tags'] = PostTag.objects.all()
return context
# Atom Feed (xml)
class AtomFeedXML(XmlView):
template_name = "atom.xml"
content_type = "application/atom+xml"
# RSS Feed (xml)
class RssFeedXML(XmlView):
template_name = "rss.xml"
content_type = "application/rss+xml"
| gpl-2.0 | -6,517,135,939,094,588,000 | 22.25 | 75 | 0.670507 | false | 3.472 | false | false | false |
Madefire/ios-video-wall | manager.py | 1 | 2481 | #!/usr/bin/env python
# Copyright (c) 2012, Madefire Inc.
# All rights reserved.
from __future__ import absolute_import
from threading import Event, Thread
from time import sleep
import SocketServer
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
PREPARE_DELAY = 2.5
IDLE_DELAY = 2.5
class Library:
def __init__(self, videos):
self._videos = videos
self._index = len(videos) - 1
def current(self):
return self._videos[self._index]
def next(self):
self._index += 1
if self._index == len(self._videos):
self._index = 0
return self.current()
event = Event()
# fill in the following array with the names of your videos and their
# durations, rounding up is better than down.
library = Library([('<video-name>', <video-duration-in-seconds>), ...])
class PlayRequestHandler(SocketServer.BaseRequestHandler ):
def setup(self):
logger.info('gained connection to %s', self.client_address)
def handle(self):
while True:
event.wait()
video = library.current()
try:
logger.debug('sending %s to %s', video[0], self.client_address)
self.request.send('prepare %s' % video[0])
sleep(PREPARE_DELAY)
self.request.send('play')
except:
return
def finish(self):
logger.info('lost connection to %s', self.client_address)
class Director(Thread):
def __init__(self):
self._running = True
Thread.__init__(self)
def run(self):
sleep(1)
while self._running:
video = library.next()
logger.info('playing %s for %d seconds', video[0], video[1])
event.set()
event.clear()
sleep(video[1] + PREPARE_DELAY + IDLE_DELAY)
logger.info('director finished')
def shutdown(self):
self._running = False
director = Director()
director.start()
class Server(SocketServer.ThreadingTCPServer):
def __init__(self, *args, **kwargs):
SocketServer.ThreadingTCPServer.__init__(self, *args, **kwargs)
self.allow_reuse_address = True
server = Server(('', 3333), PlayRequestHandler)
try:
logger.info('serving')
server.serve_forever()
except KeyboardInterrupt:
logger.info('shutting down')
director.shutdown()
server.shutdown()
director.join()
logger.info('done')
| bsd-3-clause | 7,586,206,250,553,452,000 | 24.060606 | 79 | 0.607416 | false | 3.907087 | false | false | false |
TresysTechnology/setools | setoolsgui/apol/terulequery.py | 1 | 18847 | # Copyright 2015-2016, Tresys Technology, LLC
#
# This file is part of SETools.
#
# SETools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1 of
# the License, or (at your option) any later version.
#
# SETools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SETools. If not, see
# <http://www.gnu.org/licenses/>.
#
import logging
from PyQt5.QtCore import Qt, QSortFilterProxyModel, QStringListModel, QThread
from PyQt5.QtGui import QPalette, QTextCursor
from PyQt5.QtWidgets import QCompleter, QHeaderView, QMessageBox, QProgressDialog
from setools import TERuleQuery
from ..logtosignal import LogHandlerToSignal
from ..models import PermListModel, SEToolsListModel, invert_list_selection
from ..terulemodel import TERuleTableModel
from .analysistab import AnalysisTab
from .exception import TabFieldError
from .queryupdater import QueryResultsUpdater
from .workspace import load_checkboxes, load_lineedits, load_listviews, load_textedits, \
save_checkboxes, save_lineedits, save_listviews, save_textedits
class TERuleQueryTab(AnalysisTab):
"""A Type Enforcement rule query."""
def __init__(self, parent, policy, perm_map):
super(TERuleQueryTab, self).__init__(parent)
self.log = logging.getLogger(__name__)
self.policy = policy
self.query = TERuleQuery(policy)
self.setupUi()
def __del__(self):
self.thread.quit()
self.thread.wait(5000)
logging.getLogger("setools.terulequery").removeHandler(self.handler)
def setupUi(self):
self.load_ui("apol/terulequery.ui")
# set up source/target autocompletion
typeattr_completion_list = [str(t) for t in self.policy.types()]
typeattr_completion_list.extend(str(a) for a in self.policy.typeattributes())
typeattr_completer_model = QStringListModel(self)
typeattr_completer_model.setStringList(sorted(typeattr_completion_list))
self.typeattr_completion = QCompleter()
self.typeattr_completion.setModel(typeattr_completer_model)
self.source.setCompleter(self.typeattr_completion)
self.target.setCompleter(self.typeattr_completion)
# set up default autocompletion
type_completion_list = [str(t) for t in self.policy.types()]
type_completer_model = QStringListModel(self)
type_completer_model.setStringList(sorted(type_completion_list))
self.type_completion = QCompleter()
self.type_completion.setModel(type_completer_model)
self.default_type.setCompleter(self.type_completion)
# setup indications of errors on source/target/default
self.errors = set()
self.orig_palette = self.source.palette()
self.error_palette = self.source.palette()
self.error_palette.setColor(QPalette.Base, Qt.red)
self.clear_source_error()
self.clear_target_error()
self.clear_default_error()
self.clear_xperm_error()
# populate class list
self.class_model = SEToolsListModel(self)
self.class_model.item_list = sorted(self.policy.classes())
self.tclass.setModel(self.class_model)
# populate perm list
self.perms_model = PermListModel(self, self.policy)
self.perms.setModel(self.perms_model)
# populate bool list
self.bool_model = SEToolsListModel(self)
self.bool_model.item_list = sorted(self.policy.bools())
self.bool_criteria.setModel(self.bool_model)
# set up results
self.table_results_model = TERuleTableModel(self)
self.sort_proxy = QSortFilterProxyModel(self)
self.sort_proxy.setSourceModel(self.table_results_model)
self.table_results.setModel(self.sort_proxy)
self.table_results.sortByColumn(0, Qt.AscendingOrder)
# set up processing thread
self.thread = QThread()
self.worker = QueryResultsUpdater(self.query, self.table_results_model)
self.worker.moveToThread(self.thread)
self.worker.raw_line.connect(self.raw_results.appendPlainText)
self.worker.finished.connect(self.update_complete)
self.worker.finished.connect(self.thread.quit)
self.thread.started.connect(self.worker.update)
# create a "busy, please wait" dialog
self.busy = QProgressDialog(self)
self.busy.setModal(True)
self.busy.setRange(0, 0)
self.busy.setMinimumDuration(0)
self.busy.canceled.connect(self.thread.requestInterruption)
self.busy.reset()
# update busy dialog from query INFO logs
self.handler = LogHandlerToSignal()
self.handler.message.connect(self.busy.setLabelText)
logging.getLogger("setools.terulequery").addHandler(self.handler)
# Ensure settings are consistent with the initial .ui state
self.set_source_regex(self.source_regex.isChecked())
self.set_target_regex(self.target_regex.isChecked())
self.set_default_regex(self.default_regex.isChecked())
self.toggle_xperm_criteria()
self.criteria_frame.setHidden(not self.criteria_expander.isChecked())
self.notes.setHidden(not self.notes_expander.isChecked())
# connect signals
self.buttonBox.clicked.connect(self.run)
self.allowxperm.toggled.connect(self.toggle_xperm_criteria)
self.auditallowxperm.toggled.connect(self.toggle_xperm_criteria)
self.neverallowxperm.toggled.connect(self.toggle_xperm_criteria)
self.dontauditxperm.toggled.connect(self.toggle_xperm_criteria)
self.clear_ruletypes.clicked.connect(self.clear_all_ruletypes)
self.all_ruletypes.clicked.connect(self.set_all_ruletypes)
self.source.textEdited.connect(self.clear_source_error)
self.source.editingFinished.connect(self.set_source)
self.source_regex.toggled.connect(self.set_source_regex)
self.target.textEdited.connect(self.clear_target_error)
self.target.editingFinished.connect(self.set_target)
self.target_regex.toggled.connect(self.set_target_regex)
self.tclass.selectionModel().selectionChanged.connect(self.set_tclass)
self.invert_class.clicked.connect(self.invert_tclass_selection)
self.perms.selectionModel().selectionChanged.connect(self.set_perms)
self.invert_perms.clicked.connect(self.invert_perms_selection)
self.xperms.textEdited.connect(self.clear_xperm_error)
self.xperms.editingFinished.connect(self.set_xperm)
self.default_type.textEdited.connect(self.clear_default_error)
self.default_type.editingFinished.connect(self.set_default_type)
self.default_regex.toggled.connect(self.set_default_regex)
self.bool_criteria.selectionModel().selectionChanged.connect(self.set_bools)
#
# Ruletype criteria
#
def _set_ruletypes(self, value):
self.allow.setChecked(value)
self.allowxperm.setChecked(value)
self.auditallow.setChecked(value)
self.auditallowxperm.setChecked(value)
self.neverallow.setChecked(value)
self.neverallowxperm.setChecked(value)
self.dontaudit.setChecked(value)
self.dontauditxperm.setChecked(value)
self.type_transition.setChecked(value)
self.type_member.setChecked(value)
self.type_change.setChecked(value)
def set_all_ruletypes(self):
self._set_ruletypes(True)
def clear_all_ruletypes(self):
self._set_ruletypes(False)
#
# Source criteria
#
def clear_source_error(self):
self.clear_criteria_error(self.source, "Match the source type/attribute of the rule.")
def set_source(self):
try:
self.query.source = self.source.text()
except Exception as ex:
self.log.error("Source type/attribute error: {0}".format(ex))
self.set_criteria_error(self.source, ex)
def set_source_regex(self, state):
self.log.debug("Setting source_regex {0}".format(state))
self.query.source_regex = state
self.clear_source_error()
self.set_source()
#
# Target criteria
#
def clear_target_error(self):
self.clear_criteria_error(self.target, "Match the target type/attribute of the rule.")
def set_target(self):
try:
self.query.target = self.target.text()
except Exception as ex:
self.log.error("Target type/attribute error: {0}".format(ex))
self.set_criteria_error(self.target, ex)
def set_target_regex(self, state):
self.log.debug("Setting target_regex {0}".format(state))
self.query.target_regex = state
self.clear_target_error()
self.set_target()
#
# Class criteria
#
def set_tclass(self):
selected_classes = []
for index in self.tclass.selectionModel().selectedIndexes():
selected_classes.append(self.class_model.data(index, Qt.UserRole))
self.query.tclass = selected_classes
self.perms_model.set_classes(selected_classes)
def invert_tclass_selection(self):
invert_list_selection(self.tclass.selectionModel())
#
# Permissions criteria
#
def set_perms(self):
selected_perms = []
for index in self.perms.selectionModel().selectedIndexes():
selected_perms.append(self.perms_model.data(index, Qt.UserRole))
self.query.perms = selected_perms
def invert_perms_selection(self):
invert_list_selection(self.perms.selectionModel())
#
# Extended permission criteria
#
def toggle_xperm_criteria(self):
mode = any((self.allowxperm.isChecked(),
self.auditallowxperm.isChecked(),
self.neverallowxperm.isChecked(),
self.dontauditxperm.isChecked()))
self.xperms.setEnabled(mode)
self.xperms_equal.setEnabled(mode)
def clear_xperm_error(self):
self.clear_criteria_error(self.xperms,
"Match the extended permissions of the rule. "
"Comma-separated permissions or ranges of permissions.")
def set_xperm(self):
xperms = []
try:
text = self.xperms.text()
if text:
for item in self.xperms.text().split(","):
rng = item.split("-")
if len(rng) == 2:
xperms.append((int(rng[0], base=16), int(rng[1], base=16)))
elif len(rng) == 1:
xperms.append((int(rng[0], base=16), int(rng[0], base=16)))
else:
raise ValueError("Enter an extended permission or extended permission "
"range, e.g. 0x5411 or 0x8800-0x88ff.")
self.query.xperms = xperms
else:
self.query.xperms = None
except Exception as ex:
self.log.error("Extended permissions error: {0}".format(ex))
self.set_criteria_error(self.xperms, ex)
#
# Default criteria
#
def clear_default_error(self):
self.clear_criteria_error(self.default_type, "Match the default type the rule.")
def set_default_type(self):
self.query.default_regex = self.default_regex.isChecked()
try:
self.query.default = self.default_type.text()
except Exception as ex:
self.log.error("Default type error: {0}".format(ex))
self.set_criteria_error(self.default_type, ex)
def set_default_regex(self, state):
self.log.debug("Setting default_regex {0}".format(state))
self.query.default_regex = state
self.clear_default_error()
self.set_default_type()
#
# Boolean criteria
#
def set_bools(self):
selected_bools = []
for index in self.bool_criteria.selectionModel().selectedIndexes():
selected_bools.append(self.bool_model.data(index, Qt.UserRole))
self.query.boolean = selected_bools
#
# Save/Load tab
#
def save(self):
"""Return a dictionary of settings."""
if self.errors:
raise TabFieldError("Field(s) are in error: {0}".
format(" ".join(o.objectName() for o in self.errors)))
settings = {}
save_checkboxes(self, settings, ["criteria_expander", "notes_expander",
"allow", "allowxperm",
"auditallow", "auditallowxperm",
"neverallow", "neverallowxperm",
"dontaudit", "dontauditxperm",
"type_transition", "type_change", "type_member",
"source_indirect", "source_regex",
"target_indirect", "target_regex",
"perms_subset",
"xperms_equal",
"default_regex",
"bools_equal"])
save_lineedits(self, settings, ["source", "target", "xperms", "default_type"])
save_listviews(self, settings, ["tclass", "perms", "bool_criteria"])
save_textedits(self, settings, ["notes"])
return settings
def load(self, settings):
load_checkboxes(self, settings, ["allow", "allowxperm",
"auditallow", "auditallowxperm",
"neverallow", "neverallowxperm",
"dontaudit", "dontauditxperm",
"type_transition", "type_change", "type_member",
"criteria_expander", "notes_expander",
"source_indirect", "source_regex",
"target_indirect", "target_regex",
"perms_subset",
"xperms_equal",
"default_regex",
"bools_equal"])
load_lineedits(self, settings, ["source", "target", "xperms", "default_type"])
load_listviews(self, settings, ["tclass", "perms", "bool_criteria"])
load_textedits(self, settings, ["notes"])
#
# Results runner
#
def run(self, button):
# right now there is only one button.
rule_types = []
max_results = 0
if self.allow.isChecked():
rule_types.append("allow")
max_results += self.policy.allow_count
if self.allowxperm.isChecked():
rule_types.append("allowxperm")
max_results += self.policy.allowxperm_count
if self.auditallow.isChecked():
rule_types.append("auditallow")
max_results += self.policy.auditallow_count
if self.auditallowxperm.isChecked():
rule_types.append("auditallowxperm")
max_results += self.policy.auditallowxperm_count
if self.neverallow.isChecked():
rule_types.append("neverallow")
max_results += self.policy.neverallow_count
if self.neverallowxperm.isChecked():
rule_types.append("neverallowxperm")
max_results += self.policy.neverallowxperm_count
if self.dontaudit.isChecked():
rule_types.append("dontaudit")
max_results += self.policy.dontaudit_count
if self.dontauditxperm.isChecked():
rule_types.append("dontauditxperm")
max_results += self.policy.dontauditxperm_count
if self.type_transition.isChecked():
rule_types.append("type_transition")
max_results += self.policy.type_transition_count
if self.type_member.isChecked():
rule_types.append("type_member")
max_results += self.policy.type_member_count
if self.type_change.isChecked():
rule_types.append("type_change")
max_results += self.policy.type_change_count
self.query.ruletype = rule_types
self.query.source_indirect = self.source_indirect.isChecked()
self.query.target_indirect = self.target_indirect.isChecked()
self.query.perms_subset = self.perms_subset.isChecked()
self.query.boolean_equal = self.bools_equal.isChecked()
# if query is broad, show warning.
if not any((self.query.source, self.query.target, self.query.tclass, self.query.perms,
self.query.xperms, self.query.default, self.query.boolean)) \
and max_results > 1000:
reply = QMessageBox.question(
self, "Continue?",
"This is a broad query, estimated to return {0} results. Continue?".
format(max_results), QMessageBox.Yes | QMessageBox.No)
if reply == QMessageBox.No:
return
# start processing
self.busy.setLabelText("Processing query...")
self.busy.show()
self.raw_results.clear()
self.thread.start()
def update_complete(self, count):
self.log.info("{0} type enforcement rule(s) found.".format(count))
# update sizes/location of result displays
if not self.busy.wasCanceled():
self.busy.setLabelText("Resizing the result table's columns; GUI may be unresponsive")
self.busy.repaint()
self.table_results.resizeColumnsToContents()
# If the permissions column width is too long, pull back
# to a reasonable size
header = self.table_results.horizontalHeader()
if header.sectionSize(4) > 400:
header.resizeSection(4, 400)
if not self.busy.wasCanceled():
self.busy.setLabelText("Resizing the result table's rows; GUI may be unresponsive")
self.busy.repaint()
self.table_results.resizeRowsToContents()
if not self.busy.wasCanceled():
self.busy.setLabelText("Moving the raw result to top; GUI may be unresponsive")
self.busy.repaint()
self.raw_results.moveCursor(QTextCursor.Start)
self.busy.reset()
| lgpl-2.1 | -4,385,313,039,911,049,000 | 39.794372 | 98 | 0.614899 | false | 4.042686 | false | false | false |
akash1808/python-novaclient | novaclient/tests/fixture_data/images.py | 1 | 4037 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from novaclient.tests import fakes
from novaclient.tests.fixture_data import base
class V1(base.Fixture):
base_url = 'images'
def setUp(self):
super(V1, self).setUp()
get_images = {
'images': [
{'id': 1, 'name': 'CentOS 5.2'},
{'id': 2, 'name': 'My Server Backup'}
]
}
headers = {'Content-Type': 'application/json'}
self.requests.register_uri('GET', self.url(),
json=get_images,
headers=headers)
image_1 = {
'id': 1,
'name': 'CentOS 5.2',
"updated": "2010-10-10T12:00:00Z",
"created": "2010-08-10T12:00:00Z",
"status": "ACTIVE",
"metadata": {
"test_key": "test_value",
},
"links": {},
}
image_2 = {
"id": 2,
"name": "My Server Backup",
"serverId": 1234,
"updated": "2010-10-10T12:00:00Z",
"created": "2010-08-10T12:00:00Z",
"status": "SAVING",
"progress": 80,
"links": {},
}
self.requests.register_uri('GET', self.url('detail'),
json={'images': [image_1, image_2]},
headers=headers)
self.requests.register_uri('GET', self.url(1),
json={'image': image_1},
headers=headers)
self.requests.register_uri('GET', self.url(2),
json={'image': image_2},
headers=headers)
self.requests.register_uri('GET', self.url(456),
json={'image': image_2},
headers=headers)
def post_images(request, context):
body = jsonutils.loads(request.body)
assert list(body) == ['image']
fakes.assert_has_keys(body['image'], required=['serverId', 'name'])
return images_1
self.requests.register_uri('POST', self.url(),
json=post_images,
headers=headers,
status_code=202)
def post_images_1_metadata(request, context):
body = jsonutils.loads(request.body)
assert list(body) == ['metadata']
fakes.assert_has_keys(body['metadata'], required=['test_key'])
return {'metadata': image_1['metadata']}
self.requests.register_uri('POST', self.url(1, 'metadata'),
json=post_images_1_metadata,
headers=headers)
for u in (1, 2, '1/metadata/test_key'):
self.requests.register_uri('DELETE', self.url(u), status_code=204)
image_headers = {'x-image-meta-id': '1',
'x-image-meta-name': 'CentOS 5.2',
'x-image-meta-updated': '2010-10-10T12:00:00Z',
'x-image-meta-created': '2010-10-10T12:00:00Z',
'x-image-meta-status': 'ACTIVE',
'x-image-meta-property-test-key': 'test_value'}
self.requests.register_uri('HEAD', self.url(1), headers=image_headers)
class V3(V1):
base_url = 'v1/images'
| apache-2.0 | 973,671,741,062,356,100 | 34.725664 | 79 | 0.490959 | false | 4.132037 | true | false | false |
yegong/stock | providers/spider.py | 1 | 1719 | #!/usr/bin/env python
# -*- coding: utf8 -*-
__author__ = 'cooper'
import traceback
import logging
from threading import Thread
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import log, signals
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DontCloseSpider
from scrapy.utils.project import get_project_settings
from stockspider.spiders.hq_spider import HqSpider
from common import inject, depends_on
@depends_on('sql_engine')
class ScrapySpider:
def __init__(self):
self.spider = HqSpider()
self.crawler = crawler = Crawler(get_project_settings())
crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
crawler.configure()
crawler.crawl(self.spider)
dispatcher.connect(self._dont_close_me, signals.spider_idle)
self.thread = None
self._started = False
self._stopped = False
def start(self):
def run():
try:
logging.info('Start spider')
reactor.run(installSignalHandlers=False)
except Exception, e:
print traceback.format_exc()
if not self._started:
self._started = True
self.crawler.start()
log.start_from_settings(get_project_settings())
self.thread = Thread(target=run)
log.msg('Start')
self.thread.start()
else:
raise Exception('spider has already started.')
def stop(self):
if not self._started:
raise Exception('spider not started.')
elif self._stopped:
raise Exception('spider has already stopped')
else:
log.msg('Stop')
self._stopped = True
self.crawler.stop()
def _dont_close_me(self, spider):
raise DontCloseSpider("..I prefer live spiders.")
| apache-2.0 | 4,436,104,143,725,482,000 | 27.65 | 71 | 0.690518 | false | 3.880361 | false | false | false |
ILoveMuffins/TheGame | app/model/cell.py | 1 | 1588 | #!/usr/bin/env python3.4
from model.energy import Energy
from model.matter import Matter
from model.position import Position
class Cell:
def __init__(self, energy: Energy, matter: Matter, position: Position, max_age,
min_energy: Energy, max_energy: Energy, min_matter: Matter, max_matter: Matter,
life_function, absorb_energy_function, absorb_matter_function):
self.energy = energy
self.matter = matter
self.position = position
self.life_function = life_function
self.absorb_energy_function = absorb_energy_function
self.absorb_matter_function = absorb_matter_function
self.min_energy = min_energy
self.max_energy = max_energy
self.min_matter = min_matter
self.max_matter = max_matter
self.max_age = max_age
self.age = 0
self.life = True
def next_life_step(self):
self.age += 1
self.process_energy()
self.process_matter()
self.life_function()
return self.life
def process_energy(self):
self.energy += self.absorb_energy_function()
def process_matter(self):
self.matter += self.absorb_matter_function()
def is_too_old(self):
return self.age > self.max_age
def has_not_enough_energy(self): # @TODO has_enough_energy OR "return self.energy < self.min_energy>"
return self.energy > self.min_energy
def has_not_enough_matter(self): # @TODO has_enough_energy OR "return self.energy < self.min_energy>"
return self.matter > self.min_matter
| gpl-3.0 | -5,911,487,594,279,590,000 | 33.521739 | 105 | 0.640428 | false | 3.452174 | false | false | false |
r3tard/BartusBot | lib/protorpc/webapp_test_util.py | 23 | 12358 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Testing utilities for the webapp libraries.
GetDefaultEnvironment: Method for easily setting up CGI environment.
RequestHandlerTestBase: Base class for setting up handler tests.
"""
__author__ = '[email protected] (Rafe Kaplan)'
import cStringIO
import threading
import urllib2
from wsgiref import simple_server
from wsgiref import validate
from . import protojson
from . import remote
from . import test_util
from . import transport
from .webapp import service_handlers
from .webapp.google_imports import webapp
class TestService(remote.Service):
"""Service used to do end to end tests with."""
@remote.method(test_util.OptionalMessage,
test_util.OptionalMessage)
def optional_message(self, request):
if request.string_value:
request.string_value = '+%s' % request.string_value
return request
def GetDefaultEnvironment():
"""Function for creating a default CGI environment."""
return {
'LC_NUMERIC': 'C',
'wsgi.multiprocess': True,
'SERVER_PROTOCOL': 'HTTP/1.0',
'SERVER_SOFTWARE': 'Dev AppServer 0.1',
'SCRIPT_NAME': '',
'LOGNAME': 'nickjohnson',
'USER': 'nickjohnson',
'QUERY_STRING': 'foo=bar&foo=baz&foo2=123',
'PATH': '/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/bin/X11',
'LANG': 'en_US',
'LANGUAGE': 'en',
'REMOTE_ADDR': '127.0.0.1',
'LC_MONETARY': 'C',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'wsgi.url_scheme': 'http',
'SERVER_PORT': '8080',
'HOME': '/home/mruser',
'USERNAME': 'mruser',
'CONTENT_LENGTH': '',
'USER_IS_ADMIN': '1',
'PYTHONPATH': '/tmp/setup',
'LC_TIME': 'C',
'HTTP_USER_AGENT': 'Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US; '
'rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6',
'wsgi.multithread': False,
'wsgi.version': (1, 0),
'USER_EMAIL': '[email protected]',
'USER_EMAIL': '112',
'wsgi.input': cStringIO.StringIO(),
'PATH_TRANSLATED': '/tmp/request.py',
'SERVER_NAME': 'localhost',
'GATEWAY_INTERFACE': 'CGI/1.1',
'wsgi.run_once': True,
'LC_COLLATE': 'C',
'HOSTNAME': 'myhost',
'wsgi.errors': cStringIO.StringIO(),
'PWD': '/tmp',
'REQUEST_METHOD': 'GET',
'MAIL': '/dev/null',
'MAILCHECK': '0',
'USER_NICKNAME': 'test',
'HTTP_COOKIE': 'dev_appserver_login="test:[email protected]:True"',
'PATH_INFO': '/tmp/myhandler'
}
class RequestHandlerTestBase(test_util.TestCase):
"""Base class for writing RequestHandler tests.
To test a specific request handler override CreateRequestHandler.
To change the environment for that handler override GetEnvironment.
"""
def setUp(self):
"""Set up test for request handler."""
self.ResetHandler()
def GetEnvironment(self):
"""Get environment.
Override for more specific configurations.
Returns:
dict of CGI environment.
"""
return GetDefaultEnvironment()
def CreateRequestHandler(self):
"""Create RequestHandler instances.
Override to create more specific kinds of RequestHandler instances.
Returns:
RequestHandler instance used in test.
"""
return webapp.RequestHandler()
def CheckResponse(self,
expected_status,
expected_headers,
expected_content):
"""Check that the web response is as expected.
Args:
expected_status: Expected status message.
expected_headers: Dictionary of expected headers. Will ignore unexpected
headers and only check the value of those expected.
expected_content: Expected body.
"""
def check_content(content):
self.assertEquals(expected_content, content)
def start_response(status, headers):
self.assertEquals(expected_status, status)
found_keys = set()
for name, value in headers:
name = name.lower()
try:
expected_value = expected_headers[name]
except KeyError:
pass
else:
found_keys.add(name)
self.assertEquals(expected_value, value)
missing_headers = set(expected_headers.keys()) - found_keys
if missing_headers:
self.fail('Expected keys %r not found' % (list(missing_headers),))
return check_content
self.handler.response.wsgi_write(start_response)
def ResetHandler(self, change_environ=None):
"""Reset this tests environment with environment changes.
Resets the entire test with a new handler which includes some changes to
the default request environment.
Args:
change_environ: Dictionary of values that are added to default
environment.
"""
environment = self.GetEnvironment()
environment.update(change_environ or {})
self.request = webapp.Request(environment)
self.response = webapp.Response()
self.handler = self.CreateRequestHandler()
self.handler.initialize(self.request, self.response)
class SyncedWSGIServer(simple_server.WSGIServer):
pass
class ServerThread(threading.Thread):
"""Thread responsible for managing wsgi server.
This server does not just attach to the socket and listen for requests. This
is because the server classes in Python 2.5 or less have no way to shut them
down. Instead, the thread must be notified of how many requests it will
receive so that it listens for each one individually. Tests should tell how
many requests to listen for using the handle_request method.
"""
def __init__(self, server, *args, **kwargs):
"""Constructor.
Args:
server: The WSGI server that is served by this thread.
As per threading.Thread base class.
State:
__serving: Server is still expected to be serving. When False server
knows to shut itself down.
"""
self.server = server
# This timeout is for the socket when a connection is made.
self.server.socket.settimeout(None)
# This timeout is for when waiting for a connection. The allows
# server.handle_request() to listen for a short time, then timeout,
# allowing the server to check for shutdown.
self.server.timeout = 0.05
self.__serving = True
super(ServerThread, self).__init__(*args, **kwargs)
def shutdown(self):
"""Notify server that it must shutdown gracefully."""
self.__serving = False
def run(self):
"""Handle incoming requests until shutdown."""
while self.__serving:
self.server.handle_request()
self.server = None
class TestService(remote.Service):
"""Service used to do end to end tests with."""
def __init__(self, message='uninitialized'):
self.__message = message
@remote.method(test_util.OptionalMessage, test_util.OptionalMessage)
def optional_message(self, request):
if request.string_value:
request.string_value = '+%s' % request.string_value
return request
@remote.method(response_type=test_util.OptionalMessage)
def init_parameter(self, request):
return test_util.OptionalMessage(string_value=self.__message)
@remote.method(test_util.NestedMessage, test_util.NestedMessage)
def nested_message(self, request):
request.string_value = '+%s' % request.string_value
return request
@remote.method()
def raise_application_error(self, request):
raise remote.ApplicationError('This is an application error', 'ERROR_NAME')
@remote.method()
def raise_unexpected_error(self, request):
raise TypeError('Unexpected error')
@remote.method()
def raise_rpc_error(self, request):
raise remote.NetworkError('Uncaught network error')
@remote.method(response_type=test_util.NestedMessage)
def return_bad_message(self, request):
return test_util.NestedMessage()
class AlternateService(remote.Service):
"""Service used to requesting non-existant methods."""
@remote.method()
def does_not_exist(self, request):
raise NotImplementedError('Not implemented')
class WebServerTestBase(test_util.TestCase):
SERVICE_PATH = '/my/service'
def setUp(self):
self.server = None
self.schema = 'http'
self.ResetServer()
self.bad_path_connection = self.CreateTransport(self.service_url + '_x')
self.bad_path_stub = TestService.Stub(self.bad_path_connection)
super(WebServerTestBase, self).setUp()
def tearDown(self):
self.server.shutdown()
super(WebServerTestBase, self).tearDown()
def ResetServer(self, application=None):
"""Reset web server.
Shuts down existing server if necessary and starts a new one.
Args:
application: Optional WSGI function. If none provided will use
tests CreateWsgiApplication method.
"""
if self.server:
self.server.shutdown()
self.port = test_util.pick_unused_port()
self.server, self.application = self.StartWebServer(self.port, application)
self.connection = self.CreateTransport(self.service_url)
def CreateTransport(self, service_url, protocol=protojson):
"""Create a new transportation object."""
return transport.HttpTransport(service_url, protocol=protocol)
def StartWebServer(self, port, application=None):
"""Start web server.
Args:
port: Port to start application on.
application: Optional WSGI function. If none provided will use
tests CreateWsgiApplication method.
Returns:
A tuple (server, application):
server: An instance of ServerThread.
application: Application that web server responds with.
"""
if not application:
application = self.CreateWsgiApplication()
validated_application = validate.validator(application)
server = simple_server.make_server('localhost', port, validated_application)
server = ServerThread(server)
server.start()
return server, application
def make_service_url(self, path):
"""Make service URL using current schema and port."""
return '%s://localhost:%d%s' % (self.schema, self.port, path)
@property
def service_url(self):
return self.make_service_url(self.SERVICE_PATH)
class EndToEndTestBase(WebServerTestBase):
# Sub-classes may override to create alternate configurations.
DEFAULT_MAPPING = service_handlers.service_mapping(
[('/my/service', TestService),
('/my/other_service', TestService.new_factory('initialized')),
])
def setUp(self):
super(EndToEndTestBase, self).setUp()
self.stub = TestService.Stub(self.connection)
self.other_connection = self.CreateTransport(self.other_service_url)
self.other_stub = TestService.Stub(self.other_connection)
self.mismatched_stub = AlternateService.Stub(self.connection)
@property
def other_service_url(self):
return 'http://localhost:%d/my/other_service' % self.port
def CreateWsgiApplication(self):
"""Create WSGI application used on the server side for testing."""
return webapp.WSGIApplication(self.DEFAULT_MAPPING, True)
def DoRawRequest(self,
method,
content='',
content_type='application/json',
headers=None):
headers = headers or {}
headers.update({'content-length': len(content or ''),
'content-type': content_type,
})
request = urllib2.Request('%s.%s' % (self.service_url, method),
content,
headers)
return urllib2.urlopen(request)
def RawRequestError(self,
method,
content=None,
content_type='application/json',
headers=None):
try:
self.DoRawRequest(method, content, content_type, headers)
self.fail('Expected HTTP error')
except urllib2.HTTPError as err:
return err.code, err.read(), err.headers
| apache-2.0 | -5,773,854,052,402,225,000 | 30.050251 | 80 | 0.674057 | false | 4.030659 | true | false | false |
PapenfussLab/sv_tools | sv_tools/simulator.py | 1 | 5099 | import numpy as np
import sv_data
import sv_diagram as sv_d
def map_kmers(f, k):
""" Takes a list function f and returns a function that applies
f to k-mers of a list, returning the results as a list with
None values discarded.
"""
def g(input_list, *args, **kwargs):
outputs = [f(input_list[i:i+k], *args, **kwargs)
for i in range(len(input_list) + 1 - k)]
return [x for x in outputs if x != None]
return g
def map_kbins(f, k):
def g(input_list, *args, **kwargs):
# Length of the input list must be a multiple of k.
assert len(input_list) % k == 0
outputs = [f(input_list[i:i+k], *args, **kwargs)
for i in range(0, len(input_list), k)]
return [x for x in outputs if x != None]
return g
# Sequence of letters to rearranged positions
def pair_to_letters(pair):
letter, possible_tick = pair
if not letter.isalpha():
return None
elif possible_tick != "'":
return letter
else:
return letter + possible_tick
def letters_to_letterlist(letters):
letters += "A"
to_letterlist = map_kmers(pair_to_letters, 2)
return to_letterlist(letters)
def pair_to_positions(pair, length = 10):
letter, possible_tick = pair
positions = list(np.arange(length) + (ord(letter) * length))
inverted = list(positions[::-1])
if not letter.isalpha():
return None
elif possible_tick != "'":
return positions
else:
return inverted
def letters_to_positions(letters):
letters += "A" # Not read.
pairs_to_positions = map_kmers(pair_to_positions, 2)
positions = [x for letter_sequence in pairs_to_positions(letters)
for x in letter_sequence]
return positions
# Rearranged positions to sequence of letters
def positions_to_letter(positions, length = 10):
assert len(positions) == length
base_position = min(positions[0], positions[-1])
if base_position == positions[0]:
inverted = False
elif base_position == positions[-1]:
inverted = True
else:
print positions[0], positions[-1], base_position
letter = chr(base_position / len(positions))
if not inverted:
return letter
else:
return letter + "'"
def positions_to_letters(positions):
positions_to_list = map_kbins(positions_to_letter, 10)
list_of_letters = positions_to_list(positions)
return "".join(list_of_letters)
positions_to_ticks = map_kbins(np.mean, 10)
# Fusions from rearranged chromosome.
def detect_fusions(sites):
""" Takes a list of four sites, and returns either None, or a
fusion-tuple based on a length-four paired-end read, e.g.
01[2398]7
-><-
T T
"""
assert len(sites) == 4
breakdiff = abs(sites[1] - sites[2])
diff1 = sites[0] - sites[1]
diff2 = sites[2] - sites[3]
if breakdiff == 1:
return None
else:
# Differences should be 1 or -1 normally.
strand1 = {-1:"+", 1:"-"}.get(diff1, "?")
strand2 = {1:"+", -1:"-"}.get(diff2, "?")
bp1 = sv_data.Breakpoint(chrom = "",
pos = sites[1] * 1e6,
strand = strand1)
bp2 = sv_data.Breakpoint(chrom = "",
pos = sites[2] * 1e6,
strand = strand2)
return sv_data.Fusion(bp1, bp2)
get_fusions = map_kmers(detect_fusions, 4)
# Copy number from rearranged chromosome
def get_x_cn(positions):
counts = [(p, positions.count(p)) for p in positions]
x_tuple, cn_tuple = zip(*counts)
x, cn = list(x_tuple), list(cn_tuple)
return x, cn
## Campbellgrams ##
def simulate_sv_diagram(
letters, outfile = None,
**kwargs):
if outfile == None:
outfile = "../output/simulation/simulation_%s.pdf" % letters
### Simulation-specific stuff
positions = letters_to_positions(letters)
fusions = get_fusions(positions)
x, cn = get_x_cn(positions)
kwargs['yticks'] = range(max(cn) + 2)
kwargs['ymax'] = max(cn) + 1
kwargs['ymin'] = 0
kwargs['xlabel'] = letters
###
fig = sv_d.setup_figure()
cn_axes, fusion_axes = sv_d.sv_diagram_axes()
# Copy number
sv_d.plot_cn(cn_axes, x, cn)
sv_d.set_cn_axes_options(cn_axes, x, cn, kwargs)
sv_d.set_cn_axes_aesthetics(cn_axes)
sv_d.plt.minorticks_off()
### Simulation-specific stuff
x_range = range(min(x), max(x) + 1)
x_letters = letters_to_letterlist(positions_to_letters(x_range))
x_ticks = positions_to_ticks(x_range)
cn_axes.set_xticks(x_ticks, minor = True)
cn_axes.set_xticklabels(x_letters, minor = True)
sv_d.plt.setp(cn_axes.get_xticklabels(), visible=False)
###
# Fusions
sv_d.setup_fusion_axes(fusion_axes, min(x), max(x))
for fusion in fusions:
sv_d.plot_fusion(cn_axes, fusion_axes, fusion)
# Ensure everything fits
sv_d.plt.tight_layout()
# Output
fig.savefig(outfile)
sv_d.plt.close(fig)
| mit | 5,066,468,894,900,355,000 | 26.711957 | 69 | 0.594823 | false | 3.328329 | false | false | false |
sagost/VideoUavTracker | CanvasMarkers.py | 1 | 2935 | # -*- coding: utf-8 -*-
'''
Video Uav Tracker v 2.0
Replay a video in sync with a gps track displayed on the map.
-------------------
copyright : (C) 2017 by Salvatore Agosta
email : [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
INSTRUCTION:
Synching:
- Create new project
- Select video and gps track (1 trkpt per second)
- Identify first couple Frame/GpsTime and select it.
- Push Synchronize
- Push Start
Replay:
- Move on map
- Create associated DB shapefile
- Add POI with associated video frame saved
- Extract frames with associated coordinates for rapid photogrammetry use
'''
from PyQt5 import QtGui
from qgis.core import *
from qgis.gui import *
class PositionMarker(QgsMapCanvasItem):
""" marker for current GPS position """
def __init__(self, canvas, alpha=255):
QgsMapCanvasItem.__init__(self, canvas)
self.pos = None
self.hasPosition = False
self.d = 20
self.angle = 0
self.setZValue(100) # must be on top
self.alpha=alpha
def newCoords(self, pos):
if self.pos != pos:
self.pos = QgsPointXY(pos) # copy
self.updatePosition()
def setHasPosition(self, has):
if self.hasPosition != has:
self.hasPosition = has
self.update()
def updatePosition(self):
if self.pos:
self.setPos(self.toCanvasCoordinates(self.pos))
self.update()
def paint(self, p, xxx, xxx2):
if not self.pos:
return
path = QtGui.QPainterPath()
path.moveTo(0,-15)
path.lineTo(15,15)
path.lineTo(0,7)
path.lineTo(-15,15)
path.lineTo(0,-15)
# render position with angle
p.save()
p.setRenderHint(QtGui.QPainter.Antialiasing)
if self.hasPosition:
p.setBrush(QtGui.QBrush(QtGui.QColor(0,0,0, self.alpha)))
else:
p.setBrush(QtGui.QBrush(QtGui.QColor(200,200,200, self.alpha)))
p.setPen(QtGui.QColor(255,255,0, self.alpha))
p.rotate(self.angle)
p.drawPath(path)
p.restore()
def boundingRect(self):
return QtCore.QRectF(-self.d,-self.d, self.d*2, self.d*2)
class ReplayPositionMarker(PositionMarker):
def __init__(self, canvas):
PositionMarker.__init__(self, canvas)
def paint(self, p, xxx, xxx2):
if not self.pos:
return
path = QtGui.QPainterPath()
path.moveTo(-10,1)
path.lineTo(10,1)
path.lineTo(10,0)
path.lineTo(1,0)
path.lineTo(1,-5)
path.lineTo(4,-5)
path.lineTo(0,-9)
path.lineTo(-4,-5)
path.lineTo(-1,-5)
path.lineTo(-1,0)
path.lineTo(-10,0)
path.lineTo(-10,1)
# render position with angle
p.save()
p.setRenderHint(QtGui.QPainter.Antialiasing)
p.setBrush(QtGui.QBrush(QtGui.QColor(255,0,0)))
p.setPen(QtGui.QColor(255,255,0))
p.rotate(self.angle)
p.drawPath(path)
p.restore()
| gpl-2.0 | -7,505,872,444,015,693,000 | 22.861789 | 73 | 0.673595 | false | 2.863415 | false | false | false |
d-k-b/udacity-deep-learning | seq2seq/helper.py | 1 | 1420 | import os
def load_data(path):
input_file = os.path.join(path)
with open(input_file, "r", encoding='utf-8', errors='ignore') as f:
data = f.read()
return data
def extract_vocab(data):
special_words = ['<pad>', '<unk>', '<s>', '<\s>']
set_words = set([word for line in data.split('\n') for word in line.split()])
int_to_vocab = {word_i: word for word_i, word in enumerate(special_words + list(set_words))}
vocab_to_int = {word: word_i for word_i, word in int_to_vocab.items()}
return int_to_vocab, vocab_to_int
def pad_id_sequences(source_ids, source_vocab_to_int, target_ids, target_vocab_to_int, sequence_length):
new_source_ids = [list(reversed(sentence + [source_vocab_to_int['<pad>']] * (sequence_length - len(sentence)))) \
for sentence in source_ids]
new_target_ids = [sentence + [target_vocab_to_int['<pad>']] * (sequence_length - len(sentence)) \
for sentence in target_ids]
return new_source_ids, new_target_ids
def batch_data(source, target, batch_size):
"""
Batch source and target together
"""
for batch_i in range(0, len(source)//batch_size):
start_i = batch_i * batch_size
source_batch = source[start_i:start_i + batch_size]
target_batch = target[start_i:start_i + batch_size]
yield source_batch, target_batch
| mit | 4,802,038,180,412,981,000 | 34.410256 | 117 | 0.602817 | false | 3.325527 | false | false | false |
drogenlied/qudi | logic/jupyterkernel/display_trap.py | 4 | 3061 | # -*- coding: utf-8 -*-
"""
A context manager for handling sys.displayhook.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
#-----------------------------------------------------------------------------
# Authors:
#
# * Robert Kern
# * Brian Granger
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file documentation/BSDLicense_IPython.md, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class DisplayTrap:
"""Object to manage sys.displayhook.
This came from IPython.core.kernel.display_hook, but is simplified
(no callbacks or formatters) until more of the core is refactored.
"""
def __init__(self, hook=None):
self.old_hook = None
self.hook = hook
# We define this to track if a single BuiltinTrap is nested.
# Only turn off the trap when the outermost call to __exit__ is made.
self._nested_level = 0
def __enter__(self):
""" Enter a code segment where displayhook is set.
"""
if self._nested_level == 0:
self.set()
self._nested_level += 1
return self
def __exit__(self, type, value, traceback):
""" Leave a code segmen swhere displayhook is unset.
@param type:
@param value:
@param traceback:
"""
if self._nested_level == 1:
self.unset()
self._nested_level -= 1
# Returning False will cause exceptions to propagate
return False
def set(self):
"""Set the hook."""
if self.hook is not None and sys.displayhook is not self.hook:
self.old_hook = sys.displayhook
sys.displayhook = self.hook
def unset(self):
"""Unset the hook."""
if self.hook is not None and sys.displayhook is not self.old_hook:
sys.displayhook = self.old_hook
| gpl-3.0 | 2,587,610,995,733,073,000 | 33.011111 | 86 | 0.554067 | false | 4.637879 | false | false | false |
bt3gl/Python-and-Algorithms-and-Data-Structures | First_edition_2014/ebook_src/builtin_structures/check_if_2_numbers_sum_to_k.py | 2 | 1636 | #!/usr/bin/env python
__author__ = "bt3"
"""
Given an integer x and an unsorted array of integers, describe an
algorithm to determine whether two of the numbers add up to x.
1. Using hash tables.
2. Sorting the array and keeping two pointers in the array, one in
the beginning and one in the end. Whenever the sum of the current
two integers is less than x, move the first pointer forwards, and
whenever the sum is greater than x, move the second pointer
backwards. O(nln n).
3. Create a BST with x minus each element in the array.
Check whether any element of the array appears in the BST.
It takes O(nlog n) times two.
"""
from collections import defaultdict, Counter
def check_sum(array, k):
'''
>>> check_sum([3, 2, 6, 7, 9, 1], 8)
[(6, 2), (1, 7)]
>>> check_sum([5, 2, 6, 7, 9, 1], 4)
[]
>>>
'''
dict = defaultdict()
res = []
for i in array:
if k-i in dict:
res.append((i, k-i))
del dict[k-i]
else:
dict[i] = 1
return res
def check_sum2(array, k):
'''
>>> check_sum2([1, 4, 2, 7, 1, 3, 10, 15, 3, 1], 6)
set([(3, 3)])
>>> check_sum2([1, 4, 2, 7, 1, 3, 10, 15, 3, 1], 0)
set([])
'''
dict = Counter()
res = set()
for i in array:
dict[i] += 1
for i in array:
if dict[k-i] > 0:
if i == k-i and dict[k-i] > 1:
res.add((i, k-i))
dict[k-i] -= 2
elif i == k-i:
res.add((i, k-i))
dict[k-i] -= 1
return res
if __name__ == '__main__':
import doctest
doctest.testmod() | mit | 56,266,094,355,373,880 | 22.056338 | 66 | 0.528117 | false | 3.176699 | false | false | false |
rerobins/django_auth_addon | django_auth_addon/backend/google_authentication.py | 1 | 3221 | from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
from apiclient.discovery import build
import httplib2
import json
from uuid import uuid4
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
from django.conf import settings
from django.template.defaultfilters import slugify
from django_auth_addon.models import GooglePlusCredentialsModel
SERVICE = build('plus', 'v1')
class GooglePlusBackend(ModelBackend):
def authenticate(self, access_code=None):
if access_code is None:
return None
try:
oauth_flow = flow_from_clientsecrets(settings.CLIENT_SECRETS, scope='')
oauth_flow.redirect_uri = 'postmessage'
self.credentials = oauth_flow.step2_exchange(access_code)
except FlowExchangeError:
return None
# Check that the access token is valid.
access_token = self.credentials.access_token
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
% access_token)
h = httplib2.Http()
result = json.loads(h.request(url, 'GET')[1])
# If there was an error in the access token info, abort.
if result.get('error') is not None:
return None
access_token = self.credentials.access_token
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
% access_token)
h = httplib2.Http()
token_info = json.loads(h.request(url, 'GET')[1])
# http = httplib2.Http()
# http = self.credentials.authorize(http)
# # Get a list of people that this user has shared with this app.
# google_request = SERVICE.people().get(userId='me')
# people_document = google_request.execute(http=http)
# context['given_name'] = self.people_document['name']['givenName']
# context['family_name'] = self.people_document['name']['familyName']
# Check to see if there is a google plus credential object with the provided user id from google
google_plus_credentials = GooglePlusCredentialsModel.objects.filter(gplus_id=token_info['user_id'])
if len(google_plus_credentials) == 0:
credentials = GooglePlusCredentialsModel()
credentials.gplus_id = token_info['user_id']
# Need to create a whole new user object and move on.
user = User.objects.create_user(get_username(), token_info['email'])
credentials.user = user
user.save()
credentials.save()
else:
# Check to see if the credentials object has a user and then return it.
user = google_plus_credentials[0].user
return user
def get_username():
max_length = 30
username = slugify(uuid4().get_hex()[:max_length])
while not is_valid_username(username):
username = slugify(uuid4().get_hex()[:max_length])
return username
def is_valid_username(username):
if username is None:
return False
user_list = User.objects.filter(username=username)
return len(user_list) == 0
| bsd-3-clause | 7,810,339,707,430,348,000 | 32.905263 | 107 | 0.652903 | false | 4.087563 | false | false | false |
scemama/quantum_package | scripts/module/module_handler.py | 1 | 9384 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Module utilitary
Usage:
module_handler.py print_descendant [<module_name>...]
module_handler.py create_png [<module_name>...]
module_handler.py clean [ --all | <module_name>...]
module_handler.py create_git_ignore [<module_name>...]
Options:
print_descendant Print the genealogy of the NEEDED_CHILDREN_MODULES
aka (children, subchildren, etc)
create_png Create a png of the file
NEEDED_CHILDREN_MODULES The path of NEEDED_CHILDREN_MODULES
by default try to open the file in the current path
"""
import os
import sys
import os.path
import shutil
try:
from docopt import docopt
from qp_path import QP_SRC, QP_ROOT, QP_PLUGINS
except ImportError:
print "source .quantum_package.rc"
raise
def is_module(path_module_rel):
return os.path.isfile(os.path.join(QP_SRC, path_module_rel,
"NEEDED_CHILDREN_MODULES"))
def is_plugin(path_module_rel):
return os.path.isfile(os.path.join(QP_PLUGINS, path_module_rel,
"NEEDED_CHILDREN_MODULES"))
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK) and not fpath.endswith(".py")
def get_dict_child(l_root_abs=None):
"""Loop over MODULE in QP_ROOT/src, open all the NEEDED_CHILDREN_MODULES
and create a dict[MODULE] = [sub module needed, ...]
"""
d_ref = dict()
if not l_root_abs:
l_root_abs = [QP_SRC]
for root_abs in l_root_abs:
for module_rel in os.listdir(root_abs):
module_abs = os.path.join(root_abs, module_rel)
try:
path_file = os.path.join(module_abs, "NEEDED_CHILDREN_MODULES")
with open(path_file, "r") as f:
l_children = f.read().split()
except IOError:
pass
else:
if module_rel not in d_ref:
d_ref[module_rel] = l_children
else:
print "Module {0} alredy defined"
print "Abort"
sys.exit(1)
return d_ref
def get_l_module_descendant(d_child, l_module):
"""
From a list of module return the module and descendant
"""
l = []
for module in l_module:
if module not in l:
l.append(module)
try:
l.extend(get_l_module_descendant(d_child, d_child[module]))
except KeyError:
print >> sys.stderr, "Error: "
print >> sys.stderr, "`{0}` is not a submodule".format(module)
print >> sys.stderr, "Check the typo (spelling, case, '/', etc.) "
sys.exit(1)
return list(set(l))
class ModuleHandler():
def __init__(self, l_root_abs=None):
self.dict_child = get_dict_child(l_root_abs)
@property
def l_module(self):
return self.dict_child.keys()
@property
def dict_parent(self):
"""
Get a dic of the first parent
"""
d_child = self.dict_child
d = {}
for module_name in d_child:
d[module_name] = [i for i in d_child.keys()
if module_name in d_child[i]]
return d
@property
def dict_descendant(self):
"""
Get a dic of all the genealogy desc (children and all_children)
"""
d = {}
d_child = self.dict_child
for module_name in d_child:
try:
d[module_name] = get_l_module_descendant(d_child,
d_child[module_name])
except KeyError:
print "Check NEEDED_CHILDREN_MODULES for {0}".format(
module_name)
sys.exit(1)
return d
@property
def dict_root(self):
"""
Return a dict(module_name) = module_boss
The top node in a tree.
"""
d_asc = self.dict_parent
d_desc = self.dict_descendant
l_all_module = self.l_module
dict_root = {}
for module in l_all_module:
dict_root[module] = [p for p in l_all_module
if module in [p] + d_desc[p] and not d_asc[p]
][0]
return dict_root
def l_descendant_unique(self, l_module):
d_desc = self.dict_descendant
d = {}
for module in l_module:
for e in d_desc[module]:
d[e] = 1
return d.keys()
def l_reduce_tree(self, l_module):
"""For a list of module in input return only the root"""
l_d_u = self.l_descendant_unique(l_module)
l_module_reduce = []
for module in l_module:
if module not in l_d_u:
l_module_reduce.append(module)
return l_module_reduce
def create_png(self, l_module):
"""Create the png of the dependency tree for a l_module"""
# Don't update if we are not in the main repository
from is_master_repository import is_master_repository
if not is_master_repository:
return
basename = "tree_dependency"
path = '{0}.png'.format(basename)
from graphviz import Digraph
all_ready_done = []
def draw_module_edge(module, l_children):
"Draw all the module recursifly"
if module not in all_ready_done:
for children in l_children:
# Add Edge
graph.edge(module, children)
# Recurs
draw_module_edge(children, d_ref[children])
all_ready_done.append(module)
graph = Digraph(comment=l_module, format="png", filename=basename)
d_ref = self.dict_child
# Create all the edge
for module in l_module:
graph.node(module, fontcolor="red")
draw_module_edge(module, d_ref[module])
# Try to render the png
# If not just touch it
try:
graph.render(cleanup=True)
except:
with open(path, 'a'):
os.utime(path, None)
return
if __name__ == '__main__':
arguments = docopt(__doc__)
if arguments['--all']:
l_module = [f for f in os.listdir(QP_SRC)
if os.path.isdir(os.path.join(QP_SRC, f))]
elif not arguments['<module_name>']:
dir_ = os.getcwd()
l_module = [os.path.basename(dir_)]
else:
l_module = arguments['<module_name>']
for module in l_module:
if not is_module(module):
print "{0} is not a valide module. Abort".format(module)
print "No NEEDED_CHILDREN_MODULES in it"
sys.exit(1)
m = ModuleHandler()
if arguments['print_descendant']:
for module in l_module:
print " ".join(sorted(m.l_descendant_unique([module])))
if arguments["create_png"]:
try:
m.create_png(l_module)
except RuntimeError:
pass
except SyntaxError:
print "Warning: The graphviz API dropped support for python 2.6."
pass
if arguments["clean"] or arguments["create_git_ignore"]:
l_dir = ['IRPF90_temp', 'IRPF90_man']
l_file = ["irpf90_entities", "tags", "irpf90.make", "Makefile",
"Makefile.depend", ".ninja_log", ".ninja_deps",
"ezfio_interface.irp.f"]
for module in l_module:
module_abs = os.path.realpath(os.path.join(QP_SRC, module))
l_symlink = m.l_descendant_unique([module])
l_exe = [f for f in os.listdir(module_abs)
if is_exe(os.path.join(module_abs, f))]
if arguments["clean"]:
for f in l_dir:
try:
shutil.rmtree(os.path.join(module_abs, f))
except:
pass
for symlink in l_symlink:
try:
os.unlink(os.path.join(module_abs, symlink))
except:
pass
for f in l_file:
try:
os.remove(os.path.join(module_abs, f))
except:
pass
for f in l_exe:
try:
os.remove(os.path.join(module_abs, f))
except:
pass
if arguments["create_git_ignore"]:
# Don't update if we are not in the main repository
from is_master_repository import is_master_repository
if not is_master_repository:
print >> sys.stderr, 'Not in the master repo'
sys.exit(0)
path = os.path.join(module_abs, ".gitignore")
with open(path, "w+") as f:
f.write("# Automatically created by {0} \n".format(__file__).replace(QP_ROOT,"$QP_ROOT"))
l_text = l_dir + l_file + l_symlink + l_exe
l_text.sort()
f.write("\n".join(l_text))
| agpl-3.0 | 5,152,649,960,677,695,000 | 28.980831 | 109 | 0.507033 | false | 3.932942 | false | false | false |
lalitkumarj/NEXT-psych | gui/base/models/experiment.py | 2 | 1041 | from base import db
class Experiment(db.Document):
exp_uid = db.StringField()
exp_key = db.StringField()
perm_key = db.StringField()
app_id = db.StringField()
name = db.StringField()
description = db.StringField()
instructions = db.StringField()
debrief = db.StringField()
params = db.DictField()
status = db.StringField(default="staging")
target_set = db.ReferenceField('TargetSet')
query_tries = db.IntField()
query_duration = db.IntField()
info = db.DictField()
    # Use setters for any parameters that can be changed outside of a constructor.
def set_status(self,status):
self.status = status
self.save()
def set_exp_uid(self,exp_uid):
self.exp_uid = exp_uid
self.save()
def set_exp_key(self,exp_key):
self.exp_key = exp_key
self.save()
def set_perm_key(self,perm_key):
self.perm_key = perm_key
self.save()
def set_info(self,info):
self.info = info
self.save()
| apache-2.0 | 1,253,482,499,794,655,700 | 24.390244 | 79 | 0.605187 | false | 3.627178 | false | false | false |
simonwittber/netwrok-server | src/netwrok/member.py | 2 | 8748 | import os
import hashlib
import asyncio
import json
import logging
import psycopg2.extras
import aiopg
from . import nwdb
from . import core
from . import mailqueue
from . import room
@core.function
def authenticate(client, email, password):
"""
Authenticate the client by matching email and password.
    Note: the password must not be sent in cleartext; it is sent as
    sha256(uid + sha256(password)), where uid is sent with the initial
welcome message.
"""
hash = client.uid
with (yield from nwdb.connection(readonly=True)) as conn:
cursor = yield from conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
yield from cursor.execute("""
select A.id, A.handle, A.email, A.password, M.clan_id, B.alliance_id, B.name as clan_name, C.name as alliance_name, M.id as membership_id
from member A
left outer join membership M on M.member_id = A.id
left outer join clan B on B.id = M.clan_id
left outer join alliance C on C.id = B.alliance_id
where lower(A.email) = lower(%s)
""", [email])
rs = yield from cursor.fetchone()
authenticated = False
if rs is None:
print("rsIsNone")
authenticated = False
else:
h = (hash + rs[3]).encode("utf8")
if hashlib.sha256(h).hexdigest() == password:
client.member_id = client.session["member_id"] = rs["id"]
client.clan_id = rs["clan_id"]
client.alliance_id = rs["alliance_id"]
client.handle = rs["handle"]
client.clan_name = rs["clan_name"]
client.alliance_name = rs["alliance_name"]
cursor.execute("select name from role A inner join role_owner B on B.membership_id = %s", rs["membership_id"])
client.roles = roles = [i.name for i in cursor.fetchall()]
client.member_info = dict(
id=client.member_id,
clan_id=client.clan_id,
alliance_id=client.alliance_id,
handle=client.handle,
clan_name=client.clan_name,
alliance_name=client.alliance_name,
roles=client.roles
)
authenticated = True
                if 'Banned' in client.roles:
                    yield from client.send("member.banned")
                    authenticated = False
else:
authenticated = False
if(not authenticated):
yield from asyncio.sleep(3)
client.authenticated = authenticated
if authenticated:
yield from client.on_authenticated()
yield from client.send("member.info", client.member_info)
if client.clan_id is not None:
clan_room = room.Room.get("Clan " + str(client.clan_id))
yield from client.join(clan_room)
return authenticated
@core.function
def register(client, handle, email, password):
"""
Register a new user. Handle and email must be unique, and password
must be sha256(password), not cleartext.
"""
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
try:
yield from cursor.execute("""
insert into member(handle, email, password)
select %s, %s, %s
returning id
""", [handle, email, password])
except Exception as e:
return False
else:
rs = yield from cursor.fetchone()
client.session["member_id"] = rs[0]
yield from mailqueue.send(client, email, "Welcome.", "Thanks for registering.")
return True
@core.handler
def password_reset_request(client, email):
"""
Request a password reset for an email address. A code is sent to the
    email address which must be passed in via the password_reset message.
"""
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
token = hashlib.md5(os.urandom(8)).hexdigest()[:8]
try:
yield from cursor.execute("""
insert into password_reset_request(member_id, token)
select id, %s from member where lower(email) = lower(%s)
returning id
""", [token, email])
rs = yield from cursor.fetchone()
except Exception as e:
yield from client.send("member.password_reset_request", False)
else:
yield from mailqueue.send(client, email, "Password Reset Request", "Code: " + token)
yield from client.send("member.password_reset_request", True)
@core.function
def password_reset(client, email, token, password):
"""
Change the password by using the provided token. The password must be
sha256(password), not cleartext.
"""
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
success = False
try:
yield from cursor.execute("""
update member A
set password = %s
where lower(A.email) = lower(%s)
and exists (select token from password_reset_request where member_id = A.id and lower(token) = lower(%s))
returning A.id
""", [password, email, token])
except Exception as e:
logging.warning(str(type(e)) + " " + str(e))
success = False
else:
rs = yield from cursor.fetchone()
if rs is None:
                success = False
else:
success = True
member_id = rs[0]
yield from cursor.execute("delete from password_reset_request where member_id = %s", [member_id])
yield from mailqueue.send(client, email, "Password Reset", "Success")
return success
@core.handler
def ban(client, member_id):
client.require_role('Operator')
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
success = False
yield from cursor.execute("""
select add_role(%s, 'Banned');
""", member_id)
@core.handler
def unban(client, member_id):
client.require_role('Operator')
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
select remove_role(%s, 'Banned');
""", member_id)
@core.handler
def add_role(client, member_id, role):
client.require_role('Operator')
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
success = False
yield from cursor.execute("""
select add_role(%s, %s);
""", member_id, role)
@core.handler
def remove_role(client, member_id, role):
client.require_role('Operator')
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
select remove_role(%s, %s);
""", member_id, role)
@core.handler
def set_object(client, key, value):
"""
Save an arbitrary object for a member under a key.
"""
client.require_auth()
value = json.dumps(value)
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
update member_store set value = %s
where key = %s and member_id = %s
returning id
""", [value, key, client.member_id])
rs = yield from cursor.fetchone()
if rs is None:
yield from cursor.execute("""
insert into member_store(member_id, key, value)
select %s, %s, %s
""", [client.member_id, key, value])
@core.function
def get_object(client, key):
"""
Retrieves an arbitrary object previously stored by the member under a key.
"""
client.require_auth()
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
select value from member_store
where member_id = %s and key = %s
""", [client.member_id, key])
rs = yield from cursor.fetchone()
if rs is not None:
rs = json.loads(rs[0])
return rs
@core.function
def get_object_keys(client):
"""
Retrieves all keys stored by the member.
"""
client.require_auth()
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
select key from member_store
where member_id = %s
""", [client.member_id])
rs = yield from cursor.fetchall()
return list(i[0] for i in rs)
| mit | -5,596,224,290,785,613,000 | 33.714286 | 145 | 0.577732 | false | 4.128362 | false | false | false |
WillianPaiva/1flow | oneflow/core/models/reldb/item/article.py | 1 | 23927 | # -*- coding: utf-8 -*-
"""
Copyright 2013-2014 Olivier Cortès <[email protected]>
This file is part of the 1flow project.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
import logging
from statsd import statsd
# from constance import config
from celery import chain as tasks_chain
from django.conf import settings
from django.db import models, IntegrityError, transaction
from django.db.models.signals import post_save, pre_save, pre_delete
from django.utils.translation import ugettext_lazy as _
from django.utils.text import slugify
from simple_history.models import HistoricalRecords
from sparks.foundations.utils import combine_dicts
from oneflow.base.utils import register_task_method
from oneflow.base.utils.http import clean_url
from oneflow.base.utils.dateutils import now, datetime, benchmark
from ..common import (
DjangoUser as User,
CONTENT_TYPES,
ARTICLE_ORPHANED_BASE,
)
from ..processor import get_default_processing_chain_for
from common import generate_orphaned_hash
from base import (
BaseItemQuerySet,
BaseItemManager,
BaseItem,
baseitem_process_task,
baseitem_create_reads_task,
)
from original_data import baseitem_postprocess_original_data_task
from abstract import (
UrlItem,
ContentItem,
baseitem_absolutize_url_task,
)
LOGGER = logging.getLogger(__name__)
MIGRATION_DATETIME = datetime(2014, 11, 1)
__all__ = [
'Article',
'create_article_from_url',
# Tasks will be added below.
]
def create_article_from_url(url, feeds, origin):
""" Create an article from a web url, in feeds, with an origin. """
# TODO: find article publication date while fetching content…
# TODO: set Title during fetch…
try:
new_article, created = Article.create_article(
url=url.replace(' ', '%20'),
title=_(u'Imported item from {0}').format(clean_url(url)),
feeds=feeds, origin=origin)
except:
# NOTE: duplication handling is already
# taken care of in Article.create_article().
LOGGER.exception(u'Article creation from URL %s failed.', url)
return None, False
mutualized = created is None
if created or mutualized:
for feed in feeds:
feed.recent_items_count += 1
feed.all_items_count += 1
for feed in feeds:
if new_article.date_published:
if new_article.date_published > feed.latest_item_date_published:
feed.latest_item_date_published = new_article.date_published
# Even if the article wasn't created, we need to create reads.
# In the case of a mutualized article, it will be fetched only
# once, but all subscribers of all feeds must be connected to
# it to be able to read it.
for subscription in feed.subscriptions.all():
subscription.create_read(new_article, verbose=created)
# Don't forget the parenthesis else we return ``False`` everytime.
return new_article, created or (None if mutualized else False)
def _format_feeds(feeds):
""" Return feeds in a compact string form for displaying in logs. """
return u', '.join(u'{0} ({1})'.format(f.name, f.id) for f in feeds)
# —————————————————————————————————————————————————————————— Manager / QuerySet
def BaseItemQuerySet_article_method(self):
""" Patch BaseItemQuerySet to know how to return articles. """
return self.instance_of(Article)
BaseItemQuerySet.article = BaseItemQuerySet_article_method
# ——————————————————————————————————————————————————————————————————————— Model
# BIG FAT WARNING: inheritance order matters. BaseItem must come first,
# else `create_post_task()` is not found by register_task_method().
class Article(BaseItem, UrlItem, ContentItem):
""" Some kind of news article, or web page. """
class Meta:
app_label = 'core'
verbose_name = _(u'Article')
verbose_name_plural = _(u'Articles')
objects = BaseItemManager()
# Django simple history.
history = HistoricalRecords()
version_description = models.CharField(
max_length=128, null=True, blank=True,
verbose_name=_(u'Version description'),
help_text=_(u'Set by content processors or author to know with which '
u'processor chain this version was produced. Can be a '
u'code or a processor chain ID/slug to help querying.')
)
publishers = models.ManyToManyField(
User, null=True, blank=True, related_name='publications')
# —————————————————————————————————————————————————————————————— Django
def __unicode__(self):
return _(u'{0} (#{1}) from {2}').format(
self.name[:40] + (self.name[40:] and u'…'), self.id, self.url)
# —————————————————————————————————————————————————————————————— Properties
@property
def is_good(self):
""" Return True if all our base classes don't return False. """
if not BaseItem.is_good.fget(self) \
or not UrlItem.is_good.fget(self) \
or not ContentItem.is_good.fget(self):
return False
return True
@property
def is_processed(self):
""" See if all relevant processors have run on the current instance. """
if not BaseItem.is_processed.fget(self) \
or not UrlItem.is_processed.fget(self) \
or not ContentItem.is_processed.fget(self):
return False
return True
@property
def processing_parameters(self):
""" Return a merge of all inherited classes processing parameters.
.. todo:: get and merge feeds parameters, if any.
.. todo:: cache the result via `cacheops` if possible and relevant.
"""
return combine_dicts(
BaseItem.processing_parameters.fget(self),
combine_dicts(
UrlItem.processing_parameters.fget(self),
ContentItem.processing_parameters.fget(self)
)
)
# ————————————————————————————————————————————————————————————————— Methods
def get_processing_chain(self):
""" Return a processor chain suitable for current article.
If our website has one, it will be returned.
Else, the default processor chain for articles will be returned.
"""
website = self.website
if website.processing_chain is None:
return get_default_processing_chain_for(self._meta.model)
else:
return website.processing_chain
def processing_must_abort(self, verbose=True, force=False, commit=True):
""" Return True if processing of current instance must be aborted.
.. versionadded:: 0.90.x. This is the new method, used by the 2015
processing infrastructure.
"""
# HEADS UP: we do not test self.is_processed, it's up to every
# base class to do it in their processing_must_abort()
# method.
# NOTE: we use all() and not any(). This is intentional. In the
# current processors implementation this is needed.
#
# Example: When an article URL is absolutized,
# UrlItem.processing_must_abort() will return True.
# But we must not abort the whole processing: we still
# need to continue processing to handle the `content`
# downloading and conversion to markdown (and soon
# {pre,post}_processing content enhancements.
#
# As every processor will be protected by its accepts()
# method, there will never be no double-processing. Only
# a little too much testing, at worst.
#
# Even if we manage to forward the current processing
# category to the processing_must_abort() method, there
# will always be the accepts() tests. Bypassing them is
# a design error for me. In this context, we would only
# gain the all(True) → any(False) transformation.
#
# And that would imply much more code. Thus, I consider
# the current implementation an acceptable tradeoff.
#
# As a final addition, we have exactly the same logic in
# Article.is_processed, and there it feels perfectly fine:
# an article is not considered processed if any of its part
# is not. Perhaps it's just the name of the current method
# that is a little misleading…
return all(
klass.processing_must_abort(self, verbose=verbose,
force=force, commit=commit)
for klass in (BaseItem, UrlItem, ContentItem)
)
def reset(self, force=False, commit=True):
""" clear the article content & content type.
This method exists for testing / debugging purposes.
"""
if settings.DEBUG:
force = True
if not force:
LOGGER.warning(u'Cannot reset article without `force` argument.')
return
for klass in (BaseItem, UrlItem, ContentItem):
try:
klass.reset(self, force=force, commit=False)
except:
LOGGER.exception('%s %s: could not reset %s class.',
self._meta.verbose_name, self.id, klass)
if commit:
# We are reseting, don't waste a version.
self.save_without_historical_record()
def reprocess(self, verbose=True):
""" A shortcut to reset()/process() without the need to absolutize. """
url_absolute = self.url_absolute
is_orphaned = self.is_orphaned
redo = not url_absolute
self.reset(force=True)
if redo:
self.absolutize_url()
else:
self.url_absolute = url_absolute
self.is_orphaned = is_orphaned
self.process(verbose=verbose)
@classmethod
def create_article(cls, title, url, feeds, **kwargs):
""" Returns ``True`` if article created, ``False`` if a pure duplicate
(already exists in the same feed), ``None`` if exists but not in
the same feed. If more than one feed given, only returns ``True``
or ``False`` (mutualized state is not checked). """
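        # Return-value sketch (illustrative):
        #   article, True   -> newly created in the given feed(s)
        #   article, None   -> already existed, now mutualized into a new feed
        #   article, False  -> pure duplicate (already attached to the same feed)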
tags = kwargs.pop('tags', [])
if url is None:
# We have to build a reliable orphaned URL, because orphaned
# articles are often duplicates. RSS feeds serve us many times
# the same article, without any URL, and we keep recording it
# as new (but orphaned) content… Seen 20141111 on Chuck Norris
# facts, where the content is in the title, and there is no URL.
# We have 860k+ items, out of 1k real facts… Doomed.
url = ARTICLE_ORPHANED_BASE + generate_orphaned_hash(title, feeds)
defaults = {
'name': title,
'is_orphaned': True,
# Skip absolutization, it's useless.
'url_absolute': True
}
defaults.update(kwargs)
article, created = cls.objects.get_or_create(url=url,
defaults=defaults)
# HEADS UP: no statsd here, it's handled by post_save().
else:
url = clean_url(url)
defaults = {'name': title}
defaults.update(kwargs)
article, created = cls.objects.get_or_create(url=url,
defaults=defaults)
if created:
created_retval = True
LOGGER.info(u'Created %sarticle %s %s.', u'orphaned '
if article.is_orphaned else u'', article.id,
u'in feed(s) {0}'.format(_format_feeds(feeds))
if feeds else u'without any feed')
else:
created_retval = False
if article.duplicate_of_id:
                LOGGER.info(u'Swapping duplicate %s %s for master %s on '
u'the fly.', article._meta.verbose_name,
article.id, article.duplicate_of_id)
article = article.duplicate_of
if len(feeds) == 1 and feeds[0] not in article.feeds.all():
# This article is already there, but has not yet been
# fetched for this feed. It's mutualized, and as such
# it is considered at partly new. At least, it's not
# as bad as being a true duplicate.
created_retval = None
LOGGER.info(u'Mutualized article %s in feed(s) %s.',
article.id, _format_feeds(feeds))
article.create_reads(feeds=feeds)
else:
# No statsd, because we didn't create any record in database.
LOGGER.info(u'Duplicate article %s in feed(s) %s.',
article.id, _format_feeds(feeds))
# Special case where a mutualized article arrives from RSS
# (with date/author) while it was already here from Twitter
# (no date/author). Post-processing of original data will
# handle the authors, but at lest we update the date now for
# users to have sorted articles until original data is
# post-processed (this can take time, given the server load).
if article.date_published is None:
date_published = kwargs.get('date_published', None)
if date_published is not None:
article.date_published = date_published
article.save()
# Tags & feeds are ManyToMany, they
# need the article to be saved before.
if tags:
try:
with transaction.atomic():
article.tags.add(*tags)
except IntegrityError:
LOGGER.exception(u'Could not add tags %s to article %s',
tags, article.id)
if feeds:
try:
with transaction.atomic():
article.feeds.add(*feeds)
except:
LOGGER.exception(u'Could not add feeds to article %s',
article.id)
# Get a chance to catch the duplicate if workers were fast.
# At the cost of another DB read, this will save some work
# in repair scripts, and avoid some writes when creating reads.
article = cls.objects.get(id=article.id)
if article.duplicate_of_id:
if settings.DEBUG:
LOGGER.debug(u'Catched on-the-fly duplicate %s, returning '
u'master %s instead.', article.id,
article.duplicate_of_id)
return article.duplicate_of, False
return article, created_retval
def post_create_task(self, apply_now=False):
""" Method meant to be run from a celery task. """
if apply_now:
try:
result = baseitem_absolutize_url_task.apply((self.id, ))
if result is not False:
baseitem_create_reads_task.apply((self.id, ))
baseitem_process_task.apply((self.id, ))
baseitem_postprocess_original_data_task.apply((self.id, ))
except:
LOGGER.exception(u'Applying Article.post_create_task(%s) '
u'failed.', self)
return
post_absolutize_chain = tasks_chain(
# HEADS UP: both subtasks are immutable, we just
# want the group to run *after* the absolutization.
baseitem_create_reads_task.si(self.id),
baseitem_process_task.si(self.id),
baseitem_postprocess_original_data_task.si(self.id),
)
# OLD NOTES: randomize the absolutization a little, to avoid
# http://dev.1flow.net/development/1flow-dev-alternate/group/1243/
# as much as possible. This is not yet a full-featured solution,
# but it's completed by the `fetch_limit` thing.
#
# Absolutization is the condition of everything else. If it
# doesn't succeed:
# - no bother trying to post-process author data for example,
# because we need the absolutized website domain to make
# authors unique and worthful.
# - no bother fetching content: it uses the same mechanisms as
# absolutize_url(), and will probably fail the same way.
#
# Thus, we link the post_absolutize_chain as a callback. It will
# be run only if absolutization succeeds. Thanks, celery :-)
baseitem_absolutize_url_task.apply_async(
args=(self.id, ),
kwargs={'stop_chain_on_false': True},
link=post_absolutize_chain
)
#
# TODO: create short_url
#
# TODO: remove_useless_blocks, eg:
# <p><a href="http://addthis.com/bookmark.php?v=250">
# <img src="http://cache.addthis.com/cachefly/static/btn/
# v2/lg-share-en.gif" alt="Bookmark and Share" /></a></p>
#
# (in 51d6a1594adc895fd21c3475, see Notebook)
#
# TODO: link_replace (by our short_url_link for click statistics)
# TODO: images_fetch
# eg. handle <img alt="2013-05-17_0009.jpg"
# data-lazyload-src="http://www.vcsphoto.com/blog/wp-content/uploads/2013/05/2013-05-17_0009.jpg" # NOQA
# src="http://www.vcsphoto.com/blog/wp-content/themes/prophoto4/images/blank.gif" # NOQA
# height="1198" sidth="900"/>
#
# TODO: authors_fetch
# TODO: publishers_fetch
# TODO: duplicates_find (content wise, not URL wise)
#
return
@classmethod
def repair_missing_authors_migration_201411(cls):
# from oneflow.core.tasks.migration import vacuum_analyze
articles = Article.objects.filter(
authors=None,
date_created__gt=datetime(2014, 10, 31))
count = articles.count()
done = 0
LOGGER.info(u'Starting repairing %s missing authors @%s', count, now())
with benchmark(u'Fix missing authors on rel-DB fetched content…'):
for article in articles:
article.postprocess_original_data(force=True)
# if done % 25000 == 0:
# vacuum_analyze()
done += 1
# ———————————————————————————————————————————————————————————————— Celery Tasks
register_task_method(Article, Article.post_create_task,
globals(), queue=u'create')
# register_task_method(Article, Article.find_image,
# globals(), queue=u'fetch', default_retry_delay=3600)
# ————————————————————————————————————————————————————————————————————— Signals
def article_pre_save(instance, **kwargs):
""" Make a slug if none. """
article = instance
if not article.slug:
article.slug = slugify(article.name)
# if settings.DEBUG:
# if getattr(instance, 'skip_history_when_saving', False):
# LOGGER.info(u'%s %s: SAVE without history.',
# instance._meta.verbose_name,
# instance.id)
# else:
# LOGGER.info(u'%s %s: SAVE WITH HISTORY.',
# instance._meta.verbose_name,
# instance.id)
def article_post_save(instance, **kwargs):
article = instance
if kwargs.get('created', False):
with statsd.pipeline() as spipe:
spipe.gauge('articles.counts.total', 1, delta=True)
spipe.gauge('articles.counts.empty', 1, delta=True)
if article.is_orphaned:
spipe.gauge('articles.counts.orphaned', 1, delta=True)
if article.duplicate_of:
spipe.gauge('articles.counts.duplicates', 1, delta=True)
if article.url_error:
spipe.gauge('articles.counts.url_error', 1, delta=True)
if article.content_error:
spipe.gauge('articles.counts.content_error', 1, delta=True)
# Some articles are created "already orphaned" or duplicates.
# In the archive database this is more immediate than looking
# up the database name.
if not (article.is_orphaned or article.duplicate_of):
# MIGRATION: remove this "if".
if article.date_created >= MIGRATION_DATETIME:
# HEADS UP: this task name will be registered later
# by the register_task_method() call.
article_post_create_task.delay(article.id) # NOQA
def article_pre_delete(instance, **kwargs):
article = instance
with statsd.pipeline() as spipe:
spipe.gauge('articles.counts.total', -1, delta=True)
if article.is_orphaned:
spipe.gauge('articles.counts.orphaned', -1, delta=True)
if article.duplicate_of_id:
spipe.gauge('articles.counts.duplicates', -1, delta=True)
if article.url_error:
spipe.gauge('articles.counts.url_error', -1, delta=True)
if article.content_error:
spipe.gauge('articles.counts.content_error', -1, delta=True)
if article.content_type == CONTENT_TYPES.HTML:
spipe.gauge('articles.counts.html', -1, delta=True)
elif article.content_type in (CONTENT_TYPES.MARKDOWN, ):
spipe.gauge('articles.counts.markdown', -1, delta=True)
elif article.content_type in (None, CONTENT_TYPES.NONE, ):
spipe.gauge('articles.counts.empty', -1, delta=True)
if instance.processing_errors.exists():
try:
instance.processing_errors.clear()
except:
LOGGER.exception(u'%s %s: could not clear processing errors',
instance._meta.verbose_name, instance.id)
pre_delete.connect(article_pre_delete, sender=Article)
pre_save.connect(article_pre_save, sender=Article)
post_save.connect(article_post_save, sender=Article)
| agpl-3.0 | 7,480,019,336,841,286,000 | 34.234303 | 122 | 0.57919 | false | 4.077987 | false | false | false |
roijo/C-PAC_complexitytools | CPAC/GUI/interface/utils/modelconfig_window.py | 3 | 59820 | import wx
import generic_class
from .constants import control, dtype, substitution_map
import os
import yaml
import modelDesign_window
ID_RUN = 11
class ModelConfig(wx.Frame):
# this creates the wx.Frame mentioned above in the class declaration
def __init__(self, parent, gpa_settings=None):
wx.Frame.__init__(
self, parent=parent, title="CPAC - Create New FSL Model", size=(900, 650))
if gpa_settings == None:
self.gpa_settings = {}
self.gpa_settings['subject_list'] = ''
self.gpa_settings['pheno_file'] = ''
self.gpa_settings['subject_id_label'] = ''
self.gpa_settings['design_formula'] = ''
self.gpa_settings['mean_mask'] = ''
self.gpa_settings['custom_roi_mask'] = 'None'
self.gpa_settings['coding_scheme'] = ''
self.gpa_settings['use_zscore'] = True
self.gpa_settings['derivative_list'] = ''
self.gpa_settings['repeated_measures'] = ''
self.gpa_settings['group_sep'] = ''
self.gpa_settings['grouping_var'] = 'None'
self.gpa_settings['z_threshold'] = ''
self.gpa_settings['p_threshold'] = ''
else:
self.gpa_settings = gpa_settings
self.parent = parent
mainSizer = wx.BoxSizer(wx.VERTICAL)
vertSizer = wx.BoxSizer(wx.VERTICAL)
self.panel = wx.Panel(self)
self.window = wx.ScrolledWindow(self.panel, size=(-1,300))
self.page = generic_class.GenericClass(self.window, " FSL Model Setup")
self.page.add(label="Subject List ",
control=control.COMBO_BOX,
name="subject_list",
type=dtype.STR,
comment="Full path to a list of subjects to be included in the model.\n\nThis should be a text file with one subject per line.\n\nTip 1: A list in this format contaning all subjects run through CPAC was generated along with the main CPAC subject list (see subject_list_group_analysis.txt).\n\nTIp 2: An easy way to manually create this file is to copy the subjects column from your Regressor/EV spreadsheet.",
values=self.gpa_settings['subject_list'])
self.page.add(label="Phenotype/EV File ",
control=control.COMBO_BOX,
name="pheno_file",
type=dtype.STR,
comment="Full path to a .csv file containing EV information for each subject.\n\nTip: A file in this format (containing a single column listing all subjects run through CPAC) was generated along with the main CPAC subject list (see template_phenotypic.csv).",
values=self.gpa_settings['pheno_file'])
self.page.add(label="Subjects Column Name ",
control=control.TEXT_BOX,
name="subject_id_label",
type=dtype.STR,
comment="Name of the subjects column in your EV file.",
values=self.gpa_settings['subject_id_label'],
style=wx.EXPAND | wx.ALL,
size=(160, -1))
load_panel_sizer = wx.BoxSizer(wx.HORIZONTAL)
load_pheno_btn = wx.Button(self.window, 2, 'Load Phenotype File', (220,10), wx.DefaultSize, 0)
load_panel_sizer.Add(load_pheno_btn)
self.Bind(wx.EVT_BUTTON, self.populateEVs, id=2)
self.page.add_pheno_load_panel(load_panel_sizer)
# experimental checkbox row stuff
self.page.add(label = "Model Setup ",
control = control.CHECKBOX_GRID,
name = "model_setup",
type = 9,#dtype.LBOOL,
values = '',
comment="A list of EVs from your phenotype file will populate in this window. From here, you can select whether the EVs should be treated as categorical or if they should be demeaned (continuous/non-categorical EVs only). 'MeanFD', 'MeanFD_Jenkinson', 'Measure Mean', and 'Custom_ROI_Mean' will also appear in this window automatically as options to be used as regressors that can be included in your model design. Note that the MeanFD and mean of measure values are automatically calculated and supplied by C-PAC via individual-level analysis.",
size = (450, -1))
self.page.add(label="Design Matrix Formula ",
control=control.TEXT_BOX,
name="design_formula",
type=dtype.STR,
comment="Specify the formula to describe your model design. Essentially, including EVs in this formula inserts them into the model. The most basic format to include each EV you select would be 'EV + EV + EV + ..', etc. You can also select to include MeanFD, MeanFD_Jenkinson, Measure_Mean, and Custom_ROI_Mean here. See the C-PAC User Guide for more detailed information regarding formatting your design formula.",
values= self.gpa_settings['design_formula'],
size=(450, -1))
self.page.add(label="Measure Mean Generation ",
control=control.CHOICE_BOX,
name='mean_mask',
type=dtype.LSTR,
comment = "Choose whether to use a group mask or individual-specific mask when calculating the output means to be used as a regressor.\n\nThis only takes effect if you include the 'Measure_Mean' regressor in your Design Matrix Formula.",
values=["Group Mask","Individual Mask"])
self.page.add(label="Custom ROI Mean Mask ",
control=control.COMBO_BOX,
name="custom_roi_mask",
type=dtype.STR,
comment="Optional: Full path to a NIFTI file containing one or more ROI masks. The means of the masked regions will then be computed for each subject's output and will be included in the model as regressors (one for each ROI in the mask file) if you include 'Custom_ROI_Mean' in the Design Matrix Formula.",
values=self.gpa_settings['custom_roi_mask'])
self.page.add(label="Use z-score Standardized Derivatives ",
control=control.CHOICE_BOX,
name='use_zscore',
type=dtype.BOOL,
comment="Run the group analysis model on the z-score " \
"standardized version of the derivatives you " \
"choose in the list below.",
values=["True","False"])
self.page.add(label = "Select Derivatives ",
control = control.CHECKLIST_BOX,
name = "derivative_list",
type = dtype.LSTR,
values = ['ALFF',
'ALFF (smoothed)',
'f/ALFF',
'f/ALFF (smoothed)',
'ReHo',
'ReHo (smoothed)',
'ROI Average SCA',
'ROI Average SCA (smoothed)',
'Voxelwise SCA',
'Voxelwise SCA (smoothed)',
'Dual Regression',
'Dual Regression (smoothed)',
'Multiple Regression SCA',
'Multiple Regression SCA (smoothed)',
'Network Centrality',
'Network Centrality (smoothed)',
'VMHC (z-score std only)',
'VMHC z-stat (z-score std only)'],
comment = "Select which derivatives you would like to include when running group analysis.\n\nWhen including Dual Regression, make sure to correct your P-value for the number of maps you are comparing.\n\nWhen including Multiple Regression SCA, you must have more degrees of freedom (subjects) than there were time series.",
size = (350,160))
self.page.add(label="Coding Scheme ",
control=control.CHOICE_BOX,
name="coding_scheme",
type=dtype.LSTR,
comment="Choose the coding scheme to use when generating your model. 'Treatment' encoding is generally considered the typical scheme. Consult the User Guide for more information.",
values=["Treatment", "Sum"])
self.page.add(label="Model Group Variances Separately ",
control=control.CHOICE_BOX,
name='group_sep',
type=dtype.NUM,
comment="Specify whether FSL should model the variance for each group separately.\n\nIf this option is enabled, you must specify a grouping variable below.",
values=['Off', 'On'])
self.page.add(label="Grouping Variable ",
control=control.TEXT_BOX,
name="grouping_var",
type=dtype.STR,
comment="The name of the EV that should be used to group subjects when modeling variances.\n\nIf you do not wish to model group variances separately, set this value to None.",
values=self.gpa_settings['grouping_var'],
size=(160, -1))
self.page.add(label="Run Repeated Measures ",
control=control.CHOICE_BOX,
name='repeated_measures',
type=dtype.BOOL,
comment="Run repeated measures to compare different " \
"scans (must use the group analysis subject " \
"list and phenotypic file formatted for " \
"repeated measures.",
values=["False","True"])
self.page.add(label="Z threshold ",
control=control.FLOAT_CTRL,
name='z_threshold',
type=dtype.NUM,
comment="Only voxels with a Z-score higher than this value will be considered significant.",
values=2.3)
self.page.add(label="Cluster Significance Threshold ",
control=control.FLOAT_CTRL,
name='p_threshold',
type=dtype.NUM,
comment="Significance threshold (P-value) to use when doing cluster correction for multiple comparisons.",
values=0.05)
self.page.set_sizer()
if 'group_sep' in self.gpa_settings.keys():
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
if name == 'group_sep':
if self.gpa_settings['group_sep'] == True:
ctrl.set_value('On')
elif self.gpa_settings['group_sep'] == False:
ctrl.set_value('Off')
mainSizer.Add(self.window, 1, wx.EXPAND)
btnPanel = wx.Panel(self.panel, -1)
hbox = wx.BoxSizer(wx.HORIZONTAL)
buffer = wx.StaticText(btnPanel, label="\t\t\t\t\t\t")
hbox.Add(buffer)
cancel = wx.Button(btnPanel, wx.ID_CANCEL, "Cancel", (
220, 10), wx.DefaultSize, 0)
self.Bind(wx.EVT_BUTTON, self.cancel, id=wx.ID_CANCEL)
hbox.Add(cancel, 0, flag=wx.LEFT | wx.BOTTOM, border=5)
load = wx.Button(btnPanel, wx.ID_ADD, "Load Settings", (
200, -1), wx.DefaultSize, 0)
self.Bind(wx.EVT_BUTTON, self.load, id=wx.ID_ADD)
hbox.Add(load, 0.6, flag=wx.LEFT | wx.BOTTOM, border=5)
next = wx.Button(btnPanel, 3, "Next >", (200, -1), wx.DefaultSize, 0)
self.Bind(wx.EVT_BUTTON, self.load_next_stage, id=3)
hbox.Add(next, 0.6, flag=wx.LEFT | wx.BOTTOM, border=5)
# reminder: functions bound to buttons require arguments
# (self, event)
btnPanel.SetSizer(hbox)
#text_sizer = wx.BoxSizer(wx.HORIZONTAL)
#measure_text = wx.StaticText(self.window, label='Note: Regressor options \'MeanFD\' and \'Measure_Mean\' are automatically demeaned prior to being inserted into the model.')
#text_sizer.Add(measure_text)
#mainSizer.Add(text_sizer)
mainSizer.Add(
btnPanel, 0.5, flag=wx.ALIGN_RIGHT | wx.RIGHT, border=20)
self.panel.SetSizer(mainSizer)
self.Show()
# this fires only if we're coming BACK to this page from the second
# page, and these parameters are already pre-loaded. this is to
# automatically repopulate the 'Model Setup' checkbox grid and other
# settings under it
if self.gpa_settings['pheno_file'] != '':
phenoFile = open(os.path.abspath(self.gpa_settings['pheno_file']))
phenoHeaderString = phenoFile.readline().rstrip('\r\n')
phenoHeaderItems = phenoHeaderString.split(',')
phenoHeaderItems.remove(self.gpa_settings['subject_id_label'])
# update the 'Model Setup' box and populate it with the EVs and
# their associated checkboxes for categorical and demean
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
if name == 'model_setup':
ctrl.set_value(phenoHeaderItems)
ctrl.set_selection(self.gpa_settings['ev_selections'])
if name == 'coding_scheme':
ctrl.set_value(self.gpa_settings['coding_scheme'])
if name == 'mean_mask':
ctrl.set_value(self.gpa_settings['mean_mask'])
if name == 'repeated_measures':
ctrl.set_value(self.gpa_settings['repeated_measures'])
if name == 'z_threshold':
ctrl.set_value(self.gpa_settings['z_threshold'][0])
if name == 'p_threshold':
ctrl.set_value(self.gpa_settings['p_threshold'])
if name == 'use_zscore':
ctrl.set_value(self.gpa_settings['use_zscore'])
if name == 'group_sep':
ctrl.set_value(self.gpa_settings['group_sep'])
if name == 'grouping_var':
ctrl.set_value(self.gpa_settings['grouping_var'])
if name == 'derivative_list':
value = self.gpa_settings['derivative_list']
if isinstance(value, str):
value = value.replace("['","").replace("']","").split("', '")
new_derlist = []
# remove the _z if they are there, just so it can
# repopulate the listbox through the substitution map
for val in value:
if "_z" in val:
val = val.replace("_z","")
new_derlist.append(val)
else:
new_derlist.append(val)
ctrl.set_value(new_derlist)
def cancel(self, event):
self.Close()
def display(self, win, msg):
wx.MessageBox(msg, "Error")
win.SetBackgroundColour("pink")
win.SetFocus()
win.Refresh()
raise ValueError
def load_pheno(self,event):
pass
''' button: LOAD SETTINGS '''
def load(self, event):
# when the user clicks 'Load Settings', which loads the
# self.gpa_settings dictionary - it populates the values for both
# windows, so when they hit Next, the next window is also populated
dlg = wx.FileDialog(
self, message="Choose the config fsl yaml file",
defaultDir=os.getcwd(),
defaultFile="",
wildcard="YAML files(*.yaml, *.yml)|*.yaml;*.yml",
style=wx.OPEN | wx.CHANGE_DIR)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
config_map = yaml.load(open(path, 'r'))
s_map = dict((v, k) for k, v in substitution_map.iteritems())
# load the group analysis .yml config file (in dictionary form)
# into the self.gpa_settings dictionary which holds all settings
self.gpa_settings = config_map
if self.gpa_settings is None:
errDlgFileTest = wx.MessageDialog(
self, "Error reading file - group analysis " \
"configuration file appears to be blank.",
"File Read Error",
wx.OK | wx.ICON_ERROR)
errDlgFileTest.ShowModal()
errDlgFileTest.Destroy()
raise Exception
# repopulate the model setup checkbox grid, since this has to be
# done specially
if 'pheno_file' in self.gpa_settings.keys():
phenoFile = open(os.path.abspath(self.gpa_settings['pheno_file']))
phenoHeaderString = phenoFile.readline().rstrip('\r\n')
phenoHeaderItems = phenoHeaderString.split(',')
phenoHeaderItems.remove(self.gpa_settings['subject_id_label'])
# update the 'Model Setup' box and populate it with the EVs and
# their associated checkboxes for categorical and demean
for ctrl in self.page.get_ctrl_list():
if ctrl.get_name() == 'model_setup':
ctrl.set_value(phenoHeaderItems)
ctrl.set_selection(self.gpa_settings['ev_selections'])
# populate the rest of the controls
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
value = config_map.get(name)
dtype = ctrl.get_datatype()
# the model setup checkbox grid is the only one that doesn't
# get repopulated the standard way. instead it is repopulated
# by the code directly above
if name == 'derivative_list':
value = [s_map.get(item)
for item in value if s_map.get(item) != None]
if not value:
value = [str(item) for item in value]
new_derlist = []
for val in value:
if "_z" in val:
val = val.replace("_z","")
new_derlist.append(val)
else:
new_derlist.append(val)
ctrl.set_value(new_derlist)
elif name == 'repeated_measures' or name == 'use_zscore':
ctrl.set_value(str(value))
elif name == 'z_threshold' or name == 'p_threshold':
value = value[0]
ctrl.set_value(value)
elif name == 'group_sep':
value = s_map.get(value)
ctrl.set_value(value)
elif name != 'model_setup' and name != 'derivative_list':
ctrl.set_value(value)
dlg.Destroy()
def read_phenotypic(self, pheno_file, ev_selections):
import csv
ph = pheno_file
# Read in the phenotypic CSV file into a dictionary named pheno_dict
# while preserving the header fields as they correspond to the data
p_reader = csv.DictReader(open(os.path.abspath(ph), 'rU'), skipinitialspace=True)
#pheno_dict_list = []
# dictionary to store the data in a format Patsy can use
# i.e. a dictionary where each header is a key, and the value is a
# list of all of that header's values
pheno_data_dict = {}
for line in p_reader:
for key in line.keys():
if key not in pheno_data_dict.keys():
pheno_data_dict[key] = []
# create a list within one of the dictionary values for that
# EV if it is categorical; formats this list into a form
# Patsy can understand regarding categoricals:
# example: { ADHD: ['adhd1', 'adhd1', 'adhd2', 'adhd1'] }
# instead of just [1, 1, 2, 1], etc.
if 'categorical' in ev_selections.keys():
if key in ev_selections['categorical']:
pheno_data_dict[key].append(key + str(line[key]))
else:
pheno_data_dict[key].append(line[key])
else:
pheno_data_dict[key].append(line[key])
#pheno_dict_list.append(line)
# pheno_dict_list is a list of dictionaries of phenotype header items
# matched to their values, which also includes subject IDs
# i.e. [{'header1': 'value', 'header2': 'value'}, {'header1': 'value', 'header2': 'value'}, ..]
# these dictionaries are UNORDERED, i.e. header items ARE NOT ORDERED
return pheno_data_dict
''' button: LOAD PHENOTYPE FILE '''
def populateEVs(self, event):
# this runs when the user clicks 'Load Phenotype File'
if self.gpa_settings is None:
self.gpa_settings = {}
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
self.gpa_settings[name] = str(ctrl.get_selection())
### CHECK PHENOFILE if can open etc.
# function for file path checking
def testFile(filepath, paramName):
try:
fileTest = open(filepath)
fileTest.close()
except:
errDlgFileTest = wx.MessageDialog(
self, 'Error reading file - either it does not exist or you' \
' do not have read access. \n\n' \
'Parameter: %s' % paramName,
'File Access Error',
wx.OK | wx.ICON_ERROR)
errDlgFileTest.ShowModal()
errDlgFileTest.Destroy()
raise Exception
testFile(self.gpa_settings['subject_list'], 'Subject List')
testFile(self.gpa_settings['pheno_file'], 'Phenotype/EV File')
subFile = open(os.path.abspath(self.gpa_settings['subject_list']))
phenoFile = open(os.path.abspath(self.gpa_settings['pheno_file']),"rU")
phenoHeaderString = phenoFile.readline().rstrip('\r\n')
self.phenoHeaderItems = phenoHeaderString.split(',')
if self.gpa_settings['subject_id_label'] in self.phenoHeaderItems:
self.phenoHeaderItems.remove(self.gpa_settings['subject_id_label'])
else:
errSubID = wx.MessageDialog(
self, 'Please enter the name of the subject ID column' \
' as it is labeled in the phenotype file.',
'Blank/Incorrect Subject Header Input',
wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
# some more checks
sub_IDs = subFile.readlines()
self.subs = []
for sub in sub_IDs:
self.subs.append(sub.rstrip("\n"))
pheno_rows = phenoFile.readlines()
for row in pheno_rows:
# check if the pheno file produces any rows such as ",,,,," due
# to odd file formatting issues. if so, ignore this row. if there
# are values present in the row, continue as normal
if ",," not in row:
# if it finds a sub from the subject list in the current row
# taken from the pheno, move on. if it goes through the entire
# subject list and never finds a match, kick off the "else"
# clause below containing the error message
for sub in self.subs:
# for repeated measures-formatted files
if "," in sub:
# make the comma separator an underscore to match the
# repeated measures-formatted pheno file
if sub.replace(",","_") in row:
break
# for normal
else:
if sub in row:
break
else:
errSubID = wx.MessageDialog(
self, "Your phenotype file contains a subject ID " \
"that is not present in your group analysis " \
"subject list.\n\nPhenotype file row with subject " \
"ID not in subject list:\n%s" \
% row,
"Subject Not In List",
wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
for ctrl in self.page.get_ctrl_list():
# update the 'Model Setup' box and populate it with the EVs and
# their associated checkboxes for categorical and demean
if ctrl.get_name() == 'model_setup':
ctrl.set_value(self.phenoHeaderItems)
# populate the design formula text box with a formula which
# includes all of the EVs, and two of the measures (MeanFD and
# the measure/derivative mean) - the user can edit this if they
# need to, obviously
if ctrl.get_name() == 'design_formula':
formula_string = ''
for EV in self.phenoHeaderItems:
if formula_string == '':
formula_string = EV
else:
formula_string = formula_string + ' + ' + EV
formula_string = formula_string + ' + MeanFD_Jenkinson'
ctrl.set_value(formula_string)
''' button: NEXT '''
def load_next_stage(self, event):
import patsy
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
self.gpa_settings[name] = str(ctrl.get_selection())
### CHECK PHENOFILE if can open etc.
# function for file path checking
def testFile(filepath, paramName):
try:
fileTest = open(filepath)
fileTest.close()
except:
errDlgFileTest = wx.MessageDialog(
self, 'Error reading file - either it does not exist ' \
'or you do not have read access. \n\n' \
'Parameter: %s' % paramName,
'File Access Error',
wx.OK | wx.ICON_ERROR)
errDlgFileTest.ShowModal()
errDlgFileTest.Destroy()
raise Exception
testFile(self.gpa_settings['subject_list'], 'Subject List')
testFile(self.gpa_settings['pheno_file'], 'Phenotype/EV File')
phenoFile = open(os.path.abspath(self.gpa_settings['pheno_file']),"rU")
phenoHeaderString = phenoFile.readline().rstrip('\r\n')
self.phenoHeaderItems = phenoHeaderString.split(',')
if self.gpa_settings['subject_id_label'] in self.phenoHeaderItems:
self.phenoHeaderItems.remove(self.gpa_settings['subject_id_label'])
else:
errSubID = wx.MessageDialog(
self, 'Please enter the name of the subject ID column' \
' as it is labeled in the phenotype file.',
'Blank/Incorrect Subject Header Input',
wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
for ctrl in self.page.get_ctrl_list():
name = ctrl.get_name()
# get the design matrix formula
if name == 'design_formula':
self.gpa_settings['design_formula'] = str(ctrl.get_selection())
# get the EV categorical + demean grid selections
elif name == 'model_setup':
# basically, ctrl is checkbox_grid in this case, and
# get_selection goes to generic_class.py first, which links
# it to the custom GetGridSelection() function in the
# checkbox_grid class in custom_control.py
self.gpa_settings['ev_selections'] = ctrl.get_selection()
elif name == 'group_sep':
self.gpa_settings['group_sep'] = ctrl.get_selection()
elif name == 'grouping_var':
self.gpa_settings['grouping_var'] = ctrl.get_selection()
if name == 'derivative_list':
# grab this for below
derlist_ctrl = ctrl
else:
self.gpa_settings[name] = str(ctrl.get_selection())
self.gpa_settings['derivative_list'] = []
for derivative in list(derlist_ctrl.get_selection()):
if self.gpa_settings['use_zscore'] == "True":
self.gpa_settings['derivative_list'].append(derivative + "_z")
else:
self.gpa_settings['derivative_list'].append(derivative)
self.pheno_data_dict = self.read_phenotypic(self.gpa_settings['pheno_file'], self.gpa_settings['ev_selections'])
try:
phenoFile = open(os.path.abspath(self.gpa_settings['pheno_file']))
except:
print '\n\n[!] CPAC says: The phenotype file path provided ' \
'couldn\'t be opened - either it does not exist or ' \
'there are access restrictions.\n'
print 'Phenotype file provided: '
print self.gpa_settings['pheno_file'], '\n\n'
raise IOError
# validate design formula and build Available Contrasts list
var_list_for_contrasts = []
EVs_to_test = []
EVs_to_include = []
# take the user-provided design formula and break down the included
# terms into a list, and use this to create the list of available
# contrasts
formula = self.gpa_settings['design_formula']
# need to cycle through the EVs inside parentheses just to make
# sure they are valid
# THEN you have to treat the entire parentheses thing as one EV when
# it comes to including it in the list for contrasts
formula_strip = formula.replace('+',' ')
formula_strip = formula_strip.replace('-',' ')
formula_strip = formula_strip.replace('**(', '**')
formula_strip = formula_strip.replace(')**', '**')
formula_strip = formula_strip.replace('(',' ')
formula_strip = formula_strip.replace(')',' ')
EVs_to_test = formula_strip.split()
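        # Worked example (illustrative): a formula such as
        #   "EV1 + (EV2 + EV3)**2 + MeanFD"
        # strips down to ['EV1', 'EV2', 'EV3**2', 'MeanFD'], so terms inside
        # parentheses are validated one by one and '**' interactions keep one
        # side attached for the integer check performed below.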
# ensure the design formula only has valid EVs in it
for EV in EVs_to_test:
# ensure ** interactions have a valid EV on one side and a number
# on the other
if '**' in EV:
both_sides = EV.split('**')
int_check = 0
for side in both_sides:
if side.isdigit():
int_check = 1
else:
if (side not in self.pheno_data_dict.keys()) and \
side != 'MeanFD' and side != 'MeanFD_Jenkinson' \
and side != 'Measure_Mean' and \
side != 'Custom_ROI_Mean':
errmsg = 'CPAC says: The regressor \'%s\' you ' \
'entered within the design formula as ' \
'part of the interaction \'%s\' is not ' \
'a valid EV option.\n\nPlease enter ' \
'only the EVs in your phenotype file ' \
'or the MeanFD, MeanFD_Jenkinson, ' \
'Custom_ROI_Mean, or Measure_Mean ' \
'options.' \
% (side,EV)
errSubID = wx.MessageDialog(self, errmsg,
'Invalid EV', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
if int_check != 1:
errmsg = 'CPAC says: The interaction \'%s\' you ' \
'entered within the design formula requires ' \
'a number on one side.\n\nExample: ' \
'(EV1 + EV2 + EV3)**3\n\nNote: This would be ' \
'equivalent to\n(EV1 + EV2 + EV3) * ' \
'(EV1 + EV2 + EV3) * (EV1 + EV2 + EV3)' % EV
errSubID = wx.MessageDialog(self, errmsg,
'Invalid EV', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
# ensure these interactions are input correctly
elif (':' in EV) or ('/' in EV) or ('*' in EV):
if ':' in EV:
both_EVs_in_interaction = EV.split(':')
if '/' in EV:
both_EVs_in_interaction = EV.split('/')
if '*' in EV:
both_EVs_in_interaction = EV.split('*')
for interaction_EV in both_EVs_in_interaction:
if (interaction_EV not in self.pheno_data_dict.keys()) and \
interaction_EV != 'MeanFD' and \
interaction_EV != 'MeanFD_Jenkinson' and \
interaction_EV != 'Measure_Mean' and \
interaction_EV != 'Custom_ROI_Mean':
errmsg = 'CPAC says: The regressor \'%s\' you ' \
'entered within the design formula as ' \
'part of the interaction \'%s\' is not a ' \
'valid EV option.\n\nPlease enter only ' \
'the EVs in your phenotype file or the ' \
'MeanFD, MeanFD_Jenkinson, Custom_ROI_' \
'Mean, or Measure_Mean options.' \
% (interaction_EV,EV)
errSubID = wx.MessageDialog(self, errmsg,
'Invalid EV', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
else:
if (EV not in self.pheno_data_dict.keys()) and EV != 'MeanFD' \
and EV != 'MeanFD_Jenkinson' and EV != 'Measure_Mean' \
and EV != 'Custom_ROI_Mean':
errmsg = 'CPAC says: The regressor \'%s\' you ' \
'entered within the design formula is not ' \
'a valid EV option.' \
'\n\nPlease enter only the EVs in your phenotype ' \
'file or the MeanFD, MeanFD_Jenkinson, ' \
'Custom_ROI_Mean, or Measure_Mean options.' \
% EV
errSubID = wx.MessageDialog(self, errmsg,
'Invalid EV', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
''' design formula/input parameters checks '''
if "Custom_ROI_Mean" in formula and \
(self.gpa_settings['custom_roi_mask'] == None or \
self.gpa_settings['custom_roi_mask'] == ""):
err_string = "You included 'Custom_ROI_Mean' as a regressor " \
"in your Design Matrix Formula, but you did not " \
"specify a Custom ROI Mean Mask file.\n\nPlease " \
"either specify a mask file, or remove " \
"'Custom_ROI_Mean' from your model."
errSubID = wx.MessageDialog(self, err_string,
'No Custom ROI Mean Mask File', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
if "Custom_ROI_Mean" not in formula and \
(self.gpa_settings['custom_roi_mask'] != None and \
self.gpa_settings['custom_roi_mask'] != "" and \
self.gpa_settings['custom_roi_mask'] != "None" and \
self.gpa_settings['custom_roi_mask'] != "none"):
warn_string = "Note: You specified a Custom ROI Mean Mask file, " \
"but you did not include 'Custom_ROI_Mean' as a " \
"regressor in your Design Matrix Formula.\n\nThe " \
"means of the ROIs specified in the file will not " \
"be included as regressors unless you include " \
"'Custom_ROI_Mean' in your model."
errSubID = wx.MessageDialog(self, warn_string,
'No Custom_ROI_Mean Regressor', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
if str(self.gpa_settings["use_zscore"]) == "True":
if "Measure_Mean" in formula:
warn_string = "Note: You have included Measure_Mean as a " \
"regressor in your model, but you have selected to run " \
"the group-level analysis with the z-score standardized "\
"version of the outputs.\n\nThe mean of any z-score " \
"standardized output will always be zero."
errSubID = wx.MessageDialog(self, warn_string,
'Measure_Mean Included With z-scored Outputs', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
else:
for deriv in self.gpa_settings["derivative_list"]:
if "VMHC" in deriv:
warn_string = "Note: You have selected to run group-" \
"level analysis using raw outputs (non-z-score " \
"standardized), but you have also included VMHC " \
"as one of the outputs to include in your model."
errSubID = wx.MessageDialog(self, warn_string,
'VMHC Cannot Be Included As Raw Output', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
# if there is a custom ROI mean mask file provided, and the user
# includes it as a regressor in their design matrix formula, calculate
# the number of ROIs in the file and generate the column names so that
# they can be passed as possible contrast labels
if "Custom_ROI_Mean" in formula and \
(self.gpa_settings['custom_roi_mask'] != None and \
self.gpa_settings['custom_roi_mask'] != "" and \
self.gpa_settings['custom_roi_mask'] != "None" and \
self.gpa_settings['custom_roi_mask'] != "none"):
import commands
try:
ROIstats_output = commands.getoutput("3dROIstats -mask %s %s" \
% (self.gpa_settings['custom_roi_mask'], \
self.gpa_settings['custom_roi_mask']))
except Exception as e:
print "[!] CPAC says: AFNI 3dROIstats failed for custom ROI" \
"Mean Mask file validation. Please ensure you either " \
"have AFNI installed and that you created the mask " \
"file properly. Consult the User Guide for more " \
"information.\n\n"
print "Error details: %s\n\n" % e
raise
ROIstats_list = ROIstats_output.split("\t")
# calculate the number of ROIs - 3dROIstats output can be split
# into a list, and the actual ROI means begin at a certain point
num_rois = (len(ROIstats_list)-3)/2
custom_roi_labels = []
for num in range(0,num_rois):
custom_roi_labels.append("Custom_ROI_Mean_%d" % int(num+1))
if str(self.gpa_settings["group_sep"]) == "On":
if (self.gpa_settings["grouping_var"] == "None") or \
(self.gpa_settings["grouping_var"] is None) or \
(self.gpa_settings["grouping_var"] == "none"):
warn_string = "Note: You have selected to model group " \
"variances separately, but you have not specified a " \
"grouping variable."
errSubID = wx.MessageDialog(self, warn_string,
'No Grouping Variable Specified', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
if self.gpa_settings["grouping_var"] not in formula:
warn_string = "Note: You have specified '%s' as your " \
"grouping variable for modeling the group variances " \
"separately, but you have not included this variable " \
"in your design formula.\n\nPlease include this " \
"variable in your design, or choose a different " \
"grouping variable." % self.gpa_settings["grouping_var"]
errSubID = wx.MessageDialog(self, warn_string,
'Grouping Variable not in Design', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
def read_phenotypic(pheno_file, ev_selections, subject_id_label):
import csv
import numpy as np
ph = pheno_file
# Read in the phenotypic CSV file into a dictionary named pheno_dict
# while preserving the header fields as they correspond to the data
p_reader = csv.DictReader(open(os.path.abspath(ph), 'rU'), skipinitialspace=True)
# dictionary to store the data in a format Patsy can use
# i.e. a dictionary where each header is a key, and the value is a
# list of all of that header's values
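            # e.g. {'participant': ['sub001', 'sub002'],
            #       'age': [21.0, 33.0],
            #       'MeanFD': [0.08, 0.12]}   (illustrative values only)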
pheno_data_dict = {}
for line in p_reader:
# here, each instance of 'line' is really a dictionary where the
# keys are the pheno headers, and their values are the values of
# each EV for that one subject - each iteration of this loop is
# one subject
for key in line.keys():
if key not in pheno_data_dict.keys():
pheno_data_dict[key] = []
# create a list within one of the dictionary values for that
# EV if it is categorical; formats this list into a form
# Patsy can understand regarding categoricals:
# example: { ADHD: ['adhd1', 'adhd1', 'adhd0', 'adhd1'] }
# instead of just [1, 1, 0, 1], etc.
if 'categorical' in ev_selections.keys():
if key in ev_selections['categorical']:
pheno_data_dict[key].append(key + str(line[key]))
elif key == subject_id_label:
pheno_data_dict[key].append(line[key])
else:
pheno_data_dict[key].append(float(line[key]))
elif key == subject_id_label:
pheno_data_dict[key].append(line[key])
else:
pheno_data_dict[key].append(float(line[key]))
# this needs to run after each list in each key has been fully
# populated above
for key in pheno_data_dict.keys():
# demean the EVs marked for demeaning
if 'demean' in ev_selections.keys():
if key in ev_selections['demean']:
new_demeaned_evs = []
mean_evs = 0.0
# populate a dictionary, a key for each demeanable EV, with
# the value being the sum of all the values (which need to be
# converted to float first)
for val in pheno_data_dict[key]:
mean_evs += float(val)
# calculate the mean of the current EV in this loop
mean_evs = mean_evs / len(pheno_data_dict[key])
# remove the EV's mean from each value of this EV
# (demean it!)
for val in pheno_data_dict[key]:
new_demeaned_evs.append(float(val) - mean_evs)
# replace
pheno_data_dict[key] = new_demeaned_evs
# converts non-categorical EV lists into NumPy arrays
# so that Patsy may read them in properly
if 'categorical' in ev_selections.keys():
if key not in ev_selections['categorical']:
pheno_data_dict[key] = np.array(pheno_data_dict[key])
return pheno_data_dict
patsy_formatted_pheno = read_phenotypic(self.gpa_settings['pheno_file'], self.gpa_settings['ev_selections'], self.gpa_settings['subject_id_label'])
# let's create dummy columns for MeanFD, Measure_Mean, and
        # Custom_ROI_Mean (if included in the Design Matrix Formula) just so we
# can get an accurate list of EVs Patsy will generate
def create_regressor_column(regressor):
# regressor should be a string of the name of the regressor
import numpy as np
regressor_list = []
for key in patsy_formatted_pheno.keys():
for val in patsy_formatted_pheno[key]:
regressor_list.append(0.0)
break
regressor_list = np.array(regressor_list)
patsy_formatted_pheno[regressor] = regressor_list
if 'MeanFD' in formula:
create_regressor_column('MeanFD')
if 'MeanFD_Jenkinson' in formula:
create_regressor_column('MeanFD_Jenkinson')
if 'Measure_Mean' in formula:
create_regressor_column('Measure_Mean')
if 'Custom_ROI_Mean' in formula:
add_formula_string = ""
for col_label in custom_roi_labels:
create_regressor_column(col_label)
# create a string of all the new custom ROI regressor column
# names to be inserted into the design formula, so that Patsy
# will accept the phenotypic data dictionary that now has these
# columns
if add_formula_string == "":
add_formula_string = add_formula_string + col_label
else:
add_formula_string = add_formula_string + " + " + col_label
formula = formula.replace("Custom_ROI_Mean",add_formula_string)
if 'categorical' in self.gpa_settings['ev_selections']:
for EV_name in self.gpa_settings['ev_selections']['categorical']:
if self.gpa_settings['coding_scheme'] == 'Treatment':
formula = formula.replace(EV_name, 'C(' + EV_name + ')')
elif self.gpa_settings['coding_scheme'] == 'Sum':
formula = formula.replace(EV_name, 'C(' + EV_name + ', Sum)')
# create the dmatrix in Patsy just to see what the design matrix
# columns are going to be
try:
dmatrix = patsy.dmatrix(formula, patsy_formatted_pheno)
except:
print '\n\n[!] CPAC says: Design matrix creation wasn\'t ' \
'successful - do the terms in your formula correctly ' \
'correspond to the EVs listed in your phenotype file?\n'
print 'Phenotype file provided: '
print self.gpa_settings['pheno_file'], '\n\n'
raise Exception
column_names = dmatrix.design_info.column_names
subFile = open(os.path.abspath(self.gpa_settings['subject_list']))
sub_IDs = subFile.readlines()
self.subs = []
for sub in sub_IDs:
self.subs.append(sub.rstrip("\n"))
# check to make sure there are more subjects than EVs!!
if len(column_names) >= len(self.subs):
err = "There are more (or an equal amount of) EVs currently " \
"included in the model than there are subjects in the " \
"group analysis subject list. There must be more " \
"subjects than EVs in the design.\n\nNumber of subjects: " \
"%d\nNumber of EVs: %d\n\nNote: An 'Intercept' " \
"column gets added to the design as an EV, so there will " \
"be one more EV than you may have specified in your " \
"design." % (len(self.subs),len(column_names))
errSubID = wx.MessageDialog(self, err,
"Too Many EVs or Too Few Subjects",
wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
raw_column_strings = []
# remove the header formatting Patsy creates for categorical variables
# because we are going to use var_list_for_contrasts as a label for
# users to know what contrasts are available to them
for column in column_names:
# if using Sum encoding, a column name may look like this:
# C(adhd, Sum)[S.adhd0]
# this loop leaves it with only "adhd0" in this case, for the
# contrasts list for the next GUI page
column_string = column
string_for_removal = ''
for char in column_string:
string_for_removal = string_for_removal + char
if char == '.':
column_string = column_string.replace(string_for_removal, '')
string_for_removal = ''
column_string = column_string.replace(']', '')
if ":" in column_string:
try:
column_string = column_string.split("[")[1]
except:
pass
raw_column_strings.append(column_string)
if str(self.gpa_settings["group_sep"]) == "On":
grouping_options = []
idx = 0
for column_string in raw_column_strings:
if self.gpa_settings["grouping_var"] in column_string:
grouping_variable_info = []
grouping_variable_info.append(column_string)
grouping_variable_info.append(idx)
grouping_options.append(grouping_variable_info)
# grouping_var_idx is the column numbers in the design matrix
# which holds the grouping variable (and its possible levels)
idx += 1
# all the categorical values/levels of the grouping variable
grouping_var_levels = []
for gv_idx in grouping_options:
for subject in dmatrix:
if self.gpa_settings["grouping_var"] in self.gpa_settings["ev_selections"]["categorical"]:
level_num = str(int(subject[gv_idx[1]]))
else:
level_num = str(subject[gv_idx[1]])
level_label = '__' + self.gpa_settings["grouping_var"] + level_num
if level_label not in grouping_var_levels:
grouping_var_levels.append(level_label)
# make the new header for the reorganized data
for column_string in raw_column_strings:
if column_string != "Intercept":
if self.gpa_settings["grouping_var"] not in column_string:
for level in grouping_var_levels:
var_list_for_contrasts.append(column_string + level)
elif self.gpa_settings["grouping_var"] in column_string:
var_list_for_contrasts.append(column_string)
else:
for column_string in raw_column_strings:
if column_string != 'Intercept':
var_list_for_contrasts.append(column_string)
# check for repeated measures file formatting!
group_sublist_file = open(self.gpa_settings['subject_list'], 'r')
group_sublist_items = group_sublist_file.readlines()
group_sublist = [line.rstrip('\n') for line in group_sublist_items \
if not (line == '\n') and not line.startswith('#')]
for ga_sub in group_sublist:
# ga_sub = subject ID taken off the group analysis subject list
# let's check to make sure the subject list is formatted for
# repeated measures properly if repeated measures is enabled
# and vice versa
if (self.gpa_settings['repeated_measures'] == "True") and \
(',' not in ga_sub):
errmsg = "The group analysis subject list is not in the " \
"appropriate format for repeated measures. Please " \
"use the appropriate format as described in the " \
"CPAC User Guide, or turn off Repeated Measures." \
"\n\nNote: CPAC generates a properly-formatted " \
"group analysis subject list meant for running " \
"repeated measures when you create your original " \
"subject list. Look for 'subject_list_group_" \
"analysis_repeated_measures.txt' in the directory " \
"where you created your subject list."
errSubID = wx.MessageDialog(self, errmsg,
'Subject List Format', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
elif (self.gpa_settings['repeated_measures'] == "False") and \
(',' in ga_sub):
errmsg = "It looks like your group analysis subject list is " \
"formatted for running repeated measures, but " \
"'Run Repeated Measures' is not enabled."
errSubID = wx.MessageDialog(self, errmsg,
'Subject List Format', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
# make sure the sub IDs in the sublist and pheno files match!
group_pheno_file = open(self.gpa_settings['pheno_file'], 'r')
group_pheno_lines = group_pheno_file.readlines()
# gather the subject IDs from the phenotype file
def get_pheno_subjects(delimiter):
            # locate the subject ID column by its position in the split header,
            # not with str.index(), which returns a character offset
            index = 0
            for col_num, item in enumerate(group_pheno_lines[0].split(delimiter)):
                if item == self.gpa_settings['subject_id_label']:
                    index = col_num
group_pheno_subs = group_pheno_lines[1:len(group_pheno_lines)]
pheno_subs = []
for pheno_sub_line in group_pheno_subs:
pheno_subs.append(pheno_sub_line.split(delimiter)[index])
return pheno_subs
pheno_subs = []
if "," in group_pheno_lines[0]:
pheno_subs = get_pheno_subjects(",")
# now make sure the group sublist and pheno subject IDs match, at least
        # for the ones that exist (i.e. there may be fewer sub IDs in the sublist)
for sublist_subID, pheno_subID in zip(group_sublist, pheno_subs):
# if group sublist is formatted for repeated measures
if "," in sublist_subID:
sublist_subID = sublist_subID.replace(",","_")
if sublist_subID != pheno_subID:
if self.gpa_settings['repeated_measures'] == "False":
errmsg = "The subject IDs in your group subject list " \
"and your phenotype file do not match. Please " \
"make sure these have been set up correctly."
else:
errmsg = "The subject IDs in your group subject list " \
"and your phenotype file do not match. Please " \
"make sure these have been set up correctly." \
"\n\nNote: Repeated measures is enabled - does "\
"your phenotype file have properly-formatted " \
"subject IDs matching your repeated measures " \
"group analysis subject list?"
errSubID = wx.MessageDialog(self, errmsg,
'Subject ID Mismatch', wx.OK | wx.ICON_ERROR)
errSubID.ShowModal()
errSubID.Destroy()
raise Exception
# open the next window!
modelDesign_window.ModelDesign(self.parent, self.gpa_settings, var_list_for_contrasts) # !!! may need to pass the actual dmatrix as well
self.Close()
| bsd-3-clause | -214,186,313,670,741,800 | 38.826897 | 568 | 0.508358 | false | 4.465845 | false | false | false |
tanmaykm/JuliaBox | engine/src/juliabox/plugins/vol_defcfg/defcfg.py | 3 | 3038 | import os
from juliabox.jbox_util import ensure_delete, make_sure_path_exists, unique_sessname, JBoxCfg
from juliabox.vol import JBoxVol
class JBoxDefaultConfigVol(JBoxVol):
provides = [JBoxVol.JBP_CONFIG]
FS_LOC = None
@staticmethod
def configure():
cfg_location = os.path.expanduser(JBoxCfg.get('cfg_location'))
make_sure_path_exists(cfg_location)
JBoxDefaultConfigVol.FS_LOC = cfg_location
@staticmethod
def _get_config_mounts_used(cid):
used = []
props = JBoxDefaultConfigVol.dckr().inspect_container(cid)
try:
for _cpath, hpath in JBoxVol.extract_mounts(props):
if hpath.startswith(JBoxDefaultConfigVol.FS_LOC):
used.append(hpath.split('/')[-1])
except:
JBoxDefaultConfigVol.log_error("error finding config mount points used in " + cid)
return []
return used
@staticmethod
def refresh_disk_use_status(container_id_list=None):
pass
@staticmethod
def get_disk_for_user(user_email):
JBoxDefaultConfigVol.log_debug("creating configs disk for %s", user_email)
if JBoxDefaultConfigVol.FS_LOC is None:
JBoxDefaultConfigVol.configure()
disk_path = os.path.join(JBoxDefaultConfigVol.FS_LOC, unique_sessname(user_email))
cfgvol = JBoxDefaultConfigVol(disk_path, user_email=user_email)
cfgvol._unpack_config()
return cfgvol
@staticmethod
def is_mount_path(fs_path):
return fs_path.startswith(JBoxDefaultConfigVol.FS_LOC)
@staticmethod
def get_disk_from_container(cid):
mounts_used = JBoxDefaultConfigVol._get_config_mounts_used(cid)
if len(mounts_used) == 0:
return None
mount_used = mounts_used[0]
disk_path = os.path.join(JBoxDefaultConfigVol.FS_LOC, str(mount_used))
container_name = JBoxVol.get_cname(cid)
sessname = container_name[1:]
return JBoxDefaultConfigVol(disk_path, sessname=sessname)
@staticmethod
def refresh_user_home_image():
pass
def release(self, backup=False):
ensure_delete(self.disk_path, include_itself=True)
@staticmethod
def disk_ids_used_pct():
return 0
def _unpack_config(self):
if os.path.exists(self.disk_path):
JBoxDefaultConfigVol.log_debug("Config folder exists %s. Deleting...", self.disk_path)
ensure_delete(self.disk_path, include_itself=True)
JBoxDefaultConfigVol.log_debug("Config folder deleted %s", self.disk_path)
JBoxDefaultConfigVol.log_debug("Will unpack config to %s", self.disk_path)
os.mkdir(self.disk_path)
JBoxDefaultConfigVol.log_debug("Created config folder %s", self.disk_path)
self.restore_user_home(True)
JBoxDefaultConfigVol.log_debug("Restored config files to %s", self.disk_path)
self.setup_instance_config()
JBoxDefaultConfigVol.log_debug("Setup instance config at %s", self.disk_path) | mit | 4,886,966,778,760,411,000 | 34.337209 | 98 | 0.657999 | false | 3.595266 | true | false | false |
ryfx/modrana | modules/pyrender/renderer_labels.py | 1 | 2300 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
#
#----------------------------------------------------------------------------
# Copyright 2008, authors:
# * Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from render_cairo_base import OsmRenderBase
from tilenames import *
class RenderClass(OsmRenderBase):
# Specify the background for new tiles
def imageBackgroundColour(self, mapLayer=None):
return (0, 0, 0, 0)
def requireDataTile(self):
return (False)
# Draw a tile
def draw(self):
file = open("places.txt", "r")
ctx = self.getCtx("mainlayer")
print(self.proj.S, self.proj.dLat)
#pLon = (lon - self.W) / self.dLon
for line in file:
line = line.strip()
(lat, lon, id, type, name) = line.split("\t")
if (type in ('c', 't')):
(px, py) = latlon2relativeXY(float(lat), float(lon))
(x, y) = self.proj.project(py, px)
ctx.set_source_rgb(0.0, 0.0, 0.0)
ctx.set_font_size(12)
ctx.move_to(x, y)
ctx.show_text(name)
ctx.stroke()
#-----------------------------------------------------------------
# Test suite - call this file from the command-line to generate a
# sample image
if (__name__ == '__main__'):
a = RenderClass()
filename = "sample_" + __file__ + ".png"
a.RenderTile(8, 128, 84, 'default', filename) # norwch
print("------------------------------------")
print("Saved image to " + filename)
| gpl-3.0 | -4,013,746,217,387,257,000 | 35.507937 | 77 | 0.525652 | false | 4.078014 | false | false | false |
marek/lockfree | tools/graph.py | 1 | 1352 | #!/usr/bin/env python
import plotly.offline as offline
import plotly.plotly as py
import plotly.graph_objs as go
import csv
import string
import argparse
parser = argparse.ArgumentParser(description='Process lockfree log')
parser.add_argument (
'--input',
help='input file')
parser.add_argument(
'--output',
help='output of graph picture'
)
args = parser.parse_args()
# parse header
headerDefined = False
xLabels = None
def filterRow(row):
return row[1:2] + [ s.translate(
{ord(c): None for c in string.ascii_letters}
) for s in row[3:]]
data = []
for row in csv.reader(open(args.input, 'r'), delimiter=' ', skipinitialspace=True):
row = filterRow(row)
if not headerDefined:
xLabels = row[1:]
headerDefined = True
continue
trace = go.Scatter(
x = xLabels,
y = row[1:],
name = row[0],
line = dict(
width = 4
)
)
data.append(trace)
offline.init_notebook_mode()
layout = dict(title = 'Queue Performance',
xaxis = dict(title = "Log statements (volume)"),
yaxis = dict(title = 'Time (seconds)')
)
# Plot and embed in ipython notebook!
fig = dict(data=data, layout=layout)
offline.plot(fig, auto_open=True, filename=args.output)
| apache-2.0 | -607,661,293,408,453,400 | 20.806452 | 83 | 0.597633 | false | 3.614973 | false | false | false |
anantzoid/VQA-Keras-Visual-Question-Answering | models.py | 1 | 1630 | from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, LSTM, Flatten, Embedding, Merge
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
import h5py
def Word2VecModel(embedding_matrix, num_words, embedding_dim, seq_length, dropout_rate):
print "Creating text model..."
model = Sequential()
model.add(Embedding(num_words, embedding_dim,
weights=[embedding_matrix], input_length=seq_length, trainable=False))
model.add(LSTM(units=512, return_sequences=True, input_shape=(seq_length, embedding_dim)))
model.add(Dropout(dropout_rate))
model.add(LSTM(units=512, return_sequences=False))
model.add(Dropout(dropout_rate))
model.add(Dense(1024, activation='tanh'))
return model
def img_model(dropout_rate):
print "Creating image model..."
model = Sequential()
model.add(Dense(1024, input_dim=4096, activation='tanh'))
return model
def vqa_model(embedding_matrix, num_words, embedding_dim, seq_length, dropout_rate, num_classes):
vgg_model = img_model(dropout_rate)
lstm_model = Word2VecModel(embedding_matrix, num_words, embedding_dim, seq_length, dropout_rate)
print "Merging final model..."
fc_model = Sequential()
fc_model.add(Merge([vgg_model, lstm_model], mode='mul'))
fc_model.add(Dropout(dropout_rate))
fc_model.add(Dense(1000, activation='tanh'))
fc_model.add(Dropout(dropout_rate))
fc_model.add(Dense(num_classes, activation='softmax'))
fc_model.compile(optimizer='rmsprop', loss='categorical_crossentropy',
metrics=['accuracy'])
return fc_model
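# Illustrative usage sketch (not part of the original module). The shapes and
# hyperparameter values below are assumptions chosen only to show how the
# builder functions above fit together.
if __name__ == '__main__':
    import numpy as np
    num_words, embedding_dim, seq_length = 10000, 300, 26
    dummy_embedding_matrix = np.zeros((num_words, embedding_dim), dtype='float32')
    model = vqa_model(dummy_embedding_matrix, num_words, embedding_dim,
                      seq_length, dropout_rate=0.5, num_classes=1000)
    model.summary()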
| mit | 2,992,886,921,016,437,000 | 43.054054 | 100 | 0.720245 | false | 3.490364 | false | false | false |
qingqing01/models | fluid/adversarial/advbox/attacks/gradient_method.py | 3 | 10220 | """
This module provide the attack method for Iterator FGSM's implement.
"""
from __future__ import division
import logging
from collections import Iterable
import numpy as np
from .base import Attack
__all__ = [
'GradientMethodAttack', 'FastGradientSignMethodAttack', 'FGSM',
'FastGradientSignMethodTargetedAttack', 'FGSMT',
'BasicIterativeMethodAttack', 'BIM',
'IterativeLeastLikelyClassMethodAttack', 'ILCM', 'MomentumIteratorAttack',
'MIFGSM'
]
class GradientMethodAttack(Attack):
"""
This class implements gradient attack method, and is the base of FGSM, BIM,
ILCM, etc.
"""
def __init__(self, model, support_targeted=True):
"""
:param model(model): The model to be attacked.
:param support_targeted(bool): Does this attack method support targeted.
"""
super(GradientMethodAttack, self).__init__(model)
self.support_targeted = support_targeted
def _apply(self,
adversary,
norm_ord=np.inf,
epsilons=0.01,
steps=1,
epsilon_steps=100):
"""
Apply the gradient attack method.
:param adversary(Adversary):
The Adversary object.
:param norm_ord(int):
Order of the norm, such as np.inf, 1, 2, etc. It can't be 0.
:param epsilons(list|tuple|int):
Attack step size (input variation).
Largest step size if epsilons is not iterable.
:param steps:
The number of attack iteration.
:param epsilon_steps:
The number of Epsilons' iteration for each attack iteration.
:return:
adversary(Adversary): The Adversary object.
"""
if norm_ord == 0:
raise ValueError("L0 norm is not supported!")
if not self.support_targeted:
if adversary.is_targeted_attack:
raise ValueError(
"This attack method doesn't support targeted attack!")
if not isinstance(epsilons, Iterable):
epsilons = np.linspace(0, epsilons, num=epsilon_steps)
pre_label = adversary.original_label
min_, max_ = self.model.bounds()
assert self.model.channel_axis() == adversary.original.ndim
assert (self.model.channel_axis() == 1 or
self.model.channel_axis() == adversary.original.shape[0] or
self.model.channel_axis() == adversary.original.shape[-1])
for epsilon in epsilons[:]:
step = 1
adv_img = adversary.original
if epsilon == 0.0:
continue
for i in range(steps):
if adversary.is_targeted_attack:
gradient = -self.model.gradient(adv_img,
adversary.target_label)
else:
gradient = self.model.gradient(adv_img,
adversary.original_label)
if norm_ord == np.inf:
gradient_norm = np.sign(gradient)
else:
gradient_norm = gradient / self._norm(
gradient, ord=norm_ord)
adv_img = adv_img + epsilon * gradient_norm * (max_ - min_)
adv_img = np.clip(adv_img, min_, max_)
adv_label = np.argmax(self.model.predict(adv_img))
logging.info('step={}, epsilon = {:.5f}, pre_label = {}, '
'adv_label={}'.format(step, epsilon, pre_label,
adv_label))
if adversary.try_accept_the_example(adv_img, adv_label):
return adversary
step += 1
return adversary
@staticmethod
def _norm(a, ord):
if a.ndim == 1:
return np.linalg.norm(a, ord=ord)
if a.ndim == a.shape[0]:
norm_shape = (a.ndim, reduce(np.dot, a.shape[1:]))
norm_axis = 1
else:
norm_shape = (reduce(np.dot, a.shape[:-1]), a.ndim)
norm_axis = 0
return np.linalg.norm(a.reshape(norm_shape), ord=ord, axis=norm_axis)
class FastGradientSignMethodTargetedAttack(GradientMethodAttack):
"""
"Fast Gradient Sign Method" is extended to support targeted attack.
"Fast Gradient Sign Method" was originally implemented by Goodfellow et
al. (2015) with the infinity norm.
Paper link: https://arxiv.org/abs/1412.6572
"""
def _apply(self, adversary, epsilons=0.01):
return GradientMethodAttack._apply(
self,
adversary=adversary,
norm_ord=np.inf,
epsilons=epsilons,
steps=1)
class FastGradientSignMethodAttack(FastGradientSignMethodTargetedAttack):
"""
This attack was originally implemented by Goodfellow et al. (2015) with the
infinity norm, and is known as the "Fast Gradient Sign Method".
Paper link: https://arxiv.org/abs/1412.6572
"""
def __init__(self, model):
super(FastGradientSignMethodAttack, self).__init__(model, False)
class IterativeLeastLikelyClassMethodAttack(GradientMethodAttack):
"""
"Iterative Least-likely Class Method (ILCM)" extends "BIM" to support
targeted attack.
"The Basic Iterative Method (BIM)" is to extend "FSGM". "BIM" iteratively
take multiple small steps while adjusting the direction after each step.
Paper link: https://arxiv.org/abs/1607.02533
"""
def _apply(self, adversary, epsilons=0.01, steps=1000):
return GradientMethodAttack._apply(
self,
adversary=adversary,
norm_ord=np.inf,
epsilons=epsilons,
steps=steps)
class BasicIterativeMethodAttack(IterativeLeastLikelyClassMethodAttack):
"""
FGSM is a one-step method. "The Basic Iterative Method (BIM)" iteratively
take multiple small steps while adjusting the direction after each step.
Paper link: https://arxiv.org/abs/1607.02533
"""
def __init__(self, model):
super(BasicIterativeMethodAttack, self).__init__(model, False)
class MomentumIteratorAttack(GradientMethodAttack):
"""
The Momentum Iterative Fast Gradient Sign Method (Dong et al. 2017).
This method won the first places in NIPS 2017 Non-targeted Adversarial
Attacks and Targeted Adversarial Attacks. The original paper used
hard labels for this attack; no label smoothing. inf norm.
Paper link: https://arxiv.org/pdf/1710.06081.pdf
"""
def __init__(self, model, support_targeted=True):
"""
:param model(model): The model to be attacked.
:param support_targeted(bool): Does this attack method support targeted.
"""
super(MomentumIteratorAttack, self).__init__(model)
self.support_targeted = support_targeted
def _apply(self,
adversary,
norm_ord=np.inf,
epsilons=0.1,
steps=100,
epsilon_steps=100,
decay_factor=1):
"""
Apply the momentum iterative gradient attack method.
:param adversary(Adversary):
The Adversary object.
:param norm_ord(int):
Order of the norm, such as np.inf, 1, 2, etc. It can't be 0.
:param epsilons(list|tuple|float):
Attack step size (input variation).
Largest step size if epsilons is not iterable.
:param epsilon_steps:
The number of Epsilons' iteration for each attack iteration.
:param steps:
The number of attack iteration.
:param decay_factor:
The decay factor for the momentum term.
:return:
adversary(Adversary): The Adversary object.
"""
if norm_ord == 0:
raise ValueError("L0 norm is not supported!")
if not self.support_targeted:
if adversary.is_targeted_attack:
raise ValueError(
"This attack method doesn't support targeted attack!")
assert self.model.channel_axis() == adversary.original.ndim
assert (self.model.channel_axis() == 1 or
self.model.channel_axis() == adversary.original.shape[0] or
self.model.channel_axis() == adversary.original.shape[-1])
if not isinstance(epsilons, Iterable):
epsilons = np.linspace(0, epsilons, num=epsilon_steps)
min_, max_ = self.model.bounds()
pre_label = adversary.original_label
for epsilon in epsilons[:]:
if epsilon == 0.0:
continue
step = 1
adv_img = adversary.original
momentum = 0
for i in range(steps):
if adversary.is_targeted_attack:
gradient = -self.model.gradient(adv_img,
adversary.target_label)
else:
gradient = self.model.gradient(adv_img, pre_label)
# normalize gradient
velocity = gradient / self._norm(gradient, ord=1)
momentum = decay_factor * momentum + velocity
if norm_ord == np.inf:
normalized_grad = np.sign(momentum)
                else:
                    # divide by the norm so the momentum step is normalized,
                    # matching the non-infinity branch of GradientMethodAttack
                    normalized_grad = momentum / self._norm(momentum, ord=norm_ord)
perturbation = epsilon * normalized_grad
adv_img = adv_img + perturbation
adv_img = np.clip(adv_img, min_, max_)
adv_label = np.argmax(self.model.predict(adv_img))
logging.info(
'step={}, epsilon = {:.5f}, pre_label = {}, adv_label={}'
.format(step, epsilon, pre_label, adv_label))
if adversary.try_accept_the_example(adv_img, adv_label):
return adversary
step += 1
return adversary
FGSM = FastGradientSignMethodAttack
FGSMT = FastGradientSignMethodTargetedAttack
BIM = BasicIterativeMethodAttack
ILCM = IterativeLeastLikelyClassMethodAttack
MIFGSM = MomentumIteratorAttack
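# Minimal usage sketch (an assumption, not part of the original module): it
# presumes an advbox model wrapper `m` and an Adversary built from a single
# sample/label pair, as in the package's tutorial scripts.
#
#     from advbox.adversary import Adversary
#     attack = FGSM(m)
#     adversary = Adversary(sample, label)
#     adversary = attack(adversary, epsilons=0.3)
#     if adversary.is_successful():
#         print("found adversarial example, label %d" % adversary.adversarial_label)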
| apache-2.0 | -1,025,681,624,726,780,700 | 35.76259 | 80 | 0.576321 | false | 4.009415 | false | false | false |
assamite/cc-course-UH17 | week2/therex.py | 1 | 3851 | #!/usr/bin/env python
__author__ = "Khalid Alnajjar"
'''
A class for accessing Thesaurus Rex (v2) API
Requirements: requests xmltodict (installable through pip)
'''
import requests, urllib.parse, time, xmltodict, json
class TheRex:
def __init__(self):
self.base_url = 'http://ngrams.ucd.ie/therex2/common-nouns/'
self.throttle = 2 # seconds
self.last_query = None
def map_item(self, r):
return tuple([r['#text'], int(r['@weight'])])
def member(self, concept):
'''To obtain properties and categories of a given concept'''
concept = urllib.parse.quote_plus(concept)
url = '{0}common-nouns/member.action?member={concept}&kw={concept}&needDisamb=true&xml=true'.format(self.base_url, concept=concept)
result = self._query_and_parse(url)
return self._result_to_dict(result, 'MemberData')
def modifier(self, modi, concept):
'''To find cateogires of the input concept that share the input modifier(property)'''
modi = urllib.parse.quote_plus(modi)
concept = urllib.parse.quote_plus(concept)
url = '{0}modifier.action?modi={modi}&ref={ref}&xml=true'.format(self.base_url, modi=modi, ref=concept)
result = self._query_and_parse(url)
return self._result_to_dict(result, 'ModifierData')
def head(self, head, concept):
'''To find properties of the input concept that are shared with the input head(category)'''
modi = urllib.parse.quote_plus(head)
concept = urllib.parse.quote_plus(concept)
url = '{0}head.action?head={head}&ref={ref}&xml=true'.format(self.base_url, head=head, ref=concept)
result = self._query_and_parse(url)
return self._result_to_dict(result, 'HeadData')
def category(self, modi, cat):
'''To find concepts that have a given modi(property) and also fall under a given category'''
url = '{0}category.action?cate={1}&kw={2}&search=true&xml=true'.format(self.base_url, modi + ':' + cat, modi + '+' + cat)
result = self._query_and_parse(url)
return self._result_to_dict(result, 'CategoryData')
def _query_and_parse(self, url):
t = time.time()
response = requests.get(url)
time.sleep(max(self.throttle-(time.time()-t), 0)) # simple throttling
return xmltodict.parse(response.content)
def _result_to_dict(self, query_result, root_name):
_root_content = query_result[root_name]
result = {}
if 'Categories' in _root_content and 'Category' in _root_content['Categories']:
categories = map(lambda r: self.map_item(r), _root_content['Categories']['Category'])
result['categories'] = dict(categories)
if 'Members' in _root_content and 'Member' in _root_content['Members']:
members = map(lambda r: self.map_item(r), _root_content['Members']['Member'])
result['members'] = dict(members)
if 'Modifiers' in _root_content and 'Modifier' in _root_content['Modifiers']:
modifiers = map(lambda r: self.map_item(r), _root_content['Modifiers']['Modifier'])
result['modifiers'] = dict(modifiers)
if 'CategoryHeads' in _root_content and 'CategoryHead' in _root_content['CategoryHeads']:
category_heads = map(lambda r: self.map_item(r), _root_content['CategoryHeads']['CategoryHead'])
result['category_heads'] = dict(category_heads)
return result
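        # The returned dict has up to four keys -- 'categories', 'members',
        # 'modifiers', 'category_heads' -- each mapping a term to an integer
        # weight, e.g. {'categories': {'pet:animal': 980}} (values illustrative).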
if __name__ == '__main__':
tr = TheRex()
target_concept = 'cat'
print(json.dumps(tr.member(target_concept), indent=4))
print()
print(json.dumps(tr.modifier(modi='furry', concept=target_concept), indent=4))
print()
print(json.dumps(tr.head(head='mammal', concept=target_concept), indent=4))
print()
print(json.dumps(tr.category('furry', 'animal'), indent=4))
| mit | -341,045,641,225,821,900 | 46.54321 | 139 | 0.637756 | false | 3.469369 | false | false | false |
buguroo/pyknow | pyknow/deffacts.py | 1 | 1382 | from functools import update_wrapper
import inspect
class DefFacts:
def __new__(cls, nonexpected=None, order=0):
obj = super(DefFacts, cls).__new__(cls)
if nonexpected is not None:
raise SyntaxError("DefFacts must be instanced to allow decoration")
obj.__wrapped = None
obj._wrapped_self = None
obj.order = order
return obj
@property
def _wrapped(self):
return self.__wrapped
@_wrapped.setter
def _wrapped(self, value):
if inspect.isgeneratorfunction(value):
self.__wrapped = value
return update_wrapper(self, self.__wrapped)
else:
raise TypeError("DefFact can only decorate generators.")
def __repr__(self): # pragma: no cover
return "DefFacts(%r)" % (self._wrapped)
def __call__(self, *args, **kwargs):
if self._wrapped is not None:
if self._wrapped_self is None:
gen = self._wrapped(*args, **kwargs)
else:
gen = self._wrapped(self._wrapped_self, *args, **kwargs)
return (x.copy() for x in gen)
elif not args:
raise RuntimeError("Usage error.")
else:
self._wrapped = args[0]
return self
def __get__(self, instance, owner):
self._wrapped_self = instance
return self
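# Illustrative usage sketch (assumed, not part of this module): a DefFacts-
# decorated generator inside a KnowledgeEngine subclass yields the initial
# facts declared on reset().
#
#     class Greetings(KnowledgeEngine):
#         @DefFacts()
#         def startup_facts(self):
#             yield Fact(action='greet')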
| lgpl-3.0 | -7,212,318,727,955,124,000 | 27.791667 | 79 | 0.561505 | false | 4.332288 | false | false | false |
mikesligo/pieproxy | ref/test.py | 1 | 4128 | ''' Copyright 2013 Michael Gallagher
Email: mikesligo at gmail dot com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. '''
#!/usr/bin/python
import re
import socket # Import socket module
from threading import Thread
from struct import *
import time
class Packet:
def __init__(self, packet):
packet = str(packet)
print packet
print "length: "+str(len(packet))
self.full = packet + "\r\n"
try:
host = re.search('\nHost: (\S+)',packet)
self.host = host.group(1)
except:
print "Error finding host"
print packet
raise
def printpacket(self):
print 'All: \n' + self.full
class Server:
def __init__(self, host, port):
self.port = port;
self.host = host;
def start(self):
print "Starting PieProxy..."
self.mainsocket = socket.socket()
        self.mainsocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # allow address reuse
        self.mainsocket.bind((self.host, self.port))
self.mainsocket.listen(5)
print "Starting listen..."
self.listen_for_incoming_client()
def listen_for_incoming_client(self): # To be run in a thread
while True:
self.conn, addr = self.mainsocket.accept() # Establish connection with client.
packet = self.conn.recv(8192)
if len(packet) != 0:
print "\nListening for incoming client..."
packet = Packet(packet)
self.forward_packet_to_server(packet)
# raw_input("\nHit enter to continue")
def forward_packet_to_server(self, packet):
print "Forwarding packet to server..."
s = socket.socket()
s.settimeout(1)
try:
if packet is not None:
print 'Connecting to '+packet.host
else:
print "Host is none"
print packet.full
s.connect((packet.host,80))
s.sendall(packet.full)
#receive = Thread(target=self.listen_for_incoming_server,args=(s,))
#receive.start()
self.listen_for_incoming_server(s)
except:
print "\nERROR ATTEMPTING TO CONNECT OR SEND PACKETS"
print "==========================================="
print packet.full
raise
def listen_for_incoming_server(self,socket):
print "Listening for incoming packets from the server"
print "Receiving data..."
response = socket.recv(8192)
data = response
try:
while len(data) > 0:
                socket.settimeout(socket.gettimeout() + 0.1)
data = socket.recv(8192)
response = response + data
print "Receiving more data..."
print "Length: " + str(len(data))
finally:
print "Response Length: " + str(len(response))
self.return_response_to_client(response)
socket.close()
print "Killing thread..."
return
def return_response_to_client(self, response):
print "Returning response to client..."
self.conn.sendall(response)
def close(self):
self.mainsocket.close()
if __name__ == '__main__':
print
print "Pie Proxy\n=============================\n"
host = socket.gethostname() # Get local machine name
port = 8000 # Reserve port
server = Server(host,port)
server.start()
| gpl-3.0 | -3,125,814,512,734,880,000 | 33.4 | 98 | 0.57437 | false | 4.322513 | false | false | false |
cle1109/sigviewr | readers/lslreader.py | 1 | 1676 | # This file is part of Sigviewr.
# This project is licensed under the GNU GPL (version 3 or higher).
# Copyright 2014 by Clemens Brunner and Thomas Brunner.
from collections import OrderedDict
import time
from .streamreader import StreamReader
# begin HACK
try:
import external.pylsl.pylsl as pylsl
except ImportError:
pass
# end HACK
class LSLReader(StreamReader):
def __init__(self):
super(LSLReader, self).__init__()
def open(self, streams):
self.inlet = pylsl.stream_inlet(streams)
self.metainfo.general["Type"] = "LSL"
self.metainfo.general["Version"] = str(pylsl.protocol_version())
self.metainfo.general["Recording time"] = time.asctime(time.gmtime())
for stream in [streams]:
stream_info = OrderedDict()
stream_info["Type"] = stream.type()
stream_info["Sampling rate"] = str(stream.nominal_srate())
stream_info["Channels"] = str(stream.channel_count())
self.metainfo.streams.append(stream_info)
def close(self):
pass
def numStreams(self):
return len(self.metainfo.streams)
def sampleRate(self, stream):
return float(self.metainfo.streams[stream]["Sampling rate"])
def numChannels(self, stream):
return int(self.metainfo.streams[stream]["Channels"])
def channelLabel(self, stream, channel):
return "Test"
def resolve_streams(self):
self.streams = pylsl.resolve_streams()
return self.streams
def get_data(self):
sample = pylsl.vectorf()
self.inlet.pull_sample(sample)
return list(sample) | gpl-3.0 | -737,442,427,166,842,400 | 27.913793 | 77 | 0.635442 | false | 3.897674 | false | false | false |
sunlaiqi/fundiy | src/shop - 副本/utils.py | 1 | 2978 | """
This file includes commonly used utilities for this app.
"""
from datetime import datetime
today = datetime.now()
year = today.year
month = today.month
day = today.day
# Following are for images upload helper functions. The first two are used for product upload for the front and back.
# The last two are used for design product upload for the front and back.
def front_product_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/maker_<id>/product_<id>/Y/m/d/front/<filename>
return 'product_imgs/maker_{0}/product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.maker.id, instance.id, year, month, day, filename)
def back_product_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/maker_<id>/product_<id>/Y/m/d/back/<filename>
return 'product_imgs/maker_{0}/product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.maker.id, instance.id, year, month, day, filename)
def front_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/front/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
def back_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/back/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
def fill_category_tree(model, deep=0, parent_id=0, tree=[]):
'''
NAME::
fill_category_tree
DESCRIPTION::
        Generally used to walk a category model that has a ``parent`` field
        and build a tree-shaped structure of choices.
    PARAMETERS::
        :param model: the model to traverse; it must expose a ``parent`` attribute
        :param deep: indentation level; parent/child nesting is shown by the
            number of leading dashes ('-')
        :param parent_id: the parent category to start from; 0 means start from
            the top level
        :param tree: the tree structure (a list containing a tuple) to fill
    RETURN::
        No return value is strictly needed, but the tree is returned in case a
        caller wants it.
    USAGE::
        Call it like this:
        choices = [()]
        fill_category_tree(Category, tree=choices)
        A list ([]) is used instead of a tuple because only a mutable list can
        be passed by reference and filled in place.
'''
if parent_id == 0:
        ts = model.objects.filter(parent=None)
        # tree[0] += ((None, 'Select a product type'),)
        for t in ts:
            tmp = [()]
            # recurse, passing the model itself as the first argument
            fill_category_tree(model, 4, t.id, tmp)
            tree[0] += ((t.id, '-'*deep + t.name,),)
            for tt in tmp[0]:
                tree[0] += (tt,)
    else:
        ts = model.objects.filter(parent_id=parent_id)
        for t in ts:
            tree[0] += ((t.id, '-'*deep + t.name,),)
            fill_category_tree(model, deep + 4, t.id, tree)
return tree | mit | -4,548,247,652,537,328,000 | 32.987179 | 149 | 0.63283 | false | 2.618577 | false | false | false |
WeCase/WeCase | src/AboutWindow.py | 1 | 1487 | # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
# WeCase -- This file implemented AboutWindow.
# Copyright (C) 2013, 2014 The WeCase Developers.
# License: GPL v3 or later.
from PyQt4 import QtGui
import version
import wecasepath as path
from AboutWindow_ui import Ui_About_Dialog
class AboutWindow(QtGui.QDialog, Ui_About_Dialog):
def __init__(self, parent=None):
super(AboutWindow, self).__init__(parent)
self.setupUi(self)
def _setPkgProvider(self):
if version.pkgprovider == version.default_provider:
vanilla = self.tr("Vanilla Version")
self.distLabel.setText(vanilla)
else:
disttext = version.pkgprovider
self.distLabel.setText(self.distLabel.text() % disttext)
def _setVersionLabel(self):
self.versionLabel.setText(self.versionLabel.text() % version.pkgversion)
def _setDescriptionLabel(self):
self.descriptionLabel.setText(self.descriptionLabel.text() % version.bug_report_url)
def _setContirbutorBrowser(self):
with open(path.myself_path + "AUTHORS", "r") as f:
contirbutors = f.read()
contirbutors = contirbutors.replace("\n", "<br />")
self.contirbutorBrowser.setHtml("<center>%s</center>" % contirbutors)
def setupUi(self, widget):
super().setupUi(widget)
self._setPkgProvider()
self._setVersionLabel()
self._setDescriptionLabel()
self._setContirbutorBrowser()
| gpl-3.0 | 4,490,473,591,975,713,000 | 31.326087 | 92 | 0.667115 | false | 3.726817 | false | false | false |
CSC-IT-Center-for-Science/pouta-blueprints | pebbles/views/stats.py | 1 | 2567 | from flask_restful import marshal_with, fields
from flask import Blueprint as FlaskBlueprint
import logging
from pebbles.models import Blueprint, Instance
from pebbles.server import restful
from pebbles.views.commons import auth
from pebbles.utils import requires_admin, memoize
from collections import defaultdict
stats = FlaskBlueprint('stats', __name__)
def query_blueprint(blueprint_id):
return Blueprint.query.filter_by(id=blueprint_id).first()
blueprint_fields = {
'name': fields.String,
'users': fields.Integer,
'launched_instances': fields.Integer,
'running_instances': fields.Integer,
}
result_fields = {
'blueprints': fields.List(fields.Nested(blueprint_fields)),
'overall_running_instances': fields.Integer
}
class StatsList(restful.Resource):
@auth.login_required
@requires_admin
@marshal_with(result_fields)
def get(self):
instances = Instance.query.all()
overall_running_instances = Instance.query.filter(Instance.state != Instance.STATE_DELETED).count()
get_blueprint = memoize(query_blueprint)
per_blueprint_results = defaultdict(lambda: {'users': 0, 'launched_instances': 0, 'running_instances': 0})
unique_users = defaultdict(set)
for instance in instances:
user_id = instance.user_id
blueprint = get_blueprint(instance.blueprint_id)
if not blueprint:
logging.warn("instance %s has a reference to non-existing blueprint" % instance.id)
continue
if 'name' not in per_blueprint_results[blueprint.id]:
per_blueprint_results[blueprint.id]['name'] = blueprint.name
if user_id not in unique_users[blueprint.id]:
unique_users[blueprint.id].add(user_id)
per_blueprint_results[blueprint.id]['users'] += 1
if(instance.state != Instance.STATE_DELETED):
per_blueprint_results[blueprint.id]['running_instances'] += 1
per_blueprint_results[blueprint.id]['launched_instances'] += 1
# per_blueprint_results[blueprint.id]['overall_running_instances'] = overall_running_instances
results = []
for blueprint_id in per_blueprint_results:
results.append(per_blueprint_results[blueprint_id])
results.sort(key=lambda results_entry: (results_entry["launched_instances"], results_entry["users"]), reverse=True)
final = {"blueprints": results, "overall_running_instances": overall_running_instances}
return final
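# Illustrative wiring sketch (assumed, not part of this module): the resource
# is typically registered on the Flask-RESTful API object elsewhere, e.g.
#
#     api.add_resource(StatsList, '/api/v1/stats')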
| mit | 2,162,888,116,153,482,800 | 31.910256 | 123 | 0.671601 | false | 4.029827 | false | false | false |
martinezmizael/Escribir-con-la-mente | object/probarRedNeuronalNormalizado.py | 1 | 1385 | # -*- encoding: utf-8 -*-
'''
Created on: 2015
Author: Mizael Martinez
'''
from pyfann import libfann
from login import Login
from escribirArchivo import EscribirArchivo
import inspect, sys, os
sys.path.append("../model")
from baseDatos import BaseDatos
auxiliar=[]
np=EscribirArchivo()
np.setUrl("prueba_normalizado.data")
np.setNumeroEntradas(8)
np.setNumeroSalidas(5)
np.escribirEnArchivoParaProbarRedNeuronalNormalizados()
bd=BaseDatos()
# first element: number of neurons
# second element: error
# third element: URL of the file for that training run
print bd.obtenerErroresMenoresDeEntrenamientoNormalizado()[0][2]
errores=bd.obtenerErroresMenoresDeEntrenamientoNormalizado()
for k in range(len(errores)):
ann = libfann.neural_net()
ann.create_from_file("../files/"+errores[k][2])
ann.reset_MSE()
test_data = libfann.training_data()
test_data.read_train_from_file("../files/prueba_normalizado.data")
entradas=test_data.get_input()
salidas=test_data.get_output()
for i in range(0,len(entradas)):
ann.test(entradas[i], salidas[i])
auxiliar.append(ann.get_MSE())
print auxiliar
print "%s - %s - %s - %s"%("Neuronas".rjust(8),"Archivo".rjust(15),"Error Entrenamiento".rjust(16),"Error Prueba")
for z in range(len(errores)):
print "%s - %s - %s - %s"%(str(errores[z][0]).rjust(8),str(errores[z][2]).rjust(15),str(errores[z][1]).rjust(16),str(auxiliar[z]))
| mit | -3,499,353,600,829,294,000 | 27.265306 | 131 | 0.729964 | false | 2.421329 | false | false | false |
joshamilton/Hamilton_acI_2017 | code/annotTransporters/01findGenesAnnotExpr.py | 1 | 10530 | ###############################################################################
# findGenesAnnotExpr.py
# Copyright (c) 2017, Joshua J Hamilton and Katherine D McMahon
# Affiliation: Department of Bacteriology
# University of Wisconsin-Madison, Madison, Wisconsin, USA
# URL: http://http://mcmahonlab.wisc.edu/
# All rights reserved.
################################################################################
# Identify transport reactions and their corresponding genes. Map genes to COGs
# and extract their expression profiles.
################################################################################
#%%#############################################################################
### Import packages
################################################################################
from collections import Counter
import cobra
import copy
import os
import pandas as pd
import re
#%%#############################################################################
### Define folder structure
################################################################################
modelDir = '../../models/rawModels'
genomeDir = '../../data/transporters/modelGenes'
cladeDir = '../../data/transporters/cladeCOGs'
cogDir = '../../data/orthoMCL/genomeCOGs'
exprDir = '../../results/expression'
resultsDir = '../../results/transporters'
taxonFile = '../../data/externalData/taxonomy.csv'
annotTable = '../../data/orthoMCL/annotTable.csv'
#%%#############################################################################
### Model pre-processing
################################################################################
# Check that the output directory exists. If not, create it.
if not os.path.exists(genomeDir):
os.makedirs(genomeDir)
if not os.path.exists(cladeDir):
os.makedirs(cladeDir)
if not os.path.exists(resultsDir):
os.makedirs(resultsDir)
# Import the list of models
modelList = []
for model in os.listdir(modelDir):
if not model.startswith('.'):
modelList.append(model)
#%%#############################################################################
### Identify transporter genes
################################################################################
for curModel in modelList:
# Read in model from SBML and create dict to store stuff
model = cobra.io.read_sbml_model(modelDir+'/'+curModel+'/'+curModel+'.xml')
transDict = {}
for curRxn in model.reactions:
# Transport reactions, based on keywords
if re.search('transport', curRxn.name) or re.search('permease', curRxn.name) or re.search('symport', curRxn.name) or re.search('diffusion', curRxn.name) or re.search('excretion', curRxn.name) or re.search('export', curRxn.name) or re.search('secretion', curRxn.name) or re.search('uptake', curRxn.name) or re.search('antiport', curRxn.name):
cdsList = []
for gene in curRxn.genes:
if gene.id != 'Unknown':
cdsList = cdsList + [gene.id]
transDict[curRxn.id] = cdsList
# Transport reactions which don't get picked up based on keywords
elif curRxn.id == 'rxn05226_c0' or curRxn.id == 'rxn05292_c0' or curRxn.id == 'rxn05305_c0' or curRxn.id == 'rxn05312_c0' or curRxn.id == 'rxn05315_c0' or curRxn.id == 'rxn10945_c0' or curRxn.id == 'rxn10116_c0':
cdsList = []
for gene in curRxn.genes:
if gene.id != 'Unknown':
cdsList = cdsList + [gene.id]
transDict[curRxn.id] = cdsList
with open(genomeDir+'/'+curModel+'.txt', 'w') as outFile:
for key in transDict.keys():
outFile.write(key+';')
for cds in transDict[key]:
outFile.write(cds+',')
outFile.write('\n')
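    # each line of the per-genome transporter file then looks like
    # 'rxn05226_c0;genome_CDS_0001,genome_CDS_0342,' (IDs illustrative)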
#%%#############################################################################
### For each clade, identify the COGs associated with each reaction
### For each (rxn, cog) pairing, identify the expression data
################################################################################
# Read in the taxonomy table and create a list of genomes for each clade
cladeToGenomeDict = {}
cladeList = []
taxonClass = pd.DataFrame.from_csv(taxonFile, sep=',')
taxonClass = taxonClass.dropna()
# Extract the unique clades
cladeList = pd.unique(taxonClass['Clade'].values)
for clade in cladeList:
genomeList = taxonClass[taxonClass['Clade'] == clade].index.tolist()
cladeToGenomeDict[clade] = genomeList
# Read in the annotation table
annotDF = pd.read_csv(annotTable, index_col=0)
# Identify all the unique reactions within the clade and their associated COGs.
for clade in cladeList:
geneDict = {}
cogDict = {}
modelList = cladeToGenomeDict[clade]
for model in modelList:
# Create a dictionary associating CDS with each reaction
with open(genomeDir+'/'+model+'.txt', 'r') as inFile:
for line in inFile:
[gene, cdsArray] = line.strip().split(';')
cdsArray = cdsArray.split(',')
cdsArray = filter(None, cdsArray)
if len(cdsArray) > 0:
if gene in geneDict.keys():
geneDict[gene] = geneDict[gene] + cdsArray
else:
geneDict[gene] = cdsArray
for cds in cdsArray:
cogDict[cds] = None
# Create a dictionary associating with COGs with each CDS
# Temporary dict to store all associations for that genome
tempDict = {}
with open(cogDir+'/'+model+'COGs.txt', 'r') as inFile:
for line in inFile:
[cds, cog] = line.strip().split(',')
tempDict[cds] = cog
# Populate the cogDict using this info
for cds in cogDict.keys():
if cds.replace('_CDS_', '.genome.CDS.') in tempDict.keys():
cogDict[cds] = tempDict[cds.replace('_CDS_', '.genome.CDS.')]
with open(cladeDir+'/'+clade+'.CDS.txt', 'w') as outFile:
for key in geneDict.keys():
outFile.write(key+';')
for cds in geneDict[key]:
outFile.write(cds+',')
outFile.write('\n')
# Now, we need to map the CDS for each reaction to its COG.
rxnCogDict = copy.deepcopy(geneDict)
for rxn in rxnCogDict.keys():
for pos, cds in enumerate(rxnCogDict[rxn]):
rxnCogDict[rxn][pos] = cogDict[cds]
# Some CDS map to the same COG, so update the lists to only include
# unique entries
for rxn in rxnCogDict.keys():
rxnCogDict[rxn] = list(set(rxnCogDict[rxn]))
with open(cladeDir+'/'+clade+'.COG.txt', 'w') as outFile:
for key in sorted(rxnCogDict.keys()):
for cds in sorted(rxnCogDict[key]):
outFile.write(key+','+str(cds)+'\n')
# Now, read in the expression data for that clade
exprDataFrame = pd.read_csv(exprDir+'/'+clade+'.norm', index_col=1)
exprDataFrame = exprDataFrame.drop('Clade', axis=1)
# Create an empty dataframe
rxnCogExprMultiIndex = pd.MultiIndex(levels=[[],[]],
labels=[[],[]],
names=['Reaction', 'COG'])
rxnCogExprDataFrame = pd.DataFrame(index=rxnCogExprMultiIndex, columns=exprDataFrame.columns)
# Iterate over the rxnCogDict and look up expression values in the exprDataFrame
# Use these to populate the rxnCogExprDataFrame
for rxn in sorted(rxnCogDict.keys()):
for cds in sorted(rxnCogDict[rxn]):
# If CDS IS in the genome AND expressed
if cds in exprDataFrame.index:
rxnCogExprDataFrame.loc[(rxn, cds),:] = exprDataFrame.loc[cds]
# If CDS IS in the genome AND NOT expressed
elif cds in cogDict.values():
rxnCogExprDataFrame.loc[(rxn, cds),:] = 0
# If CDS IS NOT in the genome
else:
rxnCogExprDataFrame.loc[(rxn, cds),:] = None
# The genes which are not expressed will not have consensus annotations
# Rerun that piece of code
# Compute majority annotation
    # First subset the dataframe, keeping only the genomes for that clade and dropping the rest
tempDF = annotDF[modelList]
for curIndex in rxnCogExprDataFrame.index:
cog = curIndex[1]
annotList = []
for genome in tempDF.columns:
if not pd.isnull(tempDF.loc[cog][genome]):
innerString = tempDF.loc[cog][genome]
# Dataframe element is a string enclosed in brackets with a comma separating elements
innerString = re.sub('[\[\]]' , '', innerString)
innerList = re.split('\', \'|\", \"', innerString)
innerList = [re.sub('\"|\'', '', string) for string in innerList]
annotList = annotList + innerList
# Find the most common
annotCounter = Counter(annotList)
majorityAnnot = annotCounter.most_common(1)[0][0]
# Assign the Annotation
rxnCogExprDataFrame.loc[curIndex,'Annotation'] = majorityAnnot
# Write the results to file
rxnCogExprDataFrame.to_csv(cladeDir+'/'+clade+'.COG.norm')
#%%#############################################################################
### Aggregate it all into a single dataframe
################################################################################
# Create a master dataframe
masterMultiIndex = pd.MultiIndex(levels=[[],[]],
labels=[[],[]],
names=['Reaction', 'COG'])
masterDataFrame = pd.DataFrame(index=masterMultiIndex)
for clade in cladeList:
# Read in the expression data for that clade
rxnCogExprDataFrame = pd.read_csv(cladeDir+'/'+clade+'.COG.norm', index_col=[0,1])
# Rename the columns
for column in rxnCogExprDataFrame.columns:
rxnCogExprDataFrame.rename(columns={column:column+' ('+clade+')'}, inplace=True)
# Merge into the masterDataFrame
masterDataFrame = pd.concat([masterDataFrame, rxnCogExprDataFrame], axis=1, join='outer')
# Write to file
masterDataFrame.to_csv(resultsDir+'/transAnnotExpr.csv')
| mit | 6,324,326,946,573,725,000 | 40.294118 | 349 | 0.531909 | false | 4.040675 | false | false | false |
mikedh/trimesh | tests/test_extrude.py | 2 | 2901 | try:
from . import generic as g
except BaseException:
import generic as g
try:
import triangle # NOQA
has_triangle = True
except ImportError:
g.log.warning('No triangle! Not testing extrude primitives!')
has_triangle = False
class ExtrudeTest(g.unittest.TestCase):
def test_extrusion(self):
if not has_triangle:
return
transform = g.trimesh.transformations.random_rotation_matrix()
polygon = g.Point([0, 0]).buffer(.5)
e = g.trimesh.primitives.Extrusion(
polygon=polygon,
transform=transform)
# will create an inflated version of the extrusion
b = e.buffer(.1)
assert b.to_mesh().volume > e.to_mesh().volume
assert b.contains(e.vertices).all()
# try making it smaller
b = e.buffer(-.1)
assert b.to_mesh().volume < e.to_mesh().volume
assert e.contains(b.vertices).all()
# try with negative height
e = g.trimesh.primitives.Extrusion(
polygon=polygon,
height=-1.0,
transform=transform)
assert e.to_mesh().volume > 0.0
# will create an inflated version of the extrusion
b = e.buffer(.1)
assert b.to_mesh().volume > e.to_mesh().volume
assert b.contains(e.vertices).all()
# try making it smaller
b = e.buffer(-.1)
assert b.to_mesh().volume < e.to_mesh().volume
assert e.contains(b.vertices).all()
# try with negative height and transform
transform = [[1., 0., 0., -0.],
[0., 1., 0., 0.],
[-0., -0., -1., -0.],
[0., 0., 0., 1.]]
e = g.trimesh.primitives.Extrusion(
polygon=polygon,
height=-1.0,
transform=transform)
assert e.to_mesh().volume > 0.0
for T in g.transforms:
current = e.copy().apply_transform(T)
# get the special case OBB calculation for extrusions
obb = current.bounding_box_oriented
# check to make sure shortcutted OBB is the right size
assert g.np.isclose(
obb.volume,
current.to_mesh().bounding_box_oriented.volume)
# use OBB transform to project vertices of extrusion to plane
points = g.trimesh.transform_points(
current.vertices, g.np.linalg.inv(obb.primitive.transform))
# half extents of calculated oriented bounding box
half = (g.np.abs(obb.primitive.extents) / 2.0) + 1e-3
# every vertex should be inside OBB
assert (points > -half).all()
assert (points < half).all()
assert current.direction.shape == (3,)
assert current.origin.shape == (3,)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
| mit | 36,126,664,886,770,264 | 32.344828 | 75 | 0.559118 | false | 3.842384 | true | false | false |
alexblaessle/PyRW | pyrw/RWstep.py | 1 | 5942 | #===========================================================================
#License
#===========================================================================
#Copyright (C) 2016 Alexander Blaessle
#This software is distributed under the terms of the GNU General Public License.
#This file is part of PyRW.
#PyRW is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
#===========================================================================
#Importing necessary modules
#===========================================================================
#Numpy
import numpy as np
import RWsuperposition
#===========================================================================
#Module description
#===========================================================================
"""
Step module of pyrw containing classes describing random walk steps for walkers,
including the following classes:
(1) step
(2) MRWstep
(3) CRWstep
(4) CorRWstep
(5) CCRWstep
"""
#===========================================================================
#Module classes
#===========================================================================
class step(object):
#Init
def __init__(self,w,typ):
self.walker=w
self.typ=typ
self.superpositions=[]
def updateGammaDist(self):
self.gammaVec=[0.]
for s in self.superpositions:
self.gammaVec.append(self.gammaVec[-1]+s.gamma)
return self.gammaVec
def checkGammas(self,debug=False):
sumGammas=0
for s in self.superpositions:
sumGammas=sumGammas+s.gamma
if sumGammas==1:
return True
else:
if debug:
print "Warning, gammas do not sum up to 1!"
return False
def performStep(self):
#Pick random number to choose which superposition
rand_mode=np.random.random()
#Check which superposition to perform
for i in range(len(self.gammaVec)):
            #print self.gammaVec[i], " <= ", rand_mode , " < = "
if self.gammaVec[i]<=rand_mode and rand_mode<=self.gammaVec[i+1]:
self.superpositions[i].doStep()
break
def addSuperposition(self,r,gamma,kappa):
        try:
            newId=max(self.getSuperpositionIds())+1
        except ValueError:
            # max() of an empty sequence: no superpositions added yet
            newId=0
s=RWsuperposition.superposition(self.walker,r,gamma,kappa,newId)
self.superpositions.append(s)
self.updateGammaDist()
return s
def getSuperpositionIds(self):
ids=[]
for s in self.superpositions:
ids.append(s.Id)
return ids
class MRWstep(step):
#Init
def __init__(self,w,r):
super(MRWstep, self).__init__(w,0)
#Add simple Brownian step
self.addSuperposition(r,1,0)
class CRWstep(step):
#Init
def __init__(self,w,r1,r2,gamma):
super(CRWstep, self).__init__(w,1)
#Add two superpositions
self.addSuperposition(r1,gamma,0)
self.addSuperposition(r2,gamma,0)
class CorRWstep(step):
#Init
def __init__(self,w,r,kappa):
super(CorRWstep, self).__init__(w,2)
#Add simple correlated random walk
self.addSuperposition(r,1,kappa)
class CCRWstep(step):
#Init
def __init__(self,w,r1,r2,gamma,kappa):
super(CCRWstep, self).__init__(w,3)
#Add two superpositions, one MRW and one CorRW
self.addSuperposition(r1,gamma,0)
self.addSuperposition(r2,1-gamma,kappa)
def setParms(self,parms):
self.scaleR(parms[0])
self.setGamma(parms[1])
self.setKappa(parms[2])
def scaleR(self,rScale):
r1=self.superpositions[0].getR()
self.setR2(rScale*r1)
return rScale*r1
def setGamma(self,gamma):
self.superpositions[0].setGamma(gamma)
self.superpositions[1].setGamma(1-gamma)
return gamma
def setKappa(self,kappa):
self.superpositions[1].setKappa(kappa)
return kappa
def setR1(self,r):
self.superpositions[0].setR(r)
return r
def setR2(self,r):
self.superpositions[1].setR(r)
return r
class SCCRWstep(CCRWstep):
def __init__(self,w,r1,r2,gamma,kappa,gammaSup=1,gammaStep=0.1,gammaMin=0.2):
super(SCCRWstep, self).__init__(w,r1,r2,gamma,kappa)
self.origGamma=gamma
self.gammaStep=gammaStep
self.gammaMin=gammaMin
self.gammaSup=gammaSup
def performStep(self):
"""Overwrite performStep."""
#Pick random number to choose which superposition
rand_mode=np.random.random()
#Check which superposition to perform
for i in range(len(self.gammaVec)):
            #print self.gammaVec[i], " <= ", rand_mode , " < = "
if self.gammaVec[i]<=rand_mode and rand_mode<=self.gammaVec[i+1]:
self.superpositions[i].doStep()
# Special clause to adjust gamma
if i==self.gammaSup:
idxs=[0,1]
idxs.remove(self.gammaSup)
idx=idxs[0]
gammaNew=max(self.superpositions[idx].gamma-self.gammaStep,self.gammaMin)
self.updateGamma(gammaNew)
else:
self.setBackGamma()
break
def setBackGamma(self):
self.updateGamma(self.origGamma)
def updateGamma(self,gamma):
self.setGamma(gamma)
self.updateGammaDist()
def setOrigGamma(self,gamma):
self.origGamma=gamma
self.setGamma(gamma)
self.updateGammaDist()
def setGammaStep(self,gammaStep):
self.gammaStep=gammaStep
def setGammaMin(self,gammaMin):
self.gammaMin=gammaMin
| gpl-3.0 | -1,836,619,695,734,177,800 | 24.182203 | 81 | 0.595086 | false | 3.412981 | false | false | false |
myuuuuun/various | ContinuousAlgorithm/HW5/hw5-1.py | 1 | 1850 | #!/usr/bin/python
#-*- encoding: utf-8 -*-
"""
Copyright (c) 2015 @myuuuuun
Released under the MIT license.
"""
import math
import numpy as np
import functools
import matplotlib.pyplot as plt
import matplotlib.cm as cm
EPSIRON = 1.0e-8
# Return the array of values at x of the Legendre polynomials P0 through P_(length-1)
def legendre(x, length):
values = [1, x]
for i in range(2, length):
v = ((2*i-1)*x*values[i-1] - (i-1) * values[i-2]) / i
values.append(v)
return values
# Return the array of values at x of the Chebyshev polynomials P0 through P_(length-1)
def chebyshev(x, length):
values = []
for i in range(length):
v = np.cos(i * np.arccos(x))
values.append(v)
return values
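# Hand-checked example of the two recurrences above (added illustration, not
# part of the original script): P2(x) = (3x^2 - 1)/2 and T2(x) = 2x^2 - 1, so
# at x = 0.5 we expect P2 = -0.125 and T2 = -0.5.
def _sanity_check(x=0.5):
    p = legendre(x, 3)
    t = chebyshev(x, 3)
    assert abs(p[2] - (3 * x**2 - 1) / 2) < EPSIRON
    assert abs(t[2] - (2 * x**2 - 1)) < EPSIRON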
if __name__ == '__main__':
    # Common settings
length = 6
x_list = np.arange(-0.99, 1.00, 0.01)
f_matrix = np.zeros((length, 199), dtype=float)
# legendre
"""
for i, x in enumerate(x_list):
values = legendre(x, length)
for j in range(length):
f_matrix[j][i] = values[j]
fig, ax = plt.subplots()
plt.title("Legendre多項式")
plt.xlabel("x")
plt.ylabel("f")
plt.xlim(-1, 1)
for j in range(length):
plt.plot(x_list, f_matrix[j], color=cm.gist_earth(j/length), label='P{0}'.format(j))
plt.legend()
plt.show()
"""
"""
# chebyshev
for i, x in enumerate(x_list):
values = chebyshev(x, length)
for j in range(length):
f_matrix[j][i] = values[j]
fig, ax = plt.subplots()
plt.title("Chebyshev多項式")
plt.xlabel("x")
plt.ylabel("f")
plt.xlim(-1, 1)
for j in range(length):
plt.plot(x_list, f_matrix[j], color=cm.gist_earth(j/length), label='P{0}'.format(j))
plt.legend()
plt.show()
"""
| mit | 2,593,980,254,537,360,000 | 18.659091 | 92 | 0.561272 | false | 2.570579 | false | false | false |
Sythelux/Picarto.bundle | Contents/Libraries/Shared/util.py | 1 | 1121 | # from PicartoClientAPI import Language, ApiClient
def Lang(string): # fallback
return string
L = Lang
def generateTagline(details):
ret_string = L("viewers") + ": "
ret_string = "%s %d (%s: %d)" % (ret_string, details.viewers, L("total"), details.viewers_total)
return ret_string
def generateSummary(details):
ret_string = ""
for panel in details.description_panels:
ret_string += panel.title + "\r\n" + panel.body + "\r\n" + "\r\n"
return ret_string
def generateCountries(languages):
oc = ObjectContainer(
title2=u'%s' % "multistreams and records",
art=None,
content=ContainerContent.GenericVideos
)
ret_string = ""
# for lang_dict in languages:
# lang = ApiClient().deserialize_model(lang_dict, Language)
# ret_string += lang.name + "\r\n"
return oc
def generateExtras(details):
oc = ObjectContainer(
title2=u'%s' % "multistreams and records",
art=None,
content=ContainerContent.GenericVideos
)
    ret_string = ""
    for lang in details.languages:
        ret_string += lang.name + "\r\n"
return oc
| bsd-3-clause | -1,101,899,389,666,573,200 | 23.911111 | 100 | 0.628011 | false | 3.459877 | false | false | false |
sisoftrg/citrocan | app/decoder_old.py | 1 | 21244 | # Citrocan
# Copyright (c) 2016 sisoftrg
# The MIT License (MIT)
import sys
PY3 = sys.version_info >= (3, 0)
class Decoder(object):
connected = False
cb = {}
mfs = {}
lamps = {}
economy = False
enabled = False
lighting = False
brightness = 0x0c
ignition = 2
rpm = 0
speed = 0
power = 0
odometer = 0
out_temp = 0
message_id = 0
show_message = False
vin1 = ""
vin2 = ""
vin3 = ""
funcs = 0
silence = False
source = ""
srcs = ['---', 'Tuner', 'CD', 'CD Changer', 'Input AUX 1', 'Input AUX 2', 'USB', 'Bluetooth']
have_changer = False
cd_disk = 0
volume = 0
vol_change = False
track_intro = False
random = False
repeat = False
rds_alt = False
want_rdtxt = False
balance_lr = 0
show_balance_lr = False
balance_rf = 0
show_balance_rf = False
bass = 0
show_bass = False
treble = 0
show_treble = False
loudness = False
show_loudness = False
autovol = 0
show_autovol = 0
ambience = ""
ambs = {0x03: 'None', 0x07: 'Classical', 0x0b: 'Jazz-Blues', 0x0f: 'Pop-Rock', 0x13: 'Vocal', 0x17: 'Techno'}
ambience_show = False
radio_mem = 0
radio_band = ""
bands = ['---', ' FM1', ' FM2', 'DAB', 'FMAST', 'AM', 'AMLW', '---']
radio_freq = ""
ast_scan = False
pty_scan = False
radio_scan = False
rds_scan = False
show_radios = False
want_rds = False
have_rds = False
want_ta = False
have_ta = False
traffic = False
want_reg = False
want_pty = False
show_pty = False
pty_mode = 0
pty_sel = 0
pty_cur = ""
ptys = {0x00: 'Deactivate', 0x01: 'News', 0x02: 'Affairs', 0x03: 'Info', 0x04: 'Sport', 0x05: 'Educate', 0x06: 'Drama', 0x07: 'Culture',
0x08: 'Science', 0x09: 'Varied', 0x0A: 'Pop M', 0x0B: 'Rock M', 0x0C: 'Easy M', 0x0D: 'Light M', 0x0E: 'Classics', 0x0F: 'Other M',
0x10: 'Weather', 0x11: 'Finance', 0x12: 'Children', 0x13: 'Social', 0x14: 'Religion', 0x15: 'Phone In', 0x16: 'Travel',
0x17: 'Leisure', 0x18: 'Jazz', 0x19: 'Country', 0x1A: 'Nation M', 0x1B: 'Oldies', 0x1C: 'Folk M', 0x1D: 'Document'}
rds_name = ""
cd_tracks = 0
cd_len = ""
cd_mp3 = 0
cd_pause = False
track_num = 0
track_time = ""
track_len = ""
track_name = ""
track_author = ""
rdtxt = ""
rkeys = {}
msgs = {0x00: 'Diagnosis ok', 0x01: 'Engine temperature too high', 0x03: 'Coolant circuit level too low', 0x04: 'Check engine oil level', 0x05: 'Engine oil pressure too low',
0x08: 'Braking system faulty', 0x0A: 'Air suspension ok (picture)', 0x0B: 'Door, boot, bonnet and fuel tank open', 0x0D: 'Tyre puncture(s) detected',
0x0F: 'Risk of particle filter blocking', 0x11: 'Suspension faulty: max.speed 90 km/h', 0x12: 'Suspension faulty', 0x13: 'Power steering faulty', 0x14: '10km/h!',
0x61: 'Handbrake on', 0x62: 'Handbrake off', 0x64: 'Handbrake control faulty: auto handbrake activated', 0x67: 'Brake pads worn', 0x68: 'Handbrake faulty',
0x69: 'Mobile deflector faulty', 0x6A: 'ABS braking system faulty', 0x6B: 'ESP / ASR system faulty', 0x6C: 'Suspension faulty', 0x6D: 'Power steering faulty',
0x6E: 'Gearbox faulty', 0x6F: 'Cruise control system faulty', 0x73: 'Ambient brightness sensor faulty', 0x74: 'Sidelamp bulb(s) faulty',
0x75: 'Automatic headlamp adjustment faulty', 0x76: 'Directional headlamps faulty', 0x78: 'Airbag faulty', 0x79: 'Active bonnet faulty', 0x7A: 'Gearbox faulty',
0x7B: 'Apply foot on brake and lever in position "N"', 0x7D: 'Presence of water in diesel fuel filter', 0x7E: 'Engine management system faulty',
0x7F: 'Depollution system faulty', 0x81: 'Particle filter additive level too low', 0x83: 'Electronic anti-theft faulty', 0x86: 'Right hand side door faulty',
0x87: 'Left hand side door faulty', 0x89: 'Space measuring system faulty', 0x8A: 'Battery charge or electrical supply faulty', 0x8D: 'Tyre pressure(s) too low',
0x92: 'Warning!', 0x95: 'Info!', 0x96: 'Info!', 0x97: 'Anti-wander system lane-crossing warning device faulty', 0x9D: 'Foglamp bulb(s) faulty',
0x9E: 'Direction indicator(s) faulty', 0xA0: 'Sidelamp bulb(s) faulty', 0xA1: 'Parking lamps active', 0xCD: 'Cruise control not possible: speed too low',
0xCE: 'Control activation not possible: enter the speed', 0xD1: 'Active bonnet deployed', 0xD2: 'Front seat belts not fastened',
0xD3: 'Rear right hand passenger seat belts fastened', 0xD7: 'Place automatic gearbox in position "P"', 0xD8: 'Risk of ice', 0xD9: 'Handbrake!',
0xDE: 'Door, boot, bonnet and fuel tank open', 0xDF: 'Screen wash fluid level too low', 0xE0: 'Fuel level too low', 0xE1: 'Fuel circuit deactivated',
0xE3: 'Remote control battery flat', 0xE4: 'Check and re-initialise tyre pressure', 0xE5: 'Tyre pressure(s) not monitored',
0xE7: 'High speed, check tyre pressures correct', 0xE8: 'Tyre pressure(s) too low', 0xEA: 'Hands-free starting system faulty',
0xEB: 'Starting phase has failed (consult handbook)', 0xEC: 'Prolonged starting in progress', 0xED: 'Starting impossible: unlock the steering',
0xEF: 'Remote control detected', 0xF0: 'Diagnosis in progress...', 0xF1: 'Diagnosis completed', 0xF7: 'Rear LH passenger seatbelt unfastened',
0xF8: 'Rear center passenger seatbelt unfastened', 0xF9: 'Rear RH passenger seatbelt unfastened'}
def __init__(self, ss):
self.ss = ss
@staticmethod
def get_str(b):
if PY3:
ba = bytes(b).strip(b'\0')
else:
ba = bytes(b''.join([chr(x) for x in b if x]))
try:
s = ba.decode('utf8')
except UnicodeDecodeError:
try:
s = ba.decode('cp1251', errors='replace')
except UnicodeDecodeError:
s = "<bad name>"
            except LookupError: # kivy's p4a blacklists nonstandard encodings by default, see blacklist.txt
s = "<wrong program build>"
return s.strip()
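    # Illustrative decoding example (bytes chosen for the example, not from a
    # real CAN capture): get_str([0xd0, 0x9c, 0xd0, 0xa3, 0x00]) strips the
    # trailing NUL, decodes as UTF-8 and yields u'МУ'; byte sequences that are
    # not valid UTF-8 fall back to cp1251 with replacement characters.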
def parse_mf(self, ci, cl, cd):
typ = (cd[0] & 0xf0) >> 4
arg = cd[0] & 0x0f
if typ == 0: # single
# print("got mf:", hex(ci), ''.join('{:02x}'.format(x) for x in cd[1:min(1 + arg, cl)]))
return (arg, cd[1:min(1 + arg, cl)])
elif typ == 1: # first
fl = arg * 256 + cd[1]
el = fl - (cl - 2)
self.mfs[ci] = [fl, el, cd[2:cl]]
elif typ == 2: # consecutive. TODO: check frame order!
if ci not in self.mfs:
return None
el = self.mfs[ci][1]
if el > cl - 1:
self.mfs[ci][1] -= cl - 1
self.mfs[ci][2] += cd[1:cl]
else:
fl = self.mfs[ci][0]
d = self.mfs[ci][2] + cd[1:min(cl, el + 2)]
del self.mfs[ci]
# print("got mf:", hex(ci), ''.join('{:02x}'.format(x) for x in d))
return (fl, d)
elif typ == 3: # flow, packets not for us
pass
return None
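    # Worked example for the ISO-TP style framing above (made-up payload): a
    # first byte of 0x03 marks a single frame carrying 3 payload bytes, so
    # parse_mf(0x0a4, 4, [0x03, 0x41, 0x42, 0x43]) returns (3, [0x41, 0x42, 0x43]);
    # first (0x1N) and consecutive (0x2N) frames are buffered in self.mfs until
    # the complete multi-frame payload can be returned.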
def decode(self, ci, cl, cd):
if ci in self.cb and cd == self.cb[ci]:
return False
self.cb[ci] = cd
if ci == 0x036: # bsi: ignition
self.economy = bool(cd[2] & 0x80)
self.lighting = bool(cd[3] & 0x20)
self.brightness = cd[3] & 0x0f
self.ignition = cd[4] & 0x07
elif ci == 0x0a4: # current cd track, multiframe
dd = self.parse_mf(ci, cl, cd)
if not dd:
return False
cd = dd[1]
# cd track info
#got mf: 0xa4 20009801546865204372616e626572726965730000000000416e696d616c20496e7374696e63740000000000
#got mf: 0xa4 2000000000
# radiotext
#got mf: 0xa4 10000000544154415220524144494f53202020202020202020202020414c4c49202d2052454b4c414d41202838353532292039322d30302d383220202020202020202020
#got mf: 0xa4 10000000544154415220524144494f53492038372e3520464d204348414c4c49202d2052454b4c414d41202838353532292039322d30302d383220202020202020202020
#got mf: 0xa4 1000000000
page = (cd[0] >> 4) & 0x0f
if page == 1:
self.rdtxt = self.get_str(cd[4:])
elif page == 2:
ha = bool(cd[2] & 0x10)
self.track_author = ha and self.get_str(cd[4:24]) or ""
self.track_name = self.get_str(ha and cd[24:44] or cd[4:24])
elif ci == 0x0b6: # bsi: speed info
self.rpm = cd[0] * 256 + (cd[1] >> 3)
self.speed = cd[2] * 256 + cd[3]
elif ci == 0x0e6: # bsi: voltage
self.power = cd[5] / 20 + 7.2
elif ci == 0x0f6: # bsi: info
self.odometer = cd[2] * 65536 + cd[3] * 256 + cd[4]
self.out_temp = cd[6] / 2 - 39.5
self.lamps['reverse'] = bool(cd[7] & 0x80)
self.lamps['right'] = bool(cd[7] & 0x02)
self.lamps['left'] = bool(cd[7] & 0x01)
#elif ci == 0x120: # bsi: warning log
# pass
elif ci == 0x125: # track list, multiframe
dd = self.parse_mf(ci, cl, cd)
if not dd:
return False
cd = dd[1]
# cd list
#got mf: 0x125 900100108111524f4f5400000000000000000000000000000000
#got mf: 0x125 986f5d41f120696c6c5f6e696e6f5f2d5f686f775f63616e5f696b6f726e2d6b6973732d6d73742e6d70330000006d797a756b612e72755f332e5f42756c6c65745f6d797a756b612e72755f372e5f5374617469632d
#got mf: 0x125 00
# radio list, band
#got mf: 0x125 100100103130332e3230000000
#got mf: 0x125 20500000353331000000000000353331000000000000353331000000000000363330000000000000353331000000000000353331000000000000
#got mf: 0x125 201000004543484f204d534b90464d2039302e3920903130312e354d485ab0504c555320202020903130362e393000002035392d33342d3233b0
#got mf: 0x125 20200000464d2039302e39209031363a31363a333790343634375f31335fb03130332e363000000039302e36300000000044414e434520202090
#got mf: 0x125 40200000464d2039302e39209031363a31363a333790343634375f31335fb03130332e363000000039302e36300000000044414e434520202090
#got mf: 0x125 00
page = (cd[0] >> 4) & 0x0f
elif ci == 0x128: # bsi: lamps
self.lamps['belt_fl'] = bool(cd[0] & 0x40)
self.lamps['doors'] = bool(cd[1] & 0x10)
self.lamps['sidelight'] = bool(cd[4] & 0x80)
self.lamps['beam_l'] = bool(cd[4] & 0x40)
self.lamps['beam_h'] = bool(cd[4] & 0x20)
self.lamps['fog_f'] = bool(cd[4] & 0x10)
self.lamps['fog_r'] = bool(cd[4] & 0x08)
self.lamps['lefti'] = bool(cd[4] & 0x04)
self.lamps['righti'] = bool(cd[4] & 0x02)
#elif ci == 0x131: # cmd to cd changer
# pass
elif ci == 0x165: # radio status
self.enabled = bool(cd[0] & 0x80)
self.silence = bool(cd[0] & 0x20)
self.source = self.srcs[(cd[2] >> 4) & 7]
self.have_changer = bool(cd[1] & 0x10)
#self.cd_disk = ((cd[1] >> 5) & 3) ^ 1 # for b7?
#elif ci == 0x167: # display: settings?
# pass
elif ci == 0x1a1: # bsi: info messages
self.show_message = bool(cd[2] & 0x80)
if cd[0] == 0x80:
self.message_id = cd[1]
elif ci == 0x1a5: # volume
self.volume = cd[0] & 0x1f
self.vol_change = bool(cd[0] & 0x80)
#elif ci == 0x1d0: # climate: control info
# pass
elif ci == 0x1e0: # radio settings
self.track_intro = bool(cd[0] & 0x20)
self.random = bool(cd[0] & 0x04)
self.repeat = bool(cd[1] & 0x80)
self.rds_alt = bool(cd[2] & 0x20)
self.want_rdtxt = bool(cd[4] & 0x20)
elif ci == 0x1e5: # audio settings
self.balance_lr = ((cd[0] + 1) & 0x0f) - (cd[0] ^ 0x40 & 0x40) >> 2
self.show_balance_lr = bool(cd[0] & 0x80)
self.balance_rf = ((cd[1] + 1) & 0x0f) - (cd[1] ^ 0x40 & 0x40) >> 2
self.show_balance_rf = bool(cd[1] & 0x80)
self.bass = ((cd[2] + 1) & 0x0f) - (cd[2] ^ 0x40 & 0x40) >> 2
self.show_bass = bool(cd[2] & 0x80)
self.treble = ((cd[4] + 1) & 0x0f) - (cd[4] ^ 0x40 & 0x40) >> 2
self.show_treble = bool(cd[4] & 0x80)
self.loudness = bool(cd[5] & 0x40)
self.show_loudness = bool(cd[5] & 0x80)
self.autovol = cd[5] & 7
self.show_autovol = bool(cd[5] & 0x10)
self.ambience = self.ambs.get(cd[6] & 0x1f, "Unk:" + hex(cd[6] & 0x1f))
self.ambience_show = bool(cd[6] & 0x40)
elif ci == 0x21f: # remote keys under wheel
self.rkeys['fwd'] = bool(cd[0] & 0x80)
self.rkeys['rew'] = bool(cd[0] & 0x40)
self.rkeys['volup'] = bool(cd[0] & 0x08)
self.rkeys['voldn'] = bool(cd[0] & 0x04)
self.rkeys['src'] = bool(cd[0] & 0x02)
self.rkeys['scroll'] = cd[1]
#elif ci == 0x221: # trip computer
# pass
elif ci == 0x225: # radio freq
if cl == 6: # b7, from autowp docs
self.radio_mem = cd[0] & 7
self.radio_band = self.bands[(cd[1] >> 5) & 7]
freq = (cd[1] & 0x0f) * 256 + cd[2]
elif cl == 5: # b3/b5
self.pty_scan = bool(cd[0] & 0x01)
self.radio_scan = bool(cd[0] & 0x02)
self.rds_scan = bool(cd[0] & 0x04)
self.ast_scan = bool(cd[0] & 0x08)
self.show_radios = bool(cd[0] & 0x80)
self.radio_mem = (cd[1] >> 4) & 7
self.radio_band = self.bands[(cd[2] >> 4) & 7]
freq = (cd[3] & 0x0f) * 256 + cd[4]
if self.radio_band in ('AMMW', 'AMLW'):
self.radio_freq = "%d KHz" % freq
else:
self.radio_freq = "%.2f MHz" % (freq * 0.05 + 50)
elif ci == 0x265: # rds
self.want_rds = bool(cd[0] & 0x80)
self.have_rds = bool(cd[0] & 0x20)
self.want_ta = bool(cd[0] & 0x10)
self.have_ta = bool(cd[0] & 0x04)
self.traffic = bool(cd[0] & 0x02)
self.want_reg = bool(cd[0] & 0x01)
self.want_pty = bool(cd[1] & 0x80)
self.show_pty = bool(cd[1] & 0x40)
self.pty_mode = (cd[1] >> 4) & 3
self.pty_sel = cd[2] & 0x1f
pc = cd[3] & 0x1f
self.pty_cur = self.pty_mode in (1, 2) and pc and self.ptys.get(pc, "Unk:" + hex(pc)) or ""
#elif ci == 0x276: # bsi: date and time
# pass
elif ci == 0x2a5: # rds title
self.rds_name = self.get_str(cd) if cd[0] != 0 else None
elif ci == 0x2b6: # bsi: last 8 vin digits
self.vin3 = bytes(cd[:8]).decode()
elif ci == 0x2e1: # bsi: status of functions
self.funcs = (cd[0] << 16) + (cd[1] << 8) + cd[2]
#elif ci == 0x2e5: # hz
# pass
elif ci == 0x325: # cd tray info
self.cd_disk = cd[1] & 0x83
elif ci == 0x336: # bsi: first 3 vin letters
self.vin1 = bytes(cd[:3]).decode()
#elif ci == 0x361: # bsi: car settings
# pass
elif ci == 0x365: # cd disk info
self.cd_tracks = cd[0]
self.cd_len = "%02d:%02d" % (cd[1], cd[2]) if cd[1] != 0xff else "--:--"
self.cd_mp3 = bool(cd[3] & 0x01)
elif ci == 0x3a5: # cd track info
self.track_num = cd[0]
self.track_len = "%02d:%02d" % (cd[1], cd[2]) if cd[1] != 0xff else "--:--"
self.track_time = "%02d:%02d" % (cd[3], cd[4]) if cd[3] != 0xff else "--:--"
elif ci == 0x3b6: # bsi: middle 6 vin digits
self.vin2 = bytes(cd[:6]).decode()
elif ci == 0x3e5: # keypad
self.rkeys['menu'] = bool(cd[0] & 0x40)
self.rkeys['tel'] = bool(cd[0] & 0x10)
self.rkeys['clim'] = bool(cd[0] & 0x01)
self.rkeys['trip'] = bool(cd[1] & 0x40)
self.rkeys['mode'] = bool(cd[1] & 0x10)
self.rkeys['audio'] = bool(cd[1] & 0x01)
self.rkeys['ok'] = bool(cd[2] & 0x40)
self.rkeys['esc'] = bool(cd[2] & 0x10)
self.rkeys['dark'] = bool(cd[2] & 0x04)
self.rkeys['up'] = bool(cd[5] & 0x40)
self.rkeys['down'] = bool(cd[5] & 0x10)
self.rkeys['right'] = bool(cd[5] & 0x04)
self.rkeys['left'] = bool(cd[5] & 0x01)
#elif ci == 0x520: # hz
# pass
#elif ci == 0x5e0: # hw/sw radio info
# pass
else:
return False
return True
def visualize(self):
tuner = self.source == 'Tuner' and self.enabled
cd = self.source in ('CD', 'CD Changer') and self.enabled
aux = 'AUX' in self.source and self.enabled
if not self.enabled:
self.ss('icon', 'icon')
self.ss('name', 'Disabled')
self.ss('title', '')
elif aux:
self.ss('icon', 'linein')
self.ss('name', self.source)
self.ss('title', '')
elif tuner:
self.ss('icon', 'radio')
self.ss('name', (self.rds_scan or self.ast_scan or self.pty_scan) and "Wait..." or (self.traffic and "Traffic" or self.rds_name or self.radio_freq))
self.ss('title', self.pty_scan and self.pty_sel and ("PTY: " + self.ptys.get(self.pty_sel, "")) or (self.rds_scan and "RDS search.." or
(self.ast_scan and (self.radio_scan and "Autostore stations.." or "List in progress..")) or self.rdtxt))
elif cd:
self.ss('icon', self.cd_mp3 and 'cdmp3' or 'cdaudio')
self.ss('name', self.source == 'CD' and (self.cd_disk in (1, 3) and ('Track %d/%d' % (self.track_num, self.cd_tracks)) or "Wait...") or "CD Changer")
self.ss('title', self.track_name + (self.track_author and (" / %s" % self.track_author) or ""))
else:
self.ss('icon', 'icon')
self.ss('name', self.source)
self.ss('title', '')
self.ss('band', tuner and self.radio_band or "")
self.ss('info', tuner and self.rds_name and self.radio_freq or
(cd and ("%s %s%s" % (self.cd_pause and "×" or "»", self.track_time, self.track_len != "--:--" and " / " + self.track_len or "")) or ""))
self.ss('memch', tuner and not self.radio_scan and self.radio_mem and str(self.radio_mem) or "")
self.ss('dx', tuner and self.radio_scan and "DX" or "")
self.ss('ta', self.enabled and self.want_ta and "TA" or "")
self.ss('ta_ok', tuner and self.have_ta)
self.ss('pty', self.enabled and self.want_pty and "PTY" or "")
self.ss('pty_ok', tuner and self.pty_cur == self.ptys.get(self.pty_sel, ""))
self.ss('ptyname', tuner and self.enabled and self.rdtxt == "" and self.pty_cur or "")
self.ss('reg', tuner and self.want_reg and "REG" or "")
self.ss('rds', tuner and self.want_rds and "RDS" or "")
self.ss('rds_ok', tuner and self.have_rds)
self.ss('rdtxt_rnd', tuner and self.want_rdtxt and "RDTXT" or (cd and (self.random and "RDM" or (self.track_intro and "INT" or (self.repeat and "RPT")))) or "")
self.ss('loud', self.enabled and self.loudness and "LOUD" or "")
self.ss('vol', self.enabled and ("Vol: [b]%d[/b]" % self.volume) or "")
self.ss('volbar', self.enabled and self.volume or 0)
self.ss('temp', self.out_temp and ("[b]%.0f[/b]°C" % self.out_temp) or "[b]——[/b]°F")
self.ss('alert', not self.connected and "No connection" or (self.show_message and self.msgs.get(self.message_id, "") or ""))
self.ss('debug', "rpm=%d speed=%d power=%dV odometer=%d\neconomy=%d lighting=%d bright=%d ignition=%d funcs=%06x\n\nlamps=%s\n\nkeys=%s" % (
self.rpm, self.speed, self.power, self.odometer, self.economy, self.lighting, self.brightness, self.ignition, self.funcs, str(self.lamps), str(self.rkeys)))
def visualize_test(self):
self.ss('icon', "icon")
self.ss('name', "Name")
self.ss('title', "Title")
self.ss('band', "Band")
self.ss('info', "Info")
self.ss('memch', "0")
self.ss('dx', "DX")
self.ss('ta', "TA")
self.ss('ta_ok', True)
self.ss('pty', "PTY")
self.ss('pty_ok', True)
self.ss('ptyname', "PtyName")
self.ss('reg', "REG")
self.ss('rds', "RDS")
self.ss('rds_ok', True)
self.ss('rdtxt_rnd', "RDTXT")
self.ss('loud', "LOUD")
self.ss('vol', "Vol: [b]15[/b]")
self.ss('volbar', 15)
self.ss('temp', "[b]33[/b]°C")
self.ss('alert', "")
self.ss('debug', "some debug info")
| mit | 8,628,095,303,352,691,000 | 43.331942 | 199 | 0.538309 | false | 2.861089 | false | false | false |
jad-b/mlsl | mlsl/glcreate.py | 1 | 1639 | #!/usr/bin/env python2
"""
graphlab
========
Assists with registering, loading, and configuring GraphLab by Dato.
Note the python 2 shebang at the top of this file. At the time of this writing,
Graphlab does not support Python 3. - jdb, 2016May09
"""
import argparse
import os
import sys
import time
class VersionError(Exception):
def __init__(self, *args, **kwargs):
        super(VersionError, self).__init__(*args, **kwargs)
def load_graphlab():
if sys.version_info >= (3, 0):
raise VersionError("Graphlab is only available in Python 2")
start = time.clock() # noqa
import graphlab
gl_product_key = os.getenv('GLCREATE_PRODUCT_KEY', False)
if not gl_product_key:
print("Please set GLCREATE_PRODUCT_KEY")
return
graphlab.product_key.set_product_key(gl_product_key)
# Display graphlab canvas in notebook
graphlab.canvas.set_target('ipynb')
# Number of workers
graphlab.set_runtime_config('GRAPHLAB_DEFAULT_NUM_PYLAMBDA_WORKERS', 16)
since = time.clock() - start
print("Graphlab loaded in {:.3f} seconds".format(since))
return graphlab
def convert_to_csv(filename):
gl = load_graphlab()
sframe = gl.SFrame(filename)
noext_filename, _ = os.path.splitext(os.path.abspath(filename))
new_filename = noext_filename + '.csv'
df = sframe.to_dataframe()
df.to_csv(new_filename)
assert os.path.exists(new_filename)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file',
help='GraphLab file to convert to Pandas .csv')
args = parser.parse_args()
convert_to_csv(args.file)
| gpl-3.0 | 8,416,035,388,997,661,000 | 27.754386 | 79 | 0.662599 | false | 3.465116 | false | false | false |
LinguList/burmish | pyburmish/util.py | 2 | 1315 | from lingpy import Model
from lingpy.sequence.sound_classes import token2class
from pyburmish import burmish_path, load_burmish
from unicodedata import normalize
sca = Model('sca')
color = Model('color')
color.converter['⁵⁵'] = 'Crimson'
color.converter['³⁵'] = 'LightBlue'
color.converter['⁴'] = 'LightYellow'
color.converter['²'] = 'LightGreen'
color.converter['³'] = 'ForestGreen'
color.converter['³¹'] = 'Brown'
color.converter['¹'] = 'White'
color.converter['²¹'] = 'DarkOrange'
color.converter['³³'] = 'CornflowerBlue'
color.converter['⁵³'] = '#c86496'
color.converter['⁵¹'] = 'cyan'
_conv = {}
_conv['A'] = 'LightBlue'
_conv['E'] = 'Orange'
_conv['I'] = 'LightGreen'
_conv['O'] = 'white'
_conv['U'] = 'Crimson'
_conv['Y'] = 'LightYellow'
for sound in color.converter:
cls = token2class(sound, 'sca')
if cls in 'AEIOUY':
color.converter[sound] = _conv[cls]
def contains(syllable, sound):
_s = normalize('NFD', ''.join(syllable))
if sound in _s:
return True
return False
def is_aspirated(syllable):
return contains(syllable, 'ʰ')
def is_creaky(syllable):
return contains(syllable, '\u0330')
def is_aspirated_or_unvoiced(syllable):
if is_aspirated(syllable):
return True
return contains(syllable, '\u0325')
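# Illustrative checks (hypothetical syllables, not drawn from the wordlist):
# because contains() normalizes to NFD, combining marks can be searched
# directly, e.g. is_creaky(u'ma\u0330') is True and is_aspirated(u'tʰa') is
# True, while is_creaky(u'ma') is False.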
| gpl-2.0 | 3,261,640,945,445,530,000 | 23.807692 | 53 | 0.664341 | false | 2.648871 | false | false | false |
a10networks/a10sdk-python | a10sdk/core/enable/enable_management_service_https_acl_v6.py | 2 | 3426 | from a10sdk.common.A10BaseClass import A10BaseClass
class VeCfg(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ve_end: {"type": "number", "description": "VE port", "format": "number"}
:param ve_start: {"type": "number", "description": "VE port (VE Interface number)", "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "ve-cfg"
self.DeviceProxy = ""
self.ve_end = ""
self.ve_start = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class EthCfg(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ethernet_start: {"type": "number", "description": "Ethernet port (Ethernet Interface number)", "format": "interface"}
:param ethernet_end: {"type": "number", "description": "Ethernet port", "format": "interface"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "eth-cfg"
self.DeviceProxy = ""
self.ethernet_start = ""
self.ethernet_end = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class AclV6(A10BaseClass):
"""Class Description::
IPv6 ACL for HTTPS service.
Class acl-v6 supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param acl_name: {"description": "ACL name", "format": "string", "minLength": 1, "optional": false, "maxLength": 16, "type": "string"}
:param ve_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ve-end": {"type": "number", "description": "VE port", "format": "number"}, "ve-start": {"type": "number", "description": "VE port (VE Interface number)", "format": "number"}, "optional": true}}]}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param eth_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ethernet-start": {"type": "number", "description": "Ethernet port (Ethernet Interface number)", "format": "interface"}, "ethernet-end": {"type": "number", "description": "Ethernet port", "format": "interface"}, "optional": true}}]}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/enable-management/service/https/acl-v6/{acl_name}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "acl_name"]
self.b_key = "acl-v6"
self.a10_url="/axapi/v3/enable-management/service/https/acl-v6/{acl_name}"
self.DeviceProxy = ""
self.acl_name = ""
self.ve_cfg = []
self.uuid = ""
self.eth_cfg = []
for keys, value in kwargs.items():
setattr(self,keys, value)
| apache-2.0 | -9,146,458,870,826,519,000 | 37.066667 | 361 | 0.605079 | false | 3.652452 | false | false | false |
msscully/datamart | tests/test_variables.py | 1 | 6424 | from tests import TestCase
from werkzeug.urls import url_quote
from datamart.models import Variable
from datamart.models import Dimension
from datamart.models import User
from datamart.models import Role
from flask import url_for
class TestVariables(TestCase):
def test_show_variables_anon(self):
"""Does accessing /variables/ when not logged in redirect to /login?"""
response = self.client.get('/variables/', follow_redirects=False)
new_location='/login?next=%s' % url_quote('/variables/', safe='')
self.assertRedirects(response, location=new_location)
response = self.client.get('/variables/', follow_redirects=True)
assert 'Please log in to access this page.' in response.data
self.assertTemplateUsed(name='login.html')
def test_show_variables_non_admin(self):
"""Make sure logged in users can see the variables page."""
self.login('[email protected]','123456')
response = self._test_get_request('/variables/', 'variables.html')
assert 'Please log in to access this page.' not in response.data
self.logout()
def test_show_variables_admin(self):
"""Make sure logged in admins can see the variables page."""
self.login('[email protected]','123456')
response = self._test_get_request('/variables/', 'variables.html')
assert 'Please log in to access this page.' not in response.data
self.logout()
def test_variable_add(self):
"""Add a variable using /variables/add as admin."""
self.login('[email protected]', '123456')
self._test_get_request('/variables/add/', 'variable_edit.html')
new_var, variable_data = self.add_variable()
assert len(new_var) == 1
self.logout()
def add_dimension(self):
""" Add a dimension to testdb. Must be logged in w/ permissions. """
dim_name = 'Height / Length in feet'
dim = Dimension.query.filter_by(name=dim_name)
new_dim = None
if dim.count() == 0:
new_dim = Dimension()
new_dim.name = dim_name
new_dim.description = "Height / Length in feet"
new_dim.data_type = "Float"
self.db.session.add(new_dim)
self.db.session.commit()
else:
new_dim = dim.first()
return new_dim
def add_variable(self):
""" Add a variable to testdb. Must be logged in w/ permissions. """
new_dim = self.add_dimension()
variable_data = {
'name': 'length',
'description': "Subject height",
'dimension': new_dim.id
}
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
if len(new_var) != 1:
response = self.client.post('/variables/add/', data=variable_data)
assert 'Please fix errors and resubmit.' not in response.data
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
return new_var, variable_data
def add_role_to_variable(self, var_id, role_id):
var = Variable.query.get(var_id)
roles = [str(r.id) for r in var.roles]
roles.append(str(role_id))
variable_data = {
'name': var.name,
'description': var.description,
'dimension': var.dimension.id,
'roles': roles
}
response = self.client.post('/variables/%s/edit/' % var_id,
data=variable_data, follow_redirects=True)
assert 'Please fix errors and resubmit.' not in response.data
return response
def add_role_to_user(self, user_id, role):
user = User.query.get(user_id)
user.roles.append(role)
self.db.session.add(user)
self.db.session.commit()
def test_variable_edit(self):
"""Edit a variable at /variables/<ID>/edit/ as admin."""
self.login('[email protected]', '123456')
new_var, variable_data = self.add_variable()
        assert len(new_var) == 1
variable_data['name'] = 'Standing Length'
response = self.client.post('/variables/%s/edit/' % new_var[0].id,
data=variable_data,
headers={'Referer': url_for('datamart.variables_view')},
follow_redirects=True)
assert 'Variable updated' in response.data
assert 'Please fix errors and resubmit.' not in response.data
new_var = Variable.query.filter(Variable.name==variable_data['name']).all()
        assert len(new_var) == 1
self.logout()
def test_variable_by_role(self):
"""Are variables only displayed if a user has the correct role?"""
self.login('[email protected]', '123456')
new_var, variable_data = self.add_variable()
assert len(new_var) == 1
new_role = Role(name='AdminRole', description='AdminRole')
self.db.session.add(new_role)
self.db.session.commit()
role_id = new_role.id
response = self.add_role_to_variable(new_var[0].id, role_id)
assert 'Variable updated' in response.data
assert 'Please fix errors and resubmit' not in response.data
new_var = Variable.query.join(Role, Variable.roles).filter(Role.id == role_id)
        assert new_var.count() == 1
var_name = new_var.first().name
response = self.client.get('/variables/')
assert var_name not in response.data
assert new_role.name not in response.data
user = User.query.filter_by(username='admin')
user_id = user.first().id
self.add_role_to_user(user_id, new_role)
response = self.client.get('/variables/')
assert new_role.name in response.data
assert var_name in response.data
self.logout()
self.login('[email protected]', '123456')
response = self.client.get('/variables/')
assert new_role.name not in response.data
assert var_name not in response.data
self.logout()
self.login('[email protected]', '123456')
user = User.query.filter_by(username='demo')
user_id = user.first().id
self.add_role_to_user(user_id, new_role)
response = self.client.get('/variables/')
assert new_role.name in response.data
assert var_name in response.data
self.logout()
| mit | -1,339,770,571,371,663,400 | 42.114094 | 92 | 0.602584 | false | 3.924252 | true | false | false |
shentianxiao/language-style-transfer | code/options.py | 1 | 2956 | import sys
import argparse
import pprint
def load_arguments():
argparser = argparse.ArgumentParser(sys.argv[0])
argparser.add_argument('--train',
type=str,
default='')
argparser.add_argument('--dev',
type=str,
default='')
argparser.add_argument('--test',
type=str,
default='')
argparser.add_argument('--online_testing',
type=bool,
default=False)
argparser.add_argument('--output',
type=str,
default='')
argparser.add_argument('--vocab',
type=str,
default='')
argparser.add_argument('--embedding',
type=str,
default='')
argparser.add_argument('--model',
type=str,
default='')
argparser.add_argument('--load_model',
type=bool,
default=False)
argparser.add_argument('--batch_size',
type=int,
default=64)
argparser.add_argument('--max_epochs',
type=int,
default=20)
argparser.add_argument('--steps_per_checkpoint',
type=int,
default=1000)
argparser.add_argument('--max_seq_length',
type=int,
default=20)
argparser.add_argument('--max_train_size',
type=int,
default=-1)
argparser.add_argument('--beam',
type=int,
default=1)
argparser.add_argument('--dropout_keep_prob',
type=float,
default=0.5)
argparser.add_argument('--n_layers',
type=int,
default=1)
argparser.add_argument('--dim_y',
type=int,
default=200)
argparser.add_argument('--dim_z',
type=int,
default=500)
argparser.add_argument('--dim_emb',
type=int,
default=100)
argparser.add_argument('--learning_rate',
type=float,
default=0.0005)
#argparser.add_argument('--learning_rate_decay',
# type=float,
# default=0.5)
argparser.add_argument('--rho', # loss_rec + rho * loss_adv
type=float,
default=1)
argparser.add_argument('--gamma_init', # softmax(logit / gamma)
type=float,
default=0.1)
argparser.add_argument('--gamma_decay',
type=float,
default=1)
argparser.add_argument('--gamma_min',
type=float,
default=0.1)
argparser.add_argument('--filter_sizes',
type=str,
default='1,2,3,4,5')
argparser.add_argument('--n_filters',
type=int,
default=128)
args = argparser.parse_args()
print '------------------------------------------------'
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(vars(args))
print '------------------------------------------------'
return args
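# Illustrative use (not in the original file): args = load_arguments() parses
# sys.argv, so a command line containing "--dim_emb 100 --rho 1" yields
# args.dim_emb == 100 and args.rho == 1.0 for the training code of this
# repository, which is not shown here.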
| apache-2.0 | 5,124,892,340,955,779,000 | 27.980392 | 79 | 0.497294 | false | 4.157525 | false | false | false |
82Flex/DCRM | WEIPDCRM/views/admin/release.py | 1 | 1841 | # coding=utf-8
"""
DCRM - Darwin Cydia Repository Manager
Copyright (C) 2017 WU Zheng <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import unicode_literals
from django.contrib.admin.views.decorators import staff_member_required
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.safestring import mark_safe
from WEIPDCRM.models.release import Release
from WEIPDCRM.models.setting import Setting
@staff_member_required
def set_default_view(request, release_id):
"""
:param release_id: The release
:param request: Django Request
:return: Redirect Response
"""
release_instance = Release.objects.get(id=release_id)
messages.info(request, mark_safe(_(
"Active release \"<a href=\"{release_url}\">{release}</a>\" has been set.").format(
release_url=release_instance.get_admin_url(),
release=str(release_instance)
)
))
setting_instance = Setting.objects.get()
setting_instance.active_release = release_instance
setting_instance.save()
return redirect(setting_instance.get_admin_url())
| agpl-3.0 | -456,847,748,279,306,400 | 32.472727 | 91 | 0.738729 | false | 3.967672 | false | false | false |
google-research/sound-separation | models/tools/process_wav_stitching.py | 1 | 12647 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Process a .wav file in a stitching mode with a separation model.
The stitching mode can be used to process long-form audio where for example
we would like to use a 2-speaker separation model in a long meeting recording
containing more than 2 speakers but where we assume there are not more than 2
speakers active in a block_size window. So, that our 2-speaker separation model
can run for the whole meeting in a block-by-block fashion producing two tracks
containing non-overlapping speech regardless of the total number of speakers
in the meeting.
python3 process_wav_stitching \
--model_dir /tmp/mixable_sss_8mic_model/ \
--input /tmp/libricss_ov40.wav \
--output /tmp/libricss_ov40_sss_8mic_bf2_10s_processed.wav \
--block_size_in_seconds 10 --permutation_invariant True --window_type vorbis \
--input_channels 8 \
--output_tensor "model_iter_1/beamformed_waveforms:0" \
--write_outputs_separately True
"""
# pylint: enable=line-too-long
import argparse
import os
from typing import Tuple, Optional
import inference
import numpy as np
import stitching
import tensorflow.compat.v1 as tf
strtobool = inference.strtobool
def _extract_blocks_from_input(input_wav_file: str,
num_samples_in_block: int,
input_channels: int = 0,
scale_input: bool = False,
window_type: str = 'rectangular',
) -> Tuple[np.ndarray, np.ndarray]:
"""Reads input wav file and extracts blocks from it.
Args:
input_wav_file: Input signal .wav file path.
num_samples_in_block: Block size in samples.
input_channels: If positive, truncate/extend the input signal to
this number of channels, otherwise keep all channels at the input.
scale_input: If True, scale input to have an absolute maximum of 0.99.
window_type: Window type to use.
Returns:
input_blocks_np: Input signal in blocks, np.ndarray, with shape
[num_blocks, num_mics, num_samples_in_block].
input_len_np: Input signal length in samples, integer.
sample_rate_np: Sample rate, integer.
"""
hop_size_in_samples = num_samples_in_block // 2
# Define the graph which extracts input blocks.
graph_input = tf.Graph()
with graph_input.as_default():
input_wav, sample_rate = inference.read_wav_file(
input_wav_file, input_channels, scale_input)
input_wav = tf.transpose(input_wav) # shape: [mics, samples]
input_len = tf.shape(input_wav)[-1]
# We pre-pad the input signal since we apply a window function and the
# first block's first half only has a single window function in the
# overlap-add reconstruction, so we pad it such that we can ignore the
# first half after reconstruction by overlap-add.
input_wav = tf.pad(input_wav, [[0, 0], [hop_size_in_samples, 0]])
input_blocks = tf.signal.frame(input_wav,
num_samples_in_block,
hop_size_in_samples,
pad_end=True)
input_blocks *= stitching.get_window(window_type, num_samples_in_block)
# Transpose to make blocks as batch items.
input_blocks = tf.transpose(input_blocks, (1, 0, 2))
# input_blocks has shape (batch/blocks, mics, samples_in_block)
# First graph is used to extract the input blocks from the input wav file.
with tf.Session(graph=graph_input) as sess:
input_blocks_np, input_len_np, sample_rate_np = sess.run(
[input_blocks, input_len, sample_rate])
return input_blocks_np, input_len_np, sample_rate_np
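# Illustrative shape check (not from the original file): for a mono 60 s input
# at 16 kHz with block_size_in_seconds=10, num_samples_in_block is 160000 and
# the hop is 80000, so after the hop-length pre-pad tf.signal.frame produces
# ceil(1040000 / 80000) = 13 half-overlapping blocks and input_blocks_np has
# shape (13, 1, 160000).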
def _run_model_for_blocks(input_blocks_np: np.ndarray,
model_dir: str,
checkpoint: Optional[str],
input_tensor_name: str,
output_tensor_name: str) -> np.ndarray:
"""Runs separation model for each block.
The input is a multi-channel signal, but the output is a single channel
output per source signal.
Args:
input_blocks_np: Input mixture signal samples, np.ndarray with shape
[num_blocks, num_mics, num_samples_in_block].
model_dir: Model directory with at least one checkpoint and inference.meta
file.
checkpoint: If not None, checkpoint path to use, otherwise use the
latest checkpoint in the model_dir.
input_tensor_name: The name of the input tensor in the model.
output_tensor_name: The name of the output tensor in the model.
Returns:
output_blocks_np: Output signal samples, np.ndarray with shape
[num_blocks, num_sources, num_samples_in_block].
"""
model_graph_filename = os.path.join(model_dir, 'inference.meta')
tf.logging.info('Importing meta graph: %s', model_graph_filename)
if not checkpoint:
checkpoint = tf.train.latest_checkpoint(model_dir)
# Use separation model.
separation_model = inference.SeparationModel(
checkpoint, model_graph_filename, input_tensor_name,
output_tensor_name)
output_blocks = []
for i in range(input_blocks_np.shape[0]):
print('Processing block %d of %d...' % (i+1, input_blocks_np.shape[0]))
output_blocks.append(separation_model.separate(input_blocks_np[i]))
output_blocks_np = np.stack(output_blocks, axis=0)
return output_blocks_np
def _resolve_permutation_and_write_output(
output_wav_file: str, sample_rate: float,
output_blocks_np: np.ndarray, input_len_np: np.ndarray,
window_type: str, permutation_invariant: bool,
output_channels: int, write_outputs_separately: bool):
"""Resolves permutation across blocks and writes output .wav files.
Args:
output_wav_file: Output .wav file path.
sample_rate: Sampling rate for the output signals.
output_blocks_np: Output signal in blocks, np.ndarray with shape
[num_blocks, num_sources, num_samples_in_block].
input_len_np: Input signal length in samples, so we can truncate the
output(s) to this length when writing.
window_type: Window type to use.
permutation_invariant: If True, the model is trained with a
permutation invariant objective, so the output order of sources
are arbitrary.
output_channels: If positive, the number of sources to output, otherwise
output all sources.
write_outputs_separately: If True, write output for each source in a
separate file derived from the output_wav_file path, otherwise write
them in a single multi-channel .wav file.
Returns:
Nothing, but writes the output signals into output path(s).
"""
# Define a graph which resolves permutation if required and writes
# output signals.
num_samples_in_block = output_blocks_np.shape[-1]
num_sources = output_blocks_np.shape[1]
hop_samples = num_samples_in_block // 2
graph_output = tf.Graph()
with graph_output.as_default():
window = stitching.get_window(window_type, num_samples_in_block)
output_blocks_placeholder = tf.placeholder(
tf.float32, shape=(None, num_sources, num_samples_in_block))
input_len_placeholder = tf.placeholder(tf.int32, shape=())
output_blocks = output_blocks_placeholder
if permutation_invariant:
output_blocks = stitching.sequentially_resolve_permutation(
output_blocks, window)
output_blocks = tf.transpose(output_blocks, (1, 0, 2))
# output_blocks now has shape (sources, blocks, samples)
# We apply the window twice since its overlap-added squared sum is 1.0.
output_blocks *= window
output_wavs = tf.signal.overlap_and_add(output_blocks, hop_samples)
output_wavs = tf.transpose(output_wavs)
# We ignore the padded first hop_samples samples.
output_wavs = output_wavs[
hop_samples: input_len_placeholder + hop_samples, :]
write_output_ops = inference.write_wav_file(
output_wav_file, output_wavs, sample_rate=sample_rate,
num_channels=num_sources,
output_channels=output_channels,
write_outputs_separately=write_outputs_separately,
channel_name='source')
# The graph is used to resolve permutation across blocks if required,
# and writes the output source signals.
with tf.Session(graph=graph_output) as sess:
sess.run(write_output_ops,
feed_dict={output_blocks_placeholder: output_blocks_np,
input_len_placeholder: input_len_np})
def main():
parser = argparse.ArgumentParser(
description='Process a long mixture .wav file to separate into sources '
'by using block processing and combining block outputs through '
'stitching.')
parser.add_argument(
'-i', '--input', help='Input .wav file.', required=True, type=str)
parser.add_argument(
'-o', '--output', help='Output .wav file.', required=True, type=str)
parser.add_argument(
'-m', '--model_dir', help='Model root directory, required. '
'Must contain inference.meta and at least one checkpoint.', type=str)
parser.add_argument(
'-ic', '--input_channels', help='Truncate/pad input to this many '
'channels if positive.',
default=0, type=int)
parser.add_argument(
'-oc', '--output_channels', help='Limit the number of output sources to '
'this number, if positive.', default=0, type=int)
parser.add_argument(
'-it', '--input_tensor', default='input_audio/receiver_audio:0',
help='Name of tensor to which to feed input_wav.', type=str)
parser.add_argument(
'-ot', '--output_tensor', default='denoised_waveforms:0',
help='Name of tensor to output as output_wav.', type=str)
parser.add_argument(
'-wos', '--write_outputs_separately', default=True,
help='Write output source signals into separate wav files.',
type=strtobool)
parser.add_argument(
'-wt', '--window_type', default='rectangular', type=str,
help='Window type: rectangular, vorbis or kaiser-bessel-derived.')
parser.add_argument(
'-bs', '--block_size_in_seconds', default=10.0, type=float,
help='Block size used for stitching processing.')
parser.add_argument(
'-sr', '--sample_rate', default=16000, help='Sample rate.', type=int)
parser.add_argument(
'-pi', '--permutation_invariant', default=False, type=strtobool,
help='If True, perform permutation invariant stitching.')
parser.add_argument(
'-si', '--scale_input', default=False, help='If True, scale the input '
'signal such that its absolute maximum value is 0.99.', type=strtobool)
parser.add_argument(
'-c', '--checkpoint', default=None, help='Override for checkpoint path.')
args = parser.parse_args()
output_dir = os.path.dirname(args.output)
os.makedirs(output_dir, exist_ok=True)
# We run three tf sessions with three different graphs.
# TODO(user): In the future, we may find a way to run the whole
# process as a single tensorflow graph.
# To make it work, either (1) we would need to be able to run the inference
# graph in batch mode with a dynamic batch size, or (2) we should be able to
# import a graph and convert it to a tf function and
# sequentially obtain each block output from a block input in tensorflow
# using a while loop or similar graph looping construct. I tried but neither
# of these approaches worked for me, so we run three sessions.
# Make sure there are even number of samples in each block.
block_size_in_samples = 2 * int(
round(args.block_size_in_seconds * float(args.sample_rate) / 2.0))
input_blocks_np, input_len_np, sample_rate = _extract_blocks_from_input(
args.input, block_size_in_samples, args.input_channels,
args.scale_input, args.window_type)
assert sample_rate == args.sample_rate
output_blocks_np = _run_model_for_blocks(
input_blocks_np, args.model_dir, args.checkpoint, args.input_tensor,
args.output_tensor)
_resolve_permutation_and_write_output(
args.output, sample_rate, output_blocks_np, input_len_np,
args.window_type, args.permutation_invariant,
args.output_channels, args.write_outputs_separately)
if __name__ == '__main__':
main()
| apache-2.0 | 2,434,115,412,898,961,400 | 43.375439 | 79 | 0.686645 | false | 3.713153 | false | false | false |
dansanderson/picotool | tests/pico8/gff/gff_test.py | 1 | 2156 | #!/usr/bin/env python3
import unittest
from unittest.mock import Mock
from unittest.mock import patch
from pico8.gff import gff
class TestGff(unittest.TestCase):
def testGetFlags(self):
g = gff.Gff.empty()
g._data = bytearray([x for x in range(256)])
for x in range(256):
self.assertEqual(x, g.get_flags(x, gff.ALL))
self.assertEquals(gff.RED, g.get_flags(1, gff.RED))
self.assertEquals(0, g.get_flags(1, gff.ORANGE))
self.assertEquals(gff.RED, g.get_flags(3, gff.RED))
self.assertEquals(gff.ORANGE, g.get_flags(3, gff.ORANGE))
self.assertEquals(gff.RED | gff.ORANGE,
g.get_flags(3, gff.RED | gff.ORANGE))
self.assertEquals(gff.RED | gff.ORANGE,
g.get_flags(3, gff.ALL))
def testSetFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.RED | gff.PEACH))
g.set_flags(0, gff.ORANGE)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH | gff.ORANGE,
g.get_flags(0, gff.ALL))
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.RED | gff.PEACH))
def testClearFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
g.clear_flags(0, gff.BLUE)
self.assertEqual(gff.RED | gff.PEACH,
g.get_flags(0, gff.ALL))
def testResetFlags(self):
g = gff.Gff.empty()
g.set_flags(0, gff.RED | gff.BLUE | gff.PEACH)
self.assertEqual(gff.RED | gff.BLUE | gff.PEACH,
g.get_flags(0, gff.ALL))
g.reset_flags(0, gff.BLUE)
self.assertEqual(gff.BLUE,
g.get_flags(0, gff.ALL))
if __name__ == '__main__':
unittest.main()
| mit | -2,706,116,116,492,286,500 | 35.542373 | 69 | 0.543599 | false | 2.90175 | true | false | false |
windeye/spark | python/pyspark/worker.py | 1 | 3782 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Worker that receives input from Piped RDD.
"""
import os
import sys
import time
import socket
import traceback
from base64 import standard_b64decode
# CloudPickler needs to be imported so that depicklers are registered using the
# copy_reg module.
from pyspark.accumulators import _accumulatorRegistry
from pyspark.broadcast import Broadcast, _broadcastRegistry
from pyspark.cloudpickle import CloudPickler
from pyspark.files import SparkFiles
from pyspark.serializers import write_with_length, read_with_length, write_int, \
read_long, write_long, read_int, dump_pickle, load_pickle, read_from_pickle_file
def load_obj(infile):
return load_pickle(standard_b64decode(infile.readline().strip()))
def report_times(outfile, boot, init, finish):
write_int(-3, outfile)
write_long(1000 * boot, outfile)
write_long(1000 * init, outfile)
write_long(1000 * finish, outfile)
def main(infile, outfile):
boot_time = time.time()
split_index = read_int(infile)
if split_index == -1: # for unit tests
return
# fetch name of workdir
spark_files_dir = load_pickle(read_with_length(infile))
SparkFiles._root_directory = spark_files_dir
SparkFiles._is_running_on_worker = True
# fetch names and values of broadcast variables
num_broadcast_variables = read_int(infile)
for _ in range(num_broadcast_variables):
bid = read_long(infile)
value = read_with_length(infile)
_broadcastRegistry[bid] = Broadcast(bid, load_pickle(value))
# fetch names of includes (*.zip and *.egg files) and construct PYTHONPATH
sys.path.append(spark_files_dir) # *.py files that were added will be copied here
num_python_includes = read_int(infile)
for _ in range(num_python_includes):
sys.path.append(os.path.join(spark_files_dir, load_pickle(read_with_length(infile))))
# now load function
func = load_obj(infile)
bypassSerializer = load_obj(infile)
if bypassSerializer:
dumps = lambda x: x
else:
dumps = dump_pickle
init_time = time.time()
iterator = read_from_pickle_file(infile)
try:
for obj in func(split_index, iterator):
write_with_length(dumps(obj), outfile)
except Exception as e:
write_int(-2, outfile)
write_with_length(traceback.format_exc(), outfile)
sys.exit(-1)
finish_time = time.time()
report_times(outfile, boot_time, init_time, finish_time)
# Mark the beginning of the accumulators section of the output
write_int(-1, outfile)
for aid, accum in _accumulatorRegistry.items():
write_with_length(dump_pickle((aid, accum._value)), outfile)
write_int(-1, outfile)
if __name__ == '__main__':
# Read a local port to connect to from stdin
java_port = int(sys.stdin.readline())
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("127.0.0.1", java_port))
sock_file = sock.makefile("a+", 65536)
main(sock_file, sock_file)
| apache-2.0 | -4,314,784,638,318,855,000 | 35.718447 | 93 | 0.705711 | false | 3.766932 | false | false | false |
bros-bioinfo/bros-bioinfo.github.io | COURS/M1/SEMESTRE2/ALGO/td5_Arbre.py | 1 | 3452 | # Exo 1
def pere(A, f):
return A[1][f]
def fils(A, p):
return A[2][p]
def racine(A):
return A[3]
def etiquette(A, f):
return A[4][f]
# Exo 2
def creer_arbre():
A = []
pere = {}
fils = {}
racine = None
return [A, pere, fils, racine]
def ajouter_racine(T, r):
# ajoute dans A un sommet
# s et le definis comme une
# racine
# assert (T[0] is None)
sommet = T[0]
sommet.append(r)
pere = T[1]
fils = T[2]
T[3] = r
pere[r] = None
fils[r] = []
def ajouter_fils(T, f, p):
# ajoute dans T le sommets
# et definit p comme son pere
# p existe dans T
# s n'exsiste pas dans T
sommet = T[0]
pere = T[1]
fils = T[2]
sommet.append(f)
pere[f] = p
fils[f] = []
fils[p].append(f)
# independant quelque soit l'implementation
# Exo 2
def taille_arbre(A):
return taille_sous_arbre(A, racine(A))
def taille_sous_arbre(A, s):
if s == None:
return 0
if (len(fils(A, s)) == 0):
return 1
taille = 1 # la racine s du sous-arbre
for f in fils(A, s):
taille += taille_sous_arbre(A, f)
return taille
# Exo 3
def parcourir_arbre(A, p):
return parcourir_sous_arbre(A, racine(A))
def parcourir_sous_arbre(A, s):
if s == None:
return p.append(racine(A))
if len(fils(A, s)) == 0:
return p.append(s)
for f in fils(A, s):
parcourir_sous_arbre(A, f)
p.append(s)
return p
# Exo 4
def parcours_niveau(A, h, p):
k = 0
return parcours_niveau_sous_arbre(A, h, k, racine(A), p)
def parcours_niveau_sous_arbre(A, h, k, s, p):
if s == None:
return p
if k == h:
return p.append(s)
for f in fils(A, s):
parcours_niveau_sous_arbre(A, h, k + 1, f, p)
return p
# Exo 5
def sommet_a_distance(A, s, h):
k = 0
p = []
parcours_niveau_sous_arbre(A, h, k, s, p)
return p
# Exo 6
def parcours_feuille(A, p):
return parcours_feuille_rec(A, racine(A), p)
def parcours_feuille_rec(A, s, p):
if s == None:
return p
if len(fils(A, s)) == 0:
return p.append(s)
for f in fils(A, s):
parcours_feuille_rec(A, f, p)
return p
# Exo 7
def parcours_sommets_internes(A, p):
return parcours_sommets_internes_rec(A, racine(A), p)
def parcours_sommets_internes_rec(A, s, p):
if s == None:
return p
if len(fils(A, s)) > 0 and s != racine(A):
p.append(s)
for f in fils(A, s):
parcours_sommets_internes_rec(A, f, p)
return p
# Exo 8
def ecrire(A):
nom = "arbre.dot"
fic = open(nom, "w")
fic.write('digraph A{')
fic.write('\n')
fic.write('\tgraph [ordering="out"];')
fic.write('\n')
for x in A[2]:
fic.write('\t')
fic.write(str(x))
fic.write(' -> {')
for y in fils(A, x):
fic.write(str(y))
fic.write('; ')
fic.write('}')
fic.write('\n')
fic.write("}")
fic.write('\n')
fic.close()
A = creer_arbre()
p = []
n = []
o = []
l = []
ajouter_racine(A, 1)
ajouter_fils(A, 4, 1)
ajouter_fils(A, 2, 1)
ajouter_fils(A, 3, 1)
ajouter_fils(A, 7, 2)
ajouter_fils(A, 5, 2)
ajouter_fils(A, 6, 5)
taille = taille_arbre(A)
print(taille)
parcourir_arbre(A, p)
print(p)
parcours_niveau(A, 1, n)
print(n)
m = sommet_a_distance(A, 2, 2)
print(m)
parcours_feuille(A, o)
print(o)
parcours_sommets_internes(A, l)
print(l)
ecrire(A)
| mit | -8,699,259,828,190,913,000 | 17.168421 | 60 | 0.544032 | false | 2.268068 | false | false | false |
MatthijsKamstra/haxepython | 04haxelow/code/bin/example.py | 1 | 111994 | # Generated by Haxe 3.4.5
# coding: utf-8
import math as python_lib_Math
import math as Math
from os import path as python_lib_os_Path
import inspect as python_lib_Inspect
import builtins as python_lib_Builtins
import functools as python_lib_Functools
import random as python_lib_Random
import re as python_lib_Re
from io import StringIO as python_lib_io_StringIO
class _hx_AnonObject:
def __init__(self, fields):
self.__dict__ = fields
_hx_classes = {}
class Enum:
_hx_class_name = "Enum"
__slots__ = ("tag", "index", "params")
_hx_fields = ["tag", "index", "params"]
_hx_methods = ["__str__"]
def __init__(self,tag,index,params):
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:38
self.tag = tag
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:39
self.index = index
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:40
self.params = params
def __str__(self):
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:45
if (self.params is None):
return self.tag
else:
# /usr/local/lib/haxe/std/python/internal/EnumImpl.hx:48
_this = self.params
return (((HxOverrides.stringOrNull(self.tag) + "(") + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in _this]))) + ")")
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.tag = None
_hx_o.index = None
_hx_o.params = None
Enum._hx_class = Enum
_hx_classes["Enum"] = Enum
class Class:
_hx_class_name = "Class"
Class._hx_class = Class
_hx_classes["Class"] = Class
class EReg:
_hx_class_name = "EReg"
__slots__ = ("pattern", "matchObj", "_hx_global")
_hx_fields = ["pattern", "matchObj", "global"]
def __init__(self,r,opt):
# /usr/local/lib/haxe/std/python/_std/EReg.hx:30
self.matchObj = None
# /usr/local/lib/haxe/std/python/_std/EReg.hx:34
self._hx_global = False
# /usr/local/lib/haxe/std/python/_std/EReg.hx:35
options = 0
# /usr/local/lib/haxe/std/python/_std/EReg.hx:36
# /usr/local/lib/haxe/std/python/_std/EReg.hx:36
_g1 = 0
_g = len(opt)
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:37
c = (-1 if ((i >= len(opt))) else ord(opt[i]))
# /usr/local/lib/haxe/std/python/_std/EReg.hx:38
if (c == 109):
options = (options | python_lib_Re.M)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:39
if (c == 105):
options = (options | python_lib_Re.I)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:40
if (c == 115):
options = (options | python_lib_Re.S)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:41
if (c == 117):
options = (options | python_lib_Re.U)
# /usr/local/lib/haxe/std/python/_std/EReg.hx:42
if (c == 103):
self._hx_global = True
# /usr/local/lib/haxe/std/python/_std/EReg.hx:44
self.pattern = python_lib_Re.compile(r,options)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.pattern = None
_hx_o.matchObj = None
_hx_o._hx_global = None
EReg._hx_class = EReg
_hx_classes["EReg"] = EReg
class EnumValue:
_hx_class_name = "EnumValue"
EnumValue._hx_class = EnumValue
_hx_classes["EnumValue"] = EnumValue
class HaxeLowDisk:
_hx_class_name = "HaxeLowDisk"
__slots__ = ()
_hx_methods = ["readFileSync", "writeFile"]
HaxeLowDisk._hx_class = HaxeLowDisk
_hx_classes["HaxeLowDisk"] = HaxeLowDisk
class SysDisk:
_hx_class_name = "SysDisk"
__slots__ = ()
_hx_methods = ["readFileSync", "writeFile"]
_hx_interfaces = [HaxeLowDisk]
def __init__(self):
pass
def readFileSync(self,file):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:71
if sys_FileSystem.exists(file):
return sys_io_File.getContent(file)
else:
return None
def writeFile(self,file,data):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:75
sys_io_File.saveContent(file,data)
@staticmethod
def _hx_empty_init(_hx_o): pass
SysDisk._hx_class = SysDisk
_hx_classes["SysDisk"] = SysDisk
class HaxeLow:
_hx_class_name = "HaxeLow"
__slots__ = ("file", "db", "checksum", "disk")
_hx_fields = ["file", "db", "checksum", "disk"]
_hx_methods = ["backup", "restore", "save", "col"]
def __init__(self,file = None,disk = None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:101
self.checksum = None
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:105
self.file = file
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:106
self.disk = disk
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:107
self.db = _hx_AnonObject({})
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:109
if ((disk is None) and ((file is not None))):
self.disk = SysDisk()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:120
if (self.file is not None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:121
if (self.disk is None):
raise _HxException("HaxeLow: no disk storage set.")
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:123
self.checksum = self.disk.readFileSync(self.file)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:124
if (self.checksum is not None):
self.restore(self.checksum)
def backup(self,file = None):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:129
backup = tjson_TJSON.encode(self.db,"fancy")
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:130
if (file is not None):
self.disk.writeFile(file,backup)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:131
return backup
def restore(self,s):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:135
try:
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:136
self.db = tjson_TJSON.parse(s)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:139
self.checksum = None
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
raise _HxException(((("HaxeLow: JSON parsing failed: file \"" + HxOverrides.stringOrNull(self.file)) + "\" is corrupt. ") + Std.string(e)))
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:144
return self
def save(self):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:148
if (self.file is None):
return self
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:150
data = self.backup()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:151
if (data == self.checksum):
return self
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:153
self.checksum = data
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:154
self.disk.writeFile(self.file,data)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:156
return self
def col(self,cls):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:160
name = Type.getClassName(cls)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:161
if (not hasattr(self.db,(("_hx_" + name) if ((name in python_Boot.keywords)) else (("_hx_" + name) if (((((len(name) > 2) and ((ord(name[0]) == 95))) and ((ord(name[1]) == 95))) and ((ord(name[(len(name) - 1)]) != 95)))) else name)))):
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:162
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:162
o = self.db
value = list()
setattr(o,(("_hx_" + name) if ((name in python_Boot.keywords)) else (("_hx_" + name) if (((((len(name) > 2) and ((ord(name[0]) == 95))) and ((ord(name[1]) == 95))) and ((ord(name[(len(name) - 1)]) != 95)))) else name)),value)
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:163
self.save()
# /usr/local/lib/haxe/lib/haxelow/0,7,1/HaxeLow.hx:166
return Reflect.field(self.db,name)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.file = None
_hx_o.db = None
_hx_o.checksum = None
_hx_o.disk = None
HaxeLow._hx_class = HaxeLow
_hx_classes["HaxeLow"] = HaxeLow
class List:
_hx_class_name = "List"
__slots__ = ("h", "length")
_hx_fields = ["h", "length"]
_hx_methods = ["iterator"]
def __init__(self):
# /usr/local/lib/haxe/std/List.hx:32
self.h = None
# /usr/local/lib/haxe/std/List.hx:44
self.length = 0
def iterator(self):
# /usr/local/lib/haxe/std/List.hx:161
return _List_ListIterator(self.h)
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.h = None
_hx_o.length = None
List._hx_class = List
_hx_classes["List"] = List
class _List_ListNode:
_hx_class_name = "_List.ListNode"
__slots__ = ("item", "next")
_hx_fields = ["item", "next"]
def __init__(self,item,next):
# /usr/local/lib/haxe/std/List.hx:256
self.item = item
# /usr/local/lib/haxe/std/List.hx:257
self.next = next
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.item = None
_hx_o.next = None
_List_ListNode._hx_class = _List_ListNode
_hx_classes["_List.ListNode"] = _List_ListNode
class _List_ListIterator:
_hx_class_name = "_List.ListIterator"
__slots__ = ("head",)
_hx_fields = ["head"]
_hx_methods = ["hasNext", "next"]
def __init__(self,head):
# /usr/local/lib/haxe/std/List.hx:269
self.head = head
def hasNext(self):
# /usr/local/lib/haxe/std/List.hx:273
return (self.head is not None)
def next(self):
# /usr/local/lib/haxe/std/List.hx:277
val = self.head.item
# /usr/local/lib/haxe/std/List.hx:278
self.head = self.head.next
# /usr/local/lib/haxe/std/List.hx:279
return val
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.head = None
_List_ListIterator._hx_class = _List_ListIterator
_hx_classes["_List.ListIterator"] = _List_ListIterator
class Main:
_hx_class_name = "Main"
__slots__ = ()
_hx_statics = ["main"]
def __init__(self):
# src/Main.hx:10
print("Python Haxelow Example")
# src/Main.hx:13
db = HaxeLow("db.json")
# src/Main.hx:16
persons = db.col(Person)
# src/Main.hx:20
# src/Main.hx:20
x = Person("Test",50)
persons.append(x)
# src/Main.hx:25
db.save()
@staticmethod
def main():
# src/Main.hx:31
main = Main()
Main._hx_class = Main
_hx_classes["Main"] = Main
class Person:
_hx_class_name = "Person"
__slots__ = ("name", "age")
_hx_fields = ["name", "age"]
def __init__(self,name,age):
# src/Main.hx:37
self.name = name
# src/Main.hx:38
self.age = age
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.name = None
_hx_o.age = None
Person._hx_class = Person
_hx_classes["Person"] = Person
class Reflect:
_hx_class_name = "Reflect"
__slots__ = ()
_hx_statics = ["field", "isObject"]
@staticmethod
def field(o,field):
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:44
return python_Boot.field(o,field)
@staticmethod
def isObject(v):
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:106
_g = Type.typeof(v)
_g1 = _g.index
# /usr/local/lib/haxe/std/python/_std/Reflect.hx:107
if ((_g1 == 6) or ((_g1 == 4))):
return True
else:
return False
Reflect._hx_class = Reflect
_hx_classes["Reflect"] = Reflect
class Std:
_hx_class_name = "Std"
__slots__ = ()
_hx_statics = ["is", "string", "parseInt", "shortenPossibleNumber", "parseFloat"]
@staticmethod
def _hx_is(v,t):
# /usr/local/lib/haxe/std/python/_std/Std.hx:51
if ((v is None) and ((t is None))):
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:54
if (t is None):
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:58
if (t == Dynamic):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:61
isBool = isinstance(v,bool)
# /usr/local/lib/haxe/std/python/_std/Std.hx:63
if ((t == Bool) and isBool):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:66
if ((((not isBool) and (not (t == Bool))) and (t == Int)) and isinstance(v,int)):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:69
vIsFloat = isinstance(v,float)
# /usr/local/lib/haxe/std/python/_std/Std.hx:71
tmp = None
tmp1 = None
tmp2 = None
tmp3 = None
if (((not isBool) and vIsFloat) and (t == Int)):
f = v
if ((f != Math.POSITIVE_INFINITY) and ((f != Math.NEGATIVE_INFINITY))):
tmp3 = (not python_lib_Math.isnan(f))
else:
tmp3 = False
else:
tmp3 = False
if tmp3:
tmp4 = None
try:
tmp4 = int(v)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
tmp4 = None
tmp2 = (v == tmp4)
else:
tmp2 = False
if tmp2:
tmp1 = (v <= 2147483647)
else:
tmp1 = False
if tmp1:
tmp = (v >= -2147483648)
else:
tmp = False
if tmp:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:76
if (((not isBool) and (t == Float)) and isinstance(v,(float, int))):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:80
if (t == str):
return isinstance(v,str)
# /usr/local/lib/haxe/std/python/_std/Std.hx:83
isEnumType = (t == Enum)
# /usr/local/lib/haxe/std/python/_std/Std.hx:84
if ((isEnumType and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_constructs")):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:86
if isEnumType:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:88
isClassType = (t == Class)
# /usr/local/lib/haxe/std/python/_std/Std.hx:89
if ((((isClassType and (not isinstance(v,Enum))) and python_lib_Inspect.isclass(v)) and hasattr(v,"_hx_class_name")) and (not hasattr(v,"_hx_constructs"))):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:91
if isClassType:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:93
tmp5 = None
try:
tmp5 = isinstance(v,t)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
tmp5 = False
if tmp5:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:97
if python_lib_Inspect.isclass(t):
# /usr/local/lib/haxe/std/python/_std/Std.hx:99
loop = None
# /usr/local/lib/haxe/std/python/_std/Std.hx:100
def _hx_local_1(intf):
# /usr/local/lib/haxe/std/python/_std/Std.hx:101
f1 = (intf._hx_interfaces if (hasattr(intf,"_hx_interfaces")) else [])
# /usr/local/lib/haxe/std/python/_std/Std.hx:102
if (f1 is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:103
# /usr/local/lib/haxe/std/python/_std/Std.hx:103
_g = 0
while (_g < len(f1)):
i = (f1[_g] if _g >= 0 and _g < len(f1) else None)
_g = (_g + 1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:104
if HxOverrides.eq(i,t):
return True
else:
# /usr/local/lib/haxe/std/python/_std/Std.hx:107
l = loop(i)
# /usr/local/lib/haxe/std/python/_std/Std.hx:108
if l:
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:113
return False
else:
return False
# /usr/local/lib/haxe/std/python/_std/Std.hx:99
loop = _hx_local_1
loop1 = loop
# /usr/local/lib/haxe/std/python/_std/Std.hx:118
currentClass = v.__class__
# /usr/local/lib/haxe/std/python/_std/Std.hx:119
while (currentClass is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:120
if loop1(currentClass):
return True
# /usr/local/lib/haxe/std/python/_std/Std.hx:123
currentClass = python_Boot.getSuperClass(currentClass)
# /usr/local/lib/haxe/std/python/_std/Std.hx:125
return False
else:
return False
@staticmethod
def string(s):
# /usr/local/lib/haxe/std/python/_std/Std.hx:134
return python_Boot.toString1(s,"")
@staticmethod
def parseInt(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:147
if (x is None):
return None
# /usr/local/lib/haxe/std/python/_std/Std.hx:148
try:
return int(x)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
try:
# /usr/local/lib/haxe/std/python/_std/Std.hx:152
prefix = HxString.substr(x,0,2).lower()
# /usr/local/lib/haxe/std/python/_std/Std.hx:154
if (prefix == "0x"):
return int(x,16)
# /usr/local/lib/haxe/std/python/_std/Std.hx:157
raise _HxException("fail")
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
# /usr/local/lib/haxe/std/python/_std/Std.hx:160
x1 = Std.parseFloat(x)
r = None
try:
r = int(x1)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e2 = _hx_e1
r = None
# /usr/local/lib/haxe/std/python/_std/Std.hx:162
if (r is None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:163
r1 = Std.shortenPossibleNumber(x)
# /usr/local/lib/haxe/std/python/_std/Std.hx:164
if (r1 != x):
return Std.parseInt(r1)
else:
return None
# /usr/local/lib/haxe/std/python/_std/Std.hx:170
return r
@staticmethod
def shortenPossibleNumber(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:177
r = ""
# /usr/local/lib/haxe/std/python/_std/Std.hx:178
# /usr/local/lib/haxe/std/python/_std/Std.hx:178
_g1 = 0
_g = len(x)
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:179
c = ("" if (((i < 0) or ((i >= len(x))))) else x[i])
# /usr/local/lib/haxe/std/python/_std/Std.hx:180
# /usr/local/lib/haxe/std/python/_std/Std.hx:180
_g2 = HxString.charCodeAt(c,0)
if (_g2 is None):
break
else:
_g21 = _g2
# /usr/local/lib/haxe/std/python/_std/Std.hx:191
if (((((((((((_g21 == 57) or ((_g21 == 56))) or ((_g21 == 55))) or ((_g21 == 54))) or ((_g21 == 53))) or ((_g21 == 52))) or ((_g21 == 51))) or ((_g21 == 50))) or ((_g21 == 49))) or ((_g21 == 48))) or ((_g21 == 46))):
r = (("null" if r is None else r) + ("null" if c is None else c))
else:
break
# /usr/local/lib/haxe/std/python/_std/Std.hx:195
return r
@staticmethod
def parseFloat(x):
# /usr/local/lib/haxe/std/python/_std/Std.hx:200
try:
return float(x)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
# /usr/local/lib/haxe/std/python/_std/Std.hx:204
if (x is not None):
# /usr/local/lib/haxe/std/python/_std/Std.hx:205
r1 = Std.shortenPossibleNumber(x)
# /usr/local/lib/haxe/std/python/_std/Std.hx:206
if (r1 != x):
return Std.parseFloat(r1)
# /usr/local/lib/haxe/std/python/_std/Std.hx:210
return Math.NaN
Std._hx_class = Std
_hx_classes["Std"] = Std
class Float:
_hx_class_name = "Float"
Float._hx_class = Float
_hx_classes["Float"] = Float
class Int:
_hx_class_name = "Int"
Int._hx_class = Int
_hx_classes["Int"] = Int
class Bool:
_hx_class_name = "Bool"
Bool._hx_class = Bool
_hx_classes["Bool"] = Bool
class Dynamic:
_hx_class_name = "Dynamic"
Dynamic._hx_class = Dynamic
_hx_classes["Dynamic"] = Dynamic
class StringTools:
_hx_class_name = "StringTools"
__slots__ = ()
_hx_statics = ["startsWith", "replace"]
@staticmethod
def startsWith(s,start):
# /usr/local/lib/haxe/std/StringTools.hx:200
if (len(s) >= len(start)):
return (HxString.substr(s,0,len(start)) == start)
else:
return False
@staticmethod
def replace(s,sub,by):
# /usr/local/lib/haxe/std/StringTools.hx:386
_this = (list(s) if ((sub == "")) else s.split(sub))
return by.join([python_Boot.toString1(x1,'') for x1 in _this])
StringTools._hx_class = StringTools
_hx_classes["StringTools"] = StringTools
class sys_FileSystem:
_hx_class_name = "sys.FileSystem"
__slots__ = ()
_hx_statics = ["exists"]
@staticmethod
def exists(path):
# /usr/local/lib/haxe/std/python/_std/sys/FileSystem.hx:31
return python_lib_os_Path.exists(path)
sys_FileSystem._hx_class = sys_FileSystem
_hx_classes["sys.FileSystem"] = sys_FileSystem
class haxe_IMap:
_hx_class_name = "haxe.IMap"
__slots__ = ()
_hx_methods = ["get", "keys"]
haxe_IMap._hx_class = haxe_IMap
_hx_classes["haxe.IMap"] = haxe_IMap
class haxe_ds_StringMap:
_hx_class_name = "haxe.ds.StringMap"
__slots__ = ("h",)
_hx_fields = ["h"]
_hx_methods = ["get", "keys"]
_hx_interfaces = [haxe_IMap]
def __init__(self):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:32
self.h = dict()
def get(self,key):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:40
return self.h.get(key,None)
def keys(self):
# /usr/local/lib/haxe/std/python/_std/haxe/ds/StringMap.hx:54
return python_HaxeIterator(iter(self.h.keys()))
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.h = None
haxe_ds_StringMap._hx_class = haxe_ds_StringMap
_hx_classes["haxe.ds.StringMap"] = haxe_ds_StringMap
class python_HaxeIterator:
_hx_class_name = "python.HaxeIterator"
__slots__ = ("it", "x", "has", "checked")
_hx_fields = ["it", "x", "has", "checked"]
_hx_methods = ["next", "hasNext"]
def __init__(self,it):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:31
self.checked = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:30
self.has = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:29
self.x = None
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:34
self.it = it
def next(self):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:38
if (not self.checked):
self.hasNext()
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:39
self.checked = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:40
return self.x
def hasNext(self):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:44
if (not self.checked):
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:45
try:
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:46
self.x = self.it.__next__()
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:47
self.has = True
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
if isinstance(_hx_e1, StopIteration):
s = _hx_e1
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:49
self.has = False
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:50
self.x = None
else:
raise _hx_e
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:52
self.checked = True
# /usr/local/lib/haxe/std/python/HaxeIterator.hx:54
return self.has
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.it = None
_hx_o.x = None
_hx_o.has = None
_hx_o.checked = None
python_HaxeIterator._hx_class = python_HaxeIterator
_hx_classes["python.HaxeIterator"] = python_HaxeIterator
class ValueType(Enum):
__slots__ = ()
_hx_class_name = "ValueType"
_hx_constructs = ["TNull", "TInt", "TFloat", "TBool", "TObject", "TFunction", "TClass", "TEnum", "TUnknown"]
@staticmethod
def TClass(c):
return ValueType("TClass", 6, [c])
@staticmethod
def TEnum(e):
return ValueType("TEnum", 7, [e])
ValueType.TNull = ValueType("TNull", 0, list())
ValueType.TInt = ValueType("TInt", 1, list())
ValueType.TFloat = ValueType("TFloat", 2, list())
ValueType.TBool = ValueType("TBool", 3, list())
ValueType.TObject = ValueType("TObject", 4, list())
ValueType.TFunction = ValueType("TFunction", 5, list())
ValueType.TUnknown = ValueType("TUnknown", 8, list())
ValueType._hx_class = ValueType
_hx_classes["ValueType"] = ValueType
class Type:
_hx_class_name = "Type"
__slots__ = ()
_hx_statics = ["getClass", "getSuperClass", "getClassName", "resolveClass", "createEmptyInstance", "typeof"]
@staticmethod
def getClass(o):
# /usr/local/lib/haxe/std/python/_std/Type.hx:46
if (o is None):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:49
if ((o is not None) and (((o == str) or python_lib_Inspect.isclass(o)))):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:51
if isinstance(o,_hx_AnonObject):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:53
if hasattr(o,"_hx_class"):
return o._hx_class
# /usr/local/lib/haxe/std/python/_std/Type.hx:56
if hasattr(o,"__class__"):
return o.__class__
else:
return None
@staticmethod
def getSuperClass(c):
# /usr/local/lib/haxe/std/python/_std/Type.hx:70
return python_Boot.getSuperClass(c)
@staticmethod
def getClassName(c):
# /usr/local/lib/haxe/std/python/_std/Type.hx:75
if hasattr(c,"_hx_class_name"):
return c._hx_class_name
else:
# /usr/local/lib/haxe/std/python/_std/Type.hx:79
if (c == list):
return "Array"
# /usr/local/lib/haxe/std/python/_std/Type.hx:80
if (c == Math):
return "Math"
# /usr/local/lib/haxe/std/python/_std/Type.hx:81
if (c == str):
return "String"
# /usr/local/lib/haxe/std/python/_std/Type.hx:83
try:
return c.__name__
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
return None
@staticmethod
def resolveClass(name):
# /usr/local/lib/haxe/std/python/_std/Type.hx:97
if (name == "Array"):
return list
# /usr/local/lib/haxe/std/python/_std/Type.hx:98
if (name == "Math"):
return Math
# /usr/local/lib/haxe/std/python/_std/Type.hx:99
if (name == "String"):
return str
# /usr/local/lib/haxe/std/python/_std/Type.hx:101
cl = _hx_classes.get(name,None)
# /usr/local/lib/haxe/std/python/_std/Type.hx:103
if ((cl is None) or (not (((cl is not None) and (((cl == str) or python_lib_Inspect.isclass(cl))))))):
return None
# /usr/local/lib/haxe/std/python/_std/Type.hx:105
return cl
@staticmethod
def createEmptyInstance(cl):
# /usr/local/lib/haxe/std/python/_std/Type.hx:121
i = cl.__new__(cl)
# /usr/local/lib/haxe/std/python/_std/Type.hx:123
callInit = None
def _hx_local_0(cl1):
# /usr/local/lib/haxe/std/python/_std/Type.hx:124
sc = Type.getSuperClass(cl1)
# /usr/local/lib/haxe/std/python/_std/Type.hx:125
if (sc is not None):
callInit(sc)
# /usr/local/lib/haxe/std/python/_std/Type.hx:128
if hasattr(cl1,"_hx_empty_init"):
cl1._hx_empty_init(i)
callInit = _hx_local_0
callInit1 = callInit
# /usr/local/lib/haxe/std/python/_std/Type.hx:132
callInit1(cl)
# /usr/local/lib/haxe/std/python/_std/Type.hx:134
return i
@staticmethod
def typeof(v):
# /usr/local/lib/haxe/std/python/_std/Type.hx:178
if (v is None):
return ValueType.TNull
elif isinstance(v,bool):
return ValueType.TBool
elif isinstance(v,int):
return ValueType.TInt
elif isinstance(v,float):
return ValueType.TFloat
elif isinstance(v,str):
return ValueType.TClass(str)
elif isinstance(v,list):
return ValueType.TClass(list)
elif (isinstance(v,_hx_AnonObject) or python_lib_Inspect.isclass(v)):
return ValueType.TObject
elif isinstance(v,Enum):
return ValueType.TEnum(v.__class__)
elif (isinstance(v,type) or hasattr(v,"_hx_class")):
return ValueType.TClass(v.__class__)
elif callable(v):
return ValueType.TFunction
else:
return ValueType.TUnknown
Type._hx_class = Type
_hx_classes["Type"] = Type
class haxe_Utf8:
_hx_class_name = "haxe.Utf8"
__slots__ = ("_hx___b",)
_hx_fields = ["__b"]
def __init__(self,size = None):
# /usr/local/lib/haxe/std/haxe/Utf8.hx:36
self._hx___b = ""
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o._hx___b = None
haxe_Utf8._hx_class = haxe_Utf8
_hx_classes["haxe.Utf8"] = haxe_Utf8
class haxe_io_Eof:
_hx_class_name = "haxe.io.Eof"
__slots__ = ()
_hx_methods = ["toString"]
def __init__(self):
pass
def toString(self):
# /usr/local/lib/haxe/std/haxe/io/Eof.hx:31
return "Eof"
@staticmethod
def _hx_empty_init(_hx_o): pass
haxe_io_Eof._hx_class = haxe_io_Eof
_hx_classes["haxe.io.Eof"] = haxe_io_Eof
class python_Boot:
_hx_class_name = "python.Boot"
__slots__ = ()
_hx_statics = ["keywords", "toString1", "fields", "simpleField", "field", "getInstanceFields", "getSuperClass", "getClassFields", "prefixLength", "unhandleKeywords"]
@staticmethod
def toString1(o,s):
# /usr/local/lib/haxe/std/python/Boot.hx:94
if (o is None):
return "null"
# /usr/local/lib/haxe/std/python/Boot.hx:96
if isinstance(o,str):
return o
# /usr/local/lib/haxe/std/python/Boot.hx:98
if (s is None):
s = ""
# /usr/local/lib/haxe/std/python/Boot.hx:99
if (len(s) >= 5):
return "<...>"
# /usr/local/lib/haxe/std/python/Boot.hx:101
if isinstance(o,bool):
if o:
return "true"
else:
return "false"
# /usr/local/lib/haxe/std/python/Boot.hx:104
if isinstance(o,int):
return str(o)
# /usr/local/lib/haxe/std/python/Boot.hx:108
if isinstance(o,float):
try:
if (o == int(o)):
return str(Math.floor((o + 0.5)))
else:
return str(o)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e = _hx_e1
return str(o)
# /usr/local/lib/haxe/std/python/Boot.hx:120
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:122
o1 = o
# /usr/local/lib/haxe/std/python/Boot.hx:124
l = len(o1)
# /usr/local/lib/haxe/std/python/Boot.hx:126
st = "["
# /usr/local/lib/haxe/std/python/Boot.hx:127
s = (("null" if s is None else s) + "\t")
# /usr/local/lib/haxe/std/python/Boot.hx:128
# /usr/local/lib/haxe/std/python/Boot.hx:128
_g1 = 0
_g = l
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:129
prefix = ""
# /usr/local/lib/haxe/std/python/Boot.hx:130
if (i > 0):
prefix = ","
# /usr/local/lib/haxe/std/python/Boot.hx:133
st = (("null" if st is None else st) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1((o1[i] if i >= 0 and i < len(o1) else None),s))))))
# /usr/local/lib/haxe/std/python/Boot.hx:135
st = (("null" if st is None else st) + "]")
# /usr/local/lib/haxe/std/python/Boot.hx:136
return st
# /usr/local/lib/haxe/std/python/Boot.hx:139
try:
if hasattr(o,"toString"):
return o.toString()
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:145
if (python_lib_Inspect.isfunction(o) or python_lib_Inspect.ismethod(o)):
return "<function>"
# /usr/local/lib/haxe/std/python/Boot.hx:147
if hasattr(o,"__class__"):
# /usr/local/lib/haxe/std/python/Boot.hx:150
if isinstance(o,_hx_AnonObject):
# /usr/local/lib/haxe/std/python/Boot.hx:152
toStr = None
# /usr/local/lib/haxe/std/python/Boot.hx:153
try:
# /usr/local/lib/haxe/std/python/Boot.hx:155
fields = python_Boot.fields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:156
_g2 = []
_g11 = 0
while (_g11 < len(fields)):
f = (fields[_g11] if _g11 >= 0 and _g11 < len(fields) else None)
_g11 = (_g11 + 1)
x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
_g2.append(x)
fieldsStr = _g2
# /usr/local/lib/haxe/std/python/Boot.hx:157
toStr = (("{ " + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " }")
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e2 = _hx_e1
return "{ ... }"
# /usr/local/lib/haxe/std/python/Boot.hx:163
if (toStr is None):
return "{ ... }"
else:
return toStr
# /usr/local/lib/haxe/std/python/Boot.hx:173
if isinstance(o,Enum):
# /usr/local/lib/haxe/std/python/Boot.hx:175
o2 = o
# /usr/local/lib/haxe/std/python/Boot.hx:177
l1 = len(o2.params)
# /usr/local/lib/haxe/std/python/Boot.hx:178
hasParams = (l1 > 0)
# /usr/local/lib/haxe/std/python/Boot.hx:179
if hasParams:
# /usr/local/lib/haxe/std/python/Boot.hx:180
paramsStr = ""
# /usr/local/lib/haxe/std/python/Boot.hx:181
# /usr/local/lib/haxe/std/python/Boot.hx:181
_g12 = 0
_g3 = l1
while (_g12 < _g3):
i1 = _g12
_g12 = (_g12 + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:182
prefix1 = ""
# /usr/local/lib/haxe/std/python/Boot.hx:183
if (i1 > 0):
prefix1 = ","
# /usr/local/lib/haxe/std/python/Boot.hx:186
paramsStr = (("null" if paramsStr is None else paramsStr) + HxOverrides.stringOrNull(((("null" if prefix1 is None else prefix1) + HxOverrides.stringOrNull(python_Boot.toString1((o2.params[i1] if i1 >= 0 and i1 < len(o2.params) else None),s))))))
# /usr/local/lib/haxe/std/python/Boot.hx:188
return (((HxOverrides.stringOrNull(o2.tag) + "(") + ("null" if paramsStr is None else paramsStr)) + ")")
else:
return o2.tag
# /usr/local/lib/haxe/std/python/Boot.hx:194
if hasattr(o,"_hx_class_name"):
if (o.__class__.__name__ != "type"):
# /usr/local/lib/haxe/std/python/Boot.hx:196
fields1 = python_Boot.getInstanceFields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:197
_g4 = []
_g13 = 0
while (_g13 < len(fields1)):
f1 = (fields1[_g13] if _g13 >= 0 and _g13 < len(fields1) else None)
_g13 = (_g13 + 1)
x1 = ((("" + ("null" if f1 is None else f1)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f1),(("null" if s is None else s) + "\t"))))
_g4.append(x1)
fieldsStr1 = _g4
# /usr/local/lib/haxe/std/python/Boot.hx:199
toStr1 = (((HxOverrides.stringOrNull(o._hx_class_name) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr1]))) + " )")
# /usr/local/lib/haxe/std/python/Boot.hx:200
return toStr1
else:
# /usr/local/lib/haxe/std/python/Boot.hx:202
fields2 = python_Boot.getClassFields(o)
# /usr/local/lib/haxe/std/python/Boot.hx:203
_g5 = []
_g14 = 0
while (_g14 < len(fields2)):
f2 = (fields2[_g14] if _g14 >= 0 and _g14 < len(fields2) else None)
_g14 = (_g14 + 1)
x2 = ((("" + ("null" if f2 is None else f2)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f2),(("null" if s is None else s) + "\t"))))
_g5.append(x2)
fieldsStr2 = _g5
# /usr/local/lib/haxe/std/python/Boot.hx:204
toStr2 = (((("#" + HxOverrides.stringOrNull(o._hx_class_name)) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr2]))) + " )")
# /usr/local/lib/haxe/std/python/Boot.hx:205
return toStr2
# /usr/local/lib/haxe/std/python/Boot.hx:209
if (o == str):
return "#String"
# /usr/local/lib/haxe/std/python/Boot.hx:213
if (o == list):
return "#Array"
# /usr/local/lib/haxe/std/python/Boot.hx:217
if callable(o):
return "function"
# /usr/local/lib/haxe/std/python/Boot.hx:220
try:
if hasattr(o,"__repr__"):
return o.__repr__()
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:226
if hasattr(o,"__str__"):
return o.__str__([])
# /usr/local/lib/haxe/std/python/Boot.hx:230
if hasattr(o,"__name__"):
return o.__name__
# /usr/local/lib/haxe/std/python/Boot.hx:233
return "???"
else:
return str(o)
@staticmethod
def fields(o):
# /usr/local/lib/haxe/std/python/Boot.hx:245
a = []
# /usr/local/lib/haxe/std/python/Boot.hx:246
if (o is not None):
# /usr/local/lib/haxe/std/python/Boot.hx:247
if hasattr(o,"_hx_fields"):
# /usr/local/lib/haxe/std/python/Boot.hx:248
fields = o._hx_fields
# /usr/local/lib/haxe/std/python/Boot.hx:249
return list(fields)
# /usr/local/lib/haxe/std/python/Boot.hx:251
if isinstance(o,_hx_AnonObject):
# /usr/local/lib/haxe/std/python/Boot.hx:253
d = o.__dict__
# /usr/local/lib/haxe/std/python/Boot.hx:254
keys = d.keys()
# /usr/local/lib/haxe/std/python/Boot.hx:255
handler = python_Boot.unhandleKeywords
# /usr/local/lib/haxe/std/python/Boot.hx:257
for k in keys:
# /usr/local/lib/haxe/std/python/Boot.hx:258
a.append(handler(k))
elif hasattr(o,"__dict__"):
# /usr/local/lib/haxe/std/python/Boot.hx:262
d1 = o.__dict__
# /usr/local/lib/haxe/std/python/Boot.hx:263
keys1 = d1.keys()
# /usr/local/lib/haxe/std/python/Boot.hx:264
for k in keys1:
# /usr/local/lib/haxe/std/python/Boot.hx:265
a.append(k)
# /usr/local/lib/haxe/std/python/Boot.hx:269
return a
@staticmethod
def simpleField(o,field):
# /usr/local/lib/haxe/std/python/Boot.hx:281
if (field is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:283
field1 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
# /usr/local/lib/haxe/std/python/Boot.hx:284
if hasattr(o,field1):
return getattr(o,field1)
else:
return None
@staticmethod
def field(o,field):
# /usr/local/lib/haxe/std/python/Boot.hx:288
if (field is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:290
field1 = field
_hx_local_0 = len(field1)
# /usr/local/lib/haxe/std/python/Boot.hx:295
if (_hx_local_0 == 10):
if (field1 == "charCodeAt"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:295
s1 = o
def _hx_local_1(a11):
return HxString.charCodeAt(s1,a11)
return _hx_local_1
elif (_hx_local_0 == 11):
if (field1 == "lastIndexOf"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:297
s3 = o
def _hx_local_2(a15):
return HxString.lastIndexOf(s3,a15)
return _hx_local_2
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:315
a4 = o
def _hx_local_3(x4):
return python_internal_ArrayImpl.lastIndexOf(a4,x4)
return _hx_local_3
elif (field1 == "toLowerCase"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:292
s7 = o
def _hx_local_4():
return HxString.toLowerCase(s7)
return _hx_local_4
elif (field1 == "toUpperCase"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:293
s9 = o
def _hx_local_5():
return HxString.toUpperCase(s9)
return _hx_local_5
elif (_hx_local_0 == 9):
if (field1 == "substring"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:300
s6 = o
def _hx_local_6(a19):
return HxString.substring(s6,a19)
return _hx_local_6
elif (_hx_local_0 == 4):
if (field1 == "copy"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:306
def _hx_local_7():
# /usr/local/lib/haxe/std/python/Boot.hx:306
return list(o)
return _hx_local_7
elif (field1 == "join"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:309
def _hx_local_8(sep):
# /usr/local/lib/haxe/std/python/Boot.hx:309
return sep.join([python_Boot.toString1(x1,'') for x1 in o])
return _hx_local_8
elif (field1 == "push"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:312
x7 = o
def _hx_local_9(e):
return python_internal_ArrayImpl.push(x7,e)
return _hx_local_9
elif (field1 == "sort"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:320
x11 = o
def _hx_local_10(f2):
python_internal_ArrayImpl.sort(x11,f2)
return _hx_local_10
elif (_hx_local_0 == 5):
if (field1 == "shift"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:318
x9 = o
def _hx_local_11():
return python_internal_ArrayImpl.shift(x9)
return _hx_local_11
elif (field1 == "slice"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:319
x10 = o
def _hx_local_12(a16):
return python_internal_ArrayImpl.slice(x10,a16)
return _hx_local_12
elif (field1 == "split"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:298
s4 = o
def _hx_local_13(d):
return HxString.split(s4,d)
return _hx_local_13
elif (_hx_local_0 == 7):
if (field1 == "indexOf"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:296
s2 = o
def _hx_local_14(a13):
return HxString.indexOf(s2,a13)
return _hx_local_14
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:314
a = o
def _hx_local_15(x1):
return python_internal_ArrayImpl.indexOf(a,x1)
return _hx_local_15
elif (field1 == "reverse"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:317
a5 = o
def _hx_local_16():
python_internal_ArrayImpl.reverse(a5)
return _hx_local_16
elif (field1 == "unshift"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:313
x14 = o
def _hx_local_17(e2):
python_internal_ArrayImpl.unshift(x14,e2)
return _hx_local_17
elif (_hx_local_0 == 3):
if (field1 == "map"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:303
x5 = o
def _hx_local_18(f1):
return python_internal_ArrayImpl.map(x5,f1)
return _hx_local_18
elif (field1 == "pop"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:311
x6 = o
def _hx_local_19():
return python_internal_ArrayImpl.pop(x6)
return _hx_local_19
elif (_hx_local_0 == 8):
if (field1 == "iterator"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:307
x3 = o
def _hx_local_20():
return python_internal_ArrayImpl.iterator(x3)
return _hx_local_20
elif (field1 == "toString"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:301
s8 = o
def _hx_local_21():
return HxString.toString(s8)
return _hx_local_21
elif isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:310
x13 = o
def _hx_local_22():
return python_internal_ArrayImpl.toString(x13)
return _hx_local_22
elif (_hx_local_0 == 6):
if (field1 == "charAt"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:294
s = o
def _hx_local_23(a1):
return HxString.charAt(s,a1)
return _hx_local_23
elif (field1 == "concat"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:305
a12 = o
def _hx_local_24(a2):
return python_internal_ArrayImpl.concat(a12,a2)
return _hx_local_24
elif (field1 == "filter"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:304
x = o
def _hx_local_25(f):
return python_internal_ArrayImpl.filter(x,f)
return _hx_local_25
elif (field1 == "insert"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:308
a3 = o
def _hx_local_26(a14,x2):
python_internal_ArrayImpl.insert(a3,a14,x2)
return _hx_local_26
elif (field1 == "length"):
if isinstance(o,str):
return len(o)
elif isinstance(o,list):
return len(o)
elif (field1 == "remove"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:316
x8 = o
def _hx_local_27(e1):
return python_internal_ArrayImpl.remove(x8,e1)
return _hx_local_27
elif (field1 == "splice"):
if isinstance(o,list):
# /usr/local/lib/haxe/std/python/Boot.hx:321
x12 = o
def _hx_local_28(a17,a21):
return python_internal_ArrayImpl.splice(x12,a17,a21)
return _hx_local_28
elif (field1 == "substr"):
if isinstance(o,str):
# /usr/local/lib/haxe/std/python/Boot.hx:299
s5 = o
def _hx_local_29(a18):
return HxString.substr(s5,a18)
return _hx_local_29
else:
pass
# /usr/local/lib/haxe/std/python/Boot.hx:325
field2 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
# /usr/local/lib/haxe/std/python/Boot.hx:326
if hasattr(o,field2):
return getattr(o,field2)
else:
return None
@staticmethod
def getInstanceFields(c):
# /usr/local/lib/haxe/std/python/Boot.hx:331
f = (c._hx_fields if (hasattr(c,"_hx_fields")) else [])
# /usr/local/lib/haxe/std/python/Boot.hx:332
if hasattr(c,"_hx_methods"):
f = (f + c._hx_methods)
# /usr/local/lib/haxe/std/python/Boot.hx:335
sc = python_Boot.getSuperClass(c)
# /usr/local/lib/haxe/std/python/Boot.hx:337
if (sc is None):
return f
else:
# /usr/local/lib/haxe/std/python/Boot.hx:341
scArr = python_Boot.getInstanceFields(sc)
# /usr/local/lib/haxe/std/python/Boot.hx:342
scMap = set(scArr)
# /usr/local/lib/haxe/std/python/Boot.hx:345
# /usr/local/lib/haxe/std/python/Boot.hx:345
_g = 0
while (_g < len(f)):
f1 = (f[_g] if _g >= 0 and _g < len(f) else None)
_g = (_g + 1)
# /usr/local/lib/haxe/std/python/Boot.hx:346
if (not (f1 in scMap)):
scArr.append(f1)
# /usr/local/lib/haxe/std/python/Boot.hx:351
return scArr
@staticmethod
def getSuperClass(c):
# /usr/local/lib/haxe/std/python/Boot.hx:356
if (c is None):
return None
# /usr/local/lib/haxe/std/python/Boot.hx:359
try:
# /usr/local/lib/haxe/std/python/Boot.hx:360
if hasattr(c,"_hx_super"):
return c._hx_super
# /usr/local/lib/haxe/std/python/Boot.hx:363
return None
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
pass
# /usr/local/lib/haxe/std/python/Boot.hx:367
return None
@staticmethod
def getClassFields(c):
# /usr/local/lib/haxe/std/python/Boot.hx:372
if hasattr(c,"_hx_statics"):
# /usr/local/lib/haxe/std/python/Boot.hx:373
x = c._hx_statics
# /usr/local/lib/haxe/std/python/Boot.hx:374
return list(x)
else:
return []
@staticmethod
def unhandleKeywords(name):
# /usr/local/lib/haxe/std/python/Boot.hx:398
if (HxString.substr(name,0,python_Boot.prefixLength) == "_hx_"):
# /usr/local/lib/haxe/std/python/Boot.hx:399
real = HxString.substr(name,python_Boot.prefixLength,None)
# /usr/local/lib/haxe/std/python/Boot.hx:400
if (real in python_Boot.keywords):
return real
# /usr/local/lib/haxe/std/python/Boot.hx:402
return name
python_Boot._hx_class = python_Boot
_hx_classes["python.Boot"] = python_Boot
class python_internal_ArrayImpl:
_hx_class_name = "python.internal.ArrayImpl"
__slots__ = ()
_hx_statics = ["concat", "iterator", "indexOf", "lastIndexOf", "toString", "pop", "push", "unshift", "remove", "shift", "slice", "sort", "splice", "map", "filter", "insert", "reverse", "_get", "_set"]
@staticmethod
def concat(a1,a2):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:35
return (a1 + a2)
@staticmethod
def iterator(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:45
return python_HaxeIterator(x.__iter__())
@staticmethod
def indexOf(a,x,fromIndex = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:50
_hx_len = len(a)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:51
l = (0 if ((fromIndex is None)) else ((_hx_len + fromIndex) if ((fromIndex < 0)) else fromIndex))
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:55
if (l < 0):
l = 0
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:56
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:56
_g1 = l
_g = _hx_len
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:57
if (a[i] == x):
return i
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:59
return -1
@staticmethod
def lastIndexOf(a,x,fromIndex = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:64
_hx_len = len(a)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:65
l = (_hx_len if ((fromIndex is None)) else (((_hx_len + fromIndex) + 1) if ((fromIndex < 0)) else (fromIndex + 1)))
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:69
if (l > _hx_len):
l = _hx_len
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:70
while True:
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:70
l = (l - 1)
tmp = l
if (not ((tmp > -1))):
break
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:71
if (a[l] == x):
return l
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:73
return -1
@staticmethod
def toString(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:84
return (("[" + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in x]))) + "]")
@staticmethod
def pop(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:89
if (len(x) == 0):
return None
else:
return x.pop()
@staticmethod
def push(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:94
x.append(e)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:95
return len(x)
@staticmethod
def unshift(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:100
x.insert(0, e)
@staticmethod
def remove(x,e):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:105
try:
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:106
x.remove(e)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:107
return True
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
e1 = _hx_e1
return False
@staticmethod
def shift(x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:115
if (len(x) == 0):
return None
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:116
return x.pop(0)
@staticmethod
def slice(x,pos,end = None):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:121
return x[pos:end]
@staticmethod
def sort(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:125
x.sort(key= python_lib_Functools.cmp_to_key(f))
@staticmethod
def splice(x,pos,_hx_len):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:130
if (pos < 0):
pos = (len(x) + pos)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:131
if (pos < 0):
pos = 0
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:132
res = x[pos:(pos + _hx_len)]
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:133
del x[pos:(pos + _hx_len)]
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:134
return res
@staticmethod
def map(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:139
return list(map(f,x))
@staticmethod
def filter(x,f):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:144
return list(filter(f,x))
@staticmethod
def insert(a,pos,x):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:149
a.insert(pos, x)
@staticmethod
def reverse(a):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:153
a.reverse()
@staticmethod
def _get(x,idx):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:158
if ((idx > -1) and ((idx < len(x)))):
return x[idx]
else:
return None
@staticmethod
def _set(x,idx,v):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:163
l = len(x)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:164
while (l < idx):
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:165
x.append(None)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:166
l = (l + 1)
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:168
if (l == idx):
x.append(v)
else:
x[idx] = v
# /usr/local/lib/haxe/std/python/internal/ArrayImpl.hx:173
return v
python_internal_ArrayImpl._hx_class = python_internal_ArrayImpl
_hx_classes["python.internal.ArrayImpl"] = python_internal_ArrayImpl
class _HxException(Exception):
_hx_class_name = "_HxException"
__slots__ = ("val",)
_hx_fields = ["val"]
_hx_methods = []
_hx_statics = []
_hx_interfaces = []
_hx_super = Exception
def __init__(self,val):
# /usr/local/lib/haxe/std/python/internal/HxException.hx:28
self.val = None
# /usr/local/lib/haxe/std/python/internal/HxException.hx:31
message = str(val)
# /usr/local/lib/haxe/std/python/internal/HxException.hx:32
super().__init__(message)
# /usr/local/lib/haxe/std/python/internal/HxException.hx:33
self.val = val
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.val = None
_HxException._hx_class = _HxException
_hx_classes["_HxException"] = _HxException
class HxOverrides:
_hx_class_name = "HxOverrides"
__slots__ = ()
_hx_statics = ["iterator", "eq", "stringOrNull"]
@staticmethod
def iterator(x):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:39
if isinstance(x,list):
return python_HaxeIterator(x.__iter__())
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:42
return x.iterator()
@staticmethod
def eq(a,b):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:46
if (isinstance(a,list) or isinstance(b,list)):
return a is b
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:49
return (a == b)
@staticmethod
def stringOrNull(s):
# /usr/local/lib/haxe/std/python/internal/HxOverrides.hx:53
if (s is None):
return "null"
else:
return s
HxOverrides._hx_class = HxOverrides
_hx_classes["HxOverrides"] = HxOverrides
class HxString:
_hx_class_name = "HxString"
__slots__ = ()
_hx_statics = ["split", "charCodeAt", "charAt", "lastIndexOf", "toUpperCase", "toLowerCase", "indexOf", "toString", "substring", "substr"]
@staticmethod
def split(s,d):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:31
if (d == ""):
return list(s)
else:
return s.split(d)
@staticmethod
def charCodeAt(s,index):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:37
if ((((s is None) or ((len(s) == 0))) or ((index < 0))) or ((index >= len(s)))):
return None
else:
return ord(s[index])
@staticmethod
def charAt(s,index):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:43
if ((index < 0) or ((index >= len(s)))):
return ""
else:
return s[index]
@staticmethod
def lastIndexOf(s,_hx_str,startIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:48
if (startIndex is None):
return s.rfind(_hx_str, 0, len(s))
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:52
i = s.rfind(_hx_str, 0, (startIndex + 1))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:53
startLeft = (max(0,((startIndex + 1) - len(_hx_str))) if ((i == -1)) else (i + 1))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:54
check = s.find(_hx_str, startLeft, len(s))
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:55
if ((check > i) and ((check <= startIndex))):
return check
else:
return i
@staticmethod
def toUpperCase(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:66
return s.upper()
@staticmethod
def toLowerCase(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:70
return s.lower()
@staticmethod
def indexOf(s,_hx_str,startIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:74
if (startIndex is None):
return s.find(_hx_str)
else:
return s.find(_hx_str, startIndex)
@staticmethod
def toString(s):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:81
return s
@staticmethod
def substring(s,startIndex,endIndex = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:97
if (startIndex < 0):
startIndex = 0
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:98
if (endIndex is None):
return s[startIndex:]
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:101
if (endIndex < 0):
endIndex = 0
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:102
if (endIndex < startIndex):
return s[endIndex:startIndex]
else:
return s[startIndex:endIndex]
@staticmethod
def substr(s,startIndex,_hx_len = None):
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:113
if (_hx_len is None):
return s[startIndex:]
else:
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:116
if (_hx_len == 0):
return ""
# /usr/local/lib/haxe/std/python/internal/StringImpl.hx:117
return s[startIndex:(startIndex + _hx_len)]
HxString._hx_class = HxString
_hx_classes["HxString"] = HxString
class sys_io_File:
_hx_class_name = "sys.io.File"
__slots__ = ()
_hx_statics = ["getContent", "saveContent"]
@staticmethod
def getContent(path):
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:32
f = python_lib_Builtins.open(path,"r",-1,"utf-8",None,"")
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:33
content = f.read(-1)
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:34
f.close()
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:35
return content
@staticmethod
def saveContent(path,content):
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:39
f = python_lib_Builtins.open(path,"w",-1,"utf-8",None,"")
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:40
f.write(content)
# /usr/local/lib/haxe/std/python/_std/sys/io/File.hx:41
f.close()
sys_io_File._hx_class = sys_io_File
_hx_classes["sys.io.File"] = sys_io_File
class tjson_TJSON:
_hx_class_name = "tjson.TJSON"
__slots__ = ()
_hx_statics = ["OBJECT_REFERENCE_PREFIX", "parse", "encode"]
@staticmethod
def parse(json,fileName = "JSON Data",stringProcessor = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:12
if (fileName is None):
fileName = "JSON Data"
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:13
t = tjson_TJSONParser(json,fileName,stringProcessor)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:14
return t.doParse()
@staticmethod
def encode(obj,style = None,useCache = True):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:22
if (useCache is None):
useCache = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:23
t = tjson_TJSONEncoder(useCache)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:24
return t.doEncode(obj,style)
tjson_TJSON._hx_class = tjson_TJSON
_hx_classes["tjson.TJSON"] = tjson_TJSON
class tjson_TJSONParser:
_hx_class_name = "tjson.TJSONParser"
__slots__ = ("pos", "json", "lastSymbolQuoted", "fileName", "currentLine", "cache", "floatRegex", "intRegex", "strProcessor")
_hx_fields = ["pos", "json", "lastSymbolQuoted", "fileName", "currentLine", "cache", "floatRegex", "intRegex", "strProcessor"]
_hx_methods = ["doParse", "doObject", "doArray", "convertSymbolToProperType", "looksLikeFloat", "looksLikeInt", "getNextSymbol", "defaultStringProcessor"]
def __init__(self,vjson,vfileName = "JSON Data",stringProcessor = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:42
if (vfileName is None):
vfileName = "JSON Data"
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:44
self.json = vjson
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:45
self.fileName = vfileName
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:46
self.currentLine = 1
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:47
self.lastSymbolQuoted = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:48
self.pos = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:49
self.floatRegex = EReg("^-?[0-9]*\\.[0-9]+$","")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:50
self.intRegex = EReg("^-?[0-9]+$","")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:51
self.strProcessor = (self.defaultStringProcessor if ((stringProcessor is None)) else stringProcessor)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:52
self.cache = list()
def doParse(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:56
try:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:58
_g = self.getNextSymbol()
_g1 = _g
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:60
if (_g1 == "["):
return self.doArray()
elif (_g1 == "{"):
return self.doObject()
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:61
s = _g
return self.convertSymbolToProperType(s)
except Exception as _hx_e:
_hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
if isinstance(_hx_e1, str):
e = _hx_e1
raise _HxException(((((HxOverrides.stringOrNull(self.fileName) + " on line ") + Std.string(self.currentLine)) + ": ") + ("null" if e is None else e)))
else:
raise _hx_e
def doObject(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:69
o = _hx_AnonObject({})
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:70
val = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:71
key = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:72
isClassOb = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:73
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:73
_this = self.cache
_this.append(o)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:74
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:75
key = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:76
if ((key == ",") and (not self.lastSymbolQuoted)):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:77
if ((key == "}") and (not self.lastSymbolQuoted)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:79
if (isClassOb and ((Reflect.field(o,"TJ_unserialize") is not None))):
Reflect.field(o,"TJ_unserialize")()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:82
return o
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:85
seperator = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:86
if (seperator != ":"):
raise _HxException((("Expected ':' but got '" + ("null" if seperator is None else seperator)) + "' instead."))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:90
v = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:92
if (key == "_hxcls"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:93
cls = Type.resolveClass(v)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:94
if (cls is None):
raise _HxException(("Invalid class name - " + ("null" if v is None else v)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:95
o = Type.createEmptyInstance(cls)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:96
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:96
_this1 = self.cache
if (len(_this1) != 0):
_this1.pop()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:97
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:97
_this2 = self.cache
_this2.append(o)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:98
isClassOb = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:99
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:103
if ((v == "{") and (not self.lastSymbolQuoted)):
val = self.doObject()
elif ((v == "[") and (not self.lastSymbolQuoted)):
val = self.doArray()
else:
val = self.convertSymbolToProperType(v)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:110
setattr(o,(("_hx_" + key) if ((key in python_Boot.keywords)) else (("_hx_" + key) if (((((len(key) > 2) and ((ord(key[0]) == 95))) and ((ord(key[1]) == 95))) and ((ord(key[(len(key) - 1)]) != 95)))) else key)),val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:112
raise _HxException("Unexpected end of file. Expected '}'")
def doArray(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:117
a = list()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:118
val = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:119
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:120
val = self.getNextSymbol()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:121
if ((val == ",") and (not self.lastSymbolQuoted)):
continue
elif ((val == "]") and (not self.lastSymbolQuoted)):
return a
elif ((val == "{") and (not self.lastSymbolQuoted)):
val = self.doObject()
elif ((val == "[") and (not self.lastSymbolQuoted)):
val = self.doArray()
else:
val = self.convertSymbolToProperType(val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:134
a.append(val)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:136
raise _HxException("Unexpected end of file. Expected ']'")
def convertSymbolToProperType(self,symbol):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:140
if self.lastSymbolQuoted:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:143
if StringTools.startsWith(symbol,tjson_TJSON.OBJECT_REFERENCE_PREFIX):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:144
idx = Std.parseInt(HxString.substr(symbol,len(tjson_TJSON.OBJECT_REFERENCE_PREFIX),None))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:145
return (self.cache[idx] if idx >= 0 and idx < len(self.cache) else None)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:147
return symbol
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:149
if self.looksLikeFloat(symbol):
return Std.parseFloat(symbol)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:152
if self.looksLikeInt(symbol):
return Std.parseInt(symbol)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:155
if (symbol.lower() == "true"):
return True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:158
if (symbol.lower() == "false"):
return False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:161
if (symbol.lower() == "null"):
return None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:165
return symbol
def looksLikeFloat(self,s):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:170
_this = self.floatRegex
_this.matchObj = python_lib_Re.search(_this.pattern,s)
if (_this.matchObj is None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:171
_this1 = self.intRegex
_this1.matchObj = python_lib_Re.search(_this1.pattern,s)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:170
if (_this1.matchObj is not None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:172
intStr = self.intRegex.matchObj.group(0)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:173
if (HxString.charCodeAt(intStr,0) == 45):
return (intStr > "-2147483648")
else:
return (intStr > "2147483647")
else:
return False
else:
return True
def looksLikeInt(self,s):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:182
_this = self.intRegex
_this.matchObj = python_lib_Re.search(_this.pattern,s)
return (_this.matchObj is not None)
def getNextSymbol(self):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:186
self.lastSymbolQuoted = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:187
c = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:188
inQuote = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:189
quoteType = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:190
symbol = ""
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:191
inEscape = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:192
inSymbol = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:193
inLineComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:194
inBlockComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:196
while (self.pos < len(self.json)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:197
_this = self.json
index = self.pos
self.pos = (self.pos + 1)
if ((index < 0) or ((index >= len(_this)))):
c = ""
else:
c = _this[index]
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:198
if ((c == "\n") and (not inSymbol)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:199
_hx_local_0 = self
_hx_local_1 = _hx_local_0.currentLine
_hx_local_0.currentLine = (_hx_local_1 + 1)
_hx_local_1
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:200
if inLineComment:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:201
if ((c == "\n") or ((c == "\r"))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:202
inLineComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:203
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:203
_hx_local_2 = self
_hx_local_3 = _hx_local_2.pos
_hx_local_2.pos = (_hx_local_3 + 1)
_hx_local_3
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:205
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:208
if inBlockComment:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:209
tmp = None
if (c == "*"):
_this1 = self.json
index1 = self.pos
tmp = ((("" if (((index1 < 0) or ((index1 >= len(_this1))))) else _this1[index1])) == "/")
else:
tmp = False
if tmp:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:210
inBlockComment = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:211
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:211
_hx_local_4 = self
_hx_local_5 = _hx_local_4.pos
_hx_local_4.pos = (_hx_local_5 + 1)
_hx_local_5
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:213
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:216
if inQuote:
if inEscape:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:218
inEscape = False
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:219
if ((c == "'") or ((c == "\""))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:220
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:221
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:223
if (c == "t"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:224
symbol = (("null" if symbol is None else symbol) + "\t")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:225
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:227
if (c == "n"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:228
symbol = (("null" if symbol is None else symbol) + "\n")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:229
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:231
if (c == "\\"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:232
symbol = (("null" if symbol is None else symbol) + "\\")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:233
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:235
if (c == "r"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:236
symbol = (("null" if symbol is None else symbol) + "\r")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:237
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:239
if (c == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:240
symbol = (("null" if symbol is None else symbol) + "/")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:241
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:244
if (c == "u"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:245
hexValue = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:247
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:247
_g = 0
while (_g < 4):
i = _g
_g = (_g + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:248
if (self.pos >= len(self.json)):
raise _HxException("Unfinished UTF8 character")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:250
index2 = self.pos
self.pos = (self.pos + 1)
nc = HxString.charCodeAt(self.json,index2)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:251
hexValue = (hexValue << 4)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:252
if ((nc >= 48) and ((nc <= 57))):
hexValue = (hexValue + ((nc - 48)))
elif ((nc >= 65) and ((nc <= 70))):
hexValue = (hexValue + (((10 + nc) - 65)))
elif ((nc >= 97) and ((nc <= 102))):
hexValue = (hexValue + (((10 + nc) - 95)))
else:
raise _HxException("Not a hex digit")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:261
utf = haxe_Utf8()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:262
utf._hx___b = (HxOverrides.stringOrNull(utf._hx___b) + HxOverrides.stringOrNull("".join(map(chr,[hexValue]))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:263
symbol = (("null" if symbol is None else symbol) + HxOverrides.stringOrNull(utf._hx___b))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:265
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:269
raise _HxException((("Invalid escape sequence '\\" + ("null" if c is None else c)) + "'"))
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:271
if (c == "\\"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:272
inEscape = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:273
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:275
if (c == quoteType):
return symbol
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:278
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:279
continue
elif (c == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:286
_this2 = self.json
index3 = self.pos
c2 = ("" if (((index3 < 0) or ((index3 >= len(_this2))))) else _this2[index3])
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:289
if (c2 == "/"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:290
inLineComment = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:291
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:291
_hx_local_19 = self
_hx_local_20 = _hx_local_19.pos
_hx_local_19.pos = (_hx_local_20 + 1)
_hx_local_20
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:292
continue
elif (c2 == "*"):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:297
inBlockComment = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:298
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:298
_hx_local_21 = self
_hx_local_22 = _hx_local_21.pos
_hx_local_21.pos = (_hx_local_22 + 1)
_hx_local_22
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:299
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:305
if inSymbol:
if ((((((((c == " ") or ((c == "\n"))) or ((c == "\r"))) or ((c == "\t"))) or ((c == ","))) or ((c == ":"))) or ((c == "}"))) or ((c == "]"))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:307
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:307
_hx_local_23 = self
_hx_local_24 = _hx_local_23.pos
_hx_local_23.pos = (_hx_local_24 - 1)
_hx_local_24
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:308
return symbol
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:310
symbol = (("null" if symbol is None else symbol) + ("null" if c is None else c))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:311
continue
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:316
if ((((c == " ") or ((c == "\t"))) or ((c == "\n"))) or ((c == "\r"))):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:320
if ((((((c == "{") or ((c == "}"))) or ((c == "["))) or ((c == "]"))) or ((c == ","))) or ((c == ":"))):
return c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:326
if ((c == "'") or ((c == "\""))):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:327
inQuote = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:328
quoteType = c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:329
self.lastSymbolQuoted = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:330
continue
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:332
inSymbol = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:333
symbol = c
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:334
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:340
if inQuote:
raise _HxException((("Unexpected end of data. Expected ( " + ("null" if quoteType is None else quoteType)) + " )"))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:343
return symbol
def defaultStringProcessor(self,_hx_str):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:348
return _hx_str
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.pos = None
_hx_o.json = None
_hx_o.lastSymbolQuoted = None
_hx_o.fileName = None
_hx_o.currentLine = None
_hx_o.cache = None
_hx_o.floatRegex = None
_hx_o.intRegex = None
_hx_o.strProcessor = None
tjson_TJSONParser._hx_class = tjson_TJSONParser
_hx_classes["tjson.TJSONParser"] = tjson_TJSONParser
class tjson_TJSONEncoder:
_hx_class_name = "tjson.TJSONEncoder"
__slots__ = ("cache", "uCache")
_hx_fields = ["cache", "uCache"]
_hx_methods = ["doEncode", "encodeObject", "encodeMap", "encodeIterable", "cacheEncode", "encodeValue"]
def __init__(self,useCache = True):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:358
if (useCache is None):
useCache = True
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:355
self.cache = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:359
self.uCache = useCache
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:360
if self.uCache:
self.cache = list()
def doEncode(self,obj,style = None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:364
if (not Reflect.isObject(obj)):
raise _HxException("Provided object is not an object.")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:367
st = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:368
if Std._hx_is(style,tjson_EncodeStyle):
st = style
elif (style == "fancy"):
st = tjson_FancyStyle()
else:
st = tjson_SimpleStyle()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:375
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:376
if (Std._hx_is(obj,list) or Std._hx_is(obj,List)):
buffer_b.write(Std.string(self.encodeIterable(obj,st,0)))
elif Std._hx_is(obj,haxe_ds_StringMap):
buffer_b.write(Std.string(self.encodeMap(obj,st,0)))
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:382
self.cacheEncode(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:383
buffer_b.write(Std.string(self.encodeObject(obj,st,0)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:385
return buffer_b.getvalue()
def encodeObject(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:389
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:390
buffer_b.write(Std.string(style.beginObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:391
fieldCount = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:392
fields = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:393
dontEncodeFields = None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:394
cls = Type.getClass(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:395
if (cls is not None):
fields = python_Boot.getInstanceFields(cls)
else:
fields = python_Boot.fields(obj)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:402
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:402
_g = Type.typeof(obj)
if (_g.index == 6):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:403
c = _g.params[0]
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:404
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:406
buffer_b.write(Std.string(("\"_hxcls\"" + HxOverrides.stringOrNull(style.keyValueSeperator(depth)))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:407
buffer_b.write(Std.string(self.encodeValue(Type.getClassName(c),style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:409
if (Reflect.field(obj,"TJ_noEncode") is not None):
dontEncodeFields = Reflect.field(obj,"TJ_noEncode")()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:415
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:415
_g1 = 0
while (_g1 < len(fields)):
field = (fields[_g1] if _g1 >= 0 and _g1 < len(fields) else None)
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:416
if ((dontEncodeFields is not None) and ((python_internal_ArrayImpl.indexOf(dontEncodeFields,field,None) >= 0))):
continue
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:417
value = Reflect.field(obj,field)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:418
vStr = self.encodeValue(value,style,depth)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:419
if (vStr is not None):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:420
tmp1 = fieldCount
fieldCount = (fieldCount + 1)
if (tmp1 > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:422
buffer_b.write(Std.string((((("\"" + ("null" if field is None else field)) + "\"") + HxOverrides.stringOrNull(style.keyValueSeperator(depth))) + Std.string(vStr))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:429
buffer_b.write(Std.string(style.endObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:430
return buffer_b.getvalue()
def encodeMap(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:435
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:436
buffer_b.write(Std.string(style.beginObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:437
fieldCount = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:438
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:438
field = obj.keys()
while field.hasNext():
field1 = field.next()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:439
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:441
value = obj.get(field1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:442
buffer_b.write(Std.string(((("\"" + ("null" if field1 is None else field1)) + "\"") + HxOverrides.stringOrNull(style.keyValueSeperator(depth)))))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:443
buffer_b.write(Std.string(self.encodeValue(value,style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:445
buffer_b.write(Std.string(style.endObject(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:446
return buffer_b.getvalue()
def encodeIterable(self,obj,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:451
buffer_b = python_lib_io_StringIO()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:452
buffer_b.write(Std.string(style.beginArray(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:453
fieldCount = 0
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:454
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:454
value = HxOverrides.iterator(obj)
while value.hasNext():
value1 = value.next()
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:455
tmp = fieldCount
fieldCount = (fieldCount + 1)
if (tmp > 0):
buffer_b.write(Std.string(style.entrySeperator(depth)))
else:
buffer_b.write(Std.string(style.firstEntry(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:457
buffer_b.write(Std.string(self.encodeValue(value1,style,depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:460
buffer_b.write(Std.string(style.endArray(depth)))
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:461
return buffer_b.getvalue()
def cacheEncode(self,value):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:465
if (not self.uCache):
return None
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:467
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:467
_g1 = 0
_g = len(self.cache)
while (_g1 < _g):
c = _g1
_g1 = (_g1 + 1)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:468
if HxOverrides.eq((self.cache[c] if c >= 0 and c < len(self.cache) else None),value):
return ((("\"" + HxOverrides.stringOrNull(tjson_TJSON.OBJECT_REFERENCE_PREFIX)) + Std.string(c)) + "\"")
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:472
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:472
_this = self.cache
_this.append(value)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:473
return None
def encodeValue(self,value,style,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:477
if (Std._hx_is(value,Int) or Std._hx_is(value,Float)):
return value
elif (Std._hx_is(value,list) or Std._hx_is(value,List)):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:481
v = value
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:482
return self.encodeIterable(v,style,(depth + 1))
elif Std._hx_is(value,List):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:485
v1 = value
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:486
return self.encodeIterable(v1,style,(depth + 1))
elif Std._hx_is(value,haxe_ds_StringMap):
return self.encodeMap(value,style,(depth + 1))
elif Std._hx_is(value,str):
return (("\"" + HxOverrides.stringOrNull(StringTools.replace(StringTools.replace(StringTools.replace(StringTools.replace(Std.string(value),"\\","\\\\"),"\n","\\n"),"\r","\\r"),"\"","\\\""))) + "\"")
elif Std._hx_is(value,Bool):
return value
elif Reflect.isObject(value):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:500
ret = self.cacheEncode(value)
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:501
if (ret is not None):
return ret
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:502
return self.encodeObject(value,style,(depth + 1))
elif (value is None):
return "null"
else:
return None
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.cache = None
_hx_o.uCache = None
tjson_TJSONEncoder._hx_class = tjson_TJSONEncoder
_hx_classes["tjson.TJSONEncoder"] = tjson_TJSONEncoder
class tjson_EncodeStyle:
_hx_class_name = "tjson.EncodeStyle"
__slots__ = ()
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator"]
tjson_EncodeStyle._hx_class = tjson_EncodeStyle
_hx_classes["tjson.EncodeStyle"] = tjson_EncodeStyle
class tjson_SimpleStyle:
_hx_class_name = "tjson.SimpleStyle"
__slots__ = ()
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator"]
_hx_interfaces = [tjson_EncodeStyle]
def __init__(self):
pass
def beginObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:532
return "{"
def endObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:535
return "}"
def beginArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:538
return "["
def endArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:541
return "]"
def firstEntry(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:544
return ""
def entrySeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:547
return ","
def keyValueSeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:550
return ":"
@staticmethod
def _hx_empty_init(_hx_o): pass
tjson_SimpleStyle._hx_class = tjson_SimpleStyle
_hx_classes["tjson.SimpleStyle"] = tjson_SimpleStyle
class tjson_FancyStyle:
_hx_class_name = "tjson.FancyStyle"
__slots__ = ("tab", "charTimesNCache")
_hx_fields = ["tab", "charTimesNCache"]
_hx_methods = ["beginObject", "endObject", "beginArray", "endArray", "firstEntry", "entrySeperator", "keyValueSeperator", "charTimesN"]
_hx_interfaces = [tjson_EncodeStyle]
def __init__(self,tab = " "):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:558
if (tab is None):
tab = " "
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:559
self.tab = tab
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:560
self.charTimesNCache = [""]
def beginObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:563
return "{\n"
def endObject(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:566
return (("\n" + HxOverrides.stringOrNull(self.charTimesN(depth))) + "}")
def beginArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:569
return "[\n"
def endArray(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:572
return (("\n" + HxOverrides.stringOrNull(self.charTimesN(depth))) + "]")
def firstEntry(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:575
return (HxOverrides.stringOrNull(self.charTimesN((depth + 1))) + " ")
def entrySeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:578
return (("\n" + HxOverrides.stringOrNull(self.charTimesN((depth + 1)))) + ",")
def keyValueSeperator(self,depth):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:581
return " : "
def charTimesN(self,n):
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:585
if (n < len(self.charTimesNCache)):
return (self.charTimesNCache[n] if n >= 0 and n < len(self.charTimesNCache) else None)
else:
# /usr/local/lib/haxe/lib/tjson/1,4,0/tjson/TJSON.hx:588
tmp = self.charTimesN((n - 1))
def _hx_local_1():
def _hx_local_0():
python_internal_ArrayImpl._set(self.charTimesNCache, n, (("null" if tmp is None else tmp) + HxOverrides.stringOrNull(self.tab)))
return (self.charTimesNCache[n] if n >= 0 and n < len(self.charTimesNCache) else None)
return _hx_local_0()
return _hx_local_1()
@staticmethod
def _hx_empty_init(_hx_o):
_hx_o.tab = None
_hx_o.charTimesNCache = None
tjson_FancyStyle._hx_class = tjson_FancyStyle
_hx_classes["tjson.FancyStyle"] = tjson_FancyStyle
# /usr/local/lib/haxe/std/python/_std/Math.hx:135
Math.NEGATIVE_INFINITY = float("-inf")
# /usr/local/lib/haxe/std/python/_std/Math.hx:136
Math.POSITIVE_INFINITY = float("inf")
# /usr/local/lib/haxe/std/python/_std/Math.hx:137
Math.NaN = float("nan")
# /usr/local/lib/haxe/std/python/_std/Math.hx:138
Math.PI = python_lib_Math.pi
python_Boot.keywords = set(["and", "del", "from", "not", "with", "as", "elif", "global", "or", "yield", "assert", "else", "if", "pass", "None", "break", "except", "import", "raise", "True", "class", "exec", "in", "return", "False", "continue", "finally", "is", "try", "def", "for", "lambda", "while"])
python_Boot.prefixLength = len("_hx_")
tjson_TJSON.OBJECT_REFERENCE_PREFIX = "@~obRef#"
Main.main() | mit | -4,816,244,440,017,755,000 | 40.189776 | 301 | 0.522814 | false | 3.026129 | false | false | false |
jgeskens/django | tests/transactions_regress/tests.py | 4 | 14925 | from __future__ import absolute_import
from django.db import (connection, connections, transaction, DEFAULT_DB_ALIAS, DatabaseError,
IntegrityError)
from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
from django.test import TransactionTestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils.unittest import skipIf, skipUnless
from transactions.tests import IgnorePendingDeprecationWarningsMixin
from .models import Mod, M2mA, M2mB, SubMod
class ModelInheritanceTests(TransactionTestCase):
def test_save(self):
# First, create a SubMod, then try to save another with conflicting
# cnt field. The problem was that transactions were committed after
        # every parent save when not in a managed transaction. As the cnt
# conflict is in the second model, we can check if the first save
# was committed or not.
SubMod(fld=1, cnt=1).save()
# We should have committed the transaction for the above - assert this.
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
try:
SubMod(fld=2, cnt=1).save()
except IntegrityError:
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
self.assertEqual(Mod.objects.count(), 1)
class TestTransactionClosing(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
Tests to make sure that transactions are properly closed
when they should be, and aren't left pending after operations
have been performed in them. Refs #9964.
"""
def test_raw_committed_on_success(self):
"""
Make sure a transaction consisting of raw SQL execution gets
committed by the commit_on_success decorator.
"""
@commit_on_success
def raw_sql():
"Write a record using raw sql under a commit_on_success decorator"
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (fld) values (18)")
raw_sql()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
# Check that the record is in the DB
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 18)
def test_commit_manually_enforced(self):
"""
        Make sure that under commit_manually, even "read-only" transactions require closure
(commit or rollback), and a transaction left pending is treated as an error.
"""
@commit_manually
        def non_committer():
"Execute a managed transaction with read-only operations and fail to commit"
Mod.objects.count()
        self.assertRaises(TransactionManagementError, non_committer)
def test_commit_manually_commit_ok(self):
"""
Test that under commit_manually, a committed transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def committer():
"""
Perform a database query, then commit the transaction
"""
Mod.objects.count()
transaction.commit()
try:
committer()
except TransactionManagementError:
self.fail("Commit did not clear the transaction state")
def test_commit_manually_rollback_ok(self):
"""
Test that under commit_manually, a rolled-back transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def roller_back():
"""
Perform a database query, then rollback the transaction
"""
Mod.objects.count()
transaction.rollback()
try:
roller_back()
except TransactionManagementError:
self.fail("Rollback did not clear the transaction state")
def test_commit_manually_enforced_after_commit(self):
"""
Test that under commit_manually, if a transaction is committed and an operation is
performed later, we still require the new transaction to be closed
"""
@commit_manually
def fake_committer():
"Query, commit, then query again, leaving with a pending transaction"
Mod.objects.count()
transaction.commit()
Mod.objects.count()
self.assertRaises(TransactionManagementError, fake_committer)
@skipUnlessDBFeature('supports_transactions')
def test_reuse_cursor_reference(self):
"""
Make sure transaction closure is enforced even when the queries are performed
through a single cursor reference retrieved in the beginning
(this is to show why it is wrong to set the transaction dirty only when a cursor
is fetched from the connection).
"""
@commit_on_success
def reuse_cursor_ref():
"""
            Fetch a cursor, perform a query, rollback to close the transaction,
then write a record (in a new transaction) using the same cursor object
(reference). All this under commit_on_success, so the second insert should
be committed.
"""
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
transaction.rollback()
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
reuse_cursor_ref()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 2)
def test_failing_query_transaction_closed(self):
"""
Make sure that under commit_on_success, a transaction is rolled back even if
the first database-modifying operation fails.
This is prompted by http://code.djangoproject.com/ticket/6669 (and based on sample
code posted there to exemplify the problem): Before Django 1.3,
transactions were only marked "dirty" by the save() function after it successfully
wrote the object to the database.
"""
from django.contrib.auth.models import User
@transaction.commit_on_success
def create_system_user():
"Create a user in a transaction"
user = User.objects.create_user(username='system', password='iamr00t',
email='[email protected]')
# Redundant, just makes sure the user id was read back from DB
Mod.objects.create(fld=user.pk)
# Create a user
create_system_user()
with self.assertRaises(DatabaseError):
# The second call to create_system_user should fail for violating
# a unique constraint (it's trying to re-create the same user)
create_system_user()
# Try to read the database. If the last transaction was indeed closed,
# this should cause no problems
User.objects.all()[0]
@override_settings(DEBUG=True)
def test_failing_query_transaction_closed_debug(self):
"""
Regression for #6669. Same test as above, with DEBUG=True.
"""
self.test_failing_query_transaction_closed()
@skipIf(connection.vendor == 'sqlite' and
(connection.settings_dict['NAME'] == ':memory:' or
not connection.settings_dict['NAME']),
'Test uses multiple connections, but in-memory sqlite does not support this')
class TestNewConnection(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
Check that new connections don't have special behaviour.
"""
def setUp(self):
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
connections[DEFAULT_DB_ALIAS].close()
finally:
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_commit(self):
"""
Users are allowed to commit and rollback connections.
"""
connection.set_autocommit(False)
try:
# The starting value is False, not None.
self.assertIs(connection._dirty, False)
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.commit()
self.assertFalse(connection.is_dirty())
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.rollback()
self.assertFalse(connection.is_dirty())
finally:
connection.set_autocommit(True)
def test_enter_exit_management(self):
orig_dirty = connection._dirty
connection.enter_transaction_management()
connection.leave_transaction_management()
self.assertEqual(orig_dirty, connection._dirty)
@skipUnless(connection.vendor == 'postgresql',
"This test only valid for PostgreSQL")
class TestPostgresAutocommitAndIsolation(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
"""
Tests to make sure psycopg2's autocommit mode and isolation level
is restored after entering and leaving transaction management.
Refs #16047, #18130.
"""
def setUp(self):
from psycopg2.extensions import (ISOLATION_LEVEL_AUTOCOMMIT,
ISOLATION_LEVEL_SERIALIZABLE,
TRANSACTION_STATUS_IDLE)
self._autocommit = ISOLATION_LEVEL_AUTOCOMMIT
self._serializable = ISOLATION_LEVEL_SERIALIZABLE
self._idle = TRANSACTION_STATUS_IDLE
# We want a clean backend with autocommit = True, so
# first we need to do a bit of work to have that.
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
opts = settings['OPTIONS'].copy()
opts['isolation_level'] = ISOLATION_LEVEL_SERIALIZABLE
settings['OPTIONS'] = opts
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
finally:
connections[DEFAULT_DB_ALIAS].close()
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_initial_autocommit_state(self):
# Autocommit is activated when the connection is created.
connection.cursor().close()
self.assertTrue(connection.autocommit)
def test_transaction_management(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_transaction_stacking(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_enter_autocommit(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
list(Mod.objects.all())
self.assertTrue(transaction.is_dirty())
# Enter autocommit mode again.
transaction.enter_transaction_management(False)
self.assertFalse(transaction.is_dirty())
self.assertEqual(
connection.connection.get_transaction_status(),
self._idle)
list(Mod.objects.all())
self.assertFalse(transaction.is_dirty())
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
class TestManyToManyAddTransaction(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
def test_manyrelated_add_commit(self):
"Test for https://code.djangoproject.com/ticket/16818"
a = M2mA.objects.create()
b = M2mB.objects.create(fld=10)
a.others.add(b)
# We're in a TransactionTestCase and have not changed transaction
# behavior from default of "autocommit", so this rollback should not
        # actually do anything. If it does in fact undo our add, that's a bug:
        # the bulk insert was not auto-committed.
transaction.rollback()
self.assertEqual(a.others.count(), 1)
class SavepointTest(IgnorePendingDeprecationWarningsMixin, TransactionTestCase):
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_commit(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=10)
transaction.savepoint_commit(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 10)
work()
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipIf(connection.vendor == 'mysql' and
connection.features._mysql_storage_engine == 'MyISAM',
"MyISAM MySQL storage engine doesn't support savepoints")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_rollback(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=20)
transaction.savepoint_rollback(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 1)
work()
| bsd-3-clause | 6,142,881,798,479,863,000 | 39.667575 | 101 | 0.646767 | false | 4.596551 | true | false | false |
ReachingOut/unisubs | apps/socialauth/lib/oauthtwitter2.py | 6 | 4688 | import httplib
import urllib2
import urllib
import time
import oauth.oauth as oauth
from django.conf import settings
CALLBACK_URL = 'http://example.com/newaccounts/login/done/'
REQUEST_TOKEN_URL = 'https://twitter.com/oauth/request_token'
AUTHORIZATION_URL = 'http://twitter.com/oauth/authorize'
ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
#CONSUMER_KEY = settings.TWITTER_CONSUMER_KEY
#CONSUMER_SECRET = settings.TWITTER_CONSUMER_SECRET
class TwitterOAuthClient(oauth.OAuthClient):
def __init__(self, consumer_key, consumer_secret, request_token_url=REQUEST_TOKEN_URL, access_token_url=ACCESS_TOKEN_URL, authorization_url=AUTHORIZATION_URL):
self.consumer_secret = consumer_secret
self.consumer_key = consumer_key
self.consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
self.signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()
self.request_token_url = request_token_url
self.access_token_url = access_token_url
self.authorization_url = authorization_url
def fetch_request_token(self, callback_url=None):
params = {}
if callback_url is not None:
params = { 'oauth_callback': callback_url }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, http_url=self.request_token_url, parameters=params)
oauth_request.sign_request(self.signature_method, self.consumer, None)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.request_token_url, data)
response = urllib2.urlopen(full_url)
return oauth.OAuthToken.from_string(response.read())
def authorize_token_url(self, token, callback_url=None):
oauth_request = oauth.OAuthRequest.from_token_and_callback(token=token,\
callback=callback_url, http_url=self.authorization_url)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.authorization_url, data)
return full_url
def fetch_access_token(self, token, oauth_verifier=None):
params = {}
if oauth_verifier is not None:
params = { 'oauth_verifier': oauth_verifier }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=token, http_url=self.access_token_url, parameters=params)
oauth_request.sign_request(self.signature_method, self.consumer, token)
params = oauth_request.parameters
data = urllib.urlencode(params)
full_url='%s?%s'%(self.access_token_url, data)
response = urllib2.urlopen(full_url)
return oauth.OAuthToken.from_string(response.read())
def access_resource(self, oauth_request):
# via post body
# -> some protected resources
headers = {'Content-Type' :'application/x-www-form-urlencoded'}
self.connection.request('POST', RESOURCE_URL, body=oauth_request.to_postdata(), headers=headers)
response = self.connection.getresponse()
return response.read()
def run_example():
# setup
print '** OAuth Python Library Example **'
    client = TwitterOAuthClient(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET)
pause()
# get request token
print '* Obtain a request token ...'
pause()
token = client.fetch_request_token()
print 'GOT'
print 'key: %s' % str(token.key)
print 'secret: %s' % str(token.secret)
pause()
print '* Authorize the request token ...'
pause()
# this will actually occur only on some callback
url = client.authorize_token_url(token)
print 'GOT'
print url
pause()
# get access token
print '* Obtain an access token ...'
pause()
access_token = client.fetch_access_token(token)
print 'GOT'
print 'key: %s' % str(access_token.key)
print 'secret: %s' % str(access_token.secret)
pause()
# access some protected resources
print '* Access protected resources ...'
pause()
parameters = {'file': 'vacation.jpg', 'size': 'original', 'oauth_callback': CALLBACK_URL} # resource specific params
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(client.consumer, token=access_token, http_method='POST', http_url=RESOURCE_URL, parameters=parameters)
    oauth_request.sign_request(client.signature_method, client.consumer, access_token)
print 'REQUEST (via post body)'
print 'parameters: %s' % str(oauth_request.parameters)
pause()
params = client.access_resource(oauth_request)
print 'GOT'
print 'non-oauth parameters: %s' % params
pause()
def pause():
print ''
time.sleep(1)
if __name__ == '__main__':
run_example()
print 'Done.'
| agpl-3.0 | -4,426,484,930,953,053,700 | 35.913386 | 163 | 0.669582 | false | 3.75641 | false | false | false |
ammarkhann/FinalSeniorCode | lib/python2.7/site-packages/IPython/core/tests/test_profile.py | 13 | 5499 | # coding: utf-8
"""Tests for profile-related functions.
Currently only the startup-dir functionality is tested, but more tests should
be added for:
* ipython profile create
* ipython profile list
* ipython profile create --parallel
* security dir permissions
Authors
-------
* MinRK
"""
from __future__ import absolute_import
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import shutil
import sys
import tempfile
from unittest import TestCase
import nose.tools as nt
from IPython.core.profileapp import list_profiles_in, list_bundled_profiles
from IPython.core.profiledir import ProfileDir
from IPython.testing import decorators as dec
from IPython.testing import tools as tt
from IPython.utils import py3compat
from IPython.utils.process import getoutput
from IPython.utils.tempdir import TemporaryDirectory
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
TMP_TEST_DIR = tempfile.mkdtemp()
HOME_TEST_DIR = os.path.join(TMP_TEST_DIR, "home_test_dir")
IP_TEST_DIR = os.path.join(HOME_TEST_DIR,'.ipython')
#
# Setup/teardown functions/decorators
#
def setup():
"""Setup test environment for the module:
- Adds dummy home dir tree
"""
# Do not mask exceptions here. In particular, catching WindowsError is a
# problem because that exception is only defined on Windows...
os.makedirs(IP_TEST_DIR)
def teardown():
"""Teardown test environment for the module:
- Remove dummy home dir tree
"""
# Note: we remove the parent test dir, which is the root of all test
# subdirs we may have created. Use shutil instead of os.removedirs, so
# that non-empty directories are all recursively removed.
shutil.rmtree(TMP_TEST_DIR)
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def win32_without_pywin32():
if sys.platform == 'win32':
try:
import pywin32
except ImportError:
return True
return False
class ProfileStartupTest(TestCase):
def setUp(self):
# create profile dir
self.pd = ProfileDir.create_profile_dir_by_name(IP_TEST_DIR, 'test')
self.options = ['--ipython-dir', IP_TEST_DIR, '--profile', 'test']
self.fname = os.path.join(TMP_TEST_DIR, 'test.py')
def tearDown(self):
# We must remove this profile right away so its presence doesn't
# confuse other tests.
shutil.rmtree(self.pd.location)
def init(self, startup_file, startup, test):
# write startup python file
with open(os.path.join(self.pd.startup_dir, startup_file), 'w') as f:
f.write(startup)
# write simple test file, to check that the startup file was run
with open(self.fname, 'w') as f:
f.write(py3compat.doctest_refactor_print(test))
def validate(self, output):
tt.ipexec_validate(self.fname, output, '', options=self.options)
@dec.skipif(win32_without_pywin32(), "Test requires pywin32 on Windows")
def test_startup_py(self):
self.init('00-start.py', 'zzz=123\n',
py3compat.doctest_refactor_print('print zzz\n'))
self.validate('123')
@dec.skipif(win32_without_pywin32(), "Test requires pywin32 on Windows")
def test_startup_ipy(self):
self.init('00-start.ipy', '%xmode plain\n', '')
self.validate('Exception reporting mode: Plain')
def test_list_profiles_in():
# No need to remove these directories and files, as they will get nuked in
# the module-level teardown.
td = tempfile.mkdtemp(dir=TMP_TEST_DIR)
td = py3compat.str_to_unicode(td)
for name in ('profile_foo', 'profile_hello', 'not_a_profile'):
os.mkdir(os.path.join(td, name))
if dec.unicode_paths:
os.mkdir(os.path.join(td, u'profile_ünicode'))
with open(os.path.join(td, 'profile_file'), 'w') as f:
f.write("I am not a profile directory")
profiles = list_profiles_in(td)
# unicode normalization can turn u'ünicode' into u'u\0308nicode',
# so only check for *nicode, and that creating a ProfileDir from the
# name remains valid
found_unicode = False
for p in list(profiles):
if p.endswith('nicode'):
pd = ProfileDir.find_profile_dir_by_name(td, p)
profiles.remove(p)
found_unicode = True
break
if dec.unicode_paths:
nt.assert_true(found_unicode)
nt.assert_equal(set(profiles), {'foo', 'hello'})
def test_list_bundled_profiles():
# This variable will need to be updated when a new profile gets bundled
bundled = sorted(list_bundled_profiles())
nt.assert_equal(bundled, [])
def test_profile_create_ipython_dir():
"""ipython profile create respects --ipython-dir"""
with TemporaryDirectory() as td:
getoutput([sys.executable, '-m', 'IPython', 'profile', 'create',
'foo', '--ipython-dir=%s' % td])
profile_dir = os.path.join(td, 'profile_foo')
assert os.path.exists(profile_dir)
ipython_config = os.path.join(profile_dir, 'ipython_config.py')
assert os.path.exists(ipython_config)
| mit | 520,207,437,187,262,140 | 32.321212 | 78 | 0.601419 | false | 3.951833 | true | false | false |
Bradfield/algos | book/graphs/dijkstras_algorithm.py | 2 | 10638 | # -*- coding: utf-8 -*-
"""
When you surf the web, send an email, or log in to a laboratory computer
from another location on campus a lot of work is going on behind the
scenes to get the information on your computer transferred to another
computer. The in-depth study of how information flows from one computer
to another over the Internet is the primary topic for a class in
computer networking. However, we will talk about how the Internet works
just enough to understand another very important graph algorithm.

The diagram above shows you a high-level overview of how communication
on the Internet works. When you use your browser to request a web page
from a server, the request must travel over your local area network and
out onto the Internet through a router. The request travels over the
Internet and eventually arrives at a router for the local area network
where the server is located. The web page you requested then travels
back through the same routers to get to your browser. Inside the cloud
labeled “Internet” in the diagram are additional routers. The job of all
of these routers is to work together to get your information from place
to place. You can see there are many routers for yourself if your
computer supports the `traceroute` command. The text below shows the
output of running `traceroute google.com` on the author’s computer,
which illustrates that there are 12 routers between him and the Google
server responding to the request.
```
traceroute to google.com (216.58.192.46), 64 hops max, 52 byte packets
1 192.168.0.1 (192.168.0.1) 3.420 ms 1.133 ms 0.865 ms
2 gw-mosca207.static.monkeybrains.net (199.188.195.1) 14.678 ms 9.725 ms 6.752 ms
3 mosca.mosca-activspace.core.monkeybrains.net (172.17.18.58) 8.919 ms 8.277 ms 7.804 ms
4 lemon.lemon-mosca-10gb.core.monkeybrains.net (208.69.43.185) 6.724 ms 7.369 ms 6.701 ms
5 38.88.216.117 (38.88.216.117) 8.420 ms 11.860 ms 6.813 ms
6 be2682.ccr22.sfo01.atlas.cogentco.com (154.54.6.169) 7.392 ms 7.250 ms 8.241 ms
7 be2164.ccr21.sjc01.atlas.cogentco.com (154.54.28.34) 8.710 ms 8.301 ms 8.501 ms
8 be2000.ccr21.sjc03.atlas.cogentco.com (154.54.6.106) 9.072 ms
be2047.ccr21.sjc03.atlas.cogentco.com (154.54.5.114) 11.034 ms
be2000.ccr21.sjc03.atlas.cogentco.com (154.54.6.106) 10.243 ms
9 38.88.224.6 (38.88.224.6) 8.420 ms 10.637 ms 8.855 ms
10 209.85.249.5 (209.85.249.5) 9.142 ms 17.734 ms 12.211 ms
11 74.125.37.43 (74.125.37.43) 8.792 ms 9.290 ms 8.893 ms
12 nuq04s30-in-f14.1e100.net (216.58.192.46) 8.759 ms 8.705 ms 8.502 ms
```
Each router on the Internet is connected to one or more other routers.
So if you run the `traceroute` command at different times of the day,
you are likely to see that your information flows through different
routers at different times. This is because there is a cost associated
with each connection between a pair of routers that depends on the
volume of traffic, the time of day, and many other factors. By this time
it will not surprise you to learn that we can represent the network of
routers as a graph with weighted edges.

Above we show a small example of a weighted graph that represents the
interconnection of routers in the Internet. The problem that we want to
solve is to find the path with the smallest total weight along which to
route any given message. This problem should sound familiar because it
is similar to the problem we solved using a breadth first search, except
that here we are concerned with the total weight of the path rather than
the number of hops in the path. It should be noted that if all the
weights are equal, the problem is the same.
Dijkstra’s Algorithm
---
The algorithm we are going to use to determine the shortest path is
called “Dijkstra’s algorithm.” Dijkstra’s algorithm is an iterative
algorithm that provides us with the shortest path from one particular
starting node to all other nodes in the graph. Again this is similar to
the results of a breadth first search.
To keep track of the total cost from the start node to each destination
we will make use of a `distances` dictionary which we will initialize to
`0` for the start vertex, and `infinity` for the other vertices. Our
algorithm will update these values until they represent the smallest
weight path from the start to the vertex in question, at which point we
will return the `distances` dictionary.
The algorithm iterates once for every vertex in the graph; however, the
order that we iterate over the vertices is controlled by a priority
queue. The value that is used to determine the order of the objects in
the priority queue is the distance from our starting vertex. By using a
priority queue, we ensure that as we explore one vertex after another,
we are always exploring the one with the smallest distance.
The code for Dijkstra’s algorithm is shown below.
"""
import heapq
def calculate_distances(graph, starting_vertex):
distances = {vertex: float('infinity') for vertex in graph}
distances[starting_vertex] = 0
pq = [(0, starting_vertex)]
while len(pq) > 0:
current_distance, current_vertex = heapq.heappop(pq)
# Nodes can get added to the priority queue multiple times. We only
# process a vertex the first time we remove it from the priority queue.
if current_distance > distances[current_vertex]:
continue
for neighbor, weight in graph[current_vertex].items():
distance = current_distance + weight
# Only consider this new path if it's better than any path we've
# already found.
if distance < distances[neighbor]:
distances[neighbor] = distance
heapq.heappush(pq, (distance, neighbor))
return distances
example_graph = {
'U': {'V': 2, 'W': 5, 'X': 1},
'V': {'U': 2, 'X': 2, 'W': 3},
'W': {'V': 3, 'U': 5, 'X': 3, 'Y': 1, 'Z': 5},
'X': {'U': 1, 'V': 2, 'W': 3, 'Y': 1},
'Y': {'X': 1, 'W': 1, 'Z': 1},
'Z': {'W': 5, 'Y': 1},
}
print(calculate_distances(example_graph, 'X'))
# => {'U': 1, 'W': 2, 'V': 2, 'Y': 1, 'X': 0, 'Z': 2}
"""
Dijkstra’s algorithm uses a priority queue, which we introduced in the
trees chapter and which we achieve here using Python’s `heapq` module.
The entries in our priority queue are tuples of `(distance, vertex)`
which allows us to maintain a queue of vertices sorted by distance.
When the distance to a vertex that is already in the queue is reduced,
we wish to update the distance and thereby give it a different priority.
We accomplish this by just adding another entry to the priority queue for
the same vertex. (We also include a check after removing an entry from
the priority queue, in order to make sure that we only process each
vertex once.)
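To see why `(distance, vertex)` tuples work as priority queue entries,
recall that Python compares tuples element by element, so `heapq` always
keeps the entry with the smallest distance at the front. A tiny
illustrative sketch, separate from the algorithm above:
```
import heapq
pq = []
heapq.heappush(pq, (5, 'W'))
heapq.heappush(pq, (2, 'V'))
heapq.heappush(pq, (1, 'X'))
print(heapq.heappop(pq))  # (1, 'X') -- the smallest distance comes out first
```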
Let’s walk through an application of Dijkstra’s algorithm one vertex at
a time using the following sequence of diagrams as our guide. We begin
with the vertex $$u$$. The three vertices adjacent to $$u$$ are $$v,w,$$ and
$$x$$. Since the initial distances to $$v,w,$$ and $$x$$ are all initialized
to `infinity`, the new costs to get to them through the start node are
all their direct costs. So we update the costs to each of these three
nodes. The state of the algorithm is:

In the next iteration of the `while` loop we examine the vertices that
are adjacent to $$u$$. The vertex $$x$$ is next because it has the lowest
overall cost and therefore will be the first entry removed from the
priority queue. At $$x$$ we look at its neighbors $$u,v,w$$ and $$y$$. For
each neighboring vertex we check to see if the distance to that vertex
through $$x$$ is smaller than the previously known distance. Obviously
this is the case for $$y$$ since its distance was `infinity`. It is not
the case for $$u$$ or $$v$$ since their distances are 0 and 2 respectively.
However, we now learn that the distance to $$w$$ is smaller if we go
through $$x$$ than from $$u$$ directly to $$w$$. Since that is the case we
update $$w$$ with a new distance and add another entry to the priority
queue. The state of the algorithm is now:

The next step is to look at the vertices neighboring $$v$$ (below). This
step results in no changes to the graph, so we move on to node $$y$$.

At node $$y$$ (below) we discover that it is cheaper to get to both
$$w$$ and $$z$$, so we adjust the distances accordingly.

Finally we check nodes $$w$$ and $$z$$. However, no additional changes
are found and so the priority queue is empty and Dijkstra’s algorithm
exits.


It is important to note that Dijkstra’s algorithm works only when the
weights are all positive. You should convince yourself that if you
introduced a negative weight on one of the edges to the graph that the
algorithm would never exit.
We will note that to route messages through the Internet, other
algorithms are used for finding the shortest path. One of the problems
with using Dijkstra’s algorithm on the Internet is that you must have a
complete representation of the graph in order for the algorithm to run.
The implication of this is that every router has a complete map of all
the routers in the Internet. In practice this is not the case and other
variations of the algorithm allow each router to discover the graph as
they go. One such algorithm that you may want to read about is called
the “distance vector” routing algorithm.
Analysis of Dijkstra’s Algorithm
---
We will now consider the running time of Dijkstra’s algorithm.
Building the `distances` dictionary takes $$O(V)$$ time since we add
every vertex in the graph to the dictionary.
The `while` loop is executed once for every entry that gets added to
the priority queue. An entry can only be added when we explore an edge,
so there are at most $$O(E)$$ iterations of the `while` loop.
The `for` loop is executed at most once for every vertex, since the
`current_distance > distances[current_vertex]` check ensures that we
only process a vertex once. The `for` loop iterates over outgoing
edges, so among all iterations of the `while` loop, the body of the
`for` loop executes at most $$O(E)$$ times.
Finally, if we consider that each priority queue operation (adding or
removing an entry) is $$O(\log E)$$, we conclude that the total running
time is $$O(V + E \log E)$$.
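Since a simple graph has at most $$V^2$$ edges, $$\log E$$ is $$O(\log V)$$, so
this bound is equivalent to the more familiar $$O((V + E) \log V)$$ running time
for Dijkstra's algorithm with a binary heap.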
"""
| cc0-1.0 | -8,986,169,047,754,631,000 | 45.893805 | 94 | 0.736554 | false | 3.503471 | false | false | false |
xiaokaizh/SmartDoorControl | ShockTest.py | 1 | 1145 | #!/user/bin/env python
# coding=utf-8
__author__ = 'xzhao'
import RPi.GPIO as GPIO
import time
import threading
GPIO.setmode(GPIO.BOARD)
GPIO_shock = 12  # vibration sensor GPIO pin
shock = 0
shocki = 0
shockSum = 0
previousShock = 0  # previous shock state
GPIO.setup(GPIO_shock, GPIO.IN)
def signalCollect():
while True:
signalshock()
time.sleep(0.05)
# Collect the vibration sensor signal
def signalshock():
global shocki
global shockSum
global shock
global GPIO_shock
if shocki != 5:
GPIO.setmode(GPIO.BOARD)
GPIO.setup(GPIO_shock, GPIO.IN)
shockSum = shockSum+GPIO.input(GPIO_shock)
shocki += 1
else:
shocki = 0
if shockSum >=2:
shock = 1
else:
shock = 0
global previousShock
if shock != previousShock:
previousShock = shock
            print("Current shock state: %s" % shock)
shockSum = 0
if __name__ == '__main__':
GPIO.cleanup()
t1 = threading.Thread(target=signalCollect, name='SignalCollect')
t1.start() | apache-2.0 | 393,560,562,758,648,450 | 20.843137 | 69 | 0.551662 | false | 3.161932 | false | false | false |
HiSPARC/publicdb | scripts/fake-datastore-xmlrpc-server.py | 1 | 1729 | #!/usr/bin/python
""" Simple XML-RPC Server to run on the datastore server.
This daemon should be run on HiSPARC's datastore server. It will
handle the cluster layouts and station passwords. When an update is
necessary, it will reload the HTTP daemon.
The basis for this code was ripped from the python SimpleXMLRPCServer
library documentation and extended.
"""
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler
import urllib2
import hashlib
HASH = '/tmp/hash_datastore'
DATASTORE_CFG = '/tmp/station_list.csv'
CFG_URL = 'http://localhost:8003/config/datastore'
def reload_datastore():
"""Load datastore config and reload datastore, if necessary"""
datastore_cfg = urllib2.urlopen(CFG_URL).read()
new_hash = hashlib.sha1(datastore_cfg).hexdigest()
try:
with open(HASH, 'r') as file:
old_hash = file.readline()
except IOError:
old_hash = None
if new_hash == old_hash:
print("New hash is old hash")
return True
else:
with open(DATASTORE_CFG, 'w') as file:
file.write(datastore_cfg)
print("New hash received")
with open(HASH, 'w') as file:
file.write(new_hash)
return True
if __name__ == '__main__':
# Restrict to a particular path.
class RequestHandler(SimpleXMLRPCRequestHandler):
rpc_paths = ('/RPC2',)
# Create server
server = SimpleXMLRPCServer(("localhost", 8002),
requestHandler=RequestHandler)
server.register_introspection_functions()
server.register_function(reload_datastore)
# Run the server's main loop
server.serve_forever()
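    # Illustrative client-side usage (assumes the server above is running on
    # this host; run from another process):
    #
    #   import xmlrpclib
    #   proxy = xmlrpclib.ServerProxy('http://localhost:8002/RPC2')
    #   proxy.reload_datastore()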
| gpl-3.0 | -3,502,502,690,191,686,000 | 26.887097 | 73 | 0.665703 | false | 4.146283 | false | false | false |
liumeixia/xiaworkspace | testweb/testzh/login_regist.py | 1 | 7225 | __author__ = 'liumx'
# -*- coding: utf-8 -*-
from testzh.models import ArTicle1
from testzh.models import ArTicle2
from testzh.models import Usercenter
from django.shortcuts import render_to_response
from django.views.decorators.http import require_http_methods
from django.http import HttpResponseRedirect, HttpResponse
import re
from django.db.models import Q
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import When, F, Q
from django import forms
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib import messages
import re
class registForm(forms.Form):
username = forms.CharField(label='用户名:', max_length=20)
passworld1 = forms.CharField(label='密码:', widget=forms.PasswordInput())
passworld2 = forms.CharField(label='密码:', widget=forms.PasswordInput())
email = forms.EmailField(label='电子邮件:',widget=forms.EmailInput)
class loginForm(forms.Form):
passworld = forms.CharField(label='密码:', widget=forms.PasswordInput())
username = forms.CharField(label='用户名:', max_length=20)
@require_http_methods(["GET", "POST"])
def regist(request):
# error=[]
if request.method == "POST":
uf = registForm(request.POST)
#print uf
#u=request.POST.get("userName")
#print u
        # print u"register"
        if uf.is_valid():  # read the submitted form data
username = uf.cleaned_data['username']
passworld1 = uf.cleaned_data['passworld1']
# print passworld1
passworld2 = uf.cleaned_data['passworld2']
# print passworld2
email = uf.cleaned_data['email']
# print username, passworld1,passworld2, email
            # write the submitted form data to the database
# Usercenter.objects.create(username=username,passworld=passworld,email=email)
user = Usercenter()
# print username
            # validate the username
if not Usercenter.objects.all().filter(uname=username):
if len(username)>=2 and len(username)<=16:
print "ok"
a = re.search(u"[^\u4e00-\u9fa5-A-Za-z]+", username)
if a:
messages.error(request,u"用户名必须是英文,中文")
# error.append(u"用户名必须是英文,中文")
# print error
#a.group()
#print 'seccess:',a.group()
#for key in a:
#print key
else:
user.uname = username
else:
messages.error(request,u"用户名格式不正确,必须2-16位")
# error.append(u"用户名格式不正确,必须2-16位")
# print error
else:
messages.error(request,u"用户名已经被注册")
# error.append(u"用户名已经被注册")
# print error
            # validate the email address
if Usercenter.objects.all().filter(email=email):
messages.error(request,u"邮箱已经注册")
else:
if len(email)>=6 and len(email)<=30:
a = re.search("^[a-zA-Z0-9]([a-zA-Z0-9]*[-_]?[a-zA-Z0-9]+)*@[a-zA-Z0-9]*.(cn|com|net|org|CN|COM|ENT|ORG)$", email)
if a:
user.email = email
#print u"用户名必须是英文,中文"
#a.group()
# print 'seccess:',a.group()
#for key in a:
#print key
else:
messages.error(request,u"'邮箱格式不正确'")
else:
messages.error(request,u"邮箱格式不正确,必须6-30位")
            # validate the password
if passworld1 ==passworld2:
if len(passworld1)>=6 and len(passworld1)<=16:
a = re.search("[^a-zA-Z0-9_]+", passworld1)
if a:
#print u"用户名必须是英文,中文"
#a.group()
messages.error(request, u'密码格式不正确,必须字符数字,字母,_')
#for key in a:
#print key
else:
user.psword = passworld1
else:
messages.error(request, u"密码格式不正确,必须6-16位")
else:
messages.error(request,u"两次密码不一致")
user.save()
# email.save()
# passworld1.save()
# request.session['username'] = username
# return index(request)
return render_to_response('regist.html', {"uf": uf}, context_instance=RequestContext(request))
#print "regist sccuess"
else:
uf = registForm()
# us=uf.username
# ps=uf.passworld
# em=uf.email
# messages.error(request,u"所有必填")
return render_to_response('regist.html', {"uf": uf}, context_instance=RequestContext(request))
# return render_to_response('regist.html',{"uf":uf,'us':us,"ps":ps,"em":em})
def out(request):
    del request.session['username']  # delete the session
uf = loginForm()
return render_to_response('login.html', {"uf": uf}, context_instance=RequestContext(request))
# render_to_response('login.html')
def login(request):
if request.method == "POST":
uf = loginForm(request.POST)
        # print u"login"
        if uf.is_valid():  # read the submitted form data
username = uf.cleaned_data['username']
passworld = uf.cleaned_data['passworld']
# print passworld
user = Usercenter.objects.filter(uname__exact=username, psword__exact=passworld)
if user:
# username=user.uname
                # print u"credentials matched", username
                # store the submitted username in the session object
request.session['username'] = username
                # credentials match; redirect to the index page
response = HttpResponseRedirect('/index/')
                # write the username to a browser cookie with an expiry time in seconds
response.set_cookie('username', username, 60)
return response
# return render_to_response('index.html',context_instance=RequestContext(request))
else:
                # credentials did not match; stay on the login page
messages.error(request,u"用户名或者密码不正确")
# return HttpResponseRedirect('login.html', {"messages":messages})
else:
uf = loginForm()
return render_to_response('login.html', {'uf': uf}, context_instance=RequestContext(request))
| gpl-2.0 | -7,246,749,130,745,708,000 | 37.886905 | 134 | 0.529026 | false | 3.512055 | false | false | false |
asendecka/djangae | djangae/tests/test_transactional.py | 7 | 6364 | from djangae.test import TestCase
from djangae.db import transaction
from djangae.contrib import sleuth
class TransactionTests(TestCase):
def test_repeated_usage_in_a_loop(self):
from .test_connector import TestUser
pk = TestUser.objects.create(username="foo").pk
for i in xrange(4):
with transaction.atomic(xg=True):
TestUser.objects.get(pk=pk)
continue
with transaction.atomic(xg=True):
TestUser.objects.get(pk=pk)
def test_atomic_decorator(self):
from .test_connector import TestUser
@transaction.atomic
def txn():
TestUser.objects.create(username="foo", field2="bar")
self.assertTrue(transaction.in_atomic_block())
raise ValueError()
with self.assertRaises(ValueError):
txn()
self.assertEqual(0, TestUser.objects.count())
def test_interaction_with_datastore_txn(self):
from google.appengine.ext import db
from google.appengine.datastore.datastore_rpc import TransactionOptions
from .test_connector import TestUser
@db.transactional(propagation=TransactionOptions.INDEPENDENT)
def some_indie_txn(_username):
TestUser.objects.create(username=_username)
@db.transactional()
def some_non_indie_txn(_username):
TestUser.objects.create(username=_username)
@db.transactional()
def double_nested_transactional():
@db.transactional(propagation=TransactionOptions.INDEPENDENT)
def do_stuff():
TestUser.objects.create(username="Double")
raise ValueError()
try:
return do_stuff
except:
return
with transaction.atomic():
double_nested_transactional()
@db.transactional()
def something_containing_atomic():
with transaction.atomic():
TestUser.objects.create(username="Inner")
something_containing_atomic()
with transaction.atomic():
with transaction.atomic():
some_non_indie_txn("Bob1")
some_indie_txn("Bob2")
some_indie_txn("Bob3")
with transaction.atomic(independent=True):
some_non_indie_txn("Fred1")
some_indie_txn("Fred2")
some_indie_txn("Fred3")
def test_atomic_context_manager(self):
from .test_connector import TestUser
with self.assertRaises(ValueError):
with transaction.atomic():
TestUser.objects.create(username="foo", field2="bar")
raise ValueError()
self.assertEqual(0, TestUser.objects.count())
def test_non_atomic_context_manager(self):
from .test_connector import TestUser
existing = TestUser.objects.create(username="existing", field2="exists")
with transaction.atomic():
self.assertTrue(transaction.in_atomic_block())
user = TestUser.objects.create(username="foo", field2="bar")
with transaction.non_atomic():
# We're outside the transaction, so the user should not exist
self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
self.assertFalse(transaction.in_atomic_block())
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
TestUser.objects.get(pk=existing.pk) #Should hit the cache, not the datastore
self.assertFalse(datastore_get.called)
with transaction.atomic(independent=True):
user2 = TestUser.objects.create(username="foo2", field2="bar2")
self.assertTrue(transaction.in_atomic_block())
with transaction.non_atomic():
self.assertFalse(transaction.in_atomic_block())
self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
with transaction.non_atomic():
self.assertFalse(transaction.in_atomic_block())
self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
TestUser.objects.get(pk=existing.pk) #Should hit the cache, not the datastore
self.assertFalse(transaction.in_atomic_block())
self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
self.assertTrue(transaction.in_atomic_block())
def test_xg_argument(self):
from .test_connector import TestUser, TestFruit
@transaction.atomic(xg=True)
def txn(_username):
TestUser.objects.create(username=_username, field2="bar")
TestFruit.objects.create(name="Apple", color="pink")
raise ValueError()
with self.assertRaises(ValueError):
txn("foo")
self.assertEqual(0, TestUser.objects.count())
self.assertEqual(0, TestFruit.objects.count())
def test_independent_argument(self):
"""
We would get a XG error if the inner transaction was not independent
"""
from .test_connector import TestUser, TestFruit
@transaction.atomic
def txn1(_username, _fruit):
@transaction.atomic(independent=True)
def txn2(_fruit):
TestFruit.objects.create(name=_fruit, color="pink")
raise ValueError()
TestUser.objects.create(username=_username)
txn2(_fruit)
with self.assertRaises(ValueError):
txn1("test", "banana")
def test_nested_decorator(self):
# Nested decorator pattern we discovered can cause a connection_stack
# underflow.
@transaction.atomic
def inner_txn():
pass
@transaction.atomic
def outer_txn():
inner_txn()
# Calling inner_txn first puts it in a state which means it doesn't
# then behave properly in a nested transaction.
inner_txn()
outer_txn()
| bsd-3-clause | -1,173,051,211,443,170,300 | 33.586957 | 105 | 0.603708 | false | 4.488011 | true | false | false |
asazodi/phdmate | crawlers/semex_cfp.py | 2 | 4854 | import requests
from urllib2 import unquote
from datetime import date
from scrapy.http import HtmlResponse
import re
import cgi
import json
import pprint
ranks = { 0:'unknown', 3:'C', 4:'B', 5:'A'}
title_re = re.compile(r'<td.*?_blank">(.*?)</a')
rest_re = re.compile(r'...list">(?!.*?<a)(?: |\xa0|\xa8){0,3}(.*?)</font>')
full_title_re = re.compile(r'<div align="left"(| title=".*?")>(?=.*?<a href)')
urls_re = re.compile(r'url=(.*?)"')
conferences = {}
gMapsURL = 'https://maps.googleapis.com/maps/api/geocode/json'
# google_maps_api_key = "AIzaSyAxQx5RvskYSPxNQzVvhUYe4YRfJFCCEkE"
google_maps_api_key = "AIzaSyAl4FRVY9SvAZKkvxnH3PEm0POBoI6ddJY"
invalid_locations = ['n/a', 'publication', '', ' ', 'online', 'special issue', 'none']
def run(repo, db):
cfps = db["conferences"]
conferences = repo["conferences"]
today = str(date.today())
url = 'http://grid.hust.edu.cn/call/deadline.jsp?time=all&after='+today+'&rows=1000'
h = HtmlResponse(
url=url,
body=requests.get(url).text,
encoding = 'utf-8').selector.xpath('//table[@id =\'main\']').extract()[0]
titles = title_re.findall(h)
rest = rest_re.findall(h)
full_titles = full_title_re.findall(h)
urls = [unquote(url) for url in urls_re.findall(h)]
for title_num in range(len(titles)):
full_title = full_titles[title_num]
if full_title:
title = full_title[full_title.find('"')+1:full_title.rfind('"')]
else:
title = " ".join(titles[title_num].replace('\'',' 20').split())
cfpIdentifier = " ".join(titles[title_num].replace('\'',' 20').lower().split())
identifier = " ".join(titles[title_num][:titles[title_num].find('\'')].lower().split())
location = rest[4*title_num]
publisher = rest[4*title_num+1]
deadline = rest[4*title_num+2]
rank = ranks[len(rest[4*title_num+3])]
url = urls[title_num]
if cfpIdentifier in cfps.keys():
if len(cfps[cfpIdentifier]["full_title"])<len(title):
cfps[cfpIdentifier]["full_title"] = title.replace('title=\"','').replace('\"','')
#print "FOUND %s " % cfpIdentifier
else:
confDict = {}
cfps[cfpIdentifier] = {}
cfps[cfpIdentifier]["submission"] = deadline
cfps[cfpIdentifier]["url"] = url
cfps[cfpIdentifier]["date"] = "Unknown"
cfps[cfpIdentifier]["title"] = " ".join(titles[title_num].replace('\'',' 20').split())
cfps[cfpIdentifier]["full_title"] = full_title.replace('title=\"','').replace('\"','')
cfps[cfpIdentifier]["location"] = location
cfps[cfpIdentifier]["lat"] = 0
cfps[cfpIdentifier]["lng"] = 0
cfps[cfpIdentifier]["categories"] = ['computer science']
if location.lower() not in invalid_locations:
#print location.lower()
userdata = {"address": location.strip(), "key": google_maps_api_key}
response = requests.get(gMapsURL, params=userdata)
if 'OK' == response.json()["status"]:
conf_loc_info = response.json()["results"][0]["geometry"]["location"]
cfps[cfpIdentifier]["lat"] = conf_loc_info["lat"]
cfps[cfpIdentifier]["lng"] = conf_loc_info["lng"]
else:
print "Invalid Response:"
print response.json()
#print "CREATED: %s" % cfpIdentifier
if identifier in conferences.keys():
#print "FOUND %s " % identifier
if len(conferences[identifier]["full_title"])<len(title):
conferences[identifier]["full_title"] = title.replace('title=\"','').replace('\"','')
if conferences[identifier]["tier"] == "None":
conferences[identifier]["tier"] = rank
if conferences[identifier]["type"] == "None":
conferences[identifier]["type"] = publisher
else:
confDict = {}
confDict["ranking"] = 'Unknown'
confDict["full_title"] = title.replace('title=\"','').replace('\"','')
confDict["type"] = publisher
confDict["tier"] = rank
conferences[identifier] = confDict
#print "I: %s" % identifier
#print "T: %s | L: %s | P: %s | D: %s | R: %s " %(title,location,publisher,deadline,rank)
f = open('../www/conference-repo.json','w')
f.write(json.dumps(repo))
f.close()
f2 = open('../www/db.json','w')
f2.write(json.dumps(db))
f2.close()
if __name__ == '__main__':
f = open('../www/conference-repo.json','r')
repo = json.loads(f.read())
f.close()
#print repo
f2 = open('../www/db.json','r')
db = json.loads(f2.read())
f2.close()
run(repo, db)
| gpl-2.0 | 434,400,404,328,708,500 | 37.832 | 103 | 0.554388 | false | 3.435244 | false | false | false |
leiyangleon/FSH | ISCE_processing_scripts/insarApp_substitute/runCoherence.py | 1 | 3709 | #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2012, by the California Institute of Technology. ALL RIGHTS RESERVED.
# United States Government Sponsorship acknowledged. Any commercial use must be
# negotiated with the Office of Technology Transfer at the California Institute of
# Technology. This software is subject to U.S. export control laws and regulations
# and has been classified as EAR99. By accepting this software, the user agrees to
# comply with all applicable U.S. export laws and regulations. User has the
# responsibility to obtain export licenses, or other export authority as may be
# required before exporting such information to foreign countries or providing
# access to foreign persons.
#
# Author: Brett George
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import logging
import operator
import isceobj
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
from mroipac.correlation.correlation import Correlation
from isceobj.Util.decorators import use_api
logger = logging.getLogger('isce.insar.runCoherence')
## mapping from algorithm method to Correlation instance method name
CORRELATION_METHOD = {
'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'),
'cchz_wave' : operator.methodcaller('calculateCorrelation')
}
@use_api
def runCoherence(self, method="phase_gradient"):
logger.info("Calculating Coherence")
# Initialize the amplitude
# resampAmpImage = self.insar.resampAmpImage
# ampImage = isceobj.createAmpImage()
# IU.copyAttributes(resampAmpImage, ampImage)
# ampImage.setAccessMode('read')
# ampImage.createImage()
# ampImage = self.insar.getResampOnlyAmp().copy(access_mode='read')
ampImage = isceobj.createImage()
ampImage.load( self.insar.getResampOnlyAmp().filename + '.xml')
ampImage.setAccessMode('READ')
ampImage.createImage()
# Initialize the flattened inteferogram
topoflatIntFilename = self.insar.topophaseFlatFilename
intImage = isceobj.createImage()
intImage.load ( self.insar.topophaseFlatFilename + '.xml')
intImage.setAccessMode('READ')
intImage.createImage()
# widthInt = self.insar.resampIntImage.getWidth()
# intImage.setFilename(topoflatIntFilename)
# intImage.setWidth(widthInt)
# intImage.setAccessMode('read')
# intImage.createImage()
# Create the coherence image
cohFilename = topoflatIntFilename.replace('.flat', '.cor')
cohImage = isceobj.createOffsetImage()
cohImage.setFilename(cohFilename)
cohImage.setWidth(intImage.width)
cohImage.setAccessMode('write')
cohImage.createImage()
cor = Correlation()
cor.configure()
cor.wireInputPort(name='interferogram', object=intImage)
cor.wireInputPort(name='amplitude', object=ampImage)
cor.wireOutputPort(name='correlation', object=cohImage)
cohImage.finalizeImage()
intImage.finalizeImage()
ampImage.finalizeImage()
cor.calculateCorrelation()
# NEW COMMANDS added by YL --start
import subprocess
subprocess.getoutput('MULTILOOK_FILTER_ISCE.py -a ./resampOnlyImage.amp -c ./topophase.cor')
subprocess.getoutput('CROP_ISCE_insarApp.py -a ./resampOnlyImage.amp -c ./topophase.cor')
subprocess.getoutput('imageMath.py -e="a_0;a_1" --a ./resampOnlyImage.amp -o ./resampOnlyImage1.amp -s BIL -t FLOAT')
self.geocode_list += ['./resampOnlyImage1.amp']
# NEW COMMANDS added by YL --end
# try:
# CORRELATION_METHOD[method](cor)
# except KeyError:
# print("Unrecognized correlation method")
# sys.exit(1)
# pass
return None
| gpl-3.0 | -9,096,079,866,505,561,000 | 36.846939 | 121 | 0.69938 | false | 3.600971 | false | false | false |
Nizebulous/games_solver | packages/modules/chopsticks.py | 1 | 3581 | from base import BaseGame
from packages.utils import Value
class Chopsticks(BaseGame):
"""
The finger game of Chopsticks
"""
DEAD_HAND = 6
def __init__(self):
"""
Initialize the play board
"""
self.board = [1, 1, 1, 1]
self.players_turn = 0
self.hands = ['left', 'right']
self.hands_map = {
'left': 0,
'right': 1
}
def hash(self):
"""
Turn a board into a position
"""
pos_hash = 0
for hand in self.board:
pos_hash += hand - 1
pos_hash *= 6
pos_hash /= 6
pos_hash <<= 1
pos_hash += self.players_turn
return pos_hash
@classmethod
def unhash(cls, pos_hash):
"""
Turn a position (value) into a board
"""
board = cls()
board.players_turn = pos_hash % 2
pos_hash >>= 1
for index in range(3, -1, -1):
board.board[index] = (pos_hash % 6) + 1
pos_hash = int(pos_hash / 6)
return board
def get_moves(self):
"""
Get supported moves
"""
moves = []
source_first_hand_index = self.players_turn * 2
dest_first_hand_index = (source_first_hand_index + 2) % 4
for index in range(source_first_hand_index, source_first_hand_index + 2):
if self.board[index] != self.DEAD_HAND:
for second_index in range(dest_first_hand_index, dest_first_hand_index + 2):
if self.board[second_index] == self.DEAD_HAND:
continue
moves.append((self.hands[index % 2], self.hands[second_index % 2]))
return moves
def do_move(self, move):
"""
Apply the move to the current board
"""
source_first_hand_index = self.players_turn * 2
dest_first_hand_index = (source_first_hand_index + 2) % 4
source = source_first_hand_index + self.hands_map[move[0]]
dest = dest_first_hand_index + self.hands_map[move[1]]
self.board[dest] = (self.board[dest] + self.board[source]) % 5 or self.DEAD_HAND
self.players_turn = (self.players_turn + 1) % 2
def undo_move(self, move):
"""
Unapply the move that resulted in the current board
"""
dest_first_hand_index = self.players_turn * 2
source_first_hand_index = (dest_first_hand_index + 2) % 4
source = source_first_hand_index + self.hands_map[move[0]]
dest = dest_first_hand_index + self.hands_map[move[1]]
old_value = 0 if self.board[dest] == self.DEAD_HAND else self.board[dest]
self.board[dest] = (old_value - self.board[source]) % 5
self.players_turn = (self.players_turn + 1) % 2
def get_value(self):
"""
Return if this is an ending position
"""
first_hand_index = self.players_turn * 2
if self.board[first_hand_index] == self.DEAD_HAND and \
self.board[first_hand_index + 1] == self.DEAD_HAND:
return Value.LOSS
return Value.UNKNOWN
def print_position(self):
"""
Print the specified position
"""
board = [hand if hand != self.DEAD_HAND else 'X' for hand in self.board]
print ' Player 1: Player 2:'
print 'left: {} right: {} left: {} right: {}'.format(*board)
print
print 'Player {}\'s turn!'.format(str(self.players_turn + 1))
print '======================='
print
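# Illustrative round trip: hash() and unhash() are inverses, so a position can
# be stored as a single integer and restored later.
#
#   game = Chopsticks()
#   game.do_move(('left', 'left'))
#   restored = Chopsticks.unhash(game.hash())
#   assert restored.board == game.board
#   assert restored.players_turn == game.players_turn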
| mit | 1,893,841,753,697,081,600 | 31.853211 | 92 | 0.526389 | false | 3.672821 | false | false | false |
fox-it/django-auth-policy | django_auth_policy/tests/__init__.py | 1 | 22907 | import datetime
import logging
import collections
from cStringIO import StringIO
from django.test import TestCase, RequestFactory
from django.contrib.auth import get_user_model, SESSION_KEY
from django.core.urlresolvers import reverse
from django.utils import timezone
from django_auth_policy.forms import (StrictAuthenticationForm,
StrictPasswordChangeForm)
from django_auth_policy.models import LoginAttempt, PasswordChange
from django_auth_policy.backends import StrictModelBackend
from django_auth_policy import settings as dap_settings
class LoginTests(TestCase):
urls = 'django_auth_policy.tests.urls'
def setUp(self):
self.user = get_user_model().objects.create_user(
username='rf',
email='[email protected]',
password='password')
self.factory = RequestFactory()
self.logger = logging.getLogger()
self.old_stream = self.logger.handlers[0].stream
self.logger.handlers[0].stream = StringIO()
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
def test_success(self):
""" Test view with form and successful login """
resp = self.client.get(reverse('login'))
self.assertEqual(resp.status_code, 200)
resp = self.client.post(reverse('login'), data={
'username': 'rf', 'password': 'password'})
self.assertEqual(resp.status_code, 302)
self.assertTrue(SESSION_KEY in self.client.session)
self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
attempts = LoginAttempt.objects.filter(username=self.user.username,
successful=True)
self.assertEqual(attempts.count(), 1)
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO User rf must change password\n'))
def test_username_lockout(self):
""" Test too many failed login attempts for one username """
for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX):
req = self.factory.get(reverse('login'))
req.META['REMOTE_ADDR'] = '10.0.0.%d' % (x + 1)
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertEqual(form.non_field_errors(), [
form.error_messages['invalid_login'] % {
'username': form.username_field.verbose_name}])
attempts = LoginAttempt.objects.filter(username=self.user.username,
successful=False, lockout=True)
self.assertEqual(attempts.count(),
dap_settings.FAILED_AUTH_USERNAME_MAX)
# Another failed authentication triggers lockout
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertEqual(form.non_field_errors(), [
form.error_messages['username_locked_out']])
self.assertEqual(attempts.count(),
dap_settings.FAILED_AUTH_USERNAME_MAX + 1)
# Even valid authentication will no longer work now
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertFalse(form.is_valid())
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=10.0.0.1\n'
u'WARNING Authentication failure, username=rf, address=10.0.0.1, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf, address=10.0.0.2\n'
u'WARNING Authentication failure, username=rf, address=10.0.0.2, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf, address=10.0.0.3\n'
u'WARNING Authentication failure, username=rf, address=10.0.0.3, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
u'username locked\n'
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
u'username locked\n'))
def test_address_lockout(self):
""" Test too many failed login attempts for one address """
addr = '1.2.3.4'
for x in xrange(0, dap_settings.FAILED_AUTH_ADDRESS_MAX):
req = self.factory.get(reverse('login'))
req.META['REMOTE_ADDR'] = addr
form = StrictAuthenticationForm(request=req, data={
'username': 'rf%d' % x, 'password': 'wrong password'})
self.assertEqual(form.non_field_errors(), [
form.error_messages['invalid_login'] % {
'username': form.username_field.verbose_name}])
attempts = LoginAttempt.objects.filter(source_address=addr,
successful=False, lockout=True)
self.assertEqual(attempts.count(),
dap_settings.FAILED_AUTH_ADDRESS_MAX)
# Another failed authentication triggers lockout
req = self.factory.get(reverse('login'))
req.META['REMOTE_ADDR'] = addr
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertEqual(form.non_field_errors(), [
form.error_messages['address_locked_out']])
self.assertEqual(attempts.count(),
dap_settings.FAILED_AUTH_ADDRESS_MAX + 1)
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf0, address=1.2.3.4\n'
u'WARNING Authentication failure, username=rf0, address=1.2.3.4, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf1, address=1.2.3.4\n'
u'WARNING Authentication failure, username=rf1, address=1.2.3.4, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf2, address=1.2.3.4\n'
u'WARNING Authentication failure, username=rf2, address=1.2.3.4, '
u'invalid authentication.\n'
u'INFO Authentication attempt, username=rf, address=1.2.3.4\n'
u'WARNING Authentication failure, username=rf, address=1.2.3.4, '
u'address locked\n'))
def test_inactive_user(self):
self.user.is_active = False
self.user.save()
# Valid authentication data, but user is inactive
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), [
form.error_messages['inactive']])
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
u'user inactive.\n'))
def test_lock_period(self):
for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertFalse(form.is_valid())
# User locked out
self.assertEqual(form.non_field_errors(), [
form.error_messages['username_locked_out']])
# Alter timestamps as if they happened longer ago
period = datetime.timedelta(
seconds=dap_settings.FAILED_AUTH_LOCKOUT_PERIOD)
expire_at = timezone.now() - period
LoginAttempt.objects.all().update(timestamp=expire_at)
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertTrue(form.is_valid())
# Successful login resets lock count
locking_attempts = LoginAttempt.objects.filter(lockout=True)
self.assertEqual(locking_attempts.count(), 0)
def test_unlock(self):
""" Resetting lockout data unlocks user """
for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertFalse(form.is_valid())
# User locked out
self.assertEqual(form.non_field_errors(), [
form.error_messages['username_locked_out']])
# Unlock user or address
LoginAttempt.objects.all().update(lockout=False)
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertTrue(form.is_valid())
def test_backend_locked_username(self):
# Authentication works
backend = StrictModelBackend()
user = backend.authenticate(username='rf', password='password')
self.assertEqual(user, self.user)
# Lock user
for x in xrange(0, dap_settings.FAILED_AUTH_USERNAME_MAX + 1):
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'wrong password'})
self.assertFalse(form.is_valid())
# Authentication must no longer work for this user
user = backend.authenticate(username='rf', password='password')
self.assertEqual(user, None)
class UserExpiryTests(TestCase):
urls = 'django_auth_policy.tests.urls'
def setUp(self):
self.user = get_user_model().objects.create_user(
username='rf',
email='[email protected]',
password='password')
self.factory = RequestFactory()
self.logger = logging.getLogger()
self.old_stream = self.logger.handlers[0].stream
self.logger.handlers[0].stream = StringIO()
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
def test_expiry(self):
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertTrue(form.is_valid())
# Simulate user didn't log in for a long time
period = datetime.timedelta(days=dap_settings.INACTIVE_USERS_EXPIRY)
expire_at = timezone.now() - period
self.user.last_login = expire_at
self.user.save()
LoginAttempt.objects.all().update(timestamp=expire_at)
# Login attempt disabled user
req = self.factory.get(reverse('login'))
form = StrictAuthenticationForm(request=req, data={
'username': 'rf', 'password': 'password'})
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), [
form.error_messages['inactive']])
# Check log messages
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'WARNING User rf disabled because last login was at %s\n'
u'WARNING Authentication failure, username=rf, address=127.0.0.1, '
u'user inactive.\n' % expire_at))
def test_backend_expired_user(self):
# Authentication works
backend = StrictModelBackend()
user = backend.authenticate(username='rf', password='password')
self.assertEqual(user, self.user)
self.assertTrue(user.is_active)
# Simulate user didn't log in for a long time
period = datetime.timedelta(days=dap_settings.INACTIVE_USERS_EXPIRY)
expire_at = timezone.now() - period
self.user.last_login = expire_at
self.user.save()
LoginAttempt.objects.all().update(timestamp=expire_at)
# Authentication must still work for this user, but user is inactive
user = backend.authenticate(username='rf', password='password')
self.assertEqual(user, self.user)
self.assertFalse(user.is_active)
class PasswordChangeTests(TestCase):
urls = 'django_auth_policy.tests.urls'
def setUp(self):
self.user = get_user_model().objects.create_user(
username='rf',
email='[email protected]',
password='password')
self.factory = RequestFactory()
self.logger = logging.getLogger()
self.old_stream = self.logger.handlers[0].stream
self.logger.handlers[0].stream = StringIO()
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
def test_expiry(self):
# Create one recent password change
pw = PasswordChange.objects.create(user=self.user, successful=True,
is_temporary=False)
# Redirect to login
resp = self.client.get(reverse('login_required_view'), follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.request['PATH_INFO'], reverse('login'))
# Login
resp = self.client.post(reverse('login'), data={
'username': 'rf', 'password': 'password'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertTrue(SESSION_KEY in self.client.session)
self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
self.assertTrue('password_change_enforce' in self.client.session)
self.assertFalse(self.client.session['password_change_enforce'])
self.assertFalse(self.client.session['password_is_expired'])
self.assertFalse(self.client.session['password_is_temporary'])
self.assertNotContains(resp, 'new_password1')
# Test if login worked ok
resp = self.client.get(reverse('login_required_view'), follow=False)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.request['PATH_INFO'], '/')
# Logout
resp = self.client.get(reverse('logout'), follow=True)
self.assertFalse(SESSION_KEY in self.client.session)
# Move PasswordChange into the past
period = datetime.timedelta(days=dap_settings.MAX_PASSWORD_AGE)
expire_at = timezone.now() - period
pw.timestamp = expire_at
pw.save()
# Login will still work
resp = self.client.post(reverse('login'), data={
'username': 'rf', 'password': 'password'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertTrue(SESSION_KEY in self.client.session)
self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
self.assertTrue('password_change_enforce' in self.client.session)
self.assertTrue(self.client.session['password_change_enforce'])
self.assertTrue(self.client.session['password_is_expired'])
self.assertFalse(self.client.session['password_is_temporary'])
self.assertContains(resp, 'old_password')
self.assertContains(resp, 'new_password1')
self.assertContains(resp, 'new_password2')
# And try requesting a different page still displays a change
# password view
resp = self.client.get(reverse('another_view'), follow=False)
self.assertTrue('password_change_enforce' in self.client.session)
self.assertTrue(self.client.session['password_change_enforce'])
self.assertTrue(self.client.session['password_is_expired'])
self.assertFalse(self.client.session['password_is_temporary'])
self.assertContains(resp, 'old_password')
self.assertContains(resp, 'new_password1')
self.assertContains(resp, 'new_password2')
# Post a new password
resp = self.client.post(reverse('login_required_view'), data={
'old_password': 'password',
'new_password1': 'abcABC123!@#',
'new_password2': 'abcABC123!@#'}, follow=True)
self.assertFalse(self.client.session['password_change_enforce'])
self.assertFalse(self.client.session['password_is_expired'])
self.assertFalse(self.client.session['password_is_temporary'])
self.assertNotContains(resp, 'old_password')
self.assertNotContains(resp, 'new_password1')
self.assertNotContains(resp, 'new_password2')
self.assertEqual(resp.redirect_chain, [('http://testserver/', 302)])
# Recheck, change password view should be gone
resp = self.client.get(reverse('login_required_view'), follow=False)
self.assertNotContains(resp, 'old_password')
self.assertNotContains(resp, 'new_password1')
self.assertNotContains(resp, 'new_password2')
# Logging tests
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO User rf must change expired password\n'
u'INFO Password change successful for user rf\n'))
def test_temporary_password(self):
# Create one recent password change
PasswordChange.objects.create(user=self.user, successful=True,
is_temporary=True)
# Login
resp = self.client.post(reverse('login'), data={
'username': 'rf', 'password': 'password'})
self.assertEqual(resp.status_code, 302)
self.assertTrue(SESSION_KEY in self.client.session)
self.assertEqual(self.client.session[SESSION_KEY], self.user.id)
# Requesting a page shows password change view
resp = self.client.get(reverse('login_required_view'), follow=True)
self.assertEqual(resp.request['PATH_INFO'], '/')
self.assertContains(resp, 'old_password')
self.assertContains(resp, 'new_password1')
self.assertContains(resp, 'new_password2')
# Change the password:
resp = self.client.post(reverse('login_required_view'), data={
'old_password': 'password',
'new_password1': 'A-New-Passw0rd-4-me',
'new_password2': 'A-New-Passw0rd-4-me'}, follow=True)
self.assertEqual(resp.redirect_chain, [('http://testserver/', 302)])
self.assertEqual(resp.request['PATH_INFO'], '/')
self.assertNotContains(resp, 'old_password')
self.assertNotContains(resp, 'new_password1')
self.assertNotContains(resp, 'new_password2')
self.assertEqual(PasswordChange.objects.all().count(), 2)
self.assertEqual(PasswordChange.objects.filter(
is_temporary=True).count(), 1)
# Logging tests
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
u'INFO Authentication attempt, username=rf, address=127.0.0.1\n'
u'INFO Authentication success, username=rf, address=127.0.0.1\n'
u'INFO User rf must change temporary password\n'
u'INFO Password change successful for user rf\n'))
def password_change_login_required(self):
resp = self.client.post(reverse('password_change'), follow=True)
self.assertEqual(resp.redirect_chain, [
('http://testserver/login/?next=/password_change/', 302)])
def test_password_length(self):
new_passwd = 'Aa1.$Bb2.^Cc.Dd5%.Ee6&.Dd7*'
short_passwd = new_passwd[:dap_settings.PASSWORD_MIN_LENGTH]
# Too short password doesnt work
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': short_passwd[:-1],
'new_password2': short_passwd[:-1]})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'],
[form.error_messages['password_min_length']])
# Longer password does work
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': short_passwd,
'new_password2': short_passwd})
self.assertTrue(form.is_valid())
# Check correct PasswordChange items were created
self.assertEqual(PasswordChange.objects.all().count(), 2)
self.assertEqual(PasswordChange.objects.filter(
successful=True).count(), 1)
self.assertEqual(PasswordChange.objects.filter(
successful=False).count(), 1)
# Logging tests
self.assertEqual(self.logger.handlers[0].stream.getvalue(), (
'INFO Password change failed for user rf\n'
'INFO Password change successful for user rf\n'))
def test_password_complexity(self):
# Remove one category at a time to check all posibilities
rules = collections.deque(dap_settings.PASSWORD_COMPLEXITY)
for x in xrange(0, len(rules)):
passwd = u''.join([r['chars'][:4] for r in list(rules)[:-1]])
form = StrictPasswordChangeForm(self.user, data={
'old_password': 'password',
'new_password1': passwd,
'new_password2': passwd})
failing_rule = rules[-1]
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'], [
form.error_messages['password_complexity'] % failing_rule])
rules.rotate(1)
def test_password_differ_old(self):
""" Make sure new password differs from old password """
passwd = 'Aa1.$Bb2.^Cc.Dd5%.Ee6&.Dd7*'
self.user.set_password(passwd)
self.user.save()
form = StrictPasswordChangeForm(self.user, data={
'old_password': passwd,
'new_password1': passwd,
'new_password2': passwd})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['new_password1'],
[form.error_messages['password_unchanged']])
| bsd-3-clause | 8,727,663,295,439,031,000 | 42.220755 | 79 | 0.622779 | false | 4.073804 | true | false | false |
ychen820/microblog | y/google-cloud-sdk/.install/.backup/platform/gcutil/lib/google_compute_engine/gcutil_lib/version_checker.py | 4 | 9195 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module can check for new versions of gcutil.
A JSON file located at VERSION_INFO_URL contains the version number of
the latest version of gcutil.
"""
import json
import os
import platform
import time
import gflags as flags
from gcutil_lib import gcutil_logging
from gcutil_lib import utils
from gcutil_lib import version
LOGGER = gcutil_logging.LOGGER
VERSION_INFO_URL = 'http://dl.google.com/compute/latest-version.json'
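# The JSON document at VERSION_INFO_URL is expected to contain at least a
# 'version' field and, optionally, 'tar' and 'zip' download URLs (these are the
# fields read in _UpdateCache below); illustrative example only:
#   {"version": "1.16.5", "tar": "https://.../gcutil.tar.gz", "zip": "https://.../gcutil.zip"}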
VERSION_CACHE_FILE = os.path.join(os.path.expanduser('~'), '.gcutil.version')
SETUP_DOC_URL = 'https://developers.google.com/compute/docs/gcutil'
TIMEOUT_IN_SEC = 1
# The minimum amount of time that can pass between visits to
# VERSION_INFO_URL to grab the latest version string.
CACHE_TTL_SEC = 24 * 60 * 60
FLAGS = flags.FLAGS
flags.DEFINE_boolean('check_for_new_version',
True,
'Enables gcutil\'s version checker.')
class VersionChecker(object):
"""A class that encapsulates the logic for performing version checks."""
def __init__(
self,
cache_path=VERSION_CACHE_FILE,
cache_ttl_sec=CACHE_TTL_SEC,
current_version=version.__version__):
"""Constructs a new VersionChecker.
Args:
cache_path: The path to a file that caches the results of
fetching VERSION_INFO_URL.
cache_ttl_sec: The maximum amount of time the cache is considered
valid.
current_version: The version of currently executing gcutil.
"""
self._cache_path = os.path.expanduser(cache_path)
self._cache_ttl_sec = cache_ttl_sec
self._current_version = current_version
@staticmethod
def _IsCacheMalformed(cache):
"""Returns True if the given cache is not in its expected form."""
if ('last_check' not in cache or
'current_version' not in cache or
'last_checked_version' not in cache):
return True
if not isinstance(cache['last_check'], float):
return True
try:
VersionChecker._ParseVersionString(cache['current_version'])
VersionChecker._ParseVersionString(cache['last_checked_version'])
except BaseException:
return True
return False
def _IsCacheStale(self, cache, current_time=None):
"""Returns True if the cache is stale."""
if VersionChecker._IsCacheMalformed(cache):
LOGGER.debug('Encountered malformed or empty cache: %s', cache)
return True
# If the gcutil version has changed since the last cache write, then
# the cache is stale.
if cache['current_version'] != self._current_version:
return True
current_time = time.time() if current_time is None else current_time
# If the cache is old, then it's stale.
if cache['last_check'] + self._cache_ttl_sec <= current_time:
return True
# If for some reason the current time is less than the last time
# the cache was written to (e.g., the user changed his or her
# system time), then the safest thing to do is to assume the cache
# is stale.
if cache['last_check'] > current_time:
return True
return False
@staticmethod
def _ParseVersionString(version_string):
"""Converts a version string into a tuple of its components.
For example, '1.2.0' -> (1, 2, 0).
Args:
version_string: The input.
Raises:
ValueError: If any of the version components are not integers.
Returns:
A tuple of the version components.
"""
try:
return tuple([int(i) for i in version_string.split('.')])
except ValueError as e:
raise ValueError('Could not parse version string %s: %s' %
(version_string, e))
@staticmethod
def _CompareVersions(left, right):
"""Returns True if the left version is less than the right version."""
return (VersionChecker._ParseVersionString(left) <
VersionChecker._ParseVersionString(right))
def _UpdateCache(self, cache, http=None, current_time=None):
"""Fetches the version info and updates the given cache dict.
Args:
cache: A dict representing the contents of the cache.
http: An httplib2.Http object. This is used for testing.
current_time: The current time since the Epoch, in seconds.
This is also used for testing.
Raises:
ValueError: If the response code is not 200.
"""
http = http or utils.GetHttp()
response, content = http.request(
VERSION_INFO_URL, headers={'Cache-Control': 'no-cache'})
LOGGER.debug('Version check response: %s', response)
LOGGER.debug('Version check payload: %s', content)
if response.status != 200:
      raise ValueError('Received response code %s while fetching %s.' %
                       (response.status, VERSION_INFO_URL))
latest_version_data = json.loads(content)
cache['current_version'] = self._current_version
cache['last_checked_version'] = latest_version_data['version']
cache['last_tar_url'] = latest_version_data.get('tar')
cache['last_zip_url'] = latest_version_data.get('zip')
cache['last_check'] = current_time or time.time()
def _ReadCache(self):
"""Reads the contents of the version cache file.
Returns:
A dict that corresponds to the JSON stored in the cache file.
Returns an empty dict if the cache file does not exist or if
there is a problem reading/parsing the cache.
"""
if not os.path.exists(self._cache_path):
return {}
try:
with open(self._cache_path) as f:
return json.load(f)
except BaseException as e:
LOGGER.debug('Reading %s failed: %s', self._cache_path, e)
return {}
def _WriteToCache(self, cache):
"""JSON-serializes the given dict and writes it to the cache."""
with open(self._cache_path, 'w') as f:
json.dump(cache, f)
def _GetSystem(self):
"""Gets the system that gcutil is currently running on.
    Can be overridden for testing.
Returns:
The name of the system that gcutil is running on.
"""
return platform.system()
def _GetDownloadLink(self, cache):
"""Gets the link to the latest version of gcutil from the cache.
The link should be to either a .tar or .zip archive, based on the system
gcutil is running on.
Args:
cache: A dict representing the contents of the cache.
Returns:
Link to the latest version of gcutil, based on the system. Might be None
      if the cache does not contain this information.
"""
if self._GetSystem() == 'Windows':
return cache.get('last_zip_url')
else:
return cache.get('last_tar_url')
def _NewVersionExists(self):
"""Checks whether new version of gcutil exists.
Returns:
A tuple with three elements. First indicates whether a new gcutil
version exists, second contains the last known version, and third
contains the latest download link.
"""
cache = self._ReadCache()
if self._IsCacheStale(cache):
LOGGER.debug('%s is stale. Consulting %s for latest version info...',
self._cache_path, VERSION_INFO_URL)
self._UpdateCache(cache)
self._WriteToCache(cache)
else:
LOGGER.debug('Consulting %s for latest version info...', self._cache_path)
latest_version = cache['last_checked_version']
ret = self._CompareVersions(self._current_version, latest_version)
latest_link = self._GetDownloadLink(cache)
return ret, latest_version, latest_link
def CheckForNewVersion(self):
"""Performs the actual check for a new version.
This method may either consult the cache or the web, depending on
the cache's age.
    The side-effect of this method is a WARN log that tells the user
of an old version.
Returns:
True if version checking was requested and a new version is
available.
"""
if not FLAGS.check_for_new_version:
LOGGER.debug('Skipping version check...')
return
LOGGER.debug('Performing version check...')
try:
newer_exists, latest_version, _ = self._NewVersionExists()
if newer_exists:
LOGGER.warning(
'There is a new version of gcutil available. Go to: %s',
SETUP_DOC_URL)
LOGGER.warning(
'Your version of gcutil is %s, the latest version is %s.',
self._current_version, latest_version)
else:
LOGGER.debug('gcutil is up-to-date.')
# So much can go wrong with this code that it's unreasonable to
    # add error handling everywhere, hence the "catch-all" exception
# handling.
except BaseException as e:
LOGGER.debug('Version checking failed: %s', e)
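# Editor's illustration, not part of the original gcutil module: a minimal
# sketch of how VersionChecker is typically driven. It assumes the module
# defaults above (VERSION_CACHE_FILE, CACHE_TTL_SEC) and that command-line
# flags have already been parsed.
def _example_version_check():
  """Hypothetical helper that runs a single version check."""
  checker = VersionChecker(cache_ttl_sec=CACHE_TTL_SEC)
  # Logs a warning if a newer gcutil release is known; any error is swallowed
  # and logged at debug level by CheckForNewVersion itself.
  checker.CheckForNewVersion()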
| bsd-3-clause | 6,195,015,633,478,772,000 | 31.72242 | 80 | 0.670473 | false | 4.025832 | true | false | false |
igrad/Bag-of-Holding | Bag.py | 1 | 2569 | from SysFuncs import *
from LoadSaves import *
from AppInit import *
from BagItem import *
from Currency import *
class Bag:
    '''An instance of a user's bag. This contains all of the metadata about the
    bag and lists the bag's contents as item ID references. Information about
    the items themselves is not stored here, just the IDs of the items
    assigned to this bag.'''
def __init__(self, **kwargs):
if 'ID' in kwargs: self.ID = str(kwargs['ID'])
else: self.ID = self.GetNewBagID()
if 'name' in kwargs: self.name = str(kwargs['name'])
if 'items' in kwargs: self.items = list(kwargs['items'])
if 'currency' in kwargs: self.currency = CurrencySet(cTypes = kwargs['currency'])
if 'view' in kwargs: self.view = str(kwargs['view'])
self.tot_items = len(self.items)
self.tot_weight = 0
self.tot_val = 0
BAGS[self.ID] = self
if 'ID' not in kwargs:
self.SaveBagInfo()
def UpdateBag(self, **kwargs):
if 'name' in kwargs: self.name = str(kwargs['name'])
if 'items' in kwargs: self.items = list(kwargs['items'])
        if 'currency' in kwargs: self.currency = CurrencySet(cTypes = kwargs['currency'])
if 'view' in kwargs: self.view = str(kwargs['view'])
self.SaveBagInfo()
self.SetTotals()
def AddItem(self, itemID):
self.items.append(itemID)
self.items.sort()
self.SaveBagInfo()
def RemoveItemFromBag(self, itemID):
        self.items.remove(itemID)  # itemID is an item ID string, not a list index
ITEMS[itemID].DeleteItemFromSave()
del ITEMS[itemID]
self.SaveBagInfo()
def GetNewBagID(self):
'''Gets an unused bagID number.'''
keys = BAGS.keys()
for i in range(MAX_BAGS):
if not i in keys: return str(i)
LogMsg("No more bags available!")
return None
def SaveBagInfo(self):
        '''Stores the bag by saving a shallow copy of its data to the bags store.'''
bagsStore.put(str(self.ID), name = self.name, currency = self.currency.cTypes,
view = self.view, items = self.items)
def SetTotals(self):
'''Set the tot_items, tot_weight, and tot_val properties.'''
self.tot_items = len(self.items)
self.tot_weight = sum([int(ExtractNumber(ITEMS[str(x)].weight)) for x in self.items])
self.tot_val = sum([ExtractNumber(ITEMS[str(x)].val) for x in self.items])
def DeleteBagFromSave(self):
'''Removes the bag from bagsStore save file.'''
bagsStore.delete(str(self.ID))
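# Editor's sketch, not part of the original app: typical Bag construction and
# mutation. The bag name, item ID and view name are hypothetical, and it
# assumes a BagItem with ID '0' has already been registered in ITEMS.
def _example_bag_usage():
    bag = Bag(name='Haversack', items=[], currency=[], view='list')
    bag.AddItem('0')      # items are stored as ID strings
    bag.SetTotals()       # recompute tot_items / tot_weight / tot_val
    return bag.tot_items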
| gpl-3.0 | 618,479,454,505,748,700 | 29.223529 | 93 | 0.611133 | false | 3.563107 | false | false | false |
ionomy/ion | test/functional/addressindex.py | 1 | 14802 | #!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2018-2020 The Ion Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test addressindex generation and fetching
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import binascii
class AddressIndexTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self):
self.add_nodes(self.num_nodes)
# Nodes 0/1 are "wallet" nodes
self.start_node(0, ["-relaypriority=0"])
self.start_node(1, ["-addressindex"])
# Nodes 2/3 are used for testing
self.start_node(2, ["-addressindex", "-relaypriority=0"])
self.start_node(3, ["-addressindex"])
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.is_network_split = False
self.sync_all()
def run_test(self):
self.log.info("Test that settings can't be changed without -reindex...")
self.stop_node(1)
self.assert_start_raises_init_error(1, ["-addressindex=0"], 'You need to rebuild the database using -reindex to change -addressindex')
self.start_node(1, ["-addressindex=0", "-reindex"])
connect_nodes(self.nodes[0], 1)
self.sync_all()
self.stop_node(1)
self.assert_start_raises_init_error(1, ["-addressindex"], 'You need to rebuild the database using -reindex to change -addressindex')
self.start_node(1, ["-addressindex", "-reindex"])
connect_nodes(self.nodes[0], 1)
self.sync_all()
self.log.info("Mining blocks...")
self.nodes[0].generate(105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
# Check that balances are correct
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 0)
# Check p2pkh and p2sh address indexes
self.log.info("Testing p2pkh and p2sh address index...")
txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10)
self.nodes[0].generate(1)
txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10)
self.nodes[0].generate(1)
txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15)
self.nodes[0].generate(1)
txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15)
self.nodes[0].generate(1)
txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20)
self.nodes[0].generate(1)
txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20)
self.nodes[0].generate(1)
self.sync_all()
txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4")
assert_equal(len(txids), 3)
assert_equal(txids[0], txid0)
assert_equal(txids[1], txid1)
assert_equal(txids[2], txid2)
txidsb = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(len(txidsb), 3)
assert_equal(txidsb[0], txidb0)
assert_equal(txidsb[1], txidb1)
assert_equal(txidsb[2], txidb2)
# Check that limiting by height works
self.log.info("Testing querying txids by range of block heights..")
height_txids = self.nodes[1].getaddresstxids({
"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB"],
"start": 105,
"end": 110
})
assert_equal(len(height_txids), 2)
assert_equal(height_txids[0], txidb0)
assert_equal(height_txids[1], txidb1)
# Check that multiple addresses works
multitxids = self.nodes[1].getaddresstxids({"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", "yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4"]})
assert_equal(len(multitxids), 6)
assert_equal(multitxids[0], txid0)
assert_equal(multitxids[1], txidb0)
assert_equal(multitxids[2], txid1)
assert_equal(multitxids[3], txidb1)
assert_equal(multitxids[4], txid2)
assert_equal(multitxids[5], txidb2)
# Check that balances are correct
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 45 * 100000000)
# Check that outputs with the same address will only return one txid
self.log.info("Testing for txid uniqueness...")
addressHash = binascii.unhexlify("FE30B718DCF0BF8A2A686BF1820C073F8B2C3B37")
scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL])
unspent = self.nodes[0].listunspent()
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
tx.vout = [CTxOut(10, scriptPubKey), CTxOut(11, scriptPubKey)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(len(txidsmany), 4)
assert_equal(txidsmany[3], sent_txid)
# Check that balances are correct
self.log.info("Testing balances...")
balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
assert_equal(balance0["balance"], 45 * 100000000 + 21)
# Check that balances are correct after spending
self.log.info("Testing balances after spending...")
privkey2 = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
address2 = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
addressHash2 = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
scriptPubKey2 = CScript([OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG])
self.nodes[0].importprivkey(privkey2)
unspent = self.nodes[0].listunspent()
tx = CTransaction()
tx_fee_sat = 1000
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat
tx.vout = [CTxOut(amount, scriptPubKey2)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
balance1 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance1["balance"], amount)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))]
send_amount = 1 * 100000000 + 12840
change_amount = amount - send_amount - 10000
tx.vout = [CTxOut(change_amount, scriptPubKey2), CTxOut(send_amount, scriptPubKey)]
tx.rehash()
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
balance2 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance2["balance"], change_amount)
# Check that deltas are returned correctly
deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 0, "end": 200})
balance3 = 0
for delta in deltas:
balance3 += delta["satoshis"]
assert_equal(balance3, change_amount)
assert_equal(deltas[0]["address"], address2)
assert_equal(deltas[0]["blockindex"], 1)
# Check that entire range will be queried
deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]})
assert_equal(len(deltasAll), len(deltas))
# Check that deltas can be returned from range of block heights
deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 113, "end": 113})
assert_equal(len(deltas), 1)
# Check that unspent outputs can be queried
self.log.info("Testing utxos...")
utxos = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos), 1)
assert_equal(utxos[0]["satoshis"], change_amount)
# Check that indexes will be updated with a reorg
self.log.info("Testing reorg...")
best_hash = self.nodes[0].getbestblockhash()
self.nodes[0].invalidateblock(best_hash)
self.nodes[1].invalidateblock(best_hash)
self.nodes[2].invalidateblock(best_hash)
self.nodes[3].invalidateblock(best_hash)
# Allow some time for the reorg to start
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
self.sync_all()
balance4 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance4, balance1)
utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos2), 1)
assert_equal(utxos2[0]["satoshis"], amount)
# Check sorting of utxos
self.nodes[2].generate(150)
txidsort1 = self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
txidsort2 = self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
self.sync_all()
utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
assert_equal(len(utxos3), 3)
assert_equal(utxos3[0]["height"], 114)
assert_equal(utxos3[1]["height"], 264)
assert_equal(utxos3[2]["height"], 265)
# Check mempool indexing
self.log.info("Testing mempool indexing...")
privKey3 = "cRyrMvvqi1dmpiCmjmmATqjAwo6Wu7QTjKu1ABMYW5aFG4VXW99K"
address3 = "yWB15aAdpeKuSaQHFVJpBDPbNSLZJSnDLA"
addressHash3 = binascii.unhexlify("6C186B3A308A77C779A9BB71C3B5A7EC28232A13")
scriptPubKey3 = CScript([OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG])
# address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ"
scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL])
unspent = self.nodes[2].listunspent()
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat
tx.vout = [CTxOut(amount, scriptPubKey3)]
tx.rehash()
signed_tx = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))]
amount = int(unspent[1]["amount"] * 100000000) - tx_fee_sat
tx2.vout = [
CTxOut(int(amount / 4), scriptPubKey3),
CTxOut(int(amount / 4), scriptPubKey3),
CTxOut(int(amount / 4), scriptPubKey4),
CTxOut(int(amount / 4), scriptPubKey4)
]
tx2.rehash()
signed_tx2 = self.nodes[2].signrawtransaction(binascii.hexlify(tx2.serialize()).decode("utf-8"))
memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
mempool = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool), 3)
assert_equal(mempool[0]["txid"], memtxid1)
assert_equal(mempool[0]["address"], address3)
assert_equal(mempool[0]["index"], 0)
assert_equal(mempool[1]["txid"], memtxid2)
assert_equal(mempool[1]["index"], 0)
assert_equal(mempool[2]["txid"], memtxid2)
assert_equal(mempool[2]["index"], 1)
        self.nodes[2].generate(1)
        self.sync_all()
mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool2), 0)
tx = CTransaction()
tx.vin = [
CTxIn(COutPoint(int(memtxid2, 16), 0)),
CTxIn(COutPoint(int(memtxid2, 16), 1))
]
tx.vout = [CTxOut(int(amount / 2 - 10000), scriptPubKey2)]
tx.rehash()
self.nodes[2].importprivkey(privKey3)
signed_tx3 = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
memtxid3 = self.nodes[2].sendrawtransaction(signed_tx3["hex"], True)
self.bump_mocktime(2)
set_node_times(self.nodes, self.mocktime)
mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool3), 2)
assert_equal(mempool3[0]["prevtxid"], memtxid2)
assert_equal(mempool3[0]["prevout"], 0)
assert_equal(mempool3[1]["prevtxid"], memtxid2)
assert_equal(mempool3[1]["prevout"], 1)
# sending and receiving to the same address
privkey1 = "cMvZn1pVWntTEcsK36ZteGQXRAcZ8CoTbMXF1QasxBLdnTwyVQCc"
address1 = "yM9Eed1bxjy7tYxD3yZDHxjcVT48WdRoB1"
address1hash = binascii.unhexlify("0909C84A817651502E020AAD0FBCAE5F656E7D8A")
address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])
self.nodes[0].sendtoaddress(address1, 10)
self.nodes[0].generate(1)
self.sync_all()
utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})
assert_equal(len(utxos), 1)
tx = CTransaction()
tx.vin = [
CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["outputIndex"]))
]
amount = int(utxos[0]["satoshis"] - 10000)
tx.vout = [CTxOut(amount, address1script)]
tx.rehash()
self.nodes[0].importprivkey(privkey1)
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
mem_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.sync_all()
mempool_deltas = self.nodes[2].getaddressmempool({"addresses": [address1]})
assert_equal(len(mempool_deltas), 2)
self.log.info("Passed")
if __name__ == '__main__':
AddressIndexTest().main()
| mit | 8,207,495,467,560,706,000 | 41.657061 | 143 | 0.639778 | false | 3.179807 | true | false | false |
google-research/tapas | tapas/utils/beam_utils.py | 1 | 2276 | # coding=utf-8
# Copyright 2019 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utilities around apache beams."""
from typing import Iterable, List, Tuple
from tapas.protos import interaction_pb2
from tapas.utils import pretrain_utils
to_numpy_seed = pretrain_utils.to_numpy_seed
split_by_table_id_and_write = pretrain_utils.split_by_table_id_and_write
def rekey(
interaction):
new_interaction = interaction_pb2.Interaction()
new_interaction.CopyFrom(interaction)
iid = interaction.table.table_id
iid = hex(to_numpy_seed(iid))
new_interaction.id = iid
new_interaction.table.table_id = iid
return new_interaction
def _get_sharded_ranges(
begin,
end,
max_length,
):
"""Recursively cuts ranges in half to satisfy 'max_length'."""
if max_length <= 0:
raise ValueError("max_length <= 0.")
length = end - begin
if length <= max_length:
return [(begin, end)]
pivot = begin + length // 2
return (_get_sharded_ranges(begin, pivot, max_length) +
_get_sharded_ranges(pivot, end, max_length))
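# Editor's illustration (not part of the original module): a concrete trace of
# the recursive halving above.
def _example_sharded_ranges():
  # Every resulting range has length <= 4 and the ranges tile [0, 10) in order.
  assert _get_sharded_ranges(0, 10, max_length=4) == [(0, 2), (2, 5), (5, 7), (7, 10)]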
def get_row_sharded_interactions(
interaction,
max_num_cells,
):
"""Equally shards the interaction row-wise to satisfy 'max_num_cells'."""
num_columns = len(interaction.table.columns)
max_num_rows = max_num_cells // num_columns
if max_num_rows == 0:
return
for begin, end in _get_sharded_ranges(
begin=0,
end=len(interaction.table.rows),
max_length=max_num_rows,
):
new_interaction = interaction_pb2.Interaction()
new_interaction.CopyFrom(interaction)
del new_interaction.table.rows[:]
for row in interaction.table.rows[begin:end]:
new_interaction.table.rows.add().CopyFrom(row)
yield new_interaction
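# Editor's sketch (illustration only, not part of the original module): sharding
# an oversized table row-wise. The column and row contents are hypothetical.
def _example_row_sharding():
  interaction = interaction_pb2.Interaction()
  interaction.table.columns.add().text = 'name'
  interaction.table.columns.add().text = 'value'
  for i in range(100):
    row = interaction.table.rows.add()
    row.cells.add().text = 'row%d' % i
    row.cells.add().text = str(i)
  # With 2 columns and max_num_cells=50 each shard keeps at most 25 rows, so
  # the 100 rows above are split into 4 interactions of 25 rows each.
  shards = list(get_row_sharded_interactions(interaction, max_num_cells=50))
  return [len(shard.table.rows) for shard in shards]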
| apache-2.0 | 4,589,415,483,412,307,500 | 30.178082 | 75 | 0.711775 | false | 3.528682 | false | false | false |
ryanraaum/oldowan.polymorphism | oldowan/polymorphism/polymorphism.py | 1 | 1349 |
class Polymorphism(object):
def __init__(self, position, insert, value, reference=''):
self.position = position
self.insert = insert
self.value = value
self.reference = reference
def __cmp__(self, other):
if self.position == other.position:
if self.insert == other.insert:
return cmp(self.value, other.value)
return cmp(self.insert, other.insert)
return cmp(self.position, other.position)
def __str__(self):
if self.insert == 0:
if self.value == '-':
return '%s%s' % (self.position, 'd')
else:
return '%s%s' % (self.position, self.value)
return '%s.%s%s' % (self.position, self.insert, self.value)
def __repr__(self):
return str(self)
def is_substitution(self):
return self.insert == 0 and self.value != '-'
def is_transition(self):
changes = [self.value, self.reference]
changes.sort()
change = ('%s%s' % tuple(changes)).upper()
return self.is_substitution() and change in ['AG', 'CT']
def is_transversion(self):
return self.is_substitution() and not self.is_transition()
def is_insertion(self):
return self.insert > 0
def is_deletion(self):
return self.value in ['-']
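# Editor's sketch, not part of the original module: typical construction and the
# string forms produced by __str__. The positions and bases are hypothetical.
def _example_polymorphisms():
    sub = Polymorphism(73, 0, 'G', reference='A')   # substitution -> "73G"
    dele = Polymorphism(249, 0, '-')                # deletion     -> "249d"
    ins = Polymorphism(315, 1, 'C')                 # insertion    -> "315.1C"
    return str(sub), sub.is_transition(), str(dele), str(ins)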
| mit | 2,859,331,567,036,315,000 | 28.977778 | 67 | 0.55745 | false | 3.944444 | false | false | false |
wimac/home | Dropbox/skel/bin/bg.py | 1 | 2099 | #!/usr/bin/python
#
__author__ = 'author'
import getpass
try:
from xml.etree import ElementTree # for Python 2.5 users
except:
from elementtree import ElementTree
from gdata import service
import gdata
import atom
import getopt
import sys
def main():
# parse command line options
try:
opts, args = getopt.getopt(sys.argv[1:], "", ["f=","u=", "p="])
except getopt.error, msg:
print ('bg.py --f [file] --u [username] --p [password] | inline')
sys.exit(2)
file = ''
user=''
password = ''
# Process options
for o, a in opts:
if o == "--f":
file= a
elif o == "--u":
user=a
elif o == "--p":
password = a
if password =="inline":
password = getpass.getpass()
if file == '' or password == '' or user=='':
    print ('python bg.py --f [file] --u [username] --p [password] | inline')
sys.exit(2)
fileHandle = open (file)
#sample = BloggerExample(user, password)
#sample.CreatePost (fileHandle.readline() ,fileHandle.read() , "bloger", False)
servic = service.GDataService(user, password)
servic.source = 'Blogger_Python_Sample-1.0'
servic.service = 'blogger'
servic.server = 'www.blogger.com'
servic.ProgrammaticLogin()
feed = servic.Get('/feeds/default/blogs')
self_link = feed.entry[0].GetSelfLink()
if self_link:
blog_id = self_link.href.split('/')[-1]
entry = gdata.GDataEntry()
entry.author.append(atom.Author(atom.Name(text='author')))
entry.title = atom.Title(title_type='xhtml', text=fileHandle.readline() )
entry.content = atom.Content(content_type='html', text=fileHandle.read())
  # Attach the blog label as an Atom category (gdata's atom.Category).
  category = atom.Category(term="labelToDisplay",
                           scheme="http://www.blogger.com/atom/ns#")
  entry.category.append(category)
#if is_draft:
# control = atom.Control()
# control.draft = atom.Draft(text='yes')
# entry.control = control
# Ask the service to insert the new entry.
servic.Post(entry, '/feeds/' + blog_id + '/posts/default')
print('publishing completed')
fileHandle.close()
if __name__ == '__main__':
main()
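# Example invocation (hypothetical file and account values):
#   python bg.py --f post.txt --u someone@gmail.com --p inline
# where post.txt holds the post title on its first line and the body below it.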
| gpl-2.0 | 4,222,878,376,277,236,700 | 23.126437 | 81 | 0.640305 | false | 3.259317 | false | false | false |
The-Compiler/pytest | testing/test_capture.py | 12 | 50277 | import contextlib
import io
import os
import subprocess
import sys
import textwrap
from io import UnsupportedOperation
from typing import BinaryIO
from typing import cast
from typing import Generator
from typing import TextIO
import pytest
from _pytest import capture
from _pytest.capture import _get_multicapture
from _pytest.capture import CaptureManager
from _pytest.capture import CaptureResult
from _pytest.capture import MultiCapture
from _pytest.config import ExitCode
from _pytest.pytester import Testdir
# note: py.io capture tests were copied from
# pylib 1.4.20.dev2 (rev 13d9af95547e)
def StdCaptureFD(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.FDCapture(0) if in_ else None,
out=capture.FDCapture(1) if out else None,
err=capture.FDCapture(2) if err else None,
)
def StdCapture(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.SysCapture(0) if in_ else None,
out=capture.SysCapture(1) if out else None,
err=capture.SysCapture(2) if err else None,
)
def TeeStdCapture(
out: bool = True, err: bool = True, in_: bool = True
) -> MultiCapture[str]:
return capture.MultiCapture(
in_=capture.SysCapture(0, tee=True) if in_ else None,
out=capture.SysCapture(1, tee=True) if out else None,
err=capture.SysCapture(2, tee=True) if err else None,
)
class TestCaptureManager:
@pytest.mark.parametrize("method", ["no", "sys", "fd"])
def test_capturing_basic_api(self, method):
capouter = StdCaptureFD()
old = sys.stdout, sys.stderr, sys.stdin
try:
capman = CaptureManager(method)
capman.start_global_capturing()
capman.suspend_global_capture()
outerr = capman.read_global_capture()
assert outerr == ("", "")
capman.suspend_global_capture()
outerr = capman.read_global_capture()
assert outerr == ("", "")
print("hello")
capman.suspend_global_capture()
out, err = capman.read_global_capture()
if method == "no":
assert old == (sys.stdout, sys.stderr, sys.stdin)
else:
assert not out
capman.resume_global_capture()
print("hello")
capman.suspend_global_capture()
out, err = capman.read_global_capture()
if method != "no":
assert out == "hello\n"
capman.stop_global_capturing()
finally:
capouter.stop_capturing()
def test_init_capturing(self):
capouter = StdCaptureFD()
try:
capman = CaptureManager("fd")
capman.start_global_capturing()
pytest.raises(AssertionError, capman.start_global_capturing)
capman.stop_global_capturing()
finally:
capouter.stop_capturing()
@pytest.mark.parametrize("method", ["fd", "sys"])
def test_capturing_unicode(testdir, method):
obj = "'b\u00f6y'"
testdir.makepyfile(
"""\
# taken from issue 227 from nosetests
def test_unicode():
import sys
print(sys.stdout)
print(%s)
"""
% obj
)
result = testdir.runpytest("--capture=%s" % method)
result.stdout.fnmatch_lines(["*1 passed*"])
@pytest.mark.parametrize("method", ["fd", "sys"])
def test_capturing_bytes_in_utf8_encoding(testdir, method):
testdir.makepyfile(
"""\
def test_unicode():
print('b\\u00f6y')
"""
)
result = testdir.runpytest("--capture=%s" % method)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_collect_capturing(testdir):
p = testdir.makepyfile(
"""
import sys
print("collect %s failure" % 13)
sys.stderr.write("collect %s_stderr failure" % 13)
import xyz42123
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*Captured stdout*",
"collect 13 failure",
"*Captured stderr*",
"collect 13_stderr failure",
]
)
class TestPerTestCapturing:
def test_capture_and_fixtures(self, testdir):
p = testdir.makepyfile(
"""
def setup_module(mod):
print("setup module")
def setup_function(function):
print("setup " + function.__name__)
def test_func1():
print("in func1")
assert 0
def test_func2():
print("in func2")
assert 0
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"setup module*",
"setup test_func1*",
"in func1*",
"setup test_func2*",
"in func2*",
]
)
@pytest.mark.xfail(reason="unimplemented feature")
def test_capture_scope_cache(self, testdir):
p = testdir.makepyfile(
"""
import sys
def setup_module(func):
print("module-setup")
def setup_function(func):
print("function-setup")
def test_func():
print("in function")
assert 0
def teardown_function(func):
print("in teardown")
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*test_func():*",
"*Captured stdout during setup*",
"module-setup*",
"function-setup*",
"*Captured stdout*",
"in teardown*",
]
)
def test_no_carry_over(self, testdir):
p = testdir.makepyfile(
"""
def test_func1():
print("in func1")
def test_func2():
print("in func2")
assert 0
"""
)
result = testdir.runpytest(p)
s = result.stdout.str()
assert "in func1" not in s
assert "in func2" in s
def test_teardown_capturing(self, testdir):
p = testdir.makepyfile(
"""
def setup_function(function):
print("setup func1")
def teardown_function(function):
print("teardown func1")
assert 0
def test_func1():
print("in func1")
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*teardown_function*",
"*Captured stdout*",
"setup func1*",
"in func1*",
"teardown func1*",
# "*1 fixture failure*"
]
)
def test_teardown_capturing_final(self, testdir):
p = testdir.makepyfile(
"""
def teardown_module(mod):
print("teardown module")
assert 0
def test_func():
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*def teardown_module(mod):*",
"*Captured stdout*",
"*teardown module*",
"*1 error*",
]
)
def test_capturing_outerr(self, testdir):
p1 = testdir.makepyfile(
"""\
import sys
def test_capturing():
print(42)
sys.stderr.write(str(23))
def test_capturing_error():
print(1)
sys.stderr.write(str(2))
raise ValueError
"""
)
result = testdir.runpytest(p1)
result.stdout.fnmatch_lines(
[
"*test_capturing_outerr.py .F*",
"====* FAILURES *====",
"____*____",
"*test_capturing_outerr.py:8: ValueError",
"*--- Captured stdout *call*",
"1",
"*--- Captured stderr *call*",
"2",
]
)
class TestLoggingInteraction:
def test_logging_stream_ownership(self, testdir):
p = testdir.makepyfile(
"""\
def test_logging():
import logging
                import pytest
                from _pytest import capture
                stream = capture.CaptureIO()
logging.basicConfig(stream=stream)
stream.close() # to free memory/release resources
"""
)
result = testdir.runpytest_subprocess(p)
assert result.stderr.str().find("atexit") == -1
def test_logging_and_immediate_setupteardown(self, testdir):
p = testdir.makepyfile(
"""\
import logging
def setup_function(function):
logging.warning("hello1")
def test_logging():
logging.warning("hello2")
assert 0
def teardown_function(function):
logging.warning("hello3")
assert 0
"""
)
for optargs in (("--capture=sys",), ("--capture=fd",)):
print(optargs)
result = testdir.runpytest_subprocess(p, *optargs)
s = result.stdout.str()
result.stdout.fnmatch_lines(
["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors show first!
)
# verify proper termination
assert "closed" not in s
def test_logging_and_crossscope_fixtures(self, testdir):
p = testdir.makepyfile(
"""\
import logging
def setup_module(function):
logging.warning("hello1")
def test_logging():
logging.warning("hello2")
assert 0
def teardown_module(function):
logging.warning("hello3")
assert 0
"""
)
for optargs in (("--capture=sys",), ("--capture=fd",)):
print(optargs)
result = testdir.runpytest_subprocess(p, *optargs)
s = result.stdout.str()
result.stdout.fnmatch_lines(
["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors come first
)
# verify proper termination
assert "closed" not in s
def test_conftestlogging_is_shown(self, testdir):
testdir.makeconftest(
"""\
import logging
logging.basicConfig()
logging.warning("hello435")
"""
)
# make sure that logging is still captured in tests
result = testdir.runpytest_subprocess("-s", "-p", "no:capturelog")
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stderr.fnmatch_lines(["WARNING*hello435*"])
assert "operation on closed file" not in result.stderr.str()
def test_conftestlogging_and_test_logging(self, testdir):
testdir.makeconftest(
"""\
import logging
logging.basicConfig()
"""
)
# make sure that logging is still captured in tests
p = testdir.makepyfile(
"""\
def test_hello():
import logging
logging.warning("hello433")
assert 0
"""
)
result = testdir.runpytest_subprocess(p, "-p", "no:capturelog")
assert result.ret != 0
result.stdout.fnmatch_lines(["WARNING*hello433*"])
assert "something" not in result.stderr.str()
assert "operation on closed file" not in result.stderr.str()
def test_logging_after_cap_stopped(self, testdir):
testdir.makeconftest(
"""\
import pytest
import logging
log = logging.getLogger(__name__)
@pytest.fixture
def log_on_teardown():
yield
log.warning('Logging on teardown')
"""
)
# make sure that logging is still captured in tests
p = testdir.makepyfile(
"""\
def test_hello(log_on_teardown):
import logging
logging.warning("hello433")
assert 1
raise KeyboardInterrupt()
"""
)
result = testdir.runpytest_subprocess(p, "--log-cli-level", "info")
assert result.ret != 0
result.stdout.fnmatch_lines(
["*WARNING*hello433*", "*WARNING*Logging on teardown*"]
)
assert (
"AttributeError: 'NoneType' object has no attribute 'resume_capturing'"
not in result.stderr.str()
)
class TestCaptureFixture:
@pytest.mark.parametrize("opt", [[], ["-s"]])
def test_std_functional(self, testdir, opt):
reprec = testdir.inline_runsource(
"""\
def test_hello(capsys):
print(42)
out, err = capsys.readouterr()
assert out.startswith("42")
""",
*opt,
)
reprec.assertoutcome(passed=1)
def test_capsyscapfd(self, testdir):
p = testdir.makepyfile(
"""\
def test_one(capsys, capfd):
pass
def test_two(capfd, capsys):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
[
"*ERROR*setup*test_one*",
"E*capfd*capsys*same*time*",
"*ERROR*setup*test_two*",
"E*capsys*capfd*same*time*",
"*2 errors*",
]
)
def test_capturing_getfixturevalue(self, testdir):
"""Test that asking for "capfd" and "capsys" using request.getfixturevalue
in the same test is an error.
"""
testdir.makepyfile(
"""\
def test_one(capsys, request):
request.getfixturevalue("capfd")
def test_two(capfd, request):
request.getfixturevalue("capsys")
"""
)
result = testdir.runpytest()
result.stdout.fnmatch_lines(
[
"*test_one*",
"E * cannot use capfd and capsys at the same time",
"*test_two*",
"E * cannot use capsys and capfd at the same time",
"*2 failed in*",
]
)
def test_capsyscapfdbinary(self, testdir):
p = testdir.makepyfile(
"""\
def test_one(capsys, capfdbinary):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(
["*ERROR*setup*test_one*", "E*capfdbinary*capsys*same*time*", "*1 error*"]
)
@pytest.mark.parametrize("method", ["sys", "fd"])
def test_capture_is_represented_on_failure_issue128(self, testdir, method):
p = testdir.makepyfile(
"""\
def test_hello(cap{}):
print("xxx42xxx")
assert 0
""".format(
method
)
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(["xxx42xxx"])
def test_stdfd_functional(self, testdir):
reprec = testdir.inline_runsource(
"""\
def test_hello(capfd):
import os
os.write(1, b"42")
out, err = capfd.readouterr()
assert out.startswith("42")
capfd.close()
"""
)
reprec.assertoutcome(passed=1)
@pytest.mark.parametrize("nl", ("\n", "\r\n", "\r"))
def test_cafd_preserves_newlines(self, capfd, nl):
print("test", end=nl)
out, err = capfd.readouterr()
assert out.endswith(nl)
def test_capfdbinary(self, testdir):
reprec = testdir.inline_runsource(
"""\
def test_hello(capfdbinary):
import os
# some likely un-decodable bytes
os.write(1, b'\\xfe\\x98\\x20')
out, err = capfdbinary.readouterr()
assert out == b'\\xfe\\x98\\x20'
assert err == b''
"""
)
reprec.assertoutcome(passed=1)
def test_capsysbinary(self, testdir):
p1 = testdir.makepyfile(
r"""
def test_hello(capsysbinary):
import sys
sys.stdout.buffer.write(b'hello')
# Some likely un-decodable bytes.
sys.stdout.buffer.write(b'\xfe\x98\x20')
sys.stdout.buffer.flush()
# Ensure writing in text mode still works and is captured.
# https://github.com/pytest-dev/pytest/issues/6871
print("world", flush=True)
out, err = capsysbinary.readouterr()
assert out == b'hello\xfe\x98\x20world\n'
assert err == b''
print("stdout after")
print("stderr after", file=sys.stderr)
"""
)
result = testdir.runpytest(str(p1), "-rA")
result.stdout.fnmatch_lines(
[
"*- Captured stdout call -*",
"stdout after",
"*- Captured stderr call -*",
"stderr after",
"*= 1 passed in *",
]
)
def test_partial_setup_failure(self, testdir):
p = testdir.makepyfile(
"""\
def test_hello(capsys, missingarg):
pass
"""
)
result = testdir.runpytest(p)
result.stdout.fnmatch_lines(["*test_partial_setup_failure*", "*1 error*"])
def test_keyboardinterrupt_disables_capturing(self, testdir):
p = testdir.makepyfile(
"""\
def test_hello(capfd):
import os
os.write(1, b'42')
raise KeyboardInterrupt()
"""
)
result = testdir.runpytest_subprocess(p)
result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
assert result.ret == 2
def test_capture_and_logging(self, testdir):
"""#14"""
p = testdir.makepyfile(
"""\
import logging
def test_log(capsys):
logging.error('x')
"""
)
result = testdir.runpytest_subprocess(p)
assert "closed" not in result.stderr.str()
@pytest.mark.parametrize("fixture", ["capsys", "capfd"])
@pytest.mark.parametrize("no_capture", [True, False])
def test_disabled_capture_fixture(self, testdir, fixture, no_capture):
testdir.makepyfile(
"""\
def test_disabled({fixture}):
print('captured before')
with {fixture}.disabled():
print('while capture is disabled')
print('captured after')
assert {fixture}.readouterr() == ('captured before\\ncaptured after\\n', '')
def test_normal():
print('test_normal executed')
""".format(
fixture=fixture
)
)
args = ("-s",) if no_capture else ()
result = testdir.runpytest_subprocess(*args)
result.stdout.fnmatch_lines(["*while capture is disabled*", "*= 2 passed in *"])
result.stdout.no_fnmatch_line("*captured before*")
result.stdout.no_fnmatch_line("*captured after*")
if no_capture:
assert "test_normal executed" in result.stdout.str()
else:
result.stdout.no_fnmatch_line("*test_normal executed*")
def test_disabled_capture_fixture_twice(self, testdir: Testdir) -> None:
"""Test that an inner disabled() exit doesn't undo an outer disabled().
Issue #7148.
"""
testdir.makepyfile(
"""
def test_disabled(capfd):
print('captured before')
with capfd.disabled():
print('while capture is disabled 1')
with capfd.disabled():
print('while capture is disabled 2')
print('while capture is disabled 1 after')
print('captured after')
assert capfd.readouterr() == ('captured before\\ncaptured after\\n', '')
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
[
"*while capture is disabled 1",
"*while capture is disabled 2",
"*while capture is disabled 1 after",
],
consecutive=True,
)
@pytest.mark.parametrize("fixture", ["capsys", "capfd"])
def test_fixture_use_by_other_fixtures(self, testdir, fixture):
"""Ensure that capsys and capfd can be used by other fixtures during
setup and teardown."""
testdir.makepyfile(
"""\
import sys
import pytest
@pytest.fixture
def captured_print({fixture}):
print('stdout contents begin')
print('stderr contents begin', file=sys.stderr)
out, err = {fixture}.readouterr()
yield out, err
print('stdout contents end')
print('stderr contents end', file=sys.stderr)
out, err = {fixture}.readouterr()
assert out == 'stdout contents end\\n'
assert err == 'stderr contents end\\n'
def test_captured_print(captured_print):
out, err = captured_print
assert out == 'stdout contents begin\\n'
assert err == 'stderr contents begin\\n'
""".format(
fixture=fixture
)
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(["*1 passed*"])
result.stdout.no_fnmatch_line("*stdout contents begin*")
result.stdout.no_fnmatch_line("*stderr contents begin*")
@pytest.mark.parametrize("cap", ["capsys", "capfd"])
def test_fixture_use_by_other_fixtures_teardown(self, testdir, cap):
"""Ensure we can access setup and teardown buffers from teardown when using capsys/capfd (##3033)"""
testdir.makepyfile(
"""\
import sys
import pytest
import os
@pytest.fixture()
def fix({cap}):
print("setup out")
sys.stderr.write("setup err\\n")
yield
out, err = {cap}.readouterr()
assert out == 'setup out\\ncall out\\n'
assert err == 'setup err\\ncall err\\n'
def test_a(fix):
print("call out")
sys.stderr.write("call err\\n")
""".format(
cap=cap
)
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_setup_failure_does_not_kill_capturing(testdir):
sub1 = testdir.mkpydir("sub1")
sub1.join("conftest.py").write(
textwrap.dedent(
"""\
def pytest_runtest_setup(item):
raise ValueError(42)
"""
)
)
sub1.join("test_mod.py").write("def test_func1(): pass")
result = testdir.runpytest(testdir.tmpdir, "--traceconfig")
result.stdout.fnmatch_lines(["*ValueError(42)*", "*1 error*"])
def test_capture_conftest_runtest_setup(testdir):
testdir.makeconftest(
"""
def pytest_runtest_setup():
print("hello19")
"""
)
testdir.makepyfile("def test_func(): pass")
result = testdir.runpytest()
assert result.ret == 0
result.stdout.no_fnmatch_line("*hello19*")
def test_capture_badoutput_issue412(testdir):
testdir.makepyfile(
"""
import os
def test_func():
omg = bytearray([1,129,1])
os.write(1, omg)
assert 0
"""
)
result = testdir.runpytest("--capture=fd")
result.stdout.fnmatch_lines(
"""
*def test_func*
*assert 0*
*Captured*
*1 failed*
"""
)
def test_capture_early_option_parsing(testdir):
testdir.makeconftest(
"""
def pytest_runtest_setup():
print("hello19")
"""
)
testdir.makepyfile("def test_func(): pass")
result = testdir.runpytest("-vs")
assert result.ret == 0
assert "hello19" in result.stdout.str()
def test_capture_binary_output(testdir):
testdir.makepyfile(
r"""
import pytest
def test_a():
import sys
import subprocess
subprocess.call([sys.executable, __file__])
def test_foo():
import os;os.write(1, b'\xc3')
if __name__ == '__main__':
test_foo()
"""
)
result = testdir.runpytest("--assert=plain")
result.assert_outcomes(passed=2)
def test_error_during_readouterr(testdir):
"""Make sure we suspend capturing if errors occur during readouterr"""
testdir.makepyfile(
pytest_xyz="""
from _pytest.capture import FDCapture
def bad_snap(self):
raise Exception('boom')
assert FDCapture.snap
FDCapture.snap = bad_snap
"""
)
result = testdir.runpytest_subprocess("-p", "pytest_xyz", "--version")
result.stderr.fnmatch_lines(
["*in bad_snap", " raise Exception('boom')", "Exception: boom"]
)
class TestCaptureIO:
def test_text(self):
f = capture.CaptureIO()
f.write("hello")
s = f.getvalue()
assert s == "hello"
f.close()
def test_unicode_and_str_mixture(self):
f = capture.CaptureIO()
f.write("\u00f6")
pytest.raises(TypeError, f.write, b"hello")
def test_write_bytes_to_buffer(self):
"""In python3, stdout / stderr are text io wrappers (exposing a buffer
property of the underlying bytestream). See issue #1407
"""
f = capture.CaptureIO()
f.buffer.write(b"foo\r\n")
assert f.getvalue() == "foo\r\n"
class TestTeeCaptureIO(TestCaptureIO):
def test_text(self):
sio = io.StringIO()
f = capture.TeeCaptureIO(sio)
f.write("hello")
s1 = f.getvalue()
assert s1 == "hello"
s2 = sio.getvalue()
assert s2 == s1
f.close()
sio.close()
def test_unicode_and_str_mixture(self):
sio = io.StringIO()
f = capture.TeeCaptureIO(sio)
f.write("\u00f6")
pytest.raises(TypeError, f.write, b"hello")
def test_dontreadfrominput():
from _pytest.capture import DontReadFromInput
f = DontReadFromInput()
assert f.buffer is f
assert not f.isatty()
pytest.raises(OSError, f.read)
pytest.raises(OSError, f.readlines)
iter_f = iter(f)
pytest.raises(OSError, next, iter_f)
pytest.raises(UnsupportedOperation, f.fileno)
f.close() # just for completeness
def test_captureresult() -> None:
cr = CaptureResult("out", "err")
assert len(cr) == 2
assert cr.out == "out"
assert cr.err == "err"
out, err = cr
assert out == "out"
assert err == "err"
assert cr[0] == "out"
assert cr[1] == "err"
assert cr == cr
assert cr == CaptureResult("out", "err")
assert cr != CaptureResult("wrong", "err")
assert cr == ("out", "err")
assert cr != ("out", "wrong")
assert hash(cr) == hash(CaptureResult("out", "err"))
assert hash(cr) == hash(("out", "err"))
assert hash(cr) != hash(("out", "wrong"))
assert cr < ("z",)
assert cr < ("z", "b")
assert cr < ("z", "b", "c")
assert cr.count("err") == 1
assert cr.count("wrong") == 0
assert cr.index("err") == 1
with pytest.raises(ValueError):
assert cr.index("wrong") == 0
assert next(iter(cr)) == "out"
assert cr._replace(err="replaced") == ("out", "replaced")
@pytest.fixture
def tmpfile(testdir) -> Generator[BinaryIO, None, None]:
f = testdir.makepyfile("").open("wb+")
yield f
if not f.closed:
f.close()
@contextlib.contextmanager
def lsof_check():
pid = os.getpid()
try:
out = subprocess.check_output(("lsof", "-p", str(pid))).decode()
except (OSError, subprocess.CalledProcessError, UnicodeDecodeError) as exc:
# about UnicodeDecodeError, see note on pytester
pytest.skip("could not run 'lsof' ({!r})".format(exc))
yield
out2 = subprocess.check_output(("lsof", "-p", str(pid))).decode()
len1 = len([x for x in out.split("\n") if "REG" in x])
len2 = len([x for x in out2.split("\n") if "REG" in x])
assert len2 < len1 + 3, out2
class TestFDCapture:
def test_simple(self, tmpfile):
fd = tmpfile.fileno()
cap = capture.FDCapture(fd)
data = b"hello"
os.write(fd, data)
pytest.raises(AssertionError, cap.snap)
cap.done()
cap = capture.FDCapture(fd)
cap.start()
os.write(fd, data)
s = cap.snap()
cap.done()
assert s == "hello"
def test_simple_many(self, tmpfile):
for i in range(10):
self.test_simple(tmpfile)
def test_simple_many_check_open_files(self, testdir):
with lsof_check():
with testdir.makepyfile("").open("wb+") as tmpfile:
self.test_simple_many(tmpfile)
def test_simple_fail_second_start(self, tmpfile):
fd = tmpfile.fileno()
cap = capture.FDCapture(fd)
cap.done()
pytest.raises(AssertionError, cap.start)
def test_stderr(self):
cap = capture.FDCapture(2)
cap.start()
print("hello", file=sys.stderr)
s = cap.snap()
cap.done()
assert s == "hello\n"
def test_stdin(self):
cap = capture.FDCapture(0)
cap.start()
x = os.read(0, 100).strip()
cap.done()
assert x == b""
def test_writeorg(self, tmpfile):
data1, data2 = b"foo", b"bar"
cap = capture.FDCapture(tmpfile.fileno())
cap.start()
tmpfile.write(data1)
tmpfile.flush()
cap.writeorg(data2.decode("ascii"))
scap = cap.snap()
cap.done()
assert scap == data1.decode("ascii")
with open(tmpfile.name, "rb") as stmp_file:
stmp = stmp_file.read()
assert stmp == data2
def test_simple_resume_suspend(self):
with saved_fd(1):
cap = capture.FDCapture(1)
cap.start()
data = b"hello"
os.write(1, data)
sys.stdout.write("whatever")
s = cap.snap()
assert s == "hellowhatever"
cap.suspend()
os.write(1, b"world")
sys.stdout.write("qlwkej")
assert not cap.snap()
cap.resume()
os.write(1, b"but now")
sys.stdout.write(" yes\n")
s = cap.snap()
assert s == "but now yes\n"
cap.suspend()
cap.done()
pytest.raises(AssertionError, cap.suspend)
assert repr(cap) == (
"<FDCapture 1 oldfd={} _state='done' tmpfile={!r}>".format(
cap.targetfd_save, cap.tmpfile
)
)
# Should not crash with missing "_old".
assert repr(cap.syscapture) == (
"<SysCapture stdout _old=<UNSET> _state='done' tmpfile={!r}>".format(
cap.syscapture.tmpfile
)
)
def test_capfd_sys_stdout_mode(self, capfd):
assert "b" not in sys.stdout.mode
@contextlib.contextmanager
def saved_fd(fd):
new_fd = os.dup(fd)
try:
yield
finally:
os.dup2(new_fd, fd)
os.close(new_fd)
class TestStdCapture:
captureclass = staticmethod(StdCapture)
@contextlib.contextmanager
def getcapture(self, **kw):
cap = self.__class__.captureclass(**kw)
cap.start_capturing()
try:
yield cap
finally:
cap.stop_capturing()
def test_capturing_done_simple(self):
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert out == "hello"
assert err == "world"
def test_capturing_reset_simple(self):
with self.getcapture() as cap:
print("hello world")
sys.stderr.write("hello error\n")
out, err = cap.readouterr()
assert out == "hello world\n"
assert err == "hello error\n"
def test_capturing_readouterr(self):
with self.getcapture() as cap:
print("hello world")
sys.stderr.write("hello error\n")
out, err = cap.readouterr()
assert out == "hello world\n"
assert err == "hello error\n"
sys.stderr.write("error2")
out, err = cap.readouterr()
assert err == "error2"
def test_capture_results_accessible_by_attribute(self):
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
capture_result = cap.readouterr()
assert capture_result.out == "hello"
assert capture_result.err == "world"
def test_capturing_readouterr_unicode(self):
with self.getcapture() as cap:
print("hxąć")
out, err = cap.readouterr()
assert out == "hxąć\n"
def test_reset_twice_error(self):
with self.getcapture() as cap:
print("hello")
out, err = cap.readouterr()
pytest.raises(ValueError, cap.stop_capturing)
assert out == "hello\n"
assert not err
def test_capturing_modify_sysouterr_in_between(self):
oldout = sys.stdout
olderr = sys.stderr
with self.getcapture() as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
sys.stdout = capture.CaptureIO()
sys.stderr = capture.CaptureIO()
print("not seen")
sys.stderr.write("not seen\n")
out, err = cap.readouterr()
assert out == "hello"
assert err == "world"
assert sys.stdout == oldout
assert sys.stderr == olderr
def test_capturing_error_recursive(self):
with self.getcapture() as cap1:
print("cap1")
with self.getcapture() as cap2:
print("cap2")
out2, err2 = cap2.readouterr()
out1, err1 = cap1.readouterr()
assert out1 == "cap1\n"
assert out2 == "cap2\n"
def test_just_out_capture(self):
with self.getcapture(out=True, err=False) as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert out == "hello"
assert not err
def test_just_err_capture(self):
with self.getcapture(out=False, err=True) as cap:
sys.stdout.write("hello")
sys.stderr.write("world")
out, err = cap.readouterr()
assert err == "world"
assert not out
def test_stdin_restored(self):
old = sys.stdin
with self.getcapture(in_=True):
newstdin = sys.stdin
assert newstdin != sys.stdin
assert sys.stdin is old
def test_stdin_nulled_by_default(self):
print("XXX this test may well hang instead of crashing")
print("XXX which indicates an error in the underlying capturing")
print("XXX mechanisms")
with self.getcapture():
pytest.raises(OSError, sys.stdin.read)
class TestTeeStdCapture(TestStdCapture):
captureclass = staticmethod(TeeStdCapture)
def test_capturing_error_recursive(self):
r"""For TeeStdCapture since we passthrough stderr/stdout, cap1
should get all output, while cap2 should only get "cap2\n"."""
with self.getcapture() as cap1:
print("cap1")
with self.getcapture() as cap2:
print("cap2")
out2, err2 = cap2.readouterr()
out1, err1 = cap1.readouterr()
assert out1 == "cap1\ncap2\n"
assert out2 == "cap2\n"
class TestStdCaptureFD(TestStdCapture):
captureclass = staticmethod(StdCaptureFD)
def test_simple_only_fd(self, testdir):
testdir.makepyfile(
"""\
import os
def test_x():
os.write(1, b"hello\\n")
assert 0
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
"""
*test_x*
*assert 0*
*Captured stdout*
"""
)
def test_intermingling(self):
with self.getcapture() as cap:
os.write(1, b"1")
sys.stdout.write(str(2))
sys.stdout.flush()
os.write(1, b"3")
os.write(2, b"a")
sys.stderr.write("b")
sys.stderr.flush()
os.write(2, b"c")
out, err = cap.readouterr()
assert out == "123"
assert err == "abc"
def test_many(self, capfd):
with lsof_check():
for i in range(10):
cap = StdCaptureFD()
cap.start_capturing()
cap.stop_capturing()
class TestStdCaptureFDinvalidFD:
def test_stdcapture_fd_invalid_fd(self, testdir):
testdir.makepyfile(
"""
import os
from fnmatch import fnmatch
from _pytest import capture
def StdCaptureFD(out=True, err=True, in_=True):
return capture.MultiCapture(
in_=capture.FDCapture(0) if in_ else None,
out=capture.FDCapture(1) if out else None,
err=capture.FDCapture(2) if err else None,
)
def test_stdout():
os.close(1)
cap = StdCaptureFD(out=True, err=False, in_=False)
assert fnmatch(repr(cap.out), "<FDCapture 1 oldfd=* _state='initialized' tmpfile=*>")
cap.start_capturing()
os.write(1, b"stdout")
assert cap.readouterr() == ("stdout", "")
cap.stop_capturing()
def test_stderr():
os.close(2)
cap = StdCaptureFD(out=False, err=True, in_=False)
assert fnmatch(repr(cap.err), "<FDCapture 2 oldfd=* _state='initialized' tmpfile=*>")
cap.start_capturing()
os.write(2, b"stderr")
assert cap.readouterr() == ("", "stderr")
cap.stop_capturing()
def test_stdin():
os.close(0)
cap = StdCaptureFD(out=False, err=False, in_=True)
assert fnmatch(repr(cap.in_), "<FDCapture 0 oldfd=* _state='initialized' tmpfile=*>")
cap.stop_capturing()
"""
)
result = testdir.runpytest_subprocess("--capture=fd")
assert result.ret == 0
assert result.parseoutcomes()["passed"] == 3
def test_fdcapture_invalid_fd_with_fd_reuse(self, testdir):
with saved_fd(1):
os.close(1)
cap = capture.FDCaptureBinary(1)
cap.start()
os.write(1, b"started")
cap.suspend()
os.write(1, b" suspended")
cap.resume()
os.write(1, b" resumed")
assert cap.snap() == b"started resumed"
cap.done()
with pytest.raises(OSError):
os.write(1, b"done")
def test_fdcapture_invalid_fd_without_fd_reuse(self, testdir):
with saved_fd(1), saved_fd(2):
os.close(1)
os.close(2)
cap = capture.FDCaptureBinary(2)
cap.start()
os.write(2, b"started")
cap.suspend()
os.write(2, b" suspended")
cap.resume()
os.write(2, b" resumed")
assert cap.snap() == b"started resumed"
cap.done()
with pytest.raises(OSError):
os.write(2, b"done")
def test_capture_not_started_but_reset():
capsys = StdCapture()
capsys.stop_capturing()
def test_using_capsys_fixture_works_with_sys_stdout_encoding(capsys):
test_text = "test text"
print(test_text.encode(sys.stdout.encoding, "replace"))
(out, err) = capsys.readouterr()
assert out
assert err == ""
def test_capsys_results_accessible_by_attribute(capsys):
sys.stdout.write("spam")
sys.stderr.write("eggs")
capture_result = capsys.readouterr()
assert capture_result.out == "spam"
assert capture_result.err == "eggs"
def test_fdcapture_tmpfile_remains_the_same() -> None:
cap = StdCaptureFD(out=False, err=True)
try:
cap.start_capturing()
capfile = cap.err.tmpfile
cap.readouterr()
finally:
cap.stop_capturing()
capfile2 = cap.err.tmpfile
assert capfile2 == capfile
def test_close_and_capture_again(testdir):
testdir.makepyfile(
"""
import os
def test_close():
os.close(1)
def test_capture_again():
os.write(1, b"hello\\n")
assert 0
"""
)
result = testdir.runpytest_subprocess()
result.stdout.fnmatch_lines(
"""
*test_capture_again*
*assert 0*
*stdout*
*hello*
"""
)
@pytest.mark.parametrize(
"method", ["SysCapture(2)", "SysCapture(2, tee=True)", "FDCapture(2)"]
)
def test_capturing_and_logging_fundamentals(testdir, method: str) -> None:
# here we check a fundamental feature
p = testdir.makepyfile(
"""
import sys, os
import py, logging
from _pytest import capture
cap = capture.MultiCapture(
in_=None,
out=None,
err=capture.%s,
)
cap.start_capturing()
logging.warning("hello1")
outerr = cap.readouterr()
print("suspend, captured %%s" %%(outerr,))
logging.warning("hello2")
cap.pop_outerr_to_orig()
logging.warning("hello3")
outerr = cap.readouterr()
print("suspend2, captured %%s" %% (outerr,))
"""
% (method,)
)
result = testdir.runpython(p)
result.stdout.fnmatch_lines(
"""
suspend, captured*hello1*
suspend2, captured*WARNING:root:hello3*
"""
)
result.stderr.fnmatch_lines(
"""
WARNING:root:hello2
"""
)
assert "atexit" not in result.stderr.str()
def test_error_attribute_issue555(testdir):
testdir.makepyfile(
"""
import sys
def test_capattr():
assert sys.stdout.errors == "replace"
assert sys.stderr.errors == "replace"
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
@pytest.mark.skipif(
not sys.platform.startswith("win") and sys.version_info[:2] >= (3, 6),
reason="only py3.6+ on windows",
)
def test_py36_windowsconsoleio_workaround_non_standard_streams() -> None:
"""
Ensure _py36_windowsconsoleio_workaround function works with objects that
do not implement the full ``io``-based stream protocol, for example execnet channels (#2666).
"""
from _pytest.capture import _py36_windowsconsoleio_workaround
class DummyStream:
def write(self, s):
pass
stream = cast(TextIO, DummyStream())
_py36_windowsconsoleio_workaround(stream)
def test_dontreadfrominput_has_encoding(testdir):
testdir.makepyfile(
"""
import sys
def test_capattr():
# should not raise AttributeError
assert sys.stdout.encoding
assert sys.stderr.encoding
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_crash_on_closing_tmpfile_py27(testdir):
p = testdir.makepyfile(
"""
import threading
import sys
printing = threading.Event()
def spam():
f = sys.stderr
print('SPAMBEFORE', end='', file=f)
printing.set()
while True:
try:
f.flush()
except (OSError, ValueError):
break
def test_spam_in_thread():
t = threading.Thread(target=spam)
t.daemon = True
t.start()
printing.wait()
"""
)
# Do not consider plugins like hypothesis, which might output to stderr.
testdir.monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
result = testdir.runpytest_subprocess(str(p))
assert result.ret == 0
assert result.stderr.str() == ""
result.stdout.no_fnmatch_line("*OSError*")
def test_global_capture_with_live_logging(testdir):
# Issue 3819
# capture should work with live cli logging
# Teardown report seems to have the capture for the whole process (setup, capture, teardown)
testdir.makeconftest(
"""
def pytest_runtest_logreport(report):
if "test_global" in report.nodeid:
if report.when == "teardown":
with open("caplog", "w") as f:
f.write(report.caplog)
with open("capstdout", "w") as f:
f.write(report.capstdout)
"""
)
testdir.makepyfile(
"""
import logging
import sys
import pytest
logger = logging.getLogger(__name__)
@pytest.fixture
def fix1():
print("fix setup")
logging.info("fix setup")
yield
logging.info("fix teardown")
print("fix teardown")
def test_global(fix1):
print("begin test")
logging.info("something in test")
print("end test")
"""
)
result = testdir.runpytest_subprocess("--log-cli-level=INFO")
assert result.ret == 0
with open("caplog") as f:
caplog = f.read()
assert "fix setup" in caplog
assert "something in test" in caplog
assert "fix teardown" in caplog
with open("capstdout") as f:
capstdout = f.read()
assert "fix setup" in capstdout
assert "begin test" in capstdout
assert "end test" in capstdout
assert "fix teardown" in capstdout
@pytest.mark.parametrize("capture_fixture", ["capsys", "capfd"])
def test_capture_with_live_logging(testdir, capture_fixture):
# Issue 3819
# capture should work with live cli logging
testdir.makepyfile(
"""
import logging
import sys
logger = logging.getLogger(__name__)
def test_capture({0}):
print("hello")
sys.stderr.write("world\\n")
captured = {0}.readouterr()
assert captured.out == "hello\\n"
assert captured.err == "world\\n"
logging.info("something")
print("next")
logging.info("something")
captured = {0}.readouterr()
assert captured.out == "next\\n"
""".format(
capture_fixture
)
)
result = testdir.runpytest_subprocess("--log-cli-level=INFO")
assert result.ret == 0
def test_typeerror_encodedfile_write(testdir):
"""It should behave the same with and without output capturing (#4861)."""
p = testdir.makepyfile(
"""
def test_fails():
import sys
sys.stdout.write(b"foo")
"""
)
result_without_capture = testdir.runpytest("-s", str(p))
result_with_capture = testdir.runpytest(str(p))
assert result_with_capture.ret == result_without_capture.ret
out = result_with_capture.stdout.str()
assert ("TypeError: write() argument must be str, not bytes" in out) or (
"TypeError: unicode argument expected, got 'bytes'" in out
)
def test_stderr_write_returns_len(capsys):
"""Write on Encoded files, namely captured stderr, should return number of characters written."""
assert sys.stderr.write("Foo") == 3
def test_encodedfile_writelines(tmpfile: BinaryIO) -> None:
ef = capture.EncodedFile(tmpfile, encoding="utf-8")
with pytest.raises(TypeError):
ef.writelines([b"line1", b"line2"])
assert ef.writelines(["line3", "line4"]) is None # type: ignore[func-returns-value]
ef.flush()
tmpfile.seek(0)
assert tmpfile.read() == b"line3line4"
tmpfile.close()
with pytest.raises(ValueError):
ef.read()
def test__get_multicapture() -> None:
assert isinstance(_get_multicapture("no"), MultiCapture)
pytest.raises(ValueError, _get_multicapture, "unknown").match(
r"^unknown capturing method: 'unknown'"
)
def test_logging_while_collecting(testdir):
"""Issue #6240: Calls to logging.xxx() during collection causes all logging calls to be duplicated to stderr"""
p = testdir.makepyfile(
"""\
import logging
logging.warning("during collection")
def test_logging():
logging.warning("during call")
assert False
"""
)
result = testdir.runpytest_subprocess(p)
assert result.ret == ExitCode.TESTS_FAILED
result.stdout.fnmatch_lines(
[
"*test_*.py F*",
"====* FAILURES *====",
"____*____",
"*--- Captured log call*",
"WARNING * during call",
"*1 failed*",
]
)
result.stdout.no_fnmatch_line("*Captured stderr call*")
result.stdout.no_fnmatch_line("*during collection*")
| mit | 4,863,957,510,946,503,000 | 29.413188 | 115 | 0.529708 | false | 4.10895 | true | false | false |
carlosperate/ubitflashtool | tests/test_cli.py | 1 | 11444 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for cli.py."""
import os
from unittest import mock
from click.testing import CliRunner
import pytest
from ubittool import cli, cmds
@pytest.fixture
def check_no_board_connected():
"""Check that there is no mbed board that PyOCD can connect to."""
try:
cmds._read_continuous_memory(address=0x00, count=16)
except Exception:
# Good: Exception raised if no board is found
pass
else:
raise Exception("Found an Mbed device connected, please unplug.")
@mock.patch("ubittool.cli.os.path.exists", autospec=True)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker(mock_exit, mock_echo, mock_exists):
"""Test the file checker perform the required checks and prints info."""
mock_exists.return_value = False
cli._file_checker("subject", "file/path.py")
mock_exists.assert_called_once_with("file/path.py")
assert mock_echo.call_count == 1
assert "subject will be written to: file/path.py" in mock_echo.call_args[0]
assert mock_exit.call_count == 0
@mock.patch("ubittool.cli.os.path.exists", autospec=True)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker_existing_path(mock_exit, mock_echo, mock_exists):
"""Test file checker exits with error if the file exists."""
mock_exists.return_value = True
cli._file_checker("subject", "file/path.py")
mock_exists.assert_called_once_with("file/path.py")
assert mock_echo.call_count == 1
assert (
"Abort: The file/path.py file already exists."
in mock_echo.call_args[0][0]
)
mock_exit.assert_called_once_with(1)
@mock.patch("ubittool.cli.click.echo", autospec=True)
@mock.patch("ubittool.cli.sys.exit", autospec=True)
def test_file_checker_no_path(mock_exit, mock_echo):
"""Test the file check informs about console output if no file is given."""
cli._file_checker("subject", None)
assert mock_echo.call_count == 1
assert "subject will be output to console." in mock_echo.call_args[0]
assert mock_exit.call_count == 0
@mock.patch("ubittool.cli.read_python_code", autospec=True)
def test_read_code(mock_read_python_code, check_no_board_connected):
"""Test the read-code command without a file option."""
python_code = "Python code here"
mock_read_python_code.return_value = python_code
runner = CliRunner()
result = runner.invoke(cli.read_code)
assert "MicroPython code will be output to console." in result.output
assert "Printing the MicroPython code" in result.output
assert python_code in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_code_no_board(check_no_board_connected):
"""Test the read-code command when no board is connected."""
runner = CliRunner()
result = runner.invoke(cli.read_code)
assert result.exit_code != 0
assert "MicroPython code will be output to console." in result.output
assert "Did not find any connected boards." in result.output
@mock.patch("ubittool.cli.read_python_code", autospec=True)
def test_read_code_path(mock_read_python_code, check_no_board_connected):
"""Test the read-code command with a file option."""
mock_read_python_code.return_value = "Python code here"
runner = CliRunner()
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open:
result = runner.invoke(cli.read_code, ["--file_path", "thisfile.py"])
m_open.assert_called_once_with("thisfile.py", "w")
m_open().write.assert_called_once_with("Python code here")
assert "MicroPython code will be written to: thisfile.py" in result.output
assert "Saving the MicroPython code..." in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_code_path_no_board(check_no_board_connected):
"""Test read-code command with a file option and no board connected."""
file_name = "thisfile.py"
runner = CliRunner()
results = [
runner.invoke(cli.read_code, ["--file_path", file_name]),
runner.invoke(cli.read_code, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert (
"MicroPython code will be written to: {}".format(file_name)
in result.output
), "Message written to file"
assert (
"Did not find any connected boards." in result.output
), "Message error, board not found"
# File not mocked, so checking command hasn't created it
assert not os.path.isfile(file_name), "File does not exist"
@mock.patch("ubittool.cli.read_flash_hex", autospec=True)
def test_read_flash(mock_read_flash_hex, check_no_board_connected):
"""Test the read-flash command without a file option."""
flash_hex_content = "Intel Hex lines here"
mock_read_flash_hex.return_value = flash_hex_content
runner = CliRunner()
result = runner.invoke(cli.read_flash)
assert "micro:bit flash hex will be output to console." in result.output
assert "Printing the flash contents" in result.output
assert flash_hex_content in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_flash_no_board(check_no_board_connected):
"""Test the read-flash command when no board is connected."""
runner = CliRunner()
result = runner.invoke(cli.read_flash)
assert result.exit_code != 0
assert "micro:bit flash hex will be output to console." in result.output
assert "Did not find any connected boards." in result.output
@mock.patch("ubittool.cli.read_flash_hex", autospec=True)
def test_read_flash_path(mock_read_flash_hex, check_no_board_connected):
"""Test the read-code command with a file option."""
flash_hex_content = "Intel Hex lines here"
mock_read_flash_hex.return_value = flash_hex_content
file_name = "thisfile.py"
runner = CliRunner()
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open:
results = [runner.invoke(cli.read_flash, ["--file_path", file_name])]
with mock.patch("ubittool.cli.open", mock.mock_open()) as m_open2:
results.append(runner.invoke(cli.read_flash, ["-f", file_name]))
m_open.assert_called_once_with(file_name, "w")
m_open2.assert_called_once_with(file_name, "w")
m_open().write.assert_called_once_with(flash_hex_content)
m_open2().write.assert_called_once_with(flash_hex_content)
for result in results:
assert (
"micro:bit flash hex will be written to: {}".format(file_name)
in result.output
)
assert "Saving the flash contents..." in result.output
assert "Finished successfully" in result.output
assert result.exit_code == 0
def test_read_flash_path_no_board(check_no_board_connected):
"""Test read-flash command with a file option and no board connected."""
file_name = "thisfile.py"
runner = CliRunner()
results = [
runner.invoke(cli.read_flash, ["--file_path", file_name]),
runner.invoke(cli.read_flash, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert (
"micro:bit flash hex will be written to: {}".format(file_name)
in result.output
), "Message written to file"
assert (
"Did not find any connected boards." in result.output
), "Message error, board not found"
# File not mocked, so checking command hasn't created it
assert not os.path.isfile(file_name), "File does not exist"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
@mock.patch("ubittool.cli.compare_full_flash_hex", autospec=True)
def test_compare_flash(mock_compare, mock_isfile, check_no_board_connected):
"""Test the compare-flash command."""
file_name = "random_file_name.hex"
mock_isfile.return_value = True
mock_compare.return_value = 0
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert mock_compare.call_count == len(results)
for result in results:
assert "Diff output loaded in the default browser." in result.output
assert "Finished successfully." in result.output
assert result.exit_code == 0, "Exit code 0"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
@mock.patch("ubittool.cli.compare_full_flash_hex", autospec=True)
def test_compare_flash_diffs(
mock_compare, mock_isfile, check_no_board_connected
):
"""Test the compare-flash command."""
file_name = "random_file_name.hex"
mock_isfile.return_value = True
mock_compare.return_value = 1
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert mock_compare.call_count == len(results)
for result in results:
assert "Diff output loaded in the default browser." in result.output
assert (
"There are some differences in the micro:bit flash!"
in result.output
)
assert result.exit_code != 0, "Exit code non-zero"
@mock.patch("ubittool.cli.os.path.isfile", autospec=True)
def test_compare_flash_no_board(mock_isfile, check_no_board_connected):
"""Test the compare-flash command when no board is connected."""
file_name = "random_file_name.hex"
file_content = "Intel Hex lines here"
mock_isfile.return_value = True
runner = CliRunner()
with mock.patch(
"ubittool.cmds.open", mock.mock_open(read_data=file_content)
) as m_open:
results = [
runner.invoke(cli.compare_flash, ["-f", file_name]),
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
]
assert m_open.call_count == len(results)
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert "Did not find any connected boards." in result.output
def test_compare_flash_invalid_file():
"""Check error is thrown when compare-flash file does not exist."""
file_name = "random_file_does_not_exist.hex"
runner = CliRunner()
results = [
runner.invoke(cli.compare_flash, ["--file_path", file_name]),
runner.invoke(cli.compare_flash, ["-f", file_name]),
]
for result in results:
assert result.exit_code != 0, "Exit code non-zero"
assert "Abort: File does not exists" in result.output
def test_compare_flash_no_file():
"""Test there is an error when compare-flash doesn't have a file arg."""
runner = CliRunner()
result = runner.invoke(cli.compare_flash)
assert result.exit_code != 0, "Exit code non-zero"
assert "Error: Missing option '-f' / '--file_path'." in result.output
@mock.patch("ubittool.gui.open_gui", autospec=True)
def test_gui(mock_open_gui, check_no_board_connected):
"""Test the gui command."""
runner = CliRunner()
result = runner.invoke(cli.gui)
assert result.exit_code == 0, "Exit code 0"
assert mock_open_gui.call_count == 1, "open_gui() function called"
| mit | 5,813,955,467,411,993,000 | 35.330159 | 79 | 0.668822 | false | 3.520148 | true | false | false |
sugarraysam/spell_checker | tp2_20054572_20037847.py | 1 | 2899 | from multiprocessing import Pool
from Word import Word
import multiprocessing
if __name__ == "__main__":
# Verify dict.txt and input.txt exist
import os, sys
from PrimeTest import PrimeTest
from ChainHashMap import ChainHashMap
# make sure files exist
if os.path.exists("dict.txt") and os.path.exists("input.txt"):
# initialize hash table
num_words = sum(1 for line in open('dict.txt'))
hash_table = ChainHashMap(num_words)
# populate hash table with words in dictionary
for word in open('dict.txt'):
hash_table[word.strip()] = ""
# Custom function for processing
def check_word(word):
try:
hash_table[word.lower()]
res = word
except KeyError:
res = Word(word,hash_table).spellproposals()
finally:
return res
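        # Illustrative behaviour of check_word (example words are made up,
        # not taken from the source): a word present in the hash table is
        # returned unchanged, e.g. check_word("hello") -> "hello"; a miss
        # raises KeyError and falls through to
        # Word(word, hash_table).spellproposals(), which returns the
        # suggested corrections for that word.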
# Process line and print to stdout
output = ""
with Pool(processes = multiprocessing.cpu_count()) as pool:
for line in open("input.txt"):
words = line.strip().split(" ")
# Check if "'" present in words to adjust
                # Iterate over a copy: the loop body mutates `words` in place,
                # and mutating a list while iterating over it skips elements.
                for w in list(words):
# Delete word and add 2 new words to list
if("'" in w and w.index("'") != len(w)-1):
idx = w.index("'") + 1
words.append(w[:idx])
words.append(w[idx:])
words.remove(w)
for w in pool.map(check_word, words):
output += w + " "
print(output)
# dict.txt and/or input.txt don't exist
else:
print("Error: could not find necessary files, \
'dict.txt' and 'input.txt'")
sys.exit(1)
"""DEBUGGING"""
if("debug" in sys.argv):
# Check efficiency of hash table
coll = num_words - hash_table.buckets
print("DEBUG",
"Num entries in ht: " + str(hash_table.len()),
"Num words in dict: " + str(num_words),
"Num of buckets in ht: " + str(hash_table.buckets),
"Max bucket size: " + str(hash_table.maxbucketsize),
"Num of collisions: " + str(coll),
"% of collisions: " + str((coll / num_words) * 100),
"Prime number used for ht size: " + str(hash_table.size),
sep="\n")
# make a couple access to ht
print("\n", "3 first words supposed to be in ht")
try:
words = ["aboveboard", "battened", "bowdlerise", "notinht",
"shouldreturnkeyerror", "shitfacecockmaster"]
for w in words:
print(hash_table[w])
except Exception as e:
print(e.args, "is not in dictionary")
| gpl-3.0 | 664,518,414,141,673,000 | 35.2375 | 73 | 0.500862 | false | 4.320417 | false | false | false |
PolyLAN/django-wiki | wiki/admin.py | 11 | 2901 | from __future__ import unicode_literals
from __future__ import absolute_import
from django import forms
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from mptt.admin import MPTTModelAdmin
from . import models
from . import editors
# Django 1.9 deprecation of contenttypes.generic
try:
from django.contrib.contenttypes.admin import GenericTabularInline
except ImportError:
from django.contrib.contenttypes.generic import GenericTabularInline
class ArticleObjectAdmin(GenericTabularInline):
model = models.ArticleForObject
extra = 1
max_num = 1
class ArticleRevisionForm(forms.ModelForm):
class Meta:
model = models.ArticleRevision
exclude = ()
def __init__(self, *args, **kwargs):
super(ArticleRevisionForm, self).__init__(*args, **kwargs)
# TODO: This pattern is too weird
editor = editors.getEditor()
self.fields['content'].widget = editor.get_admin_widget()
class ArticleRevisionAdmin(admin.ModelAdmin):
form = ArticleRevisionForm
list_display = ('title', 'created', 'modified', 'user', 'ip_address')
class Media:
js = editors.getEditorClass().AdminMedia.js
css = editors.getEditorClass().AdminMedia.css
class ArticleRevisionInline(admin.TabularInline):
model = models.ArticleRevision
form = ArticleRevisionForm
fk_name = 'article'
extra = 1
fields = ('content', 'title', 'deleted', 'locked',)
class Media:
js = editors.getEditorClass().AdminMedia.js
css = editors.getEditorClass().AdminMedia.css
class ArticleForm(forms.ModelForm):
class Meta:
model = models.Article
exclude = ()
def __init__(self, *args, **kwargs):
super(ArticleForm, self).__init__(*args, **kwargs)
if self.instance.pk:
revisions = models.ArticleRevision.objects.filter(
article=self.instance)
self.fields['current_revision'].queryset = revisions
else:
self.fields[
'current_revision'].queryset = models.ArticleRevision.objects.none()
self.fields['current_revision'].widget = forms.HiddenInput()
class ArticleAdmin(admin.ModelAdmin):
inlines = [ArticleRevisionInline]
form = ArticleForm
class URLPathAdmin(MPTTModelAdmin):
inlines = [ArticleObjectAdmin]
list_filter = ('site', 'articles__article__current_revision__deleted',
'articles__article__created',
'articles__article__modified')
list_display = ('__str__', 'article', 'get_created')
def get_created(self, instance):
return instance.article.created
get_created.short_description = _('created')
admin.site.register(models.URLPath, URLPathAdmin)
admin.site.register(models.Article, ArticleAdmin)
admin.site.register(models.ArticleRevision, ArticleRevisionAdmin)
| gpl-3.0 | 3,458,470,543,661,273,000 | 29.536842 | 84 | 0.6808 | false | 4.222707 | false | false | false |
igboyes/virtool | virtool/references/utils.py | 2 | 11303 | import gzip
import json
from cerberus import Validator
from operator import itemgetter
import virtool.otus.utils
ISOLATE_KEYS = [
"id",
"source_type",
"source_name",
"default"
]
OTU_KEYS = [
"name",
"abbreviation",
"schema"
]
RIGHTS = [
"build",
"modify",
"modify_otu",
"remove"
]
SEQUENCE_KEYS = [
"accession",
"definition",
"host",
"sequence"
]
def check_import_data(import_data, strict=True, verify=True):
errors = detect_duplicates(import_data["otus"])
v = Validator(get_import_schema(require_meta=strict), allow_unknown=True)
v.validate(import_data)
if v.errors:
errors.append({
"id": "file",
"issues": v.errors
})
otus = dict()
for otu in import_data["otus"]:
verification = None
if verify:
verification = virtool.otus.utils.verify(otu)
validation = validate_otu(otu, strict)
issues = dict()
if verification:
issues["verification"] = verification
if validation:
issues["validation"] = validation
if issues:
otus[otu["_id"]] = issues
    # Assumed fix: the per-OTU issues collected above were otherwise
    # discarded; report them alongside the duplicate and schema errors.
    if otus:
        errors.append({
            "id": "otu",
            "issues": otus
        })
    return errors
def check_will_change(old, imported):
for key in ["name", "abbreviation"]:
if old[key] != imported[key]:
return True
# Will change if isolate ids have changed, meaning an isolate has been added or removed.
if {i["id"] for i in old["isolates"]} != {i["id"] for i in imported["isolates"]}:
return True
# Will change if the schema has changed.
if json.dumps(old["schema"], sort_keys=True) != json.dumps(imported["schema"], sort_keys=True):
return True
new_isolates = sorted(imported["isolates"], key=itemgetter("id"))
old_isolates = sorted(old["isolates"], key=itemgetter("id"))
# Check isolate by isolate. Order is ignored.
for new_isolate, old_isolate in zip(new_isolates, old_isolates):
# Will change if a value property of the isolate has changed.
for key in ISOLATE_KEYS:
if new_isolate[key] != old_isolate[key]:
return True
# Check if sequence ids have changed.
if {i["_id"] for i in new_isolate["sequences"]} != {i["remote"]["id"] for i in old_isolate["sequences"]}:
return True
# Check sequence-by-sequence. Order is ignored.
new_sequences = sorted(new_isolate["sequences"], key=itemgetter("_id"))
old_sequences = sorted(old_isolate["sequences"], key=lambda d: d["remote"]["id"])
for new_sequence, old_sequence in zip(new_sequences, old_sequences):
for key in SEQUENCE_KEYS:
if new_sequence[key] != old_sequence[key]:
return True
return False
def clean_export_list(otus):
cleaned = list()
otu_keys = OTU_KEYS + ["_id"]
sequence_keys = SEQUENCE_KEYS + ["_id"]
for otu in otus:
try:
otu["_id"] = otu["remote"]["id"]
except KeyError:
pass
for isolate in otu["isolates"]:
for sequence in isolate["sequences"]:
try:
sequence["_id"] = sequence["remote"]["id"]
except KeyError:
pass
cleaned.append(clean_otu(otu, otu_keys, sequence_keys))
return cleaned
def clean_otu(otu, otu_keys=None, sequence_keys=None):
otu_keys = otu_keys or OTU_KEYS
sequence_keys = sequence_keys or SEQUENCE_KEYS
cleaned = {key: otu.get(key) for key in otu_keys}
cleaned.update({
"isolates": list(),
"schema": otu.get("schema", list())
})
for isolate in otu["isolates"]:
cleaned_isolate = {key: isolate[key] for key in ISOLATE_KEYS}
cleaned_isolate["sequences"] = list()
for sequence in isolate["sequences"]:
cleaned_sequence = {key: sequence[key] for key in sequence_keys}
for key in ["segment", "target"]:
try:
cleaned_sequence[key] = sequence[key]
except KeyError:
pass
cleaned_isolate["sequences"].append(cleaned_sequence)
cleaned["isolates"].append(cleaned_isolate)
return cleaned
def detect_duplicate_abbreviation(joined, duplicates, seen):
abbreviation = joined.get("abbreviation", "")
if abbreviation:
if abbreviation in seen:
duplicates.add(abbreviation)
else:
seen.add(abbreviation)
def detect_duplicate_ids(joined, duplicate_ids, seen_ids):
if joined["_id"] in seen_ids:
duplicate_ids.add(joined["_id"])
else:
seen_ids.add(joined["_id"])
def detect_duplicate_isolate_ids(joined, duplicate_isolate_ids):
duplicates = set()
isolate_ids = [i["id"] for i in joined["isolates"]]
for isolate_id in isolate_ids:
if isolate_ids.count(isolate_id) > 1:
duplicates.add(isolate_id)
if duplicates:
duplicate_isolate_ids[joined["_id"]] = {
"name": joined["name"],
"duplicates": list(duplicates)
}
def detect_duplicate_sequence_ids(joined, duplicate_sequence_ids, seen_sequence_ids):
sequence_ids = virtool.otus.utils.extract_sequence_ids(joined)
# Add sequence ids that are duplicated within an OTU to the duplicate set.
duplicate_sequence_ids.update({i for i in sequence_ids if sequence_ids.count(i) > 1})
sequence_ids = set(sequence_ids)
# Add sequence ids that have already been seen and are in the OTU.
duplicate_sequence_ids.update(seen_sequence_ids & sequence_ids)
# Add all sequences to seen list.
seen_sequence_ids.update(sequence_ids)
def detect_duplicate_name(joined, duplicates, seen):
lowered = joined["name"].lower()
if joined["name"].lower() in seen:
duplicates.add(joined["name"])
else:
seen.add(lowered)
def detect_duplicates(otus, strict=True):
duplicate_abbreviations = set()
duplicate_ids = set()
duplicate_isolate_ids = dict()
duplicate_names = set()
duplicate_sequence_ids = set()
seen_abbreviations = set()
seen_ids = set()
seen_names = set()
seen_sequence_ids = set()
for joined in otus:
detect_duplicate_abbreviation(
joined,
duplicate_abbreviations,
seen_abbreviations
)
detect_duplicate_name(
joined,
duplicate_names,
seen_names
)
if strict:
detect_duplicate_ids(
joined,
duplicate_ids,
seen_ids,
)
detect_duplicate_isolate_ids(
joined,
duplicate_isolate_ids
)
detect_duplicate_sequence_ids(
joined,
duplicate_sequence_ids,
seen_sequence_ids
)
errors = list()
if duplicate_abbreviations:
errors.append({
"id": "duplicate_abbreviations",
"message": "Duplicate OTU abbreviations found",
"duplicates": list(duplicate_abbreviations)
})
if duplicate_ids:
errors.append({
"id": "duplicate_ids",
"message": "Duplicate OTU ids found",
"duplicates": list(duplicate_ids)
})
if duplicate_isolate_ids:
errors.append({
"id": "duplicate_isolate_ids",
"message": "Duplicate isolate ids found in some OTUs",
"duplicates": duplicate_isolate_ids
})
if duplicate_names:
errors.append({
"id": "duplicate_names",
"message": "Duplicate OTU names found",
"duplicates": list(duplicate_names)
})
if duplicate_sequence_ids:
errors.append({
"id": "duplicate_sequence_ids",
"message": "Duplicate sequence ids found",
"duplicates": duplicate_sequence_ids
})
return errors
def get_import_schema(require_meta=True):
return {
"data_type": {
"type": "string",
"required": require_meta
},
"organism": {
"type": "string",
"required": require_meta
},
"otus": {
"type": "list",
"required": True
}
}
def get_isolate_schema(require_id):
return {
"id": {
"type": "string",
"required": require_id
},
"source_type": {
"type": "string",
"required": True
},
"source_name": {
"type": "string",
"required": True
},
"default": {
"type": "boolean",
"required": True
},
"sequences": {
"type": "list",
"required": True
}
}
def get_otu_schema(require_id):
return {
"_id": {
"type": "string",
"required": require_id
},
"abbreviation": {
"type": "string"
},
"name": {
"type": "string",
"required": True
},
"isolates": {
"type": "list",
"required": True
}
}
def get_owner_user(user_id):
return {
"id": user_id,
"build": True,
"modify": True,
"modify_otu": True,
"remove": True
}
def get_sequence_schema(require_id):
return {
"_id": {
"type": "string",
"required": require_id
},
"accession": {
"type": "string",
"required": True
},
"definition": {
"type": "string",
"required": True
},
"sequence": {
"type": "string",
"required": True
}
}
def load_reference_file(path):
"""
Load a list of merged otus documents from a file associated with a Virtool reference file.
:param path: the path to the otus.json.gz file
:type path: str
:return: the otus data to import
:rtype: dict
"""
with open(path, "rb") as handle:
with gzip.open(handle, "rt") as gzip_file:
return json.load(gzip_file)
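# Illustrative usage sketch (the file name and the exact shape of the loaded
# dict are assumptions, not taken from this module):
#
#     import_data = load_reference_file("reference.json.gz")
#     errors = check_import_data(import_data, strict=False, verify=True)
#
# load_reference_file() only gunzips and JSON-decodes the export; validation
# is done separately by check_import_data() and validate_otu().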
def validate_otu(otu, strict):
report = {
"otu": None,
"isolates": dict(),
"sequences": dict()
}
otu_validator = Validator(get_otu_schema(strict), allow_unknown=True)
if not otu_validator.validate(otu):
report["otu"] = otu_validator.errors
report["isolates"] = dict()
if "isolates" in otu:
isolate_validator = Validator(get_isolate_schema(strict), allow_unknown=True)
sequence_validator = Validator(get_sequence_schema(strict), allow_unknown=True)
for isolate in otu["isolates"]:
if not isolate_validator.validate(isolate):
report["isolates"][isolate["id"]] = isolate_validator.errors
if "sequences" in isolate:
for sequence in isolate["sequences"]:
if not sequence_validator.validate(sequence):
report["sequences"][sequence["_id"]] = isolate_validator.errors
if any(value for value in report.values()):
return report
| mit | 7,329,266,022,629,094,000 | 24.4 | 113 | 0.547554 | false | 3.972935 | false | false | false |
EricssonResearch/scott-eu | simulation-ros/src/turtlebot2i/turtlebot2i_scene_graph/src/vrep_scene_graph_generator.py | 1 | 10125 | #!/usr/bin/env python
from vrep_object_extractor import VrepObjectExtractor
import time
import vrep
# additional imports used for scene graph generation
import re
from graphviz import Digraph
import math
from shapely.geometry import box
# helper functions used to build the label text of scene graph nodes
def get_distance(i, j):
    dx = j.pose[0] - i.pose[0]
    dy = j.pose[1] - i.pose[1]
    if not re.match(r'wall*', j.name):
        ri = math.sqrt(i.size[0]*i.size[0] + i.size[1]*i.size[1])
        rj = math.sqrt(j.size[0]*j.size[0] + j.size[1]*j.size[1])
        temp_ij = dx*dx + dy*dy
        dist_ij = math.sqrt(temp_ij) #- ri - rj
    else:
        # The wall branch referenced posi_*/size_* names that were never
        # defined; derive them from the object poses and sizes (assumed
        # meaning) so the function is at least runnable.
        posi_ix, posi_iy = i.pose[0], i.pose[1]
        posi_wx, posi_wy = j.pose[0], j.pose[1]
        size_ix, size_iy = i.size[0], i.size[1]
        size_jx, size_jy = j.size[0], j.size[1]
        size_wx, size_wy = j.size[0], j.size[1]
        if posi_ix < (posi_wx + size_wx/2) and posi_ix > (posi_wx - size_wx/2):
            dist_ij = dy - size_iy - size_jy
        elif posi_iy < (posi_wy + size_wy/2) and posi_iy > (posi_wy - size_wx/2):
            dist_ij = dx - size_ix - size_jx
        else:
            temp = dx * dx + dy * dy
            dist_ij = math.sqrt(temp - size_ix / 2 - size_jx / 2)
    return dist_ij
def get_distance_bbox(i, j):
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
min_dist = pol_i.distance(pol_j)
return min_dist
def get_support_bbox(i, j):
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
pol_support = pol_i.intersects(pol_j)
print(pol_support)
return pol_support
def get_overlap_bbox(i, j):
pol_i = box(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
pol_j = box(j.bbox_min[0], j.bbox_min[1], j.bbox_max[0], j.bbox_max[1])
pol_overlap = pol_i.overlaps(pol_j)
pol_intersect = pol_i.intersects(pol_j)
pol_support = pol_overlap | pol_intersect
# print(pol_support)
return pol_support
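# Note on the shapely calls above (standard shapely semantics): overlaps() is
# True only when the interiors intersect and neither box contains the other,
# whereas intersects() also covers touching and containment; for example
# box(0, 0, 1, 1).intersects(box(1, 0, 2, 1)) is True while overlaps() is
# False. OR-ing the two therefore treats any contact between the bounding
# boxes as a support/overlap relation.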
def get_velocity(j):
# vel_j = j.vel
vel_j = math.sqrt(j.vel[0]*j.vel[0] + j.vel[1]*j.vel[1] + j.vel[2]*j.vel[2])
return vel_j
def get_direction(i, j):
dx = j.pose[0] - i.pose[0]
dy = j.pose[1] - i.pose[1]
dire_tan = math.atan2(dy, dx) - i.ori[2]
# print math.atan2(dy, dx)*180/pi, 'robot', i.ori[2]*180/pi
dire_tan = dire_tan*180/pi
if dire_tan > 180:
dire_tan = dire_tan - 360
elif dire_tan < -180:
dire_tan = dire_tan + 360
else:
pass
'''
if (dire_tan > -pi/8) and (dire_tan < pi/8):
dire_label = 'right'
elif (dire_tan >= pi/8) and (dire_tan <= 3*pi/8):
dire_label = 'front-right'
elif (dire_tan > 3*pi/8) and (dire_tan < 5*pi/8):
dire_label = 'front'
elif (dire_tan >= 5*pi/8) and (dire_tan <= 7*pi/8):
dire_label = 'front-left'
elif (dire_tan > 7*pi/8) or (dire_tan < -7*pi/8):
dire_label = 'left'
elif (dire_tan >= -7*pi/8) and (dire_tan <= -5*pi/8):
dire_label = 'back-left'
elif (dire_tan > -5*pi/8) and (dire_tan < -3*pi/8):
dire_label = 'back'
else:
dire_label = 'back-right'
'''
return dire_tan
def get_type(i):
if re.match(r'Bill*', i.name):
obj_type = 1
elif re.match(r'turtlebot*', i.name):
obj_type = 0
else:
obj_type = 2
return obj_type
def get_orientation(i, j):
obj_ori = j.ori[2]*180/pi - i.ori[2]*180/pi
return obj_ori
# Update rate in seconds
#rate = 0.1
pi = math.pi
extractor = VrepObjectExtractor('127.0.0.1', 19997)
# List of object names to retrieve information
# For now it is hardcoded
extractor.set_static_obj_names(['stairs', 'slidingDoor',
'dockstation_body',\
'ConveyorBeltBody', 'ConveyorBeltBody#0', 'ConveyorBeltBody#1',
'ShelfBody', 'ShelfBody#0', 'ShelfBody#1'])
extractor.set_dynamic_obj_names(['Bill_base#2',
'productGreen#0', 'productGreen#1', 'productGreen#2',
'productYellow#0', 'productYellow#1', 'productYellow#2',
'productRed#0', 'productRed#1', 'productRed#2'])
extractor.set_robot_names(['turtlebot2i', 'turtlebot2i#0'])
# extractor.set_static_obj_names(['stairs', 'slidingDoor',
# 'dockstation_body',\
# 'ConveyorBeltBody', 'ConveyorBeltBody#0', 'ConveyorBeltBody#1',
# 'ShelfBody', 'ShelfBody#0', 'ShelfBody#1'])
# extractor.set_dynamic_obj_names(['Bill#2'])
# extractor.set_robot_names(['turtlebot2i'])
print('Connected to remote API server')
print('Getting scene properties (this can take a while)...')
# Get all objects info once (for static properties) and
# prepare the callback for the streaming mode
extractor.operation_mode = vrep.simx_opmode_streaming
extractor.get_all_objects_info()
extractor.update_robots_vision_sensor_info()
extractor.update_all_robots_vision_sensors_fov()
time.sleep(0.3) # streaming takes a while to get ready
extractor.operation_mode = vrep.simx_opmode_buffer
extractor.get_all_objects_info()
extractor.update_robots_vision_sensor_info()
extractor.update_all_robots_vision_sensors_fov()
print('Finished getting scene properties!\n')
print('Started getting scene objects from vision sensor FOV...')
while True:
# tt = 2
time_start = time.time()
# Get dynamic object info (pose and vel) periodically
extractor.update_dynamic_obj_info()
# Update vision sensor info
extractor.update_all_robots_vision_sensors_fov()
robot_list = extractor.robot_obj_list
# Get objects that are in the sensor FOV
for robot_num in range(len(robot_list)):
obj_list = extractor.get_objects_from_vision_sensor(robot_list[robot_num].vision_sensor)
if (obj_list != None):
# Remove the robot itself from the list
obj_list = [i for i in obj_list if i.name!=robot_list[robot_num].name]
# Print detected objects of the vision sensor
print(robot_list[robot_num].name, robot_list[robot_num].vision_sensor.name, obj_list)
#############################################
# generate scene graph
#############################################
dot = Digraph(comment='warehouse', format='png')
dot.node_attr['shape']='record'
robot_velocity = get_velocity(robot_list[robot_num])
i = robot_list[robot_num]
# print(i.bbox_min[0], i.bbox_min[1], i.bbox_max[0], i.bbox_max[1])
# robot_label = '{%s|%s|velocity: %.2f|orientation: %.2f}'%(robot[robot_num].name, robot[robot_num].vision_sensor.name, robot_velocity, robot[robot_num].ori[2]*180/pi)
robot_label = '{%s|type: 0|%s|velocity: %.2f}'%(robot_list[robot_num].name, robot_list[robot_num].vision_sensor.name, robot_velocity)
# robot_label = '{%s|%s}'%(robot[robot_num].name, robot[robot_num].vision_sensor.name)
dot.node('robot', label=robot_label)
dot.node('warehouse', label='warehouse')
dot.node('floor', label='{floor|size: 25*25}')
dot.edge('warehouse','floor')
for obj in obj_list:
obj_direction = get_direction(robot_list[robot_num], obj)
obj_distance = get_distance_bbox(robot_list[robot_num], obj)
obj_velocity = get_velocity(obj)
obj_type = get_type(obj)
obj_orientation = get_orientation(robot_list[robot_num], obj)
# print(obj.name, '%.3f' %obj_velocity)
# node_label = '{%s|direction: %s|distance: %.2f}'%(obj.name, obj_direction, obj_distance)
# if obj.name == 'Bill#3':
# node_label = '{%s|velocity: 0.2|distance: %.2f}'%(obj.name, obj_distance)
# else:
# node_label = '{%s|Static|distance: %.2f}'%(obj.name, obj_distance)
node_label = '{%s|type: %s|distance: %.2f|orientation: %.2f|direction: %.2f|velocity: %.2f|size: x %.2f, y %.2f, z %.2f}'%( obj.name, obj_type, obj_distance, obj_orientation, obj_direction, obj_velocity, obj.size[0], obj.size[1], obj.size[2])
# node_label = '{%s|velocity: %.2f|distance: %.2f}'%( obj.name, obj_velocity, obj_distance)
# node_label = '{%s|distance: %.2f}'%(obj.name, obj_distance)
dot.node(obj.name, label=node_label)
support_flg = 0
if re.match(r'wall*', obj.name):
dot.edge('warehouse', obj.name, label='on')
elif re.match(r'product*', obj.name):
# obj_list = obj_list.remove(obj)
for obj_support in obj_list:
if obj_support.name[0:5] != obj.name[0:5]:
# if get_support_bbox(obj, obj_support):
if get_overlap_bbox(obj, obj_support):
dot.edge(obj_support.name, obj.name, label='on')
support_flg = 1
break
if support_flg == 0:
dot.edge('floor', obj.name, label='on')
else:
dot.edge('floor', obj.name, label='on')
'''
L = [floor]
assign_object = []
while jj not in assign_object:
if len(L) != 0:
parent = L[0]
L.pop(0)
for i in obj_list:
dot.node(i.name, label='%s'%i.name)
dot.edge(parent.name, i.name, label='on')
L.append(i)
for i in range(len()):
for j in range(i, len())
dot.edge(obj_list[i].name, obj_list[j].name, label='')
'''
            # output the scene graph (Graphviz source plus rendered .png) under sg_robot/
sg_name = 'sg_robot/robot%d' %robot_num
dot.render(sg_name, view=True)
time_end = time.time()
time_cost = time_end - time_start
scene_graph_fps = 1.0/time_cost
# print("Scene graph generating fps is %.2f" % scene_graph_fps)
#time.sleep(rate)
clientID=extractor.clientID # first method
# Close the connection to V-REP
vrep.simxFinish(clientID)
| apache-2.0 | 6,536,135,167,575,143,000 | 38.244186 | 254 | 0.560988 | false | 3.065395 | false | false | false |
powersj/tilt-shift | src/blur.py | 1 | 1763 | #!/usr/bin/python
"""Set of functions to blur an entire image that replicates a lens blur."""
import cv2
import numpy as np
import os
import shutil
def make_more_vivid(image):
"""Modify the saturation and value of the image."""
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
hue, saturation, value = cv2.split(hsv)
saturation = np.array(saturation * 1.2, dtype=np.uint16)
saturation = np.array(np.clip(saturation, 0, 255), dtype=np.uint8)
value = np.array(value * 1.1, dtype=np.uint16)
value = np.array(np.clip(value, 0, 255), dtype=np.uint8)
return cv2.cvtColor(cv2.merge((hue, saturation, value)), cv2.COLOR_HSV2BGR)
def read_image(input_dir):
"""Read in an image and provide the image itself, name, and extension."""
for photo in os.listdir(input_dir):
print photo,
name, ext = os.path.splitext(photo)
image = cv2.imread(input_dir + '/' + photo)
yield image, name, ext
def clean_folder(directory):
"""Clean out the given directory."""
if os.path.isdir(directory):
shutil.rmtree(directory)
os.mkdir(directory)
def process(image):
"""Given an image process it using the process to replicate a lens blur."""
print '...bluring image',
image = make_more_vivid(image)
image = cv2.bilateralFilter(image, 9, 150, 150)
image = cv2.blur(image, (15, 15))
return image
def main():
"""Given the images in a directory blur each of them."""
input_dir = 'images/original'
output_dir = 'images/blur'
clean_folder(output_dir)
for image, name, ext in read_image(input_dir):
output = process(image)
cv2.imwrite(output_dir + '/' + name + ext, output)
print '...[DONE]'
if __name__ == "__main__":
main()
| apache-2.0 | 1,325,017,258,541,798,700 | 27.901639 | 79 | 0.642655 | false | 3.228938 | false | false | false |
Hybrid-Cloud/cinder | cinder/volume/drivers/falconstor/rest_proxy.py | 6 | 58141 | # Copyright (c) 2016 FalconStor, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import json
import random
import time
import uuid
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from six.moves import http_client
from cinder import exception
from cinder.i18n import _, _LI, _LW
FSS_BATCH = 'batch'
FSS_PHYSICALRESOURCE = 'physicalresource'
FSS_PHYSICALADAPTER = 'physicaladapter'
FSS_FCCLIENTINITIATORS = 'fcclientinitiators'
FSS_FC_TGT_WWPN = 'fctgtwwpn'
FSS_STORAGE_POOL = 'storagepool'
FSS_LOGICALRESOURCE = 'logicalresource'
FSS_SAN = 'sanresource'
FSS_MIRROR = 'mirror'
FSS_TIMEMARKPOLICY = 'timemarkpolicy'
FSS_TIMEMARK = 'timemark'
FSS_TIMEVIEW = 'timeview'
FSS_SNAPSHOT_RESOURCE = 'snapshotresource'
FSS_SNAPSHOT_GROUP = 'snapshotgroup'
FSS_CLIENT = 'client'
FSS_SANCLIENT = 'sanclient'
FSS_ISCSI_TARGET = 'iscsitarget'
FSS_ISCSI_CLIENT_INITIATORS = 'iscsiclientinitiators'
FSS_SERVER = 'server'
FSS_OPTIONS = 'options'
FSS_PORTAL = 'defaultiscsiportal'
FSS_PROPERTIES = 'properties'
FSS_HOST = 'host'
FSS_RETURN_CODE = 'rcs'
FSS_AUTH = 'auth'
FSS_LOGIN = 'login'
FSS_SINGLE_TYPE = 'single'
POST = 'POST'
GET = 'GET'
PUT = 'PUT'
DELETE = 'DELETE'
GROUP_PREFIX = 'OpenStack-'
PRODUCT_NAME = 'ipstor'
SESSION_COOKIE_NAME = 'session_id'
RETRY_LIST = ['107', '2147680512']
MAXSNAPSHOTS = 1000
OPERATION_TIMEOUT = 60 * 60
RETRY_CNT = 5
RETRY_INTERVAL = 15
LOG = logging.getLogger(__name__)
class RESTProxy(object):
def __init__(self, config):
self.fss_host = config.san_ip
self.fss_username = config.san_login
self.fss_password = config.san_password
self.fss_defined_pool = config.fss_pool
if config.additional_retry_list:
RETRY_LIST.append(config.additional_retry_list)
self.FSS = FSSRestCommon(
host=self.fss_host,
username=self.fss_username,
password=self.fss_password,
fss_debug=config.fss_debug)
self.session_id = None
# naming
def _get_vol_name_from_snap(self, snapshot):
"""Return the name of the snapshot that FSS will use."""
return "cinder-%s" % snapshot["volume_id"]
def _get_fss_volume_name(self, volume):
"""Return the name of the volume FSS will use."""
return "cinder-%s" % volume["id"]
def _get_group_name_from_id(self, id):
return "cinder-consisgroup-%s" % id
def _encode_name(self, name):
uuid_str = name.replace("-", "")
vol_uuid = uuid.UUID('urn:uuid:%s' % uuid_str)
newuuid = (base64.urlsafe_b64encode(vol_uuid.bytes).
decode('utf-8').strip('='))
return "cinder-%s" % newuuid
def do_setup(self):
self.session_id = self.FSS.fss_login()
def _convert_size_to_gb(self, size):
s = round(float(size) / units.Gi, 2)
if s > 0:
return s
else:
return 0
def _convert_size_to_mb(self, size):
return size * units.Ki
def _get_pools_info(self):
qpools = []
poolinfo = {}
try:
output = self.list_pool_info()
if "storagepools" in output['data']:
for item in output['data']['storagepools']:
if item['name'].startswith(GROUP_PREFIX) and (
self.fss_defined_pool == item['id']):
poolid = int(item['id'])
qpools.append(poolid)
break
if not qpools:
msg = _('The storage pool information is empty or not correct')
raise exception.DriverNotInitialized(msg)
# Query pool detail information
for poolid in qpools:
output = self.list_pool_info(poolid)
poolinfo['pool_name'] = output['data']['name']
poolinfo['total_capacity_gb'] = (
self._convert_size_to_gb(output['data']['size']))
poolinfo['used_gb'] = (
self._convert_size_to_gb(output['data']['used']))
poolinfo['QoS_support'] = False
poolinfo['reserved_percentage'] = 0
except Exception:
msg = (_('Unexpected exception during get pools info.'))
LOG.exception(msg)
raise exception.VolumeBackendAPIException(data=msg)
return poolinfo
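    # Shape of the dict returned above (values are illustrative only):
    #   {'pool_name': 'OpenStack-Pool', 'total_capacity_gb': 1024.0,
    #    'used_gb': 10.5, 'QoS_support': False, 'reserved_percentage': 0}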
def list_pool_info(self, pool_id=None):
return self.FSS.list_pool_info(pool_id)
def list_physicaladapter_info(self, adapter_id=None):
return self.FSS.list_physicaladapter_info(adapter_id)
def _checking_adapter_type(self, id):
adapter_type = ''
output = self.list_physicaladapter_info()
if "physicaladapters" in output['data']:
physicaladapters = output['data']['physicaladapters']
if physicaladapters['id'] == id:
adapter_type = physicaladapters['type']
return adapter_type
def create_vdev(self, volume):
sizemb = self._convert_size_to_mb(volume["size"])
volume_name = self._get_fss_volume_name(volume)
params = dict(storagepoolid=self.fss_defined_pool,
category="virtual",
sizemb=sizemb,
name=volume_name)
return volume_name, self.FSS.create_vdev(params)
def create_tv_from_cdp_tag(self, volume_metadata, volume):
tv_vid = ''
cdp_tag = ''
if 'cdptag' in volume_metadata:
tv_vid = str(volume_metadata['timeview']) + '_0'
cdp_tag = str(volume_metadata['cdptag'])
if 'rawtimestamp' in volume_metadata:
tv_vid = '{0}_{1}'.format(str(volume_metadata['timeview']),
str(volume_metadata['rawtimestamp']))
volume_name = self._get_fss_volume_name(volume)
sizemb = self._convert_size_to_mb(volume['size'])
params = dict(name=volume_name,
storage=dict(storagepoolid=self.fss_defined_pool,
sizemb=sizemb),
automaticexpansion=dict(enabled=False),
timeviewcopy=True)
if cdp_tag:
params.update(cdpjournaltag=cdp_tag)
metadata = self.FSS.create_timeview(tv_vid, params)
return volume_name, metadata
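    # Illustrative metadata handled above (values made up):
    #   {'timeview': '12', 'cdptag': 'tag1'}            -> timeview id "12_0"
    #   {'timeview': '12', 'rawtimestamp': '146771280'} -> timeview id "12_146771280"
    # i.e. the TimeView copy is taken either at a CDP tag or at a raw timestamp.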
def create_thin_vdev(self, volume_metadata, volume):
thin_size = 0
size = volume["size"]
sizemb = self._convert_size_to_mb(size)
params = dict(storagepoolid=self.fss_defined_pool,
category="virtual")
if 'thinprovisioned' in volume_metadata:
if volume_metadata['thinprovisioned'] is False:
msg = (_('If you want to create a thin provisioning volume,'
' this param must be True.'))
raise exception.VolumeBackendAPIException(msg)
if 'thinsize' in volume_metadata:
thin_size = int(volume_metadata['thinsize'])
if size < 10:
msg = _('The resource is a FSS thin device, minimum size is '
'10240 MB.')
raise exception.VolumeBackendAPIException(msg)
else:
try:
if thin_size > size:
msg = _('The allocated size must less than total size.')
raise exception.VolumeBackendAPIException(msg)
except Exception:
msg = _('The resource is a thin device, thin size is invalid.')
raise exception.VolumeBackendAPIException(msg)
thin_size = self._convert_size_to_mb(thin_size)
thin_disk = dict(
enabled=True,
fullsizemb=sizemb)
params.update(thinprovisioning=thin_disk)
params.update(sizemb=thin_size)
volume_name = self._get_fss_volume_name(volume)
params.update(name=volume_name)
return volume_name, self.FSS.create_vdev(params)
def _get_fss_vid_from_name(self, volume_name, fss_type=None):
vid = []
output = self.FSS.list_fss_volume_info()
try:
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
if item['name'] in volume_name:
vid.append(item['id'])
except Exception:
msg = (_('Can not find cinder volume - %(volumeName)s') %
{"volumeName": volume_name})
raise exception.VolumeBackendAPIException(msg)
if fss_type is not None and fss_type == FSS_SINGLE_TYPE:
vid = ''.join(str(x) for x in vid)
return vid
def _get_fss_gid_from_name(self, group_name):
gid = ''
output = self.FSS.list_group_info()
if "snapshotgroups" in output['data']:
for item in output['data']['snapshotgroups']:
if item['name'] == group_name:
gid = item['id']
break
if gid == '':
msg = (_('Can not find consistency group: %s.') % group_name)
raise exception.VolumeBackendAPIException(msg)
return gid
def _get_fss_group_membercount(self, gid):
membercount = 0
output = self.FSS.list_group_info(gid)
if "membercount" in output['data']:
membercount = output['data']['membercount']
return membercount
def _get_vdev_id_from_group_id(self, group_id):
vidlist = []
output = self.FSS.list_group_info(group_id)
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
vidlist.append(item['id'])
return vidlist
def clone_volume(self, new_vol_name, source_volume_name):
params = dict(storagepoolid=self.fss_defined_pool)
volume_metadata = {}
new_vid = ''
vid = self._get_fss_vid_from_name(source_volume_name, FSS_SINGLE_TYPE)
mirror_params = dict(
category='virtual',
selectioncriteria='anydrive',
mirrortarget="virtual"
)
mirror_params.update(params)
ret1 = self.FSS.create_mirror(vid, mirror_params)
if ret1:
if ret1['rc'] != 0:
failed_ret = self.FSS.get_fss_error_code(ret1['rc'])
raise exception.VolumeBackendAPIException(data=failed_ret)
ret2 = self.FSS.sync_mirror(vid)
self.FSS._random_sleep()
if ret2['rc'] == 0:
self.FSS._check_mirror_sync_finished(vid, OPERATION_TIMEOUT)
ret3 = self.FSS.promote_mirror(vid, new_vol_name)
if ret3 and ret3['rc'] == 0:
new_vid = ret3['id']
volume_metadata['FSS-vid'] = new_vid
return volume_metadata
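    # Flow used above: create a virtual-device mirror of the source volume,
    # trigger a sync, poll until the sync completes, then promote the mirror
    # under the new name; the promoted device id is returned as 'FSS-vid'.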
def delete_vdev(self, volume):
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if vid:
return self.FSS.delete_vdev(vid)
else:
msg = _('vid is null. FSS failed to delete volume.')
raise exception.VolumeBackendAPIException(data=msg)
def create_snapshot(self, snapshot):
snap_metadata = {}
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
size = snapshot['volume_size']
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to create snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
(snap, tm_policy, vdev_size) = (self.FSS.
_check_if_snapshot_tm_exist(vid))
if not snap:
self.create_vdev_snapshot(vid, self._convert_size_to_mb(size))
if not tm_policy:
self.FSS.create_timemark_policy(
vid, storagepoolid=self.fss_defined_pool)
if not snap_name:
snap_name = "snap-%s" % time.strftime('%Y%m%d%H%M%S')
self.FSS.create_timemark(vid, snap_name)
snap_metadata['fss_tm_comment'] = snap_name
return snap_metadata
def delete_snapshot(self, snapshot):
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to delete snapshot')
raise exception.VolumeBackendAPIException(data=msg)
if not snap_name:
if ('metadata' in snapshot and 'fss_tm_comment' in
snapshot['metadata']):
snap_name = snapshot['metadata']['fss_tm_comment']
tm_info = self.FSS.get_timemark(vid)
rawtimestamp = self._get_timestamp(tm_info, snap_name)
if rawtimestamp:
timestamp = '%s_%s' % (vid, rawtimestamp)
self.FSS.delete_timemark(timestamp)
final_tm_data = self.FSS.get_timemark(vid)
if "timemark" in final_tm_data['data']:
if not final_tm_data['data']['timemark']:
self.FSS.delete_timemark_policy(vid)
self.FSS.delete_vdev_snapshot(vid)
def _get_timestamp(self, tm_data, encode_snap_name):
timestamp = ''
if "timemark" in tm_data['data']:
for item in tm_data['data']['timemark']:
if "comment" in item and item['comment'] == encode_snap_name:
timestamp = item['rawtimestamp']
break
return timestamp
def create_volume_from_snapshot(self, volume, snapshot):
volume_metadata = {}
volume_name = self._get_vol_name_from_snap(snapshot)
snap_name = snapshot["display_name"]
new_vol_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = _('vid is null. FSS failed to create_volume_from_snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
if not snap_name:
if ('metadata' in snapshot) and ('fss_tm_comment'
in snapshot['metadata']):
snap_name = snapshot['metadata']['fss_tm_comment']
tm_info = self.FSS.get_timemark(vid)
rawtimestamp = self._get_timestamp(tm_info, snap_name)
if not rawtimestamp:
msg = _('rawtimestamp is null. FSS failed to '
'create_volume_from_snapshot.')
raise exception.VolumeBackendAPIException(data=msg)
timestamp = '%s_%s' % (vid, rawtimestamp)
output = self.FSS.copy_timemark(
timestamp, storagepoolid=self.fss_defined_pool, name=new_vol_name)
if output['rc'] == 0:
vid = output['id']
self.FSS._random_sleep()
if self.FSS._check_tm_copy_finished(vid, OPERATION_TIMEOUT):
volume_metadata['FSS-vid'] = vid
return volume_name, volume_metadata
def extend_vdev(self, volume_name, vol_size, new_size):
if new_size > vol_size:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
size = self._convert_size_to_mb(new_size - vol_size)
params = dict(
action='expand',
sizemb=size
)
return self.FSS.extend_vdev(vid, params)
def list_volume_info(self, vid):
return self.FSS.list_fss_volume_info(vid)
def rename_vdev(self, vid, new_vol_name):
params = dict(
action='update',
name=new_vol_name
)
return self.FSS.rename_vdev(vid, params)
def assign_iscsi_vdev(self, client_id, target_id, vid):
params = dict(
action="assign",
virtualdeviceids=[vid],
iscsi=dict(target=target_id)
)
return self.FSS.assign_vdev(client_id, params)
def assign_fc_vdev(self, client_id, vid):
params = dict(
action="assign",
virtualdeviceids=[vid],
fc=dict(
fcmapping='alltoall',
accessmode='readwritenonexclusive')
)
return self.FSS.assign_vdev(client_id, params)
def unassign_vdev(self, client_id, vid):
params = dict(
action="unassign",
virtualdeviceid=vid
)
return self.FSS.unassign_vdev(client_id, params)
def _create_vdev_snapshot(self, volume_name, size):
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
return self.create_vdev_snapshot(vid, self._convert_size_to_mb(size))
def create_vdev_snapshot(self, vid, size):
params = dict(
idlist=[vid],
selectioncriteria='anydrive',
policy='alwayswrite',
sizemb=size,
storagepoolid=self.fss_defined_pool
)
return self.FSS.create_vdev_snapshot(params)
def create_group(self, group):
group_name = self._get_group_name_from_id(group['id'])
params = dict(
name=group_name
)
return self.FSS.create_group(params)
def destroy_group(self, group):
group_name = self._get_group_name_from_id(group['id'])
gid = self._get_fss_gid_from_name(group_name)
return self.FSS.destroy_group(gid)
def _add_volume_to_consistency_group(self, group_id, vol_name):
self.set_group(group_id, addvollist=[vol_name])
def set_group(self, group_id, **kwargs):
group_name = self._get_group_name_from_id(group_id)
gid = self._get_fss_gid_from_name(group_name)
join_params = dict()
leave_params = dict()
if kwargs.get('addvollist'):
joing_vid = self._get_fss_vid_from_name(kwargs['addvollist'])
join_params.update(
action='join',
virtualdevices=joing_vid
)
if kwargs.get('remvollist'):
leave_vid = self._get_fss_vid_from_name(kwargs['remvollist'])
leave_params.update(
action='leave',
virtualdevices=leave_vid
)
return self.FSS.set_group(gid, join_params, leave_params)
def create_cgsnapshot(self, cgsnapshot):
group_name = self._get_group_name_from_id(
cgsnapshot['consistencygroup_id'])
gsnap_name = self._encode_name(cgsnapshot['id'])
gid = self._get_fss_gid_from_name(group_name)
vidlist = self._get_vdev_id_from_group_id(gid)
for vid in vidlist:
(snap, tm_policy, sizemb) = (self.FSS.
_check_if_snapshot_tm_exist(vid))
if not snap:
self.create_vdev_snapshot(vid, sizemb)
if not tm_policy:
self.FSS.create_timemark_policy(
vid, storagepoolid=self.fss_defined_pool)
group_tm_policy = self.FSS._check_if_group_tm_enabled(gid)
if not group_tm_policy:
self.create_group_timemark_policy(gid)
self.create_group_timemark(gid, gsnap_name)
def create_group_timemark_policy(self, gid):
tm_params = dict(
automatic=dict(enabled=False),
maxtimemarkcount=MAXSNAPSHOTS
)
return self.FSS.create_group_timemark_policy(gid, tm_params)
def create_group_timemark(self, gid, gsnap_name):
params = dict(
comment=gsnap_name,
priority='medium',
snapshotnotification=False
)
return self.FSS.create_group_timemark(gid, params)
def delete_cgsnapshot(self, cgsnapshot):
group_name = self._get_group_name_from_id(
cgsnapshot['consistencygroup_id'])
encode_snap_name = self._encode_name(cgsnapshot['id'])
gid = self._get_fss_gid_from_name(group_name)
if not gid:
msg = _('gid is null. FSS failed to delete cgsnapshot.')
raise exception.VolumeBackendAPIException(data=msg)
if self._get_fss_group_membercount(gid) != 0:
tm_info = self.FSS.get_group_timemark(gid)
rawtimestamp = self._get_timestamp(tm_info, encode_snap_name)
timestamp = '%s_%s' % (gid, rawtimestamp)
self.delete_group_timemark(timestamp)
final_tm_data = self.FSS.get_group_timemark(gid)
if "timemark" in final_tm_data['data']:
if not final_tm_data['data']['timemark']:
self.FSS.delete_group_timemark_policy(gid)
def delete_group_timemark(self, timestamp):
params = dict(
deleteallbefore=False
)
return self.FSS.delete_group_timemark(timestamp, params)
def _check_iscsi_option(self):
output = self.FSS.get_server_options()
if "iscsitarget" in output['data']:
if not output['data']['iscsitarget']:
self.FSS.set_server_options('iscsitarget')
def _check_fc_target_option(self):
output = self.FSS.get_server_options()
if "fctarget" in output['data']:
if not output['data']['fctarget']:
self.FSS.set_server_options('fctarget')
def _check_iocluster_state(self):
output = self.FSS.get_server_options()
if 'iocluster' not in output['data']:
msg = _('No iocluster information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return output['data']['iocluster']
def list_fc_target_wwpn(self):
return self.FSS.list_fc_target_wwpn()
def list_fc_client_initiators(self):
return self.FSS.list_fc_client_initiators()
def create_fc_client(self, cinder_host_name, free_initiator_wwpns):
client_id = 0
params = dict(
name=cinder_host_name,
protocoltype=["fc"],
ipaddress=self.fss_host,
ostype='linux',
fcpolicy=dict(
initiators=[free_initiator_wwpns],
vsaenabled=False
)
)
client_info = self.FSS.create_client(params)
if client_info and client_info['rc'] == 0:
client_id = client_info['id']
return client_id
def list_iscsi_target_info(self, target_id=None):
return self.FSS.list_iscsi_target_info(target_id)
def _check_fc_host_devices_empty(self, client_id):
is_empty = False
output = self.FSS.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'fcdevices' not in output['data']:
msg = _('No fcdevices in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if len(output['data']['fcdevices']) == 0:
is_empty = True
self.FSS.delete_client(client_id)
return is_empty
def create_iscsi_client(self, cinder_host_name, initiator):
params = dict(
name=cinder_host_name,
protocoltype=["iscsi"],
ipaddress=self.fss_host,
ostype='linux',
iscsipolicy=dict(
initiators=[initiator],
authentication=dict(enabled=False,
mutualchap=dict(enabled=False))
)
)
return self.FSS.create_client(params)
def create_iscsitarget(self, client_id, initiator, fss_hosts):
params = dict(
clientid=client_id,
name=initiator,
ipaddress=fss_hosts,
accessmode='readwritenonexclusive'
)
return self.FSS.create_iscsitarget(params)
def _get_iscsi_host(self, connector):
target_info = self.list_iscsi_target_info()
if 'data' not in target_info:
msg = _('No data information in return info.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsitargets' not in target_info['data']:
msg = _('No iscsitargets in return info.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if target_info['data']['iscsitargets']:
iscsitargets = target_info['data']['iscsitargets']
for iscsitarget in iscsitargets:
if connector["initiator"] in iscsitarget["name"]:
target_id = iscsitarget["id"]
client_id = iscsitarget["clientid"]
return client_id, target_id
return None, None
def _create_iscsi_host(self, host_name, initiator, fss_hosts):
client_id = ''
target_id = ''
client_info = self.create_iscsi_client(host_name, initiator)
if client_info and client_info['rc'] == 0:
client_id = client_info['id']
target_info = self.create_iscsitarget(client_id, initiator, fss_hosts)
if target_info['rc'] == 0:
target_id = target_info['id']
return client_id, target_id
def _get_fc_client_initiators(self, connector):
fc_initiators_assigned = []
fc_available_initiator = []
fc_initiators_info = self.list_fc_client_initiators()
if 'data' not in fc_initiators_info:
raise ValueError(_('No data information in return info.'))
if fc_initiators_info['data']:
fc_initiators = fc_initiators_info['data']
for fc_initiator in fc_initiators:
if fc_initiator['wwpn'] in connector['wwpns']:
fc_available_initiator.append(str(fc_initiator['wwpn']))
fc_initiators_assigned.append(dict(
wwpn=str(fc_initiator['wwpn']),
assigned=fc_initiator['assigned']))
return fc_available_initiator, fc_initiators_assigned
def fc_initialize_connection(self, volume, connector, fss_hosts):
"""Connect the host and volume; return dict describing connection."""
vid = 0
fc_target_info = {}
free_fc_initiator = None
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = (_('Can not find cinder volume - %s.') % volume_name)
raise exception.VolumeBackendAPIException(msg)
available_initiator, fc_initiators_info = (
self._get_fc_client_initiators(connector))
if fc_initiators_info is None:
msg = _('No FC initiator can be added to host.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for fc_initiator in fc_initiators_info:
value = fc_initiator['assigned']
if len(value) == 0:
free_fc_initiator = fc_initiator['wwpn']
if free_fc_initiator is None:
msg = _('No free FC initiator can be assigned to host.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
initiator = connector["initiator"]
host_name = GROUP_PREFIX + '%s-' % connector["host"]
initiator_name = initiator.split(':')
idx = len(initiator_name) - 1
client_host_name = host_name + initiator_name[
idx] + '_FC-wwpn-' + free_fc_initiator
client_id = self.create_fc_client(client_host_name, free_fc_initiator)
try:
self.assign_fc_vdev(client_id, vid)
time.sleep(3)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984845 and "XML_ERROR_CLIENT_EXIST"
in err.text):
ctxt.reraise = False
LOG.warning(_LW('Assign volume failed with message: %(msg)s.'),
{"msg": err.reason})
finally:
lun = self.FSS._get_fc_client_info(client_id, vid)
fc_target_info['lun'] = lun
fc_target_info['available_initiator'] = available_initiator
if not fc_target_info:
            msg = _('Failed to get FC target info for the LUN: %s.')
raise exception.VolumeBackendAPIException(data=msg % volume_name)
return fc_target_info
def fc_terminate_connection(self, volume, connector):
client_id = 0
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
output = self.list_volume_info(vid)
if 'data' not in output:
msg = _('No vdev information in given data')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'clients' not in output['data']:
msg = _('No clients in vdev information.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
client_info = output['data']['clients']
for fcclients in client_info:
client_id = int(fcclients['id'])
if client_id == 0:
msg = _(
'Can not find client id. The connection target name is %s.')
raise exception.VolumeBackendAPIException(
data=msg % connector["initiator"])
try:
self.unassign_vdev(client_id, vid)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984988 and
"XML_ERROR_VIRTUAL_DEV_NOT_ASSIGNED_TO_iSCSI_TARGET"
in err.text):
ctxt.reraise = False
LOG.warning(_LW('Disconnection failed with message: '
"%(msg)s."), {"msg": err.reason})
return client_id
def initialize_connection_iscsi(self, volume, connector, fss_hosts):
"""Connect the host and volume; return dict describing connection."""
vid = 0
iscsi_target_info = {}
self._check_iscsi_option()
client_id, target_id = self._get_iscsi_host(connector)
if target_id is None:
initiator = connector["initiator"]
host_name = GROUP_PREFIX + '%s-' % connector["host"]
initiator_info = initiator.split(':')
idx = len(initiator_info) - 1
client_host_name = host_name + initiator_info[idx]
client_id, target_id = self._create_iscsi_host(client_host_name,
initiator,
fss_hosts)
volume_name = self._get_fss_volume_name(volume)
try:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
if not vid:
msg = (_('Can not find cinder volume - %(volumeName)s.') %
{"volumeName": volume_name})
raise exception.VolumeBackendAPIException(msg)
self.assign_iscsi_vdev(client_id, target_id, vid)
time.sleep(3)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984989 and
"XML_ERROR_VIRTUAL_DEV_ASSIGNED_TO_iSCSI_TARGET" in
err.text):
ctxt.reraise = False
LOG.warning(_LW("Assign volume failed with message: %(msg)s."),
{"msg": err.reason})
finally:
(lun, target_name) = self.FSS._get_iscsi_target_info(client_id,
vid)
iscsi_target_info['lun'] = lun
iscsi_target_info['iqn'] = target_name
if not iscsi_target_info:
msg = _('Failed to get iSCSI target info for the LUN: %s')
raise exception.VolumeBackendAPIException(data=msg % volume_name)
return iscsi_target_info
def terminate_connection_iscsi(self, volume, connector):
volume_name = self._get_fss_volume_name(volume)
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
client_id, target_id = self._get_iscsi_host(connector)
if not client_id:
msg = _('Can not find client id. The connection target name '
'is %s.')
raise exception.VolumeBackendAPIException(
data=msg % connector["initiator"])
try:
self.unassign_vdev(client_id, vid)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
if (err.code == 2415984988 and
"XML_ERROR_VIRTUAL_DEV_NOT_ASSIGNED_TO_iSCSI_TARGET"
in err.text):
ctxt.reraise = False
LOG.warning(_LW("Disconnection failed with message: "
"%(msg)s."), {"msg": err.reason})
finally:
is_empty = self.FSS._check_host_mapping_status(client_id,
target_id)
if is_empty:
self.FSS.delete_iscsi_target(target_id)
self.FSS.delete_client(client_id)
def _get_existing_volume_ref_vid(self, existing_ref):
if 'source-id' in existing_ref:
vid = existing_ref['source-id']
else:
reason = _("FSSISCSIDriver manage_existing requires vid to "
"identify an existing volume.")
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref, reason=reason)
vdev_info = self.list_volume_info(vid)
if not vdev_info:
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref,
reason=_("Unable to find volume with FSS vid =%s.") % vid)
if 'data' not in vdev_info:
msg = _('No vdev information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'sizemb' not in vdev_info['data']:
msg = _('No vdev sizemb in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return vdev_info['data']['sizemb']
def _manage_existing_volume(self, vid, volume):
new_vol_name = self._get_fss_volume_name(volume)
try:
self.rename_vdev(vid, new_vol_name)
except FSSHTTPError as err:
with excutils.save_and_reraise_exception() as ctxt:
ctxt.reraise = False
LOG.warning(_LW("Volume manage_existing_volume was unable "
"to rename the volume, error message: %s."),
err.reason)
def unmanage(self, volume):
volume_name = self._get_fss_volume_name(volume)
unmanaged_vol_name = volume_name + "-unmanaged"
try:
vid = self._get_fss_vid_from_name(volume_name, FSS_SINGLE_TYPE)
self.rename_vdev(vid, unmanaged_vol_name)
except FSSHTTPError as err:
LOG.warning(_LW("Volume unmanage was unable to rename the volume,"
" error message: %(msg)s."), {"msg": err.reason})
class FSSRestCommon(object):
def __init__(self, host, username, password, fss_debug):
self.hostip = host
self.username = username
self.password = password
self.session_id = None
self.fss_debug = fss_debug
def _fss_request(self, method, path, data=None):
json_data = None
url = "http://%(ip)s/%(product)s/%(path)s" % {
"ip": self.hostip, "product": PRODUCT_NAME, "path": path}
headers = {"Content-Type": "application/json"}
if self.session_id is not None:
cookie = dict(
Cookie=SESSION_COOKIE_NAME + '=' + self.session_id
)
headers.update(cookie)
if data is not None:
request_body = json.dumps(data).encode("utf-8")
else:
request_body = None
connection = http_client.HTTPConnection(self.hostip, 80, timeout=60)
if self.fss_debug:
LOG.info(_LI("[FSS_RESTAPI]====%(method)s@url=%(url)s ===="
"@request_body=%(body)s===") % {
"method": method,
"url": url,
"body": request_body})
attempt = 1
while True:
connection.request(method, url, request_body, headers)
response = connection.getresponse()
response_body = response.read()
if response_body:
try:
data = json.loads(response_body)
json_data = json.dumps(data)
json_data = json.loads(json_data.decode('utf8'))
except ValueError:
pass
if self.fss_debug:
LOG.info(_LI("[FSS_RESTAPI]==@json_data: %s =="), json_data)
if response.status == 200:
return json_data
elif response.status == 404:
msg = (_('FSS rest api return failed, method=%(method)s, '
'uri=%(url)s, response=%(response)s') % {
"method": method,
"url": url,
"response": response_body})
raise exception.VolumeBackendAPIException(msg)
else:
err_code = json_data['rc']
if (attempt > RETRY_CNT) or (str(err_code) not in RETRY_LIST):
err_target = ("method=%(method)s, url=%(url)s, "
"response=%(response)s" %
{"method": method, "url": url,
"response": response_body})
err_response = self.get_fss_error_code(err_code)
err = dict(
code=err_code,
text=err_response['key'],
reason=err_response['message']
)
raise FSSHTTPError(err_target, err)
attempt += 1
LOG.warning(_LW("Retry with rc: %s."), err_code)
self._random_sleep(RETRY_INTERVAL)
if err_code == 107:
self.fss_login()
def _random_sleep(self, interval=60):
nsleep = random.randint(10, interval * 10)
value = round(float(nsleep) / 10, 2)
time.sleep(value)
#
# REST API session management methods
#
def fss_login(self):
url = '%s/%s' % (FSS_AUTH, FSS_LOGIN)
params = dict(
username=self.username,
password=self.password,
server=self.hostip
)
data = self._fss_request(POST, url, params)
if 'id' in data:
self.session_id = data['id']
return self.session_id
#
# Physical Adapters management methods
#
def list_physicaladapter_info(self, adapter_id=None):
url = '%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER)
if adapter_id is not None:
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE,
FSS_PHYSICALADAPTER, adapter_id)
return self._fss_request(GET, url)
def list_fc_target_wwpn(self):
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER,
FSS_FC_TGT_WWPN)
tgt_wwpn = []
output = self._fss_request(GET, url)
if output['data']:
tgt_wwpns = output['data']
for tgt_alias_wwpn in tgt_wwpns:
tgt_wwpn.append(
str(tgt_alias_wwpn['aliaswwpn'].replace('-', '')))
return tgt_wwpn
def list_fc_client_initiators(self):
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE, FSS_PHYSICALADAPTER,
FSS_FCCLIENTINITIATORS)
return self._fss_request(GET, url)
#
# storage pool management methods
#
def list_pool_info(self, pool_id=None):
url = '%s/%s' % (FSS_PHYSICALRESOURCE, FSS_STORAGE_POOL)
if pool_id is not None:
url = '%s/%s/%s' % (FSS_PHYSICALRESOURCE,
FSS_STORAGE_POOL, pool_id)
return self._fss_request(GET, url)
#
# Volume and snapshot management methods
#
def create_vdev(self, params):
metadata = {}
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN)
output = self._fss_request(POST, url, params)
if output:
if output['rc'] == 0:
metadata['FSS-vid'] = output['id']
return metadata
def _check_mirror_sync_finished(self, vid, timeout):
starttime = time.time()
while True:
self._random_sleep()
if time.time() > starttime + timeout:
msg = (_('FSS get mirror sync timeout on vid: %s ') % vid)
raise exception.VolumeBackendAPIException(data=msg)
elif self._check_mirror_sync_status(vid):
break
def delete_vdev(self, vid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(DELETE, url, dict(force=True))
def extend_vdev(self, vid, params):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(PUT, url, params)
def rename_vdev(self, vid, params):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return vid, self._fss_request(PUT, url, params)
def list_fss_volume_info(self, vid=None):
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN)
if vid is not None:
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SAN, vid)
return self._fss_request(GET, url)
def _get_fss_vid_from_name(self, volume_name, fss_type=None):
vid = []
output = self.list_fss_volume_info()
try:
if "virtualdevices" in output['data']:
for item in output['data']['virtualdevices']:
if item['name'] in volume_name:
vid.append(item['id'])
except Exception:
msg = (_('Can not find cinder volume - %s') % volume_name)
raise exception.VolumeBackendAPIException(msg)
if fss_type is not None and fss_type == FSS_SINGLE_TYPE:
vid = ''.join(str(x) for x in vid)
return vid
def _check_if_snapshot_tm_exist(self, vid):
snapshotenabled = False
timemarkenabled = False
sizemb = 0
output = self.list_fss_volume_info(vid)
if "snapshotenabled" in output['data']:
snapshotenabled = output['data']['snapshotenabled']
if "timemarkenabled" in output['data']:
timemarkenabled = output['data']['timemarkenabled']
if "sizemb" in output['data']:
sizemb = output['data']['sizemb']
return (snapshotenabled, timemarkenabled, sizemb)
def create_vdev_snapshot(self, params):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_RESOURCE)
return self._fss_request(POST, url, params)
def create_timemark_policy(self, vid, **kwargs):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE, FSS_TIMEMARKPOLICY)
params = dict(
idlist=[vid],
automatic=dict(enabled=False),
maxtimemarkcount=MAXSNAPSHOTS
)
if kwargs.get('storagepoolid'):
params.update(kwargs)
return self._fss_request(POST, url, params)
def create_timemark(self, vid, snap_name):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
params = dict(
comment=snap_name,
priority='medium',
snapshotnotification=False
)
return self._fss_request(POST, url, params)
def get_timemark(self, vid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
return self._fss_request(GET, url)
def delete_timemark(self, timestamp):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, timestamp)
params = dict(
deleteallbefore=False
)
return self._fss_request(DELETE, url, params)
def delete_timemark_policy(self, vid):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE, FSS_TIMEMARKPOLICY)
params = dict(
idlist=[vid]
)
return self._fss_request(DELETE, url, params)
def delete_vdev_snapshot(self, vid):
url = '%s/%s/%s' % (FSS_BATCH, FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_RESOURCE)
params = dict(
idlist=[vid]
)
return self._fss_request(DELETE, url, params)
def copy_timemark(self, timestamp, **kwargs):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEMARK, timestamp)
params = dict(
action='copy',
includetimeviewdata=False
)
params.update(kwargs)
return self._fss_request(PUT, url, params)
def get_timemark_copy_status(self, vid):
url = '%s/%s/%s?type=operationstatus' % (
FSS_LOGICALRESOURCE, FSS_TIMEMARK, vid)
return self._fss_request(GET, url)
def _check_tm_copy_status(self, vid):
finished = False
output = self.get_timemark_copy_status(vid)
if output['timemarkoperationstatus']:
timemark_status = output['timemarkoperationstatus']
if timemark_status['operation'] == "copy":
if timemark_status['status'] == 'completed':
finished = True
return finished
def _check_tm_copy_finished(self, vid, timeout):
finished = False
starttime = time.time()
while True:
self._random_sleep()
if time.time() > starttime + timeout:
msg = (_('FSS get timemark copy timeout on vid: %s') % vid)
raise exception.VolumeBackendAPIException(data=msg)
elif self._check_tm_copy_status(vid):
finished = True
return finished
#
# TimeView methods
#
def create_timeview(self, tv_vid, params):
vid = ''
volume_metadata = {}
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_TIMEVIEW, tv_vid)
output = self._fss_request(POST, url, params)
if output and output['rc'] == 0:
if output['copyid'] == -1:
vid = output['id']
else:
vid = output['copyid']
volume_metadata['FSS-vid'] = vid
return volume_metadata
#
# Mirror methods
#
def create_mirror(self, vid, pool_id):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
params = dict(
category='virtual',
selectioncriteria='anydrive',
mirrortarget="virtual"
)
params.update(pool_id)
return self._fss_request(POST, url, params)
def get_mirror_sync_status(self, vid):
url = '%s/%s/%s?type=syncstatus' % (
FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
return self._fss_request(GET, url)
def _check_mirror_sync_status(self, vid):
finished = False
output = self.get_mirror_sync_status(vid)
if output['mirrorsyncstatus']:
mirrorsyncstatus = output['mirrorsyncstatus']
if mirrorsyncstatus['status'] == "insync":
if mirrorsyncstatus['percentage'] == 0:
finished = True
return finished
def _set_mirror(self, vid, **kwargs):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_MIRROR, vid)
return self._fss_request(PUT, url, kwargs)
def sync_mirror(self, vid):
return self._set_mirror(vid, action='sync')
def promote_mirror(self, vid, new_volume_name):
return self._set_mirror(vid, action='promote', name=new_volume_name)
#
# Host management methods
#
def get_server_options(self):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
return self._fss_request(GET, url)
def set_server_options(self, action):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
params = dict(
action=action,
enabled=True
)
return self._fss_request(PUT, url, params)
def get_server_name(self):
url = '%s/%s' % (FSS_SERVER, FSS_OPTIONS)
return self._fss_request(GET, url)
#
# SAN Client management methods
#
def list_client_initiators(self):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT,
FSS_ISCSI_CLIENT_INITIATORS)
return self._fss_request(GET, url)
def get_default_portal(self):
url = '%s/%s/%s' % (FSS_SERVER, FSS_OPTIONS, FSS_PORTAL)
return self._fss_request(GET, url)
def create_client(self, params):
url = '%s/%s' % (FSS_CLIENT, FSS_SANCLIENT)
return self._fss_request(POST, url, params)
def list_sanclient_info(self, client_id=None):
url = '%s/%s' % (FSS_CLIENT, FSS_SANCLIENT)
if client_id is not None:
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT,
client_id)
return self._fss_request(GET, url)
def assign_vdev(self, client_id, params):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(PUT, url, params)
def unassign_vdev(self, client_id, params):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(PUT, url, params)
def _get_iscsi_target_info(self, client_id, vid):
lun = 0
target_name = None
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsidevices' not in output['data']:
msg = _('No iscsidevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for iscsidevices in output['data']['iscsidevices']:
if int(vid) == int(iscsidevices['id']):
lun = iscsidevices['lun']
iscsitarget_info = iscsidevices['iscsitarget']
for key, value in iscsitarget_info.items():
if key == 'name':
target_name = value
return lun, target_name
def _check_host_mapping_status(self, client_id, target_id):
is_empty = False
hosting_cnt = 0
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsidevices' not in output['data']:
msg = _('No iscsidevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if len(output['data']['iscsidevices']) == 0:
is_empty = True
else:
for iscsidevices in output['data']['iscsidevices']:
iscsitarget_info = iscsidevices['iscsitarget']
for key, value in iscsitarget_info.items():
if key == 'id' and target_id == value:
hosting_cnt += 1
if hosting_cnt == 0:
is_empty = True
return is_empty
def list_iscsi_target_info(self, target_id=None):
url = '%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET)
if target_id is not None:
url = '%s/%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET,
target_id)
return self._fss_request(GET, url)
def _get_iscsi_target_id(self, initiator_iqn):
target_id = ''
client_id = ''
output = self.list_iscsi_target_info()
if 'data' not in output:
msg = _('No target in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'iscsitargets' not in output['data']:
msg = _('No iscsitargets for target.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for targets in output['data']['iscsitargets']:
if 'name' in targets:
if initiator_iqn in targets['name']:
target_id = str(targets['id'])
client_id = str(targets['clientid'])
break
return target_id, client_id
def create_iscsitarget(self, params):
url = '%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET)
return self._fss_request(POST, url, params)
def delete_iscsi_target(self, target_id):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_ISCSI_TARGET, target_id)
params = dict(
force=True
)
return self._fss_request(DELETE, url, params)
def delete_client(self, client_id):
url = '%s/%s/%s' % (FSS_CLIENT, FSS_SANCLIENT, client_id)
return self._fss_request(DELETE, url)
def _get_fc_client_info(self, client_id, vid):
lun = 0
output = self.list_sanclient_info(client_id)
if 'data' not in output:
msg = _('No target information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if 'fcdevices' not in output['data']:
msg = _('No fcdevices information in given data.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
for fcdevices in output['data']['fcdevices']:
if int(vid) == int(fcdevices['id']):
lun = fcdevices['lun']
return lun
#
# Group related methods
#
def create_group(self, params):
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP)
return self._fss_request(POST, url, params)
def list_group_info(self, gid=None):
if gid is not None:
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
else:
url = '%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP)
return self._fss_request(GET, url)
def set_group(self, gid, join_params=None, leave_params=None):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
if join_params:
self._fss_request(PUT, url, join_params)
if leave_params:
self._fss_request(PUT, url, leave_params)
def create_group_timemark_policy(self, gid, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARKPOLICY, gid)
return self._fss_request(POST, url, params)
def _check_if_group_tm_enabled(self, gid):
timemarkenabled = False
output = self.list_group_info(gid)
if "timemarkenabled" in output['data']:
timemarkenabled = output['data']['timemarkenabled']
return timemarkenabled
def create_group_timemark(self, gid, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, gid)
return self._fss_request(POST, url, params)
def get_group_timemark(self, gid):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, gid)
return self._fss_request(GET, url)
def delete_group_timemark(self, timestamp, params):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARK, timestamp)
return self._fss_request(DELETE, url, params)
def delete_group_timemark_policy(self, gid):
url = '%s/%s/%s/%s' % (FSS_LOGICALRESOURCE,
FSS_SNAPSHOT_GROUP, FSS_TIMEMARKPOLICY, gid)
return self._fss_request(DELETE, url)
def delete_snapshot_group(self, gid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
return self._fss_request(DELETE, url)
def destroy_group(self, gid):
url = '%s/%s/%s' % (FSS_LOGICALRESOURCE, FSS_SNAPSHOT_GROUP, gid)
return self._fss_request(DELETE, url)
def get_fss_error_code(self, err_id):
try:
url = '%s/%s/%s' % (FSS_SERVER, FSS_RETURN_CODE, err_id)
output = self._fss_request(GET, url)
if output['rc'] == 0:
return output
except Exception:
msg = (_('Can not find this error code:%s.') % err_id)
raise exception.APIException(reason=msg)
class FSSHTTPError(Exception):
def __init__(self, target, response):
super(FSSHTTPError, self).__init__()
self.target = target
self.code = response['code']
self.text = response['text']
self.reason = response['reason']
def __str__(self):
msg = ("FSSHTTPError code {0} returned by REST at {1}: {2}\n{3}")
return msg.format(self.code, self.target,
self.reason, self.text)
| apache-2.0 | -2,182,897,599,370,730,200 | 37.000654 | 79 | 0.557318 | false | 3.802551 | false | false | false |
vincentfung13/TwitterRepManagement | user_handle/utility.py | 1 | 2069 | from user_handle.models import UserEntity
from django.contrib.auth.models import User
from django.http import HttpResponse
from twitter_services.tweet_processing import utility as tweet_util
import json
# Return true if a username has been registered before and false otherwise
def check_exist(username, email):
try:
User.objects.get(username=username)
User.objects.get(email=email)
return True
except User.DoesNotExist:
return False
# Create a new user in the database
def save_user(username, password, email):
try:
user = User.objects.create_user(username, email, password)
except Exception as e:
return None, str(e)
return user
# Add an entity to interest list
# TODO: improve database usage
def add_interested(user, entity):
# Unique pair issue is handled by DBMS
UserEntity.objects.create(user=user, entity=entity)
# Remove entity from interest list:
# TODO: improve database usage
def remove_entity(user, entity):
UserEntity.objects.get(user=user, entity=entity).delete()
def json_response(ret, data="", msg=""):
resp = {"msg": msg, "ret": ret, "data": data}
return HttpResponse(json.dumps(resp, ensure_ascii=False), content_type="application/json")
# Strip out the topics given a topic str
def get_topics(topic_list):
word_freq = {}
entities_lower = [entity.lower() for entity in tweet_util.entities_list]
for topic_tuple in topic_list:
keywords_weight = topic_tuple[1].split('+')
for keyword_weight in keywords_weight:
weight = keyword_weight.split('*')[0].strip()
word = keyword_weight.split('*')[1].strip()
if (word in word_freq) and (word not in entities_lower):
word_freq[word] += float(weight)
else:
word_freq[word] = float(weight)
    # Format the keyword weights for display
topic_str = ''
for keyword, frequency in word_freq.iteritems():
topic_str = topic_str + keyword + ',' + str(frequency) + '\n'
return topic_str[:-1]
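

# Illustrative example (not part of the original module): get_topics expects
# gensim-style LDA topics, i.e. (topic_id, "weight*word + weight*word + ...")
# tuples. The sample values below are made up and assume neither word appears
# in tweet_util.entities_list.
if __name__ == '__main__':
    _sample_topics = [(0, '0.40*service + 0.30*bank'), (1, '0.50*service')]
    print(get_topics(_sample_topics))  # e.g. "service,0.9\nbank,0.3"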
| mit | -1,225,466,274,652,373,500 | 29.426471 | 94 | 0.667956 | false | 3.918561 | false | false | false |
vcelis/M101P | week3/homework_3_1/hw1.py | 1 | 2357 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Vincent Celis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pymongo
import sys
from pymongo import MongoClient
# Establish database connection
client = MongoClient('mongodb://localhost', safe=True)
def find():
"""Remove the lowest grade of type 'homework' for each student"""
# Getting the database
db = client.school
# Getting the collection
students = db.students
try:
# Find all that matches our query and selector
cursor = students.find({})
except:
print 'Unexpected error:', sys.exc_info()[1]
for student in cursor:
homeworks = sorted([x for x in student['scores'] if x['type'] == 'homework'], key=lambda k: k['score'])
others = [x for x in student['scores'] if x['type'] != 'homework']
student['scores'] = homeworks[1:] + others
students.save(student)
find()
"""To verify that you have completed this task correctly, provide the identity of the student with the highest average in the class with following query that uses the aggregation framework. The answer will appear in the _id field of the resulting document.
db.students.aggregate( { '$unwind' : '$scores' } , { '$group' : { '_id' : '$_id' , 'average' : { $avg : '$scores.score' } } } , { '$sort' : { 'average' : -1 } } , { '$limit' : 1 } )
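
An equivalent check from Python with pymongo might look like this (illustrative
sketch; the shape of the returned object depends on the pymongo version):

    pipeline = [
        {'$unwind': '$scores'},
        {'$group': {'_id': '$_id', 'average': {'$avg': '$scores.score'}}},
        {'$sort': {'average': -1}},
        {'$limit': 1},
    ]
    print db.students.aggregate(pipeline)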
""" | mit | -3,611,229,863,796,380,000 | 39.655172 | 256 | 0.712346 | false | 4.022184 | false | false | false |
averagehat/vim-fireplace-talks | vim-talks.py | 1 | 3483 | from snake import *
from functools import partial
#belongs as ~/.vimrc.py
#"" hook to normal mode movements
#":autocmd CursorMoved * :call SpeakLine()
#
#"nmap <silent> <C-i> :set opfunc=NetBeansCommand<CR>g@
#
SPEAK_CHAR = False #speak letter-by-letter
SPEAK_BLOCKING = False
SETTINGS = { ":speed" : 0.5} #...
compose2 = lambda f, g: lambda x: f(g(x))
compose = partial(reduce, compose2)
def is_sentence(s): pass
speak_line = compose(speak, get_current_line)
speak_word = compose(speak, get_word)
speak_char = compose(speak, get_char)
speak_visual = compose(speak, get_visual_selection)
@opfunc("<some-motion>")
def example_opfunc(a_0, a_type):
''' see :help :map-operator in vim.
@param a_type: one of "line", "block", "char"
where block is blcokwise-visual '''
if a_0: pass #visual mode
if mode == 'v': speak_visual() #I think?
''' The '[ mark is positioned at the start of the text
moved over by {motion}, the '] mark on the last character of the text.'''
reg="a"
with preserve_registers(reg): pass
'''yank the register and speak it! rad.'''
def register_opfunc(key, opfunc):
'''
0. register the original python function in snake.
1. create a vim function with a unique name, the body of which is vim_opfunc_template
2. run that opfunc as a command
3. register normal and visual mappings
4.
'''
vim_opfunc_template = '''function! {name}(type, ...)
silent exe ":py " . {py_func} . a:0 . "," a:type . ")"
endfunction'''
key_map(key, ":set opfunc={0}<CR>g@".format(vim_func_name))
#this visual keymapping could be handled by a separate pure python function
key_map(key, ":<C-U>call {0}(visualmode(), 1)".format(vim_func_name), mode=VISUAL_MODE)
def speak(s): pass #return message
def get_last_motion(c1, c3):
last_cursor = (0, 0)
@on_event('CursorMoved') #on_events(['....
def speak_on_cursor_moved(ctx):
''' A word will not cover more than two lines, but a sentence can be arbitrary
number of lines long. One options would be to look backwards (keys('(')) but meh.'''
cursor = get_cursor()
dx, dy = last_cursor[0] - cursor[0], last_cursor[1] - cursor[1]
s = get_between(last_cursor, cursor)
#could potentiall do paragraphs but meh.
    if is_sentence(s): speak_sentence()
elif "\n" in s: speak_line()
elif is_word(s.strip()): speak_word()
elif len(s) == 1: speak_char()
last_cursor = cursor
''' This is another way to handle movement events. '''
def speak_motion(key):
if not SPEAKING: return
reg = "@"
    with preserve_registers(reg):
''' \" accesses the register, \y yanks '''
keys('"{0}{1}y'.format(reg, key))
yanked = get_register(reg)
speak(yanked)
'''perform the original motion'''
keys(key)
def speak_before_key(key):
specific_func = partial(speak_motion, key)
key_map(key, specific_func)
# but have to handle searching
["w" ,"W" ,"e" ,"E" ,")" ,"}" ,"$" ,"<C-f>" ,"<C-d>" ,"0"]
''' This doesn't handle:
searching
marks
repeat-motions
it may not play nicely with opfunc!
it will speak any deleted words . . . i.e. keys('dw')'''
'''
Another approach to this would be to start with a list of known movement commands.
for each movement, map that key to do the following:
if speech is not on, perform the key and return. else:
yank the motion into a temporary register.
speak that register
perform that actual motion.
'''
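
# A minimal sketch of the approach described above (illustrative only): wire up
# the known motion keys so each one is spoken before the cursor moves. Assumes
# the snake helpers used in this file plus a SPEAKING flag defined elsewhere.
MOTION_KEYS = ["w", "W", "e", "E", ")", "}", "$", "<C-f>", "<C-d>", "0"]
for _motion in MOTION_KEYS:
    speak_before_key(_motion)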
| epl-1.0 | 7,756,230,314,261,015,000 | 29.823009 | 92 | 0.641401 | false | 3.169245 | false | false | false |
smithsps/LawsonAPI | old/src/old/discover.py | 1 | 1645 | import subprocess
import User
# Starts ssh process an and returns it
#NOT USED ATM
def ssh(host, command):
#https://gist.github.com/bortzmeyer/1284249
ssh = subprocess.Popen(['ssh', '%s' % host, '%s' % command], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return ssh
#Runs who command with ssh return subprocess
def who(host):
ssh = subprocess.Popen(['ssh', '%s' % host, 'who'], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return ssh
#who = ssh.stdout.readlines()
'''users = list()
for line in who:
split = line.split();
#print(split[1])
users.append(User.User(split[0], identify(split[0]), ' '.join(split[2:5]), split[1] + '-' + host))
return ssh
'''
def who_list(hosts):
processes = list()
for host in hosts:
# [ name, process]
processes.append([host, who(host)])
return processes
#Go to Lore and get /etc/passwd
#Put into dictionary, return
def getLookupTable():
ssh = subprocess.Popen(['ssh', 'lore', 'cat', '/etc/passwd'], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
raw = ssh.stdout.readlines()
#Get Usernames:Names add to dictionary
lookup = dict()
for line in raw:
rawSplit = line.split(":")
lookup[rawSplit[0]] = rawSplit[4].strip(',')
#print(lookup)
return lookup
def identify(username):
    #TODO: use a static lookup table if available
if(not identify.lookup):
print("Getting new User Lookup Table..")
identify.lookup = getLookupTable()
#lookup = getLookupTable()
if(username in identify.lookup):
return identify.lookup[username]
else:
return 'N/A'
identify.lookup = dict()
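

if __name__ == '__main__':
    # Illustrative usage only: the host names are placeholders and passwordless
    # ssh access to them is assumed.
    for host, proc in who_list(['lore', 'data']):
        for line in proc.stdout.readlines():
            fields = line.split()
            if fields:
                print host, fields[0], identify(fields[0])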
| mit | -4,881,728,816,694,318,000 | 25.967213 | 124 | 0.67234 | false | 3.448637 | false | false | false |
harsh8398/pybackup | pybackup/incremental_backup.py | 1 | 2029 | import os
import shutil
from .rsync_copy import Rsync
from tqdm import tqdm
from .backup import Backup
class IncrementalBackup(Backup):
def __init__(self, src, dst):
super().__init__(src, dst)
self.THRESHOLD_DIFF_RATIO = 0.75
self.WHOLE_COPY_CONST = 0
self.PARTIAL_COPY_CONST = 1
@staticmethod
def scantree(path):
for entry in os.scandir(path):
if entry.is_dir(follow_symlinks=False):
yield from IncrementalBackup.scantree(entry.path)
else:
yield entry
def _get_dst_path_from_src(self, path):
return str.replace(path, self.src, self.dst, 1)
def _get_diff_files(self):
src_entries = IncrementalBackup.scantree(self.src)
for entry in src_entries:
if not os.path.exists(self._get_dst_path_from_src(entry.path)):
yield entry.path, self.WHOLE_COPY_CONST
elif entry.stat().st_mtime >= self._metadata["last_run_epoch"]:
yield entry.path, self.PARTIAL_COPY_CONST
@staticmethod
def patch_file(src_file, dst_file):
        # Opening dst_file with "wb" immediately would truncate it before the
        # old content could be read, so patch into a temp file and swap it in.
        tmp_file = dst_file + ".tmp"
        with open(src_file, "rb") as patchedstream, \
                open(dst_file, "rb") as instream, \
                open(tmp_file, "wb") as outstream:
            Rsync(patchedstream, instream, outstream).rsync_copy()
        os.replace(tmp_file, dst_file)
@staticmethod
def copy_file(src_file, dst_file):
# XXX: should we preserve metadata?
shutil.copy2(src_file, dst_file)
def _copytree(self):
diff_files = self._get_diff_files()
for filepath, action in tqdm(diff_files):
src_filepath = filepath
dst_filepath = self._get_dst_path_from_src(filepath)
if action == self.WHOLE_COPY_CONST:
self.copy_file(src_filepath, dst_filepath)
elif action == self.PARTIAL_COPY_CONST:
IncrementalBackup.patch_file(src_filepath, dst_filepath)
def run(self):
if not os.path.exists(self.dst):
os.makedirs(self.dst)
self._copytree()
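

if __name__ == "__main__":
    # Illustrative usage; both paths are placeholders.
    backup = IncrementalBackup("/data/projects", "/mnt/backup/projects")
    backup.run()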
| mit | 6,733,685,852,030,608,000 | 32.816667 | 75 | 0.601281 | false | 3.636201 | false | false | false |
adbrant/zuma-fpga | source/InitFpga.py | 1 | 15060 | from structs import *
import globs
import time
import re
import const
import closNetwork
## Build the simple network.
# In the simple network, every pin of a BLE gets
# its own inode, which can route from every input
# of the IIB. This can be a cluster input or a LUT feedback.
def buildSimpleNetwork(cluster,key):
# make inodes for internal cluster connection
for lut in range(globs.params.N):
cluster.LUT_input_nodes.append([])
for pin in range(globs.params.K):#input nodes
inode = Node()
inode.type = 7
# append all cluster inputs as an input
for clusterInput in cluster.inputs:
inode.inputs.append(clusterInput.id)
            #append all ffmuxes as an input
for ffmux in cluster.LUT_FFMUX_nodes:
inode.inputs.append(ffmux)
inode.location = key
# append the node dict
globs.addNode(inode)
#append the input node to the cluster.
cluster.LUT_input_nodes[lut].append(inode.id)
#connect the inode with the elut node
elut = globs.nodes[cluster.LUT_nodes[lut]]
elut.inputs.append(inode.id)
## Builds the inner structure (BLEs + IIB) for each cluster.
# The interconnection block (IIB) can be implemented
# by a simple network or a clos network.
def build_inner_structure():
count = len(globs.nodes)
for key in globs.clusters:
cluster = globs.clusters[key]
#build lut and ffmux nodes and append them to the
#node graph
for lut in range(globs.params.N):
#actual eLUT
elut = Node()
elut.type = 8
elut.location = key
elut.eLUT = True
# append to the node dict
globs.addNode(elut)
# write its id to the LUT_nodes list
cluster.LUT_nodes.append(elut.id)
ffmux = Node()
ffmux.type = 9
ffmux.ffmux = True
ffmux.inputs.append(elut.id)
ffmux.location = key
            #The LUT node drives this node.
            #Because we use both the registered and unregistered output, the
            #source of the mux is always the LUT.
            #The routing is handled by the useFF flag: when it is on, channel 2
            #is used, otherwise channel 1 (the LUT).
            #Therefore we can always set the final routing to the LUT.
ffmux.source = elut.id
#append the ffmux node to the node graph
globs.addNode(ffmux)
#append it to the cluster list
cluster.LUT_FFMUX_nodes.append(ffmux.id)
# Reconnect the corresponding cluster output opin in the node graph:
# Disconnect it from the source node
# Connect it to the ffmux
opin_id = cluster.outputs[lut].id
globs.nodes[opin_id].inputs = [ffmux.id]
globs.nodes[opin_id].source = ffmux.id
# we can use the clos or simple network
if globs.params.UseClos:
print ' ----------- build clos network ----------------'
cluster.clos = closNetwork.buildClosNetwork(cluster, \
key, globs.params.I,globs.params.K)
else:
print ' ----------- build simple network --------------'
buildSimpleNetwork(cluster,key)
## This function builds up the virtual fpga.
# First it reads the graph.echo file and build up the outer structure of the
# virtual fpga, consisting of the clusters, I/O Pins, and switchboxes.
# It also builds up the node graph and inits the connections to the
# outer structure through the driver objects.
# Second it builds the inner structure (IIB + ble's) for each cluster.
# @param filename the path to the graph.echo file
def load_graph(filename):
#parse the lines of the following format:
# id type location index direction driver
#Node: 0 SINK (0, 1) Ptc_num: 0 Direction: OPEN Drivers: OPEN
#open the graph.echo file
fh = open(filename,"r")
#counter for tracking the current id node.
id = 0
#parse the file and build up the node graph
while 1:
line = fh.readline() # read node type, location, ...
if not line:
break
str = line.split()
#print id, int(str[1])
#assert(id is int(str[1]))
n = Node()
#set the id.
n.id = int(str[1])
if (str[2] == 'SINK'):
n.type = 1
elif (str[2] == 'SOURCE'):
n.type = 2
elif (str[2] == 'OPIN'):
n.type = 3
elif (str[2] == 'IPIN'):
n.type = 4
elif (str[2] == 'CHANX'):
n.type = 5
elif (str[2] == 'CHANY'):
n.type = 6
else:
assert(0)
nums = re.findall(r'\d+', line)
nums = [int(i) for i in nums ]
#get the location and the index.
#The index is the pad position, pin position or track number
#depending its a pin on an I/O block, cluster or a channel.
#Depending on this node type the values are on different positions
#in the file.
if n.type < 5 or len(nums) < 5:
n.location = (nums[1],nums[2])
n.index = nums[3]
else:
n.location = (nums[1],nums[2],nums[3],nums[4])
n.index = nums[5]
#set the direction of the node.
if n.type > 4:
dir = line.split(' ')[-3]
if dir == 'INC_DIRECTION':
#north or east
if n.type is 5:
n.dir = const.E
else:
n.dir = const.N
else:
if n.type is 5:
n.dir = const.W
else:
n.dir = const.S
#read the edge ids and append them to
#the edge list of the node
line = fh.readline() # read edges
nums = re.findall(r'\d+', line)
#assign the ids
n.edges = [int(i) for i in nums[1:]]
#skip the rest of the information
line = fh.readline() # skip switch types
line = fh.readline() # skip (occupancy?) and capacity
line = fh.readline() # skip R and C
line = fh.readline() # skip cost index
line = fh.readline() # skip newline dividing records
#clusterx,clustery are the maximal value of location coords.
#find these maximal location coords
globs.clusterx = max(globs.clusterx,n.location[0])
globs.clustery = max(globs.clustery,n.location[1])
#append the node to the global node graph
globs.nodes.append(n)
#check if the node was append in a previous loop.
#current node should be the last node in the list.
if globs.nodes[n.id] is not n:
print 'graph error', len(globs.nodes), n.id
#increase the current node id
id = id + 1
#end up parsing.
#now build the outer structure.
#initialize the cluster grid, switchbox and IOs array.
#initialize the clusters.
#clusters are on all locations except (0,x) , (y,0) which are IOs
for x in range(1, globs.clusterx):
for y in range(1, globs.clustery):
globs.clusters[(x,y)] = Cluster()
#every location get a switch box
for x in range(0, globs.clusterx):
for y in range(0, globs.clustery):
globs.switchbox[(x,y)] = SBox()
#build the I/O blocks
#build blocks from (0,1) - (0,clustery-1),
#and (clusterx,1) - (clusterx,clusterx-1)
for x in [0, globs.clusterx]:
#TODO: TW: Removed unnecessary range extension
for y in range(1, globs.clustery):
globs.IOs[(x,y)] = IO()
#build blocks from (1,0) - (clusterx-1,0),
#and (1,clustery) - (clusterx-1,clustery)
for y in [0, globs.clustery]:
        #TODO: TW: Removed unnecessary range duplication
for x in range(1, globs.clusterx):
globs.IOs[(x,y)] = IO()
# set the input ids for every node in the graph
for n in globs.nodes:
for e in n.edges:
globs.nodes[e].inputs.append(n.id)
#counters for a later echo.
global_outputs = 0
global_inputs = 0
#append the source and sink nodes to the orderedInput
#and orderedOutput list
#init the drivers for the I/O blocks and switchboxes.
for n in globs.nodes:
# reuse SINKs and SOURCEs for ordered global IO
if n.type is 1: #SINK
pass
elif n.type is 2: #SOURCE
pass
# for OPINs and IPINs a notable assumption was made
# that they are listed in increasing order in the file,
# while the SOURCEs and SINKs can be spread over
# this file.
# TODO: Is that always true?
# This is important because the orderedInput and orderedOutput lists
        # are appended with the corresponding source and sink nodes
        # for those OPINs and IPINs in their order.
# The inputs for OPINs are SOURCE Nodes,
# the edges of IPINs are SINK nodes
#node is an OPIN
elif n.type is 3:
# OPIN of a global IO pad is an FPGA input
            # check if this is an input pin on an I/O block,
# by checking if the location is on the edge of the fpga
if n.location[0] in [0, globs.clusterx] \
or n.location[1] in [0, globs.clustery]:
#init a corresponding driver for this node.
globs.IOs[n.location].inputs.append(Driver(n.id, n.index))
# add the SOURCE node id to the orderedInputs list
# The SOURCE node id is only inputs[0],
# because an fpga input pin have only
# one SOURCE node (one input).
globs.orderedInputs.append(n.inputs[0])
global_inputs += 1
            #this is a cluster output pin
            #append it to the output list
else:
globs.clusters[n.location].outputs.append(Driver(n.id,n.index))
#print 'clust output', n.location, n.id
#node is an IPIN
elif n.type is 4:
# IPIN of a global IO pad is an FPGA output
# global output without predecessor can be ignored
if len(n.inputs) == 0: #dont get input from dangling node
print 'dropping node', n.id, 'from', n.location
else:
                # check if this is an output pin on an I/O block,
# by checking if the location is on the edge of the fpga
if n.location[0] in [0, globs.clusterx] \
or n.location[1] in [0, globs.clustery]:
#init a corresponding driver for this node.
globs.IOs[n.location].outputs.append(Driver(n.id,n.index))
                    #TODO: why only edges[0]? Okay, there can be only one;
                    #if not, there would be multiple drivers for that output pin,
                    #or this pin would have them as inputs?
#add the SINK node id to the orderedOutputs list
globs.orderedOutputs.append(n.edges[0])
global_outputs += 1
                #this is a cluster input pin
                #append it to the input list
else:
globs.clusters[n.location].inputs.append(Driver(n.id, n.index))
#node is a CHANNEL
elif n.type is 5 or n.type is 6:
#get the corresponding switchbox for that node
source = n.location[0:2]
sbox = globs.switchbox[source]
#append the driver to this node to the switchbox
if n.type is 5:
sbox.driversx.append(Driver(n.id, n.index, n.dir))
else:
sbox.driversy.append(Driver(n.id, n.index, n.dir))
print "Global IO: out", global_outputs, "and in", global_inputs
# build a list of ids for all IPINs and OPINs nodes of the graph
# go through the locations and copy the ids
# of the IPINS and OPins of that location
allOutputNodes = []
allInputNodes = []
for key in globs.IOs:
io = globs.IOs[key]
# append the IPIN node. yes the IPIN :)
for i in io.outputs:
allOutputNodes.append(i.id)
# append the OPIN node.
for i in io.inputs:
allInputNodes.append(i.id)
# create global I/O permutation muxes for the fpga inputs.
# Therefore transform the source and sink nodes to I/O permutation muxes
# go through all OPINs nodes step by step.
# grab their corresponding SOURCE node and add the other
# available OPINs as an edge to that source
for i,node in enumerate(allInputNodes):
# get the corresponding SOURCE node of that OPIN
# it is the same id as the input of the OPIN
source = globs.nodes[globs.orderedInputs[i]]
source.name = ''
# Disabling this should automatically disable
# the permutation MUXes and their configuration...
if globs.params.orderedIO:
source.type = 10
#change the location of that source
source.location = (0, 0)
# add the other OPINS as an edge for that source
for input in allInputNodes:
# if its not the initial edge (the initial OPIN),
# add this OPIN
if input != source.edges[0]:
#add the opin
source.edges.append(input)
# also set the source node as an input to that OPIN
globs.nodes[input].inputs.append(source.id)
# create global I/O permutation muxes for the fpga outputs.
# go through all IPINs nodes step by step.
# grab their corresponding SINK node and add the other
# available IPINs as an input to that sink
for i,node in enumerate(allOutputNodes):
# get the corresponding SINK node of that IPIN
# it is the same id as the edge of the IPIN
sink = globs.nodes[globs.orderedOutputs[i]]
sink.name = ''
# Disabling this should automatically disable
# the permutation MUXes and their configuration...
if globs.params.orderedIO:
sink.type = 10
#change the location of that sink
sink.location = (globs.clusterx, globs.clustery)
for output in allOutputNodes:
# if its not the initial input (the initial IPIN),
# add this IPIN
if output != sink.inputs[0]:
#add the ipin
sink.inputs.append(output)
# also set the sink node as an edge to that IPIN
globs.nodes[output].edges.append(sink.id)
print "All input nodes: ", allInputNodes
print "All output nodes: ", allOutputNodes
#build the inner structure
build_inner_structure()
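

if __name__ == '__main__':
    # Illustrative only: 'graph.echo' is a placeholder path and globs.params
    # must already be initialised by the surrounding ZUMA flow.
    load_graph('graph.echo')
    print 'clusters:', len(globs.clusters), 'nodes:', len(globs.nodes)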
| bsd-2-clause | 4,496,067,873,928,476,700 | 35.464891 | 83 | 0.568858 | false | 4.001063 | false | false | false |
jhosmer/PySmile | tests/pysmile_tests.py | 1 | 11679 | #!/usr/bin/env python
import os
import glob
import unittest
import pysmile
import json
__author__ = 'Jonathan Hosmer'
class PySmileTestDecode(unittest.TestCase):
def setUp(self):
curdir = os.path.dirname(os.path.abspath(__file__))
self.smile_dir = os.path.join(curdir, 'data', 'smile')
self.json_dir = os.path.join(curdir, 'data', 'json')
def test_json_org_sample1(self):
s = os.path.join(self.smile_dir, 'json-org-sample1.smile')
j = os.path.join(self.json_dir, 'json-org-sample1.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_json_org_sample2(self):
s = os.path.join(self.smile_dir, 'json-org-sample2.smile')
j = os.path.join(self.json_dir, 'json-org-sample2.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_json_org_sample3(self):
s = os.path.join(self.smile_dir, 'json-org-sample3.smile')
j = os.path.join(self.json_dir, 'json-org-sample3.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_json_org_sample4(self):
s = os.path.join(self.smile_dir, 'json-org-sample4.smile')
j = os.path.join(self.json_dir, 'json-org-sample4.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_json_org_sample5(self):
s = os.path.join(self.smile_dir, 'json-org-sample5.smile')
j = os.path.join(self.json_dir, 'json-org-sample5.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_numbers_int_4k(self):
s = os.path.join(self.smile_dir, 'numbers-int-4k.smile')
j = os.path.join(self.json_dir, 'numbers-int-4k.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_numbers_int_64k(self):
s = os.path.join(self.smile_dir, 'numbers-int-64k.smile')
j = os.path.join(self.json_dir, 'numbers-int-64k.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_test1(self):
s = os.path.join(self.smile_dir, 'test1.smile')
j = os.path.join(self.json_dir, 'test1.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
def test_test2(self):
s = os.path.join(self.smile_dir, 'test2.smile')
j = os.path.join(self.json_dir, 'test2.jsn')
b = json.load(open(j, 'rb'))
try:
a = pysmile.decode(open(s, 'rb').read())
except pysmile.SMILEDecodeError, e:
self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1]))
else:
if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
elif isinstance(a, dict):
self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
else:
self.fail('Unexpected Type: {!r}'.format(type(a)))
class PySmileTestEncode(unittest.TestCase):
def setUp(self):
curdir = os.path.dirname(os.path.abspath(__file__))
self.smile_dir = os.path.join(curdir, 'data', 'smile')
self.json_dir = os.path.join(curdir, 'data', 'json')
def test_json_org_sample1(self):
s = os.path.join(self.smile_dir, 'json-org-sample1.smile')
j = os.path.join(self.json_dir, 'json-org-sample1.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_json_org_sample2(self):
s = os.path.join(self.smile_dir, 'json-org-sample2.smile')
j = os.path.join(self.json_dir, 'json-org-sample2.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_json_org_sample3(self):
s = os.path.join(self.smile_dir, 'json-org-sample3.smile')
j = os.path.join(self.json_dir, 'json-org-sample3.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_json_org_sample4(self):
s = os.path.join(self.smile_dir, 'json-org-sample4.smile')
j = os.path.join(self.json_dir, 'json-org-sample4.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_json_org_sample5(self):
s = os.path.join(self.smile_dir, 'json-org-sample5.smile')
j = os.path.join(self.json_dir, 'json-org-sample5.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_numbers_int_4k(self):
s = os.path.join(self.smile_dir, 'numbers-int-4k.smile')
j = os.path.join(self.json_dir, 'numbers-int-4k.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_numbers_int_64k(self):
s = os.path.join(self.smile_dir, 'numbers-int-64k.smile')
j = os.path.join(self.json_dir, 'numbers-int-64k.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_test1(self):
s = os.path.join(self.smile_dir, 'test1.smile')
j = os.path.join(self.json_dir, 'test1.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
def test_test2(self):
s = os.path.join(self.smile_dir, 'test2.smile')
j = os.path.join(self.json_dir, 'test2.jsn')
a = pysmile.encode(json.load(open(j, 'rb')))
b = open(s, 'rb').read()
self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a))
class PySmileTestMisc(unittest.TestCase):
def test_1(self):
a = [1]
b = pysmile.decode(':)\n\x03\xf8\xc2\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_2(self):
a = [1, 2]
b = pysmile.decode(':)\n\x03\xf8\xc2\xc4\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_3(self):
a = [1, 2, {'c': 3}]
b = pysmile.decode(':)\n\x03\xf8\xc2\xc4\xfa\x80c\xc6\xfb\xf9')
self.assertListEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_4(self):
a = {'a': 1}
b = pysmile.decode(':)\n\x03\xfa\x80a\xc2\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_5(self):
a = {'a': '1', 'b': 2, 'c': [3], 'd': -1, 'e': 4.20}
b = pysmile.decode(
':)\n\x03\xfa\x80a@1\x80c\xf8\xc6\xf9\x80b\xc4\x80e(fL\x19\x04\x04\x80d\xc1\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
def test_6(self):
a = {'a': {'b': {'c': {'d': ['e']}}}}
b = pysmile.decode(
':)\n\x03\xfa\x80a\xfa\x80b\xfa\x80c\xfa\x80d\xf8@e\xf9\xfb\xfb\xfb\xfb')
self.assertDictEqual(a, b, 'Expected:\n{!r}\nGot:\n{!r}'.format(a, b))
| apache-2.0 | 6,097,365,158,100,773,000 | 43.238636 | 93 | 0.529669 | false | 2.789348 | true | false | false |
Fedik/gramps | gramps/gen/plug/_pluginreg.py | 4 | 48822 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
This module provides the base class for plugin registration.
It provides an object containing data about the plugin (version, filename, ...)
and a register for the data of all plugins .
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import os
import sys
import re
import traceback
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...version import VERSION as GRAMPSVERSION, VERSION_TUPLE
from ..const import IMAGE_DIR
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
import logging
LOG = logging.getLogger('._manager')
#-------------------------------------------------------------------------
#
# PluginData
#
#-------------------------------------------------------------------------
#a plugin is stable or unstable
STABLE = 0
UNSTABLE = 1
STATUS = [STABLE, UNSTABLE]
STATUSTEXT = {STABLE: _('Stable'), UNSTABLE: _('Unstable')}
#possible plugin types
REPORT = 0
QUICKREPORT = 1 # deprecated
QUICKVIEW = 1
TOOL = 2
IMPORT = 3
EXPORT = 4
DOCGEN = 5
GENERAL = 6
MAPSERVICE = 7
VIEW = 8
RELCALC = 9
GRAMPLET = 10
SIDEBAR = 11
DATABASE = 12
RULE = 13
PTYPE = [REPORT, QUICKREPORT, TOOL, IMPORT, EXPORT, DOCGEN, GENERAL,
MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR, DATABASE, RULE]
PTYPE_STR = {
REPORT: _('Report') ,
QUICKREPORT: _('Quickreport'),
TOOL: _('Tool'),
IMPORT: _('Importer'),
EXPORT: _('Exporter'),
DOCGEN: _('Doc creator'),
GENERAL: _('Plugin lib'),
MAPSERVICE: _('Map service'),
VIEW: _('Gramps View'),
RELCALC: _('Relationships'),
GRAMPLET: _('Gramplet'),
SIDEBAR: _('Sidebar'),
DATABASE: _('Database'),
RULE: _('Rule')
}
#possible report categories
CATEGORY_TEXT = 0
CATEGORY_DRAW = 1
CATEGORY_CODE = 2
CATEGORY_WEB = 3
CATEGORY_BOOK = 4
CATEGORY_GRAPHVIZ = 5
CATEGORY_TREE = 6
REPORT_CAT = [ CATEGORY_TEXT, CATEGORY_DRAW, CATEGORY_CODE,
CATEGORY_WEB, CATEGORY_BOOK, CATEGORY_GRAPHVIZ,
CATEGORY_TREE]
#possible tool categories
TOOL_DEBUG = -1
TOOL_ANAL = 0
TOOL_DBPROC = 1
TOOL_DBFIX = 2
TOOL_REVCTL = 3
TOOL_UTILS = 4
TOOL_CAT = [ TOOL_DEBUG, TOOL_ANAL, TOOL_DBPROC, TOOL_DBFIX, TOOL_REVCTL,
TOOL_UTILS]
#possible quickreport categories
CATEGORY_QR_MISC = -1
CATEGORY_QR_PERSON = 0
CATEGORY_QR_FAMILY = 1
CATEGORY_QR_EVENT = 2
CATEGORY_QR_SOURCE = 3
CATEGORY_QR_PLACE = 4
CATEGORY_QR_REPOSITORY = 5
CATEGORY_QR_NOTE = 6
CATEGORY_QR_DATE = 7
CATEGORY_QR_MEDIA = 8
CATEGORY_QR_CITATION = 9
CATEGORY_QR_SOURCE_OR_CITATION = 10
# Modes for generating reports
REPORT_MODE_GUI = 1 # Standalone report using GUI
REPORT_MODE_BKI = 2 # Book Item interface using GUI
REPORT_MODE_CLI = 4 # Command line interface (CLI)
REPORT_MODES = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
# Modes for running tools
TOOL_MODE_GUI = 1 # Standard tool using GUI
TOOL_MODE_CLI = 2 # Command line interface (CLI)
TOOL_MODES = [TOOL_MODE_GUI, TOOL_MODE_CLI]
# possible view orders
START = 1
END = 2
#-------------------------------------------------------------------------
#
# Functions and classes
#
#-------------------------------------------------------------------------
def myint(s):
"""
Protected version of int()
"""
try:
v = int(s)
except:
v = s
return v
def version(sversion):
"""
Return the tuple version of a string version.
"""
return tuple([myint(x or "0") for x in (sversion + "..").split(".")])
def valid_plugin_version(plugin_version_string):
"""
Checks to see if string is a valid version string for this version
of Gramps.
"""
if not isinstance(plugin_version_string, str): return False
dots = plugin_version_string.count(".")
if dots == 1:
plugin_version = tuple(map(int, plugin_version_string.split(".", 1)))
return plugin_version == VERSION_TUPLE[:2]
elif dots == 2:
plugin_version = tuple(map(int, plugin_version_string.split(".", 2)))
return (plugin_version[:2] == VERSION_TUPLE[:2] and
plugin_version <= VERSION_TUPLE)
return False
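# Illustrative sketch (added for clarity, not part of the original module):
# expected behaviour of version() and valid_plugin_version(). The concrete
# results assume a running Gramps with VERSION_TUPLE == (5, 1, 2); that tuple
# is an assumption made only for this example.
#
#   version("5.1")                 -> (5, 1, 0, 0)     # missing parts padded with 0
#   version("5.1.2")               -> (5, 1, 2, 0, 0)
#   valid_plugin_version("5.1")    -> True    # matching major.minor is enough
#   valid_plugin_version("5.1.3")  -> False   # micro exceeds the running 5.1.2
#   valid_plugin_version("5.0.1")  -> False   # minor version differs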
class PluginData:
"""
This is the base class for all plugin data objects.
The workflow is:
1. plugin manager reads all register files, and stores plugin data
objects in a plugin register
2. when plugin is needed, the plugin register creates the plugin, and
the manager stores this, after which it can be executed.
Attributes present for all plugins
.. attribute:: id
A unique identifier for the plugin. This is eg used to store the plugin
settings. MUST be in ASCII, with only "_- ().,'" special characters.
.. attribute:: name
A friendly name to call this plugin (normally translated)
.. attribute:: name_accell
        A friendly name to call this plugin (normally translated), with an
        accelerator present (eg '_Descendant report', with D as the accelerator
        key)
.. attribute:: description
A friendly description of what the plugin does
.. attribute:: version
The version of the plugin
.. attribute:: status
The status of the plugin, STABLE or UNSTABLE
UNSTABLE is only visible in development code, not in release
.. attribute:: fname
The python file where the plugin implementation can be found
.. attribute:: fpath
The python path where the plugin implementation can be found
.. attribute:: ptype
The plugin type. One of REPORT , QUICKREPORT, TOOL, IMPORT,
EXPORT, DOCGEN, GENERAL, MAPSERVICE, VIEW, GRAMPLET, DATABASE, RULE
.. attribute:: authors
List of authors of the plugin, default=[]
.. attribute:: authors_email
List of emails of the authors of the plugin, default=[]
.. attribute:: supported
Bool value indicating if the plugin is still supported, default=True
.. attribute:: load_on_reg
        bool value, if True, the plugin is loaded on Gramps startup. Some
        plugins need this. Only set this value if the plugin must be loaded
        immediately on startup (eg for testing), default=False
.. attribute: icons
New stock icons to register. A list of tuples (stock_id, icon_label),
eg:
[('gramps_myplugin', _('My Plugin')),
        ('gramps_myplugin_open', _('Open Plugin'))]
The icon directory must contain the directories scalable, 48x48, 22x22
and 16x16 with the icons, eg:
scalable/gramps_myplugin.svg
48x48/gramps_myplugin.png
22x22/gramps_myplugin.png
.. attribute: icondir
The directory to use for the icons. If icondir is not set or None, it
reverts to the plugindirectory itself.
Attributes for RELCALC plugins:
.. attribute:: relcalcclass
The class in the module that is the relationcalc class
.. attribute:: lang_list
List of languages this plugin handles
Attributes for REPORT plugins:
.. attribute:: require_active
        Bool, if the report requires an active person to be set or not
.. attribute:: reportclass
The class in the module that is the report class
.. attribute:: report_modes
The report modes: list of REPORT_MODE_GUI ,REPORT_MODE_BKI,REPORT_MODE_CLI
Attributes for REPORT and TOOL and QUICKREPORT and VIEW plugins
.. attribute:: category
Or the report category the plugin belongs to, default=CATEGORY_TEXT
or the tool category a plugin belongs to, default=TOOL_UTILS
or the quickreport category a plugin belongs to, default=CATEGORY_QR_PERSON
or the view category a plugin belongs to,
default=("Miscellaneous", _("Miscellaneous"))
Attributes for REPORT and TOOL and DOCGEN plugins
.. attribute:: optionclass
The class in the module that is the option class
Attributes for TOOL plugins
.. attribute:: toolclass
The class in the module that is the tool class
.. attribute:: tool_modes
The tool modes: list of TOOL_MODE_GUI, TOOL_MODE_CLI
Attributes for DOCGEN plugins
.. attribute :: docclass
The class in the module that is the BaseDoc defined
.. attribute :: paper
bool, Indicates whether the plugin uses paper or not, default=True
.. attribute :: style
bool, Indicates whether the plugin uses styles or not, default=True
Attribute for DOCGEN, EXPORT plugins
.. attribute :: extension
str, The file extension to use for output produced by the docgen/export,
default=''
Attributes for QUICKREPORT plugins
.. attribute:: runfunc
The function that executes the quick report
Attributes for MAPSERVICE plugins
.. attribute:: mapservice
The class in the module that is a mapservice
Attributes for EXPORT plugins
.. attribute:: export_function
Function that produces the export
.. attribute:: export_options
Class to set options
.. attribute:: export_options_title
Title for the option page
Attributes for IMPORT plugins
.. attribute:: import_function
Function that starts an import
Attributes for GRAMPLET plugins
.. attribute:: gramplet
The function or class that defines the gramplet.
.. attribute:: height
The height the gramplet should have in a column on GrampletView,
default = 200
.. attribute:: detached_height
The height the gramplet should have detached, default 300
.. attribute:: detached_width
The width the gramplet should have detached, default 400
.. attribute:: expand
If the attributed should be expanded on start, default False
.. attribute:: gramplet_title
Title to use for the gramplet, default = _('Gramplet')
.. attribute:: navtypes
Navigation types that the gramplet is appropriate for, default = []
.. attribute:: help_url
The URL where documentation for the URL can be found
Attributes for VIEW plugins
.. attribute:: viewclass
A class of type ViewCreator that holds the needed info of the
view to be created: icon, viewclass that derives from pageview, ...
.. attribute:: stock_icon
The icon in the toolbar or sidebar used to select the view
Attributes for SIDEBAR plugins
.. attribute:: sidebarclass
The class that defines the sidebar.
.. attribute:: menu_label
        A label to use on the selection menu.
Attributes for VIEW and SIDEBAR plugins
.. attribute:: order
order can be START or END. Default is END. For END, on registering,
the plugin is appended to the list of plugins. If START, then the
plugin is prepended. Only set START if you want a plugin to be the
first in the order of plugins
Attributes for DATABASE plugins
.. attribute:: databaseclass
The class in the module that is the database class
.. attribute:: reset_system
Boolean to indicate that the system (sys.modules) should
be reset.
Attributes for RULE plugins
.. attribute:: namespace
The class (Person, Event, Media, etc.) the rule applies to.
.. attribute:: ruleclass
The exact class name of the rule; ex: HasSourceParameter
"""
def __init__(self):
#read/write attribute
self.directory = None
#base attributes
self._id = None
self._name = None
self._name_accell = None
self._version = None
self._gramps_target_version = None
self._description = None
self._status = UNSTABLE
self._fname = None
self._fpath = None
self._ptype = None
self._authors = []
self._authors_email = []
self._supported = True
self._load_on_reg = False
self._icons = []
self._icondir = None
self._depends_on = []
self._include_in_listing = True
#derived var
self.mod_name = None
#RELCALC attr
self._relcalcclass = None
self._lang_list = None
#REPORT attr
self._reportclass = None
self._require_active = True
self._report_modes = [REPORT_MODE_GUI]
#REPORT and TOOL and GENERAL attr
self._category = None
#REPORT and TOOL attr
self._optionclass = None
#TOOL attr
self._toolclass = None
self._tool_modes = [TOOL_MODE_GUI]
#DOCGEN attr
self._paper = True
self._style = True
self._extension = ''
#QUICKREPORT attr
self._runfunc = None
#MAPSERVICE attr
self._mapservice = None
#EXPORT attr
self._export_function = None
self._export_options = None
self._export_options_title = ''
#IMPORT attr
self._import_function = None
#GRAMPLET attr
self._gramplet = None
self._height = 200
self._detached_height = 300
self._detached_width = 400
self._expand = False
self._gramplet_title = _('Gramplet')
self._navtypes = []
self._orientation = None
self._help_url = None
#VIEW attr
self._viewclass = None
self._stock_icon = None
#SIDEBAR attr
self._sidebarclass = None
self._menu_label = ''
#VIEW and SIDEBAR attr
self._order = END
#DATABASE attr
self._databaseclass = None
self._reset_system = False
#GENERAL attr
self._data = []
self._process = None
#RULE attr
self._ruleclass = None
self._namespace = None
def _set_id(self, id):
self._id = id
def _get_id(self):
return self._id
def _set_name(self, name):
self._name = name
def _get_name(self):
return self._name
def _set_name_accell(self, name):
self._name_accell = name
def _get_name_accell(self):
if self._name_accell is None:
return self._name
else:
return self._name_accell
def _set_description(self, description):
self._description = description
def _get_description(self):
return self._description
def _set_version(self, version):
self._version = version
def _get_version(self):
return self._version
def _set_gramps_target_version(self, version):
self._gramps_target_version = version
def _get_gramps_target_version(self):
return self._gramps_target_version
def _set_status(self, status):
if status not in STATUS:
raise ValueError('plugin status cannot be %s' % str(status))
self._status = status
def _get_status(self):
return self._status
def _set_fname(self, fname):
self._fname = fname
def _get_fname(self):
return self._fname
def _set_fpath(self, fpath):
self._fpath = fpath
def _get_fpath(self):
return self._fpath
def _set_ptype(self, ptype):
if ptype not in PTYPE:
raise ValueError('Plugin type cannot be %s' % str(ptype))
elif self._ptype is not None:
raise ValueError('Plugin type may not be changed')
self._ptype = ptype
if self._ptype == REPORT:
self._category = CATEGORY_TEXT
elif self._ptype == TOOL:
self._category = TOOL_UTILS
elif self._ptype == QUICKREPORT:
self._category = CATEGORY_QR_PERSON
elif self._ptype == VIEW:
self._category = ("Miscellaneous", _("Miscellaneous"))
#if self._ptype == DOCGEN:
# self._load_on_reg = True
def _get_ptype(self):
return self._ptype
def _set_authors(self, authors):
if not authors or not isinstance(authors, list):
return
self._authors = authors
def _get_authors(self):
return self._authors
def _set_authors_email(self, authors_email):
if not authors_email or not isinstance(authors_email, list):
return
self._authors_email = authors_email
def _get_authors_email(self):
return self._authors_email
def _set_supported(self, supported):
if not isinstance(supported, bool):
raise ValueError('Plugin must have supported=True or False')
self._supported = supported
def _get_supported(self):
return self._supported
def _set_load_on_reg(self, load_on_reg):
if not isinstance(load_on_reg, bool):
raise ValueError('Plugin must have load_on_reg=True or False')
self._load_on_reg = load_on_reg
def _get_load_on_reg(self):
return self._load_on_reg
def _get_icons(self):
return self._icons
def _set_icons(self, icons):
if not isinstance(icons, list):
raise ValueError('Plugin must have icons as a list')
self._icons = icons
def _get_icondir(self):
return self._icondir
def _set_icondir(self, icondir):
self._icondir = icondir
def _get_depends_on(self):
return self._depends_on
def _set_depends_on(self, depends):
if not isinstance(depends, list):
raise ValueError('Plugin must have depends_on as a list')
self._depends_on = depends
def _get_include_in_listing(self):
return self._include_in_listing
def _set_include_in_listing(self, include):
if not isinstance(include, bool):
raise ValueError('Plugin must have include_in_listing as a bool')
self._include_in_listing = include
id = property(_get_id, _set_id)
name = property(_get_name, _set_name)
name_accell = property(_get_name_accell, _set_name_accell)
description = property(_get_description, _set_description)
version = property(_get_version, _set_version)
gramps_target_version = property(_get_gramps_target_version,
_set_gramps_target_version)
status = property(_get_status, _set_status)
fname = property(_get_fname, _set_fname)
fpath = property(_get_fpath, _set_fpath)
ptype = property(_get_ptype, _set_ptype)
authors = property(_get_authors, _set_authors)
authors_email = property(_get_authors_email, _set_authors_email)
supported = property(_get_supported, _set_supported)
load_on_reg = property(_get_load_on_reg, _set_load_on_reg)
icons = property(_get_icons, _set_icons)
icondir = property(_get_icondir, _set_icondir)
depends_on = property(_get_depends_on, _set_depends_on)
include_in_listing = property(_get_include_in_listing, _set_include_in_listing)
def statustext(self):
return STATUSTEXT[self.status]
#type specific plugin attributes
#RELCALC attributes
def _set_relcalcclass(self, relcalcclass):
if not self._ptype == RELCALC:
raise ValueError('relcalcclass may only be set for RELCALC plugins')
self._relcalcclass = relcalcclass
def _get_relcalcclass(self):
return self._relcalcclass
def _set_lang_list(self, lang_list):
if not self._ptype == RELCALC:
            raise ValueError('lang_list may only be set for RELCALC plugins')
self._lang_list = lang_list
def _get_lang_list(self):
return self._lang_list
relcalcclass = property(_get_relcalcclass, _set_relcalcclass)
lang_list = property(_get_lang_list, _set_lang_list)
#REPORT attributes
def _set_require_active(self, require_active):
if not self._ptype == REPORT:
raise ValueError('require_active may only be set for REPORT plugins')
if not isinstance(require_active, bool):
raise ValueError('Report must have require_active=True or False')
self._require_active = require_active
def _get_require_active(self):
return self._require_active
def _set_reportclass(self, reportclass):
if not self._ptype == REPORT:
raise ValueError('reportclass may only be set for REPORT plugins')
self._reportclass = reportclass
def _get_reportclass(self):
return self._reportclass
def _set_report_modes(self, report_modes):
if not self._ptype == REPORT:
raise ValueError('report_modes may only be set for REPORT plugins')
if not isinstance(report_modes, list):
raise ValueError('report_modes must be a list')
self._report_modes = [x for x in report_modes if x in REPORT_MODES]
if not self._report_modes:
raise ValueError('report_modes not a valid list of modes')
def _get_report_modes(self):
return self._report_modes
#REPORT or TOOL or QUICKREPORT or GENERAL attributes
def _set_category(self, category):
if self._ptype not in [REPORT, TOOL, QUICKREPORT, VIEW, GENERAL]:
raise ValueError('category may only be set for ' \
'REPORT/TOOL/QUICKREPORT/VIEW/GENERAL plugins')
self._category = category
def _get_category(self):
return self._category
#REPORT OR TOOL attributes
def _set_optionclass(self, optionclass):
if not (self._ptype == REPORT or self.ptype == TOOL or self._ptype == DOCGEN):
raise ValueError('optionclass may only be set for REPORT/TOOL/DOCGEN plugins')
self._optionclass = optionclass
def _get_optionclass(self):
return self._optionclass
#TOOL attributes
def _set_toolclass(self, toolclass):
if not self._ptype == TOOL:
raise ValueError('toolclass may only be set for TOOL plugins')
self._toolclass = toolclass
def _get_toolclass(self):
return self._toolclass
def _set_tool_modes(self, tool_modes):
if not self._ptype == TOOL:
raise ValueError('tool_modes may only be set for TOOL plugins')
if not isinstance(tool_modes, list):
raise ValueError('tool_modes must be a list')
self._tool_modes = [x for x in tool_modes if x in TOOL_MODES]
if not self._tool_modes:
raise ValueError('tool_modes not a valid list of modes')
def _get_tool_modes(self):
return self._tool_modes
require_active = property(_get_require_active, _set_require_active)
reportclass = property(_get_reportclass, _set_reportclass)
report_modes = property(_get_report_modes, _set_report_modes)
category = property(_get_category, _set_category)
optionclass = property(_get_optionclass, _set_optionclass)
toolclass = property(_get_toolclass, _set_toolclass)
tool_modes = property(_get_tool_modes, _set_tool_modes)
#DOCGEN attributes
def _set_paper(self, paper):
if not self._ptype == DOCGEN:
raise ValueError('paper may only be set for DOCGEN plugins')
if not isinstance(paper, bool):
raise ValueError('Plugin must have paper=True or False')
self._paper = paper
def _get_paper(self):
return self._paper
def _set_style(self, style):
if not self._ptype == DOCGEN:
raise ValueError('style may only be set for DOCGEN plugins')
if not isinstance(style, bool):
raise ValueError('Plugin must have style=True or False')
self._style = style
def _get_style(self):
return self._style
def _set_extension(self, extension):
if not (self._ptype == DOCGEN or self._ptype == EXPORT
or self._ptype == IMPORT):
raise ValueError('extension may only be set for DOCGEN/EXPORT/'\
'IMPORT plugins')
self._extension = extension
def _get_extension(self):
return self._extension
paper = property(_get_paper, _set_paper)
style = property(_get_style, _set_style)
extension = property(_get_extension, _set_extension)
#QUICKREPORT attributes
def _set_runfunc(self, runfunc):
if not self._ptype == QUICKREPORT:
raise ValueError('runfunc may only be set for QUICKREPORT plugins')
self._runfunc = runfunc
def _get_runfunc(self):
return self._runfunc
runfunc = property(_get_runfunc, _set_runfunc)
#MAPSERVICE attributes
def _set_mapservice(self, mapservice):
if not self._ptype == MAPSERVICE:
raise ValueError('mapservice may only be set for MAPSERVICE plugins')
self._mapservice = mapservice
def _get_mapservice(self):
return self._mapservice
mapservice = property(_get_mapservice, _set_mapservice)
#EXPORT attributes
def _set_export_function(self, export_function):
if not self._ptype == EXPORT:
raise ValueError('export_function may only be set for EXPORT plugins')
self._export_function = export_function
def _get_export_function(self):
return self._export_function
def _set_export_options(self, export_options):
if not self._ptype == EXPORT:
raise ValueError('export_options may only be set for EXPORT plugins')
self._export_options = export_options
def _get_export_options(self):
return self._export_options
def _set_export_options_title(self, export_options_title):
if not self._ptype == EXPORT:
raise ValueError('export_options_title may only be set for EXPORT plugins')
self._export_options_title = export_options_title
def _get_export_options_title(self):
return self._export_options_title
export_function = property(_get_export_function, _set_export_function)
export_options = property(_get_export_options, _set_export_options)
export_options_title = property(_get_export_options_title,
_set_export_options_title)
#IMPORT attributes
def _set_import_function(self, import_function):
if not self._ptype == IMPORT:
raise ValueError('import_function may only be set for IMPORT plugins')
self._import_function = import_function
def _get_import_function(self):
return self._import_function
import_function = property(_get_import_function, _set_import_function)
#GRAMPLET attributes
def _set_gramplet(self, gramplet):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet may only be set for GRAMPLET plugins')
self._gramplet = gramplet
def _get_gramplet(self):
return self._gramplet
def _set_height(self, height):
if not self._ptype == GRAMPLET:
raise ValueError('height may only be set for GRAMPLET plugins')
if not isinstance(height, int):
raise ValueError('Plugin must have height an integer')
self._height = height
def _get_height(self):
return self._height
def _set_detached_height(self, detached_height):
if not self._ptype == GRAMPLET:
raise ValueError('detached_height may only be set for GRAMPLET plugins')
if not isinstance(detached_height, int):
raise ValueError('Plugin must have detached_height an integer')
self._detached_height = detached_height
def _get_detached_height(self):
return self._detached_height
def _set_detached_width(self, detached_width):
if not self._ptype == GRAMPLET:
raise ValueError('detached_width may only be set for GRAMPLET plugins')
if not isinstance(detached_width, int):
raise ValueError('Plugin must have detached_width an integer')
self._detached_width = detached_width
def _get_detached_width(self):
return self._detached_width
def _set_expand(self, expand):
if not self._ptype == GRAMPLET:
raise ValueError('expand may only be set for GRAMPLET plugins')
if not isinstance(expand, bool):
raise ValueError('Plugin must have expand as a bool')
self._expand = expand
def _get_expand(self):
return self._expand
def _set_gramplet_title(self, gramplet_title):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet_title may only be set for GRAMPLET plugins')
if not isinstance(gramplet_title, str):
raise ValueError('gramplet_title is type %s, string or unicode required' % type(gramplet_title))
self._gramplet_title = gramplet_title
def _get_gramplet_title(self):
return self._gramplet_title
def _set_help_url(self, help_url):
if not self._ptype == GRAMPLET:
raise ValueError('help_url may only be set for GRAMPLET plugins')
self._help_url = help_url
def _get_help_url(self):
return self._help_url
def _set_navtypes(self, navtypes):
if not self._ptype == GRAMPLET:
raise ValueError('navtypes may only be set for GRAMPLET plugins')
self._navtypes = navtypes
def _get_navtypes(self):
return self._navtypes
def _set_orientation(self, orientation):
if not self._ptype == GRAMPLET:
raise ValueError('orientation may only be set for GRAMPLET plugins')
self._orientation = orientation
def _get_orientation(self):
return self._orientation
gramplet = property(_get_gramplet, _set_gramplet)
height = property(_get_height, _set_height)
detached_height = property(_get_detached_height, _set_detached_height)
detached_width = property(_get_detached_width, _set_detached_width)
expand = property(_get_expand, _set_expand)
gramplet_title = property(_get_gramplet_title, _set_gramplet_title)
navtypes = property(_get_navtypes, _set_navtypes)
orientation = property(_get_orientation, _set_orientation)
help_url = property(_get_help_url, _set_help_url)
def _set_viewclass(self, viewclass):
if not self._ptype == VIEW:
raise ValueError('viewclass may only be set for VIEW plugins')
self._viewclass = viewclass
def _get_viewclass(self):
return self._viewclass
def _set_stock_icon(self, stock_icon):
if not self._ptype == VIEW:
raise ValueError('stock_icon may only be set for VIEW plugins')
self._stock_icon = stock_icon
def _get_stock_icon(self):
return self._stock_icon
viewclass = property(_get_viewclass, _set_viewclass)
stock_icon = property(_get_stock_icon, _set_stock_icon)
#SIDEBAR attributes
def _set_sidebarclass(self, sidebarclass):
if not self._ptype == SIDEBAR:
raise ValueError('sidebarclass may only be set for SIDEBAR plugins')
self._sidebarclass = sidebarclass
def _get_sidebarclass(self):
return self._sidebarclass
def _set_menu_label(self, menu_label):
if not self._ptype == SIDEBAR:
raise ValueError('menu_label may only be set for SIDEBAR plugins')
self._menu_label = menu_label
def _get_menu_label(self):
return self._menu_label
sidebarclass = property(_get_sidebarclass, _set_sidebarclass)
menu_label = property(_get_menu_label, _set_menu_label)
#VIEW and SIDEBAR attributes
def _set_order(self, order):
if not self._ptype in (VIEW, SIDEBAR):
raise ValueError('order may only be set for VIEW and SIDEBAR plugins')
self._order = order
def _get_order(self):
return self._order
order = property(_get_order, _set_order)
#DATABASE attributes
def _set_databaseclass(self, databaseclass):
if not self._ptype == DATABASE:
raise ValueError('databaseclass may only be set for DATABASE plugins')
self._databaseclass = databaseclass
def _get_databaseclass(self):
return self._databaseclass
def _set_reset_system(self, reset_system):
if not self._ptype == DATABASE:
raise ValueError('reset_system may only be set for DATABASE plugins')
self._reset_system = reset_system
def _get_reset_system(self):
return self._reset_system
databaseclass = property(_get_databaseclass, _set_databaseclass)
reset_system = property(_get_reset_system, _set_reset_system)
#GENERAL attr
def _set_data(self, data):
if not self._ptype in (GENERAL,):
raise ValueError('data may only be set for GENERAL plugins')
self._data = data
def _get_data(self):
return self._data
def _set_process(self, process):
if not self._ptype in (GENERAL,):
raise ValueError('process may only be set for GENERAL plugins')
self._process = process
def _get_process(self):
return self._process
data = property(_get_data, _set_data)
process = property(_get_process, _set_process)
#RULE attr
def _set_ruleclass(self, data):
if self._ptype != RULE:
raise ValueError('ruleclass may only be set for RULE plugins')
self._ruleclass = data
def _get_ruleclass(self):
return self._ruleclass
def _set_namespace(self, data):
if self._ptype != RULE:
raise ValueError('namespace may only be set for RULE plugins')
self._namespace = data
def _get_namespace(self):
return self._namespace
ruleclass = property(_get_ruleclass, _set_ruleclass)
namespace = property(_get_namespace, _set_namespace)
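# Illustrative sketch (added for clarity, not part of the original module):
# PluginData enforces its type-specific attributes through the properties
# above. The values used below are hypothetical.
#
#   pd = PluginData()
#   pd.ptype = TOOL              # also presets category to TOOL_UTILS
#   pd.toolclass = 'MyTool'      # accepted, because ptype is TOOL
#   pd.reportclass = 'MyReport'  # raises ValueError: only valid for REPORT plugins
#   pd.ptype = REPORT            # raises ValueError: ptype may not be changed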
def newplugin():
"""
Function to create a new plugindata object, add it to list of
registered plugins
:returns: a newly created PluginData which is already part of the register
"""
gpr = PluginRegister.get_instance()
pgd = PluginData()
gpr.add_plugindata(pgd)
return pgd
def register(ptype, **kwargs):
"""
Convenience function to register a new plugin using a dictionary as input.
The register functions will call newplugin() function, and use the
dictionary kwargs to assign data to the PluginData newplugin() created,
as in: plugindata.key = data
:param ptype: the plugin type, one of REPORT, TOOL, ...
:param kwargs: dictionary with keys attributes of the plugin, and data
the value
:returns: a newly created PluginData which is already part of the register
and which has kwargs assigned as attributes
"""
plg = newplugin()
plg.ptype = ptype
for prop in kwargs:
#check it is a valid attribute with getattr
getattr(plg, prop)
#set the value
setattr(plg, prop, kwargs[prop])
return plg
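# Illustrative sketch (added for clarity, not part of this module): what a
# plugin registration file (*.gpr.py) typically contains. The id, file name
# and class names below are hypothetical. Such a file is executed by
# PluginRegister.scan_dir() inside the environment built by make_environment(),
# so register(), TOOL, STABLE, TOOL_UTILS, ... are available there as globals.
#
#   register(
#       TOOL,
#       id='mytool',
#       name=_('My Tool'),
#       description=_('An example tool plugin'),
#       version='1.0',
#       gramps_target_version='5.1',
#       status=STABLE,
#       fname='mytool.py',
#       authors=['A. Author'],
#       authors_email=['[email protected]'],
#       category=TOOL_UTILS,
#       toolclass='MyTool',
#       optionclass='MyToolOptions',
#       tool_modes=[TOOL_MODE_GUI, TOOL_MODE_CLI],
#       )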
def make_environment(**kwargs):
env = {
'newplugin': newplugin,
'register': register,
'STABLE': STABLE,
'UNSTABLE': UNSTABLE,
'REPORT': REPORT,
'QUICKREPORT': QUICKREPORT,
'TOOL': TOOL,
'IMPORT': IMPORT,
'EXPORT': EXPORT,
'DOCGEN': DOCGEN,
'GENERAL': GENERAL,
'RULE': RULE,
'MAPSERVICE': MAPSERVICE,
'VIEW': VIEW,
'RELCALC': RELCALC,
'GRAMPLET': GRAMPLET,
'SIDEBAR': SIDEBAR,
'CATEGORY_TEXT': CATEGORY_TEXT,
'CATEGORY_DRAW': CATEGORY_DRAW,
'CATEGORY_CODE': CATEGORY_CODE,
'CATEGORY_WEB': CATEGORY_WEB,
'CATEGORY_BOOK': CATEGORY_BOOK,
'CATEGORY_GRAPHVIZ': CATEGORY_GRAPHVIZ,
'CATEGORY_TREE': CATEGORY_TREE,
'TOOL_DEBUG': TOOL_DEBUG,
'TOOL_ANAL': TOOL_ANAL,
'TOOL_DBPROC': TOOL_DBPROC,
'TOOL_DBFIX': TOOL_DBFIX,
'TOOL_REVCTL': TOOL_REVCTL,
'TOOL_UTILS': TOOL_UTILS,
'CATEGORY_QR_MISC': CATEGORY_QR_MISC,
'CATEGORY_QR_PERSON': CATEGORY_QR_PERSON,
'CATEGORY_QR_FAMILY': CATEGORY_QR_FAMILY,
'CATEGORY_QR_EVENT': CATEGORY_QR_EVENT,
'CATEGORY_QR_SOURCE': CATEGORY_QR_SOURCE,
'CATEGORY_QR_CITATION': CATEGORY_QR_CITATION,
'CATEGORY_QR_SOURCE_OR_CITATION': CATEGORY_QR_SOURCE_OR_CITATION,
'CATEGORY_QR_PLACE': CATEGORY_QR_PLACE,
'CATEGORY_QR_MEDIA': CATEGORY_QR_MEDIA,
'CATEGORY_QR_REPOSITORY': CATEGORY_QR_REPOSITORY,
'CATEGORY_QR_NOTE': CATEGORY_QR_NOTE,
'CATEGORY_QR_DATE': CATEGORY_QR_DATE,
'REPORT_MODE_GUI': REPORT_MODE_GUI,
'REPORT_MODE_BKI': REPORT_MODE_BKI,
'REPORT_MODE_CLI': REPORT_MODE_CLI,
'TOOL_MODE_GUI': TOOL_MODE_GUI,
'TOOL_MODE_CLI': TOOL_MODE_CLI,
'DATABASE': DATABASE,
'GRAMPSVERSION': GRAMPSVERSION,
'START': START,
'END': END,
'IMAGE_DIR': IMAGE_DIR,
}
env.update(kwargs)
return env
#-------------------------------------------------------------------------
#
# PluginRegister
#
#-------------------------------------------------------------------------
class PluginRegister:
"""
PluginRegister is a Singleton which holds plugin data
.. attribute : stable_only
Bool, include stable plugins only or not. Default True
"""
__instance = None
def get_instance():
""" Use this function to get the instance of the PluginRegister """
if PluginRegister.__instance is None:
PluginRegister.__instance = 1 # Set to 1 for __init__()
PluginRegister.__instance = PluginRegister()
return PluginRegister.__instance
get_instance = staticmethod(get_instance)
def __init__(self):
""" This function should only be run once by get_instance() """
if PluginRegister.__instance != 1:
raise Exception("This class is a singleton. "
"Use the get_instance() method")
self.stable_only = True
if __debug__:
self.stable_only = False
self.__plugindata = []
self.__id_to_pdata = {}
def add_plugindata(self, plugindata):
""" This is used to add an entry to the registration list. The way it
is used, this entry is not yet filled in, so we cannot use the id to
add to the __id_to_pdata dict at this time. """
self.__plugindata.append(plugindata)
def scan_dir(self, dir, filenames, uistate=None):
"""
The dir name will be scanned for plugin registration code, which will
be loaded in :class:`PluginData` objects if they satisfy some checks.
:returns: A list with :class:`PluginData` objects
"""
# if the directory does not exist, do nothing
if not (os.path.isdir(dir) or os.path.islink(dir)):
return []
ext = r".gpr.py"
extlen = -len(ext)
pymod = re.compile(r"^(.*)\.py$")
for filename in filenames:
if not filename[extlen:] == ext:
continue
lenpd = len(self.__plugindata)
full_filename = os.path.join(dir, filename)
try:
with open(full_filename, "r", encoding='utf-8') as fd:
stream = fd.read()
except Exception as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
continue
if os.path.exists(os.path.join(os.path.dirname(full_filename),
'locale')):
try:
local_gettext = glocale.get_addon_translator(full_filename).gettext
except ValueError:
print(_('WARNING: Plugin %(plugin_name)s has no translation'
' for any of your configured languages, using US'
' English instead') %
{'plugin_name' : filename.split('.')[0] })
local_gettext = glocale.translation.gettext
else:
local_gettext = glocale.translation.gettext
try:
exec (compile(stream, filename, 'exec'),
make_environment(_=local_gettext), {'uistate': uistate})
for pdata in self.__plugindata[lenpd:]:
# should not be duplicate IDs in different plugins
assert pdata.id not in self.__id_to_pdata
# if pdata.id in self.__id_to_pdata:
# print("Error: %s is duplicated!" % pdata.id)
self.__id_to_pdata[pdata.id] = pdata
except ValueError as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
self.__plugindata = self.__plugindata[:lenpd]
except:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print("".join(traceback.format_exception(*sys.exc_info())))
self.__plugindata = self.__plugindata[:lenpd]
#check if:
# 1. plugin exists, if not remove, otherwise set module name
# 2. plugin not stable, if stable_only=True, remove
# 3. TOOL_DEBUG only if __debug__ True
rmlist = []
ind = lenpd-1
for plugin in self.__plugindata[lenpd:]:
#LOG.warning("\nPlugin scanned %s at registration", plugin.id)
ind += 1
plugin.directory = dir
if not valid_plugin_version(plugin.gramps_target_version):
print(_('ERROR: Plugin file %(filename)s has a version of '
'"%(gramps_target_version)s" which is invalid for Gramps '
'"%(gramps_version)s".' %
{'filename': os.path.join(dir, plugin.fname),
'gramps_version': GRAMPSVERSION,
'gramps_target_version': plugin.gramps_target_version,}
))
rmlist.append(ind)
continue
if not plugin.status == STABLE and self.stable_only:
rmlist.append(ind)
continue
if plugin.ptype == TOOL and plugin.category == TOOL_DEBUG \
and not __debug__:
rmlist.append(ind)
continue
if plugin.fname is None:
continue
match = pymod.match(plugin.fname)
if not match:
rmlist.append(ind)
print(_('ERROR: Wrong python file %(filename)s in register file '
'%(regfile)s') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
if not os.path.isfile(os.path.join(dir, plugin.fname)):
rmlist.append(ind)
print(_('ERROR: Python file %(filename)s in register file '
'%(regfile)s does not exist') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
module = match.groups()[0]
plugin.mod_name = module
plugin.fpath = dir
#LOG.warning("\nPlugin added %s at registration", plugin.id)
rmlist.reverse()
for ind in rmlist:
del self.__id_to_pdata[self.__plugindata[ind].id]
del self.__plugindata[ind]
def get_plugin(self, id):
"""
Return the :class:`PluginData` for the plugin with id
"""
assert(len(self.__id_to_pdata) == len(self.__plugindata))
# if len(self.__id_to_pdata) != len(self.__plugindata):
# print(len(self.__id_to_pdata), len(self.__plugindata))
return self.__id_to_pdata.get(id, None)
def type_plugins(self, ptype):
"""
Return a list of :class:`PluginData` that are of type ptype
"""
return [x for x in self.__plugindata if x.ptype == ptype]
def report_plugins(self, gui=True):
"""
Return a list of gui or cli :class:`PluginData` that are of type REPORT
:param gui: bool, if True then gui plugin, otherwise cli plugin
"""
if gui:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_GUI
in x.report_modes]
else:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_CLI
in x.report_modes]
def tool_plugins(self, gui=True):
"""
Return a list of :class:`PluginData` that are of type TOOL
"""
if gui:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_GUI
in x.tool_modes]
else:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_CLI
in x.tool_modes]
def bookitem_plugins(self):
"""
        Return a list of REPORT :class:`PluginData` that can be used as
        bookitem
"""
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_BKI
in x.report_modes]
def quickreport_plugins(self):
"""
Return a list of :class:`PluginData` that are of type QUICKREPORT
"""
return self.type_plugins(QUICKREPORT)
def import_plugins(self):
"""
Return a list of :class:`PluginData` that are of type IMPORT
"""
return self.type_plugins(IMPORT)
def export_plugins(self):
"""
Return a list of :class:`PluginData` that are of type EXPORT
"""
return self.type_plugins(EXPORT)
def docgen_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DOCGEN
"""
return self.type_plugins(DOCGEN)
def general_plugins(self, category=None):
"""
Return a list of :class:`PluginData` that are of type GENERAL
"""
plugins = self.type_plugins(GENERAL)
if category:
return [plugin for plugin in plugins
if plugin.category == category]
return plugins
def mapservice_plugins(self):
"""
Return a list of :class:`PluginData` that are of type MAPSERVICE
"""
return self.type_plugins(MAPSERVICE)
def view_plugins(self):
"""
Return a list of :class:`PluginData` that are of type VIEW
"""
return self.type_plugins(VIEW)
def relcalc_plugins(self):
"""
Return a list of :class:`PluginData` that are of type RELCALC
"""
return self.type_plugins(RELCALC)
def gramplet_plugins(self):
"""
Return a list of :class:`PluginData` that are of type GRAMPLET
"""
return self.type_plugins(GRAMPLET)
def sidebar_plugins(self):
"""
Return a list of :class:`PluginData` that are of type SIDEBAR
"""
return self.type_plugins(SIDEBAR)
def database_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DATABASE
"""
return self.type_plugins(DATABASE)
def rule_plugins(self):
"""
Return a list of :class:`PluginData` that are of type RULE
"""
return self.type_plugins(RULE)
def filter_load_on_reg(self):
"""
Return a list of :class:`PluginData` that have load_on_reg == True
"""
return [x for x in self.__plugindata if x.load_on_reg == True]
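# Illustrative sketch (added for clarity, not part of the original module):
# typical use of the register. The plugin directory below is hypothetical.
if __name__ == '__main__':
    gpr = PluginRegister.get_instance()
    plugin_dir = '/usr/share/gramps/plugins'
    if os.path.isdir(plugin_dir):
        # scan_dir() takes the directory plus the file names inside it; only
        # *.gpr.py files are actually considered.
        gpr.scan_dir(plugin_dir, os.listdir(plugin_dir))
    for pdata in gpr.report_plugins(gui=False):
        print(pdata.id, pdata.name, pdata.version)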
| gpl-2.0 | 8,956,402,882,616,859,000 | 33.972779 | 108 | 0.599259 | false | 4.145186 | false | false | false |
openatx/uiautomator2 | uiautomator2/ext/info/__init__.py | 1 | 2645 | import json
import os
import datetime
import atexit
from uiautomator2 import UIAutomatorServer
from uiautomator2.ext.info import conf
class Info(object):
def __init__(self, driver, package_name=None):
self._driver = driver
self.output_dir = 'report/'
self.pkg_name = package_name
self.test_info = {}
atexit.register(self.write_info)
def read_file(self, filename):
try:
with open(self.output_dir + filename, 'r') as f:
return f.read()
except IOError as e:
print(os.strerror(e.errno))
def get_basic_info(self):
device_info = self._driver.device_info
app_info = self._driver.app_info(self.pkg_name)
# query for exact model info
if device_info['model'] in conf.phones:
device_info['model'] = conf.phones[device_info['model']]
self.test_info['basic_info'] = {'device_info': device_info, 'app_info': app_info}
def get_app_icon(self):
icon = self._driver.app_icon(self.pkg_name)
icon.save(self.output_dir + 'icon.png')
def get_record_info(self):
record = json.loads(self.read_file('record.json'))
steps = len(record['steps'])
start_time = datetime.datetime.strptime(record['steps'][0]['time'],
'%H:%M:%S')
end_time = datetime.datetime.strptime(
record['steps'][steps - 1]['time'], '%H:%M:%S')
total_time = end_time - start_time
self.test_info['record_info'] = {
'steps': steps,
'start_time': record['steps'][0]['time'],
'total_time': str(total_time)
}
def get_result_info(self):
log = self.read_file('log.txt')
trace_list = []
if log:
log = log.splitlines()
for i in range(len(log)):
if 'Traceback' in log[i]:
new_trace = log[i]
i += 1
while 'File' in log[i]:
new_trace += '\n' + log[i]
i += 1
new_trace += '\n' + log[i]
trace_list.append(new_trace)
self.test_info['trace_info'] = {
'trace_count': len(trace_list),
'trace_list': trace_list
}
def start(self):
self.get_basic_info()
self.get_app_icon()
def write_info(self):
# self.get_basic_info()
self.get_record_info()
self.get_result_info()
with open(self.output_dir + 'info.json', 'wb') as f:
f.write(json.dumps(self.test_info))
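# Illustrative sketch (added for clarity, not part of the original module):
# typical wiring of the Info extension. The package name and output directory
# are hypothetical, and the driver is assumed to come from uiautomator2's
# connect().
#
#   import uiautomator2 as u2
#   from uiautomator2.ext.info import Info
#
#   d = u2.connect()
#   info = Info(d, package_name='com.example.app')
#   info.output_dir = 'report/'
#   info.start()   # collects device/app info and saves the app icon
#   # ... run the recorded steps that produce report/record.json and log.txt ...
#   # write_info() is registered with atexit, so info.json is written on exit.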
| mit | 3,852,022,439,132,349,000 | 32.481013 | 89 | 0.519471 | false | 3.668516 | false | false | false |
Yubico/yubiadmin | yubiadmin/apps/sys.py | 1 | 5099 | # Copyright (c) 2013 Yubico AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import subprocess
from webob import Response
from threading import Timer
from yubiadmin.util.app import App, render
from yubiadmin.util.system import run
from yubiadmin.apps.dashboard import panel
__all__ = [
'app'
]
UPGRADE_LOG = "/var/tmp/yubix-upgrade"
def get_updates():
s, o = run("apt-get upgrade -s | awk -F'[][() ]+' '/^Inst/{print $2}'")
packages = o.splitlines()
return packages
def needs_restart():
return os.path.isfile('/var/run/reboot-required')
def reboot():
run('reboot')
class Updater(object):
def __init__(self):
self.proc = subprocess.Popen('DEBIAN_FRONTEND=noninteractive '
'apt-get -y dist-upgrade -o '
'Dpkg::Options::="--force-confdef" -o '
'Dpkg::Options::="--force-confold" | '
'tee %s' % UPGRADE_LOG,
stdout=subprocess.PIPE, shell=True)
def __iter__(self):
yield """
<script type="text/javascript">
function reload() {
window.location.replace('/sys');
}
window.onload = function() {
setTimeout(reload, 10000);
}
</script>
<strong>Performing update, this may take a while...</strong><br/>
<pre>
"""
while True:
line = self.proc.stdout.readline()
if line:
yield line
else:
yield '</pre><br /><strong>Update complete!</strong>'
yield '<script type="text/javascript">reload();</script>'
break
class SystemApp(App):
"""
YubiX System
"""
sections = ['general']
priority = 30
@property
def disabled(self):
#return not os.path.isdir('/usr/share/yubix')
return False
@property
def hidden(self):
return self.disabled
@property
def dash_panels(self):
if needs_restart():
yield panel('System', 'System restart required', level='danger')
updates = len(get_updates())
if updates > 0:
yield panel(
'System',
'There are <strong>%d</strong> updates available' % updates,
'/%s/general' % self.name,
'info'
)
_, time = run('date "+%a, %d %b %Y %H:%M"')
_, result = run('uptime')
rest = [x.strip() for x in result.split('up', 1)][1]
parts = [x.strip() for x in rest.split(',')]
uptime = parts[0] if not 'days' in parts[0] else '%s, %s' % \
tuple(parts[:2])
yield panel('System', 'Date: %s<br />Uptime: %s' %
(time, uptime), level='info')
def general(self, request):
alerts = []
if needs_restart():
alerts.append({'message': 'The machine needs to reboot.',
'type': 'error'})
return render('/sys/general', alerts=alerts, updates=get_updates())
def update(self, request):
run('apt-get update')
return self.redirect('/sys')
def dist_upgrade(self, request):
if get_updates():
return Response(app_iter=Updater())
else:
alerts = [{'message': 'Software is up to date!'}]
return render('/sys/general', alerts=alerts)
def reboot(self, request):
if 'now' in request.params:
run('reboot')
else:
timer = Timer(1, run, args=('reboot',))
timer.start()
alerts = [{'type': 'warn', 'message': 'Rebooting System...'}]
return render('/sys/general', alerts=alerts)
app = SystemApp()
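# Illustrative sketch (added for clarity, not part of the original module):
# how the uptime parsing in SystemApp.dash_panels condenses the output of
# `uptime`. The sample string is hypothetical.
#
#   result = ' 14:02:11 up 5 days,  3:42,  2 users,  load average: 0.05, 0.03, 0.01'
#   rest   = '5 days,  3:42,  2 users,  load average: 0.05, 0.03, 0.01'
#   parts  = ['5 days', '3:42', '2 users', 'load average: 0.05', '0.03', '0.01']
#   uptime = '5 days, 3:42'   # two parts joined because 'days' is in parts[0]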
| bsd-2-clause | 6,954,149,392,675,465,000 | 31.272152 | 76 | 0.578937 | false | 4.182937 | false | false | false |
hzlf/openbroadcast | website/apps/importer/migrations/0008_auto__add_field_importfile_results_tag_status__add_field_importfile_re.py | 2 | 8256 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ImportFile.results_tag_status'
db.add_column('importer_importfile', 'results_tag_status',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
# Adding field 'ImportFile.results_discogs_status'
db.add_column('importer_importfile', 'results_discogs_status',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ImportFile.results_tag_status'
db.delete_column('importer_importfile', 'results_tag_status')
# Deleting field 'ImportFile.results_discogs_status'
db.delete_column('importer_importfile', 'results_discogs_status')
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'importer.import': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Import'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'web'", 'max_length': "'10'"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'import_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"})
},
'importer.importfile': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ImportFile'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'import_session': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'null': 'True', 'to': "orm['importer.Import']"}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'results_discogs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_discogs_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'results_musicbrainz': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_tag': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'results_tag_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
}
}
complete_apps = ['importer'] | gpl-3.0 | -4,652,112,666,909,879,000 | 74.752294 | 204 | 0.563953 | false | 3.744218 | false | false | false |
tdyas/pants | src/python/pants/backend/codegen/wire/java/wire_gen.py | 2 | 5773 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.java.jar.jar_dependency import JarDependency
from pants.source.filespec import globs_matches
from pants.task.simple_codegen_task import SimpleCodegenTask
from pants.util.dirutil import fast_relpath
from pants.util.ordered_set import OrderedSet
logger = logging.getLogger(__name__)
class WireGen(NailgunTaskBase, SimpleCodegenTask):
sources_globs = ("**/*",)
@classmethod
def register_options(cls, register):
super().register_options(register)
def wire_jar(name):
return JarDependency(org="com.squareup.wire", name=name, rev="1.8.0")
cls.register_jvm_tool(
register,
"javadeps",
classpath=[wire_jar(name="wire-runtime")],
classpath_spec="//:wire-runtime",
help="Runtime dependencies for wire-using Java code.",
)
cls.register_jvm_tool(register, "wire-compiler", classpath=[wire_jar(name="wire-compiler")])
@classmethod
def is_wire_compiler_jar(cls, jar):
return "com.squareup.wire" == jar.org and "wire-compiler" == jar.name
def __init__(self, *args, **kwargs):
"""Generates Java files from .proto files using the Wire protobuf compiler."""
super().__init__(*args, **kwargs)
def synthetic_target_type(self, target):
return JavaLibrary
def is_gentarget(self, target):
return isinstance(target, JavaWireLibrary)
def synthetic_target_extra_dependencies(self, target, target_workdir):
wire_runtime_deps_spec = self.get_options().javadeps
return self.resolve_deps([wire_runtime_deps_spec])
def _compute_sources(self, target):
relative_sources = OrderedSet()
source_roots = OrderedSet()
def capture_and_relativize_to_source_root(source):
source_root = self.context.source_roots.find_by_path(source)
if not source_root:
source_root = self.context.source_roots.find(target)
source_roots.add(source_root.path)
return fast_relpath(source, source_root.path)
if target.payload.get_field_value("ordered_sources"):
# Re-match the filespecs against the sources in order to apply them in the literal order
# they were specified in.
filespec = target.globs_relative_to_buildroot()
excludes = filespec.get("excludes", [])
for filespec in filespec.get("globs", []):
sources = [
s
for s in target.sources_relative_to_buildroot()
if globs_matches([s], [filespec], excludes)
]
if len(sources) != 1:
raise TargetDefinitionException(
target,
"With `ordered_sources=True`, expected one match for each file literal, "
"but got: {} for literal `{}`.".format(sources, filespec),
)
relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
else:
# Otherwise, use the default (unspecified) snapshot ordering.
for source in target.sources_relative_to_buildroot():
relative_sources.add(capture_and_relativize_to_source_root(source))
return relative_sources, source_roots
def format_args_for_target(self, target, target_workdir):
"""Calculate the arguments to pass to the command line for a single target."""
args = ["--java_out={0}".format(target_workdir)]
# Add all params in payload to args
relative_sources, source_roots = self._compute_sources(target)
if target.payload.get_field_value("no_options"):
args.append("--no_options")
if target.payload.service_writer:
args.append("--service_writer={}".format(target.payload.service_writer))
if target.payload.service_writer_options:
for opt in target.payload.service_writer_options:
args.append("--service_writer_opt")
args.append(opt)
registry_class = target.payload.registry_class
if registry_class:
args.append("--registry_class={0}".format(registry_class))
if target.payload.roots:
args.append("--roots={0}".format(",".join(target.payload.roots)))
if target.payload.enum_options:
args.append("--enum_options={0}".format(",".join(target.payload.enum_options)))
for source_root in source_roots:
args.append("--proto_path={0}".format(os.path.join(get_buildroot(), source_root)))
args.extend(relative_sources)
return args
def execute_codegen(self, target, target_workdir):
args = self.format_args_for_target(target, target_workdir)
if args:
result = self.runjava(
classpath=self.tool_classpath("wire-compiler"),
main="com.squareup.wire.WireCompiler",
args=args,
workunit_name="compile",
workunit_labels=[WorkUnitLabel.TOOL],
)
if result != 0:
raise TaskError("Wire compiler exited non-zero ({0})".format(result))
| apache-2.0 | -4,155,171,082,662,377,500 | 39.943262 | 100 | 0.626364 | false | 4.189405 | false | false | false |
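For illustration, a hypothetical BUILD target this task could consume; the field names mirror the payload fields read in format_args_for_target, while the target name, sources, and option values are placeholders:
java_wire_library(
    name='example-wire',
    sources=['example.proto'],
    # Optional knobs handled by format_args_for_target; values are illustrative.
    registry_class='org.example.Registry',
    roots=['example.Message'],
    enum_options=['example.Option'],
    ordered_sources=True,
)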
yetilinux/yetiweb | sitemaps.py | 1 | 3118 | from datetime import datetime, timedelta
from pytz import utc
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import reverse
from main.models import Package
from news.models import News
from packages.utils import get_group_info, get_split_packages_info
class PackagesSitemap(Sitemap):
changefreq = "weekly"
priority = "0.5"
def items(self):
return Package.objects.all().order_by()
def lastmod(self, obj):
return obj.last_update
class PackageFilesSitemap(PackagesSitemap):
changefreq = "weekly"
priority = "0.2"
def location(self, obj):
return PackagesSitemap.location(self, obj) + 'files/'
def lastmod(self, obj):
return obj.files_last_update
class PackageGroupsSitemap(Sitemap):
changefreq = "weekly"
priority = "0.4"
def items(self):
return get_group_info()
def lastmod(self, obj):
return obj['last_update']
def location(self, obj):
return '/groups/%s/%s/' % (obj['arch'], obj['name'])
class SplitPackagesSitemap(Sitemap):
changefreq = "weekly"
priority = "0.3"
def items(self):
return get_split_packages_info()
def lastmod(self, obj):
return obj['last_update']
def location(self, obj):
return '/packages/%s/%s/%s/' % (
obj['repo'].name.lower(), obj['arch'], obj['pkgbase'])
class NewsSitemap(Sitemap):
priority = "0.8"
def __init__(self):
now = datetime.utcnow().replace(tzinfo=utc)
self.one_day_ago = now - timedelta(days=1)
self.one_week_ago = now - timedelta(days=7)
def items(self):
return News.objects.all().order_by()
def lastmod(self, obj):
return obj.last_modified
def changefreq(self, obj):
if obj.last_modified > self.one_day_ago:
return 'daily'
if obj.last_modified > self.one_week_ago:
return 'weekly'
return 'yearly'
class BaseSitemap(Sitemap):
base_viewnames = (
('index', 1.0, 'hourly'),
('packages-search', 0.8, 'hourly'),
('page-keys', 0.8, 'weekly'),
('news-list', 0.7, 'weekly'),
('groups-list', 0.5, 'weekly'),
('mirror-status', 0.4, 'hourly'),
'page-about',
'page-art',
'page-svn',
'page-devs',
'page-tus',
'page-fellows',
'page-donate',
'page-download',
'feeds-list',
'mirror-list',
'mirrorlist',
'packages-differences',
'releng-test-overview',
'visualize-index',
)
def items(self):
return self.base_viewnames
def location(self, obj):
name = obj
if isinstance(obj, tuple):
name = obj[0]
return reverse(name)
def priority(self, obj):
if isinstance(obj, tuple):
return obj[1]
return 0.7
def changefreq(self, obj):
if isinstance(obj, tuple):
return obj[2]
return 'monthly'
# vim: set ts=4 sw=4 et:
| gpl-2.0 | -5,915,928,622,169,549,000 | 23.551181 | 70 | 0.565106 | false | 3.646784 | false | false | false |
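For illustration, a minimal sketch (assumed, not taken from this repository) of exposing these sitemap classes through Django's sitemap views in a Django 1.x-style urls.py:
from django.conf.urls import url
from django.contrib.sitemaps import views as sitemap_views
from sitemaps import (BaseSitemap, NewsSitemap, PackagesSitemap, PackageFilesSitemap,
                      PackageGroupsSitemap, SplitPackagesSitemap)

sitemaps = {
    'base': BaseSitemap,
    'news': NewsSitemap,
    'packages': PackagesSitemap,
    'package-files': PackageFilesSitemap,
    'package-groups': PackageGroupsSitemap,
    'split-packages': SplitPackagesSitemap,
}

urlpatterns = [
    url(r'^sitemap\.xml$', sitemap_views.index, {'sitemaps': sitemaps}),
    url(r'^sitemap-(?P<section>.+)\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}),
]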
QingChenmsft/azure-cli | src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/proxy.py | 5 | 2683 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import platform
import subprocess
from abc import abstractmethod
def disable_http_proxy():
"""
Disables the HTTP proxy
"""
_get_proxy_instance().disable_http_proxy()
def set_http_proxy(host, port):
"""
Sets the HTTP proxy to host:port
"""
if not host:
raise ValueError('Missing host')
if not port:
raise ValueError('Missing port')
_get_proxy_instance().set_http_proxy(host, port)
def _get_proxy_instance():
"""
Gets the proxy class instance based on the OS
"""
os_platform = platform.system()
if os_platform == 'Darwin':
return MacProxy()
elif os_platform == 'Windows':
from azure.cli.command_modules.acs.win_proxy import WinProxy
return WinProxy()
elif os_platform == 'Linux':
return LinuxProxy()
else:
raise NotImplementedError('Not implemented yet for {}'.format(os_platform))
class Proxy(object):
"""
Base proxy class
"""
def __init__(self):
pass
@abstractmethod
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy
"""
pass
@abstractmethod
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
pass
class LinuxProxy(Proxy):
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy on Linux
"""
subprocess.call('sudo gsettings set org.gnome.system.proxy mode \'manual\'', shell=True)
subprocess.call(
'sudo gsettings set org.gnome.system.proxy.http host \'{}\''.format(host), shell=True)
subprocess.call(
'sudo gsettings set org.gnome.system.proxy.http port {}'.format(port), shell=True)
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
subprocess.call('sudo gsettings set org.gnome.system.proxy mode \'none\'', shell=True)
class MacProxy(Proxy):
def set_http_proxy(self, host, port):
"""
Sets the HTTP proxy
"""
cmd = 'sudo networksetup -setwebproxy wi-fi {} {}'.format(host, port)
subprocess.call(cmd, shell=True)
def disable_http_proxy(self):
"""
Disables the HTTP proxy
"""
subprocess.call('sudo networksetup -setwebproxystate wi-fi off', shell=True)
| mit | 3,334,482,644,374,970,400 | 25.83 | 98 | 0.56243 | false | 4.320451 | false | false | false |
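For illustration, a hedged usage sketch of the module above; it assumes the import path shown in the file header and that the calling user may run the underlying sudo/networksetup or gsettings commands:
from azure.cli.command_modules.acs import proxy

proxy.set_http_proxy('127.0.0.1', 8888)   # route HTTP traffic through a local proxy
try:
    pass  # ... work that should go through the proxy ...
finally:
    proxy.disable_http_proxy()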
Guismo1/OpenStack-tools | openstack-net-tenant.py | 1 | 2923 | #!/usr/bin/env python
import sys
import requests
import json
import pprint
import os
OS_AUTH_URL=os.environ.get('OS_AUTH_URL')
OS_TENANT_ID=os.environ.get('OS_TENANT_ID')
OS_TENANT_NAME=os.environ.get('OS_TENANT_NAME')
OS_USERNAME=os.environ.get('OS_USERNAME')
OS_PASSWORD=os.environ.get('OS_PASSWORD')
if OS_AUTH_URL is None or OS_TENANT_ID is None or OS_TENANT_NAME is None or OS_USERNAME is None or OS_PASSWORD is None:
    print "You need to source your environment variables from your OpenStack RC file"
sys.exit(1)
#print "OS_AUTH_URL="+OS_AUTH_URL
#print "OS_USERNAME="+OS_USERNAME
#print "OS_PASSWORD="+OS_PASSWORD
def post_request(URL,DATA):
try:
response = requests.post(
url=URL,
headers={
"Content-Type": "application/json",
},
data=json.dumps(DATA)
)
data = json.loads(response.text)
return data
except requests.exceptions.RequestException:
print('HTTP Request failed')
def get_request(URL, TOKEN):
try:
response = requests.get(
url=URL,
headers={
"Content-Type": "application/json",
"X-Auth-Token": TOKEN
}
)
data = json.loads(response.text)
return data
except requests.exceptions.RequestException:
print('HTTP Request failed')
pp=pprint.PrettyPrinter(indent=4)
token=post_request(OS_AUTH_URL+"/tokens",{
"auth": {
"passwordCredentials": {
"username": OS_USERNAME,
"password": OS_PASSWORD
},
"tenantName": OS_TENANT_NAME,
}
})["access"]["token"]["id"]
#print "Token="+token
# Get Compute Service URL
endpoint_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/endpoints", token)["endpoints"]
service_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/services", token)["services"]
s=(item for item in service_list if item["name"] == "Compute Service").next()
e=(e["url"] for e in endpoint_list if e["service_id"]==s["id"]).next()
OS_NOVA_URL=e
#print "OS_NOVA_URL="+OS_NOVA_URL
net_list=get_request(OS_NOVA_URL.replace("$(tenant_id)s",OS_TENANT_ID)+"/os-networks",token)["networks"]
tenant_list=get_request(OS_AUTH_URL.replace('v2.0','v3')+"/projects",token)["projects"]
print "Network_ID\t\t\t\tNetwork_Name\tNetwork_Address\tProject_ID\t\t\t\tProject_Name"
for net in net_list:
l = list(t["name"] for t in tenant_list if t["id"]==net["project_id"])
if not net["project_id"]:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+"!!! FREE !!!"
else:
if l==[]:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+net["project_id"]+"\t"+"!!! DELETED !!!"
else:
print net["id"]+"\t"+net["label"]+"\t"+net["cidr"]+"\t"+net["project_id"]+"\t"+l[0]
| apache-2.0 | 7,000,381,782,790,158,000 | 33.388235 | 120 | 0.584673 | false | 3.28427 | false | false | false |
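For illustration, a self-contained sketch of the same Keystone v2.0 token request the script issues; the endpoint URL and credentials are placeholders, not values from this repository:
import json
import requests

response = requests.post(
    "http://keystone.example.com:5000/v2.0/tokens",
    headers={"Content-Type": "application/json"},
    data=json.dumps({
        "auth": {
            "passwordCredentials": {"username": "demo", "password": "secret"},
            "tenantName": "demo",
        }
    }),
)
token = json.loads(response.text)["access"]["token"]["id"]
print("Token=" + token)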
InsightSoftwareConsortium/ITKExamples | Utilities/CookieCutter/hooks/post_gen_project.py | 1 | 7180 | #!/usr/bin/env python
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Post-generate cookiecutter hook script to incorporate the example to the
ITKExamples source tree.
"""
import os
import shutil
from os.path import join as pjoin
def add_new_group(itk_examples_src, group_name):
""" Add a new group to the ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
"""
# Populate ITKExamples/src/$group_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, 'CMakeLists.txt'), 'a+')
f.write('add_subdirectory(' + group_name + ') \n')
f.write('')
f.close()
# Populate ITKExamples/src/$group_name/index.rst
f = open(
pjoin(itk_examples_src, group_name, 'index.rst'), 'a+')
f.write(group_name + '\n')
for i in range(len(group_name)):
f.write('=')
f.write('\n\n')
f.write('.. toctree::\n')
f.write(' :maxdepth: 2\n\n')
f.close()
def add_new_module(itk_examples_src, group_name, module_name):
""" Add a new module to the ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
module_name : str
ITK module name.
"""
# Append 'add_subdirectory_if_module_enabled( $module_name )' to
# ITKExamples/src/$group_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, 'CMakeLists.txt'), 'a+')
f.write(
'add_subdirectory_if_module_enabled(' + module_name + ')\n')
f.close()
# Append '$module_name/index.rst' to
# ITKExamples/src/$group_name/index.rst
f = open(pjoin(
itk_examples_src, group_name, 'index.rst'), 'a+')
f.write(' ' + module_name + '/index.rst\n')
f.close()
    # Create ITKExamples/src/$group_name/$module_name/index.rst with the module
    # header and an empty toctree
f = open(pjoin(
itk_examples_src, group_name, module_name, 'index.rst'), 'a+')
f.write(module_name + '\n')
for i in range(len(module_name)):
f.write('=')
f.write('\n\n')
f.write('.. toctree::\n')
f.write(' :maxdepth: 1\n\n')
f.close()
def add_example_to_module(itk_examples_src, group_name, module_name,
example_name):
""" Add the example information to the ITKExamples module.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
group_name : str
ITK group name.
module_name : str
ITK module name.
example_name : str
ITK example name.
"""
# Append 'add_example( $example_name )' to
# ITKExamples/src/$group_name/$module_name/CMakeLists.txt
f = open(pjoin(
itk_examples_src, group_name, module_name, 'CMakeLists.txt'), 'a+')
f.write('\nadd_example(' + example_name + ')\n')
f.write(
'compare_to_baseline(EXAMPLE_NAME ' + example_name +
'\n BASELINE_PREFIX OutputBaseline\n )\n')
f.close()
    # Append the example's Documentation.rst entry to
    # ITKExamples/src/$group_name/$module_name/index.rst
f = open(pjoin(
itk_examples_src, group_name, module_name, 'index.rst'), 'a+')
f.write(' ' + example_name + '/Documentation.rst\n')
f.close()
def print_instructions(itk_examples_src, example_dir, example_name,
group_name):
""" Print instructions to edit files and contribute to ITKExamples.
Parameters
----------
itk_examples_src : str
ITK examples source directory.
example_dir : str
ITK example directory.
example_name : str
ITK example name.
group_name : str
ITK group name.
"""
example_cmakelists = pjoin(example_dir, 'CMakeLists.txt')
example_rst = pjoin(example_dir, 'Documentation.rst')
example_cxx = pjoin(example_dir, 'Code.cxx')
example_py = pjoin(example_dir, 'Code.py')
print('Example {} added successfully!'.format(example_name))
print('Please:')
print(' 1- Edit the following files:')
print(' * ' + example_cxx)
print(' * ' + example_py)
print(' * ' + example_cmakelists)
print(' * ' + example_rst + '\n')
print(' 2- Commit changes in the ITKExamples source directory and push:')
print(' $ cd ' + itk_examples_src)
print(' $ git checkout -b Add' + example_name)
print(' $ git add ' + group_name)
print(' $ git commit -a -m "ENH: Add ' + example_name + '" ')
print(' $ git push origin Add' + example_name)
def main():
# Get the cookiecutter template variables
group_name = '{{ cookiecutter.group_name }}'
module_name = '{{ cookiecutter.module_name }}'
example_name = '{{ cookiecutter.example_name }}'
itk_examples_src = '{{ cookiecutter.itk_examples_src }}'
example_dest_dir = pjoin(
itk_examples_src, group_name, module_name, example_name)
# Add the example data to the corresponding group and module files
output_dir = os.getcwd()
    # If ITKExamples/src/$group_name/$module_name/$example_name does not exist
if not os.path.exists(example_dest_dir):
# If ITKExamples/src/$group_name does not exist
if not os.path.exists(pjoin(itk_examples_src, group_name)):
# Create directory ITKExamples/src/$group_name
os.mkdir(pjoin(itk_examples_src, group_name))
# Add new group
add_new_group(itk_examples_src, group_name)
# If ITKExamples/src/$group_name/$module_name does not exist
if not os.path.exists(
pjoin(itk_examples_src, group_name, module_name)):
# Create directory ITKExamples/src/$group_name/$module_name
os.mkdir(pjoin(itk_examples_src, group_name, module_name))
# Add new module
add_new_module(itk_examples_src, group_name, module_name)
# Add example information to module
add_example_to_module(itk_examples_src, group_name, module_name,
example_name)
# Move the example to the appropriate place in the ITKExamples source
# tree
shutil.move(output_dir, example_dest_dir)
# Print instructions
print_instructions(itk_examples_src, example_dest_dir, example_name,
group_name)
else:
print('Error: This example is already present in ITKExamples: {}'
.format(example_dest_dir))
# Delete the generated cookiecutter project
shutil.rmtree(output_dir)
if __name__ == '__main__':
main()
| apache-2.0 | 3,509,470,674,119,587,000 | 30.082251 | 78 | 0.613092 | false | 3.478682 | false | false | false |
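For illustration, a hypothetical non-interactive invocation of the template that triggers this post-generation hook; the template path follows the hooks/ location above, and the context values are placeholders:
from cookiecutter.main import cookiecutter

cookiecutter(
    'Utilities/CookieCutter',
    no_input=True,
    extra_context={
        'group_name': 'Filtering',
        'module_name': 'Smoothing',
        'example_name': 'BlurAnImage',
        'itk_examples_src': '/path/to/ITKExamples/src',
    },
)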