repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
attia42/twitter_word2vec | kmeans/experimentm.py | 1 | 3559 | import csv
import nltk
from nltk.tokenize import word_tokenize
import string
from nltk import pos_tag
from gensim.models.word2vec import Word2Vec
from gensim import matutils
from numpy import array, float32 as REAL
from sklearn.cluster import MiniBatchKMeans, KMeans
from multiprocessing import Pool
from collections import Counter
#string.punctuation
#string.digits
file = 'training.1600000.processed.noemoticon2.csv'
#file = 'testdata.manual.2009.06.14.csv'
tags = ["NNP", "NN", "NNS"]
ncls = 1000
niters = 1000
nreplay_kmeans = 1
lower = False
redundant = ["aw", "aww", "awww", "awwww", "haha", "lol", "wow", "wtf", "xd", "yay", "http", "www", "com", "ah", "ahh", "ahhh", "amp"]
def preprocess(tweet):
ret_tweet = ""
i = -1
nn = []
raw_tweet = tweet
for ch in string.punctuation.replace("'","") + string.digits:
tweet = tweet.replace(ch, " ")
tweet_pos = {}
if lower:
tweet = tweet.lower()
try:
toks = word_tokenize(tweet)
pos = pos_tag(toks)
nn = [p for p in pos if p[1] in tags]
#nn = [p for p in pos if p == 'NNP']
except:
pass
if(len(nn)):
tweet_pos["NN"] = nn
ret_tweet = tweet_pos
return ret_tweet
raw = []
with open(file, 'rb') as csvfile:
content = csv.reader(csvfile, delimiter=',', quotechar='"')
for row in content:
tweet = row[5]
raw.append(tweet)
p = Pool(6)
tweets = p.map(preprocess, raw)
t1 = []
t2 = []
for i in range(len(tweets)):
if len(tweets[i]):
t1.append(raw[i])
t2.append(tweets[i])
raw = t1
tweets = t2
print "Loading model..."
wv = Word2Vec.load_word2vec_format('GoogleNews-vectors-negative300.bin', binary=True)
vectors = []
for i in range(len(tweets)):
tweet = tweets[i]
nns = tweet['NN']
vector = []
#print nns
mean = []
no_wv_tweet = True
for w in nns:
if len(w[0]) > 1 and w[0] in wv and w[0].lower() not in redundant:
no_wv_tweet = False
#print w[0]
weight = 1
if w[1] == 'NNP':
weight = 100
mean.append(weight * wv[w[0]])
if(len(mean)):
vectors.append(matutils.unitvec(array(mean).mean(axis=0)).astype(REAL))
else:
vectors.append([])
t1 = []
t2 = []
t3 = []
for i in range(len(vectors)):
if vectors[i] != None and len(vectors[i]):
t1.append(raw[i])
t2.append(tweets[i])
t3.append(vectors[i])
raw = t1
tweets = t2
vectors = t3
#kmeans = KMeans(init='k-means++', n_clusters=ncls, n_init=1)
kmeans = MiniBatchKMeans(init='k-means++', n_clusters=ncls, n_init=nreplay_kmeans, max_iter=niters)
kmeans.fit(vectors)
clss = kmeans.predict(vectors)
clusters = [[] for i in range(ncls)]
for i in range(len(vectors)):
cls = clss[i]
clusters[cls].append(i)
clusterstags = [[] for i in range(ncls)]
countarr = []
for c in clusters:
counts = Counter()
for i in c:
        t = [x[0] for x in tweets[i]["NN"]]  # optionally filter: if x[1] == "NNP"
#tn = [x[1] for x in tweets[i]["NN"]]
sentence = " ".join(t) #+ tn)
counts.update(word.strip('.,?!"\'').lower() for word in sentence.split())
countarr.append(counts)
output = ""
for i in range(ncls):
output = "Most common words for this cluster:\n"
output += str(countarr[i].most_common(12))
output += "\n\n\n\n\n\n"
output += "Word2vec space of related words:\n"
wv_rel = wv.most_similar([kmeans.cluster_centers_[i]], topn=10)
output += str(wv_rel)
output += "\n\n\n\n\n\n"
for t in clusters[i]:
output += str(raw[t]) + "\n"
#output += "\n\n\n"
nm = [x[0] for x in countarr[i].most_common(5)]
nm = str(" ".join(nm))
for ch in string.punctuation:
nm = nm.replace(ch, " ")
f = open('clusters/' + nm +'.txt', 'wb')
f.write(output)
f.close()
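
# Illustrative sketch (not part of the original pipeline): each tweet vector
# built above is the mean of the weighted word2vec vectors of the tweet's
# nouns (proper nouns weighted 100x), renormalised to unit length with
# matutils.unitvec. The toy numbers below are hypothetical and do not touch
# the real model or data.
toy_vectors = [100 * array([1.0, 0.0]),  # e.g. an NNP token, weight 100
               1 * array([0.0, 1.0])]    # e.g. an NN token, weight 1
toy_tweet_vector = matutils.unitvec(array(toy_vectors).mean(axis=0))
# toy_tweet_vector is approximately [0.99995, 0.01]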
| mit | -3,103,478,720,361,940,000 | 18.662983 | 134 | 0.629952 | false |
fga-gpp-mds/2017.2-Receituario-Medico | medical_prescription/exam/test/test_view_list_custom_exam.py | 1 | 2151 | # Django imports
from django.test import TestCase
from django.test.client import RequestFactory
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
# Local Django imports
from exam.views import ListCustomExams
from user.models import User, Patient, HealthProfessional
class ListExamsTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.health_professional = HealthProfessional.objects.create_user(email='[email protected]', password='senha12')
self.patient = Patient.objects.create_user(email='[email protected]',
password='senha12',
CEP='72850735',
UF='DF',
city='Brasília',
neighborhood='Asa sul',
complement='Bloco 2 QD 701')
self.user = User.objects.create_user(email='[email protected]', password='senha12')
def teste_exam_get_exam_without_login(self):
request = self.factory.get('/exam/list_custom_exams/')
request.user = AnonymousUser()
response = ListCustomExams.as_view()(request)
self.assertEqual(response.status_code, 302)
def teste_exam_get_exam_with_patient(self):
request = self.factory.get('/exam/list_custom_exams/')
request.user = self.patient
with self.assertRaises(PermissionDenied):
ListCustomExams.as_view()(request)
def teste_exam_get_exam_with_user(self):
request = self.factory.get('/exam/list_custom_exams/')
request.user = self.user
with self.assertRaises(PermissionDenied):
ListCustomExams.as_view()(request)
def teste_exam_get_exam_with_health_professional(self):
request = self.factory.get('/exam/list_custom_exams/')
request.user = self.health_professional
response = ListCustomExams.as_view()(request)
self.assertEqual(response.status_code, 200)
| mit | -498,659,966,404,250,100 | 41.156863 | 120 | 0.605581 | false |
mxcube/mxcube | mxcubeqt/widgets/optimisation_parameters_widget_layout.py | 1 | 2158 | #
# Project: MXCuBE
# https://github.com/mxcube
#
# This file is part of MXCuBE software.
#
# MXCuBE is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MXCuBE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MXCuBE. If not, see <http://www.gnu.org/licenses/>.
from mxcubeqt.utils import qt_import
__credits__ = ["MXCuBE collaboration"]
__license__ = "LGPLv3+"
class OptimisationParametersWidgetLayout(qt_import.QWidget):
"""
Widget is used to define characterisation optimization parameters like maximum
    resolution, aimed multiplicity, etc.
"""
def __init__(self, parent=None, name=None, window_flags=0):
qt_import.QWidget.__init__(self, parent, qt_import.Qt.WindowFlags(window_flags))
if not name:
self.setObjectName("OptimisationParametersWidgetLayout")
# Hardware objects ----------------------------------------------------
# Internal variables --------------------------------------------------
# Graphic elements ----------------------------------------------------
self.opt_param_widget = qt_import.load_ui_file(
"optimization_parameters_widget_layout.ui"
)
# Layout --------------------------------------------------------------
_main_vlayout = qt_import.QVBoxLayout(self)
_main_vlayout.addWidget(self.opt_param_widget)
_main_vlayout.setSpacing(0)
_main_vlayout.setContentsMargins(0, 0, 0, 0)
# Size policies -------------------------------------------------------
# Other ---------------------------------------------------------------
self.setAttribute(qt_import.Qt.WA_WState_Polished)
| lgpl-3.0 | 7,412,875,325,961,287,000 | 36.859649 | 88 | 0.579703 | false |
PowerHMC/HmcRestClient | src/managed_system/PowerOnManagedSystem.py | 1 | 2162 | # Copyright 2015, 2016 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from src.utility import HTTPClient,HmcHeaders,HMCClientLogger
import xml.etree.ElementTree as etree
import os
log = HMCClientLogger.HMCClientLogger(__name__)
from src.common.JobStatus import *
ROOT = 'ManagedSystem'
CONTENT_TYPE ='application/vnd.ibm.powervm.web+xml; type=JobRequest'
class PowerOnManagedSystem(JobStatus):
"""
    Power on the selected ManagedSystem if it is not already in an
    operating state; otherwise an error is reported.
"""
def __init__(self):
"""
initializes root and content-type
"""
self.content_type = CONTENT_TYPE
self.root = ROOT
def poweron_ManagedSystem(self, ip, managedsystem_uuid, x_api_session):
"""
Args:
ip:ip address of hmc
managedsystem_uuid:UUID of managedsystem
x_api_session:session to be used
"""
super().__init__(ip, self.root, self.content_type, x_api_session)
log.log_debug("power on managed system started")
headers_object=HmcHeaders.HmcHeaders("web")
namespace=headers_object.ns["xmlns"]
directory = os.path.dirname(__file__)
        inputpayload = open(os.path.join(directory, "data", "poweron_managedsystem.xml"), "r")
request_object=HTTPClient.HTTPClient('uom',ip,self.root,self.content_type,session_id=x_api_session)
request_object.HTTPPut(payload=inputpayload,append=str(managedsystem_uuid)+"/do/PowerOn")
log.log_debug(request_object.response)
if request_object.response_b:
self.get_job_status(request_object)
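
# Illustrative usage sketch; the address, UUID and session id below are
# placeholders, not values from this project:
#
#   power_on = PowerOnManagedSystem()
#   power_on.poweron_ManagedSystem("9.3.207.78", managedsystem_uuid, x_api_session)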
| apache-2.0 | 6,232,945,339,774,236,000 | 37.607143 | 107 | 0.682701 | false |
Nablaquabla/sns-analysis | ba-missing-files.py | 1 | 5827 | import os
import time as tm
import sys
# Handles the creation of condor files for a given set of directories
# -----------------------------------------------------------------------------
def createCondorFile(dataDir,outDir,run,day,time):
# Condor submission file name convention: run-day-time.condor
with open('/home/bjs66/CondorFiles/%s-%s-%s.condor'%(run,day,time),'w') as f:
        # Fixed program location
f.write('Executable = /home/bjs66/GitHub/sns-analysis/sns-analysis-v3\n')
# Arguments passed to the exe:
# Set main run directory, e.g. Run-15-10-02-27-32-23/151002
# Set current time to be analzyed (w/o .zip extension!), e.g. 184502
# Set output directory, eg Output/ Run-15-10-02-27-32-23/151002
f.write('Arguments = \"3 %s %s %s 1\"\n'%(dataDir,time,outDir))
# Standard cluster universe
f.write('universe = vanilla\n')
f.write('getenv = true\n')
# Program needs at least 250 MB of free memory to hold unzipped data
f.write('request_memory = 300\n')
# Output, error and log name convention: run-day-time.log/out/err
f.write('log = ../../Logs/%s-%s-%s.log\n'%(run,day,time))
f.write('Output = ../../Outs/%s-%s-%s.out\n'%(run,day,time))
        f.write('Error = ../../Errs/%s-%s-%s.err\n'%(run,day,time))
# Do not write any emails
f.write('notification = never\n')
f.write('+Department = Physics\n')
f.write('should_transfer_files = NO\n')
# Add single job to queue
f.write('Queue')
# Main function handling all internals
# -----------------------------------------------------------------------------
def main(runMissing):
# Choose main directory, i.e. ~/csi/beam_on_data/Run-15-06-25-xyz/
mainRunDir = '/var/phy/project/phil/grayson/COHERENT/CsI/'
# Choose output directory, i.e. ~/output/Run-15-06-25-xyz/
mainOutDir = '/var/phy/project/phil/grayson/COHERENT/CsI/bjs-analysis/'
# Choose run to analyze
# runDirs = ['Run-15-03-27-12-42-26']
# run = 'Run-15-03-30-13-33-05'
# run = 'Run-15-04-08-11-38-28'
# run = 'Run-15-04-17-16-56-59'
# run = 'Run-15-04-29-16-34-44'
# runDirs = ['Run-15-05-05-16-09-12']
# run = 'Run-15-05-11-11-46-30'
# run = 'Run-15-05-19-17-04-44'
# run = 'Run-15-05-27-11-13-46'
# runDirs = ['Run-15-05-05-16-09-12','Run-15-05-11-11-46-30','Run-15-05-19-17-04-44','Run-15-05-27-11-13-46']
runDirs = ['Run-15-03-27-12-42-26','Run-15-03-30-13-33-05','Run-15-04-08-11-38-28','Run-15-04-17-16-56-59','Run-15-04-29-16-34-44',
'Run-15-05-05-16-09-12','Run-15-05-11-11-46-30','Run-15-05-19-17-04-44','Run-15-05-27-11-13-46']
subdirs = {}
days_in = {'Run-15-03-27-12-42-26': ['150327','150328','150329','150330'],
'Run-15-03-30-13-33-05': ['150330','150331','150401','150402','150403','150404','150405','150406','150407','150408'],
'Run-15-04-08-11-38-28': ['150408','150409','150410','150411','150412','150413','150414','150415','150416'],
'Run-15-04-17-16-56-59': ['150417','150418','150419','150420','150421','150422','150423','150424','150425','150426','150427','150428','150429'],
'Run-15-04-29-16-34-44': ['150429','150430','150501','150502','150503','150504','150505'],
'Run-15-05-05-16-09-12': ['150505','150506','150507','150508','150509','150510','150511'],
'Run-15-05-11-11-46-30': ['150512','150513','150514','150515','150516','150517','150518','150519'],
'Run-15-05-19-17-04-44': ['150519','150520','150521','150522','150523','150524','150525','150526','150527'],
'Run-15-05-27-11-13-46': ['150527','150528','150529','150530','150531','150601','150602','150603','150604','150605','150606','150607','150608','150609']}
for run in runDirs:
for day in days_in[run]:
subdirs[run] = 'brillance_data'
print run,day
# Prepare paths for further processing
dataRunDir = mainRunDir + '%s/%s/%s'%(subdirs[run],run,day)
outDir = mainOutDir + '%s/%s'%(run,day)
# Get all times within the day folder chosen
inputList = [x.split('.')[0] for x in os.listdir(dataRunDir)]
# Get all times within the day folder chosen
outputList_B = [x.split('-')[1] for x in os.listdir(outDir) if 'B-' in x]
outputList_S = [x.split('-')[1] for x in os.listdir(outDir) if 'S-' in x]
outputList_I = [x.split('-')[1] for x in os.listdir(outDir) if 'I-' in x]
# Check if there is a file missing in the day folder
if len(inputList) != len(outputList_B) or len(inputList) != len(outputList_S) or len(inputList) != len(outputList_I):
missingB = set(inputList) - set(outputList_B)
missingI = set(inputList) - set(outputList_I)
missingS = set(inputList) - set(outputList_S)
missing = list((missingB | missingI) | missingS)
if len(missing) > 0:
print len(missing)
if runMissing == '1':
for m in missing:
createCondorFile(dataRunDir,outDir,run,day,m)
cmd = 'condor_submit /home/bjs66/CondorFiles/%s-%s-%s.condor'%(run,day,m)
os.system(cmd)
if __name__ == '__main__':
main(sys.argv[1])
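# Usage note (inferred from the argument handling above, not from project
# docs): the single command-line argument is compared against the string '1'.
# Passing '1' writes and submits condor jobs for any missing outputs; any
# other value only reports how many outputs are missing.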
| gpl-3.0 | -2,322,044,558,256,752,000 | 42.485075 | 168 | 0.519478 | false |
iotaledger/iota.lib.py | iota/transaction/validator.py | 1 | 9563 | from typing import Generator, List, Optional, Type
from iota.crypto.kerl import Kerl
from iota.crypto.signing import validate_signature_fragments
from iota.transaction.base import Bundle, Transaction
__all__ = [
'BundleValidator',
]
# In very rare cases, the IOTA protocol may switch hash algorithms.
# When this happens, the IOTA Foundation will create a snapshot, so
# that all new objects on the Tangle use the new hash algorithm.
#
# However, the snapshot will still contain references to addresses
# created using the legacy hash algorithm, so the bundle validator has
# to be able to use that as a fallback when validation fails.
SUPPORTED_SPONGE = Kerl
LEGACY_SPONGE = None # Curl
class BundleValidator(object):
"""
Checks a bundle and its transactions for problems.
"""
def __init__(self, bundle: Bundle) -> None:
super(BundleValidator, self).__init__()
self.bundle = bundle
self._errors: Optional[List[str]] = []
self._validator = self._create_validator()
@property
def errors(self) -> List[str]:
"""
Returns all errors found with the bundle.
"""
try:
self._errors.extend(self._validator) # type: List[str]
except StopIteration:
pass
return self._errors
def is_valid(self) -> bool:
"""
Returns whether the bundle is valid.
"""
if not self._errors:
try:
# We only have to check for a single error to determine
# if the bundle is valid or not.
self._errors.append(next(self._validator))
except StopIteration:
pass
return not self._errors
def _create_validator(self) -> Generator[str, None, None]:
"""
Creates a generator that does all the work.
"""
# Group transactions by address to make it easier to iterate
# over inputs.
grouped_transactions = self.bundle.group_transactions()
# Define a few expected values.
bundle_hash = self.bundle.hash
last_index = len(self.bundle) - 1
# Track a few others as we go along.
balance = 0
# Check indices and balance first.
# Note that we use a counter to keep track of the current index,
# since at this point we can't trust that the transactions have
# correct ``current_index`` values.
counter = 0
for group in grouped_transactions:
for txn in group:
balance += txn.value
if txn.bundle_hash != bundle_hash:
yield 'Transaction {i} has invalid bundle hash.'.format(
i=counter,
)
if txn.current_index != counter:
yield (
'Transaction {i} has invalid current index value '
'(expected {i}, actual {actual}).'.format(
actual=txn.current_index,
i=counter,
)
)
if txn.last_index != last_index:
yield (
'Transaction {i} has invalid last index value '
'(expected {expected}, actual {actual}).'.format(
actual=txn.last_index,
expected=last_index,
i=counter,
)
)
counter += 1
# Bundle must be balanced (spends must match inputs).
if balance != 0:
yield (
'Bundle has invalid balance '
'(expected 0, actual {actual}).'.format(
actual=balance,
)
)
# Signature validation is only meaningful if the transactions
# are otherwise valid.
if not self._errors:
signature_validation_queue: List[List[Transaction]] = []
for group in grouped_transactions:
# Signature validation only applies to inputs.
if group[0].value >= 0:
continue
validate_group_signature = True
for j, txn in enumerate(group):
if (j > 0) and (txn.value != 0):
# Input is malformed; signature fragments after
# the first should have zero value.
yield (
'Transaction {i} has invalid value '
'(expected 0, actual {actual}).'.format(
actual=txn.value,
# If we get to this point, we know that
# the ``current_index`` value for each
# transaction can be trusted.
i=txn.current_index,
)
)
# We won't be able to validate the signature,
# but continue anyway, so that we can check that
# the other transactions in the group have the
# correct ``value``.
validate_group_signature = False
continue
# After collecting the signature fragment from each
# transaction in the group, queue them up to run through
# the validator.
#
# We have to perform signature validation separately so
# that we can try different algorithms (for
# backwards-compatibility).
#
# References:
#
# - https://github.com/iotaledger/kerl#kerl-integration-in-iota
if validate_group_signature:
signature_validation_queue.append(group)
# Once we've finished checking the attributes from each
# transaction in the bundle, go back and validate
# signatures.
if signature_validation_queue:
# ``yield from`` is an option here, but for
# compatibility with Python 2 clients, we will do it the
# old-fashioned way.
for error in self._get_bundle_signature_errors(
signature_validation_queue
):
yield error
def _get_bundle_signature_errors(
self,
groups: List[List[Transaction]]
) -> List[str]:
"""
Validates the signature fragments in the bundle.
:return:
List of error messages.
If empty, signature fragments are valid.
"""
# Start with the currently-supported hash algo.
current_pos = None
current_errors = []
for current_pos, group in enumerate(groups):
error = self._get_group_signature_error(group, SUPPORTED_SPONGE)
if error:
current_errors.append(error)
# Pause and retry with the legacy algo.
break
# If validation failed, then go back and try with the legacy
# algo (only applies if we are currently transitioning to a new
# algo).
if current_errors and LEGACY_SPONGE:
for group in groups:
if self._get_group_signature_error(group, LEGACY_SPONGE):
# Legacy algo doesn't work, either; no point in
# continuing.
break
else:
# If we get here, then we were able to validate the
# signature fragments successfully using the legacy
# algorithm.
return []
# If we get here, then validation also failed when using the
# legacy algorithm.
# At this point, we know that the bundle is invalid, but we will
# continue validating with the supported algorithm anyway, so
# that we can return an error message for every invalid input.
current_errors.extend(filter(None, (
self._get_group_signature_error(group, SUPPORTED_SPONGE)
for group in groups[current_pos + 1:]
)))
return current_errors
@staticmethod
def _get_group_signature_error(
group: List[Transaction],
sponge_type: Type
) -> Optional[str]:
"""
Validates the signature fragments for a group of transactions
using the specified sponge type.
Note: this method assumes that the transactions in the group
have already passed basic validation (see
:py:meth:`_create_validator`).
:return:
- ``None``: Indicates that the signature fragments are valid.
- ``str``: Error message indicating the fragments are invalid.
"""
validate_group_signature = validate_signature_fragments(
fragments=[txn.signature_message_fragment for txn in group],
hash_=group[0].bundle_hash,
public_key=group[0].address,
sponge_type=sponge_type,
)
if validate_group_signature:
return None
return (
'Transaction {i} has invalid signature '
'(using {fragments} fragments).'.format(
fragments=len(group),
i=group[0].current_index,
)
)
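
# Illustrative usage sketch; ``bundle`` is a hypothetical iota.Bundle instance
# (this module only defines the validator itself):
#
#   validator = BundleValidator(bundle)
#   if not validator.is_valid():
#       for message in validator.errors:
#           print(message)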
| mit | -1,442,031,086,814,618,400 | 35.361217 | 79 | 0.527136 | false |
nilbody/h2o-3 | h2o-py/tests/testdir_golden/pyunit_svd_1_golden.py | 1 | 2402 | from __future__ import print_function
from builtins import zip
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def svd_1_golden():
print("Importing USArrests.csv data...")
arrestsH2O = h2o.upload_file(pyunit_utils.locate("smalldata/pca_test/USArrests.csv"))
print("Compare with SVD")
fitH2O = h2o.svd(x=arrestsH2O[0:4], nv=4, transform="NONE", max_iterations=2000)
print("Compare singular values (D)")
h2o_d = fitH2O._model_json['output']['d']
r_d = [1419.06139509772, 194.825846110138, 45.6613376308754, 18.0695566224677]
print("R Singular Values: {0}".format(r_d))
print("H2O Singular Values: {0}".format(h2o_d))
for r, h in zip(r_d, h2o_d): assert abs(r - h) < 1e-6, "H2O got {0}, but R got {1}".format(h, r)
print("Compare right singular vectors (V)")
h2o_v = h2o.as_list(h2o.get_frame(fitH2O._model_json['output']['v_key']['name']), use_pandas=False)
h2o_v.pop(0)
r_v = [[-0.04239181, 0.01616262, -0.06588426, 0.99679535],
[-0.94395706, 0.32068580, 0.06655170, -0.04094568],
[-0.30842767, -0.93845891, 0.15496743, 0.01234261],
[-0.10963744, -0.12725666, -0.98347101, -0.06760284]]
print("R Right Singular Vectors: {0}".format(r_v))
print("H2O Right Singular Vectors: {0}".format(h2o_v))
for rl, hl in zip(r_v, h2o_v):
for r, h in zip(rl, hl): assert abs(abs(r) - abs(float(h))) < 1e-5, "H2O got {0}, but R got {1}".format(h, r)
print("Compare left singular vectors (U)")
h2o_u = h2o.as_list(h2o.get_frame(fitH2O._model_json['output']['u_key']['name']), use_pandas=False)
h2o_u.pop(0)
r_u = [[-0.1716251, 0.096325710, 0.06515480, 0.15369551],
[-0.1891166, 0.173452566, -0.42665785, -0.17801438],
[-0.2155930, 0.078998111, 0.02063740, -0.28070784],
[-0.1390244, 0.059889811, 0.01392269, 0.01610418],
[-0.2067788, -0.009812026, -0.17633244, -0.21867425],
[-0.1558794, -0.064555293, -0.28288280, -0.11797419]]
print("R Left Singular Vectors: {0}".format(r_u))
print("H2O Left Singular Vectors: {0}".format(h2o_u))
for rl, hl in zip(r_u, h2o_u):
for r, h in zip(rl, hl): assert abs(abs(r) - abs(float(h))) < 1e-5, "H2O got {0}, but R got {1}".format(h, r)
if __name__ == "__main__":
pyunit_utils.standalone_test(svd_1_golden)
else:
svd_1_golden()
| apache-2.0 | -2,250,794,566,595,891,700 | 39.711864 | 117 | 0.61199 | false |
sophacles/invoke | tests/runner.py | 1 | 8258 | import sys
import os
from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.runner import Runner, run
from invoke.exceptions import Failure
from _utils import support, reset_cwd
def _run(returns=None, **kwargs):
"""
Create a Runner w/ retval reflecting ``returns`` & call ``run(**kwargs)``.
"""
# Set up return value tuple for Runner.run
returns = returns or {}
returns.setdefault('exited', 0)
value = map(
lambda x: returns.get(x, None),
('stdout', 'stderr', 'exited', 'exception'),
)
class MockRunner(Runner):
def run(self, command, warn, hide):
return value
# Ensure top level run() uses that runner, provide dummy command.
kwargs['runner'] = MockRunner
return run("whatever", **kwargs)
class Run(Spec):
"run()"
def setup(self):
os.chdir(support)
self.both = "echo foo && ./err bar"
self.out = "echo foo"
self.err = "./err bar"
self.sub = "inv -c pty_output hide_%s"
def teardown(self):
reset_cwd()
class return_value:
def return_code_in_result(self):
"""
Result has .return_code (and .exited) containing exit code int
"""
r = run(self.out, hide='both')
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True, hide='both')
eq_(result.exited, 127)
def stdout_attribute_contains_stdout(self):
eq_(run(self.out, hide='both').stdout, 'foo\n')
def stderr_attribute_contains_stderr(self):
eq_(run(self.err, hide='both').stderr, 'bar\n')
def ok_attr_indicates_success(self):
eq_(_run().ok, True)
eq_(_run(returns={'exited': 1}, warn=True).ok, False)
def failed_attr_indicates_failure(self):
eq_(_run().failed, False)
eq_(_run(returns={'exited': 1}, warn=True).failed, True)
def has_exception_attr(self):
eq_(_run().exception, None)
class failure_handling:
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide='both'))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
def Failure_repr_includes_stderr(self):
try:
run("./err ohnoz && exit 1", hide='both')
                assert False  # should not get here; run() must raise Failure
except Failure as f:
r = repr(f)
assert 'ohnoz' in r, "Sentinel 'ohnoz' not found in %r" % r
class output_controls:
@trap
def _hide_both(self, val):
run(self.both, hide=val)
eq_(sys.stdall.getvalue(), "")
def hide_both_hides_everything(self):
self._hide_both('both')
def hide_True_hides_everything(self):
self._hide_both(True)
@trap
def hide_out_only_hides_stdout(self):
run(self.both, hide='out')
eq_(sys.stdout.getvalue().strip(), "")
eq_(sys.stderr.getvalue().strip(), "bar")
@trap
def hide_err_only_hides_stderr(self):
run(self.both, hide='err')
eq_(sys.stdout.getvalue().strip(), "foo")
eq_(sys.stderr.getvalue().strip(), "")
@trap
def hide_accepts_stderr_alias_for_err(self):
run(self.both, hide='stderr')
eq_(sys.stdout.getvalue().strip(), "foo")
eq_(sys.stderr.getvalue().strip(), "")
@trap
def hide_accepts_stdout_alias_for_out(self):
run(self.both, hide='stdout')
eq_(sys.stdout.getvalue().strip(), "")
eq_(sys.stderr.getvalue().strip(), "bar")
def hide_both_hides_both_under_pty(self):
r = run(self.sub % 'both', hide='both')
eq_(r.stdout, "")
eq_(r.stderr, "")
def hide_out_hides_both_under_pty(self):
r = run(self.sub % 'out', hide='both')
eq_(r.stdout, "")
eq_(r.stderr, "")
def hide_err_has_no_effect_under_pty(self):
r = run(self.sub % 'err', hide='both')
eq_(r.stdout, "foo\r\nbar\r\n")
eq_(r.stderr, "")
@trap
def _no_hiding(self, val):
r = run(self.both, hide=val)
eq_(sys.stdout.getvalue().strip(), "foo")
eq_(sys.stderr.getvalue().strip(), "bar")
def hide_None_hides_nothing(self):
self._no_hiding(None)
def hide_False_hides_nothing(self):
self._no_hiding(False)
@raises(ValueError)
def hide_unknown_vals_raises_ValueError(self):
run("command", hide="what")
def hide_unknown_vals_mention_value_given_in_error(self):
value = "penguinmints"
try:
run("command", hide=value)
except ValueError as e:
msg = "Error from run(hide=xxx) did not tell user what the bad value was!"
msg += "\nException msg: %s" % e
ok_(value in str(e), msg)
else:
assert False, "run() did not raise ValueError for bad hide= value"
def hide_does_not_affect_capturing(self):
eq_(run(self.out, hide='both').stdout, 'foo\n')
class pseudo_terminals:
def return_value_indicates_whether_pty_was_used(self):
eq_(run("true").pty, False)
eq_(run("true", pty=True).pty, True)
def pty_defaults_to_off(self):
eq_(run("true").pty, False)
def complex_nesting_doesnt_break(self):
# GH issue 191
substr = " hello\t\t\nworld with spaces"
cmd = """ eval 'echo "{0}" ' """.format(substr)
# TODO: consider just mocking os.execv here (and in the other
# tests) though that feels like too much of a tautology / testing
# pexpect
expected = ' hello\t\t\r\nworld with spaces\r\n'
eq_(run(cmd, pty=True, hide='both').stdout, expected)
class command_echo:
@trap
def does_not_echo_commands_run_by_default(self):
run("echo hi")
eq_(sys.stdout.getvalue().strip(), "hi")
@trap
def when_echo_True_commands_echoed_in_bold(self):
run("echo hi", echo=True)
expected = "\033[1;37mecho hi\033[0m\nhi"
eq_(sys.stdout.getvalue().strip(), expected)
#
# Random edge/corner case junk
#
def non_stupid_OSErrors_get_captured(self):
# Somehow trigger an OSError saying "Input/output error" within
# pexpect.spawn().interact() & assert it is in result.exception
skip()
def KeyboardInterrupt_on_stdin_doesnt_flake(self):
# E.g. inv test => Ctrl-C halfway => shouldn't get buffer API errors
skip()
class funky_characters_in_stdout:
def basic_nonstandard_characters(self):
# Crummy "doesn't explode with decode errors" test
run("cat tree.out", hide='both')
def nonprinting_bytes(self):
# Seriously non-printing characters (i.e. non UTF8) also don't asplode
# load('funky').derp()
run("echo '\xff'", hide='both')
def nonprinting_bytes_pty(self):
# PTY use adds another utf-8 decode spot which can also fail.
run("echo '\xff'", pty=True, hide='both')
class Local_(Spec):
def setup(self):
os.chdir(support)
self.both = "echo foo && ./err bar"
def teardown(self):
reset_cwd()
def stdout_contains_both_streams_under_pty(self):
r = run(self.both, hide='both', pty=True)
eq_(r.stdout, 'foo\r\nbar\r\n')
def stderr_is_empty_under_pty(self):
r = run(self.both, hide='both', pty=True)
eq_(r.stderr, '')
| bsd-2-clause | -1,536,870,026,090,481,400 | 31.640316 | 90 | 0.541899 | false |
coreymcdermott/artbot | artbot_scraper/spiders/arthouse_spider.py | 1 | 2106 | # -*- coding: utf-8 -*-
import re
from scrapy import Spider, Request
from dateutil import parser
from artbot_scraper.items import EventItem
from pytz import timezone
class ArthouseSpider(Spider):
name = 'Arthouse Gallery'
allowed_domains = ['www.arthousegallery.com.au']
start_urls = ['http://www.arthousegallery.com.au/exhibitions/']
def parse(self, response):
for href in response.xpath('//div[contains(@id, "index")]//li//a/@href'):
url = response.urljoin(href.extract())
yield Request(url, callback=self.parse_exhibition)
def parse_exhibition(self, response):
item = EventItem()
item['url'] = response.url
item['venue'] = self.name
item['title'] = response.xpath('//div[contains(@id, "headerTitle")]//text()').extract_first().strip() \
+ ' - ' \
+ response.xpath('//div[contains(@id, "headerSubTitle")]//em/text()').extract_first().strip()
item['description'] = ''.join(response.xpath('//div[contains(@id, "exhibition")]//hr/following-sibling::p//text()').extract()).strip()
item['image'] = response.urljoin(response.xpath('//img//@src').extract_first())
season = ''.join(response.xpath('//div[contains(@id, "headerSubTitle")]//text()[not(ancestor::em)]').extract()).strip()
match = re.match(u'(?P<start>^[\d\w\s]+)[\s\-\–]*(?P<end>[\d\w\s]+$)', season)
if (match):
tz = timezone('Australia/Sydney')
start = tz.localize(parser.parse(match.group('start'), fuzzy = True))
end = tz.localize(parser.parse(match.group('end'), fuzzy = True))
if (re.match(u'^\d+$', match.group('start'))):
start = start.replace(month=end.month, year=end.year)
if (re.match(u'^\d+\s+\w+$', match.group('start'))):
start = start.replace(year=end.year)
item['start'] = start
item['end'] = end
yield item
| mit | 2,145,790,434,256,513,000 | 43.765957 | 142 | 0.545627 | false |
mmb90/dftintegrate | dftintegrate/fourier/readdata.py | 1 | 5536 | """
Classes::
ReadData -- A collection of functions to collect extracted VASP/QE data
into a json file.
"""
import json
from copy import deepcopy
from ast import literal_eval
from collections import defaultdict
class ReadData(object):
"""
A collection of functions to collect extracted VASP/QE data
into a json file.
Variables::
    name -- A string containing the path to the extracted data.
kmax -- A number that determines how many terms can be used
in the fourier representation based on the density of the
sample points.
kgrid -- A list of lists. Each inner list is a triplet that
represents a k-point. The outer list is the collection of
    the triplets or k-points and therefore represents the k-grid.
Note these are the irreducible k-points.
weights -- A list of floats. Since kgrid represents the
irreducible wedge, each k-point has a weight that
    reflects how degenerate it is. These
are in the same order as their corresponding k-points
in kgrid.
eigenvals -- A dictionary. At each k-point there is an
eigenvalue (energy) for each band that was calculated. The
keys are the band numbers and the values are a list of
energies for that band at each k-point.
symops -- A triple nested list. The outer list is a collection
of matrices that represent the symmetry operators for the
system calculated. The inner double nested lists are
representations of the matrices.
trans -- A list of lists. Each symmetry operator has a
translation vector associated with it. We aren't sure
what they are for but we have them so we can implement
them if we figure it out.
Functions::
_read_lines -- Read a file yielding one line at a time. Generator.
read_kpts_eigenvals -- Read kpts_eigenvals.dat in as a list of
k-points and a dictionary.
read_symops_trans -- Read symops_trans.dat in as two lists.
    read_kmax -- Read kmax in from kmax.dat. For example, one might run
    a calculation with a grid of 4 4 4; in this case k is 4. This
    is needed in the Fourier basis fit to ensure the highest
    frequency term doesn't exceed the Nyquist frequency. This means
    that the highest frequency can't exceed k/2, so if k is 4
    then the highest frequency can't exceed 2. Since we are in 3D we
    have to consider sqrt(x^2+x^2+x^2) < k/2, thus
    x = kmax = ceil(k/(2*sqrt(3))).
serialize -- Serialize the data to a json file.
"""
def __init__(self, name_of_data_directory):
"""
Arguments::
            name_of_data_directory -- A string containing the path to
            the VASP data.
"""
self.name = name_of_data_directory
self.read_kpts_eigenvals()
self.read_symops_trans()
self.read_kmax()
self.serialize()
def _read_lines(self, path_to_file):
"""
Read file, yield line by line.
Arguments::
path_to_file -- String containing the path to the file.
"""
with open(path_to_file) as inf:
for line in inf:
yield [literal_eval(x) for x in line.strip().split()]
def read_kpts_eigenvals(self):
"""
        Read in kpts_eigenvals.dat with _read_lines. Stores the k-points
in kgrid, the weights in weights, and the band energy
(eigenvalues) in eigenvals. See this class's (ReadData)
docstring for more details on kgrid, weights, and eigenvals.
"""
name = self.name
kgrid = []
weights = []
eigenvals = defaultdict(list)
for line in self._read_lines(name + '/kpts_eigenvals.dat'):
if len(line) == 4:
kgrid.append(line[:3])
weights.append(line[-1])
elif len(line) == 2:
eigenvals[line[0]].append(line[1])
self.kgrid = kgrid
self.weights = weights
self.eigenvals = eigenvals
def read_symops_trans(self):
"""
Read in symops_trans.dat with _read_lines. Stores the symmetry
operators in symops and the translations in trans. See this
class's (ReadData) docstring for more details on symops and trans.
"""
name = self.name
symops = []
symop = []
trans = []
lines = self._read_lines(name + '/symops_trans.dat')
for line in lines:
symop.append(line)
symop.append(next(lines))
symop.append(next(lines))
next(lines)
tran = next(lines)
next(lines)
symops.append(deepcopy(symop))
trans.append(tran)
symop.clear()
self.symops = symops
self.trans = trans
def read_kmax(self):
"""
Read in kmax.dat using _read_lines. Only the first line will be
read. It will be assigned to self.kmax.
"""
name = self.name
lines = self._read_lines(name + '/kmax.dat')
self.kmax = next(lines)[0]
def serialize(self):
data_dict = {'kmax': self.kmax, 'kgrid': self.kgrid,
'weights': self.weights, 'eigenvals': self.eigenvals,
'symops': self.symops, 'trans': self.trans}
with open(self.name + '/data.json', mode='w',
encoding='utf-8') as outf:
json.dump(data_dict, outf, indent=2)
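
# Illustrative, self-contained check of the kmax rule described in the class
# docstring (grid size here is hypothetical and unrelated to any real run):
# for a 4 4 4 grid, k = 4, and the largest Fourier index that stays below the
# 3D Nyquist limit is ceil(k/(2*sqrt(3))) = 2.
if __name__ == '__main__':
    from math import ceil, sqrt
    k = 4
    print('k = {}, kmax = {}'.format(k, ceil(k / (2 * sqrt(3)))))  # k = 4, kmax = 2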
| mit | -6,101,243,481,345,637,000 | 34.716129 | 74 | 0.601156 | false |
nkmk/python-snippets | notebook/pypdf2_metadata_remove.py | 1 | 4178 | import PyPDF2
src_pdf = PyPDF2.PdfFileReader('data/src/pdf/sample1.pdf')
dst_pdf = PyPDF2.PdfFileWriter()
dst_pdf.cloneReaderDocumentRoot(src_pdf)
with open('data/temp/sample1_no_meta.pdf', 'wb') as f:
dst_pdf.write(f)
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': 'PyPDF2'}
dst_pdf.addMetadata({'/Producer': ''})
with open('data/temp/sample1_no_meta.pdf', 'wb') as f:
dst_pdf.write(f)
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': ''}
def remove_all_metadata(src_path, dst_path, producer=''):
src_pdf = PyPDF2.PdfFileReader(src_path)
dst_pdf = PyPDF2.PdfFileWriter()
dst_pdf.cloneReaderDocumentRoot(src_pdf)
dst_pdf.addMetadata({'/Producer': producer})
with open(dst_path, 'wb') as f:
dst_pdf.write(f)
remove_all_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': ''}
src_pdf = PyPDF2.PdfFileReader('data/src/pdf/sample1.pdf')
dst_pdf = PyPDF2.PdfFileWriter()
d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys()}
print(d)
# {'/Title': 'sample1', '/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Creator': 'Keynote', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"}
d.pop('/Creator')
d.pop('/Producer')
print(d)
# {'/Title': 'sample1', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"}
dst_pdf.addMetadata(d)
with open('data/temp/sample1_remove_meta.pdf', 'wb') as f:
dst_pdf.write(f)
print(PyPDF2.PdfFileReader('data/temp/sample1_remove_meta.pdf').documentInfo)
# {'/Producer': 'PyPDF2', '/Title': 'sample1', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"}
def remove_metadata(src_path, dst_path, *args, producer=''):
src_pdf = PyPDF2.PdfFileReader(src_path)
dst_pdf = PyPDF2.PdfFileWriter()
dst_pdf.cloneReaderDocumentRoot(src_pdf)
d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys()
if key not in args}
d.setdefault('/Producer', producer)
dst_pdf.addMetadata(d)
with open(dst_path, 'wb') as f:
dst_pdf.write(f)
remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf',
'/Creator', '/ModDate', '/CreationDate')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Title': 'sample1'}
remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf',
'/Creator', '/ModDate', '/CreationDate', '/Producer')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': '', '/Title': 'sample1'}
remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf',
'/Creator', '/ModDate', '/CreationDate', '/Producer', producer='XXX')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': 'XXX', '/Title': 'sample1'}
def select_metadata(src_path, dst_path, *args, producer=''):
src_pdf = PyPDF2.PdfFileReader(src_path)
dst_pdf = PyPDF2.PdfFileWriter()
dst_pdf.cloneReaderDocumentRoot(src_pdf)
d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys()
if key in args}
d.setdefault('/Producer', producer)
dst_pdf.addMetadata(d)
with open(dst_path, 'wb') as f:
dst_pdf.write(f)
select_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf',
'/Title', '/ModDate')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': '', '/Title': 'sample1', '/ModDate': "D:20190114072947Z00'00'"}
select_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf',
'/Title', '/Producer')
print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo)
# {'/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Title': 'sample1'}
| mit | 287,211,407,982,853,760 | 37.485981 | 192 | 0.675085 | false |
endlessm/chromium-browser | third_party/catapult/telemetry/telemetry/internal/actions/scroll_unittest.py | 1 | 11759 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.actions import page_action
from telemetry.internal.actions import scroll
from telemetry.internal.actions import utils
from telemetry.testing import tab_test_case
class ScrollActionTest(tab_test_case.TabTestCase):
def _MakePageVerticallyScrollable(self):
# Make page taller than window so it's scrollable vertically.
self._tab.ExecuteJavaScript(
'document.body.style.height ='
'(3 * __GestureCommon_GetWindowHeight() + 1) + "px";')
def _MakePageHorizontallyScrollable(self):
# Make page wider than window so it's scrollable horizontally.
self._tab.ExecuteJavaScript(
'document.body.style.width ='
'(3 * __GestureCommon_GetWindowWidth() + 1) + "px";')
def setUp(self):
tab_test_case.TabTestCase.setUp(self)
self.Navigate('blank.html')
utils.InjectJavaScript(self._tab, 'gesture_common.js')
def _RunScrollDistanceTest(self, distance, speed, source, maxError):
# TODO(bokan): Distance tests will fail on versions of Chrome that haven't
# been fixed. The fixes landed at the same time as the
# setBrowserControlsShown method was added so only run the test if that's
# available. Once that rolls into ref builds we can remove this check.
distanceFixedInChrome = self._tab.EvaluateJavaScript(
"'setBrowserControlsShown' in chrome.gpuBenchmarking")
if not distanceFixedInChrome:
return
# Hide the URL bar so we can measure scrolled distance without worrying
# about the URL bar consuming delta.
self._tab.ExecuteJavaScript(
'chrome.gpuBenchmarking.setBrowserControlsShown(false);')
    # Make the document tall enough to accommodate the requested distance but
# also leave enough space so we can tell if the scroll overshoots the
# target.
screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')
documentHeight = (screenHeight + distance) * 2
self._tab.ExecuteJavaScript(
'document.body.style.height = "' + str(documentHeight) + 'px";')
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
# Allow for some visual viewport offset. For example, if the test doesn't
# want any visual viewport offset due to animation handoff error between
# the two viewports.
start_offset = self._tab.EvaluateJavaScript('window.visualViewport.pageTop')
i = scroll.ScrollAction(
distance=distance,
direction="down",
speed_in_pixels_per_second=speed,
synthetic_gesture_source=source)
i.WillRunAction(self._tab)
i.RunAction(self._tab)
actual = self._tab.EvaluateJavaScript(
'window.visualViewport.pageTop') - start_offset
# TODO(bokan): setBrowserControlsShown isn't quite enough. Chrome will hide
# the browser controls but then they animate in after a timeout. We'll need
# to add a way to lock them to hidden. Until then, just increase the
# allowed error.
urlBarError = 150
self.assertAlmostEqual(distance, actual, delta=maxError + urlBarError)
@decorators.Disabled('chromeos', 'linux') # crbug.com/1006789
def testScrollDistanceFastTouch(self):
# Just pass the test on platforms that don't support touch (i.e. Mac)
if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
return
    # Scrolling distance for touch will have some error from the excess delta
    # of the event that crosses the slop threshold but isn't applied; scroll
    # resampling can also increase the error amount.
self._RunScrollDistanceTest(
500000, 200000, page_action.GESTURE_SOURCE_TOUCH, 200)
@decorators.Disabled('android') # crbug.com/934649
def testScrollDistanceFastWheel(self):
    # Wheel scrolling will have a much greater error than touch. There are 2
    # reasons: 1) synthetic wheel gestures accumulate the sent deltas and use
# that to determine how much delta to send at each event dispatch time.
# This assumes that the entire sent delta is applied which is wrong due to
# physical pixel snapping which accumulates over the gesture.
# 2) We can only send delta as ticks of the wheel. If the total delta is
# not a multiple of the tick size, we'll "lose" the remainder.
self._RunScrollDistanceTest(
500000, 200000, page_action.GESTURE_SOURCE_MOUSE, 15000)
def testScrollDistanceSlowTouch(self):
# Just pass the test on platforms that don't support touch (i.e. Mac)
if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
return
# Scrolling slowly produces larger error since each event will have a
# smaller delta. Thus error from snapping in each event will be a larger
# share of the total delta.
self._RunScrollDistanceTest(
1000, 300, page_action.GESTURE_SOURCE_TOUCH, 10)
@decorators.Disabled('android') # crbug.com/934649
def testScrollDistanceSlowWheel(self):
self._RunScrollDistanceTest(
1000, 300, page_action.GESTURE_SOURCE_MOUSE, 200)
@decorators.Disabled('android', 'mac') # crbug.com/934649
@decorators.Disabled('chromeos', 'linux') # crbug.com/805523
@decorators.Disabled('win-reference') # crbug.com/805523
def testWheelScrollDistanceWhileZoomed(self):
# TODO(bokan): This API was added recently so only run the test once it's
# available. Remove this check once it rolls into stable builds.
chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
"'setPageScaleFactor' in chrome.gpuBenchmarking")
if not chromeSupportsSetPageScaleFactor:
return
self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')
# Wheel scrolling can cause animated scrolls. This is a problem here since
# Chrome currently doesn't hand off the animation between the visual and
    # layout viewports. To account for this, scroll the visual viewport to its
# maximum extent so that the entire scroll goes to the layout viewport.
screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')
i = scroll.ScrollAction(
distance=screenHeight*2,
direction="down",
speed_in_pixels_per_second=5000,
synthetic_gesture_source=page_action.GESTURE_SOURCE_MOUSE)
i.WillRunAction(self._tab)
i.RunAction(self._tab)
# Ensure the layout viewport isn't scrolled but the visual is.
self.assertGreater(
self._tab.EvaluateJavaScript('window.visualViewport.offsetTop'),
screenHeight / 2 - 1)
self.assertEqual(self._tab.EvaluateJavaScript('window.scrollY'), 0)
self._RunScrollDistanceTest(
2000, 2000, page_action.GESTURE_SOURCE_MOUSE, 60)
def testTouchScrollDistanceWhileZoomed(self):
# Just pass the test on platforms that don't support touch (i.e. Mac)
if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
return
# TODO(bokan): This API was added recently so only run the test once it's
# available. Remove this check once it rolls into stable builds.
chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
"'setPageScaleFactor' in chrome.gpuBenchmarking")
if not chromeSupportsSetPageScaleFactor:
return
self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')
self._RunScrollDistanceTest(
2000, 2000, page_action.GESTURE_SOURCE_TOUCH, 20)
def testScrollAction(self):
self._MakePageVerticallyScrollable()
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
i = scroll.ScrollAction()
i.WillRunAction(self._tab)
self._tab.ExecuteJavaScript("""
window.__scrollAction.beginMeasuringHook = function() {
window.__didBeginMeasuring = true;
};
window.__scrollAction.endMeasuringHook = function() {
window.__didEndMeasuring = true;
};""")
i.RunAction(self._tab)
self.assertTrue(self._tab.EvaluateJavaScript('window.__didBeginMeasuring'))
self.assertTrue(self._tab.EvaluateJavaScript('window.__didEndMeasuring'))
scroll_position = self._tab.EvaluateJavaScript(
'document.scrollingElement.scrollTop')
self.assertTrue(
scroll_position != 0, msg='scroll_position=%d;' % (scroll_position))
# https://github.com/catapult-project/catapult/issues/3099
@decorators.Disabled('android')
@decorators.Disabled('chromeos') # crbug.com/984016
def testDiagonalScrollAction(self):
self._MakePageVerticallyScrollable()
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
self._MakePageHorizontallyScrollable()
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)
i = scroll.ScrollAction(direction='downright')
i.WillRunAction(self._tab)
i.RunAction(self._tab)
viewport_top = self._tab.EvaluateJavaScript(
'document.scrollingElement.scrollTop')
self.assertTrue(viewport_top != 0, msg='viewport_top=%d;' % viewport_top)
viewport_left = self._tab.EvaluateJavaScript(
'document.scrollingElement.scrollLeft')
self.assertTrue(viewport_left != 0, msg='viewport_left=%d;' % viewport_left)
def testBoundingClientRect(self):
# Verify that the rect returned by getBoundingVisibleRect() in scroll.js is
# completely contained within the viewport. Scroll events dispatched by the
# scrolling API use the center of this rect as their location, and this
# location needs to be within the viewport bounds to correctly decide
# between main-thread and impl-thread scroll. If the scrollable area were
# not clipped to the viewport bounds, then the instance used here (the
# scrollable area being more than twice as tall as the viewport) would
# result in a scroll location outside of the viewport bounds.
self._MakePageVerticallyScrollable()
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
self._MakePageHorizontallyScrollable()
self.assertEquals(
self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)
self._tab.ExecuteJavaScript("""
window.scrollTo(__GestureCommon_GetWindowWidth(),
__GestureCommon_GetWindowHeight());""")
rect_top = int(
self._tab.EvaluateJavaScript(
'__GestureCommon_GetBoundingVisibleRect(document.body).top'))
rect_height = int(
self._tab.EvaluateJavaScript(
'__GestureCommon_GetBoundingVisibleRect(document.body).height'))
rect_bottom = rect_top + rect_height
rect_left = int(
self._tab.EvaluateJavaScript(
'__GestureCommon_GetBoundingVisibleRect(document.body).left'))
rect_width = int(
self._tab.EvaluateJavaScript(
'__GestureCommon_GetBoundingVisibleRect(document.body).width'))
rect_right = rect_left + rect_width
viewport_height = int(
self._tab.EvaluateJavaScript('__GestureCommon_GetWindowHeight()'))
viewport_width = int(
self._tab.EvaluateJavaScript('__GestureCommon_GetWindowWidth()'))
self.assertTrue(rect_top >= 0, msg='%s >= %s' % (rect_top, 0))
self.assertTrue(rect_left >= 0, msg='%s >= %s' % (rect_left, 0))
self.assertTrue(
rect_bottom <= viewport_height,
msg='%s + %s <= %s' % (rect_top, rect_height, viewport_height))
self.assertTrue(
rect_right <= viewport_width,
msg='%s + %s <= %s' % (rect_left, rect_width, viewport_width))
| bsd-3-clause | -3,071,362,219,920,244,700 | 42.07326 | 80 | 0.710009 | false |
googleapis/googleapis-gen | google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/services/services/payments_account_service/client.py | 1 | 18740 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import payments_account
from google.ads.googleads.v8.services.types import payments_account_service
from .transports.base import PaymentsAccountServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import PaymentsAccountServiceGrpcTransport
class PaymentsAccountServiceClientMeta(type):
"""Metaclass for the PaymentsAccountService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = OrderedDict() # type: Dict[str, Type[PaymentsAccountServiceTransport]]
_transport_registry['grpc'] = PaymentsAccountServiceGrpcTransport
def get_transport_class(cls,
label: str = None,
) -> Type[PaymentsAccountServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class PaymentsAccountServiceClient(metaclass=PaymentsAccountServiceClientMeta):
"""Service to provide payments accounts that can be used to set
up consolidated billing.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
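    # Worked example (hypothetical endpoint strings): this maps
    # "googleads.googleapis.com" to "googleads.mtls.googleapis.com", and
    # returns endpoints that are already mTLS, or that are not
    # *.googleapis.com domains, unchanged.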
DEFAULT_ENDPOINT = 'googleads.googleapis.com'
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
PaymentsAccountServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
PaymentsAccountServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> PaymentsAccountServiceTransport:
"""Return the transport used by the client instance.
Returns:
PaymentsAccountServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def customer_path(customer_id: str,) -> str:
"""Return a fully-qualified customer string."""
return "customers/{customer_id}".format(customer_id=customer_id, )
@staticmethod
def parse_customer_path(path: str) -> Dict[str,str]:
"""Parse a customer path into its component segments."""
m = re.match(r"^customers/(?P<customer_id>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def payments_account_path(customer_id: str,payments_account_id: str,) -> str:
"""Return a fully-qualified payments_account string."""
return "customers/{customer_id}/paymentsAccounts/{payments_account_id}".format(customer_id=customer_id, payments_account_id=payments_account_id, )
@staticmethod
def parse_payments_account_path(path: str) -> Dict[str,str]:
"""Parse a payments_account path into its component segments."""
m = re.match(r"^customers/(?P<customer_id>.+?)/paymentsAccounts/(?P<payments_account_id>.+?)$", path)
return m.groupdict() if m else {}
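    # Illustrative round trip for the path helpers (the IDs are made up):
    #   payments_account_path("123", "456")
    #       -> "customers/123/paymentsAccounts/456"
    #   parse_payments_account_path("customers/123/paymentsAccounts/456")
    #       -> {"customer_id": "123", "payments_account_id": "456"}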
@staticmethod
def common_billing_account_path(billing_account: str, ) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str,str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str, ) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder, )
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str,str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str, ) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization, )
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str,str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str, ) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project, )
@staticmethod
def parse_common_project_path(path: str) -> Dict[str,str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str, ) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(project=project, location=location, )
@staticmethod
def parse_common_location_path(path: str) -> Dict[str,str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(self, *,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, PaymentsAccountServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the payments account service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.PaymentsAccountServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, PaymentsAccountServiceTransport):
# transport is a PaymentsAccountServiceTransport instance.
if credentials:
raise ValueError('When providing a transport instance, '
'provide its credentials directly.')
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = PaymentsAccountServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
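    # Construction sketch (illustrative; the endpoint override shown is an assumption):
    #   options = client_options_lib.ClientOptions(api_endpoint="googleads.googleapis.com")
    #   client = PaymentsAccountServiceClient(client_options=options)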
def list_payments_accounts(self,
request: payments_account_service.ListPaymentsAccountsRequest = None,
*,
customer_id: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> payments_account_service.ListPaymentsAccountsResponse:
r"""Returns all payments accounts associated with all managers
between the login customer ID and specified serving customer in
the hierarchy, inclusive.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `PaymentsAccountError <>`__
`QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.ListPaymentsAccountsRequest`):
The request object. Request message for fetching all
accessible payments accounts.
customer_id (:class:`str`):
Required. The ID of the customer to
apply the PaymentsAccount list operation
to.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.services.types.ListPaymentsAccountsResponse:
Response message for
[PaymentsAccountService.ListPaymentsAccounts][google.ads.googleads.v8.services.PaymentsAccountService.ListPaymentsAccounts].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([customer_id]):
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a payments_account_service.ListPaymentsAccountsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, payments_account_service.ListPaymentsAccountsRequest):
request = payments_account_service.ListPaymentsAccountsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_payments_accounts]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('customer_id', request.customer_id),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
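    # Minimal call sketch (illustrative; the customer ID is made up and the response
    # iteration assumes the repeated field of ListPaymentsAccountsResponse):
    #   client = PaymentsAccountServiceClient()
    #   response = client.list_payments_accounts(customer_id="1234567890")
    #   for payments_account in response.payments_accounts:
    #       ...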
__all__ = (
'PaymentsAccountServiceClient',
)
| apache-2.0 | -9,072,559,174,270,836,000 | 43.619048 | 154 | 0.631964 | false |
feureau/Small-Scripts | Blender/Blender config/2.91/scripts/addons/bricker_v2-2-1/lib/property_groups/created_model_properties.py | 1 | 35831 | # Copyright (C) 2020 Christopher Gearhart
# [email protected]
# http://bblanimation.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# System imports
# NONE!
# Blender imports
import bpy
from bpy.props import *
# Module imports
from .boolean_properties import *
from ...functions.property_callbacks import *
# Create custom property group
class CreatedModelProperties(bpy.types.PropertyGroup):
# CMLIST ITEM SETTINGS
name = StringProperty(update=uniquify_name)
id = IntProperty()
idx = IntProperty()
# NAME OF SOURCE
source_obj = PointerProperty(
type=bpy.types.Object,
poll=lambda self, object: object.type == "MESH",
name="Source Object",
description="Name of the source object to Brickify",
update=set_default_obj_if_empty,
)
# TRANSFORMATION SETTINGS
model_loc = StringProperty(default="-1,-1,-1")
model_rot = StringProperty(default="-1,-1,-1")
model_scale = StringProperty(default="-1,-1,-1")
transform_scale = FloatProperty(
name="Scale",
description="Scale of the brick model",
update=update_model_scale,
step=1,
default=1.0,
)
apply_to_source_object = BoolProperty(
name="Apply to source",
description="Apply transformations to source object when Brick Model is deleted",
default=True,
)
parent_obj = PointerProperty(
type=bpy.types.Object,
name="Parent Object",
description="Name of the parent object used for model transformations",
)
expose_parent = BoolProperty(
name="Show Manipulator",
description="Expose the parent object for this brick model for viewport manipulation",
update=update_parent_exposure,
default=False,
)
# ANIMATION SETTINGS
use_animation = BoolProperty(
name="Use Animation",
description="Create Brick Model for each frame, from start to stop frame (WARNING: Calculation takes time, and may result in large blend file)",
default=False,
)
start_frame = IntProperty(
name="Start",
description="First frame of the brick animation",
update=dirty_anim,
min=0, max=500000,
default=1,
)
stop_frame = IntProperty(
name="End",
description="Final frame of the brick animation",
update=dirty_anim,
min=0, max=500000,
default=10,
)
step_frame = IntProperty(
name="Step",
description="Number of frames to skip forward when generating the brick animation",
update=dirty_anim,
min=0, max=500000,
default=1,
)
# BASIC MODEL SETTINGS
brick_height = FloatProperty(
name="Brick Height",
description="Height of the bricks in the final Brick Model (in meters, excluding the stud)",
update=dirty_matrix,
subtype="DISTANCE",
step=1,
precision=3,
min = 0.000001,
soft_min=0.001, soft_max=10,
default=0.1,
)
gap = FloatProperty(
name="Gap Between Bricks",
description="Distance between bricks (relative to brick height)",
update=dirty_matrix,
subtype="PERCENTAGE",
step=1,
precision=1,
min=0.0, max=100.0,
default=0.5,
)
split_model = BoolProperty(
name="Split Model",
description="Split model into separate objects (slower)",
update=dirty_model,
default=False,
)
random_loc = FloatProperty(
name="Random Location",
description="Max random location applied to each brick",
update=dirty_model,
step=1,
precision=3,
min=0, soft_max=1,
default=0.01,
)
random_rot = FloatProperty(
name="Random Rotation",
description="Max random rotation applied to each brick",
update=dirty_model,
step=1,
precision=3,
min=0, soft_max=1,
default=0.025,
)
shell_thickness = IntProperty(
name="Shell Thickness",
description="Thickness of the outer shell of bricks",
update=dirty_build,
min=1, max=50,
default=1,
)
# MERGE SETTINGS
merge_type = EnumProperty(
name="Merge Type",
description="Type of algorithm used for merging bricks together",
items=[
# ("NONE", "None (fast)", "Bricks are not merged"),
("GREEDY", "Greedy", "Creates fewest amount of bricks possible"),
("RANDOM", "Random", "Merges randomly for realistic build"),
],
update=dirty_build,
default="RANDOM",
)
legal_bricks_only = BoolProperty(
name="Legal Bricks Only",
description="Construct model using only legal brick sizes",
update=dirty_build,
default=True,
)
merge_seed = IntProperty(
name="Seed",
description="Random seed for brick merging calculations",
update=dirty_build,
min=0,
default=1000,
)
align_bricks = BoolProperty(
name="Align Bricks Horizontally",
description="Keep bricks aligned horizontally, and fill the gaps with plates",
update=dirty_build,
default=True,
)
offset_brick_layers = IntProperty(
name="Offset Brick Layers",
description="Offset the layers that will be merged into bricks if possible",
update=dirty_build,
step=1,
min=0, max=2,
default=0,
)
# SMOKE SETTINGS
smoke_density = FloatProperty(
name="Smoke Density",
description="Density of brickified smoke (threshold for smoke: 1 - d)",
update=dirty_matrix,
min=0, max=1,
default=0.9,
)
smoke_quality = FloatProperty(
name="Smoke Quality",
description="Amount of data to analyze for density and color of brickified smoke",
update=dirty_matrix,
min=1, soft_max=100,
default=1,
)
smoke_brightness = FloatProperty(
name="Smoke Brightness",
description="Add brightness to smoke colors read from smoke data",
update=dirty_matrix,
soft_min=0, soft_max=100,
default=1,
)
smoke_saturation = FloatProperty(
name="Smoke Saturation",
description="Change saturation level of smoke colors read from smoke data",
update=dirty_matrix,
min=0, soft_max=100,
default=1,
)
flame_color = FloatVectorProperty(
name="Hex Value",
subtype="COLOR",
update=dirty_matrix,
default=[1.0, 0.63, 0.2],
)
flame_intensity = FloatProperty(
name="Flame Intensity",
description="Intensity of the flames",
update=dirty_matrix,
min=1, soft_max=50,
default=4,
)
# BRICK TYPE SETTINGS
brick_type = EnumProperty(
name="Brick Type",
description="Type of brick used to build the model",
items=get_brick_type_items,
update=update_brick_type,
# default="BRICKS",
)
max_width = IntProperty(
name="Max Width",
description="Maximum brick width in studs",
update=dirty_build,
step=1,
min=1, soft_max=100,
default=8,
)
max_depth = IntProperty(
name="Max Depth",
description="Maximum brick depth in studs",
update=dirty_build,
step=1,
min=1, soft_max=100,
default=24,
)
custom_object1 = PointerProperty(
type=bpy.types.Object,
poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)),
name="Custom Object 1",
description="Custom object to use as brick type",
)
custom_mesh1 = PointerProperty(
type=bpy.types.Mesh,
name="Custom Mesh 1",
description="Cached mesh from Custom Object 1 with materials applied/transform removed",
)
custom_object2 = PointerProperty(
type=bpy.types.Object,
poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)),
name="Custom Object 2",
description="Custom object to use as brick type",
)
custom_mesh2 = PointerProperty(
type=bpy.types.Mesh,
name="Custom Mesh 2",
description="Cached mesh from Custom Object 2 with materials applied/transform removed",
)
custom_object3 = PointerProperty(
type=bpy.types.Object,
poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)),
name="Custom Object 3",
description="Custom object to use as brick type",
)
custom_mesh3 = PointerProperty(
type=bpy.types.Mesh,
name="Custom Mesh 3",
description="Cached mesh from Custom Object 3 with materials applied/transform removed",
)
dist_offset = FloatVectorProperty(
name="Offset Distance",
description="Offset distance between custom bricks (1.0 = side-by-side)",
update=dirty_matrix,
step=1,
precision=3,
subtype="TRANSLATION",
min=0.001, soft_max=1.0,
default=(1, 1, 1),
)
# BOOLEAN SETTINGS
booleans = CollectionProperty(type=BooleanProperties)
boolean_index = IntProperty(default=-1)
# MATERIAL & COLOR SETTINGS
material_type = EnumProperty(
name="Material Type",
description="Choose what materials will be applied to model",
items=[
("NONE", "None", "No material applied to bricks"),
("CUSTOM", "Single Material", "Choose one material to apply to all generated bricks"),
("RANDOM", "Random", "Apply a random material from Brick materials to each generated brick"),
("SOURCE", "Use Source Materials", "Apply material based on closest intersecting face"),
],
update=dirty_build,
default="SOURCE",
)
custom_mat = PointerProperty(
type=bpy.types.Material,
name="Custom Material",
description="Material to apply to all bricks",
)
internal_mat = PointerProperty(
type=bpy.types.Material,
name="Internal Material",
description="Material to apply to bricks inside material shell",
update=dirty_material,
)
mat_shell_depth = IntProperty(
name="Shell Material Depth",
description="Depth to which the outer materials should be applied (1 = Only exposed bricks)",
step=1,
min=1, max=50,
update=dirty_matrix,
default=1,
)
merge_internals = EnumProperty(
name="Merge Shell with Internals",
description="Merge bricks on shell with internal bricks",
items=[
("NEITHER", "Neither", "Don't merge shell bricks with internals in either direction"),
("HORIZONTAL", "Horizontal", "Merge shell bricks with internals horizontally, but not vertically"),
("VERTICAL", "Vertical", "Merge shell bricks with internals vertically, but not horizontally"),
("BOTH", "Horizontal & Vertical", "Merge shell bricks with internals in both directions"),
],
update=dirty_build,
default="BOTH",
)
random_mat_seed = IntProperty(
name="Seed",
description="Random seed for material assignment",
min=0,
update=dirty_material,
default=1000,
)
use_uv_map = BoolProperty(
name="Use UV Map",
description="Transfer colors from UV map (if disabled or no UV map found, brick color will be based on RGB of first shader node)",
update=dirty_build,
default=True,
)
uv_image = PointerProperty(
type=bpy.types.Image,
name="UV Image",
description="UV Image to use for UV Map color transfer (defaults to active UV if left blank)",
update=dirty_build,
)
color_snap = EnumProperty(
name="Color Mapping",
description="Method for mapping source material(s)/texture(s) to new materials",
items=[
("NONE", "None", "Use source material(s)"),
("RGB", "RGB", "Map RGB values to new materials (similar materials will merge into one material based on threshold)"),
("ABS", "ABS", "Map RGB values to nearest ABS Plastic Materials")
],
update=dirty_build,
default="RGB",
)
color_depth = IntProperty(
name="Color Depth",
description="Number of colors to use in representing the UV texture (2^depth colors are created)",
min=1, max=10,
update=dirty_build,
default=4,
)
blur_radius = IntProperty(
name="Blur Radius",
description="Distance over which to blur the image before sampling",
min=0, max=10,
update=dirty_build,
default=0, # 1
)
color_snap_specular = FloatProperty(
name="Specular",
description="Specular value for the created materials",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=0.5,
)
color_snap_roughness = FloatProperty(
name="Roughness",
description="Roughness value for the created materials",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=0.5,
)
color_snap_sss = FloatProperty(
name="Subsurface Scattering",
description="Subsurface scattering value for the created materials",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=0.0,
)
color_snap_sss_saturation = FloatProperty(
name="SSS Saturation",
description="Saturation of the subsurface scattering for the created materials (relative to base color value)",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=1.0,
)
color_snap_ior = FloatProperty(
name="IOR",
description="IOR value for the created materials",
precision=3,
min=0.0, soft_max=1000.0,
update=dirty_material,
default=1.45,
)
color_snap_transmission = FloatProperty(
name="Transmission",
description="Transmission value for the created materials",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=0.0,
)
color_snap_displacement = FloatProperty(
name="Displacement",
description="Displacement value for the created materials (overrides ABS Plastic displacement value)",
subtype="FACTOR",
precision=3,
min=0.0, soft_max=1.0,
update=dirty_material,
default=0.04,
)
use_abs_template = BoolProperty(
name="Use ABS Template",
description="Use the default ABS Plastic Material node tree to build the RGB materials",
update=dirty_material,
default=True,
)
include_transparency = BoolProperty(
name="Include Transparency",
description="Include alpha value of original material color",
update=dirty_build,
default=True,
)
transparent_weight = FloatProperty(
name="Transparency Weight",
description="How much the original material's alpha value affects the chosen ABS Plastic Material",
precision=1,
min=0, soft_max=2,
update=dirty_material,
default=1,
)
target_material = PointerProperty(
name="Target Material",
type=bpy.types.Material,
description="Add material to materials list",
update=add_material_to_list,
)
target_material_message = StringProperty(
description="Message from target material chosen (warning or success)",
default="",
)
target_material_time = StringProperty( # stored as string because float cuts off digits
description="'str(time.time())' from when the material message was created",
default="0",
)
# BRICK DETAIL SETTINGS
stud_detail = EnumProperty(
name="Stud Detailing",
description="Choose where to draw brick studs",
items=[
("NONE", "None", "Don't include brick studs/logos on bricks"),
("EXPOSED", "Exposed Bricks", "Include brick studs/logos only on bricks with the top exposed"),
("ALL", "All Bricks", "Include brick studs/logos only on bricks with the top exposed"),
],
update=dirty_bricks,
default="EXPOSED",
)
logo_type = EnumProperty(
name="Logo Type",
description="Choose logo type to draw on brick studs",
items=get_logo_types,
update=dirty_bricks,
# default="NONE",
)
logo_resolution = IntProperty(
name="Resolution",
description="Resolution of the brick logo",
update=dirty_bricks,
min=1, soft_max=10,
default=2,
)
logo_decimate = FloatProperty(
name="Decimate",
description="Decimate the brick logo (lower number for higher resolution)",
update=dirty_bricks,
precision=0,
min=0, max=10,
default=7.25,
)
logo_object = PointerProperty(
type=bpy.types.Object,
poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)),
name="Logo Object",
description="Select a custom logo object to use on top of each stud",
update=dirty_bricks,
)
logo_scale = FloatProperty(
name="Logo Scale",
description="Logo scale relative to stud scale",
subtype="PERCENTAGE",
step=1,
update=dirty_bricks,
precision=1,
min=0.0001, soft_max=100.0,
default=78.0,
)
logo_inset = FloatProperty(
name="Logo Inset",
description="How far to inset logo to stud",
subtype="PERCENTAGE",
step=1,
update=dirty_bricks,
precision=1,
soft_min=0.0, soft_max=100.0,
default=50.0,
)
hidden_underside_detail = EnumProperty(
name="Underside Detailing of Obstructed Bricks",
description="Level of detail on underside of bricks with obstructed undersides",
items=[
("FLAT", "Flat", "Draw single face on brick underside", 0),
("LOW", "Low Detail", "Hollow out brick underside and draw tube supports", 1),
("HIGH", "High Detail", "Draw underside of bricks at full detail (support beams, ticks, inset tubes)", 3),
],
update=dirty_bricks,
default="FLAT",
)
exposed_underside_detail = EnumProperty(
name="Underside Detailing of Exposed Bricks",
description="Level of detail on underside of bricks with exposed undersides",
items=[
("FLAT", "Flat", "Draw single face on brick underside", 0),
("LOW", "Low Detail", "Hollow out brick underside and draw tube supports", 1),
("HIGH", "High Detail", "Draw underside of bricks at full detail (support beams, ticks, inset tubes)", 3),
],
update=dirty_bricks,
default="FLAT",
)
circle_verts = IntProperty(
name="Vertices",
description="Number of vertices in each circle in brick mesh",
update=update_circle_verts,
min=4, soft_max=64,
default=16,
)
# BEVEL SETTINGS
bevel_added = BoolProperty(
name="Bevel Bricks",
description="Bevel brick edges and corners for added realism",
default=False,
)
bevel_show_render = BoolProperty(
name="Render",
description="Use modifier during render",
update=update_bevel_render,
default=True,
)
bevel_show_viewport = BoolProperty(
name="Realtime",
description="Display modifier in viewport",
update=update_bevel_viewport,
default=True,
)
bevel_show_edit_mode = BoolProperty(
name="Edit Mode",
description="Display modifier in Edit mode",
update=update_bevel_edit_mode,
default=True,
)
bevel_width = FloatProperty(
name="Bevel Width",
description="Bevel amount (relative to Brick Height)",
subtype="DISTANCE",
step=1,
min=0.0, soft_max=10,
update=update_bevel,
default=0.01,
)
bevel_segments = IntProperty(
name="Bevel Resolution",
description="Number of segments for round edges/verts",
step=1,
min=1, max=100,
update=update_bevel,
default=1,
)
bevel_profile = FloatProperty(
name="Bevel Profile",
description="The profile shape (0.5 = round)",
subtype="FACTOR",
step=1,
min=0.0, max=1.0,
update=update_bevel,
default=0.7,
)
# INTERNAL SUPPORTS SETTINGS
internal_supports = EnumProperty(
name="Internal Supports",
description="Choose what type of brick support structure to use inside your model",
items=[
("NONE", "None", "No internal supports"),
("COLUMNS", "Columns", "Use columns inside model"),
("LATTICE", "Lattice", "Use latice inside model"),
],
update=dirty_internal,
default="NONE",
)
lattice_step = IntProperty(
name="Step",
description="Distance between cross-beams",
update=dirty_internal,
step=1,
min=2, soft_max=100,
default=4,
)
lattice_height = IntProperty(
name="Height",
description="Height of the cross-beams",
update=dirty_internal,
step=1,
min=1, soft_max=100,
default=1,
)
alternate_xy = BoolProperty(
name="Alternate X and Y",
description="Alternate back-and-forth and side-to-side beams",
update=dirty_internal,
default=True,
)
col_thickness = IntProperty(
name="Thickness",
description="Thickness of the columns",
update=dirty_internal,
min=1, soft_max=100,
default=2,
)
col_step = IntProperty(
name="Step",
description="Distance between columns",
update=dirty_internal,
step=1,
min=1, soft_max=100,
default=2,
)
# ADVANCED SETTINGS
insideness_ray_cast_dir = EnumProperty(
name="Insideness Ray Cast Direction",
description="Ray cast method for calculation of insideness",
items=[
("HIGH_EFFICIENCY", "High Efficiency", "Reuses single intersection ray cast for insideness calculation"),
("X", "X", "Cast rays along X axis for insideness calculations"),
("Y", "Y", "Cast rays along Y axis for insideness calculations"),
("Z", "Z", "Cast rays along Z axis for insideness calculations"),
("XYZ", "XYZ (Best Result)", "Cast rays in all axis directions for insideness calculation (slowest; uses result consistent for at least 2 of the 3 rays)"),
],
update=dirty_matrix,
default="HIGH_EFFICIENCY",
)
brick_shell = EnumProperty(
name="Brick Shell",
description="Choose whether the outer shell of bricks will be inside or outside the source mesh",
items=[
("INSIDE", "Inside Mesh", "Draw brick shell inside source mesh (recommended)"),
("OUTSIDE", "Outside Mesh", "Draw brick shell outside source mesh"),
("CONSISTENT", "Consistent", "Draw brick shell on a consistent side of the source mesh topology (may fix noisy model if source mesh is not water-tight)"),
],
update=update_brick_shell,
default="INSIDE",
)
calculation_axes = EnumProperty(
name="Expanded Axes",
description="The brick shell will be drawn on the outside in these directions",
items=[
("XYZ", "XYZ", "XYZ"),
("XY", "XY", "XY"),
("YZ", "YZ", "YZ"),
("XZ", "XZ", "XZ"),
("X", "X", "X"),
("Y", "Y", "Y"),
("Z", "Z", "Z"),
],
update=dirty_matrix,
default="XY",
)
use_normals = BoolProperty(
name="Use Normals",
description="Use normals to calculate insideness of bricks (may improve the result if normals on source mesh are oriented correctly)",
update=dirty_matrix,
default=False,
)
grid_offset = FloatVectorProperty(
name="Grid Offset",
description="Offset the brick grid along the volume of the source mesh (factor of brick dimensions)",
subtype="XYZ",
min=-1, max=1,
update=dirty_matrix,
default=(0, 0, 0),
)
use_absolute_grid = BoolProperty(
name="Absolute Grid Coords",
description="Place bricks on a fixed grid that is consistent between all models",
update=dirty_matrix,
default=False,
)
use_absolute_grid_anim = BoolProperty(
name="Absolute Grid Coords",
description="Place bricks on a fixed grid that is consistent between all models",
update=dirty_matrix,
default=True,
)
calc_internals = BoolProperty(
name="Calculate Internals",
description="Calculate values for bricks inside shell (disable for faster calculation at the loss of the 'Shell Thickness' and 'Supports' features)",
update=dirty_matrix,
default=True,
)
use_local_orient = BoolProperty(
name="Use Local Orient",
description="Generate bricks based on local orientation of source object",
default=False,
)
instance_method = EnumProperty(
name="Instance Method",
description="Method to use for instancing equivalent meshes to save on memory and render times",
items=[
("NONE", "None", "No object instancing"),
("LINK_DATA", "Link Data", "Link mesh data for like objects when 'Split Model' is enabled"),
("POINT_CLOUD", "Point Cloud (experimental)", "Instance a single mesh over a point cloud (this method does not support multiple materials or brick merging)"),
],
update=dirty_build,
default="LINK_DATA",
)
# Deep Cache of bricksdict
bfm_cache = StringProperty(default="")
# Blender State for Undo Stack
blender_undo_state = IntProperty(default=0)
# Back-End UI Properties
active_key = IntVectorProperty(subtype="XYZ", default=(-1,-1,-1))
# Internal Model Properties
model_created = BoolProperty(default=False)
brickifying_in_background = BoolProperty(default=False)
job_progress = IntProperty(
name="",
description="",
subtype="PERCENTAGE",
default=0,
soft_min=0,
soft_max=100,
)
linked_from_external = BoolProperty(default=False)
num_animated_frames = IntProperty(default=0)
completed_frames = StringProperty(default="")
frames_to_animate = IntProperty(default=1)
stop_background_process = BoolProperty(default=False)
animated = BoolProperty(default=False)
armature = BoolProperty(default=False)
zstep = IntProperty(default=3)
parent_obj = PointerProperty(type=bpy.types.Object)
collection = PointerProperty(type=bpy.types.Collection if b280() else bpy.types.Group)
mat_obj_abs = PointerProperty(type=bpy.types.Object)
mat_obj_random = PointerProperty(type=bpy.types.Object)
rgba_vals = StringProperty(default="789c8b8e0500011500b9") # result of `compress_str(json.dumps({}))`
customized = BoolProperty(default=True)
brick_sizes_used = StringProperty(default="") # list of brick_sizes used separated by | (e.g. '5,4,3|7,4,5|8,6,5')
brick_types_used = StringProperty(default="") # list of brick_types used separated by | (e.g. 'PLATE|BRICK|STUD')
model_created_on_frame = IntProperty(default=-1)
is_smoke = BoolProperty(default=False)
has_custom_obj1 = BoolProperty(default=False)
has_custom_obj2 = BoolProperty(default=False)
has_custom_obj3 = BoolProperty(default=False)
# model stats
num_bricks_in_model = IntProperty(default=0)
num_materials_in_model = IntProperty(default=0)
model_weight = IntProperty(default=0)
real_world_dimensions = FloatVectorProperty(
name="Real World Dimensions",
description="",
subtype="XYZ",
unit="LENGTH",
precision=6,
default=(0, 0, 0),
)
# Properties for checking of model needs updating
anim_is_dirty = BoolProperty(default=True)
material_is_dirty = BoolProperty(default=True)
model_is_dirty = BoolProperty(default=True)
build_is_dirty = BoolProperty(default=False)
bricks_are_dirty = BoolProperty(default=True)
matrix_is_dirty = BoolProperty(default=True)
matrix_lost = BoolProperty(default=False)
internal_is_dirty = BoolProperty(default=True)
last_logo_type = StringProperty(default="NONE")
last_split_model = BoolProperty(default=False)
last_start_frame = IntProperty(
name="Last Start",
description="Current start frame of brickified animation",
default=-1,
)
last_stop_frame = IntProperty(
name="Last End",
description="Current end frame of brickified animation",
default=-1,
)
last_step_frame = IntProperty(
name="Last Step",
description="Current number of frames to skip forward when generating brickified animation",
default=-1,
)
last_source_mid = StringProperty(default="-1,-1,-1")
last_material_type = StringProperty(default="SOURCE")
last_use_abs_template = BoolProperty(default=False)
last_shell_thickness = IntProperty(default=1)
last_internal_supports = StringProperty(default="NONE")
last_brick_type = StringProperty(default="BRICKS")
last_instance_method = StringProperty(default="LINK_DATA")
last_matrix_settings = StringProperty(default="")
last_legal_bricks_only = BoolProperty(default=False)
last_mat_shell_depth = IntProperty(default=1)
last_bevel_width = FloatProperty()
last_bevel_segments = IntProperty()
last_bevel_profile = IntProperty()
last_is_smoke = BoolProperty()
# Bricker Version of Model
version = StringProperty(default="1.0.4")
### BACKWARDS COMPATIBILITY
# v1.0
maxBrickScale1 = IntProperty()
maxBrickScale2 = IntProperty()
# v1.3
distOffsetX = FloatProperty()
distOffsetY = FloatProperty()
distOffsetZ = FloatProperty()
# v1.4
logoDetail = StringProperty("NONE")
# v1.5
source_name = StringProperty()
parent_name = StringProperty()
# v1.6
modelLoc = StringProperty(default="-1,-1,-1")
modelRot = StringProperty(default="-1,-1,-1")
modelScale = StringProperty(default="-1,-1,-1")
transformScale = FloatProperty(default=1)
applyToSourceObject = BoolProperty(default=True)
exposeParent = BoolProperty(default=False)
useAnimation = BoolProperty(default=False)
startFrame = IntProperty(default=1)
stopFrame = IntProperty(default=10)
maxWorkers = IntProperty(default=5)
backProcTimeout = FloatProperty(default=0)
brickHeight = FloatProperty(default=0.1)
mergeSeed = IntProperty(default=1000)
connectThresh = IntProperty(default=1)
smokeDensity = FloatProperty(default=0.9)
smokeQuality = FloatProperty(default=1)
smokeBrightness = FloatProperty(default=1)
smokeSaturation = FloatProperty(default=1)
flameColor = FloatVectorProperty(default=[1.0, 0.63, 0.2])
flameIntensity = FloatProperty(default=4)
splitModel = BoolProperty(default=False)
randomLoc = FloatProperty(default=0.01)
randomRot = FloatProperty(default=0.025)
brickShell = StringProperty(default="INSIDE")
calculationAxes = StringProperty(default="XY")
shellThickness = IntProperty(default=1)
brickType = StringProperty(default="BRICKS")
alignBricks = BoolProperty(default=True)
offsetBrickLayers = IntProperty(default=0)
maxWidth = IntProperty(default=2)
maxDepth = IntProperty(default=10)
mergeType = StringProperty(default="RANDOM")
legalBricksOnly = BoolProperty(default=True)
customObject1 = PointerProperty(type=bpy.types.Object)
customObject2 = PointerProperty(type=bpy.types.Object)
customObject3 = PointerProperty(type=bpy.types.Object)
distOffset = FloatVectorProperty(default=(1, 1, 1))
paintbrushMat = PointerProperty(type=bpy.types.Material)
materialType = StringProperty(default="NONE")
customMat = PointerProperty(type=bpy.types.Material)
internalMat = PointerProperty(type=bpy.types.Material)
matShellDepth = IntProperty(default=1)
mergeInternals = StringProperty(default="BOTH")
randomMatSeed = IntProperty(default=1000)
useUVMap = BoolProperty(default=True)
uvImage = PointerProperty(type=bpy.types.Image)
colorSnap = StringProperty(default="RGB")
colorSnapAmount = FloatProperty(default=0.001)
color_snap_amount = FloatProperty(default=0.001)
    colorSnapSpecular = FloatProperty(default=0.5)
    colorSnapRoughness = FloatProperty(default=0.5)
    colorSnapIOR = FloatProperty(default=1.45)
    colorSnapTransmission = FloatProperty(default=0.0)
includeTransparency = BoolProperty(default=True)
transparentWeight = FloatProperty(default=1)
targetMaterial = StringProperty(default="")
studDetail = StringProperty(default="EXPOSED")
logoType = StringProperty(default="NONE")
logoResolution = IntProperty(default=2)
logoDecimate = FloatProperty(default=7.25)
logoScale = FloatProperty(default=78.0)
logoInset = FloatProperty(default=50.0)
hiddenUndersideDetail = StringProperty(default="FLAT")
exposedUndersideDetail = StringProperty(default="FLAT")
circleVerts = IntProperty(default=16)
bevelAdded = BoolProperty(default=False)
bevelShowRender = BoolProperty(default=True)
bevelShowViewport = BoolProperty(default=True)
bevelShowEditmode = BoolProperty(default=True)
bevelWidth = FloatProperty(default=0.01)
bevelSegments = IntProperty(default=1)
bevelProfile = FloatProperty(default=0.7)
internalSupports = StringProperty(default="NONE")
latticeStep = IntProperty(default=4)
latticeHeight = IntProperty(default=1)
    alternateXY = BoolProperty(default=True)
colThickness = IntProperty(default=2)
colStep = IntProperty(default=2)
insidenessRayCastDir = StringProperty(default="HIGH_EFFICIENCY")
useNormals = BoolProperty(default=False)
verifyExposure = BoolProperty(default=False)
calcInternals = BoolProperty(default=True)
useLocalOrient = BoolProperty(default=False)
instanceBricks = BoolProperty(default=True)
BFMCache = StringProperty(default="")
modelCreated = BoolProperty(default=False)
numAnimatedFrames = IntProperty(default=0)
framesToAnimate = IntProperty(default=0)
modelCreatedOnFrame = IntProperty(default=-1)
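# Registration sketch (illustrative; the scene attribute name is an assumption):
#   bpy.utils.register_class(BooleanProperties)
#   bpy.utils.register_class(CreatedModelProperties)
#   bpy.types.Scene.cmlist = CollectionProperty(type=CreatedModelProperties)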
| gpl-3.0 | 8,693,281,229,719,090,000 | 35.674514 | 170 | 0.634395 | false |
xclxxl414/rqalpha | rqalpha/__main__.py | 1 | 14210 | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import sys
import os
import shutil
import six
import click
from importlib import import_module
from rqalpha.utils.click_helper import Date
from rqalpha.utils.config import parse_config, dump_config
CONTEXT_SETTINGS = {
'default_map': {
'run': {
}
}
}
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option('-v', '--verbose', count=True)
@click.help_option('-h', '--help')
@click.pass_context
def cli(ctx, verbose):
ctx.obj["VERBOSE"] = verbose
def inject_mod_commands():
from rqalpha.utils.config import get_mod_conf
from rqalpha.mod import SYSTEM_MOD_LIST
from rqalpha.utils.package_helper import import_mod
mod_config = get_mod_conf()
for mod_name, config in six.iteritems(mod_config['mod']):
if 'lib' in config:
lib_name = config["lib"]
else:
lib_name = "rqalpha_mod_{}".format(mod_name)
if not config['enabled']:
continue
try:
if mod_name in SYSTEM_MOD_LIST:
# inject system mod
import_mod("rqalpha.mod." + lib_name)
else:
# inject third part mod
import_mod(lib_name)
except Exception as e:
pass
def entry_point():
inject_mod_commands()
cli(obj={})
@cli.command()
@click.option('-d', '--data-bundle-path', default=os.path.expanduser('~/.rqalpha'), type=click.Path(file_okay=False))
@click.option('--locale', 'locale', type=click.STRING, default="zh_Hans_CN")
def update_bundle(data_bundle_path, locale):
"""
Sync Data Bundle
"""
from rqalpha import main
main.update_bundle(data_bundle_path, locale)
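# Example invocation (illustrative): rqalpha update_bundle -d ~/.rqalpha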
@cli.command()
@click.help_option('-h', '--help')
# -- Base Configuration
@click.option('-d', '--data-bundle-path', 'base__data_bundle_path', type=click.Path(exists=True))
@click.option('-f', '--strategy-file', 'base__strategy_file', type=click.Path(exists=True))
@click.option('-s', '--start-date', 'base__start_date', type=Date())
@click.option('-e', '--end-date', 'base__end_date', type=Date())
@click.option('-bm', '--benchmark', 'base__benchmark', type=click.STRING, default=None)
@click.option('-mm', '--margin-multiplier', 'base__margin_multiplier', type=click.FLOAT)
@click.option('-a', '--account', 'base__accounts', nargs=2, multiple=True, help="set account type with starting cash")
@click.option('--position', 'base__init_positions', type=click.STRING, help="set init position")
@click.option('-fq', '--frequency', 'base__frequency', type=click.Choice(['1d', '1m', 'tick']))
@click.option('-rt', '--run-type', 'base__run_type', type=click.Choice(['b', 'p', 'r']), default="b")
@click.option('-rp', '--round-price', 'base__round_price', is_flag=True)
@click.option('-mk', '--market', 'base__market', type=click.Choice(['cn', 'hk']), default=None)
@click.option('--resume', 'base__resume_mode', is_flag=True)
@click.option('--source-code', 'base__source_code')
# -- Extra Configuration
@click.option('-l', '--log-level', 'extra__log_level', type=click.Choice(['verbose', 'debug', 'info', 'error', 'none']))
@click.option('--disable-user-system-log', 'extra__user_system_log_disabled', is_flag=True, help='disable user system log stdout')
@click.option('--disable-user-log', 'extra__user_log_disabled', is_flag=True, help='disable user log stdout')
@click.option('--logger', 'extra__logger', nargs=2, multiple=True, help='config logger, e.g. --logger system_log debug')
@click.option('--locale', 'extra__locale', type=click.Choice(['cn', 'en']), default="cn")
@click.option('--extra-vars', 'extra__context_vars', type=click.STRING, help="override context vars")
@click.option("--enable-profiler", "extra__enable_profiler", is_flag=True, help="add line profiler to profile your strategy")
@click.option('--config', 'config_path', type=click.STRING, help="config file path")
# -- Mod Configuration
@click.option('-mc', '--mod-config', 'mod_configs', nargs=2, multiple=True, type=click.STRING, help="mod extra config")
def run(**kwargs):
"""
Start to run a strategy
"""
config_path = kwargs.get('config_path', None)
if config_path is not None:
config_path = os.path.abspath(config_path)
kwargs.pop('config_path')
if not kwargs.get('base__securities', None):
kwargs.pop('base__securities', None)
from rqalpha import main
source_code = kwargs.get("base__source_code")
cfg = parse_config(kwargs, config_path=config_path, click_type=True, source_code=source_code)
source_code = cfg.base.source_code
results = main.run(cfg, source_code=source_code)
# store results into ipython when running in ipython
from rqalpha.utils import is_run_from_ipython
if results is not None and is_run_from_ipython():
import IPython
from rqalpha.utils import RqAttrDict
ipy = IPython.get_ipython()
report = results.get("sys_analyser", {})
ipy.user_global_ns["results"] = results
ipy.user_global_ns["report"] = RqAttrDict(report)
if results is None:
sys.exit(1)
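# Example invocation (illustrative; the strategy file, dates and starting cash are assumptions):
#   rqalpha run -f examples/buy_and_hold.py -s 2015-01-01 -e 2016-01-01 \
#       -a stock 100000 -bm 000300.XSHG -fq 1d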
@cli.command()
@click.option('-d', '--directory', default="./", type=click.Path(), required=True)
def examples(directory):
"""
Generate example strategies to target folder
"""
source_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "examples")
try:
shutil.copytree(source_dir, os.path.join(directory, "examples"))
except OSError as e:
if e.errno == errno.EEXIST:
six.print_("Folder examples is exists.")
@cli.command()
@click.option('-v', '--verbose', is_flag=True)
def version(**kwargs):
"""
Output Version Info
"""
from rqalpha import version_info
six.print_("Current Version: ", version_info)
@cli.command()
@click.option('-d', '--directory', default="./", type=click.Path(), required=True)
def generate_config(directory):
"""
Generate default config file
"""
default_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.yml")
target_config_path = os.path.abspath(os.path.join(directory, 'config.yml'))
shutil.copy(default_config, target_config_path)
six.print_("Config file has been generated in", target_config_path)
# For Mod Cli
@cli.command(context_settings=dict(
ignore_unknown_options=True,
))
@click.help_option('-h', '--help')
@click.argument('cmd', nargs=1, type=click.Choice(['list', 'enable', 'disable', 'install', 'uninstall']))
@click.argument('params', nargs=-1)
def mod(cmd, params):
"""
Mod management command
rqalpha mod list \n
rqalpha mod install xxx \n
rqalpha mod uninstall xxx \n
rqalpha mod enable xxx \n
rqalpha mod disable xxx \n
"""
def list(params):
"""
        List all mod configurations
"""
from tabulate import tabulate
from rqalpha.utils.config import get_mod_conf
mod_config = get_mod_conf()
table = []
for mod_name, mod in six.iteritems(mod_config['mod']):
table.append([
mod_name,
("enabled" if mod['enabled'] else "disabled")
])
headers = [
"name",
"status"
]
six.print_(tabulate(table, headers=headers, tablefmt="psql"))
six.print_("You can use `rqalpha mod list/install/uninstall/enable/disable` to manage your mods")
def install(params):
"""
Install third-party Mod
"""
try:
from pip._internal import main as pip_main
from pip._internal.commands.install import InstallCommand
except ImportError:
from pip import main as pip_main
from pip.commands.install import InstallCommand
params = [param for param in params]
options, mod_list = InstallCommand().parse_args(params)
mod_list = [mod_name for mod_name in mod_list if mod_name != "."]
params = ["install"] + params
for mod_name in mod_list:
mod_name_index = params.index(mod_name)
if mod_name.startswith("rqalpha_mod_sys_"):
six.print_('System Mod can not be installed or uninstalled')
return
if "rqalpha_mod_" in mod_name:
lib_name = mod_name
else:
lib_name = "rqalpha_mod_" + mod_name
params[mod_name_index] = lib_name
# Install Mod
installed_result = pip_main(params)
# Export config
from rqalpha.utils.config import load_yaml, user_mod_conf_path
user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}}
if installed_result == 0:
            # A return value of 0 means the installation succeeded
if len(mod_list) == 0:
"""
                This mainly makes local Mod debugging via `pip install -e .` convenient.
                The following conditions must be met:
                1. The `rqalpha mod install -e .` command is run from the root directory of the custom Mod.
                2. The Mod must contain a `setup.py` file (otherwise it cannot be installed with `pip install -e .` at all).
                3. The Mod package name must follow the RQAlpha naming conventions:
                * The package name must start with `rqalpha-mod-`, e.g. `rqalpha-mod-xxx-yyy`
                * The imported library name must start with `rqalpha_mod_` and match the rest of the package name with `-` replaced by `_`, e.g. `rqalpha_mod_xxx_yyy`
"""
mod_name = _detect_package_name_from_dir(params)
mod_name = mod_name.replace("-", "_").replace("rqalpha_mod_", "")
mod_list.append(mod_name)
for mod_name in mod_list:
if "rqalpha_mod_" in mod_name:
mod_name = mod_name.replace("rqalpha_mod_", "")
if "==" in mod_name:
mod_name = mod_name.split('==')[0]
user_conf['mod'][mod_name] = {}
user_conf['mod'][mod_name]['enabled'] = False
dump_config(user_mod_conf_path(), user_conf)
return installed_result
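    # Illustrative outcome: running `rqalpha mod install -e .` inside a directory
    # named "rqalpha-mod-xxx-yyy" installs the package and records mod "xxx_yyy"
    # (disabled) in the user mod config.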
def uninstall(params):
"""
Uninstall third-party Mod
"""
try:
from pip._internal import main as pip_main
from pip._internal.commands.uninstall import UninstallCommand
except ImportError:
# be compatible with pip < 10.0
from pip import main as pip_main
from pip.commands.uninstall import UninstallCommand
params = [param for param in params]
options, mod_list = UninstallCommand().parse_args(params)
params = ["uninstall"] + params
for mod_name in mod_list:
mod_name_index = params.index(mod_name)
if mod_name.startswith("rqalpha_mod_sys_"):
six.print_('System Mod can not be installed or uninstalled')
return
if "rqalpha_mod_" in mod_name:
lib_name = mod_name
else:
lib_name = "rqalpha_mod_" + mod_name
params[mod_name_index] = lib_name
# Uninstall Mod
uninstalled_result = pip_main(params)
# Remove Mod Config
from rqalpha.utils.config import user_mod_conf_path, load_yaml
user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}}
for mod_name in mod_list:
if "rqalpha_mod_" in mod_name:
mod_name = mod_name.replace("rqalpha_mod_", "")
del user_conf['mod'][mod_name]
dump_config(user_mod_conf_path(), user_conf)
return uninstalled_result
def enable(params):
"""
enable mod
"""
mod_name = params[0]
if "rqalpha_mod_" in mod_name:
mod_name = mod_name.replace("rqalpha_mod_", "")
        # check whether it is installed
module_name = "rqalpha_mod_" + mod_name
if module_name.startswith("rqalpha_mod_sys_"):
module_name = "rqalpha.mod." + module_name
try:
import_module(module_name)
except ImportError:
installed_result = install([module_name])
if installed_result != 0:
return
from rqalpha.utils.config import user_mod_conf_path, load_yaml
user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}}
try:
user_conf['mod'][mod_name]['enabled'] = True
except KeyError:
user_conf['mod'][mod_name] = {'enabled': True}
dump_config(user_mod_conf_path(), user_conf)
def disable(params):
"""
disable mod
"""
mod_name = params[0]
if "rqalpha_mod_" in mod_name:
mod_name = mod_name.replace("rqalpha_mod_", "")
from rqalpha.utils.config import user_mod_conf_path, load_yaml
user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}}
try:
user_conf['mod'][mod_name]['enabled'] = False
except KeyError:
user_conf['mod'][mod_name] = {'enabled': False}
dump_config(user_mod_conf_path(), user_conf)
locals()[cmd](params)
def _detect_package_name_from_dir(params):
setup_path = os.path.join(os.path.abspath(params[-1]), 'setup.py')
if not os.path.exists(setup_path):
return None
return os.path.split(os.path.dirname(setup_path))[1]
if __name__ == '__main__':
entry_point()
| apache-2.0 | 1,797,748,913,422,962,200 | 34.845758 | 130 | 0.602266 | false |
kim135797531/opencog-python-blending | opencog_b/python/blending/util/blending_util.py | 1 | 1884 | # coding=utf-8
__author__ = 'DongMin Kim'
from opencog.type_constructors import *
# Choose atoms which are connected to specific atom.
def get_incoming_nodes(a, target):
ret = []
xget_target_link = a.xget_atoms_by_target_atom(types.Link, target)
for link in xget_target_link:
xget_target_link_node = a.xget_outgoing(link.h)
for node in xget_target_link_node:
if node.h != target.h:
ret.append(node)
return ret
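# Illustrative behaviour: with (ListLink (ConceptNode "A") (ConceptNode "B")) in
# atomspace a, get_incoming_nodes(a, ConceptNode("A")) returns [ConceptNode("B")].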
def get_weighted_tv(atoms):
"""
    Make a new TruthValue by evaluating the weighted average of the existing
    links' TruthValues.
    This is an implementation of the idea written by Ben Goertzel:
    https://groups.google.com/forum/#!topic/opencog/fa5c4yE8YdU
    :param list(EqualLinkKey) atoms: List of EqualLinkKey atoms from which the
    weighted-average TruthValue is computed.
    :rtype TruthValue: New truth value.
"""
if len(atoms) < 2:
raise UserWarning(
"Weighted TruthValue can't be evaluated with small size."
)
mean_sum = 0
weighted_strength_sum = 0
confidence_sum = 0
link_count = 0
for atom in atoms:
weighted_strength_sum += (atom.tv.confidence * atom.tv.mean)
confidence_sum += atom.tv.confidence
link_count += 1
try:
new_strength = weighted_strength_sum / confidence_sum
except ZeroDivisionError:
        # Fall back to the arithmetic mean; the given atoms may not have TruthValues.
for atom in atoms:
mean_sum += atom.tv.mean
new_strength = mean_sum / link_count
    # TODO: Currently, the confidence value for the new blended node is just
    # the average of the old values.
    # The conflict-value correction should use a value with a proper rationale,
    # not a simple average.
new_confidence = confidence_sum / link_count
return TruthValue(new_strength, new_confidence)
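# Minimal usage sketch (illustrative; any objects exposing a ``tv`` TruthValue work,
# e.g. nodes created after binding the type constructors to an AtomSpace):
#   n1 = ConceptNode("car", TruthValue(0.8, 0.9))
#   n2 = ConceptNode("man", TruthValue(0.6, 0.5))
#   blended_tv = get_weighted_tv([n1, n2])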
| agpl-3.0 | 70,699,895,936,959,990 | 28.451613 | 77 | 0.652245 | false |
elysium001/zamboni | sites/s3dev/settings_base.py | 1 | 5495 | """private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from mkt.settings import (CACHE_PREFIX, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ALLOWED_HOSTS = ['.allizom.org', '.mozflare.net']
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['default']['ATOMIC_REQUESTS'] = True
DATABASES['default']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections.
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
DATABASES['slave']['ATOMIC_REQUESTS'] = True
DATABASES['slave']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections.
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
}
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
# Celery
BROKER_URL = private.BROKER_URL
CELERY_ALWAYS_EAGER = False
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
WEBSITE_ICONS_PATH = UPLOADS_PATH + '/website_icons'
FEATURED_APP_BG_PATH = UPLOADS_PATH + '/featured_app_background'
FEED_COLLECTION_BG_PATH = UPLOADS_PATH + '/feed_collection_background'
FEED_SHELF_BG_PATH = UPLOADS_PATH + '/feed_shelf_background'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
WEBAPP_PROMO_IMG_PATH = UPLOADS_PATH + '/webapp_promo_imgs'
WEBSITE_PROMO_IMG_PATH = UPLOADS_PATH + '/website_promo_imgs'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
EXTENSIONS_PATH = NETAPP_STORAGE + '/extensions'
SIGNED_EXTENSIONS_PATH = NETAPP_STORAGE + '/signed-extensions'
LOGGING['loggers'].update({
'amqp': {'level': logging.WARNING},
'raven': {'level': logging.WARNING},
'requests': {'level': logging.WARNING},
'z.addons': {'level': logging.DEBUG},
'z.elasticsearch': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
'z.task': {'level': logging.DEBUG},
'z.users': {'level': logging.DEBUG},
})
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-s3dev'
ES_DEFAULT_NUM_REPLICAS = 2
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_s3dev' % v) for k, v in ES_INDEXES.items())
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = False
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
CLEANCSS_BIN = 'cleancss'
LESS_BIN = 'lessc'
STYLUS_BIN = 'stylus'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 540
VALIDATOR_TIMEOUT = 180
LESS_PREPROCESS = True
XSENDFILE = True
ALLOW_SELF_REVIEWS = True
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
MONOLITH_SERVER = 'https://monolith-dev.allizom.org'
GEOIP_URL = 'https://geo-dev-marketplace.allizom.org'
AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY
AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME
RAISE_ON_SIGNAL_ERROR = True
API_THROTTLE = False
NEWRELIC_ENABLED_LIST = ['dev1.addons.phx1.mozilla.com',
'dev2.addons.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_ENABLED_LIST
AES_KEYS = private.AES_KEYS
TASK_USER_ID = 4757633
SERVE_TMP_PATH = False
| bsd-3-clause | 6,948,503,791,888,337,000 | 28.86413 | 79 | 0.66697 | false |
bbuchalter/python_koans | python2/koans/about_modules.py | 1 | 2488 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This is very different from AboutModules in Ruby Koans
# Our AboutMultipleInheritance class is a little more comparable
#
from runner.koan import *
from another_local_module import *
from local_module_with_all_defined import *
class AboutModules(Koan):
def test_importing_other_python_scripts_as_modules(self):
import local_module # local_module.py
duck = local_module.Duck()
self.assertEqual("Daffy", duck.name)
def test_importing_attributes_from_classes_using_from_keyword(self):
from local_module import Duck
duck = Duck() # no module qualifier needed this time
self.assertEqual("Daffy", duck.name)
def test_we_can_import_multiple_items_at_once(self):
import jims, joes
jims_dog = jims.Dog()
joes_dog = joes.Dog()
self.assertEqual("jims dog", jims_dog.identify())
self.assertEqual("joes dog", joes_dog.identify())
def test_importing_all_module_attributes_at_once(self):
"""
importing all attributes at once is done like so:
from another_local_module import *
The import wildcard cannot be used from within classes or functions.
"""
goose = Goose()
hamster = Hamster()
self.assertEqual("Mr Stabby", goose.name)
self.assertEqual("Phil", hamster.name)
def test_modules_hide_attributes_prefixed_by_underscores(self):
try:
private_squirrel = _SecretSquirrel()
except NameError as ex:
self.assertMatch("not defined", ex[0])
def test_private_attributes_are_still_accessible_in_modules(self):
from local_module import Duck # local_module.py
duck = Duck()
self.assertEqual("password", duck._password)
# module level attribute hiding doesn't affect class attributes
# (unless the class itself is hidden).
def test_a_modules_XallX_statement_limits_what_wildcards_will_match(self):
"""Examine results of from local_module_with_all_defined import *"""
# 'Goat' is on the __all__ list
goat = Goat()
self.assertEqual("George", goat.name)
# How about velociraptors?
lizard = _Velociraptor()
self.assertEqual("Cuddles", lizard.name)
# SecretDuck? Never heard of her!
try:
duck = SecretDuck()
except NameError as ex:
self.assertMatch("not defined", ex[0])
| mit | 2,705,349,621,464,801,000 | 30.897436 | 78 | 0.639068 | false |
HunanTV/redis-ctl | test/http.py | 1 | 10950 | import json
import redistrib.command as comm
import base
from models.base import db
from models.proxy import Proxy
from models.cluster import Cluster
import models.task
class HttpRequest(base.TestCase):
def test_http(self):
with self.app.test_client() as client:
self.assertEqual({'nodes': [], 'proxies': []},
self.app.polling_targets())
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertReqStatus(200, r)
self.assertEqual({
'nodes': [{
'host': '127.0.0.1',
'port': 7100,
'suppress_alert': 1,
}],
'proxies': [],
}, self.app.polling_targets())
r = client.post('/cluster/add', data={
'descr': 'the-quick-brown-fox',
})
self.assertReqStatus(200, r)
cluster_id = r.data
r = client.post('/task/launch', data=json.dumps({
'cluster': cluster_id,
'nodes': [{
'host': '127.0.0.1',
'port': 7100,
}],
}))
self.assertReqStatus(200, r)
self.exec_all_tasks()
self.assertRaises(ValueError, comm.quit_cluster, '127.0.0.1', 7100)
comm.shutdown_cluster('127.0.0.1', 7100)
def test_cluster(self):
with self.app.test_client() as client:
comm.start_cluster('127.0.0.1', 7100)
r = client.get('/cluster/autodiscover?host=127.0.0.1&port=7100')
self.assertReqStatus(200, r)
result = json.loads(r.data)
self.assertTrue(result['cluster_discovered'])
nodes = result['nodes']
self.assertEqual(1, len(nodes))
self.assertEqual({
'host': '127.0.0.1',
'port': 7100,
'role': 'master',
'known': False,
}, nodes[0])
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertReqStatus(200, r)
r = client.post('/cluster/autojoin', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertReqStatus(200, r)
cluster_id = r.data
r = client.post('/cluster/set_info', data={
'cluster_id': cluster_id,
'descr': '.',
})
self.assertReqStatus(200, r)
r = client.post('/cluster/register_proxy', data={
'cluster_id': cluster_id,
'host': '127.0.0.1',
'port': '8889',
})
self.assertReqStatus(200, r)
r = list(db.session.query(Proxy).all())
self.assertEqual(1, len(r))
self.assertEqual('127.0.0.1', r[0].host)
self.assertEqual(8889, r[0].port)
self.assertEqual(1, r[0].suppress_alert)
self.assertEqual(int(cluster_id), r[0].cluster_id)
r = list(db.session.query(Cluster).all())
self.assertEqual(1, len(r))
self.assertEqual('.', r[0].description)
r = client.post('/cluster/set_info', data={
'cluster_id': cluster_id,
'descr': 'xyzw',
})
self.assertReqStatus(200, r)
r = list(db.session.query(Cluster).all())
self.assertEqual(1, len(r))
self.assertEqual('xyzw', r[0].description)
comm.shutdown_cluster('127.0.0.1', 7100)
def test_cluster_with_multiple_nodes(self):
with self.app.test_client() as client:
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertReqStatus(200, r)
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7101',
})
self.assertReqStatus(200, r)
r = client.post('/cluster/add', data={
'descr': 'the-quick-brown-fox',
})
self.assertReqStatus(200, r)
cluster_id = r.data
r = client.post('/task/launch', data=json.dumps({
'cluster': cluster_id,
'nodes': [{
'host': '127.0.0.1',
'port': 7100,
}],
}))
self.assertReqStatus(200, r)
self.exec_all_tasks()
r = client.post('/task/join', data=json.dumps({
'cluster_id': cluster_id,
'nodes': [{
'host': '127.0.0.1',
'port': 7101,
}],
}))
self.assertReqStatus(200, r)
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(1, len(nodes))
tasks = list(models.task.undone_tasks())
self.assertEqual(1, len(tasks))
self.exec_all_tasks()
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(2, len(nodes))
self.assertEqual(16384, len(node_7100.assigned_slots))
r = client.post('/task/migrate_slots', data={
'src_host': '127.0.0.1',
'src_port': 7100,
'dst_host': '127.0.0.1',
'dst_port': 7101,
'slots': '8192,8193,8194,8195',
})
self.assertReqStatus(200, r)
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(2, len(nodes))
self.assertEqual(16384, len(node_7100.assigned_slots))
tasks = list(models.task.undone_tasks())
self.assertEqual(1, len(tasks))
self.exec_all_tasks()
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(2, len(nodes))
self.assertEqual(16380, len(node_7100.assigned_slots))
r = client.post('/task/quit', data=json.dumps({
'host': '127.0.0.1',
'port': 7101,
'migratings': [{
'host': '127.0.0.1',
'port': 7100,
'slots': [8192, 8193, 8194, 8195],
}],
}))
self.assertReqStatus(200, r)
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(2, len(nodes))
tasks = list(models.task.undone_tasks())
self.assertEqual(1, len(tasks))
self.exec_all_tasks()
nodes, node_7100 = comm.list_nodes('127.0.0.1', 7100)
self.assertEqual(1, len(nodes))
comm.shutdown_cluster('127.0.0.1', 7100)
def test_set_alarm(self):
with self.app.test_client() as client:
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertEqual(200, r.status_code)
r = client.post('/redis/add', data={
'host': '127.0.0.1',
'port': '7101',
})
self.assertEqual(200, r.status_code)
r = client.post('/set_alarm/redis', data={
'host': '127.0.0.1',
'port': '7100',
'suppress': '0',
})
self.assertEqual(200, r.status_code)
r = client.post('/cluster/autojoin', data={
'host': '127.0.0.1',
'port': '7100',
})
self.assertEqual(200, r.status_code)
cluster_id = r.data
r = client.post('/cluster/set_info', data={
'cluster_id': cluster_id,
'descr': '.',
})
self.assertEqual(200, r.status_code)
r = client.post('/cluster/register_proxy', data={
'cluster_id': cluster_id,
'host': '127.0.0.1',
'port': '8889',
})
self.assertEqual(200, r.status_code)
self.app.write_polling_targets()
with open(self.app.polling_file, 'r') as fin:
polls = json.loads(fin.read())
self.assertEqual(2, len(polls['nodes']))
poll_nodes = sorted(polls['nodes'],
key=lambda n: '%s:%d' % (n['host'], n['port']))
n = poll_nodes[0]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(7100, n['port'])
self.assertEqual(0, n['suppress_alert'])
n = poll_nodes[1]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(7101, n['port'])
self.assertEqual(1, n['suppress_alert'])
self.assertEqual(1, len(polls['proxies']))
poll_proxies = sorted(
polls['proxies'],
key=lambda n: '%s:%d' % (n['host'], n['port']))
n = poll_proxies[0]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(8889, n['port'])
self.assertEqual(1, n['suppress_alert'])
r = client.post('/set_alarm/redis', data={
'host': '127.0.0.1',
'port': '7101',
'suppress': '0',
})
self.assertEqual(200, r.status_code)
r = client.post('/set_alarm/redis', data={
'host': '127.0.0.1',
'port': '7102',
'suppress': '0',
})
self.assertEqual(400, r.status_code)
self.assertEqual({'reason': 'no such node'}, json.loads(r.data))
self.app.write_polling_targets()
with open(self.app.polling_file, 'r') as fin:
polls = json.loads(fin.read())
self.assertEqual(2, len(polls['nodes']))
poll_nodes = sorted(polls['nodes'],
key=lambda n: '%s:%d' % (n['host'], n['port']))
n = poll_nodes[0]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(7100, n['port'])
self.assertEqual(0, n['suppress_alert'])
n = poll_nodes[1]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(7101, n['port'])
self.assertEqual(0, n['suppress_alert'])
self.assertEqual(1, len(polls['proxies']))
poll_proxies = sorted(
polls['proxies'],
key=lambda n: '%s:%d' % (n['host'], n['port']))
n = poll_proxies[0]
self.assertEqual('127.0.0.1', n['host'])
self.assertEqual(8889, n['port'])
self.assertEqual(1, n['suppress_alert'])
| mit | 3,597,076,686,152,082,400 | 33.651899 | 79 | 0.454064 | false |
dagargo/phatty | phatty/editor.py | 1 | 32769 | # -*- coding: utf-8 -*-
#
# Copyright 2017 David García Goñi
#
# This file is part of Phatty.
#
# Phatty is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phatty is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Phatty. If not, see <http://www.gnu.org/licenses/>.
"""Phatty user interface"""
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GObject
from gi.repository import GLib
from threading import Thread, Lock
import logging
import pkg_resources
from phatty import connector
from phatty.connector import ConnectorError
from phatty import preset
from phatty import utils
import sys
import getopt
import mido
GLib.threads_init()
CONN_MSG = 'Connected (firmware version {:s})'
ERROR_IN_BANK_TRANSFER = 'Error in bank transfer {:s}'
ERROR_WHILE_SAVING_DATA = 'Error while saving data to {:s}'
ERROR_WHILE_READING_DATA = 'Error while reading data from {:s}'
glade_file = pkg_resources.resource_filename(__name__, 'resources/gui.glade')
init_preset_file = pkg_resources.resource_filename(
__name__, 'resources/init_preset.syx')
version = pkg_resources.get_distribution(utils.APP_NAME).version
def print_help():
print('Usage: {:s} [-v]'.format(utils.APP_NAME))
log_level = logging.ERROR
try:
opts, args = getopt.getopt(sys.argv[1:], "hv")
except getopt.GetoptError:
print_help()
sys.exit(1)
for opt, arg in opts:
if opt == '-h':
print_help()
sys.exit()
elif opt == '-v':
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logger = logging.getLogger(__name__)
utils.create_config()
builder = Gtk.Builder()
builder.add_from_file(glade_file)
class TransferDialog(object):
def __init__(self, parent):
self.dialog = builder.get_object('transfer_dialog')
self.label = builder.get_object('transfer_label')
self.progressbar = builder.get_object('progressbar')
self.button = builder.get_object('transfer_cancel')
self.dialog.set_transient_for(parent)
self.dialog.connect('delete-event', lambda widget,
event: self.cancel() or True)
self.button.connect('clicked', lambda widget: self.cancel())
def show(self, title):
self.dialog.set_title(title)
self.running = True
self.progressbar.set_fraction(0)
self.dialog.show()
def show_fraction(self, title):
self.button.show()
self.show(title)
def show_pulse(self, title):
self.label.set_text('')
self.button.hide()
self.pulsating = True
self.show(title)
def cancel(self):
self.running = False
self.pulsating = False
def hide(self):
self.dialog.hide()
def set_status(self, msg, fraction):
self.label.set_text(msg)
self.progressbar.set_fraction(fraction)
def pulse_progressbar(self):
self.progressbar.pulse()
if self.running:
return self.pulsating
else:
return False
class SettingsDialog(object):
def __init__(self, phatty):
self.phatty = phatty
self.dialog = builder.get_object('settings_dialog')
self.accept = builder.get_object('settings_accept_button')
self.cancel = builder.get_object('settings_cancel_button')
self.bulk_switch = builder.get_object('bulk_switch')
self.auto_switch = builder.get_object('auto_switch')
self.dialog.set_transient_for(phatty.main_window)
self.dialog.connect('delete-event', lambda widget,
event: widget.hide() or True)
self.cancel.connect('clicked', lambda widget: self.dialog.hide())
self.accept.connect('clicked', lambda widget: self.save())
def show(self):
self.bulk_switch.set_active(self.phatty.config[utils.BULK_ON])
self.auto_switch.set_active(self.phatty.config[utils.DOWNLOAD_AUTO])
self.dialog.show()
def save(self):
self.phatty.config[utils.BULK_ON] = self.bulk_switch.get_active()
self.phatty.config[utils.DOWNLOAD_AUTO] = self.auto_switch.get_active()
self.dialog.hide()
class Editor(object):
"""Phatty user interface"""
def __init__(self):
self.connector = connector.Connector()
self.main_window = None
self.sysex_presets = []
self.config = utils.read_config()
self.transferring = Lock()
def load_devices(self, select):
self.device_liststore.clear()
i = 0
found = -1
for port in connector.get_ports():
logger.debug('Adding port {:s}...'.format(port))
self.device_liststore.append([port])
if self.config[utils.DEVICE] == port and select:
logger.debug('Port {:s} is active'.format(port))
found = i
i += 1
self.device_combo.set_active(found)
if select and self.device_combo.get_active() == -1:
self.ui_reconnect()
def set_ui_config(self):
self.configuring = True
self.load_devices(True)
self.lfo_midi_sync.set_state(self.config[utils.LFO_MIDI_SYNC])
self.configuring = False
def save_config(self):
logger.debug('Configuration: {:s}'.format(str(self.config)))
utils.write_config(self.config)
def init_ui(self):
self.main_window = builder.get_object('main_window')
self.main_window.connect(
'delete-event', lambda widget, event: self.quit())
self.main_window.set_position(Gtk.WindowPosition.CENTER)
self.main_container = builder.get_object('main_container')
self.about_dialog = builder.get_object('about_dialog')
self.about_dialog.set_version(version)
self.device_combo = builder.get_object('device_combo')
self.device_combo.connect('changed', lambda widget: self.set_device())
self.device_liststore = builder.get_object('device_liststore')
self.refresh_button = builder.get_object('refresh_button')
self.refresh_button.connect(
'clicked', lambda widget: self.load_devices(False))
        # These are global parameters; they are neither stored in presets
        # nor can their values be recalled
self.lfo_midi_sync = builder.get_object('lfo_midi_sync')
self.lfo_midi_sync.connect('state-set', lambda widget, state: self.set_lfo_midi_sync(state))
self.download_button = builder.get_object('download_button')
self.download_button.connect(
'clicked', lambda widget: self.download_presets())
self.upload_button = builder.get_object('upload_button')
self.upload_button.connect(
'clicked', lambda widget: self.upload_presets())
self.upload_button.set_sensitive(False)
self.about_button = builder.get_object('about_button')
self.about_button.connect('clicked', lambda widget: self.show_about())
self.preferences_button = builder.get_object('preferences_button')
self.preferences_button.connect(
'clicked', lambda widget: self.settings_dialog.show())
self.open_button = builder.get_object('open_button')
self.open_button.connect(
'clicked', lambda widget: self.open_bank_from_file())
self.save_button = builder.get_object('save_button')
self.save_button.connect(
'clicked', lambda widget: self.save_bank_to_file())
self.statusbar = builder.get_object('statusbar')
self.context_id = self.statusbar.get_context_id(utils.APP_NAME)
self.preset_list = builder.get_object('preset_list')
self.presets = builder.get_object('preset_liststore')
self.preset_selection = builder.get_object('preset_selection')
self.presets.connect('row-deleted', self.row_deleted)
self.preset_selection.connect('changed', self.selection_changed)
self.preset_name_renderer = builder.get_object(
'preset_name_renderer')
self.preset_name_renderer.connect('edited', self.set_preset_name)
self.transfer_dialog = TransferDialog(self.main_window)
self.settings_dialog = SettingsDialog(self)
# Filter and envelopes
self.filter_poles = builder.get_object('filter_poles')
self.filter_poles.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_filter_poles,
int(self.filter_poles.get_value() - 1)))
self.vel_to_filter = builder.get_object('vel_to_filter')
self.vel_to_filter.connect('value-changed', lambda widget: self.call_connector(
self.connector.set_panel_vel_to_filter,
int(self.vel_to_filter.get_value() + 8)))
self.vel_to_amp = builder.get_object('vel_to_amp')
self.vel_to_amp.connect('value-changed', lambda widget: self.call_connector(
self.connector.set_panel_vel_to_amp,
int(self.vel_to_amp.get_value())))
self.release = builder.get_object('release')
self.release.connect('state-set', lambda widget, state: self.call_connector(
self.connector.set_panel_release,
1 if state else 0))
# Keyboard and controls
self.scale = builder.get_object('scale')
self.scale.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_scale,
self.scale.get_active()))
self.pw_up_amount = builder.get_object('pw_up_amount')
self.pw_up_amount.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_pw_up_amount,
self.pw_up_amount.get_active()))
self.pw_down_amount = builder.get_object('pw_down_amount')
self.pw_down_amount.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_pw_down_amount,
self.pw_down_amount.get_active()))
self.legato = builder.get_object('legato')
self.legato.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_legato,
self.legato.get_active()))
self.keyboard_priority = builder.get_object('keyboard_priority')
self.keyboard_priority.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_keyboard_priority,
self.keyboard_priority.get_active()))
self.glide_on_legato = builder.get_object('glide_on_legato')
self.glide_on_legato.connect('state-set', lambda widget, state: self.call_connector(
self.connector.set_panel_glide_on_legato,
1 if state else 0))
# Modulation parameters
self.mod_source_5 = builder.get_object('mod_source_5')
self.mod_source_5.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_mod_source_5,
self.mod_source_5.get_active()))
self.mod_source_6 = builder.get_object('mod_source_6')
self.mod_source_6.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_mod_source_6,
self.mod_source_6.get_active()))
self.mod_dest_2 = builder.get_object('mod_dest_2')
self.mod_dest_2.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_mod_dest_2,
self.mod_dest_2.get_active()))
self.lfo_key_retrigger = builder.get_object('lfo_key_retrigger')
self.lfo_key_retrigger.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_lfo_key_retrigger,
self.lfo_key_retrigger.get_active()))
# Arpeggiator
self.arp_pattern = builder.get_object('arp_pattern')
self.arp_pattern.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_pattern,
self.arp_pattern.get_active()))
self.arp_mode = builder.get_object('arp_mode')
self.arp_mode.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_mode,
self.arp_mode.get_active()))
self.arp_octaves = builder.get_object('arp_octaves')
self.arp_octaves.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_octaves,
int(self.arp_octaves.get_value() + 3)))
self.arp_gate = builder.get_object('arp_gate')
self.arp_gate.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_gate,
self.arp_gate.get_active()))
self.arp_clock_source = builder.get_object('arp_clock_source')
self.arp_clock_source.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_clock_source,
self.arp_clock_source.get_active()))
self.arp_clock_division = builder.get_object('arp_clock_division')
self.arp_clock_division.connect('changed', lambda widget: self.call_connector(
self.connector.set_panel_arp_clock_division,
self.arp_clock_division.get_active()))
# Preset buttons
self.download_panel = builder.get_object('download_panel')
self.download_panel.connect(
'clicked', lambda widget: self.get_panel())
self.download_preset = builder.get_object('download_preset')
self.download_preset.connect(
'clicked', lambda widget: self.get_preset())
self.upload_preset = builder.get_object('upload_preset')
self.upload_preset.connect('clicked', lambda widget: self.set_preset())
self.save_preset = builder.get_object('save_preset')
self.save_preset.connect(
'clicked', lambda widget: self.save_current_preset())
self.open_preset = builder.get_object('open_preset')
self.open_preset.connect(
'clicked', lambda widget: self.open_into_current_preset())
self.reset_preset = builder.get_object('reset_preset')
self.reset_preset.connect(
'clicked', lambda widget: self.reset_current_preset())
self.filter_syx = Gtk.FileFilter()
self.filter_syx.set_name('MIDI sysex')
self.filter_syx.add_pattern('*.' + preset.FILE_EXTENSION)
self.filter_syx.add_pattern('*.' + preset.FILE_EXTENSION_EX)
self.filter_any = Gtk.FileFilter()
self.filter_any.set_name('Any files')
self.filter_any.add_pattern('*')
self.main_window.present()
def get_panel(self):
model, iter = self.preset_selection.get_selected()
active_preset = model[iter][0]
try:
panel = self.connector.get_panel_as_preset(active_preset)
self.sysex_presets[active_preset] = panel
self.presets[active_preset][1] = preset.get_name(panel)
self.set_preset_attributes(active_preset)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def get_preset(self):
model, iter = self.preset_selection.get_selected()
active_preset = model[iter][0]
try:
p = self.connector.get_preset(active_preset)
self.sysex_presets[active_preset] = p
self.presets[active_preset][1] = preset.get_name(p)
self.set_preset_attributes(active_preset)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def set_preset(self):
model, iter = self.preset_selection.get_selected()
active_preset = model[iter][0]
try:
self.connector.tx_message(self.sysex_presets[active_preset])
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def save_current_preset(self):
model, iter = self.preset_selection.get_selected()
active_preset = model[iter][0]
def_filename = model[iter][1].strip() + '.syx'
dialog = Gtk.FileChooserDialog('Save as', self.main_window,
Gtk.FileChooserAction.SAVE,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_SAVE, Gtk.ResponseType.OK))
Gtk.FileChooser.set_do_overwrite_confirmation(dialog, True)
dialog.add_filter(self.filter_syx)
dialog.add_filter(self.filter_any)
dialog.set_current_name(def_filename)
response = dialog.run()
filename = dialog.get_filename()
dialog.destroy()
if response == Gtk.ResponseType.OK:
try:
data = self.sysex_presets[active_preset]
self.connector.write_data_to_file(filename, data)
except IOError as e:
msg = ERROR_WHILE_SAVING_DATA.format(filename)
desc = str(e)
GLib.idle_add(self.show_error_dialog, msg, desc)
def open_into_current_preset(self):
dialog = Gtk.FileChooserDialog('Open', self.main_window,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
dialog.add_filter(self.filter_syx)
dialog.add_filter(self.filter_any)
response = dialog.run()
filename = dialog.get_filename()
dialog.destroy()
if response == Gtk.ResponseType.OK:
self.override_preset(filename)
def override_preset(self, filename):
logger.debug('Overriding selected preset with file {:s}'.format(filename))
try:
data = self.connector.read_data_from_file(filename)
model, iter = self.preset_selection.get_selected()
active_preset = model[iter][0]
preset.set_number(data, active_preset)
self.sysex_presets[active_preset] = data
model[iter][1] = preset.get_name(data)
self.set_preset_attributes(active_preset)
self.connector.tx_message(data)
except (IOError, ConnectorError) as e:
msg = ERROR_WHILE_READING_DATA.format(filename)
desc = str(e)
GLib.idle_add(self.show_error_dialog, msg, desc)
def reset_current_preset(self):
self.override_preset(init_preset_file)
def set_preset_attributes(self, id):
active_preset = self.sysex_presets[id]
# Filter and amp
filter_poles = preset.get_filter_poles(active_preset)
self.filter_poles.set_value(filter_poles + 1)
vel_to_filter = preset.get_vel_to_filter(active_preset) - 8
self.vel_to_filter.set_value(vel_to_filter)
vel_to_amp = preset.get_vel_to_amp(active_preset)
self.vel_to_amp.set_value(vel_to_amp)
release = preset.get_release(active_preset)
self.release.set_active(True if release == 1 else False)
# Keyboard and controls
scale = preset.get_scale(active_preset)
self.scale.set_active(scale)
pw_up_amount = preset.get_pw_up_amount(active_preset)
self.pw_up_amount.set_active(pw_up_amount)
pw_down_amount = preset.get_pw_down_amount(active_preset)
self.pw_down_amount.set_active(pw_down_amount)
legato = preset.get_legato(active_preset)
self.legato.set_active(legato)
keyboard_priority = preset.get_keyboard_priority(active_preset)
self.keyboard_priority.set_active(keyboard_priority)
glide_on_legato = preset.get_glide_on_legato(active_preset)
self.glide_on_legato.set_active(
True if glide_on_legato == 1 else False)
# Modulation
mod_source_5 = preset.get_mod_source_5(active_preset)
self.mod_source_5.set_active(mod_source_5)
mod_source_6 = preset.get_mod_source_6(active_preset)
self.mod_source_6.set_active(mod_source_6)
mod_dest_2 = preset.get_mod_dest_2(active_preset)
self.mod_dest_2.set_active(mod_dest_2)
lfo_key_retrigger = preset.get_lfo_key_retrigger(active_preset)
self.lfo_key_retrigger.set_active(lfo_key_retrigger)
# Arpeggiator
arp_pattern = preset.get_arp_pattern(active_preset)
self.arp_pattern.set_active(arp_pattern)
arp_mode = preset.get_arp_mode(active_preset)
self.arp_mode.set_active(arp_mode)
arp_octaves = preset.get_arp_octaves(active_preset)
self.arp_octaves.set_value(arp_octaves - 3)
arp_gate = preset.get_arp_gate(active_preset)
self.arp_gate.set_active(arp_gate)
arp_clock_source = preset.get_arp_clock_source(active_preset)
self.arp_clock_source.set_active(arp_clock_source)
arp_clock_division = preset.get_arp_clock_division(active_preset)
self.arp_clock_division.set_active(arp_clock_division)
def selection_changed(self, selection):
model, iter = selection.get_selected()
if iter:
id = model[iter][0]
logger.debug('Preset {:d} selected'.format(id))
try:
self.connector.set_preset(id)
self.set_preset_attributes(id)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def row_deleted(self, tree_model, path):
if not self.transferring.locked():
logger.debug('Reordering...')
new_sysex_presets = []
for i in range(connector.MAX_PRESETS):
sysex_preset = self.sysex_presets[self.presets[i][0]]
preset.set_number(sysex_preset, i)
new_sysex_presets.append(sysex_preset)
self.presets[i][0] = i
self.sysex_presets = new_sysex_presets
def set_preset_name(self, widget, row, name):
logger.debug('Changing preset name...')
active_preset = int(row)
normalized_name = preset.normalize_name(name)
self.presets[active_preset][1] = normalized_name
preset.set_name(self.sysex_presets[active_preset], normalized_name)
try:
self.connector.set_panel_name(normalized_name)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def connect(self):
device = self.config[utils.DEVICE]
self.connector.connect(device, self.connect_callback)
if self.connector.connected():
conn_msg = CONN_MSG.format(self.connector.sw_version)
self.set_status_msg(conn_msg)
else:
self.set_status_msg('Not connected')
def ui_reconnect(self):
self.connect()
self.set_ui()
def set_ui(self):
if self.connector.connected():
if self.config[utils.DOWNLOAD_AUTO]:
self.download_presets()
self.set_sensitivities()
def set_sensitivities(self):
for c in [self.open_button, self.save_button, self.download_button, self.lfo_midi_sync]:
c.set_sensitive(self.connector.connected())
for c in [self.main_container, self.upload_button, self.download_panel, self.download_preset, self.upload_preset, self.save_preset, self.open_preset, self.reset_preset]:
c.set_sensitive(self.connector.connected()
and len(self.sysex_presets) > 0)
def download_presets(self):
logger.debug('Starting download thread...')
self.preset_selection.unselect_all()
self.transfer_dialog.show_fraction('Downloading presets')
self.transferring.acquire()
self.presets.clear()
self.sysex_presets.clear()
self.thread = Thread(target=self.do_download)
self.thread.start()
def do_download(self):
try:
for i in range(connector.MAX_PRESETS):
if not self.transfer_dialog.running:
logger.debug('Cancelling download...')
break
msg = 'Downloading preset {:d}...'.format(i)
logger.debug(msg)
fraction = (i + 1) / connector.MAX_PRESETS
GLib.idle_add(self.transfer_dialog.set_status, msg, fraction)
p = self.connector.get_preset(i)
preset_name = preset.get_name(p)
GLib.idle_add(self.add_preset, i, preset_name)
self.sysex_presets.append(p)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
GLib.idle_add(self.end_download)
def add_preset(self, number, name):
self.presets.append([number, name])
def end_download(self):
if not self.transfer_dialog.running:
self.presets.clear()
self.sysex_presets.clear()
self.thread.join()
logger.debug('Thread finished')
self.upload_button.set_sensitive(len(self.sysex_presets) > 0)
self.transferring.release()
self.transfer_dialog.hide()
self.preset_list.set_cursor(0)
self.set_sensitivities()
def upload_presets(self):
logger.debug('Starting upload thread...')
self.transfer_dialog.show_fraction("Uploading presets")
self.transferring.acquire()
self.thread = Thread(target=self.do_upload)
self.thread.start()
def do_upload(self):
try:
for i in range(connector.MAX_PRESETS):
if not self.transfer_dialog.running:
logger.debug('Cancelling upload...')
break
msg = 'Uploading preset {:d}...'.format(i)
logger.debug(msg)
fraction = (i + 1) / connector.MAX_PRESETS
GLib.idle_add(self.transfer_dialog.set_status, msg, fraction)
self.connector.tx_message(self.sysex_presets[i])
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
GLib.idle_add(self.end_upload)
def end_upload(self):
self.thread.join()
logger.debug('Thread finished')
self.transferring.release()
self.transfer_dialog.hide()
def set_status_msg(self, msg):
logger.info(msg)
if self.main_window:
self.statusbar.pop(self.context_id)
self.statusbar.push(self.context_id, msg)
def open_bank_from_file(self):
dialog = Gtk.FileChooserDialog('Open', self.main_window,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
dialog.add_filter(self.filter_syx)
dialog.add_filter(self.filter_any)
response = dialog.run()
filename = dialog.get_filename()
dialog.destroy()
if response == Gtk.ResponseType.OK:
self.transfer_dialog.show_pulse('Sending bank')
GLib.timeout_add(50, self.transfer_dialog.pulse_progressbar)
self.thread = Thread(
target=self.set_bank_from_file, args=(filename,))
self.thread.start()
def set_bank_from_file(self, filename):
try:
self.connector.set_bank_from_file(filename)
self.cancel_and_hide_transfer()
GLib.idle_add(self.download_presets)
except (ValueError) as e:
self.cancel_and_hide_transfer()
msg = ERROR_IN_BANK_TRANSFER.format(filename)
desc = str(e)
GLib.idle_add(self.show_error_dialog, msg, desc)
except ConnectorError as e:
self.cancel_and_hide_transfer()
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def save_bank_to_file(self):
type = 'bulk' if self.config[utils.BULK_ON] else 'bank'
title = 'Receiving {:s}'.format(type)
self.transfer_dialog.show_pulse(title)
GLib.timeout_add(50, self.transfer_dialog.pulse_progressbar)
self.thread = Thread(target=self.get_bank_and_save)
self.thread.start()
def get_bank_and_save(self):
try:
if self.config[utils.BULK_ON]:
data = self.connector.get_bulk()
def_filename = 'bulk.' + preset.FILE_EXTENSION
else:
data = self.connector.get_bank()
def_filename = 'bank.' + preset.FILE_EXTENSION
self.cancel_and_hide_transfer()
GLib.idle_add(self.ask_filename_and_save, def_filename, data)
except ConnectorError as e:
self.cancel_and_hide_transfer()
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def ask_filename_and_save(self, def_filename, data):
dialog = Gtk.FileChooserDialog('Save as', self.main_window,
Gtk.FileChooserAction.SAVE,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_SAVE, Gtk.ResponseType.OK))
Gtk.FileChooser.set_do_overwrite_confirmation(dialog, True)
dialog.add_filter(self.filter_syx)
dialog.add_filter(self.filter_any)
dialog.set_current_name(def_filename)
response = dialog.run()
filename = dialog.get_filename()
dialog.destroy()
if response == Gtk.ResponseType.OK:
try:
self.connector.write_data_to_file(filename, data)
except IOError as e:
msg = ERROR_WHILE_SAVING_DATA.format(filename)
desc = str(e)
GLib.idle_add(self.show_error_dialog, msg, desc)
def cancel_and_hide_transfer(self):
self.transfer_dialog.cancel()
GLib.idle_add(self.transfer_dialog.hide)
GLib.idle_add(self.thread.join)
def show_error_dialog(self, msg, desc):
dialog = Gtk.MessageDialog(self.main_window,
flags=Gtk.DialogFlags.MODAL,
type=Gtk.MessageType.ERROR,
buttons=Gtk.ButtonsType.OK,
message_format=msg)
dialog.connect(
'response', lambda widget, response: widget.destroy())
dialog.format_secondary_text(desc)
logger.error(desc)
dialog.run()
dialog.destroy()
def call_connector(self, method, *args):
logger.debug('Calling connector {:s}...'.format(str(method)))
try:
method(*args)
except ConnectorError as e:
GLib.idle_add(self.show_error_dialog, str(e), None)
self.ui_reconnect()
def show_about(self):
self.about_dialog.run()
self.about_dialog.hide()
def set_device(self):
active = self.device_combo.get_active()
if active > -1:
device = self.device_liststore[active][0]
else:
device = ''
self.config[utils.DEVICE] = device
self.ui_reconnect()
def connect_callback(self, message):
self.call_connector(self.connector.set_lfo_midi_sync, 1 if self.config[utils.LFO_MIDI_SYNC] else 0)
if message.type == 'program_change':
program = message.program
logger.debug('Preset {:d} selected'.format(program))
if program >= 0 and program < connector.MAX_PRESETS and self.presets:
self.preset_selection.disconnect_by_func(
self.selection_changed)
self.preset_list.set_cursor(program)
self.preset_selection.connect(
'changed', self.selection_changed)
def set_lfo_midi_sync(self, state):
self.config[utils.LFO_MIDI_SYNC] = state
if not self.configuring:
self.call_connector(self.connector.set_lfo_midi_sync, 1 if state else 0)
def quit(self):
logger.debug('Quitting...')
self.connector.disconnect()
self.main_window.hide()
Gtk.main_quit()
def main(self):
self.init_ui()
self.set_ui_config()
Gtk.main()
self.save_config()
| gpl-3.0 | -616,318,847,269,640,600 | 41.776762 | 177 | 0.612842 | false |
rackerlabs/qonos | qonos/openstack/common/rpc/dispatcher.py | 1 | 5310 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Code for rpc message dispatching.
Messages that come in have a version number associated with them. RPC API
version numbers are in the form:
Major.Minor
For a given message with version X.Y, the receiver must be marked as able to
handle messages of version A.B, where:
A = X
B >= Y
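For example, a receiver marked as able to handle version 2.3 can accept
messages of versions 2.0 through 2.3, but not 2.4 or 3.0.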
The Major version number would be incremented for an almost completely new API.
The Minor version number would be incremented for backwards compatible changes
to an existing API. A backwards compatible change could be something like
adding a new method, adding an argument to an existing method (but not
requiring it), or changing the type for an existing argument (but still
handling the old type as well).
The conversion over to a versioned API must be done on both the client side and
server side of the API at the same time. However, as the code stands today,
there can be both versioned and unversioned APIs implemented in the same code
base.
EXAMPLES
========
Nova was the first project to use versioned rpc APIs. Consider the compute rpc
API as an example. The client side is in nova/compute/rpcapi.py and the server
side is in nova/compute/manager.py.
Example 1) Adding a new method.
-------------------------------
Adding a new method is a backwards compatible change. It should be added to
nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to
X.Y+1. On the client side, the new method in nova/compute/rpcapi.py should
have a specific version specified to indicate the minimum API version that must
be implemented for the method to be supported. For example::
def get_host_uptime(self, ctxt, host):
topic = _compute_topic(self.topic, ctxt, host, None)
return self.call(ctxt, self.make_msg('get_host_uptime'), topic,
version='1.1')
In this case, version '1.1' is the first version that supported the
get_host_uptime() method.
Example 2) Adding a new parameter.
----------------------------------
Adding a new parameter to an rpc method can be made backwards compatible. The
RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped.
The implementation of the method must not expect the parameter to be present.::
def some_remote_method(self, arg1, arg2, newarg=None):
# The code needs to deal with newarg=None for cases
# where an older client sends a message without it.
pass
On the client side, the same changes should be made as in example 1. The
minimum version that supports the new parameter should be specified.
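As a sketch, the client side might look like the following (the method name
and the assumption that '1.2' is the first server version accepting the new
argument are illustrative, not part of any real API)::
    def some_remote_method(self, ctxt, arg1, arg2, newarg=None):
        return self.call(ctxt,
                         self.make_msg('some_remote_method', arg1=arg1,
                                       arg2=arg2, newarg=newarg),
                         self.topic, version='1.2')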
"""
from qonos.openstack.common.rpc import common as rpc_common
class RpcDispatcher(object):
"""Dispatch rpc messages according to the requested API version.
This class can be used as the top level 'manager' for a service. It
contains a list of underlying managers that have an API_VERSION attribute.
"""
def __init__(self, callbacks):
"""Initialize the rpc dispatcher.
:param callbacks: List of proxy objects that are an instance
of a class with rpc methods exposed. Each proxy
object should have an RPC_API_VERSION attribute.
"""
self.callbacks = callbacks
super(RpcDispatcher, self).__init__()
def dispatch(self, ctxt, version, method, **kwargs):
"""Dispatch a message based on a requested version.
:param ctxt: The request context
:param version: The requested API version from the incoming message
:param method: The method requested to be called by the incoming
message.
:param kwargs: A dict of keyword arguments to be passed to the method.
:returns: Whatever is returned by the underlying method that gets
called.
"""
if not version:
version = '1.0'
had_compatible = False
for proxyobj in self.callbacks:
if hasattr(proxyobj, 'RPC_API_VERSION'):
rpc_api_version = proxyobj.RPC_API_VERSION
else:
rpc_api_version = '1.0'
is_compatible = rpc_common.version_is_compatible(rpc_api_version,
version)
had_compatible = had_compatible or is_compatible
if not hasattr(proxyobj, method):
continue
if is_compatible:
return getattr(proxyobj, method)(ctxt, **kwargs)
if had_compatible:
raise AttributeError("No such RPC function '%s'" % method)
else:
raise rpc_common.UnsupportedRpcVersion(version=version)
| apache-2.0 | 3,698,796,998,904,787,500 | 37.478261 | 79 | 0.673446 | false |
Mirantis/disk_perf_test_tool | wally/test_run_class.py | 1 | 1980 | from typing import List, Callable, Any, Dict, Optional, Set
from concurrent.futures import ThreadPoolExecutor
from cephlib.istorage import IStorage
from cephlib.node import NodeInfo, IRPCNode
from cephlib.ssh import ConnCreds
from cephlib.storage_selectors import DevRolesConfig
from .openstack_api import OSCreds, OSConnection
from .config import Config
from .result_classes import IWallyStorage
class TestRun:
"""Test run information"""
def __init__(self, config: Config, storage: IStorage, rstorage: IWallyStorage) -> None:
# NodesInfo list
self.nodes_info: Dict[str, NodeInfo] = {}
self.ceph_master_node: Optional[IRPCNode] = None
self.ceph_extra_args: Optional[str] = None
# Nodes list
self.nodes: List[IRPCNode] = []
self.build_meta: Dict[str,Any] = {}
self.clear_calls_stack: List[Callable[['TestRun'], None]] = []
# openstack credentials
self.os_creds: Optional[OSCreds] = None # type: ignore
self.os_connection: Optional[OSConnection] = None # type: ignore
self.rpc_code: bytes = None # type: ignore
self.default_rpc_plugins: Dict[str, bytes] = None # type: ignore
self.storage = storage
self.rstorage = rstorage
self.config = config
self.sensors_run_on: Set[str] = set()
self.os_spawned_nodes_ids: List[int] = None # type: ignore
self.devs_locator: DevRolesConfig = []
def get_pool(self):
return ThreadPoolExecutor(self.config.get('worker_pool_sz', 32))
def merge_node(self, creds: ConnCreds, roles: Set[str], **params) -> NodeInfo:
info = NodeInfo(creds, roles, params)
nid = info.node_id
if nid in self.nodes_info:
self.nodes_info[nid].roles.update(info.roles)
self.nodes_info[nid].params.update(info.params)
return self.nodes_info[nid]
else:
self.nodes_info[nid] = info
return info
| apache-2.0 | 7,912,155,741,855,254,000 | 35 | 91 | 0.64798 | false |
vmahuli/contrail-controller | src/opserver/partition_handler.py | 1 | 28794 | #!/usr/bin/python
from gevent import monkey
monkey.patch_all()
import logging
import gevent
from gevent.coros import BoundedSemaphore
from kafka import KafkaClient, KeyedProducer, SimpleConsumer, common
from uveserver import UVEServer
import os
import json
import copy
import traceback
import uuid
import struct
import socket
import discoveryclient.client as client
from sandesh_common.vns.constants import ALARM_PARTITION_SERVICE_NAME
from pysandesh.util import UTCTimestampUsec
import select
import redis
from collections import namedtuple
PartInfo = namedtuple("PartInfo",["ip_address","instance_id","acq_time","port"])
def sse_pack(d):
"""Pack data in SSE format"""
buffer = ''
for k in ['event','data']:
if k in d.keys():
buffer += '%s: %s\n' % (k, d[k])
return buffer + '\n'
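# Example (illustrative): sse_pack({'event': 'update', 'data': '{"k": 1}'})
# returns the string 'event: update\ndata: {"k": 1}\n\n'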
class UveCacheProcessor(gevent.Greenlet):
def __init__(self, logger, q, partitions):
gevent.Greenlet.__init__(self)
self._logger = logger
self._q = q
self._partkeys = {}
for partno in range(0,partitions):
self._partkeys[partno] = set()
self._uvedb = {}
def get_uve(self, key, filters=None, is_alarm=False):
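        # 'filters' is expected to be a dict of the form (illustrative):
        #   {'cfilt': {'UVEAlarms': set(['alarms'])}, 'ackfilt': 'false'}
        # where 'cfilt' maps UVE struct names to sets of attribute names and
        # 'ackfilt' is the string 'true' or 'false' matched against alarm acks.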
failures = False
rsp = {}
try:
filters = filters or {}
tfilter = filters.get('cfilt')
ackfilter = filters.get('ackfilt')
if is_alarm:
tfilter = tfilter or {}
# When returning only alarms, ignore non-alarm cfilt
for k in tfilter.keys():
if k != "UVEAlarms":
del tfilter[k]
if len(tfilter) == 0:
tfilter["UVEAlarms"] = set(["alarms"])
barekey = key.split(":",1)[1]
table = key.split(":",1)[0]
if table not in self._uvedb:
return failures, rsp
if barekey not in self._uvedb[table]:
return failures, rsp
for tkey,tval in self._uvedb[table][barekey].iteritems():
afilter_list = set()
if tfilter is not None:
if tkey not in tfilter:
continue
else:
afilter_list = tfilter[tkey]
if not tval:
continue
for akey, aval in tval.iteritems():
if len(afilter_list):
if akey not in afilter_list:
continue
if ackfilter is not None and \
tkey == "UVEAlarms" and akey == "alarms":
alarms = []
for alarm in aval:
ack = "false"
if "ack" in alarm:
if alarm["ack"]:
ack = "true"
else:
ack = "false"
if ack == ackfilter:
alarms.append(alarm)
if not len(alarms):
continue
else:
if not tkey in rsp:
rsp[tkey] = {}
rsp[tkey][akey] = alarms
else:
if not tkey in rsp:
rsp[tkey] = {}
rsp[tkey][akey] = aval
except Exception as ex:
template = "Exception {0} in uve cache proc. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("%s : traceback %s" % \
(messag, traceback.format_exc()))
return failures, rsp
def _run(self):
for telem in self._q:
elem = telem['data']
if telem['event'] == 'clear':
# remove all keys of this partition
partno = elem['partition']
                # Iterate over a copy, since keys are removed from the
                # set as we go (mutating a set during iteration raises)
                for key in list(self._partkeys[partno]):
                    barekey = key.split(":",1)[1]
                    table = key.split(":",1)[0]
                    del self._uvedb[table][barekey]
                    self._partkeys[partno].remove("%s:%s" % \
                        (table, barekey))
elif telem['event'] == 'sync' or telem['event'] == 'update':
partno = elem['partition']
self._partkeys[partno].add(elem['key'])
barekey = elem['key'].split(":",1)[1]
table = elem['key'].split(":",1)[0]
if table not in self._uvedb:
self._uvedb[table] = {}
if barekey not in self._uvedb[table]:
self._uvedb[table][barekey] = {}
if elem['type'] is None:
# delete the entire UVE
self._partkeys[partno].remove("%s:%s" % \
(table, barekey))
del self._uvedb[table][barekey]
else:
typ = elem['type']
if typ not in self._uvedb[table][barekey]:
self._uvedb[table][barekey][typ] = None
if elem['value'] is None:
# remove one type of this UVE
del self._uvedb[table][barekey][typ]
else:
self._uvedb[table][barekey][typ] = elem['value']
elif telem['event'] == 'stop':
break
else:
pass
class UveStreamPart(gevent.Greenlet):
def __init__(self, partno, logger, q, pi, rpass, sse):
gevent.Greenlet.__init__(self)
self._logger = logger
self._q = q
self._pi = pi
self._partno = partno
self._rpass = rpass
self._sse = sse
def syncpart(self, redish):
inst = self._pi.instance_id
part = self._partno
keys = list(redish.smembers("AGPARTKEYS:%s:%d" % (inst, part)))
ppe = redish.pipeline()
for key in keys:
ppe.hgetall("AGPARTVALUES:%s:%d:%s" % (inst, part, key))
pperes = ppe.execute()
idx=0
for res in pperes:
for tk,tv in res.iteritems():
dt = {'partition':self._partno,
'key':keys[idx], 'type':tk, 'value':json.loads(tv)}
if self._sse:
msg = {'event': 'sync', 'data':json.dumps(dt)}
self._q.put(sse_pack(msg))
else:
msg = {'event': 'sync', 'data':dt}
self._q.put(msg)
idx += 1
def _run(self):
lredis = None
pb = None
while True:
try:
lredis = redis.StrictRedis(
host=self._pi.ip_address,
port=self._pi.port,
password=self._rpass,
db=2)
pb = lredis.pubsub()
inst = self._pi.instance_id
part = self._partno
pb.subscribe('AGPARTPUB:%s:%d' % (inst, part))
self.syncpart(lredis)
for message in pb.listen():
if message["type"] != "message":
continue
dataline = message["data"]
try:
elems = json.loads(dataline)
except:
self._logger.error("AggUVE Parsing failed: %s" % str(message))
continue
else:
self._logger.info("AggUVE loading: %s" % str(elems))
ppe = lredis.pipeline()
for elem in elems:
# This UVE was deleted
if elem["type"] is None:
ppe.exists("AGPARTVALUES:%s:%d:%s" % \
(inst, part, elem["key"]))
else:
ppe.hget("AGPARTVALUES:%s:%d:%s" % \
(inst, part, elem["key"]), elem["type"])
pperes = ppe.execute()
idx = 0
for elem in elems:
if elem["type"] is None:
dt = {'partition':part,
'key':elem["key"], 'type':None}
else:
vjson = pperes[idx]
if vjson is None:
vdata = None
else:
vdata = json.loads(vjson)
dt = {'partition':part,
'key':elem["key"], 'type':elem["type"],
'value':vdata}
if self._sse:
msg = {'event': 'update', 'data':json.dumps(dt)}
self._q.put(sse_pack(msg))
else:
msg = {'event': 'update', 'data':dt}
self._q.put(msg)
idx += 1
except gevent.GreenletExit:
break
except Exception as ex:
template = "Exception {0} in uve stream proc. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("%s : traceback %s" % \
(messag, traceback.format_exc()))
lredis = None
if pb is not None:
pb.close()
pb = None
gevent.sleep(2)
return None
class UveStreamer(gevent.Greenlet):
def __init__(self, logger, q, rfile, agp_cb, partitions, rpass):
gevent.Greenlet.__init__(self)
self._logger = logger
self._q = q
self._rfile = rfile
self._agp_cb = agp_cb
self._agp = {}
self._parts = {}
self._partitions = partitions
self._rpass = rpass
self._sse = True
if self._rfile is None:
self._sse = False
def _run(self):
inputs = [ self._rfile ]
outputs = [ ]
if self._sse:
msg = {'event': 'init', 'data':\
json.dumps({'partitions':self._partitions})}
self._q.put(sse_pack(msg))
else:
msg = {'event': 'init', 'data':\
{'partitions':self._partitions}}
self._q.put(msg)
while True:
try:
if self._rfile is not None:
readable, writable, exceptional = \
select.select(inputs, outputs, inputs, 1)
if (readable or writable or exceptional):
break
else:
gevent.sleep(1)
newagp = self._agp_cb()
set_new, set_old = set(newagp.keys()), set(self._agp.keys())
intersect = set_new.intersection(set_old)
# deleted parts
for elem in set_old - intersect:
self.partition_stop(elem)
# new parts
for elem in set_new - intersect:
self.partition_start(elem, newagp[elem])
# changed parts
for elem in intersect:
if self._agp[elem] != newagp[elem]:
self.partition_stop(elem)
self.partition_start(elem, newagp[elem])
self._agp = newagp
except gevent.GreenletExit:
break
for part, pi in self._agp.iteritems():
self.partition_stop(part)
if self._sse:
msg = {'event': 'stop', 'data':json.dumps(None)}
self._q.put(sse_pack(msg))
else:
msg = {'event': 'stop', 'data':None}
self._q.put(msg)
def partition_start(self, partno, pi):
self._logger.error("Starting agguve part %d using %s" %( partno, pi))
if self._sse:
msg = {'event': 'clear', 'data':\
json.dumps({'partition':partno, 'acq_time':pi.acq_time})}
self._q.put(sse_pack(msg))
else:
msg = {'event': 'clear', 'data':\
{'partition':partno, 'acq_time':pi.acq_time}}
self._q.put(msg)
self._parts[partno] = UveStreamPart(partno, self._logger,
self._q, pi, self._rpass, self._sse)
self._parts[partno].start()
def partition_stop(self, partno):
self._logger.error("Stopping agguve part %d" % partno)
self._parts[partno].kill()
self._parts[partno].get()
del self._parts[partno]
class PartitionHandler(gevent.Greenlet):
def __init__(self, brokers, group, topic, logger, limit):
gevent.Greenlet.__init__(self)
self._brokers = brokers
self._group = group
self._topic = topic
self._logger = logger
self._limit = limit
self._uvedb = {}
self._partoffset = 0
self._kfk = None
def msg_handler(self, mlist):
self._logger.info("%s Reading %s" % (self._topic, str(mlist)))
return True
def _run(self):
pcount = 0
while True:
try:
self._logger.error("New KafkaClient %s" % self._topic)
self._kfk = KafkaClient(self._brokers , "kc-" + self._topic)
try:
consumer = SimpleConsumer(self._kfk, self._group, self._topic, buffer_size = 4096*4, max_buffer_size=4096*32)
#except:
except Exception as ex:
template = "Consumer Failure {0} occured. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("Error: %s trace %s" % \
(messag, traceback.format_exc()))
raise RuntimeError(messag)
self._logger.error("Starting %s" % self._topic)
# Find the offset of the last message that has been queued
consumer.seek(-1,2)
try:
mi = consumer.get_message(timeout=0.1)
consumer.commit()
except common.OffsetOutOfRangeError:
mi = None
#import pdb; pdb.set_trace()
self._logger.info("Last Queued for %s is %s" % \
(self._topic,str(mi)))
# start reading from last previously processed message
if mi != None:
consumer.seek(0,1)
else:
consumer.seek(0,0)
if self._limit:
raise gevent.GreenletExit
while True:
try:
mlist = consumer.get_messages(10,timeout=0.5)
if not self.msg_handler(mlist):
raise gevent.GreenletExit
consumer.commit()
pcount += len(mlist)
except TypeError as ex:
self._logger.error("Type Error: %s trace %s" % \
(str(ex.args), traceback.format_exc()))
gevent.sleep(0.1)
except common.FailedPayloadsError as ex:
self._logger.error("Payload Error: %s" % str(ex.args))
gevent.sleep(0.1)
except gevent.GreenletExit:
break
except AssertionError as ex:
self._partoffset = ex
break
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("%s : traceback %s" % \
(messag, traceback.format_exc()))
self.stop_partition()
gevent.sleep(2)
self._logger.error("Stopping %s pcount %d" % (self._topic, pcount))
partdb = self.stop_partition()
return self._partoffset, partdb
class UveStreamProc(PartitionHandler):
# Arguments:
#
# brokers : broker list for kafka bootstrap
# partition : partition number
# uve_topic : Topic to consume
# logger : logging object to use
# callback : Callback function for reporting the set of the UVEs
# that may have changed for a given notification
# rsc : Callback function to check on collector status
# and get sync contents for new collectors
# aginst : instance_id of alarmgen
# rport : redis server port
# disc : discovery client to publish to
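    #
    # Illustrative construction (argument values are hypothetical):
    #   proc = UveStreamProc("broker1:9092", 0, "-uve-topic-0", logger,
    #                        uve_change_cb, "10.0.0.1", resource_cb,
    #                        "0", 6379, disc=disc_client)
    #   proc.start()   # runs as a gevent Greenlet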
def __init__(self, brokers, partition, uve_topic, logger, callback,
host_ip, rsc, aginst, rport, disc = None):
super(UveStreamProc, self).__init__(brokers, "workers",
uve_topic, logger, False)
self._uvedb = {}
self._uvein = {}
self._callback = callback
self._partno = partition
self._host_ip = host_ip
self._ip_code, = struct.unpack('>I', socket.inet_pton(
socket.AF_INET, host_ip))
self.disc_rset = set()
self._resource_cb = rsc
self._aginst = aginst
self._disc = disc
self._acq_time = UTCTimestampUsec()
self._rport = rport
def acq_time(self):
return self._acq_time
def resource_check(self, msgs):
'''
This function compares the known collectors with the
list from discovery, and syncs UVE keys accordingly
'''
        newset, coll_delete, chg_res = self._resource_cb(self._partno, self.disc_rset, msgs)
for coll in coll_delete:
self._logger.error("Part %d lost collector %s" % (self._partno, coll))
self.stop_partition(coll)
if len(chg_res):
self.start_partition(chg_res)
self.disc_rset = newset
if self._disc:
data = { 'instance-id' : self._aginst,
'partition' : str(self._partno),
'ip-address': self._host_ip,
'acq-time': str(self._acq_time),
'port':str(self._rport)}
self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data)
def stop_partition(self, kcoll=None):
clist = []
if not kcoll:
clist = self._uvedb.keys()
            # If all collectors are being cleared, clear resources too
self.disc_rset = set()
if self._disc:
# TODO: Unpublish instead of setting acq-time to 0
data = { 'instance-id' : self._aginst,
'partition' : str(self._partno),
'ip-address': self._host_ip,
'acq-time': "0",
'port':str(self._rport)}
self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data)
else:
clist = [kcoll]
self._logger.error("Stopping part %d collectors %s" % \
(self._partno,clist))
partdb = {}
chg = {}
for coll in clist:
partdb[coll] = {}
for gen in self._uvedb[coll].keys():
partdb[coll][gen] = {}
for tab in self._uvedb[coll][gen].keys():
for rkey in self._uvedb[coll][gen][tab].keys():
uk = tab + ":" + rkey
chg[uk] = None
partdb[coll][gen][uk] = \
set(self._uvedb[coll][gen][tab][rkey].keys())
del self._uvedb[coll]
self._logger.error("Stopping part %d UVEs %s" % \
(self._partno,str(chg.keys())))
self._callback(self._partno, chg)
return partdb
def start_partition(self, cbdb):
''' This function loads the initial UVE database.
for the partition
'''
self._logger.error("Starting part %d collectors %s" % \
(self._partno, str(cbdb.keys())))
uves = {}
for kcoll,coll in cbdb.iteritems():
self._uvedb[kcoll] = {}
for kgen,gen in coll.iteritems():
self._uvedb[kcoll][kgen] = {}
for kk in gen.keys():
tabl = kk.split(":",1)
tab = tabl[0]
rkey = tabl[1]
if not tab in self._uvedb[kcoll][kgen]:
self._uvedb[kcoll][kgen][tab] = {}
self._uvedb[kcoll][kgen][tab][rkey] = {}
uves[kk] = {}
for typ, contents in gen[kk].iteritems():
self._uvedb[kcoll][kgen][tab][rkey][typ] = {}
self._uvedb[kcoll][kgen][tab][rkey][typ]["c"] = 0
self._uvedb[kcoll][kgen][tab][rkey][typ]["u"] = \
uuid.uuid1(self._ip_code)
uves[kk][typ] = contents
self._logger.error("Starting part %d UVEs %s" % \
(self._partno, str(uves.keys())))
self._callback(self._partno, uves)
def contents(self):
return self._uvedb
def stats(self):
''' Return the UVE incoming stats collected over
the last time period for this partition
Also, the stats should be cleared to prepare
for the next period of collection.
'''
ret_in = copy.deepcopy(self._uvein)
self._uvein = {}
return ret_in
def msg_handler(self, mlist):
self.resource_check(mlist)
for mm in mlist:
if mm is None:
continue
self._logger.debug("%s Reading offset %d" % \
(self._topic, mm.offset))
if not self.msg_handler_single(mm):
self._logger.info("%s could not handle %s" % \
(self._topic, str(mm)))
return False
return True
def msg_handler_single(self, om):
self._partoffset = om.offset
chg = {}
try:
uv = json.loads(om.message.value)
coll = uv["coll"]
gen = uv["gen"]
if not self._uvedb.has_key(coll):
# This partition is not synced yet.
# Ignore this message
self._logger.debug("%s Ignoring UVE %s" % (self._topic, str(om)))
return True
if not self._uvedb[coll].has_key(gen):
self._uvedb[coll][gen] = {}
if (uv["message"] == "UVEUpdate"):
tabl = uv["key"].split(":",1)
tab = tabl[0]
rkey = tabl[1]
if tab not in self._uvedb[coll][gen]:
self._uvedb[coll][gen][tab] = {}
if not rkey in self._uvedb[coll][gen][tab]:
self._uvedb[coll][gen][tab][rkey] = {}
removed = False
# uv["type"] and uv["value"] can be decoded as follows:
# uv["type"] can be one of the following:
# - None # All Types under this UVE are deleted
# uv["value"] will not be present
# (this option is only for agg UVE updates)
# - "<Struct>" # uv["value"] refers to this struct
# uv["value"] can be one of the following:
# - None # This Type has been deleted.
# - {} # The Type has a value, which is
# not available in this message.
# (this option is only for raw UVE updates)
# - {<Value>} # The Value of the Type
# (this option is only for agg UVE updates)
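                # Illustrative payload (hypothetical values) for an agg update:
                #   {"coll": "collector-0", "gen": "gen-a", "message": "UVEUpdate",
                #    "key": "ObjectVRouter:node1", "type": "NodeStatus",
                #    "value": {"process_status": "..."}}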
if uv["type"] is None:
# TODO: Handling of delete UVE case
return False
if uv["value"] is None:
if uv["type"] in self._uvedb[coll][gen][tab][rkey]:
del self._uvedb[coll][gen][tab][rkey][uv["type"]]
if not len(self._uvedb[coll][gen][tab][rkey]):
del self._uvedb[coll][gen][tab][rkey]
removed = True
if not removed:
if uv["type"] in self._uvedb[coll][gen][tab][rkey]:
self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] +=1
else:
self._uvedb[coll][gen][tab][rkey][uv["type"]] = {}
self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] = 1
self._uvedb[coll][gen][tab][rkey][uv["type"]]["u"] = \
uuid.uuid1(self._ip_code)
chg[uv["key"]] = { uv["type"] : uv["value"] }
# Record stats on the input UVE Notifications
if not self._uvein.has_key(tab):
self._uvein[tab] = {}
if not self._uvein[tab].has_key(coll):
self._uvein[tab][coll] = {}
if not self._uvein[tab][coll].has_key(gen):
self._uvein[tab][coll][gen] = {}
if not self._uvein[tab][coll][gen].has_key(uv["type"]):
self._uvein[tab][coll][gen][uv["type"]] = 1
else:
self._uvein[tab][coll][gen][uv["type"]] += 1
else:
# Record stats on UVE Keys being processed
for tab in self._uvedb[coll][gen].keys():
for rkey in self._uvedb[coll][gen][tab].keys():
uk = tab + ":" + rkey
                        # when a generator is deleted, we need to
# notify for *ALL* its UVEs
chg[uk] = None
del self._uvedb[coll][gen]
except Exception as ex:
template = "An exception of type {0} in uve proc . Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.info("%s" % messag)
return False
else:
self._callback(self._partno, chg)
return True
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
workers = {}
brokers = "localhost:9092,localhost:9093,localhost:9094"
group = "workers"
kafka = KafkaClient(brokers,str(os.getpid()))
cons = SimpleConsumer(kafka, group, "ctrl")
cons.provide_partition_info()
print "Starting control"
end_ready = False
while end_ready == False:
try:
while True:
part, mmm = cons.get_message(timeout=None)
mm = mmm.message
print "Consumed ctrl " + str(mm)
if mm.value == "start":
if workers.has_key(mm.key):
print "Dup partition %s" % mm.key
raise ValueError
else:
ph = UveStreamProc(brokers, int(mm.key), "uve-" + mm.key, "alarm-x" + mm.key, logging)
ph.start()
workers[int(mm.key)] = ph
elif mm.value == "stop":
#import pdb; pdb.set_trace()
if workers.has_key(int(mm.key)):
ph = workers[int(mm.key)]
gevent.kill(ph)
res,db = ph.get()
print "Returned " + str(res)
print "State :"
for k,v in db.iteritems():
print "%s -> %s" % (k,str(v))
del workers[int(mm.key)]
else:
end_ready = True
cons.commit()
gevent.sleep(2)
break
except TypeError:
gevent.sleep(0.1)
except common.FailedPayloadsError as ex:
print "Payload Error: " + str(ex.args)
gevent.sleep(0.1)
lw=[]
for key, value in workers.iteritems():
gevent.kill(value)
lw.append(value)
gevent.joinall(lw)
print "Ending Consumers"
| apache-2.0 | -5,221,414,559,799,287,000 | 38.661157 | 129 | 0.44992 | false |
edesky/text_validator | lib/validate.py | 1 | 1509 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import argparse
import json
import math
parser = argparse.ArgumentParser()
parser.add_argument("--text", required=True, help="Input text")
parser.add_argument("--short", action='store_true')
args = parser.parse_args()
input_text = args.text.decode('utf8')
VALID_CHARS = u'qwertzuiopasdfghjklyxcvbnm1234567890QWERTZUIOPASDFGHJKLYXCVBNM ?:.,;-=/+ěščřžýáíéĚŠČŘŽÝÁÍÉůúŇ'
def chars_score(text):
known_cnt = 0
for char in text:
if char in VALID_CHARS:
known_cnt += 1
return float(known_cnt)/max(len(text), 1)
def length_score(text):
cnt = 0
for char in text:
if char in VALID_CHARS:
cnt += 1
return min(math.log(max(cnt, 1), 1000), 1.0)
# TODO: add another functions here
# def xyz_score(text):
# text magic here
# return 0.5
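# compute() folds the individual scores into a running arithmetic mean, so
# every score function added above contributes equally to the final value.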
def compute(scores):
total = 0
cnt = 0
for score in scores:
cnt += 1
        total = (total * (cnt-1) + score['value']) / cnt
return {'score': total, 'parts': scores}
scores = []
scores.append({'name': 'chars_score', 'value': chars_score(input_text)})
scores.append({'name': 'length_score', 'value': length_score(input_text)})
# TODO: add another functions here
# scores.append({'name': 'xyz_score', 'value': xyz_score(args.text)})
total_score = compute(scores)
if args.short:
print(total_score['score'])
else:
print(json.dumps(total_score, sort_keys=True, indent=4))
| mit | 4,563,589,001,996,884,000 | 25.571429 | 110 | 0.649866 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/system-config-printer/errordialogs.py | 1 | 3111 | #!/usr/bin/python
## system-config-printer
## Copyright (C) 2006, 2007, 2008, 2010 Red Hat, Inc.
## Authors:
## Florian Festi <[email protected]>
## Tim Waugh <[email protected]>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import cups
import gtk
from gettext import gettext as _
def show_dialog (title, text, type, parent=None):
dialog = gtk.MessageDialog (parent,
gtk.DIALOG_MODAL |
gtk.DIALOG_DESTROY_WITH_PARENT,
type,
gtk.BUTTONS_OK,
title)
dialog.format_secondary_text (text)
dialog.run ()
dialog.destroy ()
def show_info_dialog (title, text, parent=None):
return show_dialog (title, text, gtk.MESSAGE_INFO, parent=parent)
def show_error_dialog (title, text, parent=None):
return show_dialog (title, text, gtk.MESSAGE_ERROR, parent=parent)
def show_IPP_Error(exception, message, parent=None):
if exception == 0:
# In this case, the user has canceled an authentication dialog.
return
elif exception == cups.IPP_SERVICE_UNAVAILABLE:
# In this case, the user has canceled a retry dialog.
return
else:
title = _("CUPS server error")
text = (_("There was an error during the CUPS "
"operation: '%s'.")) % message
show_error_dialog (title, text, parent)
def show_HTTP_Error(status, parent=None):
if (status == cups.HTTP_UNAUTHORIZED or
status == cups.HTTP_FORBIDDEN):
title = _('Not authorized')
text = (_('The password may be incorrect, or the '
'server may be configured to deny '
'remote administration.'))
else:
title = _('CUPS server error')
if status == cups.HTTP_BAD_REQUEST:
msg = _("Bad request")
elif status == cups.HTTP_NOT_FOUND:
msg = _("Not found")
elif status == cups.HTTP_REQUEST_TIMEOUT:
msg = _("Request timeout")
elif status == cups.HTTP_UPGRADE_REQUIRED:
msg = _("Upgrade required")
elif status == cups.HTTP_SERVER_ERROR:
msg = _("Server error")
elif status == -1:
msg = _("Not connected")
else:
msg = _("status %s") % status
text = _("There was an HTTP error: %s.") % msg
show_error_dialog (title, text, parent)
| gpl-3.0 | 8,152,813,570,208,546,000 | 35.6 | 82 | 0.6072 | false |
drelu/SAGA-Hadoop | hadoop1/launcher.py | 1 | 2591 | #!/usr/bin/env python
import time
import saga
import os, sys
import subprocess
import pdb
import logging
logging.basicConfig(level=logging.ERROR)
def main():
try:
# create a job service for Futuregrid's 'india' PBS cluster
js = saga.job.Service("pbs+ssh://india")
#js = saga.job.Service("fork://localhost")
# describe our job
jd = saga.job.Description()
# resource requirements
jd.total_cpu_count = 16
# environment, executable & arguments
executable = os.path.join(os.getcwd(), "bootstrap_hadoop.py")
logging.debug("Run %s"%executable)
jd.executable = executable
jd.arguments = []
# output options
jd.output = "hadoop_job.stdout"
jd.error = "hadoop_job.stderr"
jd.working_directory=os.getcwd()
# create the job (state: New)
myjob = js.create_job(jd)
print "Starting Hadoop bootstrap job...\n"
# run the job (submit the job to PBS)
myjob.run()
jobid = myjob.get_id()
print "**** Job ID : %s" % (jobid)
print "**** Job State : %s" % (myjob.get_state())
while True:
state = myjob.get_state()
if state=="Running":
if os.path.exists("work/started"):
get_hadoop_config_data(str(jobid))
break
time.sleep(3)
except Exception, ex:
print "An error occured: %s" % (str(ex))
def get_hadoop_config_data(jobid):
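    # The SAGA job id appears to embed the backend PBS id (assumed layout:
    # "[service-url]-[<pbs-id>]"); the slice below strips everything up to
    # "-[" and the trailing "]".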
pbs_id = jobid[jobid.find("-")+2:len(jobid)-1]
nodes = subprocess.check_output(["qstat", "-f", pbs_id])
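    # 'qstat -f' prints one attribute per line; the exec_host entry lists the
    # nodes/cores allocated to the job (e.g. "node1/0+node1/1+node2/0").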
hosts = "empty"
for i in nodes.split("\n"):
if i.find("exec_host")>0:
hosts = i[i.find("=")+1:].strip()
hadoop_home=os.path.join(os.getcwd(), "work/hadoop-1.0.0")
print "HADOOP installation directory: %s"%hadoop_home
print "Allocated Resources for Hadoop cluster: " + hosts
print "HDFS Web Interface: http://%s:50070"% hosts[:hosts.find("/")]
print "\nTo use Hadoop set HADOOP_CONF_DIR: "
print "export HADOOP_CONF_DIR=%s"%(os.path.join(os.getcwd(), "work", get_most_current_job(), "conf"))
print "%s/bin/hadoop dfsadmin -report"%hadoop_home
print ""
def get_most_current_job():
dir = "work"
files = os.listdir(dir)
max = None
for i in files:
if i.startswith("hadoop-conf"):
t = os.path.getctime(os.path.join(dir,i))
if max == None or t>max[0]:
max = (t, i)
return max[1]
if __name__ == "__main__":
main()
| apache-2.0 | 944,839,436,700,455,200 | 29.482353 | 106 | 0.564261 | false |
heilaaks/snippy | tests/lib/helper.py | 1 | 7104 | # -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# snippy - software development and maintenance notes manager.
# Copyright 2017-2020 Heikki J. Laaksonen <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""helper: Generic helpers testing."""
from __future__ import print_function
import io
import os.path
import re
import sys
import json
import pkg_resources
from jsonschema import Draft7Validator, RefResolver
class Helper(object):
"""Generic helpers testing.
This class intentionally copies some of the implementation from the
production code. The purpose is to avoid dependencies in this module
to be able to import this module anywhere.
"""
EXPORT_TIME = '2018-02-02T02:02:02.000001+00:00'
IMPORT_TIME = '2018-03-02T02:02:02.000001+00:00'
EXPORT_TEMPLATE = '2017-10-14T19:56:31.000001+00:00'
DB_SQLITE = 'sqlite'
DB_POSTGRESQL = 'postgresql'
DB_COCKROACHDB = 'cockroachdb'
DB_IN_MEMORY = 'in-memory'
STORAGES = (DB_SQLITE, DB_POSTGRESQL, DB_COCKROACHDB, DB_IN_MEMORY)
COLOR_OK = '\033[32m'
COLOR_END = '\033[0m'
# All resource attributes that can be sent in HTTP request.
REQUEST_ATTRIBUTES = (
'data',
'brief',
'description',
'name',
'groups',
'tags',
'links',
'source',
'versions',
'languages',
'filename'
)
RE_MATCH_ANSI_ESCAPE_SEQUENCES = re.compile(r'''
\x1b[^m]*m # Match all ANSI escape sequences.
''', re.VERBOSE)
RE_MATCH_LEADING_WHITEPSACES = re.compile(r'''
\n\s+ # Match newline and all leading whitespaces after it.
''', re.VERBOSE)
@classmethod
def read_template(cls, filename):
"""Get default content template in text format.
The returned template must be in the same format where external editor
like vi gets the default template. This means that all the tags are
removed and the group tag is replaced with 'default' group.
Args:
filename (str): Template filename as stored in data/templates.
Returns:
str: Empty template in the same format as for external editor.
"""
template = cls._read_resource('data/templates', filename)
template = re.sub(r'''
<groups> # Match groups tag.
''', 'default', template, flags=re.VERBOSE)
template = re.sub(r'''
[<]\S+[>] # Match any tag in the template.
''', '', template, flags=re.VERBOSE)
# In case of the solution template, there is a <data> tag that leaves
# empty fist line. Since all templates start from the first line, the
# whitespaces can be removed from left of the string.
template = template.lstrip()
return template
@classmethod
def read_completion(cls, filename):
"""Get shell completion script.
        Args:
filename (str): Name of the shell completion file.
"""
return cls._read_resource('data/completion', filename)
@staticmethod
def remove_ansi(message):
"""Remove all ANSI escape codes from given string.
Args:
message (str): Message which ANSI escape codes are removed.
Returns:
str: Same message but without ANSI escape sequences.
"""
return Helper.RE_MATCH_ANSI_ESCAPE_SEQUENCES.sub('', message)
@classmethod
def get_schema_validator(cls):
"""Get JSON schema validator for REST API response.
Returns:
obj: Jsonschema draft7 validator.
"""
response_resource = json.loads(cls._read_resource('data/server/openapi/schema', 'responseresource.json'))
response_collection_get = json.loads(cls._read_resource('data/server/openapi/schema', 'responsecollectionget.json'))
response_collection_post = json.loads(cls._read_resource('data/server/openapi/schema', 'responsecollectionpost.json'))
response_groups = json.loads(cls._read_resource('data/server/openapi/schema', 'responsegroups.json'))
response_tags = json.loads(cls._read_resource('data/server/openapi/schema', 'responsetags.json'))
response_errors = json.loads(cls._read_resource('data/server/openapi/schema', 'responseerrors.json'))
response_hello = json.loads(cls._read_resource('data/server/openapi/schema', 'responsehello.json'))
schema = {
'oneOf': [
response_collection_get,
response_collection_post,
response_groups,
response_tags,
response_errors,
response_hello,
response_resource
]
}
filepath = pkg_resources.resource_filename('snippy', 'data/server/openapi/schema/')
if not os.path.isdir(filepath):
print('NOK: cannot run test because server api response schema base uri is not accessible: {}'.format(filepath))
sys.exit(1)
server_schema_base_uri = 'file:' + filepath
Draft7Validator.check_schema(schema)
resolver = RefResolver(base_uri=server_schema_base_uri, referrer=schema)
validator = Draft7Validator(schema, resolver=resolver, format_checker=None)
return validator
@staticmethod
def _read_resource(path, filename):
"""Read resource file.
Args:
path (str): Relative path under snippy project.
filename (str): Resource filename.
Returns:
str: File read into a string.
"""
filename = os.path.join(pkg_resources.resource_filename('snippy', path), filename)
if not os.path.isfile(filename):
print('NOK: cannot run tests because snippy resource file is not accessible: {}'.format(filename))
sys.exit(1)
resource_file = ''
with io.open(filename, encoding='utf-8') as infile:
resource_file = infile.read()
return resource_file
class Classproperty(object): # pylint: disable=too-few-public-methods
"""Implement classproperty.
Implement decorator that mimics object property. See [1] for more
details.
[1] https://stackoverflow.com/a/3203659
"""
def __init__(self, getter):
self._getter = getter
def __get__(self, _, owner):
"""Get property of a class."""
return self._getter(owner)
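

# Illustrative use of Classproperty (hypothetical class, not part of the suite):
#
#     class Config(object):
#         @Classproperty
#         def storage(cls):
#             # accessed as Config.storage, without creating an instance
#             return Helper.DB_SQLITE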
| agpl-3.0 | -3,998,784,855,587,372,000 | 32.668246 | 126 | 0.634713 | false |
leshchevds/ganeti | lib/ssh.py | 1 | 43027 | #
#
# Copyright (C) 2006, 2007, 2010, 2011 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module encapsulating ssh functionality.
"""
import logging
import os
import shutil
import tempfile
from collections import namedtuple
from functools import partial
from ganeti import utils
from ganeti import errors
from ganeti import constants
from ganeti import netutils
from ganeti import pathutils
from ganeti import vcluster
from ganeti import compat
from ganeti import serializer
from ganeti import ssconf
def GetUserFiles(user, mkdir=False, dircheck=True, kind=constants.SSHK_DSA,
_homedir_fn=None):
"""Return the paths of a user's SSH files.
@type user: string
@param user: Username
@type mkdir: bool
@param mkdir: Whether to create ".ssh" directory if it doesn't exist
@type dircheck: bool
@param dircheck: Whether to check if ".ssh" directory exists
@type kind: string
@param kind: One of L{constants.SSHK_ALL}
@rtype: tuple; (string, string, string)
@return: Tuple containing three file system paths; the private SSH key file,
the public SSH key file and the user's C{authorized_keys} file
@raise errors.OpExecError: When home directory of the user can not be
determined
@raise errors.OpExecError: Regardless of the C{mkdir} parameters, this
exception is raised if C{~$user/.ssh} is not a directory and C{dircheck}
is set to C{True}
"""
if _homedir_fn is None:
_homedir_fn = utils.GetHomeDir
user_dir = _homedir_fn(user)
if not user_dir:
raise errors.OpExecError("Cannot resolve home of user '%s'" % user)
if kind == constants.SSHK_DSA:
suffix = "dsa"
elif kind == constants.SSHK_RSA:
suffix = "rsa"
elif kind == constants.SSHK_ECDSA:
suffix = "ecdsa"
else:
raise errors.ProgrammerError("Unknown SSH key kind '%s'" % kind)
ssh_dir = utils.PathJoin(user_dir, ".ssh")
if mkdir:
utils.EnsureDirs([(ssh_dir, constants.SECURE_DIR_MODE)])
elif dircheck and not os.path.isdir(ssh_dir):
raise errors.OpExecError("Path %s is not a directory" % ssh_dir)
return [utils.PathJoin(ssh_dir, base)
for base in ["id_%s" % suffix, "id_%s.pub" % suffix,
"authorized_keys"]]
def GetAllUserFiles(user, mkdir=False, dircheck=True, _homedir_fn=None):
"""Wrapper over L{GetUserFiles} to retrieve files for all SSH key types.
See L{GetUserFiles} for details.
@rtype: tuple; (string, dict with string as key, tuple of (string, string) as
value)
"""
helper = compat.partial(GetUserFiles, user, mkdir=mkdir, dircheck=dircheck,
_homedir_fn=_homedir_fn)
result = [(kind, helper(kind=kind)) for kind in constants.SSHK_ALL]
authorized_keys = [i for (_, (_, _, i)) in result]
assert len(frozenset(authorized_keys)) == 1, \
"Different paths for authorized_keys were returned"
return (authorized_keys[0],
dict((kind, (privkey, pubkey))
for (kind, (privkey, pubkey, _)) in result))
def _SplitSshKey(key):
"""Splits a line for SSH's C{authorized_keys} file.
If the line has no options (e.g. no C{command="..."}), only the significant
parts, the key type and its hash, are used. Otherwise the whole line is used
(split at whitespace).
@type key: string
@param key: Key line
@rtype: tuple
"""
parts = key.split()
if parts and parts[0] in constants.SSHAK_ALL:
# If the key has no options in front of it, we only want the significant
# fields
return (False, parts[:2])
else:
# Can't properly split the line, so use everything
return (True, parts)
def AddAuthorizedKeys(file_obj, keys):
"""Adds a list of SSH public key to an authorized_keys file.
@type file_obj: str or file handle
@param file_obj: path to authorized_keys file
@type keys: list of str
@param keys: list of strings containing keys
"""
key_field_list = [(key, _SplitSshKey(key)) for key in keys]
if isinstance(file_obj, basestring):
f = open(file_obj, "a+")
else:
f = file_obj
try:
nl = True
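    # Scan the existing file, dropping any of the keys we are about to add
    # (ignoring pure whitespace differences); the for/else below appends the
    # new keys once the whole file has been read.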
for line in f:
# Ignore whitespace changes
line_key = _SplitSshKey(line)
key_field_list[:] = [(key, split_key) for (key, split_key)
in key_field_list
if split_key != line_key]
nl = line.endswith("\n")
else:
if not nl:
f.write("\n")
for (key, _) in key_field_list:
f.write(key.rstrip("\r\n"))
f.write("\n")
f.flush()
finally:
f.close()
def HasAuthorizedKey(file_obj, key):
"""Check if a particular key is in the 'authorized_keys' file.
@type file_obj: str or file handle
@param file_obj: path to authorized_keys file
@type key: str
@param key: string containing key
"""
key_fields = _SplitSshKey(key)
if isinstance(file_obj, basestring):
f = open(file_obj, "r")
else:
f = file_obj
try:
for line in f:
# Ignore whitespace changes
line_key = _SplitSshKey(line)
if line_key == key_fields:
return True
finally:
f.close()
return False
def CheckForMultipleKeys(file_obj, node_names):
"""Check if there is at most one key per host in 'authorized_keys' file.
@type file_obj: str or file handle
@param file_obj: path to authorized_keys file
@type node_names: list of str
@param node_names: list of names of nodes of the cluster
@returns: a dictionary with hostnames which occur more than once
"""
if isinstance(file_obj, basestring):
f = open(file_obj, "r")
else:
f = file_obj
occurrences = {}
try:
index = 0
for line in f:
index += 1
if line.startswith("#"):
continue
chunks = line.split()
# find the chunk with user@hostname
user_hostname = [chunk.strip() for chunk in chunks if "@" in chunk][0]
if not user_hostname in occurrences:
occurrences[user_hostname] = []
occurrences[user_hostname].append(index)
finally:
f.close()
bad_occurrences = {}
for user_hostname, occ in occurrences.items():
_, hostname = user_hostname.split("@")
if hostname in node_names and len(occ) > 1:
bad_occurrences[user_hostname] = occ
return bad_occurrences
def AddAuthorizedKey(file_obj, key):
"""Adds an SSH public key to an authorized_keys file.
@type file_obj: str or file handle
@param file_obj: path to authorized_keys file
@type key: str
@param key: string containing key
"""
AddAuthorizedKeys(file_obj, [key])
def RemoveAuthorizedKeys(file_name, keys):
"""Removes public SSH keys from an authorized_keys file.
@type file_name: str
@param file_name: path to authorized_keys file
@type keys: list of str
@param keys: list of strings containing keys
"""
key_field_list = [_SplitSshKey(key) for key in keys]
fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(file_name))
try:
out = os.fdopen(fd, "w")
try:
f = open(file_name, "r")
try:
for line in f:
# Ignore whitespace changes while comparing lines
if _SplitSshKey(line) not in key_field_list:
out.write(line)
out.flush()
os.rename(tmpname, file_name)
finally:
f.close()
finally:
out.close()
except:
utils.RemoveFile(tmpname)
raise
def RemoveAuthorizedKey(file_name, key):
"""Removes an SSH public key from an authorized_keys file.
@type file_name: str
@param file_name: path to authorized_keys file
@type key: str
@param key: string containing key
"""
RemoveAuthorizedKeys(file_name, [key])
def _AddPublicKeyProcessLine(new_uuid, new_key, line_uuid, line_key, found):
"""Processes one line of the public key file when adding a key.
This is a sub function that can be called within the
C{_ManipulatePublicKeyFile} function. It processes one line of the public
key file, checks if this line contains the key to add already and if so,
notes the occurrence in the return value.
@type new_uuid: string
@param new_uuid: the node UUID of the node whose key is added
@type new_key: string
@param new_key: the SSH key to be added
  @type line_uuid: string
  @param line_uuid: the UUID of the node whose line in the public key file
    is processed in this function call
  @type line_key: string
  @param line_key: the SSH key of the node whose line in the public key
    file is processed in this function call
@type found: boolean
@param found: whether or not the (UUID, key) pair of the node whose key
is being added was found in the public key file already.
@rtype: (boolean, string)
@return: a possibly updated value of C{found} and the processed line
"""
if line_uuid == new_uuid and line_key == new_key:
logging.debug("SSH key of node '%s' already in key file.", new_uuid)
found = True
return (found, "%s %s\n" % (line_uuid, line_key))
def _AddPublicKeyElse(new_uuid, new_key):
"""Adds a new SSH key to the key file if it did not exist already.
This is an auxiliary function for C{_ManipulatePublicKeyFile} which
is carried out when a new key is added to the public key file and
after processing the whole file, we found out that the key does
not exist in the file yet but needs to be appended at the end.
@type new_uuid: string
@param new_uuid: the UUID of the node whose key is added
@type new_key: string
@param new_key: the SSH key to be added
@rtype: string
@return: a new line to be added to the file
"""
return "%s %s\n" % (new_uuid, new_key)
def _RemovePublicKeyProcessLine(
target_uuid, _target_key,
line_uuid, line_key, found):
"""Processes a line in the public key file when aiming for removing a key.
This is an auxiliary function for C{_ManipulatePublicKeyFile} when we
are removing a key from the public key file. This particular function
only checks if the current line contains the UUID of the node in
question and writes the line to the temporary file otherwise.
@type target_uuid: string
@param target_uuid: UUID of the node whose key is being removed
@type _target_key: string
@param _target_key: SSH key of the node (not used)
@type line_uuid: string
@param line_uuid: UUID of the node whose line is processed in this call
@type line_key: string
@param line_key: SSH key of the nodes whose line is processed in this call
@type found: boolean
@param found: whether or not the UUID was already found.
@rtype: (boolean, string)
@return: a tuple, indicating if the target line was found and the processed
line; the line is 'None', if the original line is removed
"""
if line_uuid != target_uuid:
return (found, "%s %s\n" % (line_uuid, line_key))
else:
return (True, None)
def _RemovePublicKeyElse(
target_uuid, _target_key):
"""Logs when we tried to remove a key that does not exist.
This is an auxiliary function for C{_ManipulatePublicKeyFile} which is
run after we have processed the complete public key file and did not find
the key to be removed.
@type target_uuid: string
@param target_uuid: the UUID of the node whose key was supposed to be removed
@type _target_key: string
@param _target_key: the key of the node which was supposed to be removed
(not used)
@rtype: string
@return: in this case, always None
"""
logging.debug("Trying to remove key of node '%s' which is not in list"
" of public keys.", target_uuid)
return None
def _ReplaceNameByUuidProcessLine(
node_name, _key, line_identifier, line_key, found, node_uuid=None):
"""Replaces a node's name with its UUID on a matching line in the key file.
This is an auxiliary function for C{_ManipulatePublicKeyFile} which processes
a line of the ganeti public key file. If the line in question matches the
node's name, the name will be replaced by the node's UUID.
@type node_name: string
@param node_name: name of the node to be replaced by the UUID
@type _key: string
@param _key: SSH key of the node (not used)
@type line_identifier: string
@param line_identifier: an identifier of a node in a line of the public key
file. This can be either a node name or a node UUID, depending on if it
got replaced already or not.
@type line_key: string
@param line_key: SSH key of the node whose line is processed
@type found: boolean
@param found: whether or not the line matches the node's name
@type node_uuid: string
@param node_uuid: the node's UUID which will replace the node name
@rtype: (boolean, string)
@return: a tuple indicating whether the target line was found and the
processed line
"""
if node_name == line_identifier:
return (True, "%s %s\n" % (node_uuid, line_key))
else:
return (found, "%s %s\n" % (line_identifier, line_key))
def _ReplaceNameByUuidElse(
node_uuid, node_name, _key):
"""Logs a debug message when we try to replace a key that is not there.
This is an implementation of the auxiliary C{process_else_fn} function for
the C{_ManipulatePubKeyFile} function when we use it to replace a line
in the public key file that is indexed by the node's name instead of the
node's UUID.
@type node_uuid: string
@param node_uuid: the node's UUID
@type node_name: string
  @param node_name: the node's name
@type _key: string (not used)
@param _key: the node's SSH key (not used)
@rtype: string
@return: in this case, always None
"""
logging.debug("Trying to replace node name '%s' with UUID '%s', but"
" no line with that name was found.", node_name, node_uuid)
return None
def _ParseKeyLine(line, error_fn):
"""Parses a line of the public key file.
@type line: string
@param line: line of the public key file
@type error_fn: function
@param error_fn: function to process error messages
@rtype: tuple (string, string)
@return: a tuple containing the UUID of the node and a string containing
the SSH key and possible more parameters for the key
"""
if len(line.rstrip()) == 0:
return (None, None)
chunks = line.split(" ")
if len(chunks) < 2:
raise error_fn("Error parsing public SSH key file. Line: '%s'"
% line)
uuid = chunks[0]
key = " ".join(chunks[1:]).rstrip()
return (uuid, key)
def _ManipulatePubKeyFile(target_identifier, target_key,
key_file=pathutils.SSH_PUB_KEYS,
error_fn=errors.ProgrammerError,
process_line_fn=None, process_else_fn=None):
"""Manipulates the list of public SSH keys of the cluster.
This is a general function to manipulate the public key file. It needs
two auxiliary functions C{process_line_fn} and C{process_else_fn} to
work. Generally, the public key file is processed as follows:
1) The function processes each line of the original ganeti public key file,
applies the C{process_line_fn} function on it, which returns a possibly
manipulated line and an indicator whether the line in question was found.
If a line is returned, it is added to a list of lines for later writing
to the file.
2) If all lines are processed and the 'found' variable is False, the
     second auxiliary function C{process_else_fn} is called to possibly
add more lines to the list of lines.
3) Finally, the list of lines is assembled to a string and written
atomically to the public key file, thereby overriding it.
If the public key file does not exist, we create it. This is necessary for
a smooth transition after an upgrade.
@type target_identifier: str
@param target_identifier: identifier of the node whose key is added; in most
cases this is the node's UUID, but in some it is the node's host name
@type target_key: str
@param target_key: string containing a public SSH key (a complete line
possibly including more parameters than just the key)
@type key_file: str
@param key_file: filename of the file of public node keys (optional
parameter for testing)
@type error_fn: function
@param error_fn: Function that returns an exception, used to customize
exception types depending on the calling context
@type process_line_fn: function
@param process_line_fn: function to process one line of the public key file
@type process_else_fn: function
@param process_else_fn: function to be called if no line of the key file
matches the target uuid
"""
assert process_else_fn is not None
assert process_line_fn is not None
old_lines = []
f_orig = None
if os.path.exists(key_file):
try:
f_orig = open(key_file, "r")
old_lines = f_orig.readlines()
finally:
f_orig.close()
else:
try:
f_orig = open(key_file, "w")
f_orig.close()
except IOError as e:
raise errors.SshUpdateError("Cannot create public key file: %s" % e)
found = False
new_lines = []
for line in old_lines:
(uuid, key) = _ParseKeyLine(line, error_fn)
if not uuid:
continue
(new_found, new_line) = process_line_fn(target_identifier, target_key,
uuid, key, found)
if new_found:
found = True
if new_line is not None:
new_lines.append(new_line)
if not found:
new_line = process_else_fn(target_identifier, target_key)
if new_line is not None:
new_lines.append(new_line)
new_file_content = "".join(new_lines)
utils.WriteFile(key_file, data=new_file_content)
def AddPublicKey(new_uuid, new_key, key_file=pathutils.SSH_PUB_KEYS,
error_fn=errors.ProgrammerError):
"""Adds a new key to the list of public keys.
@see: _ManipulatePubKeyFile for parameter descriptions.
"""
_ManipulatePubKeyFile(new_uuid, new_key, key_file=key_file,
process_line_fn=_AddPublicKeyProcessLine,
process_else_fn=_AddPublicKeyElse,
error_fn=error_fn)
def RemovePublicKey(target_uuid, key_file=pathutils.SSH_PUB_KEYS,
error_fn=errors.ProgrammerError):
"""Removes a key from the list of public keys.
@see: _ManipulatePubKeyFile for parameter descriptions.
"""
_ManipulatePubKeyFile(target_uuid, None, key_file=key_file,
process_line_fn=_RemovePublicKeyProcessLine,
process_else_fn=_RemovePublicKeyElse,
error_fn=error_fn)
def ReplaceNameByUuid(node_uuid, node_name, key_file=pathutils.SSH_PUB_KEYS,
error_fn=errors.ProgrammerError):
"""Replaces a host name with the node's corresponding UUID.
  When a node is added to the cluster, we don't know its UUID yet. So first
its SSH key gets added to the public key file and in a second step, the
node's name gets replaced with the node's UUID as soon as we know the UUID.
@type node_uuid: string
@param node_uuid: the node's UUID to replace the node's name
@type node_name: string
@param node_name: the node's name to be replaced by the node's UUID
@see: _ManipulatePubKeyFile for the other parameter descriptions.
"""
process_line_fn = partial(_ReplaceNameByUuidProcessLine, node_uuid=node_uuid)
process_else_fn = partial(_ReplaceNameByUuidElse, node_uuid=node_uuid)
_ManipulatePubKeyFile(node_name, None, key_file=key_file,
process_line_fn=process_line_fn,
process_else_fn=process_else_fn,
error_fn=error_fn)
def ClearPubKeyFile(key_file=pathutils.SSH_PUB_KEYS, mode=0600):
"""Resets the content of the public key file.
"""
utils.WriteFile(key_file, data="", mode=mode)
def OverridePubKeyFile(key_map, key_file=pathutils.SSH_PUB_KEYS):
"""Overrides the public key file with a list of given keys.
@type key_map: dict from str to list of str
@param key_map: dictionary mapping uuids to lists of SSH keys
"""
new_lines = []
for (uuid, keys) in key_map.items():
for key in keys:
new_lines.append("%s %s\n" % (uuid, key))
new_file_content = "".join(new_lines)
utils.WriteFile(key_file, data=new_file_content)
def QueryPubKeyFile(target_uuids, key_file=pathutils.SSH_PUB_KEYS,
error_fn=errors.ProgrammerError):
"""Retrieves a map of keys for the requested node UUIDs.
@type target_uuids: str or list of str
@param target_uuids: UUID of the node to retrieve the key for or a list
of UUIDs of nodes to retrieve the keys for
@type key_file: str
@param key_file: filename of the file of public node keys (optional
parameter for testing)
@type error_fn: function
@param error_fn: Function that returns an exception, used to customize
exception types depending on the calling context
@rtype: dict mapping strings to list of strings
@return: dictionary mapping node uuids to their ssh keys
"""
all_keys = target_uuids is None
if isinstance(target_uuids, str):
target_uuids = [target_uuids]
result = {}
f = open(key_file, "r")
try:
for line in f:
(uuid, key) = _ParseKeyLine(line, error_fn)
if not uuid:
continue
if all_keys or (uuid in target_uuids):
if uuid not in result:
result[uuid] = []
result[uuid].append(key)
finally:
f.close()
return result
def InitSSHSetup(key_type, key_bits, error_fn=errors.OpPrereqError,
_homedir_fn=None, _suffix=""):
"""Setup the SSH configuration for the node.
  This generates an SSH keypair of the requested type for root and adds the
  public key to root's authorized_keys file.
@param key_type: the type of SSH keypair to be generated
@param key_bits: the key length, in bits, to be used
"""
priv_key, _, auth_keys = GetUserFiles(constants.SSH_LOGIN_USER, kind=key_type,
mkdir=True, _homedir_fn=_homedir_fn)
new_priv_key_name = priv_key + _suffix
new_pub_key_name = priv_key + _suffix + ".pub"
for name in new_priv_key_name, new_pub_key_name:
if os.path.exists(name):
utils.CreateBackup(name)
utils.RemoveFile(name)
result = utils.RunCmd(["ssh-keygen", "-b", str(key_bits), "-t", key_type,
"-f", new_priv_key_name,
"-q", "-N", ""])
if result.failed:
raise error_fn("Could not generate ssh keypair, error %s" %
result.output)
AddAuthorizedKey(auth_keys, utils.ReadFile(new_pub_key_name))
def InitPubKeyFile(master_uuid, key_type, key_file=pathutils.SSH_PUB_KEYS):
"""Creates the public key file and adds the master node's SSH key.
@type master_uuid: str
@param master_uuid: the master node's UUID
@type key_type: one of L{constants.SSHK_ALL}
@param key_type: the type of ssh key to be used
@type key_file: str
@param key_file: name of the file containing the public keys
"""
_, pub_key, _ = GetUserFiles(constants.SSH_LOGIN_USER, kind=key_type)
ClearPubKeyFile(key_file=key_file)
key = utils.ReadFile(pub_key)
AddPublicKey(master_uuid, key, key_file=key_file)
class SshRunner:
"""Wrapper for SSH commands.
"""
def __init__(self, cluster_name):
"""Initializes this class.
@type cluster_name: str
@param cluster_name: name of the cluster
"""
self.cluster_name = cluster_name
family = ssconf.SimpleStore().GetPrimaryIPFamily()
self.ipv6 = (family == netutils.IP6Address.family)
def _BuildSshOptions(self, batch, ask_key, use_cluster_key,
strict_host_check, private_key=None, quiet=True,
port=None):
"""Builds a list with needed SSH options.
@param batch: same as ssh's batch option
@param ask_key: allows ssh to ask for key confirmation; this
parameter conflicts with the batch one
@param use_cluster_key: if True, use the cluster name as the
HostKeyAlias name
@param strict_host_check: this makes the host key checking strict
@param private_key: use this private key instead of the default
@param quiet: whether to enable -q to ssh
@param port: the SSH port to use, or None to use the default
@rtype: list
@return: the list of options ready to use in L{utils.process.RunCmd}
"""
options = [
"-oEscapeChar=none",
"-oHashKnownHosts=no",
"-oGlobalKnownHostsFile=%s" % pathutils.SSH_KNOWN_HOSTS_FILE,
"-oUserKnownHostsFile=/dev/null",
"-oCheckHostIp=no",
]
if use_cluster_key:
options.append("-oHostKeyAlias=%s" % self.cluster_name)
if quiet:
options.append("-q")
if private_key:
options.append("-i%s" % private_key)
if port:
options.append("-oPort=%d" % port)
# TODO: Too many boolean options, maybe convert them to more descriptive
# constants.
# Note: ask_key conflicts with batch mode
if batch:
if ask_key:
raise errors.ProgrammerError("SSH call requested conflicting options")
options.append("-oBatchMode=yes")
if strict_host_check:
options.append("-oStrictHostKeyChecking=yes")
else:
options.append("-oStrictHostKeyChecking=no")
else:
# non-batch mode
if ask_key:
options.append("-oStrictHostKeyChecking=ask")
elif strict_host_check:
options.append("-oStrictHostKeyChecking=yes")
else:
options.append("-oStrictHostKeyChecking=no")
if self.ipv6:
options.append("-6")
else:
options.append("-4")
return options
def BuildCmd(self, hostname, user, command, batch=True, ask_key=False,
tty=False, use_cluster_key=True, strict_host_check=True,
private_key=None, quiet=True, port=None):
"""Build an ssh command to execute a command on a remote node.
@param hostname: the target host, string
@param user: user to auth as
@param command: the command
@param batch: if true, ssh will run in batch mode with no prompting
@param ask_key: if true, ssh will run with
StrictHostKeyChecking=ask, so that we can connect to an
unknown host (not valid in batch mode)
@param use_cluster_key: whether to expect and use the
cluster-global SSH key
@param strict_host_check: whether to check the host's SSH key at all
@param private_key: use this private key instead of the default
@param quiet: whether to enable -q to ssh
@param port: the SSH port on which the node's daemon is running
@return: the ssh call to run 'command' on the remote host.
"""
argv = [constants.SSH]
argv.extend(self._BuildSshOptions(batch, ask_key, use_cluster_key,
strict_host_check, private_key,
quiet=quiet, port=port))
if tty:
argv.extend(["-t", "-t"])
argv.append("%s@%s" % (user, hostname))
# Insert variables for virtual nodes
argv.extend("export %s=%s;" %
(utils.ShellQuote(name), utils.ShellQuote(value))
for (name, value) in
vcluster.EnvironmentForHost(hostname).items())
argv.append(command)
return argv
def Run(self, *args, **kwargs):
"""Runs a command on a remote node.
This method has the same return value as `utils.RunCmd()`, which it
uses to launch ssh.
Args: see SshRunner.BuildCmd.
@rtype: L{utils.process.RunResult}
@return: the result as from L{utils.process.RunCmd()}
"""
return utils.RunCmd(self.BuildCmd(*args, **kwargs))
def CopyFileToNode(self, node, port, filename):
"""Copy a file to another node with scp.
    @param node: node in the cluster
    @param port: the SSH port of the node's daemon
    @param filename: absolute pathname of a local file
@rtype: boolean
@return: the success of the operation
"""
if not os.path.isabs(filename):
logging.error("File %s must be an absolute path", filename)
return False
if not os.path.isfile(filename):
logging.error("File %s does not exist", filename)
return False
command = [constants.SCP, "-p"]
command.extend(self._BuildSshOptions(True, False, True, True, port=port))
command.append(filename)
if netutils.IP6Address.IsValid(node):
node = netutils.FormatAddress((node, None))
command.append("%s:%s" % (node, vcluster.ExchangeNodeRoot(node, filename)))
result = utils.RunCmd(command)
if result.failed:
logging.error("Copy to node %s failed (%s) error '%s',"
" command was '%s'",
node, result.fail_reason, result.output, result.cmd)
return not result.failed
def VerifyNodeHostname(self, node, ssh_port):
"""Verify hostname consistency via SSH.
    This function connects via ssh to a node and compares the hostname
    reported by the node to the name we have (the one that we
    connected to).
This is used to detect problems in ssh known_hosts files
(conflicting known hosts) and inconsistencies between dns/hosts
entries and local machine names
@param node: nodename of a host to check; can be short or
full qualified hostname
@param ssh_port: the port of a SSH daemon running on the node
@return: (success, detail), where:
- success: True/False
- detail: string with details
"""
cmd = ("if test -z \"$GANETI_HOSTNAME\"; then"
" hostname --fqdn;"
"else"
" echo \"$GANETI_HOSTNAME\";"
"fi")
retval = self.Run(node, constants.SSH_LOGIN_USER, cmd,
quiet=False, port=ssh_port)
if retval.failed:
msg = "ssh problem"
output = retval.output
if output:
msg += ": %s" % output
else:
msg += ": %s (no output)" % retval.fail_reason
logging.error("Command %s failed: %s", retval.cmd, msg)
return False, msg
remotehostname = retval.stdout.strip()
if not remotehostname or remotehostname != node:
if node.startswith(remotehostname + "."):
msg = "hostname not FQDN"
else:
msg = "hostname mismatch"
return False, ("%s: expected %s but got %s" %
(msg, node, remotehostname))
return True, "host matches"
def WriteKnownHostsFile(cfg, file_name):
"""Writes the cluster-wide equally known_hosts file.
"""
data = ""
if cfg.GetRsaHostKey():
data += "%s ssh-rsa %s\n" % (cfg.GetClusterName(), cfg.GetRsaHostKey())
if cfg.GetDsaHostKey():
data += "%s ssh-dss %s\n" % (cfg.GetClusterName(), cfg.GetDsaHostKey())
utils.WriteFile(file_name, mode=0600, data=data)
def _EnsureCorrectGanetiVersion(cmd):
"""Ensured the correct Ganeti version before running a command via SSH.
Before a command is run on a node via SSH, it makes sense in some
situations to ensure that this node is indeed running the correct
version of Ganeti like the rest of the cluster.
@type cmd: string
@param cmd: string
@rtype: list of strings
@return: a list of commands with the newly added ones at the beginning
"""
logging.debug("Ensure correct Ganeti version: %s", cmd)
version = constants.DIR_VERSION
all_cmds = [["test", "-d", os.path.join(pathutils.PKGLIBDIR, version)]]
if constants.HAS_GNU_LN:
all_cmds.extend([["ln", "-s", "-f", "-T",
os.path.join(pathutils.PKGLIBDIR, version),
os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")],
["ln", "-s", "-f", "-T",
os.path.join(pathutils.SHAREDIR, version),
os.path.join(pathutils.SYSCONFDIR, "ganeti/share")]])
else:
all_cmds.extend([["rm", "-f",
os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")],
["ln", "-s", "-f",
os.path.join(pathutils.PKGLIBDIR, version),
os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")],
["rm", "-f",
os.path.join(pathutils.SYSCONFDIR, "ganeti/share")],
["ln", "-s", "-f",
os.path.join(pathutils.SHAREDIR, version),
os.path.join(pathutils.SYSCONFDIR, "ganeti/share")]])
all_cmds.append(cmd)
return all_cmds
def RunSshCmdWithStdin(cluster_name, node, basecmd, port, data,
debug=False, verbose=False, use_cluster_key=False,
ask_key=False, strict_host_check=False,
ensure_version=False):
"""Runs a command on a remote machine via SSH and provides input in stdin.
@type cluster_name: string
@param cluster_name: Cluster name
@type node: string
@param node: Node name
@type basecmd: string
@param basecmd: Base command (path on the remote machine)
@type port: int
@param port: The SSH port of the remote machine or None for the default
@param data: JSON-serializable input data for script (passed to stdin)
@type debug: bool
@param debug: Enable debug output
@type verbose: bool
@param verbose: Enable verbose output
@type use_cluster_key: bool
@param use_cluster_key: See L{ssh.SshRunner.BuildCmd}
@type ask_key: bool
@param ask_key: See L{ssh.SshRunner.BuildCmd}
@type strict_host_check: bool
@param strict_host_check: See L{ssh.SshRunner.BuildCmd}
"""
cmd = [basecmd]
# Pass --debug/--verbose to the external script if set on our invocation
if debug:
cmd.append("--debug")
if verbose:
cmd.append("--verbose")
if ensure_version:
all_cmds = _EnsureCorrectGanetiVersion(cmd)
else:
all_cmds = [cmd]
if port is None:
port = netutils.GetDaemonPort(constants.SSH)
srun = SshRunner(cluster_name)
scmd = srun.BuildCmd(node, constants.SSH_LOGIN_USER,
utils.ShellQuoteArgs(
utils.ShellCombineCommands(all_cmds)),
batch=False, ask_key=ask_key, quiet=False,
strict_host_check=strict_host_check,
use_cluster_key=use_cluster_key,
port=port)
tempfh = tempfile.TemporaryFile()
try:
tempfh.write(serializer.DumpJson(data))
tempfh.seek(0)
result = utils.RunCmd(scmd, interactive=True, input_fd=tempfh)
finally:
tempfh.close()
if result.failed:
raise errors.OpExecError("Command '%s' failed: %s" %
(result.cmd, result.fail_reason))
def ReadRemoteSshPubKey(pub_key_file, node, cluster_name, port, ask_key,
strict_host_check):
"""Fetches a public SSH key from a node via SSH.
@type pub_key_file: string
  @param pub_key_file: path of the public key file on the remote node
"""
ssh_runner = SshRunner(cluster_name)
cmd = ["cat", pub_key_file]
ssh_cmd = ssh_runner.BuildCmd(node, constants.SSH_LOGIN_USER,
utils.ShellQuoteArgs(cmd),
batch=False, ask_key=ask_key, quiet=False,
strict_host_check=strict_host_check,
use_cluster_key=False,
port=port)
result = utils.RunCmd(ssh_cmd)
if result.failed:
raise errors.OpPrereqError("Could not fetch a public SSH key (%s) from node"
" '%s': ran command '%s', failure reason: '%s'."
% (pub_key_file, node, cmd, result.fail_reason),
errors.ECODE_INVAL)
return result.stdout
def GetSshKeyFilenames(key_type, suffix=""):
"""Get filenames of the SSH key pair of the given type.
@type key_type: string
@param key_type: type of SSH key, must be element of C{constants.SSHK_ALL}
@type suffix: string
@param suffix: optional suffix for the key filenames
@rtype: tuple of (string, string)
@returns: a tuple containing the name of the private key file and the
public key file.
"""
if key_type not in constants.SSHK_ALL:
raise errors.SshUpdateError("Unsupported key type '%s'. Supported key types"
" are: %s." % (key_type, constants.SSHK_ALL))
(_, root_keyfiles) = \
GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=False, dircheck=False)
if not key_type in root_keyfiles.keys():
raise errors.SshUpdateError("No keyfile for key type '%s' available."
% key_type)
key_filenames = root_keyfiles[key_type]
if suffix:
key_filenames = [_ComputeKeyFilePathWithSuffix(key_filename, suffix)
for key_filename in key_filenames]
return key_filenames
def GetSshPubKeyFilename(key_type, suffix=""):
"""Get filename of the public SSH key of the given type.
@type key_type: string
@param key_type: type of SSH key, must be element of C{constants.SSHK_ALL}
@type suffix: string
@param suffix: optional suffix for the key filenames
@rtype: string
@returns: file name of the public key file
"""
return GetSshKeyFilenames(key_type, suffix=suffix)[1]
def _ComputeKeyFilePathWithSuffix(key_filepath, suffix):
"""Converts the given key filename to a key filename with a suffix.
@type key_filepath: string
@param key_filepath: path of the key file
@type suffix: string
@param suffix: suffix to be appended to the basename of the file
"""
path = os.path.dirname(key_filepath)
ext = os.path.splitext(os.path.basename(key_filepath))[1]
basename = os.path.splitext(os.path.basename(key_filepath))[0]
return os.path.join(path, basename + suffix + ext)
def ReplaceSshKeys(src_key_type, dest_key_type,
src_key_suffix="", dest_key_suffix=""):
"""Replaces an SSH key pair by another SSH key pair.
Note that both parts, the private and the public key, are replaced.
@type src_key_type: string
@param src_key_type: key type of key pair that is replacing the other
key pair
@type dest_key_type: string
@param dest_key_type: key type of the key pair that is being replaced
by the source key pair
@type src_key_suffix: string
@param src_key_suffix: optional suffix of the key files of the source
key pair
@type dest_key_suffix: string
  @param dest_key_suffix: optional suffix of the key files of the
destination key pair
"""
(src_priv_filename, src_pub_filename) = GetSshKeyFilenames(
src_key_type, suffix=src_key_suffix)
(dest_priv_filename, dest_pub_filename) = GetSshKeyFilenames(
dest_key_type, suffix=dest_key_suffix)
if not (os.path.exists(src_priv_filename) and
os.path.exists(src_pub_filename)):
raise errors.SshUpdateError(
"At least one of the source key files is missing: %s",
", ".join([src_priv_filename, src_pub_filename]))
for dest_file in [dest_priv_filename, dest_pub_filename]:
if os.path.exists(dest_file):
utils.CreateBackup(dest_file)
utils.RemoveFile(dest_file)
shutil.move(src_priv_filename, dest_priv_filename)
shutil.move(src_pub_filename, dest_pub_filename)
def ReadLocalSshPubKeys(key_types, suffix=""):
"""Reads the local root user SSH key.
@type key_types: list of string
@param key_types: types of SSH keys. Must be subset of constants.SSHK_ALL. If
'None' or [], all available keys are returned.
@type suffix: string
@param suffix: optional suffix to be attached to key names when reading
them. Used for temporary key files.
@rtype: list of string
@return: list of public keys
"""
fetch_key_types = []
if key_types:
fetch_key_types += key_types
else:
fetch_key_types = constants.SSHK_ALL
(_, root_keyfiles) = \
GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=False, dircheck=False)
result_keys = []
for (public_key_type, (_, public_key_file)) in root_keyfiles.items():
if public_key_type not in fetch_key_types:
continue
public_key_dir = os.path.dirname(public_key_file)
public_key_filename = ""
if suffix:
public_key_filename = \
os.path.splitext(os.path.basename(public_key_file))[0] \
+ suffix + ".pub"
else:
public_key_filename = public_key_file
public_key_path = os.path.join(public_key_dir,
public_key_filename)
if not os.path.exists(public_key_path):
raise errors.SshUpdateError("Cannot find SSH public key of type '%s'."
% public_key_type)
else:
key = utils.ReadFile(public_key_path)
result_keys.append(key)
return result_keys
# Update gnt-cluster.rst when changing which combinations are valid.
KeyBitInfo = namedtuple('KeyBitInfo', ['default', 'validation_fn'])
SSH_KEY_VALID_BITS = {
constants.SSHK_DSA: KeyBitInfo(1024, lambda b: b == 1024),
constants.SSHK_RSA: KeyBitInfo(2048, lambda b: b >= 768),
constants.SSHK_ECDSA: KeyBitInfo(384, lambda b: b in [256, 384, 521]),
}
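# For example, ECDSA keys are only accepted at 256, 384 or 521 bits and RSA
# keys at 768 bits or more; the defaults above are used by DetermineKeyBits()
# when the caller does not request an explicit size.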
def DetermineKeyBits(key_type, key_bits, old_key_type, old_key_bits):
"""Checks the key bits to be used for a given key type, or provides defaults.
@type key_type: one of L{constants.SSHK_ALL}
@param key_type: The key type to use.
@type key_bits: positive int or None
@param key_bits: The number of bits to use, if supplied by user.
@type old_key_type: one of L{constants.SSHK_ALL} or None
@param old_key_type: The previously used key type, if any.
@type old_key_bits: positive int or None
@param old_key_bits: The previously used number of bits, if any.
@rtype: positive int
@return: The number of bits to use.
"""
if key_bits is None:
if old_key_type is not None and old_key_type == key_type:
key_bits = old_key_bits
else:
key_bits = SSH_KEY_VALID_BITS[key_type].default
if not SSH_KEY_VALID_BITS[key_type].validation_fn(key_bits):
raise errors.OpPrereqError("Invalid key type and bit size combination:"
" %s with %s bits" % (key_type, key_bits),
errors.ECODE_INVAL)
return key_bits
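# Illustrative behaviour of DetermineKeyBits (a sketch with hypothetical values,
# assuming the SSH_KEY_VALID_BITS table above): the old bit size is only reused
# when the key type is unchanged, otherwise the per-type default applies, and
# the validation function may still reject the combination.
#
#   DetermineKeyBits(constants.SSHK_RSA, None, constants.SSHK_RSA, 4096)    # -> 4096
#   DetermineKeyBits(constants.SSHK_RSA, None, constants.SSHK_DSA, 1024)    # -> 2048 (RSA default)
#   DetermineKeyBits(constants.SSHK_ECDSA, 512, None, None)                 # raises OpPrereqError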
| bsd-2-clause | -2,877,672,071,644,923,400 | 32.432012 | 80 | 0.659144 | false |
vwflow/raws-python | raws_json/__init__.py | 1 | 28752 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This is a modified version of '__init__.py' (version 1.1.1), part of the 'atom' module
# from the gdata-python-client project (http://code.google.com/p/gdata-python-client/) by Google Inc.
# Copyright (C) 2006, 2007, 2008 Google Inc.
#
# It has been modified to support json formatted data instead of atom.
# Copyright (C) 2012 rambla.eu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder (Jeffrey Scudder)'
"""JsonService provides CRUD ops. in line with the Atom Publishing Protocol.
JsonService: Encapsulates the ability to perform insert, update and delete
operations with the Atom Publishing Protocol on which GData is
based. An instance can perform query, insertion, deletion, and
update.
HttpRequest: Function that performs a GET, POST, PUT, or DELETE HTTP request
to the specified end point. An JsonService object or a subclass can be
used to specify information about the request.
"""
import os
import httplib
import urllib
import re
import base64
import socket
URL_REGEX = re.compile('http(s)?\://([\w\.-]*)(\:(\d+))?(/.*)?')
class JsonService(object):
"""Performs Atom Publishing Protocol CRUD operations.
The JsonService contains methods to perform HTTP CRUD operations.
"""
# Default values for members -> disabled, also works without them
# port = 80
# ssl = False
# If debug is True, the HTTPConnection will display debug information
debug = False
def __init__(self, server=None, additional_headers=None):
"""Creates a new JsonService client.
Args:
server: string (optional) The start of a URL for the server
to which all operations should be directed. Example:
'www.google.com'
additional_headers: dict (optional) Any additional HTTP headers which
should be included with CRUD operations.
"""
self.server = server
self.additional_headers = additional_headers or {}
self.additional_headers['User-Agent'] = 'Python Google Data Client Lib'
def _ProcessUrl(self, url, for_proxy=False):
"""Processes a passed URL. If the URL does not begin with https?, then
the default value for self.server is used"""
return ProcessUrl(self, url, for_proxy=for_proxy)
def UseBasicAuth(self, username, password, for_proxy=False):
"""Sets an Authenticaiton: Basic HTTP header containing plaintext.
The username and password are base64 encoded and added to an HTTP header
which will be included in each request. Note that your username and
password are sent in plaintext.
Args:
username: str
password: str
"""
UseBasicAuth(self, username, password, for_proxy=for_proxy)
def PrepareConnection(self, full_uri):
"""Opens a connection to the server based on the full URI.
Examines the target URI and the proxy settings, which are set as
environment variables, to open a connection with the server. This
connection is used to make an HTTP request.
Args:
full_uri: str Which is the target relative (lacks protocol and host) or
absolute URL to be opened. Example:
'https://www.google.com/accounts/ClientLogin' or
'base/feeds/snippets' where the server is set to www.google.com.
Returns:
A tuple containing the httplib.HTTPConnection and the full_uri for the
request.
"""
return PrepareConnection(self, full_uri)
# Alias the old name for the above method to preserve backwards
# compatibility.
_PrepareConnection = PrepareConnection
# CRUD operations
def Get(self, uri, extra_headers=None, url_params=None, escape_params=True):
"""Query the APP server with the given URI
The uri is the portion of the URI after the server value
(server example: 'www.google.com').
Example use:
To perform a query against Google Base, set the server to
'base.google.com' and set the uri to '/base/feeds/...', where ... is
your query. For example, to find snippets for all digital cameras uri
should be set to: '/base/feeds/snippets?bq=digital+camera'
Args:
uri: string The query in the form of a URI. Example:
'/base/feeds/snippets?bq=digital+camera'.
      extra_headers: dict (optional) Extra HTTP headers to be included
in the GET request. These headers are in addition to
those stored in the client's additional_headers property.
The client automatically sets the Content-Type and
Authorization headers.
url_params: dict (optional) Additional URL parameters to be included
in the query. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
httplib.HTTPResponse The server's response to the GET request.
"""
    if extra_headers is None:
      extra_headers = {}
    extra_headers.update({'Accept': 'application/json', 'Content-Type': 'application/json'})
    return HttpRequest(self, 'GET', None, uri, extra_headers=extra_headers,
                       url_params=url_params, escape_params=escape_params)
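  # Illustrative call (hypothetical path; assumes self.server has been set and
  # the endpoint is reachable):
  #
  #   response = client.Get('/json/feeds/items', url_params={'max-results': '25'})
  #   body = response.read()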
def Post(self, data, uri, extra_headers=None, url_params=None,
escape_params=True, content_type='application/atom+xml'):
"""Insert data into an APP server at the given URI.
Args:
data: string, ElementTree._Element, or something with a __str__ method
The XML to be sent to the uri.
uri: string The location (feed) to which the data should be inserted.
Example: '/base/feeds/items'.
extra_headers: dict (optional) HTTP headers which are to be included.
The client automatically sets the Content-Type,
Authorization, and Content-Length headers.
url_params: dict (optional) Additional URL parameters to be included
in the URI. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
httplib.HTTPResponse Server's response to the POST request.
"""
    if extra_headers is None:
      extra_headers = {}
    extra_headers.update({'Accept': 'application/json'})
    return HttpRequest(self, 'POST', data, uri, extra_headers=extra_headers,
                       url_params=url_params, escape_params=escape_params,
                       content_type=content_type)
def Put(self, data, uri, extra_headers=None, url_params=None,
escape_params=True, content_type='application/atom+xml'):
"""Updates an entry at the given URI.
Args:
data: string, ElementTree._Element, or xml_wrapper.ElementWrapper The
XML containing the updated data.
uri: string A URI indicating entry to which the update will be applied.
Example: '/base/feeds/items/ITEM-ID'
extra_headers: dict (optional) HTTP headers which are to be included.
The client automatically sets the Content-Type,
Authorization, and Content-Length headers.
url_params: dict (optional) Additional URL parameters to be included
in the URI. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
httplib.HTTPResponse Server's response to the PUT request.
"""
    if extra_headers is None:
      extra_headers = {}
    extra_headers.update({'Accept': 'application/json'})
    return HttpRequest(self, 'PUT', data, uri, extra_headers=extra_headers,
                       url_params=url_params, escape_params=escape_params,
                       content_type=content_type)
def Delete(self, uri, extra_headers=None, url_params=None,
escape_params=True):
"""Deletes the entry at the given URI.
Args:
uri: string The URI of the entry to be deleted. Example:
'/base/feeds/items/ITEM-ID'
extra_headers: dict (optional) HTTP headers which are to be included.
The client automatically sets the Content-Type and
Authorization headers.
url_params: dict (optional) Additional URL parameters to be included
in the URI. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
httplib.HTTPResponse Server's response to the DELETE request.
"""
return HttpRequest(self, 'DELETE', None, uri, extra_headers=extra_headers,
url_params=url_params, escape_params=escape_params)
def Head(self, uri, extra_headers=None, url_params=None, escape_params=True):
"""Send a HEAD request to the APP server with the given URI
The uri is the portion of the URI after the server value
(server example: 'www.google.com').
Example use:
To perform a query against Google Base, set the server to
'base.google.com' and set the uri to '/base/feeds/...', where ... is
your query. For example, to find snippets for all digital cameras uri
should be set to: '/base/feeds/snippets?bq=digital+camera'
Args:
uri: string The query in the form of a URI. Example:
'/base/feeds/snippets?bq=digital+camera'.
      extra_headers: dict (optional) Extra HTTP headers to be included
in the GET request. These headers are in addition to
those stored in the client's additional_headers property.
The client automatically sets the Content-Type and
Authorization headers.
url_params: dict (optional) Additional URL parameters to be included
in the query. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
httplib.HTTPResponse The server's response to the GET request.
"""
return HttpRequest(self, 'HEAD', None, uri, extra_headers=extra_headers,
url_params=url_params, escape_params=escape_params)
def HttpRequest(service, operation, data, uri, extra_headers=None,
url_params=None, escape_params=True, content_type='application/atom+xml'):
"""Performs an HTTP call to the server, supports GET, POST, PUT, and DELETE.
  Usage example, perform an HTTP GET on http://www.google.com/:
import atom.service
client = atom.service.JsonService()
http_response = client.Get('http://www.google.com/')
or you could set the client.server to 'www.google.com' and use the
following:
client.server = 'www.google.com'
http_response = client.Get('/')
Args:
service: atom.JsonService object which contains some of the parameters
needed to make the request. The following members are used to
construct the HTTP call: server (str), additional_headers (dict),
port (int), and ssl (bool).
operation: str The HTTP operation to be performed. This is usually one of
'GET', 'POST', 'PUT', or 'DELETE'
data: ElementTree, filestream, list of parts, or other object which can be
converted to a string.
        Should be set to None when performing a GET or DELETE.
If data is a file-like object which can be read, this method will read
a chunk of 100K bytes at a time and send them.
If the data is a list of parts to be sent, each part will be evaluated
and sent.
uri: The beginning of the URL to which the request should be sent.
Examples: '/', '/base/feeds/snippets',
'/m8/feeds/contacts/default/base'
extra_headers: dict of strings. HTTP headers which should be sent
in the request. These headers are in addition to those stored in
service.additional_headers.
url_params: dict of strings. Key value pairs to be added to the URL as
URL parameters. For example {'foo':'bar', 'test':'param'} will
become ?foo=bar&test=param.
escape_params: bool default True. If true, the keys and values in
url_params will be URL escaped when the form is constructed
(Special characters converted to %XX form.)
content_type: str The MIME type for the data being sent. Defaults to
'application/atom+xml', this is only used if data is set.
"""
full_uri = BuildUri(uri, url_params, escape_params)
(connection, full_uri) = PrepareConnection(service, full_uri)
if extra_headers is None:
extra_headers = {}
# Turn on debug mode if the debug member is set.
if service.debug:
connection.debuglevel = 1
connection.putrequest(operation, full_uri)
# If the list of headers does not include a Content-Length, attempt to
# calculate it based on the data object.
if (data and not service.additional_headers.has_key('Content-Length') and
not extra_headers.has_key('Content-Length')):
content_length = __CalculateDataLength(data)
if content_length:
extra_headers['Content-Length'] = str(content_length)
else:
extra_headers['Content-Length'] = "0"
if content_type:
extra_headers['Content-Type'] = content_type
# Send the HTTP headers.
if isinstance(service.additional_headers, dict):
for header in service.additional_headers:
connection.putheader(header, service.additional_headers[header])
if isinstance(extra_headers, dict):
for header in extra_headers:
connection.putheader(header, extra_headers[header])
connection.endheaders()
# If there is data, send it in the request.
if data:
if isinstance(data, list):
for data_part in data:
__SendDataPart(data_part, connection)
else:
__SendDataPart(data, connection)
# Return the HTTP Response from the server.
return connection.getresponse()
def __SendDataPart(data, connection):
if isinstance(data, str):
connection.send(data)
return
elif isinstance(data, unicode):
# unicode string must be converted into 8-bit string version (otherwise httplib will raise UnicodeDecodeError)
connection.send(data.encode('utf-8'))
return
# NEXT SECTION COMMENTED OUT, replace by json.decode() if desired
# elif ElementTree.iselement(data):
# connection.send(ElementTree.tostring(data))
# return
# Check to see if data is a file-like object that has a read method.
elif hasattr(data, 'read'):
# Read the file and send it a chunk at a time.
while 1:
binarydata = data.read(100000)
if binarydata == '': break
connection.send(binarydata)
return
else:
# The data object was not a file.
# Try to convert to a string and send the data.
connection.send(str(data))
return
def __CalculateDataLength(data):
"""Attempts to determine the length of the data to send.
This method will respond with a length only if the data is a string or
  an ElementTree element.
Args:
    data: object If this is not a string or ElementTree element this function
will return None.
"""
if isinstance(data, str):
return len(data)
elif isinstance(data, unicode):
    return len(data.encode('utf-8'))
elif isinstance(data, list):
return None
# elif ElementTree.iselement(data):
# return len(ElementTree.tostring(data))
elif hasattr(data, 'read'):
# If this is a file-like object, don't try to guess the length.
return None
else:
return len(str(data))
def PrepareConnection(service, full_uri):
"""Opens a connection to the server based on the full URI.
Examines the target URI and the proxy settings, which are set as
environment variables, to open a connection with the server. This
connection is used to make an HTTP request.
Args:
service: atom.JsonService or a subclass. It must have a server string which
represents the server host to which the request should be made. It may also
have a dictionary of additional_headers to send in the HTTP request.
full_uri: str Which is the target relative (lacks protocol and host) or
absolute URL to be opened. Example:
'https://www.google.com/accounts/ClientLogin' or
'base/feeds/snippets' where the server is set to www.google.com.
Returns:
A tuple containing the httplib.HTTPConnection and the full_uri for the
request.
"""
(server, port, ssl, partial_uri) = ProcessUrl(service, full_uri)
if ssl:
# destination is https
proxy = os.environ.get('https_proxy')
if proxy:
(p_server, p_port, p_ssl, p_uri) = ProcessUrl(service, proxy, True)
proxy_username = os.environ.get('proxy-username')
if not proxy_username:
proxy_username = os.environ.get('proxy_username')
proxy_password = os.environ.get('proxy-password')
if not proxy_password:
proxy_password = os.environ.get('proxy_password')
if proxy_username:
user_auth = base64.encodestring('%s:%s' % (proxy_username,
proxy_password))
proxy_authorization = ('Proxy-authorization: Basic %s\r\n' % (
user_auth.strip()))
else:
proxy_authorization = ''
proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (server, port)
user_agent = 'User-Agent: %s\r\n' % (
service.additional_headers['User-Agent'])
proxy_pieces = (proxy_connect + proxy_authorization + user_agent
+ '\r\n')
#now connect, very simple recv and error checking
p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
p_sock.connect((p_server,p_port))
p_sock.sendall(proxy_pieces)
response = ''
# Wait for the full response.
while response.find("\r\n\r\n") == -1:
response += p_sock.recv(8192)
p_status=response.split()[1]
if p_status!=str(200):
        raise Exception('Error status=' + str(p_status))
# Trivial setup for ssl socket.
ssl = socket.ssl(p_sock, None, None)
fake_sock = httplib.FakeSocket(p_sock, ssl)
# Initalize httplib and replace with the proxy socket.
connection = httplib.HTTPConnection(server)
connection.sock=fake_sock
full_uri = partial_uri
else:
connection = httplib.HTTPSConnection(server, port)
full_uri = partial_uri
else:
# destination is http
proxy = os.environ.get('http_proxy')
if proxy:
(p_server, p_port, p_ssl, p_uri) = ProcessUrl(service.server, proxy, True)
proxy_username = os.environ.get('proxy-username')
if not proxy_username:
proxy_username = os.environ.get('proxy_username')
proxy_password = os.environ.get('proxy-password')
if not proxy_password:
proxy_password = os.environ.get('proxy_password')
if proxy_username:
UseBasicAuth(service, proxy_username, proxy_password, True)
connection = httplib.HTTPConnection(p_server, p_port)
if not full_uri.startswith("http://"):
if full_uri.startswith("/"):
full_uri = "http://%s%s" % (service.server, full_uri)
else:
full_uri = "http://%s/%s" % (service.server, full_uri)
else:
connection = httplib.HTTPConnection(server, port)
full_uri = partial_uri
return (connection, full_uri)
def UseBasicAuth(service, username, password, for_proxy=False):
"""Sets an Authenticaiton: Basic HTTP header containing plaintext.
The username and password are base64 encoded and added to an HTTP header
which will be included in each request. Note that your username and
password are sent in plaintext. The auth header is added to the
additional_headers dictionary in the service object.
Args:
service: atom.JsonService or a subclass which has an
additional_headers dict as a member.
username: str
password: str
"""
base_64_string = base64.encodestring('%s:%s' % (username, password))
base_64_string = base_64_string.strip()
if for_proxy:
header_name = 'Proxy-Authorization'
else:
header_name = 'Authorization'
service.additional_headers[header_name] = 'Basic %s' % (base_64_string,)
def ProcessUrl(service, url, for_proxy=False):
"""Processes a passed URL. If the URL does not begin with https?, then
the default value for server is used"""
server = None
port = 80
ssl = False
if hasattr(service, 'server'):
server = service.server
else:
server = service
if not for_proxy:
if hasattr(service, 'port'):
port = service.port
if hasattr(service, 'ssl'):
ssl = service.ssl
uri = url
m = URL_REGEX.match(url)
if m is None:
return (server, port, ssl, uri)
else:
if m.group(1) is not None:
port = 443
ssl = True
if m.group(3) is None:
server = m.group(2)
else:
server = m.group(2)
port = int(m.group(4))
if m.group(5) is not None:
uri = m.group(5)
else:
uri = '/'
return (server, port, ssl, uri)
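# Illustrative results (hypothetical URLs):
#
#   ProcessUrl(None, 'https://www.google.com/accounts/ClientLogin')
#       -> ('www.google.com', 443, True, '/accounts/ClientLogin')
#   ProcessUrl('www.example.com', '/feeds/items')
#       -> ('www.example.com', 80, False, '/feeds/items')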
def DictionaryToParamList(url_parameters, escape_params=True):
"""Convert a dictionary of URL arguments into a URL parameter string.
Args:
    url_parameters: The dictionary of key-value pairs which will be converted
into URL parameters. For example,
{'dry-run': 'true', 'foo': 'bar'}
will become ['dry-run=true', 'foo=bar'].
Returns:
A list which contains a string for each key-value pair. The strings are
ready to be incorporated into a URL by using '&'.join([] + parameter_list)
"""
# Choose which function to use when modifying the query and parameters.
# Use quote_plus when escape_params is true.
transform_op = [str, urllib.quote_plus][bool(escape_params)]
# Create a list of tuples containing the escaped version of the
# parameter-value pairs.
parameter_tuples = [(transform_op(param), transform_op(value))
for param, value in (url_parameters or {}).items()]
# Turn parameter-value tuples into a list of strings in the form
# 'PARAMETER=VALUE'.
return ['='.join(x) for x in parameter_tuples]
def BuildUri(uri, url_params=None, escape_params=True):
"""Converts a uri string and a collection of parameters into a URI.
Args:
uri: string
url_params: dict (optional)
escape_params: boolean (optional)
    uri: string The start of the desired URI. This string can already contain
URL parameters. Examples: '/base/feeds/snippets',
'/base/feeds/snippets?bq=digital+camera'
url_parameters: dict (optional) Additional URL parameters to be included
in the query. These are translated into query arguments
in the form '&dict_key=value&...'.
Example: {'max-results': '250'} becomes &max-results=250
escape_params: boolean (optional) If false, the calling code has already
ensured that the query will form a valid URL (all
reserved characters have been escaped). If true, this
method will escape the query and any URL parameters
provided.
Returns:
string The URI consisting of the escaped URL parameters appended to the
initial uri string.
"""
# Prepare URL parameters for inclusion into the GET request.
parameter_list = DictionaryToParamList(url_params, escape_params)
# Append the URL parameters to the URL.
if parameter_list:
if uri.find('?') != -1:
# If there are already URL parameters in the uri string, add the
# parameters after a new & character.
full_uri = '&'.join([uri] + parameter_list)
else:
# The uri string did not have any URL parameters (no ? character)
# so put a ? between the uri and URL parameters.
full_uri = '%s%s' % (uri, '?%s' % ('&'.join([] + parameter_list)))
else:
full_uri = uri
return full_uri
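# Illustrative result (hypothetical feed path):
#
#   BuildUri('/base/feeds/snippets', url_params={'max-results': '250'})
#       -> '/base/feeds/snippets?max-results=250'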
class MediaSource(object):
"""Raws Entries can refer to media sources, so this class provides a
place to store references to these objects along with some metadata.
"""
def __init__(self, file_handle=None, content_type=None, content_length=None,
file_path=None, file_name=None, svr_filename = None):
"""Creates an object of type MediaSource.
Args:
file_handle: A file handle pointing to the file to be encapsulated in the
MediaSource
content_type: string The MIME type of the file. Required if a file_handle
is given.
content_length: int The size of the file. Required if a file_handle is
given.
file_path: string (optional) A full path name to the file. Used in
place of a file_handle.
file_name: string The name of the file without any path information.
Required if a file_handle is given.
"""
self.file_handle = file_handle
self.content_type = content_type
self.content_length = content_length
self.file_name = file_name
self.svr_filename = svr_filename
self.file_path = file_path
if (file_handle is None and file_path is not None):
self.setFile(file_path, content_type)
if not self.svr_filename:
self.svr_filename = self.file_name
def setFile(self, file_name, content_type):
"""A helper function which can create a file handle from a given filename
and set the content type and length all at once.
Args:
file_name: string The path and file name to the file containing the media
content_type: string A MIME type representing the type of the media
"""
self.file_handle = open(file_name, 'rb')
self.content_type = content_type
self.content_length = os.path.getsize(file_name)
self.file_name = os.path.basename(file_name)
def writeFile(self, file_path):
# can not write if no path and handle
if not file_path or not self.file_handle:
return False
self.file_path = file_path
fd = open(file_path, 'wb')
fd.write(self.file_handle.read())
fd.close()
return True
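# Illustrative usage (hypothetical file path; setFile() opens the file and
# derives content_length and file_name from it):
#
#   src = MediaSource(file_path='/tmp/clip.mp4', content_type='video/mp4')
#   src.file_name        # 'clip.mp4'
#   src.content_length   # size of /tmp/clip.mp4 in bytes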
| apache-2.0 | 4,799,143,421,886,727,000 | 40.015692 | 116 | 0.643399 | false |
quaquel/EMAworkbench | test/test_em_framework/test_outcomes.py | 1 | 5451 | '''
Created on Jul 28, 2015
.. codeauthor:: jhkwakkel <j.h.kwakkel (at) tudelft (dot) nl>
'''
from __future__ import (division, print_function, absolute_import,
unicode_literals)
import unittest
import unittest.mock as mock
from ema_workbench.em_framework.outcomes import ScalarOutcome,\
TimeSeriesOutcome
class TestScalarOutcome(unittest.TestCase):
outcome_class = ScalarOutcome
outcome_klass = "ScalarOutcome"
def test_outcome(self):
name = 'test'
outcome = self.outcome_class(name)
self.assertEqual(outcome.name, name)
self.assertEqual(outcome.variable_name, [name])
self.assertIsNone(outcome.function)
self.assertEqual(repr(outcome), self.outcome_klass+'(\'test\')')
name = 'test'
var_name = 'something else'
outcome = self.outcome_class(name, variable_name=var_name)
self.assertEqual(outcome.name, name)
self.assertEqual(outcome.variable_name, [var_name])
self.assertIsNone(outcome.function)
name = 'test'
var_name = 'something else'
function = mock.Mock()
outcome = self.outcome_class(name, variable_name=var_name,
function=function)
self.assertEqual(outcome.name, name)
self.assertEqual(outcome.variable_name, [var_name])
self.assertIsNotNone(outcome.function)
with self.assertRaises(ValueError):
name = 'test'
var_name = 'something else'
function = 'not a function'
outcome = self.outcome_class(name, variable_name=var_name,
function=function)
with self.assertRaises(ValueError):
name = 'test'
var_name = 1
outcome = self.outcome_class(name, variable_name=var_name,
function=function)
with self.assertRaises(ValueError):
name = 'test'
var_name = ['a variable', 1]
outcome = self.outcome_class(name, variable_name=var_name,
function=function)
name = 'test'
var_name = 'something else'
function = lambda x: x
outcome1 = self.outcome_class(name, variable_name=var_name,
function=function)
outcome2 = self.outcome_class(name, variable_name=var_name,
function=function)
self.assertEqual(outcome1, outcome2)
def test_process(self):
name = 'test'
outcome = self.outcome_class(name)
outputs = [1]
self.assertEqual(outcome.process(outputs), outputs[0])
name = 'test'
function = mock.Mock()
function.return_value = 2
outcome = self.outcome_class(name, function=function)
outputs = [1]
self.assertEqual(outcome.process(outputs), 2)
function.assert_called_once()
name = 'test'
function = mock.Mock()
function.return_value = 2
variable_name = ['a', 'b']
outcome = self.outcome_class(name, function=function,
variable_name=variable_name)
outputs = [1, 2]
self.assertEqual(outcome.process(outputs), 2)
function.assert_called_once()
function.assert_called_with(1, 2)
with self.assertRaises(ValueError):
name = 'test'
function = mock.Mock()
function.return_value = 2
variable_name = ['a', 'b']
outcome = self.outcome_class(name, function=function,
variable_name=variable_name)
outcome.process([1])
class TestTimeSeriesOutcome(TestScalarOutcome):
outcome_class = TimeSeriesOutcome
outcome_klass = "TimeSeriesOutcome"
def test_process(self):
name = 'test'
outcome = self.outcome_class(name)
outputs = [[1]]
self.assertEqual(outcome.process(outputs), outputs[0])
name = 'test'
function = mock.Mock()
function.return_value = [2]
outcome = self.outcome_class(name, function=function)
outputs = [1]
self.assertEqual(outcome.process(outputs), [2])
function.assert_called_once()
name = 'test'
function = mock.Mock()
function.return_value = [2]
variable_name = ['a', 'b']
outcome = self.outcome_class(name, function=function,
variable_name=variable_name)
outputs = [1, 2]
self.assertEqual(outcome.process(outputs), [2])
function.assert_called_once()
function.assert_called_with(1, 2)
with self.assertRaises(ValueError):
name = 'test'
function = mock.Mock()
function.return_value = [2]
variable_name = ['a', 'b']
outcome = self.outcome_class(name, function=function,
variable_name=variable_name)
outcome.process([1])
if __name__ == "__main__":
unittest.main() | bsd-3-clause | 1,020,453,058,477,931,600 | 32.243902 | 72 | 0.532196 | false |
google/google-ctf | 2019/finals/web-gphotos-finals/app/gallery/middleware.py | 1 | 1230 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.text import slugify
import hashlib
import os
class UserMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
user_id = request.COOKIES.get('user')
if not user_id:
user_id = hashlib.md5(os.urandom(16)).hexdigest()
else:
user_id = slugify(user_id)
user_dir = os.path.join('media', user_id)
if not os.path.exists(user_dir):
os.makedirs(user_dir)
os.makedirs(os.path.join(user_dir, 'thumbs'))
request.user_id = user_id
response = self.get_response(request)
response.set_cookie('user', user_id)
return response
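  # Illustrative flow (a sketch using Django's test client; the URL and client
  # are hypothetical, not part of this middleware): a first request gets a
  # random hex user id, a media/<id>/thumbs directory, and a 'user' cookie that
  # later requests reuse after slugification.
  #
  #   response = client.get('/')
  #   user_id = response.cookies['user'].value
  #   os.path.isdir(os.path.join('media', user_id, 'thumbs'))   # -> True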
| apache-2.0 | 8,556,968,117,111,451,000 | 28.285714 | 74 | 0.704065 | false |
EggieCode/ansible-role-ufw | test/test.py | 1 | 2161 | #!/usr/bin/env python3
import imp
import os
import unittest
from pprint import pprint
imp.load_source('ufw_forward', os.path.join(os.path.dirname(__file__), os.path.pardir, 'library', 'ufw_forward.py'))
from ufw_forward import UFWForwards
class TestBase(unittest.TestCase):
def test_do_magic(self):
test = { "incomming_dev": "eth0",
"outgoing_dev": "lxdbr0",
"outgoing_network": "10.20.10.0/24",
"masquerading": True,
"conntrack_state": "RELATED,ESTABLISHED",
"reroute": [],
"forwards": [
{
"container": "mumble.baviaan.eggie.zone",
"destination_ip": "10.20.10.11",
"destination_port": [
64738
],
"incomming_ip": "88.99.152.112",
"incomming_port": [
64738
],
"protocol": [
"tcp",
"udp"
]
},
{
"container": "brandon-minecraft.baviaan.eggie.zone",
"destination_ip": "10.20.10.12",
"destination_port": [
25565
],
"incomming_ip": "88.99.152.112",
"incomming_port": [
25565
],
"protocol": [
"tcp"
]
}
]
}
response = {
'nat_rules' : [],
'filter_rules' : []
}
ufw_forwards = UFWForwards(test, False)
ufw_forwards.nat_rules = response['nat_rules']
ufw_forwards.filter_rules = response['filter_rules']
ufw_forwards.generate()
for rule in response['nat_rules']:
print(" ".join(rule))
        pprint(response['filter_rules'])
for rule in response['filter_rules']:
print(" ".join(rule))
if __name__ == '__main__':
unittest.main()
| mit | -6,576,547,500,244,771,000 | 28.202703 | 116 | 0.410921 | false |
UKPLab/emnlp2017-claim-identification | src/main/python/process_data_se_WithDevel.py | 1 | 4976 | import cPickle
import numpy as np
import pandas as pd
import re
import sys
from collections import defaultdict
def build_data_cv(data_folder, cv=10, clean_string=True):
"""
Loads data.
"""
revs = []
pos_file = data_folder[0] # train file
neg_file = data_folder[1] # test file
devel_file = data_folder[2]
vocab = defaultdict(float)
for (mysplit,myfile) in [(0,pos_file),(1,neg_file),(2,devel_file)]:
with open(myfile, "rb") as f:
for line in f:
rev = []
strippedLine = line.strip()
try:
lline,label = strippedLine.split("\t")
except ValueError:
lline = ""
label = strippedLine
rev.append(lline.strip())
if clean_string:
orig_rev = clean_str(" ".join(rev))
else:
orig_rev = " ".join(rev).lower()
words = set(orig_rev.split())
for word in words:
vocab[word] += 1
datum = {"y":int(label),
"text": orig_rev,
"num_words": len(orig_rev.split()),
"split": mysplit}
revs.append(datum)
#print revs
return revs, vocab
def get_W(word_vecs, k=300):
"""
Get word matrix. W[i] is the vector for word indexed by i
"""
vocab_size = len(word_vecs)
word_idx_map = dict()
W = np.zeros(shape=(vocab_size+1, k), dtype='float32')
W[0] = np.zeros(k, dtype='float32')
i = 1
for word in word_vecs:
W[i] = word_vecs[word]
word_idx_map[word] = i
i += 1
return W, word_idx_map
def load_bin_vec(fname, vocab):
"""
Loads 300x1 word vecs from Google (Mikolov) word2vec
"""
word_vecs = {}
with open(fname, "rb") as f:
header = f.readline()
vocab_size, layer1_size = map(int, header.split())
binary_len = np.dtype('float32').itemsize * layer1_size
for line in xrange(vocab_size):
word = []
while True:
ch = f.read(1)
if ch == ' ':
word = ''.join(word)
break
if ch != '\n':
word.append(ch)
if word in vocab:
word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')
else:
f.read(binary_len)
return word_vecs
def add_unknown_words(word_vecs, vocab, min_df=1, k=300):
"""
For words that occur in at least min_df documents, create a separate word vector.
0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones
"""
for word in vocab:
if word not in word_vecs and vocab[word] >= min_df:
word_vecs[word] = np.random.uniform(-0.25,0.25,k)
def clean_str(string, TREC=False):
"""
Tokenization/string cleaning for all datasets except for SST.
Every dataset is lower cased except for TREC
"""
string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub(r"\'s", " \'s", string)
string = re.sub(r"\'ve", " \'ve", string)
string = re.sub(r"n\'t", " n\'t", string)
string = re.sub(r"\'re", " \'re", string)
string = re.sub(r"\'d", " \'d", string)
string = re.sub(r"\'ll", " \'ll", string)
string = re.sub(r",", " , ", string)
string = re.sub(r"!", " ! ", string)
string = re.sub(r"\(", " \( ", string)
string = re.sub(r"\)", " \) ", string)
string = re.sub(r"\?", " \? ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip() if TREC else string.strip().lower()
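# Illustrative transformations (hypothetical inputs):
#
#   clean_str("Isn't this GREAT?")   ->  "is n't this great ?"
#   clean_str("What?", TREC=True)    ->  "What ?"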
def clean_str_sst(string):
"""
Tokenization/string cleaning for the SST dataset
"""
string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip().lower()
if __name__=="__main__":
w2v_file = sys.argv[1]
trainFile = sys.argv[2]
testFile = sys.argv[3]
develFile = sys.argv[4]
saveFile = sys.argv[5]
data_folder = [trainFile,testFile,develFile]
print "loading data...",
revs, vocab = build_data_cv(data_folder, cv=10, clean_string=True)
max_l = np.max(pd.DataFrame(revs)["num_words"])
print "data loaded!"
print "number of sentences: " + str(len(revs))
print "vocab size: " + str(len(vocab))
print "max sentence length: " + str(max_l)
print "loading word2vec vectors...",
sys.stdout.flush()
w2v = load_bin_vec(w2v_file, vocab)
print "word2vec loaded!"
print "num words already in word2vec: " + str(len(w2v))
add_unknown_words(w2v, vocab)
W, word_idx_map = get_W(w2v)
rand_vecs = {}
add_unknown_words(rand_vecs, vocab)
W2, _ = get_W(rand_vecs)
cPickle.dump([revs, W, W2, word_idx_map, vocab], open(saveFile, "wb"))
print "dataset created!"
#sys.exit(1) # SE
| apache-2.0 | -3,612,380,496,805,541,400 | 32.85034 | 96 | 0.530748 | false |
magne-max/zipline-ja | zipline/testing/__init__.py | 1 | 1320 | from .core import ( # noqa
AssetID,
AssetIDPlusDay,
EPOCH,
ExplodingObject,
FakeDataPortal,
FetcherDataPortal,
MockDailyBarReader,
OpenPrice,
add_security_data,
all_pairs_matching_predicate,
all_subindices,
assert_single_position,
assert_timestamp_equal,
check_allclose,
check_arrays,
chrange,
create_daily_df_for_asset,
create_data_portal,
create_data_portal_from_trade_history,
create_empty_splits_mergers_frame,
create_minute_bar_data,
create_minute_df_for_asset,
drain_zipline,
empty_asset_finder,
empty_assets_db,
empty_trading_env,
make_alternating_boolean_array,
make_cascading_boolean_array,
make_test_handler,
make_trade_data_for_asset_info,
parameter_space,
patch_os_environment,
patch_read_csv,
permute_rows,
powerset,
product_upper_triangle,
read_compressed,
seconds_to_timestamp,
security_list_copy,
str_to_seconds,
subtest,
temp_pipeline_engine,
test_resource_path,
tmp_asset_finder,
tmp_assets_db,
tmp_bcolz_equity_minute_bar_reader,
tmp_dir,
tmp_trading_env,
to_series,
to_utc,
trades_by_sid_to_dfs,
write_bcolz_minute_data,
write_compressed,
)
from .fixtures import ZiplineTestCase # noqa
| apache-2.0 | -7,354,642,395,603,160,000 | 22.571429 | 45 | 0.671212 | false |
bcgov/gwells | app/backend/gwells/views/bulk.py | 1 | 18358 | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import logging
from decimal import Decimal
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.response import Response
from drf_yasg.utils import swagger_auto_schema
from django.db import transaction
from django.utils import timezone
from django.contrib.gis.geos import Point
from aquifers.constants import AQUIFER_ID_FOR_UNCORRELATED_WELLS
from aquifers.models import Aquifer, VerticalAquiferExtent, VerticalAquiferExtentsHistory
from wells.models import Well
from gwells.models.bulk import BulkWellAquiferCorrelationHistory
from gwells.permissions import (
HasBulkWellAquiferCorrelationUploadRole,
HasBulkVerticalAquiferExtentsUploadRole
)
logger = logging.getLogger(__name__)
class BulkWellAquiferCorrelation(APIView):
"""
    Changes multiple aquifer-well correlations at once
"""
permission_classes = (HasBulkWellAquiferCorrelationUploadRole, )
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.change_log = []
self.create_date = timezone.now()
self.unknown_well_tag_numbers = set()
self.unknown_aquifer_ids = set()
self.wells_outside_aquifer = dict()
self.no_geom_aquifers = set()
self.retired_aquifers = set()
self.unpublished_aquifers = set()
self.unpublished_wells = set()
@swagger_auto_schema(auto_schema=None)
@transaction.atomic
def post(self, request, **kwargs):
aquifers = request.data
changes = {}
wells_to_update = []
# check for a ?commit querystring parameter for this /bulk API
# this flag will actually perform the bulk_update() on the DB
# without it will just check for errors and return the changes
# that would have been made
update_db = 'commit' in request.GET
incoming_well_tag_numbers = {wtn for aquifer in aquifers for wtn in aquifer['wellTagNumbers']}
incoming_aquifer_ids = {aquifer['aquiferId'] for aquifer in aquifers}
existing_wells = self.lookup_existing_wells(incoming_well_tag_numbers)
existing_aquifers = self.lookup_existing_aquifers(incoming_aquifer_ids)
if self.has_errors():
return self.return_errors({})
for aquifer in aquifers:
aquifer_id = int(aquifer['aquiferId'])
well_tag_numbers = aquifer['wellTagNumbers']
# capture errors about any unknown aquifers
aquifer = existing_aquifers[aquifer_id]
wells = [well for wtn, well in existing_wells.items() if wtn in well_tag_numbers]
# now figure out what has changed for each well
for well in wells:
well_tag_number = well.well_tag_number
existing_aquifer_id = well.aquifer_id if well.aquifer_id else None
# We need to skip aquifer 1143 as it is the aquifer without geom that wells are
# assigned to when they are not correlated at the time of interpretation.
if aquifer_id != AQUIFER_ID_FOR_UNCORRELATED_WELLS:
# If the correlation is changing — check if the well is inside the aquifer
self.check_well_in_aquifer(well, aquifer)
if existing_aquifer_id == aquifer_id: # this well correlation is unchanged
change = {
'action': 'same'
}
else:
if existing_aquifer_id is None:
# No existing aquifer for this well? Must be a new correlation
self.append_to_change_log(well_tag_number, aquifer_id, None)
change = {
'action': 'new',
'aquiferId': aquifer_id
}
wells_to_update.append(well)
elif existing_aquifer_id != aquifer_id: # existing ids don't match - must be a change
self.append_to_change_log(well_tag_number, aquifer_id, existing_aquifer_id)
change = {
'action': 'update',
'existingAquiferId': existing_aquifer_id,
'newAquiferId': aquifer_id
}
wells_to_update.append(well)
if change:
changes[well_tag_number] = change
if update_db:
# change all well's to point to the new aquifer
for well in wells:
well.aquifer = aquifer
if update_db: # no errors then updated the DB (if ?commit is passed in)
self.update_wells(wells_to_update)
elif self.has_warnings():
return self.return_errors(changes)
# no errors then we return the changes that were (or could be) performed
http_status = status.HTTP_200_OK if update_db else status.HTTP_202_ACCEPTED
return Response(changes, status=http_status)
def has_errors(self):
has_errors = (
len(self.unknown_well_tag_numbers) > 0 or
len(self.unknown_aquifer_ids) > 0
)
return has_errors
def has_warnings(self):
has_warnings = (
len(self.wells_outside_aquifer) > 0 or
len(self.no_geom_aquifers) > 0 or
len(self.unpublished_wells) > 0 or
len(self.unpublished_aquifers) > 0 or
len(self.retired_aquifers) > 0
)
return has_warnings
def lookup_existing_wells(self, well_tag_numbers):
wells = Well.objects.filter(pk__in=well_tag_numbers)
keyed_wells = {well.well_tag_number: well for well in wells}
known_well_tag_numbers = set(keyed_wells.keys())
self.unknown_well_tag_numbers = well_tag_numbers - known_well_tag_numbers
self.unpublished_wells = [well.well_tag_number for well in wells if well.well_publication_status_id != 'Published']
return keyed_wells
def lookup_existing_aquifers(self, aquifer_ids):
aquifers = Aquifer.objects.filter(pk__in=aquifer_ids).defer('geom') # we are not using geom
keyed_aquifers = {aquifer.aquifer_id: aquifer for aquifer in aquifers}
known_aquifer_ids = set(keyed_aquifers.keys())
self.unknown_aquifer_ids = aquifer_ids - known_aquifer_ids
self.retired_aquifers = [a.aquifer_id for a in aquifers if a.status_retired]
self.unpublished_aquifers = [a.aquifer_id for a in aquifers if not a.status_published]
return keyed_aquifers
def check_well_in_aquifer(self, well, aquifer):
if aquifer.geom is None:
self.no_geom_aquifers.add(aquifer.aquifer_id)
return None
if aquifer.geom_simplified is None:
raise Exception(f"Aquifer {aquifer.aquifer_id} has no geom_simplified")
# Expand simplified polygon by ~1000m in WGS-84 (srid 4326)
aquifer_geom = aquifer.geom_simplified.buffer(0.01)
if not aquifer_geom.contains(well.geom):
well_3005_geom = well.geom.transform(3005, clone=True)
distance = aquifer.geom.distance(well_3005_geom)
# NOTE: 3005 projection's `.distance()` returns almost-meters
self.wells_outside_aquifer[well.well_tag_number] = {'distance': distance, 'units': 'meters'}
return False
return True
def return_errors(self, changes):
# roll back the transaction as the bulk_update could have run for one
# aquifer but errored on another. Best to abort the whole thing and warn the user
transaction.set_rollback(True)
errors = {
'unknownAquifers': self.unknown_aquifer_ids,
'unknownWells': self.unknown_well_tag_numbers,
'wellsNotInAquifer': self.wells_outside_aquifer,
'aquiferHasNoGeom': self.no_geom_aquifers,
'retiredAquifers': self.retired_aquifers,
'unpublishedAquifers': self.unpublished_aquifers,
'unpublishedWells': self.unpublished_wells,
'changes': changes # always return the list of changes even if there are unknowns
}
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
def update_wells(self, wells):
logger.info("Bulk updating %d wells", len(wells))
# bulk update using efficient SQL for any well aquifer correlations that have changed
Well.objects.bulk_update(wells, ['aquifer'])
# save the BulkWellAquiferCorrelation records
BulkWellAquiferCorrelationHistory.objects.bulk_create(self.change_log)
def append_to_change_log(self, well_tag_number, to_aquifer_id, from_aquifer_id):
bulk_history_item = BulkWellAquiferCorrelationHistory(
well_id=well_tag_number,
update_to_aquifer_id=to_aquifer_id,
update_from_aquifer_id=from_aquifer_id,
create_user=self.request.user.profile.username,
create_date=self.create_date
)
self.change_log.append(bulk_history_item)
class BulkVerticalAquiferExtents(APIView):
"""
Changes multiple vertical aquifer extents all at once
"""
permission_classes = (HasBulkVerticalAquiferExtentsUploadRole, )
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.conflicts = []
self.change_log = []
self.create_date = timezone.now()
self.unknown_well_tag_numbers = set()
self.unknown_aquifer_ids = set()
@swagger_auto_schema(auto_schema=None)
@transaction.atomic
def post(self, request, **kwargs):
vertical_aquifer_extents = request.data
new_vae_models = []
# check for a ?commit querystring parameter for this /bulk API
# this flag will actually perform the bulk_update() on the DB
# without it will just check for errors and return the changes
# that would have been made
update_db = 'commit' in request.GET
# create a dict of the extents keyed by well_tag_number
incoming_vae_data = self.as_wells(vertical_aquifer_extents)
incoming_well_tag_numbers = incoming_vae_data.keys()
existing_wells = self.lookup_existing_wells(incoming_well_tag_numbers)
incoming_aquifer_ids = set(row['aquiferId'] for row in vertical_aquifer_extents)
existing_aquifers = self.lookup_existing_aquifers(incoming_aquifer_ids)
if len(self.unknown_well_tag_numbers) > 0 or len(self.unknown_aquifer_ids) > 0:
return self.return_errors()
# loop through every well in this bulk update
for well_tag_number, data in incoming_vae_data.items():
well = existing_wells[well_tag_number]
existing_data = VerticalAquiferExtent.objects \
.filter(well_id=well_tag_number) \
.order_by('start')[:]
existing_aquifer_ids = [item.aquifer_id for item in existing_data]
extents = [{'start': item.start, 'end': item.end} for item in existing_data]
# record the current extents at this well so we know the complete state at this time
for existing_vae in existing_data:
self.append_to_history_log(existing_vae)
# loop through all incoming extents and see if they overlap with any existing or new extents
max_depth = float('-inf')
data.sort(key=lambda item: item['fromDepth'])
for vae in data:
aquifer_id = vae['aquiferId']
from_depth = Decimal(format(vae['fromDepth'], '.2f')) if vae['fromDepth'] is not None else None
to_depth = Decimal(format(vae['toDepth'], '.2f')) if vae['toDepth'] is not None else Decimal('Infinity')
if aquifer_id in existing_aquifer_ids:
self.add_conflict(vae, 'Aquifer %s already defined for well' % aquifer_id)
continue
if from_depth < 0:
                    self.add_conflict(vae, 'From depth cannot be less than zero')
continue
if to_depth < 0:
                    self.add_conflict(vae, 'To depth cannot be less than zero')
continue
if to_depth < from_depth:
self.add_conflict(vae, 'From depth must be below to depth')
continue
aquifer = existing_aquifers[aquifer_id]
if self.check_extent_overlaps(from_depth, to_depth, extents):
self.add_conflict(vae, 'Overlaps with an existing vertical aquifer extent')
continue
if from_depth < max_depth:
self.add_conflict(vae, 'Overlaps with another vertical aquifer extent in the CSV')
continue
max_depth = to_depth
if update_db:
vae_model = self.build_vertical_aquifer_extent_model(well, aquifer, from_depth, to_depth)
new_vae_models.append(vae_model)
self.append_to_history_log(vae_model)
# if there are any unknown aquifers or wells then we want to return errors
if len(self.conflicts) > 0:
return self.return_errors()
if update_db: # no errors then updated the DB (if ?commit is passed in)
self.create_vertical_aquifer_extents(new_vae_models)
# no errors then we return the changes that were (or could be) performed
http_status = status.HTTP_200_OK if update_db else status.HTTP_202_ACCEPTED
return Response({}, status=http_status)
def as_wells(self, vertical_aquifer_extents):
""" Returns extents as a dict keyed by well_tag_number """
wells = {}
for record in vertical_aquifer_extents:
wells.setdefault(record['wellTagNumber'], []).append(record)
return wells
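    # Illustrative grouping (hypothetical rows): three incoming extents for two
    # wells are keyed by their well tag number.
    #
    #   rows = [{'wellTagNumber': 1, 'aquiferId': 5, 'fromDepth': 0, 'toDepth': 10},
    #           {'wellTagNumber': 1, 'aquiferId': 6, 'fromDepth': 10, 'toDepth': None},
    #           {'wellTagNumber': 2, 'aquiferId': 5, 'fromDepth': 0, 'toDepth': 3}]
    #   self.as_wells(rows)  ->  {1: [<row 1>, <row 2>], 2: [<row 3>]}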
def lookup_existing_wells(self, well_tag_numbers):
""" Returns a dict keyed by well_tag_number of existing wells """
wells = Well.objects.filter(pk__in=well_tag_numbers)
keyed_wells = {well.well_tag_number: well for well in wells}
known_well_tag_numbers = set(keyed_wells.keys())
self.unknown_well_tag_numbers = well_tag_numbers - known_well_tag_numbers
return keyed_wells
def lookup_existing_aquifers(self, aquifer_ids):
""" Returns a dict keyed by aquifer_id of existing aquifers """
aquifers = Aquifer.objects.filter(pk__in=aquifer_ids)
keyed_aquifers = {aquifer.aquifer_id: aquifer for aquifer in aquifers}
known_aquifer_ids = set(keyed_aquifers.keys())
self.unknown_aquifer_ids = aquifer_ids - known_aquifer_ids
return keyed_aquifers
def add_conflict(self, data, msg):
""" Logs a conflict to be returned as a list of conflicts """
self.conflicts.append({
**data,
'message': msg,
})
def build_vertical_aquifer_extent_model(self, well, aquifer, from_depth, to_depth):
""" A new VerticalAquiferExtentModel which uses the well's geom """
if well.geom:
longitude = well.geom.x
latitude = well.geom.y
point = Point(-abs(float(longitude)), float(latitude), srid=4326)
return VerticalAquiferExtent(
well=well,
aquifer=aquifer,
geom=point,
start=from_depth,
end=None if math.isinf(to_depth) else to_depth,
create_user=self.request.user.profile.username,
create_date=self.create_date
)
def check_extent_overlaps(self, from_depth, to_depth, existing_extents):
""" Checks an extent against a list of existing extents """
if len(existing_extents) == 0:
return False
max_depth = float('-inf')
for extent in existing_extents:
start = extent['start']
end = extent['end'] if extent['end'] is not None else Decimal('Infinity')
if from_depth >= max_depth and to_depth <= start:
return False
max_depth = end
return from_depth < max_depth # check the bottom of all extents
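    # Illustrative check (hypothetical extents; depths are metres below ground
    # and an 'end' of None is treated as open-ended):
    #
    #   existing = [{'start': Decimal('0'), 'end': Decimal('10')},
    #               {'start': Decimal('20'), 'end': None}]
    #   self.check_extent_overlaps(Decimal('10'), Decimal('20'), existing)  # -> False, fits the gap
    #   self.check_extent_overlaps(Decimal('5'), Decimal('12'), existing)   # -> True, overlaps 0-10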
def return_errors(self):
# roll back the transaction as the bulk_update could have run for one
# aquifer but errored on another. Best to abort the whole thing and warn the user
transaction.set_rollback(True)
errors = {
'unknownAquifers': self.unknown_aquifer_ids,
'unknownWells': self.unknown_well_tag_numbers,
'conflicts': self.conflicts
}
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
def create_vertical_aquifer_extents(self, models):
""" Creates all the vertical aquifer extents and history log items all at once """
logger.info("Bulk updating %d VerticalAquiferExtents", len(models))
# bulk update using efficient SQL for any well aquifer correlations that have changed
VerticalAquiferExtent.objects.bulk_create(models)
# save the BulkWellAquiferCorrelation records
VerticalAquiferExtentsHistory.objects.bulk_create(self.change_log)
def append_to_history_log(self, model):
""" Adds a vertical aquifer extent's data to the history log """
bulk_history_item = VerticalAquiferExtentsHistory(
well_tag_number=model.well_id,
aquifer_id=model.aquifer_id,
geom=model.geom,
start=model.start,
end=model.end,
create_user=self.request.user.profile.username,
create_date=self.create_date
)
self.change_log.append(bulk_history_item)
| apache-2.0 | -6,203,309,790,408,724,000 | 41.787879 | 123 | 0.620288 | false |
live-clones/dolfin-adjoint | tests_dolfin/optimization_scalar/optimization_scalar.py | 1 | 1880 | from __future__ import print_function
from dolfin import *
from dolfin_adjoint import *
import sys
dolfin.set_log_level(ERROR)
n = 10
mesh = UnitIntervalMesh(n)
V = FunctionSpace(mesh, "CG", 2)
ic = project(Expression("sin(2*pi*x[0])", degree=1), V)
u = ic.copy(deepcopy=True)
def main(nu):
u_next = Function(V)
v = TestFunction(V)
timestep = Constant(1.0/n, name="Timestep")
F = ((u_next - u)/timestep*v
+ u_next*u_next.dx(0)*v
+ nu*u_next.dx(0)*v.dx(0))*dx
bc = DirichletBC(V, 0.0, "on_boundary")
t = 0.0
end = 0.1
while (t <= end):
solve(F == 0, u_next, bc)
u.assign(u_next)
t += float(timestep)
adj_inc_timestep()
def eval_cb(j, m):
print("j = %f, m = %f." % (j, float(m)))
def derivative_cb(j, dj, m):
print("j = %f, dj = %f, m = %f." % (j, dj, float(m)))
def replay_cb(var, data, m):
#print "Got data for variable %s at m = %f." % (var, float(m))
pass
if __name__ == "__main__":
nu = Constant(0.0001, name="Nu")
# Run the forward model once to have the annotation
main(nu)
J = Functional(inner(u, u)*dx*dt[FINISH_TIME])
# Run the optimisation
reduced_functional = ReducedFunctional(J, ConstantControl("Nu"),
eval_cb_post= eval_cb,
derivative_cb_post=derivative_cb,
replay_cb=replay_cb,
scale=2.0)
try:
nu_opt = minimize(reduced_functional, 'SLSQP')
tol = 1e-4
if reduced_functional(nu_opt) > tol:
print('Test failed: Optimised functional value exceeds tolerance: ', reduced_functional(nu_opt), ' > ', tol, '.')
sys.exit(1)
except ImportError:
info_red("No suitable scipy version found. Aborting test.")
| lgpl-3.0 | -1,891,839,056,240,876,000 | 27.923077 | 125 | 0.534574 | false |
Eternali/synk | synk-pre/synk-pre.py | 1 | 5279 | '''
Synk - Sublime Text Plugin
'''
import os
import socket
import sublime
import sublime_plugin
from threading import Thread, Timer
# variables for storing user defined settings
settings_filename = "synk_pre.sublime-settings"
enabled_field = "enabled"
server_ips_field = "project_server_ips" # NOTE: add feature to have more than one server later
uplink_ports_field = "uplink_ports"
downlink_ports_field = "downlink_ports"
all_files_field = "synk_all_files"
current_file_field = "synk_current_file"
delay_field = "delay_in_seconds"
# Object for connecting to the server
class ServerConnection(object):
def __init__(self, attempts=5):
self.settings = sublime.load_settings(settings_filename)
self.delay = self.settings.get(delay_field)
self.server = self.settings.get(server_ips_field)
self.up_port = self.settings.get(uplink_ports_field)
self.down_port = self.settings.get(downlink_ports_field)
self.current_file = self.settings.get(current_file_field)
self.upsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.downsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.file_locked = False
for a in range(attempts):
try:
self.upsock.connect((self.server, self.up_port))
self.downsock.connect((self.server, self.down_port))
return
except:
continue
sublime.status_message("An error occured while attempting to connect to the server.")
    def recv_data(self, conn):
        received = ""
        while True:
            data = conn.recv(4096)
            received += data.decode("utf-8")
            if len(data) < 4096:
                break
        return received
    def write_file(self, fname, data, mode="w"):
        with open(fname, mode) as f:
            if isinstance(data, str):
                f.write(data)
            else:
                for line in data:
                    f.write(line + '\n')
    def push_changes(self, view, filename=None, attempts=30):
        if filename is None:
            filename = self.current_file
        for a in range(attempts):
            if not self.file_locked:
                self.file_locked = True
                data = filename + '\n' + view.substr(sublime.Region(0, view.size()))
                self.upsock.send(data.encode("utf-8"))
                self.file_locked = False
                break
def get_changes(self):
#change_thread = Thread(target=self.get_changes_thread)
#change_thread.start()
Timer(self.delay, self.get_changes_thread).start()
def get_changes_thread(self):
while True:
self.recved_data = self.recv_data(self.downsock)
            if len(self.recved_data) and not self.file_locked:  # no active view is available in this background thread
self.file_locked = True
self.write_file(self.current_file, self.recved_data)
self.file_locked = False
class SynkPreListener(sublime_plugin.EventListener):
save_queue = []
@staticmethod
def generate_backup_filename(filename):
dirname, basename = [os.path.dirname(filename), os.path.basename(filename).split('.')]
if len(basename) > 1:
basename.insert(-1, 'bak')
else:
basename.append('bak')
return dirname + '/' + '.'.join(basename)
def on_modified(self, view):
settings = sublime.load_settings(settings_filename)
if not (view.file_name() and view.is_dirty()):
return
delay = settings.get(delay_field)
all_files = settings.get(all_files_field)
current_file = settings.get(current_file_field)
if not all_files and current_file != view.file_name():
return
        def callback():
            settings = sublime.load_settings(settings_filename)
            current_file = settings.get(current_file_field)
            if view.is_dirty() and not view.is_loading():
                view.run_command("save")
                serv_conn.push_changes(view, filename=current_file)
            else:
                content = view.substr(sublime.Region(0, view.size()))
                try:
                    # view.file_name() is the Sublime Text API call; view.filename() does not exist
                    with open(SynkPreListener.generate_backup_filename(view.file_name()), 'w', encoding="utf-8") as f:
                        f.write(content)
                except Exception as e:
                    sublime.status_message(str(e))
                    raise e
        # Defer the save by the configured delay; scheduling the callback here is
        # an assumption, since `delay` is read above but was never otherwise used.
        sublime.set_timeout(callback, int(delay * 1000))
class SynkPreCommand(sublime_plugin.TextCommand):
def run(self, **kwargs):
enable = kwargs.get("enable", None)
all_files = kwargs.get("all_files", False)
settings = sublime.load_settings(settings_filename)
if enable is None:
enable = not settings.get(enabled_field)
if not enable:
message = "Autosynk is turned off."
filename = settings.get(current_file_field)
settings.set(enabled_field, enable)
settings.set(all_files_field, all_files)
filename = sublime.Window.active_view(sublime.active_window()).file_name()
settings.set(current_file_field, filename)
if enable:
message = "Autosynk is turned on."
if not all_files:
message += " for: " + os.path.basename(filename)
            global serv_conn
            serv_conn = ServerConnection()
serv_conn.get_changes()
sublime.status_message(message)
| gpl-3.0 | -5,355,001,853,979,260,000 | 33.730263 | 113 | 0.602387 | false |
PostTenebrasLab/DrinkingBuddyServer | drinkingBuddyDB_declarative.py | 1 | 3467 | #!/usr/bin/python3
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from marshmallow import Schema, fields
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
Base = declarative_base()
class Category(Base):
__tablename__ = 'categories'
id = Column(Integer, primary_key=True)
name = Column(String(50), nullable=False)
class Terminal(Base):
__tablename__ = 'terminals'
id = Column(Integer, primary_key=True)
name = Column(String(50), nullable=False)
key = Column(String(64), nullable=False)
class Functionality(Base):
__tablename__ = 'functionalities'
id = Column(Integer, primary_key=True)
category_id = Column(Integer, ForeignKey('categories.id'))
category = relationship(Category)
terminal_id = Column(Integer, ForeignKey('terminals.id'))
terminal = relationship(Terminal)
class Item(Base):
__tablename__ = 'items'
id = Column(Integer, primary_key=True)
name = Column(String(50), nullable=False)
quantity = Column(Integer)
minquantity = Column(Integer)
price = Column(Integer)
barcode = Column(String(32), nullable=True)
pictureURL = Column(String(512), nullable=True)
category_id = Column(Integer, ForeignKey('categories.id'))
category = relationship(Category)
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String(50))
balance = Column(Integer)
type = Column(Integer)
class Card(Base):
__tablename__ = 'cards'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'))
user = relationship(User)
class Locker(Base):
__tablename__ = 'locker'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'))
user = relationship(User)
class Transaction(Base):
__tablename__ = 'transactions'
id = Column(Integer, primary_key=True)
date = Column(DateTime)
value = Column(Integer)
user_id = Column(Integer, ForeignKey('users.id'))
user = relationship(User)
class TransactionItem(Base):
__tablename__ = 'transaction_items'
id = Column(Integer, primary_key=True)
date = Column(DateTime)
quantity = Column(Integer)
price_per_item = Column(Integer)
canceled = Column(Boolean, default=False)
canceled_date = Column(DateTime)
element_id = Column(Integer, ForeignKey('items.id'))
element = relationship(Item)
transaction_id = Column(Integer, ForeignKey('transactions.id'))
transaction = relationship(Transaction)
class UserSchema(Schema):
class Meta:
fields = ("id", "name", "balance")
class ItemSchema(Schema):
class Meta:
fields = ("id", "name")
class TransactionItemSchema(Schema):
element = fields.Nested(ItemSchema)
class Meta:
fields = ("id", "date", "value", "element_id", "element")
class TransactionSchema(Schema):
user = fields.Nested(UserSchema)
transactionItems = fields.Nested(TransactionItemSchema, many=True)
class Meta:
fields = ("id", "date", "value", "user_id", "user", "transactionItems")
# Create Database
# engine = create_engine("sqlite:///db.db", echo=True)
# Base.metadata.create_all(engine)
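# Illustrative sketch (an assumption, not project configuration): create the
# schema in a local SQLite file and open a session for the models defined above.
if __name__ == "__main__":
    from sqlalchemy.orm import sessionmaker
    engine = create_engine("sqlite:///db.db", echo=False)
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()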
| mit | 974,896,659,334,482,000 | 27.418033 | 87 | 0.677531 | false |
ifzing/ceilometer-extended-monitor | network/lbaas.py | 1 | 2469 | # -*- encoding: utf-8 -*-
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer.central import plugin
from ceilometer import sample
from ceilometer import neutron_client
class LbaasInBytesPollster(plugin.CentralPollster):
LOG = log.getLogger(__name__ + '.LBaaS')
def _get_lb_in_bytes(self):
in_bytes = []
nt = neutron_client.Client()
for pool in nt._list_pools():
bytes = nt._get_lb_in_bytes(pool['id'])
in_bytes.append({'id': pool['id'],
'bytes': bytes})
return in_bytes
def _iter_pool_stats(self, cache):
if 'in_bytes' not in cache:
cache['in_bytes'] = list(self._get_lb_in_bytes())
return iter(cache['in_bytes'])
def get_samples(self, manager, cache):
for pool in self._iter_pool_stats(cache):
self.LOG.info("LBAAS POOL: %s" % pool['id'])
yield sample.Sample(
name='network.lb.in.bytes',
type=sample.TYPE_CUMULATIVE,
unit='byte',
volume=pool['bytes'],
user_id=None,
project_id=None,
resource_id=pool['id'],
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={})
class LbaasOutBytesPollster(plugin.CentralPollster):
LOG = log.getLogger(__name__ + '.LBaaS')
def _get_lb_out_bytes(self):
in_bytes = []
nt = neutron_client.Client()
for pool in nt._list_pools():
bytes = nt._get_lb_out_bytes(pool['id'])
in_bytes.append({'id': pool['id'],
'bytes': bytes})
return in_bytes
def _iter_pool_stats(self, cache):
if 'out_bytes' not in cache:
cache['out_bytes'] = list(self._get_lb_out_bytes())
return iter(cache['out_bytes'])
def get_samples(self, manager, cache):
for pool in self._iter_pool_stats(cache):
self.LOG.info("LBAAS POOL: %s" % pool['id'])
yield sample.Sample(
name='network.lb.out.bytes',
type=sample.TYPE_CUMULATIVE,
unit='byte',
volume=pool['bytes'],
user_id=None,
project_id=None,
resource_id=pool['id'],
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={})
| apache-2.0 | 8,639,770,541,434,874,000 | 31.486842 | 63 | 0.534629 | false |
bromjiri/Presto | predictor/predictor_new.py | 1 | 8137 | import settings
import pandas as pd
import numpy as np
import os
from datetime import datetime
from datetime import timedelta
import predictor.predictor_classifier as cls
import predictor.predictor_statistic as stat
import random
import nltk
class Stock:
def __init__(self, subject):
input_file = settings.PREDICTOR_STOCK + "/" + subject + ".csv"
self.stock_df = pd.read_csv(input_file, sep=',', index_col='Date')
def create_dict(self, from_date, to_date):
self.stock_ser = self.stock_df['Diff'].loc[from_date:to_date]
# binning
self.stock_ser = self.stock_ser.apply(binning_none)
self.stock_dict = self.stock_ser.dropna().astype(int).to_dict()
def get_dict(self):
return self.stock_dict
def get_stock_dates(self):
return self.stock_ser.index.values
class Sent:
def __init__(self, subject, source):
input_file = settings.PREDICTOR_SENTIMENT + "/" + source + "/" + source + "-sent-" + subject + ".csv"
self.sent_df = pd.read_csv(input_file, sep=',', index_col='Date')
def get_weekend(self, col_name, stock_dates):
weekend_df = np.round(self.sent_df, 2)
aggreg = 0
days = 1
for idx, row in weekend_df.iterrows():
value = row[col_name]
date = pd.to_datetime(idx)
date_plus = date + timedelta(days=1)
if str(date_plus.date()) not in stock_dates:
# print("weekend")
value += aggreg
aggreg = value
days += 1
else:
total = value + aggreg
mean = total / days
aggreg = 0
days = 1
weekend_df.set_value(idx, col_name, mean)
# print(date.date(), row[col_name], value)
return np.round(weekend_df[col_name].diff().loc[stock_dates], 2)
def create_dict(self, precision, method, from_date, to_date, stock_dates, binning):
sentiment_col = "Sent" + precision
sent_ser = self.sent_df[sentiment_col]
if method == "Natural":
sent_ser = sent_ser.diff().loc[from_date:to_date]
elif method == "Friday":
sent_ser = sent_ser.loc[stock_dates].diff()
elif method == "Sunday":
sent_ser = sent_ser.diff().loc[stock_dates]
elif method == "Weekend":
sent_ser = self.get_weekend(sentiment_col, stock_dates)
# binning
std_dev1 = sent_ser.std() / 4
std_dev2 = sent_ser.std()
if binning == 'none':
sent_ser_new = sent_ser.apply(binning_none)
elif binning == 'low':
sent_ser_new = sent_ser.apply(binning_low, args=(std_dev1,))
else:
sent_ser_new = sent_ser.apply(binning_high, args=(std_dev1, std_dev2,))
# print(pd.concat([sent_ser, sent_ser_new], axis=1))
self.sent_dict = sent_ser_new.dropna().astype(int).to_dict()
self.key_list = sorted(self.sent_dict.keys())
def get_dict(self):
return self.sent_dict
def get_features(self, key):
index = self.key_list.index(key)
features = dict()
features['d1'] = self.sent_dict[self.key_list[index-3]]
features['d2'] = self.sent_dict[self.key_list[index-2]]
features['d3'] = self.sent_dict[self.key_list[index-1]]
return features
def binning_none(row):
if row > 0:
return 4
elif row < 0:
return 0
else:
return row
def binning_low(row, std_dev1):
if row > std_dev1:
return 4
elif row < std_dev1 and row > -std_dev1:
return 2
elif row < -std_dev1:
return 0
else:
return row
def binning_high(row, std_dev1, std_dev2):
if row > std_dev2:
return 4
elif row < std_dev2 and row > std_dev1:
return 3
elif row < std_dev1 and row > -std_dev1:
return 2
elif row < -std_dev1 and row > -std_dev2:
return 1
elif row < -std_dev2:
return 0
else:
return row
def run_one(source, subject, precision, method, from_date, to_date, binning, filename_nltk, filename_skl):
# stock dataframe
stock = Stock(subject)
stock.create_dict(from_date, to_date)
stock_dict = stock.get_dict()
# print(sorted(stock_dict.items()))
indexes = ["djia", "snp", "nasdaq"]
# if subject in indexes:
# subject = "the"
# sentiment dataframe
sent = Sent(subject, source)
sent.create_dict(precision, method, from_date, to_date, stock.get_stock_dates(), binning)
# print(sorted(sent.get_dict().items()))
# features
features_list = list()
for key in sorted(stock_dict)[3:]:
features = sent.get_features(key)
features_list.append([features, stock_dict[key]])
# print([key, sorted(features.items()), stock_dict[key]])
features_list_pos = list()
features_list_neg = list()
for feature in features_list:
if feature[1] == 0:
features_list_neg.append(feature)
else:
features_list_pos.append(feature)
statistic = stat.Statistic(source, subject, precision, method, binning)
# print(len(features_list), len(features_list_pos), len(features_list_neg))
max_half = min(len(features_list_pos), len(features_list_neg))
train_border = int(max_half * 4 / 5)
# print(train_border, max_half)
# exit()
cycles = 50
for x in range(0, cycles):
random.shuffle(features_list_pos)
random.shuffle(features_list_neg)
# random.shuffle(features_list)
trainfeats = features_list_pos[:train_border] + features_list_neg[:train_border]
testfeats = features_list_pos[train_border:max_half] + features_list_neg[train_border:max_half]
# print(len(trainfeats), len(testfeats))
# trainfeats = features_list[:170]
# testfeats = features_list[170:]
nlt_output, skl_output = cls.train(trainfeats, testfeats, nlt=nltk_run, skl=sklearn_run)
# print(nlt_output['most1'])
# exit()
if nltk_run:
statistic.add_nltk(nlt_output)
if sklearn_run:
statistic.add_skl(skl_output)
if nltk_run:
statistic.mean_nltk(cycles)
statistic.print_nltk()
# statistic.write_nltk(filename_nltk)
if sklearn_run:
statistic.mean_skl(cycles)
statistic.print_skl()
statistic.print_stddev()
# statistic.write_skl(filename_skl)
nltk_run = True
sklearn_run = True
from_date = '2016-11-01'
to_date = '2017-08-31'
source = "stwits-comb"
binnings = ['none', 'low', 'high']
# subjects = ["snp", "djia", "nasdaq"]
subjects = ["djia", "snp", "nasdaq"]
# subjects = ["microsoft"]
precisions = ["0.6", "0.8", "1.0"]
# precisions = ["0.6"]
methods = ["Friday", "Natural", "Weekend", "Sunday"]
# methods = ["Friday"]
for subject in subjects:
folder = settings.PREDICTOR_PREDICTION + '/' + source + '/' + subject + '/'
os.makedirs(folder, exist_ok=True)
filename_nltk = folder + source + '-prediction-' + subject + "-nltk.csv"
filename_skl = folder + source + '-prediction-' + subject + "-skl.csv"
# if nltk_run:
# open(filename_nltk, 'w').close()
#
# if sklearn_run:
# open(filename_skl, 'w').close()
for method in methods:
# if nltk_run:
# f = open(filename_nltk, 'a')
# f.write(source + ", " + subject + ", " + method + ", NLTK\n")
# f.write("precision, binning, accuracy, pos_prec, neg_prec, pos_rec, neg_rec, d1, d2, d3\n")
# f.close()
#
# if sklearn_run:
# f = open(filename_skl, 'a')
# f.write(source + ", " + subject + ", " + method + ", SKL\n")
# f.write("precision, binning, mnb, bnb, lr, lsvc, nsvc, voted\n")
# f.close()
for precision in precisions:
for binning in binnings:
# print(source, subject, precision, method)
run_one(source, subject, precision, method, from_date, to_date, binning, filename_nltk, filename_skl)
| mit | -6,080,565,351,509,108,000 | 28.915441 | 117 | 0.577731 | false |
imgos/asterisk-scripts | script/googlecontacts.py | 1 | 2701 | #!/usr/bin/python3
"""Get Google Contacts
Usage: google_contacts.py [--noauth_local_webserver]
Options:
--noauth_local_webserver passed on to google auth
"""
import docopt
import httplib2
import subprocess
import unidecode
from apiclient import discovery
from argparse import Namespace
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
###
APPLICATION_NAME = "Asterisk DB Updater"
# If modifying these scopes, delete your previously saved credentials
SCOPES = [
"https://www.googleapis.com/auth/contacts.readonly",
"https://www.googleapis.com/auth/people.readonly",
]
OAUTH_CONFIG_FILE = "/etc/asterisk-scripts/asterisk_client_secrets.json"
OAUTH_TOKEN_FILE = "/etc/asterisk-scripts/asterisk_script_tokens.json"
###
def get_credentials(flags):
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
:param flags: oauth flags
:return: the obtained credentials
"""
store = Storage(OAUTH_TOKEN_FILE)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(OAUTH_CONFIG_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
credentials = tools.run_flow(flow, store, flags)
print(f"Storing credentials to: {OAUTH_TOKEN_FILE}")
return credentials
def main():
"""Update asterisk db from google contacts"""
opts = docopt.docopt(__doc__)
flags = Namespace(
auth_host_name="localhost",
auth_host_port=[8080, 8090],
logging_level="ERROR",
noauth_local_webserver=opts["--noauth_local_webserver"],
)
credentials = get_credentials(flags)
http = credentials.authorize(httplib2.Http())
if opts["--noauth_local_webserver"]:
return
service = discovery.build("people", "v1", http=http)
contacts_response = (
service.people()
.connections()
.list(
resourceName="people/me",
personFields="names,phoneNumbers",
sortOrder="LAST_NAME_ASCENDING",
)
.execute()
)
for i, contact in enumerate(contacts_response["connections"]):
display_name = (
contact["names"][0]["displayName"]
if len(contact["names"]) > 0
else "Unknown"
)
for phone in contact["phoneNumbers"]:
ast_cmd = f'database put cidname {phone["canonicalForm"]} "{display_name}"'
subprocess.run(["asterisk", "-rx", unidecode.unidecode(ast_cmd)])
if __name__ == "__main__":
main()
| mit | -8,137,995,456,563,472,000 | 26.561224 | 87 | 0.654202 | false |
fls-bioinformatics-core/RnaChipIntegrator | test/test_Features.py | 1 | 14809 | #
# test_Features.py: unit tests for Features module
# Copyright (C) University of Manchester 2011-5 Peter Briggs
from common import *
from rnachipintegrator.Features import Feature
from rnachipintegrator.Features import FeatureSet
import unittest
class TestFeature(unittest.TestCase):
def setUp(self):
# Set up Feature objects to be used in the tests
# Forward strand example
self.rna_data = \
Feature('CG9130-RB','chr3L','1252012','1255989','+')
# Reverse strand example
self.rna_data_2 = \
Feature('CG13051-RA','chr3L','16257914','16258166','-')
def test_properties(self):
self.assertEqual(self.rna_data.chrom,'chr3L')
self.assertEqual(self.rna_data.source_file,None)
self.assertEqual(self.rna_data_2.chrom,'chr3L')
self.assertEqual(self.rna_data_2.source_file,None)
def test_with_source_file(self):
feature = Feature('CG9130-RB','chr3L','1252012','1255989','+',
source_file="Features1.txt")
self.assertEqual(feature.chrom,'chr3L')
self.assertEqual(feature.id,'CG9130-RB')
self.assertEqual(feature.start,1252012)
self.assertEqual(feature.end,1255989)
self.assertEqual(feature.strand,'+')
self.assertEqual(feature.source_file,"Features1.txt")
def test__eq__(self):
self.assertEqual(self.rna_data,Feature('CG9130-RB',
'chr3L',
'1252012',
'1255989','+'))
self.assertNotEqual(self.rna_data,self.rna_data_2)
def test_contains_position(self):
position = 1253000
self.assertTrue(self.rna_data.containsPosition(position),
"Transcript should contain position")
position = 4250000
self.assertFalse(self.rna_data.containsPosition(position),
"Transcript should not contain position")
position = 10000
self.assertFalse(self.rna_data.containsPosition(position),
"Transcript should not contain position")
def test_get_closest_edge_distance(self):
# Single position
position = 1200000
# Single reference position
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position),
abs(self.rna_data.start-position),
"Incorrect closest edge distance #1")
position = 1300000
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position),
abs(self.rna_data.end-position),
"Incorrect closest edge distance #2")
def test_get_closest_edge_distance_outside_region(self):
# Reference region (2 positions)
position1 = 1100000
position2 = 1200000
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2),
abs(self.rna_data.start-position2),
"Incorrect closest edge distance (region #1)")
position1 = 1300000
position2 = 1400000
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2),
abs(self.rna_data.end-position1),
"Incorrect closest edge distance (region #2)")
def test_get_closest_edge_distance_partially_inside_region(self):
# Partially inside reference region
position1 = 1200000
position2 = 1255000
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2),
abs(self.rna_data.end-position2),
"Incorrect closest edge distance (inside region #1)")
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2,
zero_inside_region=True),
0,
"Incorrect closest edge distance (inside region #2)")
def test_get_closest_edge_distance_completely_inside_region(self):
# Completely inside reference region
position1 = 1250000
position2 = 1300000
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2),
abs(self.rna_data.start-position1),
"Incorrect closest edge distance (inside region #3)")
self.assertEqual(self.rna_data.getClosestEdgeDistanceTo(position1,
position2,
zero_inside_region=True),
0,
"Incorrect closest edge distance (inside region #4)")
def test_get_promoter_region(self):
leading = 10000
trailing = 2500
promoter = self.rna_data.getPromoterRegion(leading,trailing)
self.assertEqual(promoter,
(self.rna_data.getTSS()-leading,
self.rna_data.getTSS()+trailing),
"Incorrect promoter region for + strand example")
promoter = self.rna_data_2.getPromoterRegion(leading,trailing)
self.assertEqual(promoter,
(self.rna_data_2.getTSS()+leading,
self.rna_data_2.getTSS()-trailing),
"Incorrect promoter region for - strand example")
class TestFeatureSet(unittest.TestCase):
def setUp(self):
# Create input files for tests
create_test_file('Transcripts-ex1.txt',transcripts_ex1)
create_test_file('Transcripts-ex2.txt',transcripts_ex2)
create_test_file('Transcripts-ex2a.txt',transcripts_ex2a)
    def tearDown(self):
        # Remove input files
        delete_test_file('Transcripts-ex1.txt')
        delete_test_file('Transcripts-ex2.txt')
        delete_test_file('Transcripts-ex2a.txt')
def test_populate_from_list_of_features(self):
features = FeatureSet(
features_list=(
Feature('CG31973','chr2L',25402,59243,'-'),
Feature('CG2674-RC','chr2L',107926,114433,'+'),
Feature('CG2674-RE','chr2L',106903,114433,'+'),
Feature('CG2674-RA','chr2L',107760,114433,'+')))
self.assertEqual(features.source_file,None)
self.assertEqual(features[0],
Feature('CG31973','chr2L',25402,59243,'-'))
self.assertEqual(features[1],
Feature('CG2674-RC','chr2L',107926,114433,'+'))
self.assertEqual(features[2],
Feature('CG2674-RE','chr2L',106903,114433,'+'))
self.assertEqual(features[3],
Feature('CG2674-RA','chr2L',107760,114433,'+'))
def test_reading_in_RNAseq_data(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
self.assertEqual(rna_seq.source_file,'Transcripts-ex1.txt')
self.assertEqual(len(rna_seq),10,
"Wrong number of lines from RNA-seq file")
self.assertTrue(rna_seq.isFlagged(),
"Data should be flagged")
def test_source_file_is_stored(self):
features = FeatureSet('Transcripts-ex1.txt')
self.assertEqual(features.source_file,'Transcripts-ex1.txt')
for feature in features:
self.assertEqual(feature.source_file,'Transcripts-ex1.txt')
def test_filter_on_chromosome(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
rna_chr = rna_seq.filterByChr('chr3LHet')
self.assertEqual(len(rna_chr),1,
"Wrong number of chromosomes")
for rna_data in rna_chr:
self.assertEqual(rna_data.chrom,'chr3LHet',
"Wrong chromosome filtered")
def test_filter_on_strand(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
rna_plus = rna_seq.filterByStrand('+')
self.assertEqual(len(rna_plus),5,
"Wrong number of + strands")
rna_minus = rna_seq.filterByStrand('-')
self.assertEqual(len(rna_minus),5,
"Wrong number of - strands")
def test_filter_on_flag(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
rna_flagged = rna_seq.filterByFlag(1)
self.assertEqual(len(rna_flagged),4,
"Wrong number of flagged data lines")
def test_getTSS(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
rna_plus = rna_seq.filterByStrand('+')
for rna_data in rna_plus:
self.assertTrue((rna_data.strand == '+' and
rna_data.start == rna_data.getTSS()),
"Incorrect TSS on + strand")
rna_minus = rna_seq.filterByStrand('-')
for rna_data in rna_minus:
self.assertTrue((rna_data.strand == '-' and
rna_data.end == rna_data.getTSS()),
"Incorrect TSS on - strand")
def test_filter_on_TSS(self):
rna_seq = FeatureSet('Transcripts-ex1.txt')
lower,upper = 5000000,10000000
rna_tss = rna_seq.filterByTSS(upper,lower)
self.assertEqual(len(rna_tss),3,
"Wrong number of transcripts filtered on TSS")
for rna_data in rna_tss:
self.assertTrue((rna_data.getTSS() >= lower and
rna_data.getTSS() <= upper),
"Transcript outside range")
def test_sort_by_distance(self):
rna_sort = FeatureSet('Transcripts-ex1.txt')
position = 4250000
# Do sort on distance
# Sort is done in place, so assignment is not required
# however the sort function should return a reference to
# the initial object
result = rna_sort.sortByDistanceFrom(position)
self.assertEqual(result,rna_sort,
"Returned object doesn't match subject")
# Check that each distance is greater than the previous one
last_rna_data = None
for rna_data in rna_sort:
if not last_rna_data:
last_rna_data = rna_data
else:
self.assertTrue((abs(rna_data.getTSS() - position) >=
abs(last_rna_data.getTSS() - position)),
"Sort by distance failed")
def test_sort_by_closest_distance_to_edge(self):
rna_sort = FeatureSet('Transcripts-ex1.txt')
position = 4250000
# Do sort
# Sort is done in place, so assignment is not required
# however the sort function should return a reference to
# the initial object
result = rna_sort.sortByClosestEdgeTo(position)
self.assertEqual(result,rna_sort,
"Returned object doesn't match subject")
# Check that the closest distances are in ascending order
last_rna_data = None
for rna_data in rna_sort:
if not last_rna_data:
last_rna_data = rna_data
else:
self.assertTrue((min(abs(rna_data.getTSS() - position),
abs(rna_data.getTES() - position)) >=
min(abs(last_rna_data.getTSS() - position),
abs(last_rna_data.getTES() - position))),
"Sort by closest distance to edge failed")
def test_sort_by_closest_TSS_to_edge(self):
rna_sort = FeatureSet('Transcripts-ex1.txt')
position = (16000000,17500000)
# Do sort
# Sort is done in place, so assignment is not required
# however the sort function should return a reference to
# the initial object
result = rna_sort.sortByClosestTSSTo(*position)
self.assertEqual(result,rna_sort,
"Returned object doesn't match subject")
# Check that the closest distances are in ascending order
last_rna_data = None
for rna_data in rna_sort:
if not last_rna_data:
last_rna_data = rna_data
else:
self.assertTrue((min(abs(rna_data.getTSS() - position[0]),
abs(rna_data.getTSS() - position[1])) >=
min(abs(last_rna_data.getTSS() - position[0]),
abs(last_rna_data.getTSS() - position[1]))),
"Sort by closest TSS to edge failed")
def test_reading_bad_file_scientific_notation(self):
self.assertRaises(Exception,FeatureSet,'Transcripts-ex2.txt')
def test_reading_bad_file_end_lower_than_start(self):
self.assertRaises(Exception,FeatureSet,'Transcripts-ex2a.txt')
def test_get_item(self):
features = FeatureSet('Transcripts-ex1.txt')
feature = features[2]
self.assertEqual(feature,Feature('CG32847-RB',
'chr3L',
'15114722',
'15115217',
'+'))
def test_get_slice(self):
features = FeatureSet('Transcripts-ex1.txt')
features_slice = features[1:3]
self.assertTrue(isinstance(features_slice,FeatureSet))
self.assertEqual(len(features_slice),2)
self.assertEqual(features[1],features_slice[0])
self.assertEqual(features[2],features_slice[1])
def test__eq__(self):
# Check equality of FeatureSets
feature_set1 = FeatureSet()
feature_set2 = FeatureSet()
# Empty feature sets
self.assertEqual(feature_set1,feature_set2)
# Populate
feature_set1.addFeature(Feature('CG1000','chr1','1','2','+'))
feature_set2.addFeature(Feature('CG1000','chr1','1','2','+'))
self.assertEqual(feature_set1,feature_set2)
# Add second
feature_set1.addFeature(Feature('CG2000','chr1','1','2','+'))
self.assertNotEqual(feature_set1,feature_set2)
feature_set2.addFeature(Feature('CG2000','chr1','1','2','+'))
self.assertEqual(feature_set1,feature_set2)
# Add third
feature_set1.addFeature(Feature('CG2001','chr2',3,4,'-'))
feature_set2.addFeature(Feature('CG2002','chr2',3,5,'+'))
self.assertNotEqual(feature_set1,feature_set2)
| artistic-2.0 | 7,190,113,374,988,786,000 | 44.990683 | 89 | 0.55446 | false |
dingmingliu/quanttrade | bt/core.py | 1 | 37660 | """
Contains the core building blocks of the framework.
"""
import math
from copy import deepcopy
import pandas as pd
import numpy as np
import cython as cy
class Node(object):
"""
The Node is the main building block in bt's tree structure design.
Both StrategyBase and SecurityBase inherit Node. It contains the
core functionality of a tree node.
Args:
* name (str): The Node name
* parent (Node): The parent Node
* children (dict, list): A collection of children. If dict,
the format is {name: child}, if list then list of children.
Attributes:
* name (str): Node name
* parent (Node): Node parent
* root (Node): Root node of the tree (topmost node)
* children (dict): Node's children
* now (datetime): Used when backtesting to store current date
    * stale (bool): Flag used to determine if Node is stale and needs
updating
* prices (TimeSeries): Prices of the Node. Prices for a security will
be the security's price, for a strategy it will be an index that
reflects the value of the strategy over time.
* price (float): last price
* value (float): last value
* weight (float): weight in parent
* full_name (str): Name including parents' names
* members (list): Current Node + node's children
"""
_price = cy.declare(cy.double)
_value = cy.declare(cy.double)
_weight = cy.declare(cy.double)
_issec = cy.declare(cy.bint)
_has_strat_children = cy.declare(cy.bint)
def __init__(self, name, parent=None, children=None):
self.name = name
# strategy children helpers
self._has_strat_children = False
self._strat_children = []
# if children is not None, we assume that we want to limit the
# available children space to the provided list.
if children is not None:
if isinstance(children, list):
# if all strings - just save as universe_filter
if all(isinstance(x, str) for x in children):
self._universe_tickers = children
# empty dict - don't want to uselessly create
# tons of children when they might not be needed
children = {}
else:
# this will be case if we pass in children
# (say a bunch of sub-strategies)
tmp = {}
ut = []
for c in children:
if type(c) == str:
tmp[c] = SecurityBase(c)
ut.append(c)
else:
# deepcopy object for possible later reuse
tmp[c.name] = deepcopy(c)
# if strategy, turn on flag and add name to list
# strategy children have special treatment
if isinstance(c, StrategyBase):
self._has_strat_children = True
self._strat_children.append(c.name)
# if not strategy, then we will want to add this to
# universe_tickers to filter on setup
else:
ut.append(c.name)
children = tmp
# we want to keep whole universe in this case
# so set to None
self._universe_tickers = ut
if parent is None:
self.parent = self
self.root = self
else:
self.parent = parent
self.root = parent.root
parent._add_child(self)
# default children
if children is None:
children = {}
self._universe_tickers = None
self.children = children
self._childrenv = children.values()
for c in self._childrenv:
c.parent = self
c.root = self.root
# set default value for now
self.now = 0
# make sure root has stale flag
        # used to avoid unnecessary updates
# sometimes we change values in the tree and we know that we will need
# to update if another node tries to access a given value (say weight).
        # This avoids calling the update until it is actually needed.
self.root.stale = False
# helper vars
self._price = 0
self._value = 0
self._weight = 0
# is security flag - used to avoid updating 0 pos securities
self._issec = False
def __getitem__(self, key):
return self.children[key]
@property
def prices(self):
"""
A TimeSeries of the Node's price.
"""
# can optimize depending on type -
# securities don't need to check stale to
# return latest prices, whereas strategies do...
raise NotImplementedError()
@property
def price(self):
"""
Current price of the Node
"""
# can optimize depending on type -
# securities don't need to check stale to
# return latest prices, whereas strategies do...
raise NotImplementedError()
@property
def value(self):
"""
Current value of the Node
"""
if self.root.stale:
self.root.update(self.root.now, None)
return self._value
@property
def weight(self):
"""
Current weight of the Node (with respect to the parent).
"""
if self.root.stale:
self.root.update(self.root.now, None)
return self._weight
def setup(self, dates):
"""
Setup method used to initialize a Node with a set of dates.
"""
raise NotImplementedError()
def _add_child(self, child):
child.parent = self
child.root = self.root
if self.children is None:
self.children = {child.name: child}
else:
self.children[child.name] = child
self._childrenv = self.children.values()
def update(self, date, data=None, inow=None):
"""
Update Node with latest date, and optionally some data.
"""
raise NotImplementedError()
def adjust(self, amount, update=True, isflow=True):
"""
Adjust Node value by amount.
"""
raise NotImplementedError()
def allocate(self, amount, update=True):
"""
Allocate capital to Node.
"""
raise NotImplementedError()
@property
def members(self):
"""
Node members. Members include current node as well as Node's
children.
"""
res = [self]
for c in self.children.values():
res.extend(c.members)
return res
@property
def full_name(self):
if self.parent == self:
return self.name
else:
return '%s>%s' % (self.parent.full_name, self.name)
class StrategyBase(Node):
"""
Strategy Node. Used to define strategy logic within a tree.
    A Strategy's role is to allocate capital to its children
based on a function.
Args:
* name (str): Strategy name
* children (dict, list): A collection of children. If dict,
the format is {name: child}, if list then list of children.
Children can be any type of Node.
* parent (Node): The parent Node
Attributes:
* name (str): Strategy name
* parent (Strategy): Strategy parent
* root (Strategy): Root node of the tree (topmost node)
* children (dict): Strategy's children
* now (datetime): Used when backtesting to store current date
    * stale (bool): Flag used to determine if Strategy is stale and needs
updating
* prices (TimeSeries): Prices of the Strategy - basically an index that
reflects the value of the strategy over time.
* price (float): last price
* value (float): last value
* weight (float): weight in parent
* full_name (str): Name including parents' names
* members (list): Current Strategy + strategy's children
* commission_fn (fn(quantity, price)): A function used to determine the
commission (transaction fee) amount. Could be used to model slippage
(implementation shortfall). Note that often fees are symmetric for
buy and sell and absolute value of quantity should be used for
calculation.
* capital (float): Capital amount in Strategy - cash
* universe (DataFrame): Data universe available at the current time.
Universe contains the data passed in when creating a Backtest. Use
this data to determine strategy logic.
"""
_capital = cy.declare(cy.double)
_net_flows = cy.declare(cy.double)
_last_value = cy.declare(cy.double)
_last_price = cy.declare(cy.double)
_last_fee = cy.declare(cy.double)
_paper_trade = cy.declare(cy.bint)
bankrupt = cy.declare(cy.bint)
def __init__(self, name, children=None, parent=None):
Node.__init__(self, name, children=children, parent=parent)
self._capital = 0
self._weight = 1
self._value = 0
self._price = 100
# helper vars
self._net_flows = 0
self._last_value = 0
self._last_price = 100
self._last_fee = 0
# default commission function
self.commission_fn = self._dflt_comm_fn
self._paper_trade = False
self._positions = None
self.bankrupt = False
@property
def price(self):
"""
Current price.
"""
if self.root.stale:
self.root.update(self.now, None)
return self._price
@property
def prices(self):
"""
TimeSeries of prices.
"""
if self.root.stale:
self.root.update(self.now, None)
return self._prices.ix[:self.now]
@property
def values(self):
"""
TimeSeries of values.
"""
if self.root.stale:
self.root.update(self.now, None)
return self._values.ix[:self.now]
@property
def capital(self):
"""
Current capital - amount of unallocated capital left in strategy.
"""
# no stale check needed
return self._capital
@property
def cash(self):
"""
TimeSeries of unallocated capital.
"""
# no stale check needed
return self._cash
@property
def fees(self):
"""
TimeSeries of fees.
"""
# no stale check needed
return self._fees
@property
def universe(self):
"""
Data universe available at the current time.
Universe contains the data passed in when creating a Backtest.
Use this data to determine strategy logic.
"""
# avoid windowing every time
# if calling and on same date return
# cached value
if self.now == self._last_chk:
return self._funiverse
else:
self._last_chk = self.now
self._funiverse = self._universe.ix[:self.now]
return self._funiverse
@property
def positions(self):
"""
TimeSeries of positions.
"""
# if accessing and stale - update first
if self.root.stale:
self.root.update(self.root.now, None)
if self._positions is not None:
return self._positions
else:
vals = pd.DataFrame({x.name: x.positions for x in self.members
if isinstance(x, SecurityBase)})
self._positions = vals
return vals
def setup(self, universe):
"""
Setup strategy with universe. This will speed up future calculations
and updates.
"""
# save full universe in case we need it
self._original_data = universe
# determine if needs paper trading
# and setup if so
if self is not self.parent:
self._paper_trade = True
self._paper_amount = 1000000
paper = deepcopy(self)
paper.parent = paper
paper.root = paper
paper._paper_trade = False
paper.setup(self._original_data)
paper.adjust(self._paper_amount)
self._paper = paper
# setup universe
funiverse = universe
if self._universe_tickers is not None:
# if we have universe_tickers defined, limit universe to
# those tickers
valid_filter = list(set(universe.columns)
.intersection(self._universe_tickers))
funiverse = universe[valid_filter].copy()
# if we have strat children, we will need to create their columns
# in the new universe
if self._has_strat_children:
for c in self._strat_children:
funiverse[c] = np.nan
# must create to avoid pandas warning
funiverse = pd.DataFrame(funiverse)
self._universe = funiverse
# holds filtered universe
self._funiverse = funiverse
self._last_chk = None
# We're not bankrupt yet
self.bankrupt = False
# setup internal data
self.data = pd.DataFrame(index=funiverse.index,
columns=['price', 'value', 'cash', 'fees'],
data=0.0)
self._prices = self.data['price']
self._values = self.data['value']
self._cash = self.data['cash']
self._fees = self.data['fees']
# setup children as well - use original universe here - don't want to
# pollute with potential strategy children in funiverse
if self.children is not None:
[c.setup(universe) for c in self._childrenv]
@cy.locals(newpt=cy.bint, val=cy.double, ret=cy.double)
def update(self, date, data=None, inow=None):
"""
Update strategy. Updates prices, values, weight, etc.
"""
# resolve stale state
self.root.stale = False
# update helpers on date change
# also set newpt flag
newpt = False
if self.now == 0:
newpt = True
elif date != self.now:
self._net_flows = 0
self._last_price = self._price
self._last_value = self._value
self._last_fee = 0.0
newpt = True
# update now
self.now = date
if inow is None:
if self.now == 0:
inow = 0
else:
inow = self.data.index.get_loc(date)
# update children if any and calculate value
val = self._capital # default if no children
if self.children is not None:
for c in self._childrenv:
# avoid useless update call
if c._issec and not c._needupdate:
continue
c.update(date, data, inow)
val += c.value
if self.root == self:
if (val < 0) and not self.bankrupt:
# Declare a bankruptcy
self.bankrupt = True
self.flatten()
# update data if this value is different or
# if now has changed - avoid all this if not since it
# won't change
if newpt or self._value != val:
self._value = val
self._values.values[inow] = val
try:
ret = self._value / (self._last_value
+ self._net_flows) - 1
except ZeroDivisionError:
if self._value == 0:
ret = 0
else:
raise ZeroDivisionError(
'Could not update %s. Last value '
                        'was %s and net flows were %s. Current '
'value is %s. Therefore, '
'we are dividing by zero to obtain the return '
'for the period.' % (self.name,
self._last_value,
self._net_flows,
self._value))
self._price = self._last_price * (1 + ret)
self._prices.values[inow] = self._price
# update children weights
if self.children is not None:
for c in self._childrenv:
# avoid useless update call
if c._issec and not c._needupdate:
continue
try:
c._weight = c.value / val
except ZeroDivisionError:
c._weight = 0.0
# if we have strategy children, we will need to update them in universe
if self._has_strat_children:
for c in self._strat_children:
# TODO: optimize ".loc" here as well
self._universe.loc[date, c] = self.children[c].price
# Cash should track the unallocated capital at the end of the day, so
# we should update it every time we call "update".
# Same for fees
self._cash.values[inow] = self._capital
self._fees.values[inow] = self._last_fee
# update paper trade if necessary
if newpt and self._paper_trade:
self._paper.update(date)
self._paper.run()
self._paper.update(date)
# update price
self._price = self._paper.price
self._prices.values[inow] = self._price
@cy.locals(amount=cy.double, update=cy.bint, flow=cy.bint, fees=cy.double)
def adjust(self, amount, update=True, flow=True, fee=0.0):
"""
Adjust capital - used to inject capital to a Strategy. This injection
of capital will have no effect on the children.
Args:
* amount (float): Amount to adjust by.
* update (bool): Force update?
* flow (bool): Is this adjustment a flow? Basically a flow will
have an impact on the price index. Examples of flows are
commissions.
"""
# adjust capital
self._capital += amount
self._last_fee += fee
# if flow - increment net_flows - this will not affect
# performance. Commissions and other fees are not flows since
# they have a performance impact
if flow:
self._net_flows += amount
if update:
# indicates that data is now stale and must
# be updated before access
self.root.stale = True
@cy.locals(amount=cy.double, update=cy.bint)
def allocate(self, amount, child=None, update=True):
"""
Allocate capital to Strategy. By default, capital is allocated
recursively down the children, proportionally to the children's
weights. If a child is specified, capital will be allocated
to that specific child.
        Allocations also have a side-effect. They will deduct the same amount
from the parent's "account" to offset the allocation. If there is
remaining capital after allocation, it will remain in Strategy.
Args:
* amount (float): Amount to allocate.
* child (str): If specified, allocation will be directed to child
only. Specified by name.
* update (bool): Force update.
"""
# allocate to child
if child is not None:
if child not in self.children:
c = SecurityBase(child)
c.setup(self._universe)
# update to bring up to speed
c.update(self.now)
# add child to tree
self._add_child(c)
# allocate to child
self.children[child].allocate(amount)
# allocate to self
else:
# adjust parent's capital
# no need to update now - avoids repetition
if self.parent == self:
self.parent.adjust(-amount, update=False, flow=True)
else:
# do NOT set as flow - parent will be another strategy
# and therefore should not incur flow
self.parent.adjust(-amount, update=False, flow=False)
# adjust self's capital
self.adjust(amount, update=False, flow=True)
# push allocation down to children if any
# use _weight to avoid triggering an update
if self.children is not None:
[c.allocate(amount * c._weight, update=False)
for c in self._childrenv]
# mark as stale if update requested
if update:
self.root.stale = True
@cy.locals(delta=cy.double, weight=cy.double, base=cy.double)
def rebalance(self, weight, child, base=np.nan, update=True):
"""
Rebalance a child to a given weight.
This is a helper method to simplify code logic. This method is used
        when we want to set the weight of a particular child to a set amount.
It is similar to allocate, but it calculates the appropriate allocation
based on the current weight.
Args:
* weight (float): The target weight. Usually between -1.0 and 1.0.
* child (str): child to allocate to - specified by name.
* base (float): If specified, this is the base amount all weight
delta calculations will be based off of. This is useful when we
determine a set of weights and want to rebalance each child
given these new weights. However, as we iterate through each
child and call this method, the base (which is by default the
current value) will change. Therefore, we can set this base to
the original value before the iteration to ensure the proper
allocations are made.
* update (bool): Force update?
"""
# if weight is 0 - we want to close child
if weight == 0:
if child in self.children:
return self.close(child)
else:
return
# if no base specified use self's value
if np.isnan(base):
base = self.value
# else make sure we have child
if child not in self.children:
c = SecurityBase(child)
c.setup(self._universe)
# update child to bring up to speed
c.update(self.now)
self._add_child(c)
# allocate to child
# figure out weight delta
c = self.children[child]
delta = weight - c.weight
c.allocate(delta * base)
def close(self, child):
"""
Close a child position - alias for rebalance(0, child). This will also
flatten (close out all) the child's children.
Args:
* child (str): Child, specified by name.
"""
c = self.children[child]
# flatten if children not None
if c.children is not None and len(c.children) != 0:
c.flatten()
c.allocate(-c.value)
def flatten(self):
"""
Close all child positions.
"""
# go right to base alloc
[c.allocate(-c.value) for c in self._childrenv if c.value != 0]
def run(self):
"""
This is the main logic method. Override this method to provide some
algorithm to execute on each date change. This method is called by
backtester.
"""
pass
def set_commissions(self, fn):
"""
Set commission (transaction fee) function.
Args:
fn (fn(quantity, price)): Function used to determine commission
amount.
"""
self.commission_fn = fn
for c in self._childrenv:
if isinstance(c, StrategyBase):
c.set_commissions(fn)
@cy.locals(q=cy.double, p=cy.double)
def _dflt_comm_fn(self, q, p):
return max(1, abs(q) * 0.01)
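# Illustrative sketch (not part of the original module): a user-supplied
# commission model that can be passed to StrategyBase.set_commissions. The
# 5 bps rate and the 1.00 minimum fee are arbitrary example values.
def example_commission(quantity, price):
    """Charge 5 basis points of traded notional with a 1.00 minimum fee."""
    return max(1.0, abs(quantity) * price * 0.0005)
# Usage: strategy.set_commissions(example_commission)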
class SecurityBase(Node):
"""
Security Node. Used to define a security within a tree.
    A Security has no children. It simply models an asset that can be bought
or sold.
Args:
* name (str): Security name
* multiplier (float): security multiplier - typically used for
derivatives.
Attributes:
* name (str): Security name
* parent (Security): Security parent
* root (Security): Root node of the tree (topmost node)
* now (datetime): Used when backtesting to store current date
    * stale (bool): Flag used to determine if Security is stale and needs
updating
* prices (TimeSeries): Security prices.
* price (float): last price
* value (float): last value - basically position * price * multiplier
* weight (float): weight in parent
* full_name (str): Name including parents' names
* members (list): Current Security + strategy's children
* position (float): Current position (quantity).
"""
_last_pos = cy.declare(cy.double)
_position = cy.declare(cy.double)
multiplier = cy.declare(cy.double)
_prices_set = cy.declare(cy.bint)
_needupdate = cy.declare(cy.bint)
@cy.locals(multiplier=cy.double)
def __init__(self, name, multiplier=1):
Node.__init__(self, name, parent=None, children=None)
self._value = 0
self._price = 0
self._weight = 0
self._position = 0
self.multiplier = multiplier
# opt
self._last_pos = 0
self._issec = True
self._needupdate = True
@property
def price(self):
"""
Current price.
"""
# if accessing and stale - update first
if self._needupdate or self.now != self.parent.now:
self.update(self.root.now)
return self._price
@property
def prices(self):
"""
TimeSeries of prices.
"""
# if accessing and stale - update first
if self._needupdate or self.now != self.parent.now:
self.update(self.root.now)
return self._prices.ix[:self.now]
@property
def values(self):
"""
TimeSeries of values.
"""
# if accessing and stale - update first
if self._needupdate or self.now != self.parent.now:
self.update(self.root.now)
if self.root.stale:
self.root.update(self.root.now, None)
return self._values.ix[:self.now]
@property
def position(self):
"""
Current position
"""
# no stale check needed
return self._position
@property
def positions(self):
"""
TimeSeries of positions.
"""
# if accessing and stale - update first
if self._needupdate:
self.update(self.root.now)
if self.root.stale:
self.root.update(self.root.now, None)
return self._positions.ix[:self.now]
def setup(self, universe):
"""
Setup Security with universe. Speeds up future runs.
Args:
* universe (DataFrame): DataFrame of prices with security's name as
one of the columns.
"""
# if we already have all the prices, we will store them to speed up
        # future updates
try:
prices = universe[self.name]
except KeyError:
prices = None
# setup internal data
if prices is not None:
self._prices = prices
self.data = pd.DataFrame(index=universe.index,
columns=['value', 'position'],
data=0.0)
self._prices_set = True
else:
self.data = pd.DataFrame(index=universe.index,
columns=['price', 'value', 'position'])
self._prices = self.data['price']
self._prices_set = False
self._values = self.data['value']
self._positions = self.data['position']
@cy.locals(prc=cy.double)
def update(self, date, data=None, inow=None):
"""
Update security with a given date and optionally, some data.
This will update price, value, weight, etc.
"""
# filter for internal calls when position has not changed - nothing to
# do. Internal calls (stale root calls) have None data. Also want to
# make sure date has not changed, because then we do indeed want to
# update.
if date == self.now and self._last_pos == self._position:
return
if inow is None:
if date == 0:
inow = 0
else:
inow = self.data.index.get_loc(date)
# date change - update price
if date != self.now:
# update now
self.now = date
if self._prices_set:
self._price = self._prices.values[inow]
# traditional data update
elif data is not None:
prc = data[self.name]
self._price = prc
self._prices.values[inow] = prc
self._positions.values[inow] = self._position
self._last_pos = self._position
self._value = self._position * self._price * self.multiplier
self._values.values[inow] = self._value
if self._weight == 0 and self._position == 0:
self._needupdate = False
@cy.locals(amount=cy.double, update=cy.bint, q=cy.double, outlay=cy.double)
def allocate(self, amount, update=True):
"""
This allocates capital to the Security. This is the method used to
buy/sell the security.
        A given amount of shares will be determined based on the current price, a
        commission will be calculated based on the parent's commission fn, and
any remaining capital will be passed back up to parent as an
adjustment.
Args:
* amount (float): Amount of adjustment.
* update (bool): Force update?
"""
# will need to update if this has been idle for a while...
# update if needupdate or if now is stale
# fetch parent's now since our now is stale
if self._needupdate or self.now != self.parent.now:
self.update(self.parent.now)
# ignore 0 alloc
# Note that if the price of security has dropped to zero, then it should
# never be selected by SelectAll, SelectN etc. I.e. we should not open
# the position at zero price. At the same time, we are able to close
# it at zero price, because at that point amount=0.
# Note also that we don't erase the position in an asset which price has
# dropped to zero (though the weight will indeed be = 0)
if amount == 0:
return
if self.parent is self or self.parent is None:
raise Exception(
'Cannot allocate capital to a parentless security')
if self._price == 0 or np.isnan(self._price):
raise Exception(
'Cannot allocate capital to '
'%s because price is 0 or nan as of %s'
% (self.name, self.parent.now))
# buy/sell
# determine quantity - must also factor in commission
# closing out?
if amount == -self._value:
q = -self._position
else:
if (self._position > 0) or ((self._position == 0) and (amount > 0)):
# if we're going long or changing long position
q = math.floor(amount / (self._price * self.multiplier))
else:
# if we're going short or changing short position
q = math.ceil(amount / (self._price * self.multiplier))
# if q is 0 nothing to do
if q == 0 or np.isnan(q):
return
# this security will need an update, even if pos is 0 (for example if
# we close the positions, value and pos is 0, but still need to do that
# last update)
self._needupdate = True
# adjust position & value
self._position += q
# calculate proper adjustment for parent
# parent passed down amount so we want to pass
# -outlay back up to parent to adjust for capital
# used
outlay, fee = self.outlay(q)
# call parent
self.parent.adjust(-outlay, update=update, flow=False, fee=fee)
@cy.locals(q=cy.double, p=cy.double)
def commission(self, q, p):
"""
Calculates the commission (transaction fee) based on quantity and price.
Uses the parent's commission_fn.
Args:
* q (float): quantity
* p (float): price
"""
return self.parent.commission_fn(q, p)
@cy.locals(q=cy.double)
def outlay(self, q):
"""
Determines the complete cash outlay (including commission) necessary
given a quantity q.
Second returning parameter is a commission itself.
Args:
* q (float): quantity
"""
fee = self.commission(q, self._price * self.multiplier)
full_outlay = q * self._price * self.multiplier + fee
return full_outlay, fee
def run(self):
"""
Does nothing - securities have nothing to do on run.
"""
pass
class Algo(object):
"""
Algos are used to modularize strategy logic so that strategy logic becomes
modular, composable, more testable and less error prone. Basically, the
Algo should follow the unix philosophy - do one thing well.
In practice, algos are simply a function that receives one argument, the
    Strategy (referred to as target) and are expected to return a bool.
When some state preservation is necessary between calls, the Algo
object can be used (this object). The __call___ method should be
implemented and logic defined therein to mimic a function call. A
    simple function may also be used if no state preservation is necessary.
Args:
* name (str): Algo name
"""
def __init__(self, name=None):
self._name = name
@property
def name(self):
"""
Algo name.
"""
if self._name is None:
self._name = self.__class__.__name__
return self._name
def __call__(self, target):
raise NotImplementedError("%s not implemented!" % self.name)
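# Illustrative sketch (not part of the original module): a minimal Algo
# subclass showing the expected __call__(target) signature. The temp keys
# 'selected' and 'weights' are conventions assumed here for passing data
# between algos in a stack.
class WeighEqually(Algo):
    """Example Algo that assigns equal weights to the names found in
    target.temp['selected'] and stores them in target.temp['weights']."""
    def __call__(self, target):
        selected = target.temp.get('selected', [])
        if len(selected) == 0:
            return False
        w = 1.0 / len(selected)
        target.temp['weights'] = {name: w for name in selected}
        return True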
class AlgoStack(Algo):
"""
    An AlgoStack derives from Algo and runs multiple Algos until a
failure is encountered.
    The purpose of an AlgoStack is to group a logical set of Algos together. Each
Algo in the stack is run. Execution stops if one Algo returns False.
Args:
* algos (list): List of algos.
"""
def __init__(self, *algos):
super(AlgoStack, self).__init__()
self.algos = algos
self.check_run_always = any(hasattr(x, 'run_always')
for x in self.algos)
def __call__(self, target):
        # normal running mode
if not self.check_run_always:
for algo in self.algos:
if not algo(target):
return False
return True
# run mode when at least one algo has a run_always attribute
else:
# store result in res
# allows continuation to check for and run
# algos that have run_always set to True
res = True
for algo in self.algos:
if res:
res = algo(target)
elif hasattr(algo, 'run_always'):
if algo.run_always:
algo(target)
return res
class Strategy(StrategyBase):
"""
Strategy expands on the StrategyBase and incorporates Algos.
Basically, a Strategy is built by passing in a set of algos. These algos
will be placed in an Algo stack and the run function will call the stack.
Furthermore, two class attributes are created to pass data between algos.
perm for permanent data, temp for temporary data.
Args:
* name (str): Strategy name
* algos (list): List of Algos to be passed into an AlgoStack
* children (dict, list): Children - useful when you want to create
strategies of strategies
Attributes:
* stack (AlgoStack): The stack
* temp (dict): A dict containing temporary data - cleared on each call
to run. This can be used to pass info to other algos.
* perm (dict): Permanent data used to pass info from one algo to
another. Not cleared on each pass.
"""
def __init__(self, name, algos=[], children=None):
super(Strategy, self).__init__(name, children=children)
self.stack = AlgoStack(*algos)
self.temp = {}
self.perm = {}
def run(self):
# clear out temp data
self.temp = {}
# run algo stack
self.stack(self)
# run children
for c in self.children.values():
c.run()
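# Illustrative sketch (not part of the original module): assembling a Strategy
# from algos and child securities. WeighEqually is the example algo defined
# above; SelectAllExample and RebalanceExample are assumed helpers written only
# for this sketch, and prices_df is an assumed DataFrame of prices indexed by date.
def _example_build_strategy(prices_df):
    class SelectAllExample(Algo):
        # Select every security available in the current universe window.
        def __call__(self, target):
            target.temp['selected'] = list(target.universe.columns)
            return True
    class RebalanceExample(Algo):
        # Rebalance children to the weights computed earlier in the stack.
        def __call__(self, target):
            base = target.value
            for name, w in target.temp.get('weights', {}).items():
                target.rebalance(w, child=name, base=base)
            return True
    s = Strategy('equal_weight',
                 [SelectAllExample(), WeighEqually(), RebalanceExample()],
                 children=list(prices_df.columns))
    s.setup(prices_df)
    return s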
| apache-2.0 | -3,770,879,272,697,670,000 | 32.151408 | 80 | 0.553877 | false |
adamkovics/atmosphere | atmosphere/gas_opacity.py | 1 | 8280 | """
Add gas opacities to model based on the composition and vertical structure.
"""
import numpy as np
import logging
logger = logging.getLogger()
def interpolate_kc(p, T, kc, verbose=False):
"""Linearly interpolate k-coefficients at a particular
    pressure and temperature, using the input k-coefficient grid, kc.
The standard structure of k-coefficient data array is:
[wavelengths,pressures,temperatures,g-nodes]
where the g-node are the Legendre-Gauss quadrature nodes
or "g-ordinate". Returned array of coefficients corresponds to:
[wavelengths,g-nodes]
"""
pressures = np.array(kc['pressures'])
temperatures = np.array(kc['temperatures'])
ind_p = np.where(pressures < p)
ind_T = np.where(temperatures < T)
i = (np.max(ind_p) if np.size(ind_p) else np.array(0)).clip(0,len(pressures)-2)
j = (np.max(ind_T) if np.size(ind_T) else np.array(0)).clip(0,len(temperatures)-2)
L11 = np.log(kc['kc'][:,i,j,:])
L12 = np.log(kc['kc'][:,i+1,j,:])
L21 = np.log(kc['kc'][:,i,j+1,:])
L22 = np.log(kc['kc'][:,i+1,j+1,:])
L1T = L11 + (L12-L11)*(T-temperatures[j])/(temperatures[j+1]-temperatures[j])
L2T = L21 + (L22-L21)*(T-temperatures[j])/(temperatures[j+1]-temperatures[j])
LPT = L1T + (L2T-L1T)*((np.log(p)-np.log(pressures[i]))/
(np.log(pressures[i+1])-np.log(pressures[i])))
kc_interp = np.exp(LPT)
return kc_interp
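# Illustrative sketch (not part of the original module): the minimal dictionary
# layout interpolate_kc() expects. The grid sizes and values below are arbitrary
# example numbers, not data from any real k-coefficient file.
def _example_interpolate_kc():
    nlam, npress, ntemp, ng = 64, 5, 4, 8
    kc = {'pressures': np.logspace(-3, 0, npress),        # bar
          'temperatures': np.linspace(70., 160., ntemp),  # K
          'kc': np.ones((nlam, npress, ntemp, ng))}       # [lam, p, T, g]
    # Returns an array of shape (nlam, ng) interpolated to the requested p, T.
    return interpolate_kc(0.05, 120., kc)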
def append_kc_to_layers(model, kc, species):
"""Set k-coefficients for each layer by interpolating
to appropriate temperature and pressure and update the
    data structure for the atmosphere."""
kc_shape = (model['nlay'], model['nlam'], kc['ng'])
model['layers'].update({'kc':{species:np.ndarray(kc_shape),
'ng':kc['ng'],
'g':kc['g'],
'w':kc['w'],
}})
for i in range(model['nlay']):
model['layers']['kc'][species][i,:,:] = interpolate_kc(model['layers']['p'][i],
model['layers']['T'][i],
kc)
return
# There are additional k-coefficients for C2H2, C2H6, and CO.
# These are currently calculated on the VIMS grid as they are applicable in
# roughly the 2.7--3um wavelength region that is inacccessible from the ground.
#
# Here we revise the gas opacity in the model to include multiple k-coefficient files.
#
# It is a reasonable estimate to sum k-coefficients after interpolating each onto the
# same pressure and temperature; however, a minimal amount of error-checking should confirm
# that the same wavelength grid and g-ordinates are being used.
#
# Overview of revisions to the code:
#
# (1) Generalization of the set_methane() to other species.
# (2) Error-checking for wavelength (and P,T) grid
# (3) back-compatibility for set_methane() method.
# (4) Some thought to CH3D abundance variability.
#
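# A minimal sketch of the summation described above (not used elsewhere in
# this module).  The 'ng' and 'wavelength' keys follow the .npy layout used
# by set_methane below; the FITS path would need the equivalent checks.
def _sum_interpolated_kc(p, T, kc_a, kc_b):
    """Interpolate two k-coefficient sets to (p, T) and sum them, after
    checking that they share the same g-ordinates and wavelength grid."""
    if kc_a['ng'] != kc_b['ng']:
        raise ValueError('g-ordinate grids differ between k-coefficient sets')
    if not np.allclose(kc_a['wavelength']['mu'], kc_b['wavelength']['mu']):
        raise ValueError('wavelength grids differ between k-coefficient sets')
    return interpolate_kc(p, T, kc_a) + interpolate_kc(p, T, kc_b)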
def set_methane(model, kc_file, CH3D_scale=None, verbose=False):
"""Set methane opacities in atmosphere structure, model, by
    interpolating k-coefficients from the specified kc_file,
using the temperatures and pressures for each layer.
"""
if CH3D_scale:
if len(kc_file) != 2:
logger.debug('two k-coefficient files needed for set_methane_opacity()')
return None
kc = np.load(kc_file[0]).item()
kc_CH3D = np.load(kc_file[1]).item()
kc['kc'] = kc['kc']+CH3D_scale*kc_CH3D['kc']
model.update({'wavelength':kc['wavelength']['mu'],
'nlam':kc['wavelength']['nlam'], })
append_kc_to_layers(model, kc, 'CH4')
tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'],
(model['nlay'],1,1))
if 'tau' not in model['layers']: model['layers'].update({'tau':{}})
model['layers']['tau'].update({'CH4':tau_CH4})
return
if kc_file.endswith('.npy'):
kc = np.load(kc_file).item()
model.update({'wavelength':kc['wavelength']['mu'],
'nlam':kc['wavelength']['nlam'], })
append_kc_to_layers(model, kc, 'CH4')
tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'],
(model['nlay'],1,1))
if 'tau' not in model['layers']: model['layers'].update({'tau':{}})
model['layers']['tau'].update({'CH4':tau_CH4})
return
if kc_file.endswith('.fits'):
import pyfits
hdu = pyfits.open(kc_file)
kc = {'kc': hdu[0].data,
'pressures':hdu[2].data['pressures'],
'temperatures':hdu[3].data['temperatures'],
'g': hdu[4].data['g'],
'w': hdu[5].data['w'],
'ng': hdu[0].header['NG'],
}
model.update({'wavelength':hdu[1].data['wavelength'],
'nlam':len(hdu[1].data['wavelength']),
})
hdu.close()
append_kc_to_layers(model, kc, 'CH4')
tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'],
(model['nlay'],1,1))
if 'tau' not in model['layers']: model['layers'].update({'tau':{}})
model['layers']['tau'].update({'CH4':tau_CH4})
return
def print_atmosphere_details(model):
logger.debug('model dictionary data structure:')
for item in model.keys():
logger.debug("{0:7s} - type: {2} - shape: {1}".format(
item, np.shape(model[item]), type(model[item])))
logger.debug("\natmosphere['layers'] dictionary data structure:")
for item in model['layers'].keys():
logger.debug("{0:7s} - type: {2} - shape: {1}".format(
item, np.shape(model['layers'][item]), type(model['layers'][item])))
def set_cia(model, scale=4.0, show_figure=False):
"""Append collision-induced-absorption opacity for
N2-N2 and H2-N2 (in the near-IR) to the atmosphere
data structure, model."""
import pyfits
import os
fits = pyfits.open(os.path.join(os.getenv('RTDATAPATH'),
'gas_opacity/CIA/N2_N2.fits'))
k_N2N2 = fits[0].data
fits.close()
fits = pyfits.open(os.path.join(os.getenv('RTDATAPATH'),
'gas_opacity/CIA/H2_N2.fits'))
k_H2N2 = fits[0].data
fits.close()
if 'wavelength' not in model.keys():
logger.warning('Set wavelength scale first (e.g., with CH4 opacity.)')
return None
tau_H2N2 = np.empty((model['nlay'],model['nlam']))
tau_N2N2 = np.empty((model['nlay'],model['nlam']))
layers = model['layers']
N0 = 2.686e19 # Loschmidt number
for i in range(model['nlay']):
k_H2N2_interp = np.interp(model['wavelength'],
(1e4/k_H2N2[::-1,0]), scale*k_H2N2[::-1,1],)
k_N2N2_interp = np.interp(model['wavelength'],
(1e4/k_N2N2[::-1,0]), scale*k_N2N2[::-1,1],)
tau_H2N2[i,:] = k_H2N2_interp*layers['kmamg'][i] * \
layers['n'][i]/N0 * \
layers['m_N2'][i]*layers['m_H2']
tau_N2N2[i,:] = k_N2N2_interp*layers['kmamg'][i] * \
layers['n'][i]/N0 * \
layers['m_N2'][i]*layers['m_N2'][i]
layers['tau'].update({'H2_N2':tau_H2N2,
'N2_N2':tau_N2N2,})
    if show_figure:
        # subplots comes from matplotlib; imported here since it is only
        # needed for these diagnostic plots
        from matplotlib.pyplot import subplots
        fig, ax = subplots(figsize=(16,4))
ax.plot(k_H2N2[:,0], k_H2N2[:,1], 'k', drawstyle='steps-mid')
ax.set_xlabel('wavenumber (cm$^{-1}$)')
ax.set_ylabel(r'km$^{-1}$ amagat$^{-2}$')
ax.set_xlim(4000,5000)
fig, ax = subplots(figsize=(16,4))
ax.plot(k_N2N2[:,0], k_N2N2[:,1], 'k', drawstyle='steps-mid')
ax.set_xlabel('wavenumber (cm$^{-1}$)')
ax.set_ylabel(r'km$^{-1}$ amagat$^{-2}$')
ax.set_xlim(4000,5000) ; | gpl-2.0 | 4,651,276,065,316,480,000 | 39.004831 | 91 | 0.533333 | false |
ArcherSys/ArcherSys | node_modules/npm/node_modules/node-gyp/gyp/buildbot/buildbot_run.py | 1 | 18134 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Argument-less script to select what to run on the buildbots."""
import os
import shutil
import subprocess
import sys
if sys.platform in ['win32', 'cygwin']:
EXE_SUFFIX = '.exe'
else:
EXE_SUFFIX = ''
BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
ROOT_DIR = os.path.dirname(TRUNK_DIR)
ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
OUT_DIR = os.path.join(TRUNK_DIR, 'out')
def CallSubProcess(*args, **kwargs):
"""Wrapper around subprocess.call which treats errors as build exceptions."""
retcode = subprocess.call(*args, **kwargs)
if retcode != 0:
print '@@@STEP_EXCEPTION@@@'
sys.exit(1)
def PrepareCmake():
"""Build CMake 2.8.8 since the version in Precise is 2.8.7."""
if os.environ['BUILDBOT_CLOBBER'] == '1':
print '@@@BUILD_STEP Clobber CMake checkout@@@'
shutil.rmtree(CMAKE_DIR)
# We always build CMake 2.8.8, so no need to do anything
# if the directory already exists.
if os.path.isdir(CMAKE_DIR):
return
print '@@@BUILD_STEP Initialize CMake checkout@@@'
os.mkdir(CMAKE_DIR)
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
CallSubProcess(['git', 'config', '--global',
'user.email', '[email protected]'])
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
print '@@@BUILD_STEP Sync CMake@@@'
CallSubProcess(
['git', 'clone',
'--depth', '1',
'--single-branch',
'--branch', 'v2.8.8',
'--',
'git://cmake.org/cmake.git',
CMAKE_DIR],
cwd=CMAKE_DIR)
print '@@@BUILD_STEP Build CMake@@@'
CallSubProcess(
['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
cwd=CMAKE_DIR)
CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
def PrepareAndroidTree():
"""Prepare an Android tree to run 'android' format tests."""
if os.environ['BUILDBOT_CLOBBER'] == '1':
print '@@@BUILD_STEP Clobber Android checkout@@@'
shutil.rmtree(ANDROID_DIR)
# The release of Android we use is static, so there's no need to do anything
# if the directory already exists.
if os.path.isdir(ANDROID_DIR):
return
print '@@@BUILD_STEP Initialize Android checkout@@@'
os.mkdir(ANDROID_DIR)
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
CallSubProcess(['git', 'config', '--global',
'user.email', '[email protected]'])
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
CallSubProcess(
['repo', 'init',
'-u', 'https://android.googlesource.com/platform/manifest',
'-b', 'android-4.2.1_r1',
'-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
cwd=ANDROID_DIR)
print '@@@BUILD_STEP Sync Android@@@'
CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR)
print '@@@BUILD_STEP Build Android@@@'
CallSubProcess(
['/bin/bash',
'-c', 'source build/envsetup.sh && lunch full-eng && make -j4'],
cwd=ANDROID_DIR)
def GypTestFormat(title, format=None, msvs_version=None):
"""Run the gyp tests for a given format, emitting annotator tags.
See annotator docs at:
https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
Args:
format: gyp format to test.
Returns:
    0 for success, 1 for failure.
"""
if not format:
format = title
print '@@@BUILD_STEP ' + title + '@@@'
sys.stdout.flush()
env = os.environ.copy()
if msvs_version:
env['GYP_MSVS_VERSION'] = msvs_version
command = ' '.join(
[sys.executable, 'trunk/gyptest.py',
'--all',
'--passed',
'--format', format,
'--path', CMAKE_BIN_DIR,
'--chdir', 'trunk'])
if format == 'android':
# gyptest needs the environment setup from envsetup/lunch in order to build
# using the 'android' backend, so this is done in a single shell.
retcode = subprocess.call(
['/bin/bash',
'-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s'
% (ROOT_DIR, command)],
cwd=ANDROID_DIR, env=env)
else:
retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
if retcode:
# Emit failure tag, and keep going.
print '@@@STEP_FAILURE@@@'
return 1
return 0
def GypBuild():
# Dump out/ directory.
print '@@@BUILD_STEP cleanup@@@'
print 'Removing %s...' % OUT_DIR
shutil.rmtree(OUT_DIR, ignore_errors=True)
print 'Done.'
retcode = 0
# The Android gyp bot runs on linux so this must be tested first.
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
PrepareAndroidTree()
retcode += GypTestFormat('android')
elif sys.platform.startswith('linux'):
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('make')
PrepareCmake()
retcode += GypTestFormat('cmake')
elif sys.platform == 'darwin':
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('xcode')
retcode += GypTestFormat('make')
elif sys.platform == 'win32':
retcode += GypTestFormat('ninja')
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
else:
raise Exception('Unknown platform')
if retcode:
# TODO(bradnelson): once the annotator supports a postscript (section for
# after the build proper that could be used for cumulative failures),
# use that instead of this. This isolates the final return value so
# that it isn't misattributed to the last stage.
print '@@@BUILD_STEP failures@@@'
sys.exit(retcode)
if __name__ == '__main__':
GypBuild()
| mit | 4,987,594,992,532,851,000 | 30.482639 | 117 | 0.635657 | false |
dawsonjon/Chips-2.0 | chips/compiler/tokens.py | 1 | 9314 | __author__ = "Jon Dawson"
__copyright__ = "Copyright (C) 2012, Jonathan P Dawson"
__version__ = "0.1"
import os.path
import subprocess
from chips.compiler.exceptions import C2CHIPError
operators = [
"!", "~", "+", "-", "*", "/", "//", "%", "=", "==", "<", ">", "<=", ">=",
"!=", "|", "&", "^", "||", "&&", "(", ")", "{", "}", "[", "]", ";", "<<",
">>", ",", "+=", "-=", "*=", "/=", "%=", "&=", "|=", "<<=", ">>=", "^=",
"++", "--", "?", ":", ".", "->",
]
class Tokens:
"""Break the input file into a stream of tokens,
provide functions to traverse the stream."""
def __init__(self, filename, parameters={}):
self.tokens = []
self.definitions = []
self.filename = None
self.lineno = None
self.scan(
os.path.join(os.path.dirname(__file__), "builtins.h"),
external_preprocessor=False)
self.scan(os.path.abspath(filename))
tokens = []
for token in self.tokens:
f, l, t = token
if t in parameters:
tokens.append((f, l, str(parameters[t])))
else:
tokens.append(token)
self.tokens = tokens
def scan(self,
filename,
input_file=None,
parameters={},
external_preprocessor=True):
"""Convert the test file into tokens"""
self.filename = filename
if external_preprocessor:
directory = os.path.abspath(__file__)
directory = os.path.dirname(directory)
directory = os.path.join(directory, "include")
cpp_commands = [
"cpp",
"-nostdinc",
"-isystem",
directory,
filename]
pipe = subprocess.Popen(cpp_commands, stdout=subprocess.PIPE)
input_file = pipe.stdout
else:
if input_file is None:
try:
input_file = open(self.filename)
except IOError:
raise C2CHIPError("Cannot open file: " + self.filename)
token = []
tokens = []
self.lineno = 1
jump = False
for line in input_file:
# include files
line = line + " "
if jump:
if line.strip().startswith("#endif"):
jump = False
if line.strip().startswith("#else"):
jump = False
self.lineno += 1
continue
elif external_preprocessor and line.strip().startswith("#"):
l = line.strip()
l = l.lstrip("#")
l = l.split('"')
lineno = int(l[0].strip())
self.lineno = lineno
filename = l[1].strip().strip('"')
self.filename = filename
continue
elif line.strip().startswith("#include"):
filename = self.filename
lineno = self.lineno
self.tokens.extend(tokens)
if line.strip().endswith(">"):
directory = os.path.abspath(__file__)
directory = os.path.dirname(directory)
directory = os.path.join(directory, "include")
else:
directory = os.path.abspath(self.filename)
directory = os.path.dirname(directory)
self.filename = line.strip().replace(
"#include", "").strip(' ><"')
self.filename = os.path.join(directory, self.filename)
self.scan(self.filename)
self.lineno = lineno
self.filename = filename
tokens = []
self.lineno += 1
continue
elif line.strip().startswith("#define"):
definition = line.strip().split(" ")[1]
self.definitions.append(definition)
self.lineno += 1
continue
elif line.strip().startswith("#undef"):
definition = line.strip().split(" ")[1]
self.definitions.remove(definition)
self.lineno += 1
continue
elif line.strip().startswith("#ifdef"):
definition = line.strip().split(" ")[1]
if definition not in self.definitions:
jump = True
self.lineno += 1
continue
elif line.strip().startswith("#ifndef"):
definition = line.strip().split(" ")[1]
if definition in self.definitions:
jump = True
self.lineno += 1
continue
elif line.strip().startswith("#else"):
jump = True
self.lineno += 1
continue
elif line.strip().startswith("#endif"):
self.lineno += 1
continue
newline = True
for char in line:
if not token:
token = char
# c style comment
elif (token + char).startswith("/*"):
if (token + char).endswith("*/"):
token = ""
else:
token += char
# c++ style comment
elif token.startswith("//"):
if newline:
token = char
else:
token += char
# identifier
elif token[0].isalpha():
if char.isalnum() or char == "_":
token += char
else:
tokens.append((self.filename, self.lineno, token))
token = char
# number
elif token[0].isdigit():
if char.upper() in "0123456789ABCDEFXUL.":
token += char
elif token.upper().endswith("E") and char in ["+", "-"]:
token += char
else:
tokens.append((self.filename, self.lineno, token))
token = char
# string literal
elif token.startswith('"'):
if char == '"' and previous_char != "\\":
token += char
tokens.append((self.filename, self.lineno, token))
token = ""
else:
# remove dummy space from the end of a line
if newline:
token = token[:-1]
previous_char = char
token += char
# character literal
elif token.startswith("'"):
if char == "'":
token += char
tokens.append((self.filename, self.lineno, token))
token = ""
else:
token += char
# operator
elif token in operators:
if token + char in operators:
token += char
else:
tokens.append((self.filename, self.lineno, token))
token = char
else:
token = char
newline = False
self.lineno += 1
self.tokens.extend(tokens)
def error(self, string):
"""
Generate an error message (including the filename and line number)
"""
raise C2CHIPError(string + "\n", self.filename, self.lineno)
def peek(self):
"""
Return the next token in the stream, but don't consume it.
"""
if self.tokens:
return self.tokens[0][2]
else:
return ""
def peek_next(self):
"""
Return the next next token in the stream, but don't consume it.
"""
if len(self.tokens) > 1:
return self.tokens[1][2]
else:
return ""
def get(self):
"""Return the next token in the stream, and consume it."""
if self.tokens:
self.lineno = self.tokens[0][1]
self.filename = self.tokens[0][0]
try:
filename, lineno, token = self.tokens.pop(0)
except IndexError:
self.error("Unexpected end of file")
return token
def end(self):
"""Return True if all the tokens have been consumed."""
return not self.tokens
def expect(self, expected):
"""Consume the next token in the stream,
generate an error if it is not as expected."""
try:
filename, lineno, actual = self.tokens.pop(0)
except IndexError:
self.error("Unexpected end of file")
if self.tokens:
self.lineno = self.tokens[0][1]
self.filename = self.tokens[0][0]
if actual == expected:
return
else:
self.error("Expected: %s, got: %s" % (expected, actual))
| mit | 3,926,535,980,926,046,000 | 31.340278 | 77 | 0.432467 | false |
OnroerendErfgoed/skosprovider_heritagedata | skosprovider_heritagedata/utils.py | 1 | 5488 | # -*- coding: utf-8 -*-
'''
Utility functions for :mod:`skosprovider_heritagedata`.
'''
import requests
from skosprovider.skos import (
Concept,
Label,
Note,
ConceptScheme)
from skosprovider.exceptions import ProviderUnavailableException
import logging
import sys
log = logging.getLogger(__name__)
PY3 = sys.version_info[0] == 3
if PY3: # pragma: no cover
binary_type = bytes
else: # pragma: no cover
binary_type = str
import rdflib
from rdflib.term import URIRef
from rdflib.namespace import RDF, SKOS, DC, DCTERMS, RDFS
PROV = rdflib.Namespace('http://www.w3.org/ns/prov#')
def conceptscheme_from_uri(conceptscheme_uri, **kwargs):
'''
Read a SKOS Conceptscheme from a :term:`URI`
:param string conceptscheme_uri: URI of the conceptscheme.
:rtype: skosprovider.skos.ConceptScheme
'''
s = kwargs.get('session', requests.Session())
graph = uri_to_graph('%s.rdf' % (conceptscheme_uri), session=s)
notes = []
labels = []
if graph is not False:
for s, p, o in graph.triples((URIRef(conceptscheme_uri), RDFS.label, None)):
label = Label(o.toPython(), "prefLabel", 'en')
labels.append(label)
for s, p, o in graph.triples((URIRef(conceptscheme_uri), DCTERMS.description, None)):
note = Note(o.toPython(), "scopeNote", 'en')
notes.append(note)
# get the conceptscheme
conceptscheme = ConceptScheme(
conceptscheme_uri,
labels=labels,
notes=notes
)
return conceptscheme
def things_from_graph(graph, concept_scheme):
'''
Read concepts and collections from a graph.
:param rdflib.Graph graph: Graph to read from.
:param skosprovider.skos.ConceptScheme concept_scheme: Conceptscheme the
concepts and collections belong to.
:rtype: :class:`list`
'''
clist = []
for sub, pred, obj in graph.triples((None, RDF.type, SKOS.Concept)):
uri = str(sub)
con = Concept(
id=_split_uri(uri, 1),
uri=uri,
concept_scheme = concept_scheme,
labels = _create_from_subject_typelist(graph, sub, Label.valid_types),
notes = _create_from_subject_typelist(graph, sub, Note.valid_types),
broader = _create_from_subject_predicate(graph, sub, SKOS.broader),
narrower = _create_from_subject_predicate(graph, sub, SKOS.narrower),
related = _create_from_subject_predicate(graph, sub, SKOS.related),
subordinate_arrays = []
)
clist.append(con)
# at this moment, Heritagedata does not support SKOS.Collection
# for sub, pred, obj in graph.triples((None, RDF.type, SKOS.Collection)):
# uri = str(sub)
# col = Collection(_split_uri(uri, 1), uri=uri)
# col.members = _create_from_subject_predicate(sub, SKOS.member)
# col.labels = _create_from_subject_typelist(sub, Label.valid_types)
# col.notes = _create_from_subject_typelist(sub, Note.valid_types)
# clist.append(col)
return clist
def _create_from_subject_typelist(graph, subject, typelist):
list = []
for p in typelist:
term = SKOS.term(p)
list.extend(_create_from_subject_predicate(graph, subject, term))
return list
def _create_from_subject_predicate(graph, subject, predicate):
list = []
for s, p, o in graph.triples((subject, predicate, None)):
type = predicate.split('#')[-1]
if Label.is_valid_type(type):
o = _create_label(o, type)
elif Note.is_valid_type(type):
o = _create_note(o, type)
else:
o = _split_uri(o, 1)
if o:
list.append(o)
return list
def _create_label(literal, type):
language = literal.language
    if language is None:
        language = 'und'  # use the undefined language code when no language is given
    return Label(literal.toPython(), type, language)
def _create_note(literal, type):
if not Note.is_valid_type(type):
raise ValueError('Type of Note is not valid.')
return Note(text_(literal.value, encoding="utf-8"), type, _get_language_from_literal(literal))
def _get_language_from_literal(data):
if data.language is None:
return 'und' # return undefined code when no language
return text_(data.language, encoding="utf-8")
def _split_uri(uri, index):
return uri.strip('/').rsplit('/', 1)[index]
def uri_to_graph(uri, **kwargs):
'''
:param string uri: :term:`URI` where the RDF data can be found.
:rtype: rdflib.Graph
:raises skosprovider.exceptions.ProviderUnavailableException: if the
heritagedata.org services are down
'''
s = kwargs.get('session', requests.Session())
graph = rdflib.Graph()
try:
res = s.get(uri)
except requests.ConnectionError as e:
raise ProviderUnavailableException("URI not available: %s" % uri)
if res.status_code == 404:
return False
graph.parse(data=res.content)
    # heritagedata.org returns an empty page/graph when a resource does not exist (status_code 200). For this reason we return False if the graph is empty
if len(graph) == 0:
return False
return graph
def text_(s, encoding='latin-1', errors='strict'):
""" If ``s`` is an instance of ``binary_type``, return
``s.decode(encoding, errors)``, otherwise return ``s``"""
if isinstance(s, binary_type):
return s.decode(encoding, errors)
return s
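# Illustrative usage sketch (not part of the original module); the scheme URI
# below is only an example of the heritagedata.org layout:
#
#   cs = conceptscheme_from_uri('http://purl.org/heritagedata/schemes/eh_period')
#   graph = uri_to_graph('http://purl.org/heritagedata/schemes/eh_period.rdf')
#   if graph is not False:
#       concepts = things_from_graph(graph, cs)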
| mit | 6,342,921,371,170,018,000 | 30.54023 | 152 | 0.636662 | false |
mr-ping/WebTesting | main.py | 1 | 5059 | #!/usr/bin/python
import os
import sys
import argparse
from log import Log
from chat import Trend
from chat import plot_trend as pl
from settings import *
def parse_args():
"""
    Parse shell command arguments and override the appropriate params
    from the settings module
:return: None
"""
parser = argparse.ArgumentParser(version=VERSION)
parser.add_argument('-u', action='store', dest='url')
parser.add_argument('-f', action='store', dest='url_file')
parser.add_argument('-t', action='store', dest='target_log_file')
parser.add_argument('-l', action='store', dest='log_file')
parser.add_argument('-p', action='store_true', dest='plotting', default=True)
parser.add_argument('-m', action='store', dest='max_allowed_concurrent', type=int)
parser.add_argument('-b', action='store', dest='base_concurrent', type=int)
parser.add_argument('-s', action='store', dest='step_concurrent', type=int)
result = parser.parse_args()
if result.url:
global url
url = result.url
if result.url_file:
global url_file
url_file = result.url_file
if result.target_log_file:
global target_file
target_file = result.target_log_file
if result.log_file:
global log_file
log_file = result.log_file
if result.plotting:
global plotting
plotting = result.plotting
if result.max_allowed_concurrent:
global max_concurrent
max_concurrent = result.max_allowed_concurrent
if result.base_concurrent:
global base_concurrent
base_concurrent = result.base_concurrent
if result.step_concurrent:
global step_concurrent
step_concurrent = result.step_concurrent
def check_url_source():
"""
    Check whether the url comes from the command line or from a urls file.
    :return: A flag that represents the source of the urls. String
"""
global plotting
if not url_file and not url:
plotting = False
        sys.stderr.write('You should specify the url source.')
elif url_file and url:
plotting = False
        sys.stderr.write('Url source must come from either a url address or a url file')
elif url_file:
exist = os.path.exists(url_file)
if exist:
return 'file'
else:
plotting = False
sys.stderr.write('No such urls file.')
elif url:
return 'address'
def test(base_concurrent):
"""
    Main method that runs the testing.
    Loop the siege tool until the stop conditions are satisfied,
    and generate a new log file from the siege log file.
    :param base_concurrent: initial number of concurrent users
:return: None
"""
url_source = check_url_source()
while True:
for i in range(num_samples):
if url_source == 'address':
#os.system('siege -c {concurrent} -t {duration} -l {address}'\
os.system('siege -c {concurrent} -r {repeat} -l {address}'\
.format(address=url,
concurrent=base_concurrent,
#duration=duration))
repeat=repeat))
elif url_source == 'file':
#os.system('siege -c {concurrent} -t {duration} -f {url_file} -l'\
os.system('siege -c {concurrent} -r {repeat} -f {url_file} -l'\
.format(url_file=url_file,
concurrent=base_concurrent,
#duration=duration))
repeat=repeat))
last = Log.get_last_logs(log_file, siege_log_line_length, 1,\
base_concurrent)
Log.add_new_log(target_file, last)
base_concurrent += step_concurrent
log = Log(target_file)
if log.get_last_arrive_rate(num_samples) < (1-fails_allowed) \
or base_concurrent > max_concurrent:
break
def plot():
"""
    Plot charts using the data analysed from the testing log.
:return: None
"""
log = Log(target_file)
trans_rate_dict = log.get_steps_trans_rate()
arrive_rate_dict = log.get_steps_arrive_rate()
resp_time_dict = log.get_steps_resp_time()
trans_trend = Trend('Transaction Rate',
'simulated users',
'trans rate (trans/sec)',
'g', 1, 'bar',
step_concurrent/2)
trans_trend.get_points(trans_rate_dict)
arrive_trend = Trend('Arrive Rate',
'simulated users',
'arrive rate',
'g', 2, 'line')
arrive_trend.get_points(arrive_rate_dict)
resp_trend = Trend('Resp Time',
'simulated users',
'time(sec)',
'r', 2, 'line')
resp_trend.get_points(resp_time_dict)
pl(trans_trend, resp_trend, arrive_trend)
if __name__ == '__main__':
parse_args()
test(base_concurrent)
if plotting:
plot()
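# Illustrative invocation sketch (not part of the original script); the flags
# map onto parse_args() above and assume the siege tool is installed:
#
#   python main.py -u http://example.com/ -b 10 -s 5 -m 100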
| mit | 379,554,074,709,667,840 | 31.63871 | 86 | 0.567108 | false |
rickerc/neutron_audit | neutron/openstack/common/rpc/impl_kombu.py | 1 | 32063 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import itertools
import socket
import ssl
import sys
import time
import uuid
import eventlet
import greenlet
import kombu
import kombu.connection
import kombu.entity
import kombu.messaging
from oslo.config import cfg
from neutron.openstack.common import excutils
from neutron.openstack.common.gettextutils import _
from neutron.openstack.common import network_utils
from neutron.openstack.common.rpc import amqp as rpc_amqp
from neutron.openstack.common.rpc import common as rpc_common
kombu_opts = [
cfg.StrOpt('kombu_ssl_version',
default='',
help='SSL version to use (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_keyfile',
default='',
help='SSL key file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_certfile',
default='',
help='SSL cert file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_ca_certs',
default='',
help=('SSL certification authority file '
'(valid only if SSL enabled)')),
cfg.StrOpt('rabbit_host',
default='localhost',
help='The RabbitMQ broker address where a single node is used'),
cfg.IntOpt('rabbit_port',
default=5672,
help='The RabbitMQ broker port where a single node is used'),
cfg.ListOpt('rabbit_hosts',
default=['$rabbit_host:$rabbit_port'],
help='RabbitMQ HA cluster host:port pairs'),
cfg.BoolOpt('rabbit_use_ssl',
default=False,
help='connect over SSL for RabbitMQ'),
cfg.StrOpt('rabbit_userid',
default='guest',
help='the RabbitMQ userid'),
cfg.StrOpt('rabbit_password',
default='guest',
help='the RabbitMQ password',
secret=True),
cfg.StrOpt('rabbit_virtual_host',
default='/',
help='the RabbitMQ virtual host'),
cfg.IntOpt('rabbit_retry_interval',
default=1,
help='how frequently to retry connecting with RabbitMQ'),
cfg.IntOpt('rabbit_retry_backoff',
default=2,
help='how long to backoff for between retries when connecting '
'to RabbitMQ'),
cfg.IntOpt('rabbit_max_retries',
default=0,
help='maximum retries with trying to connect to RabbitMQ '
'(the default of 0 implies an infinite retry count)'),
cfg.BoolOpt('rabbit_ha_queues',
default=False,
                help='use H/A queues in RabbitMQ (x-ha-policy: all). '
'You need to wipe RabbitMQ database when '
'changing this option.'),
]
cfg.CONF.register_opts(kombu_opts)
LOG = rpc_common.LOG
def _get_queue_arguments(conf):
"""Construct the arguments for declaring a queue.
If the rabbit_ha_queues option is set, we declare a mirrored queue
as described here:
http://www.rabbitmq.com/ha.html
Setting x-ha-policy to all means that the queue will be mirrored
to all nodes in the cluster.
"""
return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}
class ConsumerBase(object):
"""Consumer base class."""
def __init__(self, channel, callback, tag, **kwargs):
"""Declare a queue on an amqp channel.
'channel' is the amqp channel to use
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
queue name, exchange name, and other kombu options are
passed in here as a dictionary.
"""
self.callback = callback
self.tag = str(tag)
self.kwargs = kwargs
self.queue = None
self.reconnect(channel)
def reconnect(self, channel):
"""Re-declare the queue after a rabbit reconnect."""
self.channel = channel
self.kwargs['channel'] = channel
self.queue = kombu.entity.Queue(**self.kwargs)
self.queue.declare()
def consume(self, *args, **kwargs):
"""Actually declare the consumer on the amqp channel. This will
start the flow of messages from the queue. Using the
Connection.iterconsume() iterator will process the messages,
calling the appropriate callback.
If a callback is specified in kwargs, use that. Otherwise,
use the callback passed during __init__()
If kwargs['nowait'] is True, then this call will block until
a message is read.
Messages will automatically be acked if the callback doesn't
raise an exception
"""
options = {'consumer_tag': self.tag}
options['nowait'] = kwargs.get('nowait', False)
callback = kwargs.get('callback', self.callback)
if not callback:
raise ValueError("No callback defined")
def _callback(raw_message):
message = self.channel.message_to_python(raw_message)
try:
msg = rpc_common.deserialize_msg(message.payload)
callback(msg)
except Exception:
LOG.exception(_("Failed to process message... skipping it."))
finally:
message.ack()
self.queue.consume(*args, callback=_callback, **options)
def cancel(self):
"""Cancel the consuming from the queue, if it has started."""
try:
self.queue.cancel(self.tag)
except KeyError as e:
# NOTE(comstud): Kludge to get around a amqplib bug
if str(e) != "u'%s'" % self.tag:
raise
self.queue = None
class DirectConsumer(ConsumerBase):
"""Queue/consumer class for 'direct'."""
def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
"""Init a 'direct' queue.
'channel' is the amqp channel to use
'msg_id' is the msg_id to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=msg_id,
type='direct',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(DirectConsumer, self).__init__(channel,
callback,
tag,
name=msg_id,
exchange=exchange,
routing_key=msg_id,
**options)
class TopicConsumer(ConsumerBase):
"""Consumer class for 'topic'."""
def __init__(self, conf, channel, topic, callback, tag, name=None,
exchange_name=None, **kwargs):
"""Init a 'topic' queue.
:param channel: the amqp channel to use
:param topic: the topic to listen on
:paramtype topic: str
:param callback: the callback to call when messages are received
:param tag: a unique ID for the consumer on the channel
:param name: optional queue name, defaults to topic
:paramtype name: str
Other kombu options may be passed as keyword arguments
"""
# Default options
options = {'durable': conf.amqp_durable_queues,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
exchange = kombu.entity.Exchange(name=exchange_name,
type='topic',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(TopicConsumer, self).__init__(channel,
callback,
tag,
name=name or topic,
exchange=exchange,
routing_key=topic,
**options)
class FanoutConsumer(ConsumerBase):
"""Consumer class for 'fanout'."""
def __init__(self, conf, channel, topic, callback, tag, **kwargs):
"""Init a 'fanout' queue.
'channel' is the amqp channel to use
'topic' is the topic to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
unique = uuid.uuid4().hex
exchange_name = '%s_fanout' % topic
queue_name = '%s_fanout_%s' % (topic, unique)
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=exchange_name, type='fanout',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(FanoutConsumer, self).__init__(channel, callback, tag,
name=queue_name,
exchange=exchange,
routing_key=topic,
**options)
class Publisher(object):
"""Base Publisher class."""
def __init__(self, channel, exchange_name, routing_key, **kwargs):
"""Init the Publisher class with the exchange_name, routing_key,
and other options
"""
self.exchange_name = exchange_name
self.routing_key = routing_key
self.kwargs = kwargs
self.reconnect(channel)
def reconnect(self, channel):
"""Re-establish the Producer after a rabbit reconnection."""
self.exchange = kombu.entity.Exchange(name=self.exchange_name,
**self.kwargs)
self.producer = kombu.messaging.Producer(exchange=self.exchange,
channel=channel,
routing_key=self.routing_key)
def send(self, msg, timeout=None):
"""Send a message."""
if timeout:
#
# AMQP TTL is in milliseconds when set in the header.
#
self.producer.publish(msg, headers={'ttl': (timeout * 1000)})
else:
self.producer.publish(msg)
class DirectPublisher(Publisher):
"""Publisher class for 'direct'."""
def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(DirectPublisher, self).__init__(channel, msg_id, msg_id,
type='direct', **options)
class TopicPublisher(Publisher):
"""Publisher class for 'topic'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': conf.amqp_durable_queues,
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = rpc_amqp.get_control_exchange(conf)
super(TopicPublisher, self).__init__(channel,
exchange_name,
topic,
type='topic',
**options)
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic,
None, type='fanout', **options)
class NotifyPublisher(TopicPublisher):
"""Publisher class for 'notify'."""
def __init__(self, conf, channel, topic, **kwargs):
self.durable = kwargs.pop('durable', conf.amqp_durable_queues)
self.queue_arguments = _get_queue_arguments(conf)
super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs)
def reconnect(self, channel):
super(NotifyPublisher, self).reconnect(channel)
# NOTE(jerdfelt): Normally the consumer would create the queue, but
# we do this to ensure that messages don't get dropped if the
# consumer is started after we do
queue = kombu.entity.Queue(channel=channel,
exchange=self.exchange,
durable=self.durable,
name=self.routing_key,
routing_key=self.routing_key,
queue_arguments=self.queue_arguments)
queue.declare()
class Connection(object):
"""Connection object."""
pool = None
def __init__(self, conf, server_params=None):
self.consumers = []
self.consumer_thread = None
self.proxy_callbacks = []
self.conf = conf
self.max_retries = self.conf.rabbit_max_retries
# Try forever?
if self.max_retries <= 0:
self.max_retries = None
self.interval_start = self.conf.rabbit_retry_interval
self.interval_stepping = self.conf.rabbit_retry_backoff
# max retry-interval = 30 seconds
self.interval_max = 30
self.memory_transport = False
if server_params is None:
server_params = {}
# Keys to translate from server_params to kombu params
server_params_to_kombu_params = {'username': 'userid'}
ssl_params = self._fetch_ssl_params()
params_list = []
for adr in self.conf.rabbit_hosts:
hostname, port = network_utils.parse_host_port(
adr, default_port=self.conf.rabbit_port)
params = {
'hostname': hostname,
'port': port,
'userid': self.conf.rabbit_userid,
'password': self.conf.rabbit_password,
'virtual_host': self.conf.rabbit_virtual_host,
}
for sp_key, value in server_params.iteritems():
p_key = server_params_to_kombu_params.get(sp_key, sp_key)
params[p_key] = value
if self.conf.fake_rabbit:
params['transport'] = 'memory'
if self.conf.rabbit_use_ssl:
params['ssl'] = ssl_params
params_list.append(params)
self.params_list = params_list
brokers_count = len(self.params_list)
self.next_broker_indices = itertools.cycle(range(brokers_count))
self.memory_transport = self.conf.fake_rabbit
self.connection = None
self.reconnect()
def _fetch_ssl_params(self):
"""Handles fetching what ssl params should be used for the connection
(if any).
"""
ssl_params = dict()
# http://docs.python.org/library/ssl.html - ssl.wrap_socket
if self.conf.kombu_ssl_version:
ssl_params['ssl_version'] = self.conf.kombu_ssl_version
if self.conf.kombu_ssl_keyfile:
ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile
if self.conf.kombu_ssl_certfile:
ssl_params['certfile'] = self.conf.kombu_ssl_certfile
if self.conf.kombu_ssl_ca_certs:
ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs
# We might want to allow variations in the
# future with this?
ssl_params['cert_reqs'] = ssl.CERT_REQUIRED
if not ssl_params:
# Just have the default behavior
return True
else:
# Return the extended behavior
return ssl_params
def _connect(self, params):
"""Connect to rabbit. Re-establish any queues that may have
been declared before if we are reconnecting. Exceptions should
be handled by the caller.
"""
if self.connection:
LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params)
try:
self.connection.release()
except self.connection_errors:
pass
# Setting this in case the next statement fails, though
# it shouldn't be doing any network operations, yet.
self.connection = None
self.connection = kombu.connection.BrokerConnection(**params)
self.connection_errors = self.connection.connection_errors
if self.memory_transport:
# Kludge to speed up tests.
self.connection.transport.polling_interval = 0.0
self.consumer_num = itertools.count(1)
self.connection.connect()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
for consumer in self.consumers:
consumer.reconnect(self.channel)
LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') %
params)
def reconnect(self):
"""Handles reconnecting and re-establishing queues.
Will retry up to self.max_retries number of times.
self.max_retries = 0 means to retry forever.
Sleep between tries, starting at self.interval_start
seconds, backing off self.interval_stepping number of seconds
each attempt.
"""
attempt = 0
while True:
params = self.params_list[next(self.next_broker_indices)]
attempt += 1
try:
self._connect(params)
return
except (IOError, self.connection_errors) as e:
pass
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
log_info = {}
log_info['err_str'] = str(e)
log_info['max_retries'] = self.max_retries
log_info.update(params)
if self.max_retries and attempt == self.max_retries:
LOG.error(_('Unable to connect to AMQP server on '
'%(hostname)s:%(port)d after %(max_retries)d '
'tries: %(err_str)s') % log_info)
# NOTE(comstud): Copied from original code. There's
# really no better recourse because if this was a queue we
# need to consume on, we have no way to consume anymore.
sys.exit(1)
if attempt == 1:
sleep_time = self.interval_start or 1
elif attempt > 1:
sleep_time += self.interval_stepping
if self.interval_max:
sleep_time = min(sleep_time, self.interval_max)
log_info['sleep_time'] = sleep_time
LOG.error(_('AMQP server on %(hostname)s:%(port)d is '
'unreachable: %(err_str)s. Trying again in '
'%(sleep_time)d seconds.') % log_info)
time.sleep(sleep_time)
def ensure(self, error_callback, method, *args, **kwargs):
while True:
try:
return method(*args, **kwargs)
except (self.connection_errors, socket.timeout, IOError) as e:
if error_callback:
error_callback(e)
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
if error_callback:
error_callback(e)
self.reconnect()
def get_channel(self):
"""Convenience call for bin/clear_rabbit_queues."""
return self.channel
def close(self):
"""Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.connection.release()
self.connection = None
def reset(self):
"""Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.channel.close()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
self.consumers = []
def declare_consumer(self, consumer_cls, topic, callback):
"""Create a Consumer using the class that was passed in and
add it to our list of consumers
"""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback,
self.consumer_num.next())
self.consumers.append(consumer)
return consumer
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
"""Return an iterator that will consume from all queues/consumers."""
info = {'do_consume': True}
def _error_callback(exc):
if isinstance(exc, socket.timeout):
LOG.debug(_('Timed out waiting for RPC response: %s') %
str(exc))
raise rpc_common.Timeout()
else:
LOG.exception(_('Failed to consume message from queue: %s') %
str(exc))
info['do_consume'] = True
def _consume():
if info['do_consume']:
queues_head = self.consumers[:-1]
queues_tail = self.consumers[-1]
for queue in queues_head:
queue.consume(nowait=True)
queues_tail.consume(nowait=False)
info['do_consume'] = False
return self.connection.drain_events(timeout=timeout)
for iteration in itertools.count(0):
if limit and iteration >= limit:
raise StopIteration
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
"""Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
self.consumer_thread.wait()
except greenlet.GreenletExit:
pass
self.consumer_thread = None
def wait_on_proxy_callbacks(self):
"""Wait for all proxy callback threads to exit."""
for proxy_cb in self.proxy_callbacks:
proxy_cb.wait()
def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
"""Send to a publisher based on the publisher class."""
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs)
publisher.send(msg, timeout)
self.ensure(_error_callback, _publish)
def declare_direct_consumer(self, topic, callback):
"""Create a 'direct' queue.
In nova's use, this is generally a msg_id queue used for
responses for call/multicall
"""
self.declare_consumer(DirectConsumer, topic, callback)
def declare_topic_consumer(self, topic, callback=None, queue_name=None,
exchange_name=None):
"""Create a 'topic' consumer."""
self.declare_consumer(functools.partial(TopicConsumer,
name=queue_name,
exchange_name=exchange_name,
),
topic, callback)
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
"""Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
"""Send a 'topic' message."""
self.publisher_send(TopicPublisher, topic, msg, timeout)
def fanout_send(self, topic, msg):
"""Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
"""Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs)
def consume(self, limit=None):
"""Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
it.next()
except StopIteration:
return
def consume_in_thread(self):
"""Consumer from all queues/consumers in a greenthread."""
@excutils.forever_retry_uncaught_exceptions
def _consumer_thread():
try:
self.consume()
except greenlet.GreenletExit:
return
if self.consumer_thread is None:
self.consumer_thread = eventlet.spawn(_consumer_thread)
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
"""Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
if fanout:
self.declare_fanout_consumer(topic, proxy_cb)
else:
self.declare_topic_consumer(topic, proxy_cb)
def create_worker(self, topic, proxy, pool_name):
"""Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
self.declare_topic_consumer(topic, proxy_cb, pool_name)
def join_consumer_pool(self, callback, pool_name, topic,
exchange_name=None):
"""Register as a member of a group of consumers for a given topic from
the specified exchange.
Exactly one member of a given pool will receive each message.
        A message will be delivered to multiple pools if more than
        one is created.
"""
callback_wrapper = rpc_amqp.CallbackWrapper(
conf=self.conf,
callback=callback,
connection_pool=rpc_amqp.get_connection_pool(self.conf,
Connection),
)
self.proxy_callbacks.append(callback_wrapper)
self.declare_topic_consumer(
queue_name=pool_name,
topic=topic,
exchange_name=exchange_name,
callback=callback_wrapper,
)
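    # Illustrative sketch of the pool semantics described above (hypothetical
    # pool/topic names, not part of the original module). Members that join
    # with the same pool_name share one queue, so each message is handled once
    # per pool, while a second pool on the same topic receives its own copy:
    #
    #   conn_a.join_consumer_pool(cb_a, 'billing', 'notifications.info')
    #   conn_b.join_consumer_pool(cb_b, 'billing', 'notifications.info')    # shares load with conn_a
    #   conn_c.join_consumer_pool(cb_c, 'monitoring', 'notifications.info') # gets its own copy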
def create_connection(conf, new=True):
"""Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
return rpc_amqp.multicall(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
return rpc_amqp.call(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def cast(conf, context, topic, msg):
"""Sends a message on a topic without waiting for a response."""
return rpc_amqp.cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast(conf, context, topic, msg):
"""Sends a message on a fanout exchange without waiting for a response."""
return rpc_amqp.fanout_cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a topic to a specific server."""
return rpc_amqp.cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a fanout exchange to a specific server."""
return rpc_amqp.fanout_cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def notify(conf, context, topic, msg, envelope):
"""Sends a notification event on a topic."""
return rpc_amqp.notify(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection),
envelope)
def cleanup():
return rpc_amqp.cleanup(Connection.pool)
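# Illustrative use of the module-level helpers (hypothetical topic and message
# payload; conf and context come from the calling service; not part of the
# original module):
#
#   reply = call(conf, context, 'compute', {'method': 'ping', 'args': {}}, timeout=30)
#   cast(conf, context, 'compute', {'method': 'refresh_cache', 'args': {}})
#   cleanup()   # release the shared Connection pool, e.g. on service shutdown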
| apache-2.0 | -1,269,175,068,643,578,600 | 37.124851 | 79 | 0.558993 | false |
raytung/Slice | account/hooks.py | 1 | 2621 | import hashlib
import random
from django.core.mail import send_mail
from django.template.loader import render_to_string
from account.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
class AccountDefaultHookSet(object):
def send_invitation_email(self, to, ctx):
subject = render_to_string("account/email/invite_user_subject.txt", ctx)
message = render_to_string("account/email/invite_user.txt", ctx)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to)
def send_confirmation_email(self, to, ctx):
subject = render_to_string("account/email/email_confirmation_subject.txt", ctx)
subject = "".join(subject.splitlines()) # remove superfluous line breaks
message = render_to_string("account/email/email_confirmation_message.txt", ctx)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to)
def send_password_change_email(self, to, ctx):
subject = render_to_string("account/email/password_change_subject.txt", ctx)
subject = "".join(subject.splitlines())
message = render_to_string("account/email/password_change.txt", ctx)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to)
def send_password_reset_email(self, to, ctx):
subject = render_to_string("account/email/password_reset_subject.txt", ctx)
subject = "".join(subject.splitlines())
message = render_to_string("account/email/password_reset.txt", ctx)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to)
def generate_random_token(self, extra=None, hash_func=hashlib.sha256):
if extra is None:
extra = []
bits = extra + [str(random.SystemRandom().getrandbits(512))]
return hash_func("".join(bits).encode("utf-8")).hexdigest()
def generate_signup_code_token(self, email=None):
return self.generate_random_token([email])
def generate_email_confirmation_token(self, email):
return self.generate_random_token([email])
def get_user_credentials(self, form, identifier_field):
try:
username = User.objects.get(email=form.cleaned_data[identifier_field])
username = username.username
except ObjectDoesNotExist:
username = form.cleaned_data[identifier_field]
return {
"username": username,
"password": form.cleaned_data["password"],
}
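    # Illustrative behaviour of get_user_credentials() (hypothetical data, not
    # part of the original module): if the identifier field holds an email
    # address that matches a User row, that row's username is used; otherwise
    # the raw field value is treated as the username.
    #
    #   creds = hookset.get_user_credentials(form, "email")
    #   # -> {"username": "alice", "password": "<form password>"} when
    #   #    "alice@example.com" belongs to the user "alice"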
class HookProxy(object):
def __getattr__(self, attr):
return getattr(settings.ACCOUNT_HOOKSET, attr)
hookset = HookProxy()
| mit | -1,658,567,781,542,435,800 | 38.119403 | 87 | 0.679893 | false |
anselal/antminer-monitor | antminermonitor/blueprints/user/models.py | 1 | 1102 | from flask_login.mixins import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from sqlalchemy import Column, Integer, VARCHAR
from antminermonitor.database import Base
class User(UserMixin, Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
username = Column(VARCHAR(64), index=True, unique=True)
email = Column(VARCHAR(120), index=True, unique=True)
password_hash = Column(VARCHAR(128))
surname = Column(VARCHAR(100))
firstname = Column(VARCHAR(100))
active = Column(Integer, default=1)
@property
def serialize(self):
return {
'id': self.id,
'username': self.username,
'firstname': self.firstname,
'surname': self.surname,
'email': self.email
}
def __repr__(self):
return '<User {}>'.format(self.username)
def set_password(self, password):
self.password_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password_hash, password)
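    # Illustrative round trip (hypothetical user, not part of the original
    # module): set_password() stores only a salted hash and check_password()
    # verifies a candidate against it.
    #
    #   u = User(username="alice")
    #   u.set_password("s3cret")
    #   u.check_password("s3cret")   # -> True
    #   u.check_password("wrong")    # -> False
    #   u.password_hash              # hash only, never the plain text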
| gpl-3.0 | 8,158,068,241,093,129,000 | 30.485714 | 73 | 0.653358 | false |
wurstmineberg/alltheitems.wurstmineberg.de | alltheitems/cloud.py | 1 | 67724 | import alltheitems.__main__ as ati
import bottle
import collections
import contextlib
import datetime
import itertools
import json
import pathlib
import random
import re
import xml.sax.saxutils
import alltheitems.item
import alltheitems.util
import alltheitems.world
class FillLevel:
def __init__(self, stack_size, total_items, max_slots, *, is_smart_chest=True):
self.stack_size = stack_size
self.total_items = total_items
self.max_slots = max_slots
self.is_smart_chest = is_smart_chest
def __str__(self):
if self.total_items == 0:
return '{} is empty.'.format('SmartChest' if self.is_smart_chest else 'Chest')
elif self.total_items == self.max_items:
return '{} is full.'.format('SmartChest' if self.is_smart_chest else 'Chest')
else:
stacks, items = self.stacks
return '{} is filled {}% ({} {stack}{}{} out of {} {stack}s).'.format('SmartChest' if self.is_smart_chest else 'Chest', int(100 * self.fraction), stacks, '' if stacks == 1 else 's', ' and {} item{}'.format(items, '' if items == 1 else 's') if items > 0 else '', self.max_slots, stack='item' if self.stack_size == 1 else 'stack')
@property
def fraction(self):
return self.total_items / self.max_items
def is_empty(self):
return self.total_items == 0
def is_full(self):
return self.total_items == self.max_items
@property
def max_items(self):
return self.max_slots * self.stack_size
@property
def stacks(self):
return divmod(self.total_items, self.stack_size)
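# Illustrative FillLevel arithmetic (hypothetical numbers, not part of the
# original module): with a stack size of 64, 130 items over 3 slots is two
# full stacks plus two items, i.e. 130 of a possible 192 items.
#
#   level = FillLevel(64, 130, 3)
#   level.stacks     # -> (2, 2)
#   level.fraction   # -> 0.6770833...
#   str(level)       # -> 'SmartChest is filled 67% (2 stacks and 2 items out of 3 stacks).'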
CONTAINERS = [ # layer coords of all counted container blocks in a SmartChest
(3, -7, 3),
(3, -7, 4),
(4, -7, 4),
(5, -7, 3),
(5, -7, 4),
(2, -6, 3),
(3, -6, 2),
(3, -6, 3),
(2, -5, 2),
(2, -5, 3),
(3, -5, 3),
(2, -4, 3),
(3, -4, 2),
(3, -4, 3),
(3, -3, 2),
(4, -3, 2),
(5, -3, 2),
(6, -3, 2),
(5, -2, 2),
(6, -2, 2),
(5, 0, 2),
(5, 0, 3)
]
STONE_VARIANTS = {
0: 'stone',
1: 'granite',
2: 'polished granite',
3: 'diorite',
4: 'polished diorite',
5: 'andesite',
6: 'polished andesite'
}
HOPPER_FACINGS = {
0: 'down',
1: 'up', #for droppers
2: 'north',
3: 'south',
4: 'west',
5: 'east'
}
TORCH_FACINGS = {
1: 'to its west',
2: 'to its east',
3: 'to its north',
4: 'to its south',
5: 'below'
}
HTML_COLORS = {
'cyan': '#0ff',
'cyan2': '#0ff',
'gray': '#777',
'red': '#f00',
'orange': '#f70',
'yellow': '#ff0',
'white': '#fff',
'white2': '#fff',
None: 'transparent'
}
def hopper_chain_connected(start_coords, end_coords, *, world=None, chunk_cache=None, block_at=None):
if world is None:
world = alltheitems.world.World()
if chunk_cache is None:
chunk_cache = {}
if block_at is None:
block_at=world.block_at
visited_coords = set()
x, y, z = start_coords
while (x, y, z) != end_coords:
if (x, y, z) in visited_coords:
return False, 'hopper chain points into itself at {} {} {}'.format(x, y, z)
visited_coords.add((x, y, z))
block = block_at(x, y, z, chunk_cache=chunk_cache)
if block['id'] != 'minecraft:hopper':
return False, 'block at {} {} {} is not a <a href="/block/minecraft/hopper">hopper</a>'.format(x, y, z, *end_coords)
if block['damage'] & 0x7 == 0:
y -= 1 # down
elif block['damage'] & 0x7 == 2:
z -= 1 # north
elif block['damage'] & 0x7 == 3:
z += 1 # south
elif block['damage'] & 0x7 == 4:
x -= 1 # west
elif block['damage'] & 0x7 == 5:
x += 1 # east
else:
raise ValueError('Unknown hopper facing {} at {}'.format(block['damage'] & 0x7, (x, y, z)))
return True, None
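# Note on the walk above (comment added for clarity, not part of the original
# module): each hopper's damage value & 0x7 encodes its facing and is turned
# into a one-block step (0 -> down, 2 -> north, 3 -> south, 4 -> west,
# 5 -> east), so e.g. a hopper at (10, 60, 20) with damage 5 hands off to the
# block at (11, 60, 20). The walk fails if it leaves hopper blocks or revisits
# a coordinate before reaching end_coords.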
def smart_chest_schematic(document_root=ati.document_root):
layers = {}
with (document_root / 'static' / 'smartchest.txt').open() as smart_chest_layers:
current_y = None
current_layer = None
for line in smart_chest_layers:
if line == '\n':
continue
match = re.fullmatch('layer (-?[0-9]+)\n', line)
if match:
# new layer
if current_y is not None:
layers[current_y] = current_layer
current_y = int(match.group(1))
current_layer = []
else:
current_layer.append(line.rstrip('\r\n'))
if current_y is not None:
layers[current_y] = current_layer
return sorted(layers.items())
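# Illustrative shape of the value returned above (hypothetical rows, not part
# of the original module): a list of (relative_y, rows) pairs sorted by layer
# height, where each row string holds one block symbol per column, e.g.
#
#   [(-8, ['ssssssss', 'spppppps', ...]),
#    (-7, [...]),
#    ...]
#
# The individual symbols are interpreted by chest_error_checks() below.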
def chest_iter():
"""Returns an iterator yielding tuples (x, corridor, y, floor, z, chest)."""
with (ati.assets_root / 'json' / 'cloud.json').open() as cloud_json:
cloud_data = json.load(cloud_json)
for y, floor in enumerate(cloud_data):
for x, corridor in sorted(((int(x), corridor) for x, corridor in floor.items()), key=lambda tup: tup[0]):
for z, chest in enumerate(corridor):
yield x, corridor, y, floor, z, chest
def chest_coords(item, *, include_meta=False):
if not isinstance(item, alltheitems.item.Item):
item = alltheitems.item.Item(item)
for x, corridor, y, _, z, chest in chest_iter():
if item == chest:
if include_meta:
return (x, y, z), len(corridor), None if isinstance(chest, str) else chest.get('name'), None if isinstance(chest, str) else chest.get('sorter')
else:
return x, y, z
if include_meta:
return None, 0, None, None
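# Illustrative lookups (hypothetical items, not part of the original module):
#
#   chest_coords('minecraft:cobblestone')
#   # -> (x, y, z) of the chest assigned to cobblestone, or None if the item
#   #    is not listed in cloud.json
#   chest_coords({'id': 'minecraft:stone', 'damage': 1}, include_meta=True)
#   # -> ((x, y, z), corridor_length, display_name, pre_sorter) when found,
#   #    or (None, 0, None, None) when missing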
def global_error_checks(*, chunk_cache=None, block_at=alltheitems.world.World().block_at):
cache_path = ati.cache_root / 'cloud-globals.json'
max_age = datetime.timedelta(hours=1, minutes=random.randrange(0, 60)) # use a random value between 1 and 2 hours for the cache expiration
if cache_path.exists() and datetime.datetime.utcfromtimestamp(cache_path.stat().st_mtime) > datetime.datetime.utcnow() - max_age:
# cached check results are recent enough
with cache_path.open() as cache_f:
cache = json.load(cache_f)
return cache
# cached check results are too old, recheck
if chunk_cache is None:
chunk_cache = {}
# error check: input hopper chain
start = 14, 61, 32 # the first hopper after the buffer elevator
end = -1, 25, 52 # the half of the uppermost overflow chest into which the hopper chain is pointing
is_connected, message = hopper_chain_connected(start, end, chunk_cache=chunk_cache, block_at=block_at)
if not is_connected:
return 'Input hopper chain at {} is not connected to the unsorted overflow at {}: {}.'.format(start, end, message)
if ati.cache_root.exists():
with cache_path.open('w') as cache_f:
json.dump(message, cache_f, sort_keys=True, indent=4)
def chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root):
if stackable and has_sorter:
# error check: overflow exists
if not has_overflow:
if len(missing_overflow_hoppers) == 3:
return 'Missing overflow hoppers.'
elif len(missing_overflow_hoppers) > 1:
return 'Overflow hoppers at x={} do not exist.'.format(missing_overflow_hoppers)
elif len(missing_overflow_hoppers) == 1:
return 'Overflow hopper at x={} does not exist, is {}.'.format(next(iter(missing_overflow_hoppers)), block_at(next(iter(missing_overflow_hoppers)), base_y - 7, base_z - 1)['id'])
else:
return 'Missing overflow.'
# error check: pre-sorter for lower floors
if y > 4:
if pre_sorter is None:
return 'Preliminary sorter coordinate missing from cloud.json.'
pre_sorting_hopper = block_at(pre_sorter, 30, 52, chunk_cache=chunk_cache)
if pre_sorting_hopper['id'] != 'minecraft:hopper':
return 'Preliminary sorter is missing (should be at {} 30 52).'.format(pre_sorter)
if pre_sorting_hopper['damage'] != 3:
return 'Preliminary sorting hopper ({} 30 52) should be pointing south, but is facing {}.'.format(pre_sorter, HOPPER_FACINGS[pre_sorting_hopper['damage']])
empty_slots = set(range(5))
for slot in pre_sorting_hopper['tileEntity']['Items']:
empty_slots.remove(slot['Slot'])
if slot['Slot'] == 0:
if not item.matches_slot(slot):
return 'Preliminary sorting hopper is sorting the wrong item: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
else:
if not filler_item.matches_slot(slot):
return 'Preliminary sorting hopper has wrong filler item in slot {}: {} (should be {}).'.format(slot['Slot'], alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text(), filler_item.link_text())
if slot['Count'] > 1:
return 'Preliminary sorting hopper: too much {} in slot {}.'.format(filler_item.link_text(), slot['Slot'])
if len(empty_slots) > 0:
if len(empty_slots) == 5:
return 'Preliminary sorting hopper is empty.'
elif len(empty_slots) == 1:
return 'Slot {} of the preliminary sorting hopper is empty.'.format(next(iter(empty_slots)))
else:
return 'Some slots in the preliminary sorting hopper are empty: {}.'.format(alltheitems.util.join(empty_slots))
if has_sorter:
# error check: sorting hopper
if sorting_hopper['damage'] != 2:
return 'Sorting hopper ({} {} {}) should be pointing north, but is facing {}.'.format(base_x - 2 if z % 2 == 0 else base_x + 2, base_y - 3, base_z, HOPPER_FACINGS[sorting_hopper['damage']])
empty_slots = set(range(5))
for slot in sorting_hopper['tileEntity']['Items']:
empty_slots.remove(slot['Slot'])
if slot['Slot'] == 0 and stackable:
if not item.matches_slot(slot) and not filler_item.matches_slot(slot):
return 'Sorting hopper is sorting the wrong item: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
else:
if not filler_item.matches_slot(slot):
return 'Sorting hopper has wrong filler item in slot {}: {} (should be {}).'.format(slot['Slot'], alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text(), filler_item.link_text())
if slot['Count'] > 1:
return 'Sorting hopper: too much {} in slot {}.'.format(filler_item.link_text(), slot['Slot'])
if len(empty_slots) > 0:
if len(empty_slots) == 5:
return 'Sorting hopper is empty.'
elif len(empty_slots) == 1:
return 'Slot {} of the sorting hopper is empty.'.format(next(iter(empty_slots)))
else:
return 'Some slots in the sorting hopper are empty: {}.'.format(alltheitems.util.join(empty_slots))
if exists:
# error check: wrong items in access chest
for slot in itertools.chain(north_half['tileEntity']['Items'], south_half['tileEntity']['Items']):
if not item.matches_slot(slot):
return 'Access chest contains items of the wrong kind: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
# error check: wrong name on sign
sign = block_at(base_x - 1 if z % 2 == 0 else base_x + 1, base_y + 1, base_z + 1, chunk_cache=chunk_cache)
if sign['id'] != 'minecraft:wall_sign':
return 'Sign is missing.'
text = []
for line in range(1, 5):
line_text = json.loads(sign['tileEntity']['Text{}'.format(line)])['text'].translate(dict.fromkeys(range(0xf700, 0xf704), None))
if len(line_text) > 0:
text.append(line_text)
text = ' '.join(text)
if text != item_name.translate({0x2161: 'II'}):
return 'Sign has wrong text: should be {!r}, is {!r}.'.format(xml.sax.saxutils.escape(item_name), xml.sax.saxutils.escape(text))
if has_overflow:
# error check: overflow hopper chain
start = base_x + 5 if z % 2 == 0 else base_x - 5, base_y - 7, base_z - 1
end = -35, 6, 38 # position of the dropper leading into the Smelting Center's item elevator
is_connected, message = hopper_chain_connected(start, end, chunk_cache=chunk_cache, block_at=block_at)
if not is_connected:
return 'Overflow hopper chain at {} is not connected to the Smelting Center item elevator at {}: {}.'.format(start, end, message)
if exists and has_smart_chest:
# error check: all blocks
for layer_y, layer in smart_chest_schematic(document_root=document_root):
for layer_x, row in enumerate(layer):
for layer_z, block_symbol in enumerate(row):
# determine the coordinate of the current block
exact_x, exact_y, exact_z = layer_coords(layer_x, layer_y, layer_z)
# determine current block
block = block_at(exact_x, exact_y, exact_z, chunk_cache=chunk_cache)
# check against schematic
if block_symbol == ' ':
# air
if block['id'] != 'minecraft:air':
return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == '!':
# sign
if block['id'] != 'minecraft:wall_sign':
return 'Block at {} {} {} should be a sign, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != (4 if z % 2 == 0 else 5):
return 'Sign at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z)
elif block_symbol == '#':
# chest
if block['id'] != 'minecraft:chest':
return 'Block at {} {} {} should be a chest, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage chest at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == '<':
# hopper facing south
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 3: # south
return 'Hopper at {} {} {} should be pointing south, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
storage_hoppers = {
(5, -7, 4),
(6, -5, 4)
}
if (layer_x, layer_y, layer_z) in storage_hoppers:
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == '>':
# hopper facing north
if layer_y == -7 and layer_x == 0 and z < 8:
# the first few chests get ignored because their overflow points in the opposite direction
pass #TODO introduce special checks for them
else:
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 2: # north
return 'Hopper at {} {} {} should be pointing north, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
storage_hoppers = {
(3, -7, 3),
(3, -4, 2)
}
if (layer_x, layer_y, layer_z) in storage_hoppers:
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == '?':
# any block
pass
elif block_symbol == 'C':
# comparator
if block['id'] != 'minecraft:unpowered_comparator':
return 'Block at {} {} {} should be a comparator, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
known_facings = {
(5, -7, 2): 0x2, # south
(5, -5, 2): 0x2, # south
(7, -3, 4): 0x0, # north
(0, -1, 1): 0x0, # north
(1, -1, 2): 0x0, # north
(2, 0, 2): 0x1 if z % 2 == 0 else 0x3, # east / west
(2, 0, 3): 0x2, # south
(4, 0, 2): 0x1 if z % 2 == 0 else 0x3, # east / west
(4, 0, 3): 0x2 # south
}
facing = block['damage'] & 0x3
if (layer_x, layer_y, layer_z) in known_facings:
if known_facings[layer_x, layer_y, layer_z] != facing:
return 'Comparator at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z)
else:
return 'Direction check for comparator at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
known_modes = {
(5, -7, 2): False, # compare
(5, -5, 2): False, # compare
(7, -3, 4): False, # compare
(0, -1, 1): False, # compare
(1, -1, 2): True, # subtract
(2, 0, 2): True, # subtract
(2, 0, 3): False, # compare
(4, 0, 2): True, #subtract
(4, 0, 3): False # compare
}
mode = (block['damage'] & 0x4) == 0x4
if (layer_x, layer_y, layer_z) in known_modes:
if known_modes[layer_x, layer_y, layer_z] != mode:
return 'Comparator at {} {} {} is in {} mode, should be in {} mode.'.format(exact_x, exact_y, exact_z, 'subtraction' if mode else 'comparison', 'subtraction' if known_modes[layer_x, layer_y, layer_z] else 'comparison')
else:
return 'Mode check for comparator at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
elif block_symbol == 'D':
# dropper facing up
if block['id'] != 'minecraft:dropper':
return 'Block at {} {} {} should be a dropper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 1: # up
return 'Dropper at {} {} {} should be facing up, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Dropper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == 'F':
# furnace
if layer_y == -6 and layer_x == 0 and z < 2:
# the first few chests get ignored because their overflow points in the opposite direction
pass #TODO introduce special checks for them
elif layer_y == -1 and layer_x == 7 and layer_z == 1 and (z == corridor_length - 1 or z == corridor_length - 2 and z % 2 == 0):
# the floor ends with a quartz slab instead of a furnace here
if block['id'] != 'minecraft:stone_slab':
return 'Block at {} {} {} should be a quartz slab, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 0x7:
slab_variant = {
0: 'stone',
1: 'sandstone',
2: 'fake wood',
3: 'cobblestone',
4: 'brick',
5: 'stone brick',
6: 'Nether brick',
7: 'quartz'
}[block['damage'] & 0x7]
return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/7">quartz slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 0x7, slab_variant)
if block['damage'] & 0x8 != 0x8:
return 'Quartz slab at {} {} {} should be a top slab, is a bottom slab.'.format(exact_x, exact_y, exact_z)
elif x == 0 and y == 6 and layer_y == -1 and layer_x == 7:
# the central corridor on the 6th floor uses stone bricks instead of furnaces for the floor
if block['id'] != 'minecraft:stonebrick':
return 'Block at {} {} {} should be stone bricks, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 0:
stonebrick_variant = {
0: 'regular',
1: 'mossy',
2: 'cracked',
3: 'chiseled'
}[block['damage']]
return 'Block at {} {} {} should be <a href="/block/minecraft/stonebrick/0">regular stone bricks</a>, is <a href="/block/minecraft/stonebrick/{}">{} stone bricks</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stonebrick_variant)
else:
if block['id'] != 'minecraft:furnace':
return 'Block at {} {} {} should be a furnace, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
known_signals = {
(0, -6, 4): 0,
(0, -6, 5): 0,
(0, -6, 6): 0,
(0, -6, 7): 0,
(0, -1, 0): 8,
(7, -1, 1): 0,
(7, -1, 2): 0,
(7, -1, 3): 0,
(7, -1, 4): 0,
(2, 0, 4): 1,
(4, 0, 4): 5
}
signal = alltheitems.item.comparator_signal(block, items_data=items_data)
if (layer_x, layer_y, layer_z) in known_signals:
if known_signals[layer_x, layer_y, layer_z] != signal:
return 'Furnace at {} {} {} has a fill level of {}, should be {}.'.format(exact_x, exact_y, exact_z, signal, known_signals[layer_x, layer_y, layer_z])
else:
return 'Fill level check for furnace at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
elif block_symbol == 'G':
# glowstone
if block['id'] != 'minecraft:glowstone':
return 'Block at {} {} {} should be glowstone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == 'H':
# hopper, any facing
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == 'N':
# overflow hopper chain pointing north
if y > 1 and (z == 0 or z == 1):
if block['id'] == 'minecraft:hopper':
if block['damage'] != 2: # north
return 'Overflow hopper at {} {} {} should be pointing north, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
elif block['id'] == 'minecraft:air':
pass # also allow air because some overflow hopper chains don't start on the first floor
else:
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
else:
if block['id'] != 'minecraft:air':
return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == 'P':
# upside-down oak stairs
if block['id'] != 'minecraft:oak_stairs':
return 'Block at {} {} {} should be oak stairs, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x3 != (0x1 if z % 2 == 0 else 0x0):
stairs_facings = {
0: 'west',
1: 'east',
2: 'south',
3: 'north'
}
return 'Stairs at {} {} {} should be facing {}, is {}.'.format(exact_x, exact_y, exact_z, stairs_facings[0x1 if z % 2 == 0 else 0x0], stairs_facings[block['damage'] & 0x3])
if block['damage'] & 0x4 != 0x4:
return 'Stairs at {} {} {} should be upside-down.'.format(exact_x, exact_y, exact_z)
elif block_symbol == 'Q':
# quartz top slab
if block['id'] != 'minecraft:stone_slab':
return 'Block at {} {} {} should be a quartz slab, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 0x7:
slab_variant = {
0: 'stone',
1: 'sandstone',
2: 'fake wood',
3: 'cobblestone',
4: 'brick',
5: 'stone brick',
6: 'Nether brick',
7: 'quartz'
}[block['damage'] & 0x7]
return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/7">quartz slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 0x7, slab_variant)
if block['damage'] & 0x8 != 0x8:
return 'Quartz slab at {} {} {} should be a top slab, is a bottom slab.'.format(exact_x, exact_y, exact_z)
elif block_symbol == 'R':
# repeater
if block['id'] not in ('minecraft:unpowered_repeater', 'minecraft:powered_repeater'):
return 'Block at {} {} {} should be a repeater, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
known_facings = {
(1, -8, 2): 0x0, # north
(3, -8, 3): 0x3 if z % 2 == 0 else 0x1, # west / east
(6, -6, 2): 0x0, # north
(7, -5, 5): 0x2, # south
(3, -3, 1): 0x1 if z % 2 == 0 else 0x3 # east / west
}
facing = block['damage'] & 0x3
if (layer_x, layer_y, layer_z) in known_facings:
if known_facings[layer_x, layer_y, layer_z] != facing:
return 'Repeater at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z)
else:
return 'Direction check for repeater at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
known_delays = { # in game ticks
(1, -8, 2): 4,
(3, -8, 3): 2,
(6, -6, 2): 2,
(7, -5, 5): 2,
(3, -3, 1): 2
}
delay_ticks = 2 * (block['damage'] >> 2) + 2
if (layer_x, layer_y, layer_z) in known_delays:
if known_delays[layer_x, layer_y, layer_z] != delay_ticks:
return 'Repeater at {} {} {} has a delay of {} game tick{}, should be {}.'.format(exact_x, exact_y, exact_z, delay_ticks, '' if delay_ticks == 1 else 's', known_delays[layer_x, layer_y, layer_z])
else:
return 'Delay check for repeater at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
elif block_symbol == 'S':
# stone top slab
if block['id'] != 'minecraft:stone_slab':
return 'Block at {} {} {} should be a stone slab, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 0x0:
slab_variant = {
0: 'stone',
1: 'sandstone',
2: 'fake wood',
3: 'cobblestone',
4: 'brick',
5: 'stone brick',
6: 'Nether brick',
7: 'quartz'
}[block['damage'] & 0x7]
return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/0">stone slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 0x7, slab_variant)
if block['damage'] & 0x8 != 0x8:
                            return 'Stone slab at {} {} {} should be a top slab, is a bottom slab.'.format(exact_x, exact_y, exact_z)
elif block_symbol == 'T':
# redstone torch attached to the side of a block
if block['id'] not in ('minecraft:unlit_redstone_torch', 'minecraft:redstone_torch'):
return 'Block at {} {} {} should be a redstone torch, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
known_facings = {
(3, -8, 1): 1 if z % 2 == 0 else 2, # west / east
(2, -7, 1): 3, # north
(4, -6, 1): 2 if z % 2 == 0 else 1, # east / west
(4, -6, 2): 3, # north
(4, -5, 1): 1 if z % 2 == 0 else 2, # west / east
(4, -5, 3): 4, # south
(7, -5, 3): 3, # north
(1, -4, 2): 4, # south
(1, -3, 3): 3, # north
(1, -1, 4): 4, # south
(5, -1, 1): 2 if z % 2 == 0 else 1, # east / west
(3, 0, 3): 4 # south
}
if (layer_x, layer_y, layer_z) in known_facings:
if known_facings[layer_x, layer_y, layer_z] != block['damage']:
return 'Redstone torch at {} {} {} attached to the block {}, should be attached to the block {}.'.format(exact_x, exact_y, exact_z, TORCH_FACINGS[block['damage']], TORCH_FACINGS[known_facings[layer_x, layer_y, layer_z]])
else:
return 'Facing check for redstone torch at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z)
elif block_symbol == 'W':
# back wall
if z == corridor_length - 1 or z == corridor_length - 2 and z % 2 == 0:
if block['id'] != 'minecraft:stone':
return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 0:
stone_variant = {
0: 'stone',
1: 'granite',
2: 'polished granite',
3: 'diorite',
4: 'polished diorite',
5: 'andesite',
6: 'polished andesite'
}[block['damage']]
return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant)
elif block_symbol == 'X':
# overflow hopper chain pointing down
if layer_y < -7 and y < 6 and (z == 4 or z == 5) or layer_y > -7 and y > 1 and (z == 0 or z == 1):
if block['id'] == 'minecraft:hopper':
if block['damage'] != 0: # down
return 'Overflow hopper at {} {} {} should be pointing down, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
elif block['id'] == 'minecraft:air':
pass # also allow air because some overflow hopper chains don't start on the first floor
else:
return 'Block at {} {} {} should be air or a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
else:
if block['id'] != 'minecraft:air':
return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == '^':
# hopper facing outward
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != (5 if z % 2 == 0 else 4): # east / west
return 'Hopper at {} {} {} should be pointing {}, is {}.'.format(exact_x, exact_y, exact_z, 'east' if z % 2 == 0 else 'west', HOPPER_FACINGS[block['damage']])
storage_hoppers = {
(3, -5, 3),
(6, -5, 3),
(7, -4, 3),
(5, -3, 2),
(6, -3, 2)
}
if (layer_x, layer_y, layer_z) in storage_hoppers:
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == 'c':
# crafting table
if layer_y == -7 and (y == 6 or z < 4 or z < 6 and layer_z > 1):
if block['id'] != 'minecraft:stone':
return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 0:
stone_variant = STONE_VARIANTS[block['damage']]
return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant)
else:
if block['id'] != 'minecraft:crafting_table':
return 'Block at {} {} {} should be a crafting table, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == 'i':
# torch attached to the top of a block
if block['id'] != 'minecraft:torch':
return 'Block at {} {} {} should be a torch, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 5: # attached to the block below
return 'Torch at {} {} {} should be attached to the block below, is attached to the block {}'.format(exact_x, exact_y, exact_z, TORCH_FACINGS[block['damage']])
elif block_symbol == 'p':
# oak planks
if layer_y == -8 and (y == 6 or z < 4 or z < 6 and layer_z > 1):
if block['id'] != 'minecraft:stone':
return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 0:
stone_variant = STONE_VARIANTS[block['damage']]
return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant)
else:
if block['id'] != 'minecraft:planks':
return 'Block at {} {} {} should be oak planks, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
pass #TODO check material
elif block_symbol == 'r':
# redstone dust
if block['id'] != 'minecraft:redstone_wire':
return 'Block at {} {} {} should be redstone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
elif block_symbol == 's':
# stone
if block['id'] != 'minecraft:stone':
if exact_y < 5:
if block['id'] != 'minecraft:bedrock':
return 'Block at {} {} {} should be stone or bedrock, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
else:
return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 0:
stone_variant = STONE_VARIANTS[block['damage']]
return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant)
elif block_symbol == 't':
# redstone torch attached to the top of a block
if block['id'] not in ('minecraft:unlit_redstone_torch', 'minecraft:redstone_torch'):
return 'Block at {} {} {} should be a redstone torch, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] != 5: # attached to the block below
return 'Redstone torch at {} {} {} should be attached to the block below, is attached to the block {}'.format(exact_x, exact_y, exact_z, TORCH_FACINGS[block['damage']])
elif block_symbol == 'v':
# hopper facing inwards
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != (4 if z % 2 == 0 else 5): # west / east
return 'Hopper at {} {} {} should be pointing {}, is {}.'.format(exact_x, exact_y, exact_z, 'west' if z % 2 == 0 else 'east', HOPPER_FACINGS[block['damage']])
storage_hoppers = {
(3, -7, 4),
(4, -7, 4),
(2, -6, 3)
}
if (layer_x, layer_y, layer_z) in storage_hoppers:
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == 'x':
# hopper facing down
if block['id'] != 'minecraft:hopper':
return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
if block['damage'] & 0x7 != 0: # down
return 'Hopper at {} {} {} should be pointing down, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']])
storage_hoppers = {
(5, -1, 2)
}
if (layer_x, layer_y, layer_z) in storage_hoppers:
for slot in block['tileEntity']['Items']:
if not item.matches_slot(slot):
return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text())
elif block_symbol == '~':
# hopper chain
if block['id'] == 'minecraft:hopper':
pass #TODO check facing
pass #TODO check alignment
elif block['id'] == 'minecraft:air':
pass #TODO check alignment
else:
return 'Block at {} {} {} should be a hopper or air, is {}.'.format(exact_x, exact_y, exact_z, block['id'])
pass #TODO check hopper chain integrity
else:
return 'Not yet implemented: block at {} {} {} should be {}.'.format(exact_x, exact_y, exact_z, block_symbol)
# error check: items in storage chests but not in access chest
access_chest_fill_level = alltheitems.item.comparator_signal(north_half, south_half)
bottom_dropper_fill_level = alltheitems.item.comparator_signal(block_at(*layer_coords(5, -7, 3), chunk_cache=chunk_cache))
if access_chest_fill_level < 2 and bottom_dropper_fill_level > 2:
return 'Access chest is {}empty but there are items stuck in the storage dropper at {} {} {}.'.format('' if access_chest_fill_level == 0 else 'almost ', *layer_coords(5, -7, 3))
if durability and has_smart_chest:
# error check: damaged or enchanted tools in storage chests
storage_containers = set(CONTAINERS) - {(5, 0, 2), (5, 0, 3)}
for container in storage_containers:
for slot in block_at(*layer_coords(*container), chunk_cache=chunk_cache)['tileEntity']['Items']:
if slot.get('Damage', 0) > 0:
return 'Item in storage container at {} {} {} is damaged.'.format(*layer_coords(*container))
if len(slot.get('tag', {}).get('ench', [])) > 0:
return 'Item in storage container at {} {} {} is enchanted.'.format(*layer_coords(*container))
def chest_state(coords, item_stub, corridor_length, item_name=None, pre_sorter=None, *, items_data=None, block_at=alltheitems.world.World().block_at, document_root=ati.document_root, chunk_cache=None, cache=None, allow_cache=True):
if items_data is None:
with (ati.assets_root / 'json' / 'items.json').open() as items_file:
items_data = json.load(items_file)
if chunk_cache is None:
chunk_cache = {}
if isinstance(item_stub, str):
item_stub = {'id': item_stub}
item = alltheitems.item.Item(item_stub, items_data=items_data)
if item_name is None:
item_name = item.info()['name']
state = None, 'This SmartChest is in perfect state.', None
x, y, z = coords
# determine the base coordinate, i.e. the position of the north half of the access chest
if z % 2 == 0:
# left wall
base_x = 15 * x + 2
else:
# right wall
base_x = 15 * x - 3
base_y = 73 - 10 * y
base_z = 28 + 10 * y + 4 * (z // 2)
def layer_coords(layer_x, layer_y, layer_z):
if z % 2 == 0:
# left wall
exact_x = base_x + 5 - layer_x
else:
# right wall
exact_x = base_x - 5 + layer_x
exact_y = base_y + layer_y
exact_z = base_z + 3 - layer_z
return exact_x, exact_y, exact_z
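    # Worked example of the mapping above (comment added for clarity, not part
    # of the original module): for the chest at cloud position x=1, y=1, z=0
    # (even z, i.e. the left wall), base_x = 15*1 + 2 = 17,
    # base_y = 73 - 10*1 = 63 and base_z = 28 + 10*1 + 4*(0//2) = 38, so
    # layer_coords(5, 0, 2) resolves to the world block (17, 63, 39).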
# does the access chest exist?
exists = False
north_half = block_at(base_x, base_y, base_z, chunk_cache=chunk_cache)
south_half = block_at(base_x, base_y, base_z + 1, chunk_cache=chunk_cache)
if north_half['id'] != 'minecraft:chest' and south_half['id'] != 'minecraft:chest':
state = 'gray', 'Access chest does not exist.', None
elif north_half['id'] != 'minecraft:chest':
state = 'gray', 'North half of access chest does not exist.', None
elif south_half['id'] != 'minecraft:chest':
state = 'gray', 'South half of access chest does not exist.', None
else:
exists = True
# does it have a SmartChest?
has_smart_chest = False
missing_droppers = set()
for dropper_y in range(base_y - 7, base_y):
dropper = block_at(base_x, dropper_y, base_z, chunk_cache=chunk_cache)
if dropper['id'] != 'minecraft:dropper':
missing_droppers.add(dropper_y)
if len(missing_droppers) == 7:
if state[0] is None:
state = 'orange', 'SmartChest droppers do not exist.', None
elif len(missing_droppers) > 1:
if state[0] is None:
state = 'orange', 'SmartChest droppers at y={} do not exist.'.format(', y='.join(str(dropper) for dropper in missing_droppers)), None
elif len(missing_droppers) == 1:
if state[0] is None:
state = 'orange', 'SmartChest dropper at y={} does not exist, is {}.'.format(next(iter(missing_droppers)), block_at(base_x, dropper_y, base_z)['id']), None
else:
has_smart_chest = True
# is it stackable?
stackable = item.info().get('stackable', True)
if not stackable and state[0] is None:
state = 'cyan', "This SmartChest is in perfect state (but the item is not stackable, so it can't be sorted).", None
# does it have a durability bar?
durability = 'durability' in item.info()
# does it have a sorter?
has_sorter = False
if item == 'minecraft:crafting_table' or stackable and item.max_stack_size < 64:
filler_item = alltheitems.item.Item('minecraft:crafting_table', items_data=items_data)
else:
filler_item = alltheitems.item.Item('minecraft:ender_pearl', items_data=items_data)
sorting_hopper = block_at(base_x - 2 if z % 2 == 0 else base_x + 2, base_y - 3, base_z, chunk_cache=chunk_cache)
if sorting_hopper['id'] != 'minecraft:hopper':
if state[0] is None:
state = 'yellow', 'Sorting hopper does not exist, is {}.'.format(sorting_hopper['id']), None
else:
for slot in sorting_hopper['tileEntity']['Items']:
if slot['Slot'] == 0 and stackable and not item.matches_slot(slot) and filler_item.matches_slot(slot):
if state[0] is None or state[0] == 'cyan':
state = 'yellow', 'Sorting hopper is full of {}, but the sorted item is stackable, so the first slot should contain the item.'.format(filler_item.link_text()), None
break
else:
has_sorter = True
# does it have an overflow?
has_overflow = False
missing_overflow_hoppers = set()
for overflow_x in range(base_x + 3 if z % 2 == 0 else base_x - 3, base_x + 6 if z % 2 == 0 else base_x - 6, 1 if z % 2 == 0 else -1):
overflow_hopper = block_at(overflow_x, base_y - 7, base_z - 1, chunk_cache=chunk_cache)
if overflow_hopper['id'] != 'minecraft:hopper':
missing_overflow_hoppers.add(overflow_x)
if len(missing_overflow_hoppers) == 0:
has_overflow = True
# state determined, check for errors
if coords == (1, 1, 0): # Ender pearls
message = global_error_checks(chunk_cache=chunk_cache, block_at=block_at)
if message is not None:
return 'red', message, None
cache_path = ati.cache_root / 'cloud-chests.json'
if cache is None:
if cache_path.exists():
with cache_path.open() as cache_f:
cache = json.load(cache_f)
else:
cache = {}
max_age = datetime.timedelta(hours=1, minutes=random.randrange(0, 60)) # use a random value between 1 and 2 hours for the cache expiration
if allow_cache and str(y) in cache and str(x) in cache[str(y)] and str(z) in cache[str(y)][str(x)] and cache[str(y)][str(x)][str(z)]['errorMessage'] is None and datetime.datetime.strptime(cache[str(y)][str(x)][str(z)]['timestamp'], '%Y-%m-%d %H:%M:%S') > datetime.datetime.utcnow() - max_age:
message = cache[str(y)][str(x)][str(z)]['errorMessage']
pass # cached check results are recent enough
else:
# cached check results are too old, recheck
message = chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root)
if ati.cache_root.exists():
if str(y) not in cache:
cache[str(y)] = {}
if str(x) not in cache[str(y)]:
cache[str(y)][str(x)] = {}
cache[str(y)][str(x)][str(z)] = {
'errorMessage': message,
'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
}
with cache_path.open('w') as cache_f:
json.dump(cache, cache_f, sort_keys=True, indent=4)
if message is not None:
return 'red', message, None
# no errors, determine fill level
if state[0] in (None, 'cyan', 'orange', 'yellow'):
try:
containers = CONTAINERS if state[0] in (None, 'cyan') else [ # layer coords of the access chest
(5, 0, 2),
(5, 0, 3)
]
total_items = sum(max(0, sum(slot['Count'] for slot in block_at(*layer_coords(*container), chunk_cache=chunk_cache)['tileEntity']['Items'] if slot.get('Damage', 0) == 0 or not durability) - (4 * item.max_stack_size if container == (5, -7, 3) else 0)) for container in containers) # Don't count the 4 stacks of items that are stuck in the bottom dropper. Don't count damaged tools.
max_slots = sum(alltheitems.item.NUM_SLOTS[block_at(*layer_coords(*container), chunk_cache=chunk_cache)['id']] for container in containers) - (0 if state[0] == 'orange' else 4)
return state[0], state[1], FillLevel(item.max_stack_size, total_items, max_slots, is_smart_chest=state[0] in (None, 'cyan'))
except:
# something went wrong determining fill level, re-check errors
            message = chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root)
if ati.cache_root.exists():
if str(y) not in cache:
cache[str(y)] = {}
if str(x) not in cache[str(y)]:
cache[str(y)][str(x)] = {}
cache[str(y)][str(x)][str(z)] = {
'errorMessage': message,
'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
}
with cache_path.open('w') as cache_f:
json.dump(cache, cache_f, sort_keys=True, indent=4)
if message is None:
raise
else:
return 'red', message, None
return state
def cell_from_chest(coords, item_stub, corridor_length, item_name=None, pre_sorter=None, *, chunk_cache=None, items_data=None, colors_to_explain=None, cache=None, allow_cache=True):
color, state_message, fill_level = chest_state(coords, item_stub, corridor_length, item_name, pre_sorter, items_data=items_data, chunk_cache=chunk_cache, cache=cache, allow_cache=allow_cache)
if colors_to_explain is not None:
colors_to_explain.add(color)
if fill_level is None or fill_level.is_full():
return '<td style="background-color: {};">{}</td>'.format(HTML_COLORS[color], alltheitems.item.Item(item_stub, items_data=items_data).image())
else:
return '<td style="background-color: {};">{}<div class="durability"><div style="background-color: #f0f; width: {}px;"></div></div></td>'.format(HTML_COLORS[color], alltheitems.item.Item(item_stub, items_data=items_data).image(), 0 if fill_level.is_empty() else 2 + int(fill_level.fraction * 13) * 2)
def index(allow_cache=True):
yield ati.header(title='Cloud')
def body():
yield '<p>The <a href="//wiki.{host}/Cloud">Cloud</a> is the public item storage on <a href="//{host}/">Wurstmineberg</a>, consisting of 6 underground floors with <a href="//wiki.{host}/SmartChest">SmartChests</a> in them.</p>'.format(host=ati.host)
yield """<style type="text/css">
.item-table td {
box-sizing: content-box;
height: 32px;
width: 32px;
position: relative;
}
.item-table .left-sep {
border-left: 1px solid gray;
}
.durability {
z-index: 1;
}
</style>"""
chunk_cache = {}
with (ati.assets_root / 'json' / 'items.json').open() as items_file:
items_data = json.load(items_file)
cache_path = ati.cache_root / 'cloud-chests.json'
if cache_path.exists():
try:
with cache_path.open() as cache_f:
cache = json.load(cache_f)
except ValueError:
# cache JSON is corrupted, probably because of a full disk, try without cache
cache_path.unlink()
cache = None
else:
cache = None
colors_to_explain = set()
floors = {}
for x, corridor, y, floor, z, chest in chest_iter():
if y not in floors:
floors[y] = floor
for y, floor in sorted(floors.items(), key=lambda tup: tup[0]):
def cell(coords, item_stub, corridor):
if isinstance(item_stub, str):
item_stub = {'id': item_stub}
item_name = None
pre_sorter = None
else:
item_stub = item_stub.copy()
if 'name' in item_stub:
item_name = item_stub['name']
del item_stub['name']
else:
item_name = None
if 'sorter' in item_stub:
pre_sorter = item_stub['sorter']
del item_stub['sorter']
else:
pre_sorter = None
return cell_from_chest(coords, item_stub, len(corridor), item_name, pre_sorter, chunk_cache=chunk_cache, colors_to_explain=colors_to_explain, items_data=items_data, cache=cache, allow_cache=allow_cache)
yield bottle.template("""
%import itertools
<h2 id="floor{{y}}">{{y}}{{ordinal(y)}} floor (y={{73 - 10 * y}})</h2>
<table class="item-table" style="margin-left: auto; margin-right: auto;">
%for x in range(-3, 4):
%if x > -3:
<colgroup class="left-sep">
<col />
<col />
</colgroup>
%else:
<colgroup>
<col />
<col />
</colgroup>
%end
%end
<tbody>
%for z_left, z_right in zip(itertools.count(step=2), itertools.count(start=1, step=2)):
%found = False
<tr>
%for x in range(-3, 4):
%if str(x) not in floor:
<td></td>
<td></td>
%continue
%end
%corridor = floor[str(x)]
%if len(corridor) > z_right:
{{!cell((x, y, z_right), corridor[z_right], corridor)}}
%else:
<td></td>
%end
%if len(corridor) > z_left:
{{!cell((x, y, z_left), corridor[z_left], corridor)}}
%found = True
%else:
<td></td>
%end
%end
</tr>
%if not found:
%break
%end
%end
</tbody>
</table>
""", ordinal=alltheitems.util.ordinal, cell=cell, floor=floor, y=y)
color_explanations = collections.OrderedDict([
('red', '<p>A red background means that there is something wrong with the chest. See the item info page for details.</p>'),
('gray', "<p>A gray background means that the chest hasn't been built yet or is still located somewhere else.</p>"),
('orange', "<p>An orange background means that the chest doesn't have a SmartChest yet. It can only store 54 stacks.</p>"),
('yellow', "<p>A yellow background means that the chest doesn't have a sorter yet.</p>"),
('cyan', '<p>A cyan background means that the chest has no sorter because it stores an unstackable item. These items should not be automatically <a href="//wiki.wurstmineberg.de/Soup#Cloud">sent</a> to the Cloud.</p>'),
(None, '<p>A white background means that everything is okay: the chest has a SmartChest, a sorter, and overflow protection.</p>')
])
for chest_color in sorted(colors_to_explain, key=list(color_explanations.keys()).index):
if chest_color is not None or len(colors_to_explain) > 1:
yield color_explanations[chest_color]
yield from ati.html_exceptions(body())
yield ati.footer(linkify_headers=True)
def todo():
yield ati.header(title='Cloud by priority')
def body():
yield """<style type="text/css">
.todo-table td {
text-align: left;
vertical-align: middle !important;
}
.todo-table .coord {
width: 3em;
text-align: right;
}
.todo-table .item-image {
box-sizing: content-box;
width: 32px;
}
.todo-table .item-name {
width: 24em;
}
</style>"""
headers = collections.OrderedDict([
('red', 'Build errors'),
('gray', 'Missing chests'),
('orange', 'Missing SmartChests'),
('yellow', 'Missing sorters'),
('cyan', 'Empty SmartChests (unstackable)'),
('white', 'Empty SmartChests (stackable)'),
('cyan2', 'Missing items (unstackable)'),
('white2', 'Missing items (stackable)')
])
header_indexes = {color: i for i, color in enumerate(headers.keys())}
def priority(pair):
coords, state = pair
x, y, z = coords
color, _, fill_level, _ = state
return header_indexes[color], None if fill_level is None else fill_level.fraction * (-1 if color == 'orange' else 1), y * (-1 if color == 'orange' else 1), x if y % 2 == 0 else -x, z
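        # Reading of the sort key above (comment added for clarity, not part
        # of the original module): chests are grouped by status first; within
        # the 'orange' group the fullest chests and higher floor numbers come
        # first, while the other groups list the emptiest chests and lower
        # floors first; the x term alternates sign per floor so corridors are
        # visited in a snake-like order, and z breaks the remaining ties.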
chunk_cache = {}
with (ati.assets_root / 'json' / 'items.json').open() as items_file:
items_data = json.load(items_file)
cache_path = ati.cache_root / 'cloud-chests.json'
if cache_path.exists():
try:
with cache_path.open() as cache_f:
cache = json.load(cache_f)
except ValueError:
# cache JSON is corrupted, probably because of a full disk, try without cache
cache_path.unlink()
cache = None
else:
cache = None
states = {}
current_color = None
for x, corridor, y, _, z, item_stub in chest_iter():
if isinstance(item_stub, str):
item_stub = {'id': item_stub}
item_name = None
pre_sorter = None
else:
item_stub = item_stub.copy()
if 'name' in item_stub:
item_name = item_stub['name']
del item_stub['name']
else:
item_name = None
if 'sorter' in item_stub:
pre_sorter = item_stub['sorter']
del item_stub['sorter']
else:
pre_sorter = None
color, state_message, fill_level = chest_state((x, y, z), item_stub, len(corridor), item_name, pre_sorter, items_data=items_data, chunk_cache=chunk_cache, cache=cache)
if color is None:
color = 'white'
if color in ('cyan', 'white') and not fill_level.is_empty():
color += '2'
if fill_level is None or not fill_level.is_full() or color not in ('cyan', 'white', 'cyan2', 'white2'):
states[x, y, z] = color, state_message, fill_level, alltheitems.item.Item(item_stub, items_data=items_data)
for coords, state in sorted(states.items(), key=priority):
x, y, z = coords
color, state_message, fill_level, item = state
if color != current_color:
if current_color is not None:
yield '</tbody></table>'
yield bottle.template('<h2 id="{{color}}">{{header}}</h2>', color=color, header=headers[color])
yield '<table class="todo-table table table-responsive"><thead><tr><th class="coord">X</th><th class="coord">Y</th><th class="coord">Z</th><th class="item-image"> </th><th class="item-name">Item</th><th>{}</th></tr></thead><tbody>'.format('Fill Level' if color in ('cyan', 'white', 'cyan2', 'white2') else 'Info')
current_color = color
yield bottle.template("""
<tr>
<td class="coord">{{x}}</td>
<td class="coord">{{y}}</td>
<td class="coord">{{z}}</td>
<td class="item-image">{{!item.image()}}</td>
<td class="item-name">{{!item.link_text()}}</td>
<td style="background-color: {{color}}">{{!fill_level if color in ('#0ff', '#fff') else state_message}}</td>
</tr>
""", x=x, y=y, z=z, item=item, color=HTML_COLORS[color], fill_level=fill_level, state_message=state_message)
yield '</tbody></table>'
yield from ati.html_exceptions(body())
yield ati.footer(linkify_headers=True)
| mit | 6,210,669,962,743,115,000 | 59.09228 | 392 | 0.481912 | false |
amirgeva/coide | mainwindow.py | 1 | 54237 | from PyQt4 import QtCore
from PyQt4 import QtGui
import os
import re
import stat
import qutepart
from workspace import WorkSpace
import output
from consts import FileRole
from gdbwrapper import GDBWrapper
from watchestree import WatchesTree
from breakpoints import BreakpointsDB, BreakpointDialog
from properties import Properties
from functools import partial
from globals import is_src_ext
import utils
import genmake
import uis
import plugins
import dwarf
class MainWindow(QtGui.QMainWindow):
""" Main IDE Window
Contains the main code view, along with docking panes for: source files,
watches, call stack, and output
"""
LIBRARY_SCAN = "Scanning Libraries"
def __init__(self,rootDir,parent=None):
""" Initialize. rootDir indicates where data files are located """
super(MainWindow,self).__init__(parent)
s=QtCore.QSettings()
self.recent_ws=[d for d in s.value('recent_ws','').toString().split(':') if d]
self.symbolScan=s.value('symbol_scan',True).toBool()
self.setMinimumSize(QtCore.QSize(1024,768))
self.currentLine=0
self.currentFile=''
self.rootDir=rootDir
utils.setIconsDir(os.path.join(rootDir,"icons"))
self.debugger=None
self.breakpoints=BreakpointsDB()
self.findDetails=None
self.scm_mods=[]
self.setWindowIcon(utils.loadIcon('coide'))
self.setWindowTitle("Coide")
self.generateQueue=set()
self.editors={}
self.file_times={}
self.central=QtGui.QTabWidget()
self.setCentralWidget(self.central)
self.central.setTabsClosable(True)
self.central.tabCloseRequested.connect(self.closeTab)
self.central.currentChanged.connect(self.tabChanged)
self.tabOrder=[]
self.plugins=plugins.PluginsManager()
self.setupMenu()
self.setupContextMenuItems()
self.setupToolbar(rootDir)
self.showWorkspacePane()
self.showOutputPane()
self.showWatchesPane()
self.showLocalsPane()
self.showCallStackPane()
self.buildProcess=None
self.timerCall=None
self.config=s.value("config").toString()
if self.config=='':
self.config="Debug"
self.configCombo.setCurrentIndex(0 if self.config=='Debug' else 1)
self.workspaceTree.setConfig(self.config)
self.setAllFonts()
self.loadWindowSettings()
# Debugger timer that is supposed to periodically check
# if the program has stopped at a breakpoint
self.timer=QtCore.QTimer(self)
self.timer.timeout.connect(self.update)
self.runningWidget=None
self.asyncPollTimer=QtCore.QTimer(self)
self.asyncPollTimer.timeout.connect(self.pollAsync)
self.generateTimer=QtCore.QTimer()
self.generateTimer.timeout.connect(self.timer1000)
self.generateTimer.start(1000)
self.lowFreqTimer=QtCore.QTimer()
self.lowFreqTimer.timeout.connect(self.timer5000)
self.lowFreqTimer.start(5000)
#self.showStatus("Generating All Makefiles")
#self.timerCall=self.generateAllInThread
self.timerCall=None
self.paneWatches.hide()
self.paneLocals.hide()
self.paneStack.hide()
#self.sc=QtGui.QShortcut("Ctrl+F8",self)
#self.sc.activated.connect(self.prtsc)
def closeEvent(self, event):
""" Called before the application window closes
Informs sub-windows to prepare and saves window settings
to allow future sessions to look the same
"""
self.workspaceTree.onClose()
self.workspaceTree.saveTabs(self.central)
while self.central.count()>0:
if not self.closeFile():
event.ignore()
return
self.timer.stop()
self.generateTimer.stop()
if self.debugger:
self.debugger.closingApp()
settings = QtCore.QSettings()
settings.setValue("geometry", self.saveGeometry())
settings.setValue("windowState", self.saveState())
settings.sync()
self.removeTempScripts()
super(MainWindow,self).closeEvent(event)
def saveDebugWindowState(self):
"""
Save the state of the tool docks, like watches
and call stack
"""
settings = QtCore.QSettings()
settings.setValue("debugWindowState", self.saveState())
settings.sync()
def loadDebugWindowState(self):
"""
Restore previous debug windows layout
"""
settings = QtCore.QSettings()
self.restoreState(settings.value("debugWindowState").toByteArray())
def loadWindowSettings(self):
"""
Restore the window size settings from the previous session
"""
settings = QtCore.QSettings()
self.restoreGeometry(settings.value("geometry").toByteArray())
self.restoreState(settings.value("windowState").toByteArray())
self.loadTabs()
def loadTabs(self):
self.closeAllTabs()
ws=self.workspaceTree.settings()
opentabs=ws.value('opentabs','').toString()
opentabs=opentabs.split(',')
for path in opentabs:
self.openSourceFile(path)
curtab=ws.value('curtab','').toString()
if curtab:
self.setActiveSourceFile(curtab)
def setupMenu(self):
""" Creates the application main menu
The action handlers are also mapped from the toolbar icons
"""
bar=self.menuBar()
m=bar.addMenu('&File')
m.addAction(QtGui.QAction('&Initialize Workspace',self,triggered=self.initWorkspace))
m.addAction(QtGui.QAction('Open &Workspace',self,triggered=self.openWorkspace))
self.recents_menu=m.addMenu('&Recent Workspaces')
m.addAction(QtGui.QAction('&Save',self,shortcut='Ctrl+S',triggered=self.saveFile))
m.addAction(QtGui.QAction('Save &As',self,triggered=self.saveAsFile))
m.addAction(QtGui.QAction('&Close File',self,shortcut='Ctrl+F4',triggered=self.closeFile))
m.addAction(QtGui.QAction('E&xit',self,shortcut='Ctrl+Q',triggered=self.exitApp))
m=bar.addMenu('&Edit')
m.addAction(QtGui.QAction('&Copy',self,shortcut='Ctrl+C',triggered=self.onCopy))
m.addAction(QtGui.QAction('C&ut',self,shortcut='Ctrl+X',triggered=self.onCut))
m.addAction(QtGui.QAction('&Paste',self,shortcut='Ctrl+V',triggered=self.onPaste))
m.addSeparator()
m.addAction(QtGui.QAction('&Find/Replace',self,shortcut='Ctrl+F',triggered=self.onFindReplace))
m.addAction(QtGui.QAction('Find/Replace &Next',self,shortcut='F3',triggered=self.onFindNext))
m=bar.addMenu('&View')
panes=m.addMenu('Panes')
panes.addAction(QtGui.QAction('&Workspace',self,triggered=self.onViewPaneWorkspace))
panes.addAction(QtGui.QAction('&Output',self,triggered=self.onViewPaneOutput))
m.addAction(QtGui.QAction('&Next Tab',self,shortcut='Ctrl+F6',triggered=self.onViewNextTab))
m=bar.addMenu('&Build')
m.addAction(QtGui.QAction('&Build',self,shortcut='F7',triggered=self.build))
m.addAction(QtGui.QAction('&Clean',self,triggered=self.clean))
m.addAction(QtGui.QAction('&Rebuild',self,shortcut='Shift+F7',triggered=self.rebuild))
m.addAction(QtGui.QAction('&Settings',self,shortcut='Ctrl+F7',triggered=self.buildSettings))
m.addAction(QtGui.QAction('&Next Error',self,shortcut='F4',triggered=self.nextError))
m=bar.addMenu('&Debug')
m.addAction(QtGui.QAction('&Run',self,shortcut='Ctrl+F5',triggered=self.runProject))
m.addAction(QtGui.QAction('&Start/Continue Debugger',self,shortcut='F5',triggered=self.startDebug))
ma=m.addMenu('Actions')
ma.addAction(QtGui.QAction('&Step',self,shortcut='F11',triggered=self.actStep))
ma.addAction(QtGui.QAction('&Next',self,shortcut='F10',triggered=self.actNext))
ma.addAction(QtGui.QAction('Step &Out',self,shortcut='Shift+F11',triggered=self.actOut))
ma.addAction(QtGui.QAction('&Break',self,shortcut='Ctrl+C',triggered=self.actBreak))
ma.addAction(QtGui.QAction('Sto&p',self,shortcut='Shift+F5',triggered=self.actStop))
ma=m.addMenu('&Breakpoints')
ma.addAction(QtGui.QAction('&Clear',self,triggered=self.clearBreakpoints))
m=bar.addMenu('&Settings')
m.addAction(QtGui.QAction('&General',self,triggered=self.settingsGeneral))
m.addAction(QtGui.QAction('&Fonts',self,triggered=self.settingsFonts))
m.addAction(QtGui.QAction('&Editor',self,triggered=self.settingsEditor))
m.addAction(QtGui.QAction('&Templates',self,triggered=self.settingsTemplates))
m.addAction(QtGui.QAction('&Plugins',self,triggered=self.settingsPlugins))
m=bar.addMenu('&Tools')
pm=m.addMenu('&Plugins')
self.plugins.addToMenu(pm)
def onViewPaneWorkspace(self):
self.paneWorkspace.show()
def onViewPaneOutput(self):
self.paneOutput.show()
def onViewNextTab(self):
count=self.central.count()
if count>0:
if len(self.tabOrder)!=count:
self.tabOrder=range(0,self.central.count())
if self.central.currentIndex() == self.tabOrder[0]:
self.tabOrder=self.tabOrder[1:]+self.tabOrder[:1]
self.central.setCurrentIndex(self.tabOrder[0])
def setupContextMenuItems(self):
self.contextMenuItems={
'all':[
QtGui.QAction('Toggle Breakpoint',self,triggered=self.contextToggleBreakpoint)
],
'files':[
QtGui.QAction('Open Header',self,triggered=self.contextOpenHeader)
],
'breakpoints':[
QtGui.QAction('Edit Breakpoint',self,triggered=self.contextEditBreakpoint),
QtGui.QAction('Dis/Enable Breakpoint',self,triggered=self.contextAbleBreakpoint)
],
'symbols':[
QtGui.QAction('Goto Definition',self,triggered=self.contextGotoDefinition)
]
}
def insertContextMenuItems(self,editor,menu):
first=None
acts=menu.actions()
if len(acts)>0:
first=acts[0]
actions=list(self.contextMenuItems.get('all'))
path=editor.path
line=editor.contextMenuLine
word=editor.contextMenuWord
self.context=(path,line,word)
if len(word)>0:
actions.extend(self.contextMenuItems.get('symbols'))
if self.breakpoints.hasBreakpoint(path,line):
actions.extend(self.contextMenuItems.get('breakpoints'))
if self.workspaceTree.exists(editor.contextFilename):
actions.extend(self.contextMenuItems.get('files'))
menu.insertActions(first,actions)
menu.insertSeparator(first)
def contextGotoDefinition(self):
src=os.path.join(self.workspaceTree.root,'src')
intr=os.path.join(self.workspaceTree.root,'.intr')
srcpath=self.context[0]
objpath=''
if srcpath.startswith(src) and is_src_ext(srcpath):
rel=srcpath[len(src):]
rel=rel[1:-4]+'.o'
objpath=os.path.join(intr,rel)
(dir,name)=os.path.split(objpath)
objpath=os.path.join(dir,'Debug',name)
if srcpath.startswith(self.workspaceTree.root) and srcpath.endswith('.h'):
dir=self.workspaceTree.mainPath()
mkPath=os.path.join(dir,'Makefile')
objpath=utils.objForHeader(mkPath,srcpath)
if len(objpath)>0:
try:
s=dwarf.DwarfSymbols(objpath)
(path,line)=s.find(self.context[2])
if len(path)>0:
self.goToSource(path,line,1)
except IOError:
utils.message('Project must first be compiled in Debug')
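    # Illustration (hypothetical paths, not from the original source): with the
    # workspace rooted at /ws, a source file /ws/src/foo/bar.cpp is mapped above
    # to the Debug object file /ws/.intr/foo/Debug/bar.o:
    #
    #   rel = '/foo/bar.cpp'[1:-4] + '.o'   # -> 'foo/bar.o'
    #   objpath -> '/ws/.intr/foo/Debug/bar.o'
    #
    # whose DWARF symbols are then searched for the word under the cursor.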
def contextToggleBreakpoint(self):
e=self.central.currentWidget()
self.breakpoints.toggleBreakpoint(e)
e.update()
def contextEditBreakpoint(self):
e=self.central.currentWidget()
path=e.path
line=e.contextMenuLine
bp=self.breakpoints.getBreakpoint(path,line)
if bp:
d=BreakpointDialog()
d.condition.setText(bp.condition())
utils.setCheckbox(d.enabled,bp.isEnabled())
if d.exec_():
bp.setCondition(d.condition.text())
bp.able(utils.getCheckbox(d.enabled))
self.breakpoints.update()
e.update()
def contextAbleBreakpoint(self):
e=self.central.currentWidget()
path=e.path
line=e.contextMenuLine
bp=self.breakpoints.getBreakpoint(path,line)
if bp:
if bp.isEnabled():
bp.disable()
else:
bp.enable()
self.breakpoints.update()
e.update()
def contextOpenHeader(self):
e=self.central.currentWidget()
filename=self.workspaceTree.exists(e.contextFilename)
if filename:
self.workspaceTree.openFile(filename)
def markToggleBreakpoint(self,line):
e=self.central.currentWidget()
#path=e.path
self.breakpoints.toggleBreakpoint(e)
e.update()
def createPluginCuror(self):
from pcursor import PluginCursor
e=self.central.currentWidget()
if e:
return PluginCursor(e.textCursor())
return None
def setupToolbar(self,rootDir):
""" Creates the application main toolbar """
tb=self.addToolBar('Actions')
tb.setObjectName("Toolbar")
tb.addAction(utils.loadIcon('gear'),'Generate Makefiles').triggered.connect(self.generate)
self.configCombo=self.createConfigCombo(tb)
tb.addWidget(self.configCombo)
tb.addAction(utils.loadIcon('step.png'),'Step').triggered.connect(self.actStep)
tb.addAction(utils.loadIcon('next.png'),'Next').triggered.connect(self.actNext)
tb.addAction(utils.loadIcon('out.png'),'Out').triggered.connect(self.actOut)
tb.addAction(utils.loadIcon('cont.png'),'Continue').triggered.connect(self.actCont)
tb.addAction(utils.loadIcon('break.png'),'Break').triggered.connect(self.actBreak)
tb.addAction(utils.loadIcon('stop.png'),'Stop').triggered.connect(self.actStop)
self.createTemplatesCombo(tb)
tb.addWidget(self.tmplCombo)
def exitApp(self):
self.close()
def nextError(self):
e=self.outputEdit.getNextError()
if e:
self.showStatus(e[3])
self.goToSource(e[0],e[1],e[2],'#ff8080')
self.outputEdit.highlightLine(e[4])
def onCopy(self):
(e,p)=self.currentEditor()
if e:
e.copy()
def onCut(self):
(e,p)=self.currentEditor()
if e:
e.cut()
def onPaste(self):
(e,p)=self.currentEditor()
if e:
e.paste()
def onFindReplace(self):
(e,p)=self.currentEditor()
if e:
from finddlg import FindDialog
d=FindDialog(self)
c=e.textCursor()
            if c.hasSelection():
d.setFindText(c.selectedText())
if d.exec_():
self.findDetails=d.details
self.onFindNext()
def onFindNext(self):
(e,p)=self.currentEditor()
if e and self.findDetails:
flags=QtGui.QTextDocument.FindFlags()
if not self.findDetails.get('find_case'):
flags = flags | QtGui.QTextDocument.FindCaseSensitively
if self.findDetails.get('find_words'):
flags = flags | QtGui.QTextDocument.FindWholeWords
if self.findDetails.get('find_back'):
flags = flags | QtGui.QTextDocument.FindBackward
text=self.findDetails.get('find_text')
replaceText=self.findDetails.get('find_replace_text')
replace=self.findDetails.get('find_replace')
all=self.findDetails.get('find_all')
if all and replace:
while e.find(text,flags):
e.textCursor().insertText(replaceText)
elif e.find(text,flags):
if replace:
e.textCursor().insertText(replaceText)
def settingsTemplates(self):
""" Show the code templates editing dialog """
from settings import TemplatesDialog
d=TemplatesDialog()
if d.exec_():
d.save()
self.updateTemplates()
def settingsPlugins(self):
""" Show the python plugins settings dialog """
from plugins import PluginsDialog
d=PluginsDialog()
if d.exec_():
d.save()
def settingsGeneral(self):
""" Show the general settings """
from settings import GeneralSettingsDialog
d=GeneralSettingsDialog()
if d.exec_():
d.save()
self.updateGeneralSettings()
def settingsEditor(self):
""" Show the editor settings """
from settings import EditorSettingsDialog
d=EditorSettingsDialog()
if d.exec_():
d.save()
self.updateEditorsSettings()
def settingsFonts(self):
""" Edit the font settings for the code window and various panes """
from settings import FontSettingsDialog
d=FontSettingsDialog()
if d.exec_():
self.setAllFonts()
def loadFont(self,name,target):
""" Load previously saved font settings """
settings=QtCore.QSettings()
if settings.contains(name):
fb=settings.value(name).toByteArray()
buf=QtCore.QBuffer(fb)
buf.open(QtCore.QIODevice.ReadOnly)
font=QtGui.QFont()
QtCore.QDataStream(fb) >> font
target.setFont(font)
else:
target.setFont(QtGui.QFont('Monospace',14))
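    # Sketch of the matching save side (an assumption -- the dialogs that write
    # 'codefont'/'watchesfont' live outside this file). A QFont is serialized
    # into a QByteArray via QDataStream before being stored in QSettings:
    #
    #   fb = QtCore.QByteArray()
    #   QtCore.QDataStream(fb, QtCore.QIODevice.WriteOnly) << font
    #   QtCore.QSettings().setValue('codefont', fb)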
def setAllFonts(self):
""" Apply fonts to the various sub-windows """
for e in self.editors:
self.loadFont('codefont',self.editors.get(e))
#self.loadFont('watchesfont',self.watchesTree)
#self.loadFont('watchesfont',self.stackList)
self.loadFont('watchesfont',self.outputEdit)
self.loadFont('sourcesfont',self.workspaceTree)
def updateGeneralSettings(self):
""" Apply general settings """
s=QtCore.QSettings()
sortFiles=s.value('sortFiles',True).toBool()
self.workspaceTree.setSorting(sortFiles)
def updateEditorsSettings(self):
""" Apply editor settings to all open tabs """
s=QtCore.QSettings()
indent=(s.value('indent',2).toInt())[0]
clang=s.value('clangCompletion',True).toBool()
for e in self.editors:
self.editors.get(e).indentWidth=indent
self.editors.get(e).clangCompletion=clang
def updateTemplates(self):
self.tmplCombo.clear()
self.tmplCombo.addItem("= Templates =")
d=QtCore.QSettings().value('tmplDir','').toString()
if d:
templates=os.listdir(d)
templates=[os.path.splitext(t)[0] for t in templates if t.endswith('.template')]
for t in templates:
self.tmplCombo.addItem(t)
def showStatus(self,status):
self.statusBar().showMessage(status)
def findUndefinedReferences(self,output):
"""
Search the linker output to find undefined reference
errors, and collect the missing symbol names
"""
undefined=set()
base='undefined reference to '
if output:
for line in output:
p=line.find(base)
if p>0:
name=line[(p+len(base)):]
if name.startswith('symbol '):
name=name[8:]
else:
name=name[1:]
p=name.find('(')
if p>0:
name=name[0:p]
else:
name=name[0:len(name)-1]
p=name.find('@')
if p>0:
name=name[0:p]
undefined.add(name)
return undefined
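    # Worked example (illustrative input): a linker line such as
    #   foo.o: In function `main': undefined reference to `bar::baz(int)'
    # is reduced by the slicing above to the bare symbol 'bar::baz'
    # before being added to the returned set.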
def toggleAdded(self,item):
if item.checkState():
self.added.add(item.text())
else:
self.added.remove(item.text())
def attemptUndefResolution(self,undefs):
if not self.symbolScan:
return
from system import getLibrarySymbols, getWorkspaceSymbols
suggested={}
syms=getLibrarySymbols()
wsSyms=getWorkspaceSymbols()
for sym in undefs:
words=sym.split(':')
words=[w for w in words if w]
words.append(sym)
for word in words:
if word in syms:
s=syms.get(word)
for l in s:
if not l in suggested:
suggested[l]=1
else:
n=suggested.get(l)+1
suggested[l]=n
if word in wsSyms:
s=wsSyms.get(word)
for l in s:
if not l in suggested:
suggested[l]=1
else:
n=suggested.get(l)+1
suggested[l]=n
self.added=set()
if len(suggested)>0:
d=uis.loadDialog('libsuggest')
model = QtGui.QStandardItemModel(d.libsList)
for s in suggested:
item=QtGui.QStandardItem(s)
item.setCheckable(True)
model.appendRow(item)
d.libsList.setModel(model)
model.itemChanged.connect(lambda item : self.toggleAdded(item))
if d.exec_():
self.workspaceTree.addLibrariesToProject(self.added)
def buildSettings(self,path=''):
from buildsettings import BuildSettingsDialog
if not path:
path=self.workspaceTree.mainPath()
if not path:
path=self.workspaceTree.root
d=BuildSettingsDialog(self,path)
d.exec_()
self.generateQueue.add(path)
def checkBuildOutput(self):
if self.buildProcess:
self.processBuildOutput(self.buildProcess.text)
self.buildProcess=None
def pollAsync(self):
rcs=utils.pollAsync()
if len(rcs)>0:
if rcs[0]==0:
utils.appendColorLine(self.outputEdit,"Success...",'#008020')
else:
utils.appendColorLine(self.outputEdit,"= Failed ({}) =".format(rcs[0]),'#ff0000')
self.checkBuildOutput()
self.asyncPollTimer.stop()
self.showStatus("Done")
def execute(self,path,cmd,*args):
if utils.pendingAsync():
self.showStatus('Busy')
return None
self.outputEdit.clearAll()
p=utils.execute(self.outputEdit,path,cmd,*args)
if not self.asyncPollTimer.isActive():
self.asyncPollTimer.start(10)
return p
def buildSpecific(self,path):
self.saveAll()
self.autoGenerate()
if len(path)>0:
self.showStatus("Building "+os.path.basename(path))
s=QtCore.QSettings()
if s.value('parallel_make',False).toBool():
self.buildProcess=self.execute(path,'/usr/bin/make','-j','3',self.config)
else:
self.buildProcess=self.execute(path,'/usr/bin/make',self.config)
def processBuildOutput(self,output):
undefs=self.findUndefinedReferences(output)
if len(undefs)>0:
self.attemptUndefResolution(undefs)
def build(self):
self.buildSpecific(self.workspaceTree.mainPath())
def cleanSpecific(self,path):
if len(path)>0:
self.execute(path,'/usr/bin/make','clean_{}'.format(self.config))
def clean(self):
self.cleanSpecific(self.workspaceTree.mainPath())
def rebuildSpecific(self,path):
if len(path)>0:
cfg=self.config
self.showStatus("Rebuilding "+os.path.basename(path))
self.buildProcess=self.execute(path,'/usr/bin/make','clean_'+cfg,cfg)
def rebuild(self):
self.rebuildSpecific(self.workspaceTree.mainPath())
def autoGenerateRun(self):
for path in self.generateQueue:
genmake.generateDirectory(self.workspaceTree.root,path)
self.generateQueue.clear()
self.showStatus('Ready')
def autoGenerate(self):
if len(self.generateQueue)>0:
self.showStatus('Generating Makefiles')
self.timerCall=self.autoGenerateRun
else:
if genmake.genThreadDone():
self.showStatus("Makefile Generate Done")
def waitForScanner(self):
if self.symbolScan:
import system
import time
while not system.isScannerDone():
time.sleep(1)
def timer1000(self):
e=self.central.currentWidget()
if e:
updates=self.breakpoints.updateLineNumbers(e.path)
for path in updates:
e=self.editors.get(path)
if e:
e.update()
if self.timerCall:
f=self.timerCall
self.timerCall=None
f()
self.autoGenerate()
#if self.statusBar().currentMessage() == MainWindow.LIBRARY_SCAN:
if self.symbolScan:
import system
if system.isScannerDone():
#if system.scanq and not system.scanq.empty():
if self.statusBar().currentMessage() == MainWindow.LIBRARY_SCAN:
self.showStatus('Ready')
system.getLibrarySymbols()
def timer5000(self):
import scm
res=scm.scan(self.workspaceTree.root)
if res:
new_scm_mods=[]
for (name,status) in res:
path=os.path.join(self.workspaceTree.root,name)
if path in self.workspaceTree.fileItems:
item=self.workspaceTree.fileItems.get(path)
if status=='Modified':
item.setForeground(0,QtGui.QBrush(QtGui.QColor(255,0,0)))
elif status=='Staged':
item.setForeground(0,QtGui.QBrush(QtGui.QColor(0,255,0)))
new_scm_mods.append(item)
for item in self.scm_mods:
if not item in new_scm_mods:
item.setForeground(0,QtGui.QBrush(QtGui.QColor(0,0,0)))
self.scm_mods=new_scm_mods
for path in self.editors:
last=self.file_times.get(path)
cur=os.path.getmtime(path)
if cur!=last:
self.file_times[path]=cur
res=QtGui.QMessageBox.question(self,'File changed','Reload {}'.format(path),QtGui.QMessageBox.Yes,QtGui.QMessageBox.No)
if res==QtGui.QMessageBox.Yes:
text=''.join(open(path,'r').readlines())
self.editors.get(path).text=text
def generateAllInThread(self):
genmake.generateTree(self.workspaceTree.root,False)
def generateAll(self):
genmake.generateTree(self.workspaceTree.root,True)
def generate(self):
mb=QtGui.QMessageBox()
mb.setText("Generate make files")
mb.setInformativeText("Overwrite all make files?")
mb.setStandardButtons(QtGui.QMessageBox.Yes|QtGui.QMessageBox.No)
mb.setDefaultButton(QtGui.QMessageBox.Yes)
rc=mb.exec_()
if rc==QtGui.QMessageBox.Yes:
self.generateAll()
utils.message("Done")
def createHelloWorldProject(self,dir):
try:
os.makedirs(dir)
except OSError:
pass
mainpath=os.path.join(dir,'main.cpp')
f=open(mainpath,"w")
f.write('#include <iostream>\n\n\nint main(int argc, char* argv[])\n')
f.write('{\n std::cout << "Hello World" << std::endl;\n return 0;\n}\n')
f.close()
self.workspaceTree.update()
genmake.generateDirectory(self.workspaceTree.root,dir)
self.workspaceTree.setMainPath(dir)
def initWorkspace(self):
d=QtGui.QFileDialog()
d.setFileMode(QtGui.QFileDialog.Directory)
d.setOption(QtGui.QFileDialog.ShowDirsOnly)
if d.exec_():
ws=(d.selectedFiles())[0]
os.makedirs(os.path.join(ws,'include'))
dir=os.path.join(ws,'src','hello')
self.workspaceTree.setWorkspacePath(ws)
self.createHelloWorldProject(dir)
self.workspaceTree.saveSettings()
self.generateAll()
def updateRecents(self):
ws=self.workspaceTree.root
if ws in self.recent_ws:
del self.recent_ws[self.recent_ws.index(ws)]
self.recent_ws.insert(0,ws)
while len(self.recent_ws)>4:
del self.recent_ws[-1]
s=QtCore.QSettings()
s.setValue('recent_ws',':'.join(self.recent_ws))
s.sync()
self.recents_menu.clear()
handlers=[partial(self.openRecent,w) for w in self.recent_ws]
for ws,h in zip(self.recent_ws,handlers):
self.recents_menu.addAction(QtGui.QAction(ws,self,triggered=h))
def openRecent(self,ws):
self.workspaceTree.saveTabs(self.central)
self.closeAllTabs()
self.workspaceTree.setWorkspacePath(ws)
#self.generateAll()
self.loadTabs()
self.waitForScanner()
import symbolscanner
symbolscanner.setWorkspacePath(ws)
self.updateRecents()
def openWorkspace(self):
d=QtGui.QFileDialog()
d.setFileMode(QtGui.QFileDialog.Directory)
d.setOption(QtGui.QFileDialog.ShowDirsOnly)
if d.exec_():
ws=(d.selectedFiles())[0]
self.openRecent(ws)
def saveTabFile(self,index):
n=self.central.tabBar().count()
if index>=0 and index<n:
path=self.central.tabToolTip(index)
editor=self.editors.get(path)
if editor:
doc=editor.document()
if doc.isModified():
f=open(path,'w')
if not f:
utils.errorMessage('Cannot write file: {}'.format(path))
return
f.write(doc.toPlainText())
f.close()
doc.setModified(False)
self.file_times[path]=os.path.getmtime(path)
#dir=os.path.dirname(path)
#self.generateQueue.add(dir)
if self.symbolScan:
from system import getLibrarySymbols
getLibrarySymbols()
from symbolscanner import rescanOnFileSave
rescanOnFileSave(path)
def saveFile(self):
n=self.central.tabBar().count()
if n>0:
self.saveTabFile(self.central.currentIndex())
def saveAll(self):
n=self.central.tabBar().count()
for i in xrange(0,n):
self.saveTabFile(i)
def saveAsFile(self):
pass
def closeAllTabs(self):
while self.central.count()>0:
if not self.closeTab(0):
return False
return True
def tabChanged(self,index):
for i in xrange(0,len(self.tabOrder)):
if self.tabOrder[i]==index:
self.tabOrder=self.tabOrder[i:]+self.tabOrder[:i]
break
def closeTab(self,index):
path=self.central.tabToolTip(index)
editor=self.editors.get(path)
if editor:
doc=editor.document()
if doc.isModified():
mb = QtGui.QMessageBox()
mb.setText("{} has been modified.".format(os.path.basename(path)))
mb.setInformativeText("Do you want to save your changes?")
mb.setStandardButtons(QtGui.QMessageBox.Save | QtGui.QMessageBox.Discard | QtGui.QMessageBox.Cancel)
mb.setDefaultButton(QtGui.QMessageBox.Save)
rc = mb.exec_()
if rc == QtGui.QMessageBox.Save:
f=open(path,'w')
if not f:
utils.errorMessage('Cannot write file: {}'.format(path))
return False
f.write(doc.toPlainText())
f.close()
elif rc == QtGui.QMessageBox.Cancel:
return False
del self.editors[path]
del self.file_times[path]
self.central.removeTab(index)
return True
def closeFile(self):
n=self.central.tabBar().count()
if n>0:
index=self.central.currentIndex()
return self.closeTab(index)
return False
def currentEditor(self):
if self.central.count()>0:
cur=self.central.currentIndex()
path=self.central.tabToolTip(cur)
if path in self.editors:
return (self.editors.get(path),path)
return (None,None)
def templateSelected(self,index):
(editor,path)=self.currentEditor()
if index>0 and editor:
template=self.tmplCombo.itemText(index)
d=QtCore.QSettings().value('tmplDir','').toString()
if d:
tpath=os.path.join(d,template+".template")
try:
f=open(tpath,'r')
code=f.read()
if code:
cursor=editor.textCursor()
props=Properties()
props.assign('PATH',path)
base=os.path.basename(path)
props.assign('FILENAME',base)
p=base.find('.')
if (p>0):
props.assign('FILEBASE',base[0:p])
props.assign('SELECTION',cursor.selectedText())
cursor.removeSelectedText()
import templates
text=templates.generateCode(code,props)
cursor.insertText(text)
except IOError:
utils.errorMessage("Cannot read file: {}".format(path))
self.tmplCombo.setCurrentIndex(0)
def showWorkspacePane(self):
""" Creates a docking pane that shows a list of source files """
self.paneWorkspace=QtGui.QDockWidget("Workspace",self)
self.paneWorkspace.setObjectName("Workspace")
self.paneWorkspace.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea|QtCore.Qt.RightDockWidgetArea)
self.workspaceTree=WorkSpace(self.paneWorkspace,self)
self.workspaceTree.depsChanged.connect(lambda path: self.generateQueue.add(path))
self.paneWorkspace.setWidget(self.workspaceTree)
self.addDockWidget(QtCore.Qt.LeftDockWidgetArea,self.paneWorkspace)
self.updateWorkspace()
self.workspaceTree.doubleClicked.connect(self.docDoubleClicked)
self.showStatus(MainWindow.LIBRARY_SCAN)
if self.symbolScan:
from system import startSymbolScan
startSymbolScan(self.workspaceTree.root)
else:
from system import disableSymbolScan
disableSymbolScan()
self.updateRecents()
def updateWorkspace(self):
self.workspaceTree.update()
def setActiveSourceFile(self,path):
if path in self.editors:
editor=self.editors.get(path)
n=self.central.tabBar().count()
for i in xrange(0,n):
if self.central.widget(i) == editor:
self.central.tabBar().setCurrentIndex(i)
return True
return False
def fixPath(self,path):
if path.startswith(self.rootDir):
path=os.path.relpath(path,self.rootDir)
return path
'''
Makes the path given the active source file in the editor.
If the file is already open, it is made active.
If not, it is opened and made active.
Function returns true if the file is found and opened
'''
def openSourceFile(self,path):
path=self.fixPath(path)
if self.setActiveSourceFile(path):
return True
else:
try:
f=open(path,"r")
if not f:
return False
lines=f.readlines()
if lines:
firstLine=lines[0]
s=QtCore.QSettings()
editor=qutepart.Qutepart()
editor.setPath(path)
editor.detectSyntax(sourceFilePath=path, firstLine=firstLine)
editor.lineLengthEdge = 1024
editor.drawIncorrectIndentation = True
editor.drawAnyWhitespace = False
editor.indentUseTabs = False
editor.indentWidth = (s.value('indent',2).toInt())[0]
editor.text="".join(lines)
editor.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap)
editor.setWorkspace(self.workspaceTree)
editor.setMainWindow(self)
index=self.central.addTab(editor,os.path.basename(path))
self.central.setTabToolTip(index,path)
self.editors[path]=editor
self.file_times[path]=os.path.getmtime(path)
self.loadFont('codefont',editor)
self.central.tabBar().setCurrentIndex(index)
bps=self.breakpoints.pathBreakpoints(path)
editor.bpMarks=bps
editor._markArea.blockDoubleClicked.connect(self.markToggleBreakpoint)
return True
except IOError:
return False
return False
def docDoubleClicked(self,index):
item=self.workspaceTree.currentItem()
path=item.data(0,FileRole).toString()
if len(path)>0:
self.openSourceFile(path)
if path in self.editors:
self.editors.get(path).setFocus(QtCore.Qt.MouseFocusReason)
def goToSource(self,path,row,col,color=''):
"""
Given a file path, and a position within, open a tab
or switch to an already open tab, and scroll to that
position. Usually useful to find references or
compiler error positions
"""
path=self.fixPath(path)
if self.openSourceFile(path):
editor=self.editors.get(path)
if editor:
self.setActiveSourceFile(path)
c=editor.textCursor()
c.movePosition(QtGui.QTextCursor.Start)
c.movePosition(QtGui.QTextCursor.Down,n=row-1)
c.movePosition(QtGui.QTextCursor.Right,n=col-1)
editor.setTextCursor(c)
editor.ensureCursorVisible()
if len(color)>0:
editor.colorLine(row,color)
def showCallStackPane(self):
self.paneStack=QtGui.QDockWidget("Call Stack",self)
self.paneStack.setObjectName("CallStack")
self.paneStack.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea)
self.stackList=QtGui.QListWidget(self.paneStack)
self.paneStack.setWidget(self.stackList)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneStack)
self.loadFont('watchesfont',self.stackList)
self.stackList.itemDoubleClicked.connect(self.stackItemDoubleClicked)
def showLocalsPane(self):
self.paneLocals=QtGui.QDockWidget("Locals",self)
self.paneLocals.setObjectName("Locals")
self.paneLocals.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea)
self.localsTree=WatchesTree(self.paneLocals)
self.localsTree.setColumnCount(2)
self.localsTree.setHeaderLabels(['Name','Value'])
self.paneLocals.setWidget(self.localsTree)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneLocals)
        self.loadFont('watchesfont',self.localsTree)
def showWatchesPane(self):
self.paneWatches=QtGui.QDockWidget("Watches",self)
self.paneWatches.setObjectName("Watches")
self.paneWatches.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea)
self.watchesTree=WatchesTree(self.paneWatches)
self.watchesTree.setColumnCount(2)
self.watchesTree.setHeaderLabels(['Name','Value'])
self.paneWatches.setWidget(self.watchesTree)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneWatches)
self.loadFont('watchesfont',self.watchesTree)
self.watchesTree.addTopLevelItem(QtGui.QTreeWidgetItem(['* Double-Click for new watch']))
self.watchesTree.resizeColumnToContents(0)
self.watchesTree.itemDoubleClicked.connect(lambda item,column : self.watchDoubleClicked(item,column))
def showOutputPane(self):
self.paneOutput=QtGui.QDockWidget("Output",self)
self.paneOutput.setObjectName("Output")
self.paneOutput.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea)
self.outputEdit=output.OutputWidget(self.paneOutput,self)
self.outputEdit.setReadOnly(True)
self.paneOutput.setWidget(self.outputEdit)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneOutput)
def stackItemDoubleClicked(self,item):
pat='at (.+):(\d+)'
m=re.search(pat,item.text())
if m:
g=m.groups()
path=g[0]
line=int(g[1])
self.goToSource(path,line,1)
else:
row=self.stackList.row(item)
if row<(self.stackList.count()-1):
self.stackItemDoubleClicked(self.stackList.item(row+1))
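    # Illustration (hypothetical frame text): a backtrace entry such as
    #   #1  0x0000000000400b2d in foo () at /ws/src/main.cpp:42
    # matches the 'at (.+):(\d+)' pattern and jumps to /ws/src/main.cpp line 42;
    # entries without a source location fall through to the next frame below them.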
def watchDoubleClicked(self,item,column):
""" Edits existing watches, or adds a new watch """
changed=False
index=self.watchesTree.indexOfTopLevelItem(item)
if item.text(column)=='* Double-Click for new watch':
res=QtGui.QInputDialog.getText(self,'New Watch','Expression')
expr=res[0]
if len(expr)>0 and res[1]:
self.watchesTree.insertTopLevelItem(index,QtGui.QTreeWidgetItem([expr]))
changed=True
self.updateWatches()
else:
watch=item.text(0)
res=QtGui.QInputDialog.getText(self,"Edit Watch",'Expression',text=watch)
watch=res[0]
if res[1]:
changed=True
if len(watch)>0:
item.setText(0,watch)
self.updateWatches()
else:
self.watchesTree.takeTopLevelItem(index)
if changed:
self.saveWatches()
def createConfigCombo(self,parent):
configCombo=QtGui.QComboBox(parent)
configCombo.addItem("Debug")
configCombo.addItem("Release")
configCombo.currentIndexChanged.connect(self.configChanged)
return configCombo
def createTemplatesCombo(self,parent):
self.tmplCombo=QtGui.QComboBox(parent)
self.tmplCombo.currentIndexChanged.connect(self.templateSelected)
self.updateTemplates()
def configChanged(self,index):
configs=['Debug','Release']
self.config=configs[index]
s=QtCore.QSettings()
s.setValue("config",self.config)
s.sync()
self.workspaceTree.setConfig(self.config)
def addOutputText(self,added):
""" Append the new text captured
Text is appended to the end of existing text and the widget
is scrolled to show the end
"""
text=self.outputEdit.toPlainText()
self.outputEdit.setPlainText(text+added)
c=self.outputEdit.textCursor()
c.movePosition(QtGui.QTextCursor.End)
self.outputEdit.setTextCursor(c)
self.outputEdit.ensureCursorVisible()
def tempScriptPath(self):
"""
Generate a temporary script name. Used for running programs
with an additional wait for key at the end.
"""
from time import time
t=int(time()*10)
return '/tmp/coide_{}.sh'.format(t)
def removeTempScripts(self):
"""
Remove all temporary script files. Called before program
exit
"""
files=os.listdir('/tmp')
files=[f for f in files if f.startswith('coide_')]
for f in files:
os.remove('/tmp/{}'.format(f))
def runProject(self):
if not utils.checkFor('xterm'):
utils.message("xterm not installed")
return
path=self.tempScriptPath()
f=open(path,'w')
dir=self.workspaceTree.getDebugDirectory()
cmd=self.workspaceTree.getExecutablePath()
params=self.workspaceTree.getDebugParams()
if len(params)>0:
cmd=cmd+" "+params
f.write('#!/bin/sh\ncd {}\n{}\nread -r -p "Press any key..." key\n'.format(dir,cmd))
f.close()
os.chmod(path,stat.S_IRUSR|stat.S_IWUSR|stat.S_IXUSR)
utils.run('/tmp','xterm','-fn','10x20','-e',path)
def getCurrentFile(self):
if self.central.count()==0:
return ''
return self.central.tabToolTip(self.central.currentIndex())
def getCurrentEditor(self):
path=self.getCurrentFile()
if len(path)>0:
return self.editors.get(path)
def updatePosition(self):
""" Query current position and update the code view """
changed=False
poslist=self.debugger.getCurrentPos()
if poslist and len(poslist)>0:
for (path,line) in poslist:
if self.getCurrentFile()==path:
if self.currentLine!=line:
changed=True
break
if self.openSourceFile(path):
changed=True
break
e=self.editors.get(path)
if changed and e:
e.colorLine(line,'#0080ff')
e.cursorPosition=(line-1,1)
self.currentLine=line
e.ensureCursorVisible()
def saveWatches(self):
""" Save all watches to settings, for future sessions """
res=[]
n=self.watchesTree.topLevelItemCount()-1
for i in xrange(0,n):
item=self.watchesTree.topLevelItem(i)
if len(res)>0:
res.append(';')
res.append(item.text(0))
settings=QtCore.QSettings()
key='watches:{}'.format(self.debugger.debugged)
settings.setValue(key,''.join(res))
def loadWatches(self):
""" Load all previous session watches from settings """
while self.watchesTree.topLevelItemCount()>1:
self.watchesTree.takeTopLevelItem(0)
settings=QtCore.QSettings()
key='watches:{}'.format(self.debugger.debugged)
val=settings.value(key,'').toString()
if len(val)>0:
arr=val.split(';')
if len(arr)>0:
res=[]
for watch in arr:
res.append(QtGui.QTreeWidgetItem([watch]))
self.watchesTree.insertTopLevelItems(0,res)
def updateLocals(self):
locals=self.debugger.getLocals()
self.localsTree.clear()
for var in locals.keys():
item=QtGui.QTreeWidgetItem([var])
self.localsTree.addTopLevelItem(item)
res=locals.get(var)
if res:
self.updateWatchItem(item,res)
def updateWatches(self):
""" Re-evaluate the value of each watch and update view """
n=self.watchesTree.topLevelItemCount()-1
for i in xrange(0,n):
item=self.watchesTree.topLevelItem(i)
item.takeChildren()
expr=item.text(0)
res=self.debugger.evaluate(expr)
if res:
self.updateWatchItem(item,res)
def updateWatchItem(self,item,root):
item.setText(1,root.value)
def addChildren(item,node):
for c in node.children:
subitem=QtGui.QTreeWidgetItem([c.name])
subitem.setText(1,c.value)
item.addChild(subitem)
addChildren(subitem,c)
addChildren(item,root)
def updateCallstack(self):
bt=self.debugger.getBackTrace()
self.stackList.clear()
for line in bt:
self.stackList.addItem(line)
def startDebug(self):
if self.debugger:
self.actCont()
return
self.outputEdit.setPlainText('')
cmd=[self.workspaceTree.getExecutablePath()]
args=self.workspaceTree.getDebugParams().split()
cwd=self.workspaceTree.getDebugDirectory()
if len(cwd)<1:
cwd=self.workspaceTree.mainPath()
for a in args:
cmd.append(a)
self.debugger=GDBWrapper(self.breakpoints,cmd,cwd)
#self.showWatchesPane()
#self.showCallStackPane()
#self.loadDebugWindowState()
self.showDebugPanes()
self.loadWatches()
self.timer.start(50)
qutepart.evaluator=self.debugger.evaluateAsText
def stopDebugger(self):
if self.debugger:
qutepart.evaluator=None
for path in self.editors:
e=self.editors.get(path)
e.colorLine(0,'')
self.saveDebugWindowState()
self.debugger.quitDebugger()
self.debugger=None
#self.paneWatches.close()
#self.paneWatches=None
#self.paneStack.close()
#self.paneStack=None
self.hideDebugPanes()
self.timer.stop()
def hideDebugPanes(self):
self.paneWatches.hide()
self.paneLocals.hide()
self.paneStack.hide()
def showDebugPanes(self):
self.paneWatches.show()
self.paneLocals.show()
self.paneStack.show()
def clearBreakpoints(self):
self.breakpoints.clear()
n=self.central.count()
for i in xrange(0,n):
self.central.widget(i).bpMarks={}
if self.debugger:
self.debugger.clearBreakpoints()
def actStep(self):
if self.debugger:
self.debugger.actStep()
if not self.debugger.running:
self.stopDebugger()
def actNext(self):
if self.debugger:
self.debugger.actNext()
if not self.debugger.running:
self.stopDebugger()
def actOut(self):
if self.debugger:
self.debugger.actOut()
if not self.debugger.running:
self.stopDebugger()
def actCont(self):
if self.debugger:
e=self.getCurrentEditor()
if e:
e.colorLine(0,'')
self.currentLine=-1
self.debugger.actCont()
def actBreak(self):
if self.debugger:
self.debugger.actBreak()
def actStop(self):
if self.debugger:
self.debugger.actStop()
def update(self):
""" Called every 50ms to check if a change in debugger state occurred
Basically this is waiting for a change of state, indicated by:
* self.debugger.changed
If a change is detected, everything is re-evaluated and drawn
"""
if self.debugger:
self.debugger.update()
#if len(text)>0:
# self.addOutputText(text)
if self.debugger.hasOutput():
self.addOutputText(self.debugger.getOutput())
if self.debugger.changed:
self.updatePosition()
self.updateWatches()
self.updateLocals()
self.updateCallstack()
self.debugger.changed=False
if not self.debugger.running:
self.stopDebugger()
# If the debugger is active running the program,
# create an indication using an animation in the top left
# corner of the application window
if self.debugger and self.debugger.active:
if self.runningWidget is None:
from running import RunningWidget
self.runningWidget=RunningWidget(self)
self.runningWidget.show()
self.outputEdit.setBlinkingCursor(True)
s=self.outputEdit.getInput()
if len(s)>0:
text=''.join(s)
self.debugger.sendInput(text)
self.addOutputText(text)
else:
self.outputEdit.clearInput()
self.outputEdit.setBlinkingCursor(False)
if not self.runningWidget is None:
self.runningWidget.close()
self.runningWidget=None
| gpl-2.0 | -3,514,037,278,721,606,700 | 36.664583 | 135 | 0.576286 | false |
boutiques/schema | tools/python/boutiques/tests/test_logger.py | 1 | 2801 | #!/usr/bin/env python
import os
import pytest
import json
import boutiques as bosh
import boutiques.creator as bc
from boutiques import __file__ as bfile
from boutiques.localExec import ExecutorError
from argparse import ArgumentParser
from unittest import TestCase
import mock
from boutiques_mocks import mock_zenodo_search, MockZenodoRecord
def mock_get(*args, **kwargs):
query = args[0].split("=")[1]
query = query[:query.find("&")]
query = query.replace("*", '')
mock_records = []
# Return an arbitrary list of results
for i in range(0, 10):
mock_records.append(MockZenodoRecord(i, "Example Tool %s" % i))
return mock_zenodo_search(mock_records)
class TestLogger(TestCase):
def get_examples_dir(self):
return os.path.join(os.path.dirname(bfile),
"schema", "examples")
def test_raise_error(self):
example1_dir = os.path.join(self.get_examples_dir(), "example1")
invocationStr = open(os.path.join(example1_dir,
"invocation_invalid.json")).read()
with pytest.raises(ExecutorError) as e:
bosh.execute("launch",
os.path.join(example1_dir,
"example1_docker.json"),
invocationStr)
assert("[ ERROR ]" in str(e))
@mock.patch('requests.get', side_effect=mock_get)
def test_print_info(self, mock_get):
bosh.search("-v")
out, err = self.capfd.readouterr()
assert("[ INFO ]" in out)
assert("[ INFO (200) " in out)
def test_print_warning(self):
parser = ArgumentParser(description="my tool description")
parser.add_argument("--myarg", "-m", action="store",
help="my help", dest="==SUPPRESS==")
creatorObj = bc.CreateDescriptor(parser,
execname='/path/to/myscript.py',
verbose=True,
tags={"purpose": "testing-creator",
"foo": "bar"})
out, err = self.capfd.readouterr()
assert("[ WARNING ]" in out)
def test_evaloutput(self):
example1_dir = os.path.join(self.get_examples_dir(), "example1")
desc = os.path.join(example1_dir, "example1_docker.json")
invo = os.path.join(example1_dir, "invocation.json")
query = bosh.evaluate(desc, invo, "invalid-query")
out, err = self.capfd.readouterr()
assert("[ ERROR ]" in out)
# Captures the stdout and stderr during test execution
# and returns them as a tuple in readouterr()
@pytest.fixture(autouse=True)
def capfd(self, capfd):
self.capfd = capfd
| gpl-2.0 | 309,840,088,008,387,140 | 35.855263 | 76 | 0.571225 | false |
pradyunsg/dotfiles | lib/checker.py | 1 | 5820 | import os
import sys
import shutil
import platform
from .logging import Logger, log
from .utils import run_output
import click
import yaml
class SystemChecker(object):
"""A super-fancy helper for checking the system configuration
"""
def __init__(self, verbose):
super().__init__()
self._logger = Logger()
self.verbose = verbose
def _log_happy(self, msg):
self._logger.spaced_status("pass", msg, fit_width=4)
def _log_angry(self, msg, is_warning):
if is_warning:
self._logger.spaced_status("warn", msg, fit_width=4)
else:
self._logger.spaced_status("fail", msg, fit_width=4)
def platform(self):
return platform.system()
def equal(self, expected, *, should_warn=False, **kwargs):
"""Check if a given value for something is equal to the expected value.
checker.equal(value, name=from_system)
"""
assert len(kwargs) == 1, "expected 1 keyword argument"
name, value = next(iter(kwargs.items()))
if value == expected:
self._log_happy(name + " is correct")
else:
self._log_angry(
f"{name} is not {expected!r}, it is {value!r}",
is_warning=should_warn,
)
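    # Minimal usage sketch (values are hypothetical):
    #
    #   checker = SystemChecker(verbose=True)
    #   checker.equal("alice", Username=os.environ["USER"])
    #   checker.equal("zsh", should_warn=True, Shell=os.path.basename(os.environ["SHELL"]))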
# The actual logic is below
def run(self, fname):
data = self._load_yaml(fname)
self._check_username(data["identity"]["username"])
self._check_ssh(data["identity"]["ssh-key"])
self._check_gpg(data["identity"]["gpg-key"])
for category, contents in data["things"].items():
self._check_category(category, contents, data)
def _load_yaml(self, fname):
with open(fname) as f:
try:
return yaml.safe_load(f)
except Exception as e:
click.secho("ERROR: Could not parse file.", fg="red")
click.secho(str(e), fg="red")
sys.exit(1)
def _check_username(self, expected):
self.equal(expected, Username=os.environ["USER"])
def _check_ssh(self, expected):
# FIXME: Is this fragile?
output = run_output("ssh-keygen -E md5 -lf {}".format(
os.path.expanduser("~/.ssh/id_rsa.pub")
))
if output is None:
ssh_key = "not found"
else:
ssh_key = output.split()[1]
if ssh_key.startswith("MD5:"):
ssh_key = ssh_key[4:]
self.equal(expected, **{"SSH key": ssh_key})
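    # Parsing note (example output is illustrative): `ssh-keygen -E md5 -lf`
    # prints a line like
    #   2048 MD5:ab:cd:...:12 user@host (RSA)
    # so output.split()[1] is the fingerprint field and the leading "MD5:"
    # prefix is stripped before comparing against the expected key.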
def _check_gpg(self, expected):
        # This checks that the GPG key exists in the DB
output = run_output("gpg --list-keys {}".format(expected))
if output is not None:
self.equal(expected, **{"GPG key": expected})
else:
self.equal(expected, **{"GPG key": "not found"})
def _check_category(self, category, contents, data):
if "if" in contents:
if list(contents["if"]) != ["platform"]:
raise ValueError(
"Needed condition of category {} to be 'platform'"
.format(category)
)
if contents["if"]["platform"] != self.platform():
log.spaced_status("skip", category)
return
log.spaced_status("topic", category, fit_width=5)
with log:
self._check_executables(
category, contents.get("executables", None)
)
self._check_run_items(
category, contents.get("run_check", None), data
)
def _check_executables(self, category, executables):
if not executables:
return
# Convert the string to a list.
executables = list(map(lambda x: x.strip(), executables.split(",")))
missing = set()
for fname in executables:
if shutil.which(fname) is None:
missing.add(fname)
verb = lambda x: "executable" if len(x) == 1 else "executables"
if missing:
desc = "missing {}: {}".format(
verb(missing), ", ".join(map(repr, missing))
)
log.spaced_status("fail", desc, fit_width=4)
else:
log.spaced_status(
"pass",
"{} {} available".format(len(executables), verb(executables)),
fit_width=4,
)
def _check_run_items(self, category, run_items, data):
if not run_items:
return
for name, cmd_dict in run_items.items():
if not isinstance(cmd_dict, dict) or "cmd" not in cmd_dict:
log.spaced_status(
"warn", f"!!! invalid !!! {category} {name}",
fit_width=4
)
continue
got = run_output(cmd_dict["cmd"])
if got is None:
# Did not exit cleanly
ok = False
reason = "command did not succeed"
elif "equal" in cmd_dict:
# Match the output against an expected value...
expected = cmd_dict["equal"]
# Perform substitution (from values earlier in the dict)
if expected.startswith("$"):
expected = _dotted_access(data, expected[1:])
ok = expected == got.rstrip()
                reason = f"{expected!r} != {got!r}"
            else:
                # No expected value given; a clean exit is enough to count as a pass.
                ok = True
                reason = ""
if ok:
log.spaced_status("pass", name, fit_width=4)
else:
log.spaced_status("fail", name, fit_width=4)
if self.verbose:
with log:
log.info(reason)
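    # A hypothetical `run_check` entry this method can consume (YAML):
    #
    #   run_check:
    #     git-email:
    #       cmd: git config --global user.email
    #       equal: $identity.email
    #
    # A leading "$" turns `equal` into a dotted lookup into the same YAML data
    # (resolved by _dotted_access below) instead of a literal comparison.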
def _dotted_access(data, spec):
item = data
for part in spec.split("."):
item = item[part]
return item
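# Example (illustrative): _dotted_access({"identity": {"username": "alice"}},
# "identity.username") returns "alice"; this is what resolves the "$..." specs
# used by SystemChecker._check_run_items above.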
| mit | -4,368,949,340,365,042,700 | 30.978022 | 79 | 0.518041 | false |
peterrenshaw/socsim | setup.py | 1 | 1400 | #!/usr/bin/env python
# ~*~ encoding: utf-8 ~*~
"""
This file is part of SOCSIM.
SOCSIM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SOCSIM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SOCSIM. If not, see <http://www.gnu.org/licenses/>.
"""
import os
from setuptools import setup
from setuptools import find_packages
from socsim import __version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name = "socsim",
version = __version__,
description = 'social media simulation tools',
long_description=read('README'),
license = 'GNU GPL 3.0',
author = "Peter Renshaw",
author_email = "[email protected]",
url = 'https://github.com/peterrenshaw/socsim',
packages = find_packages(),
keywords = ['message','testing','human','response'],
zip_safe = True)
# vim: ff=unix:ts=4:sw=4:tw=78:noai:expandtab
| gpl-3.0 | -4,294,559,742,352,133,000 | 30.111111 | 72 | 0.682143 | false |
judaba13/GenrePredictor | hdf5_utils.py | 1 | 28730 | """
Thierry Bertin-Mahieux (2010) Columbia University
[email protected]
This code contains a set of routines to create HDF5 files containing
features and metadata of a song.
This is part of the Million Song Dataset project from
LabROSA (Columbia University) and The Echo Nest.
Copyright 2010, Thierry Bertin-Mahieux
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import numpy as np
# code relies on pytables, see http://www.pytables.org
import tables
import hdf5_descriptors as DESC
from hdf5_getters import *
# musicbrainz related stuff
try:
from MBrainzDB import query as QUERYMB
except ImportError:
print 'need pg module and MBrainzDB folder of Python source code if you'
print 'want to use musicbrainz related functions, e.g. fill_hdf5_from_musicbrainz'
# description of the different arrays in the song file
ARRAY_DESC_SIMILAR_ARTISTS = 'array of similar artists Echo Nest id'
ARRAY_DESC_ARTIST_TERMS = 'array of terms (Echo Nest tags) for an artist'
ARRAY_DESC_ARTIST_TERMS_FREQ = 'array of term (Echo Nest tags) frequencies for an artist'
ARRAY_DESC_ARTIST_TERMS_WEIGHT = 'array of term (Echo Nest tags) weights for an artist'
ARRAY_DESC_SEGMENTS_START = 'array of start times of segments'
ARRAY_DESC_SEGMENTS_CONFIDENCE = 'array of confidence of segments'
ARRAY_DESC_SEGMENTS_PITCHES = 'array of pitches of segments (chromas)'
ARRAY_DESC_SEGMENTS_TIMBRE = 'array of timbre of segments (MFCC-like)'
ARRAY_DESC_SEGMENTS_LOUDNESS_MAX = 'array of max loudness of segments'
ARRAY_DESC_SEGMENTS_LOUDNESS_MAX_TIME = 'array of max loudness time of segments'
ARRAY_DESC_SEGMENTS_LOUDNESS_START = 'array of loudness of segments at start time'
ARRAY_DESC_SECTIONS_START = 'array of start times of sections'
ARRAY_DESC_SECTIONS_CONFIDENCE = 'array of confidence of sections'
ARRAY_DESC_BEATS_START = 'array of start times of beats'
ARRAY_DESC_BEATS_CONFIDENCE = 'array of confidence of sections'
ARRAY_DESC_BARS_START = 'array of start times of bars'
ARRAY_DESC_BARS_CONFIDENCE = 'array of confidence of bars'
ARRAY_DESC_TATUMS_START = 'array of start times of tatums'
ARRAY_DESC_TATUMS_CONFIDENCE = 'array of confidence of tatums'
ARRAY_DESC_ARTIST_MBTAGS = 'array of tags from MusicBrainz for an artist'
ARRAY_DESC_ARTIST_MBTAGS_COUNT = 'array of tag counts from MusicBrainz for an artist'
def fill_hdf5_from_artist(h5,artist):
"""
    Fill an open hdf5 using all content in an artist object
from the Echo Nest python API
There could be overlap with fill_from_song and fill_from_track,
we assume the data is consistent!
"""
# get the metadata table, fill it
metadata = h5.root.metadata.songs
metadata.cols.artist_id[0] = artist.id
idsplitter = lambda x,y: x.split(':')[2] if x else y
metadata.cols.artist_mbid[0] = idsplitter(artist.get_foreign_id(idspace='musicbrainz'),'')
metadata.cols.artist_playmeid[0] = int(idsplitter(artist.get_foreign_id(idspace='playme'),-1))
metadata.cols.artist_7digitalid[0] = int(idsplitter(artist.get_foreign_id(idspace='7digital'),-1))
# fill the metadata arrays
group = h5.root.metadata
metadata.cols.idx_similar_artists[0] = 0
group.similar_artists.append( np.array(map(lambda x : x.id,artist.get_similar(results=100)),dtype='string') )
metadata.cols.idx_artist_terms[0] = 0
group.artist_terms.append( np.array(map(lambda x : x.name,artist.get_terms()),dtype='string') )
group.artist_terms_freq.append( np.array(map(lambda x : x.frequency,artist.get_terms()),dtype='float64') )
group.artist_terms_weight.append( np.array(map(lambda x : x.weight,artist.get_terms()),dtype='float64') )
# done, flush
metadata.flush()
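# Hedged usage sketch (not part of the original module): with a song file `h5`
# opened in append mode and an `artist` object from the Echo Nest Python API,
# the call is simply
#
#   fill_hdf5_from_artist(h5, artist)
#   h5.close()
#
# The file is assumed to already contain the (empty) metadata table and arrays
# that this function fills and appends to.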
def fill_hdf5_from_song(h5,song):
"""
Fill an open hdf5 using all the content in a song object
from the Echo Nest python API.
Usually, fill_hdf5_from_track() will have been called first.
"""
# get the metadata table, fill it
metadata = h5.root.metadata.songs
metadata.cols.artist_familiarity[0] = song.get_artist_familiarity()
metadata.cols.artist_hotttnesss[0] = song.get_artist_hotttnesss()
metadata.cols.artist_id[0] = song.artist_id
metadata.cols.artist_latitude[0] = song.get_artist_location().latitude
metadata.cols.artist_location[0] = song.get_artist_location().location.encode('utf-8') if song.get_artist_location().location else ''
metadata.cols.artist_longitude[0] = song.get_artist_location().longitude
metadata.cols.artist_name[0] = song.artist_name.encode('utf-8') if song.artist_name else ''
metadata.cols.song_id[0] = song.id
metadata.cols.song_hotttnesss[0] = song.get_song_hotttnesss()
metadata.cols.title[0] = song.title.encode('utf-8') if song.title else ''
metadata.flush()
# get the analysis table
analysis = h5.root.analysis.songs
    analysis.cols.danceability[0] = song.get_audio_summary().danceability
    analysis.cols.energy[0] = song.get_audio_summary().energy
analysis.flush()
def fill_hdf5_from_track(h5,track):
"""
Fill an open hdf5 using all the content in a track object
from the Echo Nest python API
"""
# get the metadata table, fill it
metadata = h5.root.metadata.songs
#metadata.cols.analyzer_version[0] = track.analyzer_version
metadata.cols.artist_name[0] = getattr(track, 'artist', u'').encode('utf-8')
metadata.cols.release[0] = getattr(track, 'release', u'').encode('utf-8')
metadata.cols.title[0] = getattr(track, 'title', u'').encode('utf-8')
idsplitter_7digital = lambda x: int(x.split(':')[2]) if x and x.split(':')[0]=='7digital' else -1
metadata.cols.release_7digitalid[0] = idsplitter_7digital(track.foreign_release_id)
metadata.cols.track_7digitalid[0] = idsplitter_7digital(track.foreign_id)
metadata.flush()
# get the analysis table, fill it
analysis = h5.root.analysis.songs
analysis.cols.analysis_sample_rate[0] = track.analysis_sample_rate
analysis.cols.audio_md5[0] = track.audio_md5
analysis.cols.duration[0] = track.duration
analysis.cols.end_of_fade_in[0] = track.end_of_fade_in
analysis.cols.key[0] = track.key
analysis.cols.key_confidence[0] = track.key_confidence
analysis.cols.loudness[0] = track.loudness
analysis.cols.mode[0] = track.mode
analysis.cols.mode_confidence[0] = track.mode_confidence
analysis.cols.start_of_fade_out[0] = track.start_of_fade_out
analysis.cols.tempo[0] = track.tempo
analysis.cols.time_signature[0] = track.time_signature
analysis.cols.time_signature_confidence[0] = track.time_signature_confidence
analysis.cols.track_id[0] = track.id
analysis.flush()
group = h5.root.analysis
# analysis arrays (segments)
analysis.cols.idx_segments_start[0] = 0
group.segments_start.append( np.array(map(lambda x : x['start'],track.segments),dtype='float64') )
analysis.cols.idx_segments_confidence[0] = 0
group.segments_confidence.append( np.array(map(lambda x : x['confidence'],track.segments),dtype='float64') )
analysis.cols.idx_segments_pitches[0] = 0
group.segments_pitches.append( np.array(map(lambda x : x['pitches'],track.segments),dtype='float64') )
analysis.cols.idx_segments_timbre[0] = 0
group.segments_timbre.append( np.array(map(lambda x : x['timbre'],track.segments),dtype='float64') )
analysis.cols.idx_segments_loudness_max[0] = 0
group.segments_loudness_max.append( np.array(map(lambda x : x['loudness_max'],track.segments),dtype='float64') )
analysis.cols.idx_segments_loudness_max_time[0] = 0
group.segments_loudness_max_time.append( np.array(map(lambda x : x['loudness_max_time'],track.segments),dtype='float64') )
analysis.cols.idx_segments_loudness_start[0] = 0
group.segments_loudness_start.append( np.array(map(lambda x : x['loudness_start'],track.segments),dtype='float64') )
# analysis arrays (sections)
analysis.cols.idx_sections_start[0] = 0
group.sections_start.append( np.array(map(lambda x : x['start'],track.sections),dtype='float64') )
analysis.cols.idx_sections_confidence[0] = 0
group.sections_confidence.append( np.array(map(lambda x : x['confidence'],track.sections),dtype='float64') )
    # analysis arrays (beats)
analysis.cols.idx_beats_start[0] = 0
group.beats_start.append( np.array(map(lambda x : x['start'],track.beats),dtype='float64') )
analysis.cols.idx_beats_confidence[0] = 0
group.beats_confidence.append( np.array(map(lambda x : x['confidence'],track.beats),dtype='float64') )
# analysis arrays (bars)
analysis.cols.idx_bars_start[0] = 0
group.bars_start.append( np.array(map(lambda x : x['start'],track.bars),dtype='float64') )
analysis.cols.idx_bars_confidence[0] = 0
group.bars_confidence.append( np.array(map(lambda x : x['confidence'],track.bars),dtype='float64') )
# analysis arrays (tatums)
analysis.cols.idx_tatums_start[0] = 0
group.tatums_start.append( np.array(map(lambda x : x['start'],track.tatums),dtype='float64') )
analysis.cols.idx_tatums_confidence[0] = 0
group.tatums_confidence.append( np.array(map(lambda x : x['confidence'],track.tatums),dtype='float64') )
analysis.flush()
# DONE
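# Note on the idx_* columns written above: in a single-song file every
# idx_segments_* / idx_sections_* / ... is 0, because each appended array holds
# only this song's data. Aggregate files (see fill_hdf5_aggregate_file below)
# reuse the same columns as offsets into shared, concatenated arrays.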
def fill_hdf5_from_musicbrainz(h5,connect):
"""
Fill an open hdf5 using the musicbrainz server and data.
We assume this code is run after fill_hdf5_from_artist/song
because we need artist_mbid, artist_name, release and title
INPUT
h5 - open song file (append mode)
connect - open pg connection to musicbrainz_db
"""
# get info from h5 song file
ambid = h5.root.metadata.songs.cols.artist_mbid[0]
artist_name = h5.root.metadata.songs.cols.artist_name[0]
release = h5.root.metadata.songs.cols.release[0]
title = h5.root.metadata.songs.cols.title[0]
# get the musicbrainz table, fill it
musicbrainz = h5.root.musicbrainz.songs
musicbrainz.cols.year[0] = QUERYMB.find_year_safemode(connect,ambid,title,release,artist_name)
# fill the musicbrainz arrays
group = h5.root.musicbrainz
musicbrainz.cols.idx_artist_mbtags[0] = 0
tags,tagcount = QUERYMB.get_artist_tags(connect, ambid, maxtags=20)
group.artist_mbtags.append( np.array(tags,dtype='string') )
group.artist_mbtags_count.append( np.array(tagcount,dtype='float64') )
# done, flush
musicbrainz.flush()
def fill_hdf5_aggregate_file(h5,h5_filenames,summaryfile=False):
"""
Fill an open hdf5 aggregate file using all the content from all the HDF5 files
listed as filenames. These HDF5 files are supposed to be filled already.
Usefull to create one big HDF5 file from many, thus improving IO speed.
For most of the info, we simply use one row per song.
For the arrays (e.g. segment_start) we need the indecies (e.g. idx_segment_start)
to know which part of the array belongs to one particular song.
If summaryfile=True, we skip arrays (indices all 0)
"""
# counter
counter = 0
# iterate over filenames
for h5idx,h5filename in enumerate(h5_filenames):
# open h5 file
h5tocopy = open_h5_file_read(h5filename)
# get number of songs in new file
nSongs = get_num_songs(h5tocopy)
# iterate over songs in one HDF5 (1 if regular file, more if aggregate file)
for songidx in xrange(nSongs):
# METADATA
row = h5.root.metadata.songs.row
row["artist_familiarity"] = get_artist_familiarity(h5tocopy,songidx)
row["artist_hotttnesss"] = get_artist_hotttnesss(h5tocopy,songidx)
row["artist_id"] = get_artist_id(h5tocopy,songidx)
row["artist_mbid"] = get_artist_mbid(h5tocopy,songidx)
row["artist_playmeid"] = get_artist_playmeid(h5tocopy,songidx)
row["artist_7digitalid"] = get_artist_7digitalid(h5tocopy,songidx)
row["artist_latitude"] = get_artist_latitude(h5tocopy,songidx)
row["artist_location"] = get_artist_location(h5tocopy,songidx)
row["artist_longitude"] = get_artist_longitude(h5tocopy,songidx)
row["artist_name"] = get_artist_name(h5tocopy,songidx)
row["release"] = get_release(h5tocopy,songidx)
row["release_7digitalid"] = get_release_7digitalid(h5tocopy,songidx)
row["song_id"] = get_song_id(h5tocopy,songidx)
row["song_hotttnesss"] = get_song_hotttnesss(h5tocopy,songidx)
row["title"] = get_title(h5tocopy,songidx)
row["track_7digitalid"] = get_track_7digitalid(h5tocopy,songidx)
# INDICES
if not summaryfile:
if counter == 0 : # we're first row
row["idx_similar_artists"] = 0
row["idx_artist_terms"] = 0
else:
row["idx_similar_artists"] = h5.root.metadata.similar_artists.shape[0]
row["idx_artist_terms"] = h5.root.metadata.artist_terms.shape[0]
row.append()
h5.root.metadata.songs.flush()
# ARRAYS
if not summaryfile:
h5.root.metadata.similar_artists.append( get_similar_artists(h5tocopy,songidx) )
h5.root.metadata.artist_terms.append( get_artist_terms(h5tocopy,songidx) )
h5.root.metadata.artist_terms_freq.append( get_artist_terms_freq(h5tocopy,songidx) )
h5.root.metadata.artist_terms_weight.append( get_artist_terms_weight(h5tocopy,songidx) )
# ANALYSIS
row = h5.root.analysis.songs.row
row["analysis_sample_rate"] = get_analysis_sample_rate(h5tocopy,songidx)
row["audio_md5"] = get_audio_md5(h5tocopy,songidx)
row["danceability"] = get_danceability(h5tocopy,songidx)
row["duration"] = get_duration(h5tocopy,songidx)
row["end_of_fade_in"] = get_end_of_fade_in(h5tocopy,songidx)
row["energy"] = get_energy(h5tocopy,songidx)
row["key"] = get_key(h5tocopy,songidx)
row["key_confidence"] = get_key_confidence(h5tocopy,songidx)
row["loudness"] = get_loudness(h5tocopy,songidx)
row["mode"] = get_mode(h5tocopy,songidx)
row["mode_confidence"] = get_mode_confidence(h5tocopy,songidx)
row["start_of_fade_out"] = get_start_of_fade_out(h5tocopy,songidx)
row["tempo"] = get_tempo(h5tocopy,songidx)
row["time_signature"] = get_time_signature(h5tocopy,songidx)
row["time_signature_confidence"] = get_time_signature_confidence(h5tocopy,songidx)
row["track_id"] = get_track_id(h5tocopy,songidx)
# INDICES
if not summaryfile:
if counter == 0 : # we're first row
row["idx_segments_start"] = 0
row["idx_segments_confidence"] = 0
row["idx_segments_pitches"] = 0
row["idx_segments_timbre"] = 0
row["idx_segments_loudness_max"] = 0
row["idx_segments_loudness_max_time"] = 0
row["idx_segments_loudness_start"] = 0
row["idx_sections_start"] = 0
row["idx_sections_confidence"] = 0
row["idx_beats_start"] = 0
row["idx_beats_confidence"] = 0
row["idx_bars_start"] = 0
row["idx_bars_confidence"] = 0
row["idx_tatums_start"] = 0
row["idx_tatums_confidence"] = 0
else : # check the current shape of the arrays
row["idx_segments_start"] = h5.root.analysis.segments_start.shape[0]
row["idx_segments_confidence"] = h5.root.analysis.segments_confidence.shape[0]
row["idx_segments_pitches"] = h5.root.analysis.segments_pitches.shape[0]
row["idx_segments_timbre"] = h5.root.analysis.segments_timbre.shape[0]
row["idx_segments_loudness_max"] = h5.root.analysis.segments_loudness_max.shape[0]
row["idx_segments_loudness_max_time"] = h5.root.analysis.segments_loudness_max_time.shape[0]
row["idx_segments_loudness_start"] = h5.root.analysis.segments_loudness_start.shape[0]
row["idx_sections_start"] = h5.root.analysis.sections_start.shape[0]
row["idx_sections_confidence"] = h5.root.analysis.sections_confidence.shape[0]
row["idx_beats_start"] = h5.root.analysis.beats_start.shape[0]
row["idx_beats_confidence"] = h5.root.analysis.beats_confidence.shape[0]
row["idx_bars_start"] = h5.root.analysis.bars_start.shape[0]
row["idx_bars_confidence"] = h5.root.analysis.bars_confidence.shape[0]
row["idx_tatums_start"] = h5.root.analysis.tatums_start.shape[0]
row["idx_tatums_confidence"] = h5.root.analysis.tatums_confidence.shape[0]
row.append()
h5.root.analysis.songs.flush()
# ARRAYS
if not summaryfile:
h5.root.analysis.segments_start.append( get_segments_start(h5tocopy,songidx) )
h5.root.analysis.segments_confidence.append( get_segments_confidence(h5tocopy,songidx) )
h5.root.analysis.segments_pitches.append( get_segments_pitches(h5tocopy,songidx) )
h5.root.analysis.segments_timbre.append( get_segments_timbre(h5tocopy,songidx) )
h5.root.analysis.segments_loudness_max.append( get_segments_loudness_max(h5tocopy,songidx) )
h5.root.analysis.segments_loudness_max_time.append( get_segments_loudness_max_time(h5tocopy,songidx) )
h5.root.analysis.segments_loudness_start.append( get_segments_loudness_start(h5tocopy,songidx) )
h5.root.analysis.sections_start.append( get_sections_start(h5tocopy,songidx) )
h5.root.analysis.sections_confidence.append( get_sections_confidence(h5tocopy,songidx) )
h5.root.analysis.beats_start.append( get_beats_start(h5tocopy,songidx) )
h5.root.analysis.beats_confidence.append( get_beats_confidence(h5tocopy,songidx) )
h5.root.analysis.bars_start.append( get_bars_start(h5tocopy,songidx) )
h5.root.analysis.bars_confidence.append( get_bars_confidence(h5tocopy,songidx) )
h5.root.analysis.tatums_start.append( get_tatums_start(h5tocopy,songidx) )
h5.root.analysis.tatums_confidence.append( get_tatums_confidence(h5tocopy,songidx) )
# MUSICBRAINZ
row = h5.root.musicbrainz.songs.row
row["year"] = get_year(h5tocopy,songidx)
# INDICES
if not summaryfile:
if counter == 0 : # we're first row
row["idx_artist_mbtags"] = 0
else:
row["idx_artist_mbtags"] = h5.root.musicbrainz.artist_mbtags.shape[0]
row.append()
h5.root.musicbrainz.songs.flush()
# ARRAYS
if not summaryfile:
h5.root.musicbrainz.artist_mbtags.append( get_artist_mbtags(h5tocopy,songidx) )
h5.root.musicbrainz.artist_mbtags_count.append( get_artist_mbtags_count(h5tocopy,songidx) )
# counter
counter += 1
# close h5 file
h5tocopy.close()
def create_song_file(h5filename,title='H5 Song File',force=False,complevel=1):
"""
Create a new HDF5 file for a new song.
If force=False, refuse to overwrite an existing file
Raise a ValueError if it's the case.
Other optional param is the H5 file.
Setups the groups, each containing a table 'songs' with one row:
- metadata
- analysis
DETAIL
- we set the compression level to 1 by default, it uses the ZLIB library
to disable compression, set it to 0
"""
# check if file exists
if not force:
if os.path.exists(h5filename):
raise ValueError('file exists, can not create HDF5 song file')
# create the H5 file
h5 = tables.openFile(h5filename, mode='w', title='H5 Song File')
# set filter level
h5.filters = tables.Filters(complevel=complevel,complib='zlib')
# setup the groups and tables
# group metadata
group = h5.createGroup("/",'metadata','metadata about the song')
table = h5.createTable(group,'songs',DESC.SongMetaData,'table of metadata for one song')
r = table.row
r.append() # filled with default values 0 or '' (depending on type)
table.flush()
# group analysis
group = h5.createGroup("/",'analysis','Echo Nest analysis of the song')
table = h5.createTable(group,'songs',DESC.SongAnalysis,'table of Echo Nest analysis for one song')
r = table.row
r.append() # filled with default values 0 or '' (depending on type)
table.flush()
# group musicbrainz
group = h5.createGroup("/",'musicbrainz','data about the song coming from MusicBrainz')
table = h5.createTable(group,'songs',DESC.SongMusicBrainz,'table of data coming from MusicBrainz')
r = table.row
r.append() # filled with default values 0 or '' (depending on type)
table.flush()
# create arrays
create_all_arrays(h5,expectedrows=3)
# close it, done
h5.close()
def create_aggregate_file(h5filename,title='H5 Aggregate File',force=False,expectedrows=1000,complevel=1,
summaryfile=False):
"""
Create a new HDF5 file for all songs.
It will contains everything that are in regular song files.
Tables created empty.
If force=False, refuse to overwrite an existing file
Raise a ValueError if it's the case.
If summaryfile=True, creates a sumary file, i.e. no arrays
Other optional param is the H5 file.
DETAILS
- if you create a very large file, try to approximate correctly
the number of data points (songs), it speeds things up with arrays (by
setting the chunking correctly).
- we set the compression level to 1 by default, it uses the ZLIB library
to disable compression, set it to 0
Setups the groups, each containing a table 'songs' with one row:
- metadata
- analysis
"""
# check if file exists
if not force:
if os.path.exists(h5filename):
raise ValueError('file exists, can not create HDF5 song file')
# summary file? change title
if summaryfile:
title = 'H5 Summary File'
# create the H5 file
h5 = tables.openFile(h5filename, mode='w', title='H5 Song File')
# set filter level
h5.filters = tables.Filters(complevel=complevel,complib='zlib')
# setup the groups and tables
# group metadata
group = h5.createGroup("/",'metadata','metadata about the song')
table = h5.createTable(group,'songs',DESC.SongMetaData,'table of metadata for one song',
expectedrows=expectedrows)
# group analysis
group = h5.createGroup("/",'analysis','Echo Nest analysis of the song')
table = h5.createTable(group,'songs',DESC.SongAnalysis,'table of Echo Nest analysis for one song',
expectedrows=expectedrows)
# group musicbrainz
group = h5.createGroup("/",'musicbrainz','data about the song coming from MusicBrainz')
table = h5.createTable(group,'songs',DESC.SongMusicBrainz,'table of data coming from MusicBrainz',
expectedrows=expectedrows)
# create arrays
if not summaryfile:
create_all_arrays(h5,expectedrows=expectedrows)
# close it, done
h5.close()
def create_all_arrays(h5,expectedrows=1000):
"""
Utility functions used by both create_song_file and create_aggregate_files,
creates all the EArrays (empty).
INPUT
h5 - hdf5 file, open with write or append permissions
metadata and analysis groups already exist!
"""
# group metadata arrays
group = h5.root.metadata
h5.createEArray(where=group,name='similar_artists',atom=tables.StringAtom(20,shape=()),shape=(0,),title=ARRAY_DESC_SIMILAR_ARTISTS)
h5.createEArray(group,'artist_terms',tables.StringAtom(256,shape=()),(0,),ARRAY_DESC_ARTIST_TERMS,
expectedrows=expectedrows*40)
h5.createEArray(group,'artist_terms_freq',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_ARTIST_TERMS_FREQ,
expectedrows=expectedrows*40)
h5.createEArray(group,'artist_terms_weight',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_ARTIST_TERMS_WEIGHT,
expectedrows=expectedrows*40)
# group analysis arrays
group = h5.root.analysis
h5.createEArray(where=group,name='segments_start',atom=tables.Float64Atom(shape=()),shape=(0,),title=ARRAY_DESC_SEGMENTS_START)
h5.createEArray(group,'segments_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_CONFIDENCE,
expectedrows=expectedrows*300)
h5.createEArray(group,'segments_pitches',tables.Float64Atom(shape=()),(0,12),ARRAY_DESC_SEGMENTS_PITCHES,
expectedrows=expectedrows*300)
h5.createEArray(group,'segments_timbre',tables.Float64Atom(shape=()),(0,12),ARRAY_DESC_SEGMENTS_TIMBRE,
expectedrows=expectedrows*300)
h5.createEArray(group,'segments_loudness_max',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_MAX,
expectedrows=expectedrows*300)
h5.createEArray(group,'segments_loudness_max_time',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_MAX_TIME,
expectedrows=expectedrows*300)
h5.createEArray(group,'segments_loudness_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_START,
expectedrows=expectedrows*300)
h5.createEArray(group,'sections_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SECTIONS_START,
expectedrows=expectedrows*300)
h5.createEArray(group,'sections_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SECTIONS_CONFIDENCE,
expectedrows=expectedrows*300)
h5.createEArray(group,'beats_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BEATS_START,
expectedrows=expectedrows*300)
h5.createEArray(group,'beats_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BEATS_CONFIDENCE,
expectedrows=expectedrows*300)
h5.createEArray(group,'bars_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BARS_START,
expectedrows=expectedrows*300)
h5.createEArray(group,'bars_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BARS_CONFIDENCE,
expectedrows=expectedrows*300)
h5.createEArray(group,'tatums_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_TATUMS_START,
expectedrows=expectedrows*300)
h5.createEArray(group,'tatums_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_TATUMS_CONFIDENCE,
expectedrows=expectedrows*300)
# group musicbrainz arrays
group = h5.root.musicbrainz
h5.createEArray(where=group,name='artist_mbtags',atom=tables.StringAtom(256,shape=()),shape=(0,),title=ARRAY_DESC_ARTIST_MBTAGS,
expectedrows=expectedrows*5)
h5.createEArray(group,'artist_mbtags_count',tables.IntAtom(shape=()),(0,),ARRAY_DESC_ARTIST_MBTAGS_COUNT,
expectedrows=expectedrows*5)
def open_h5_file_read(h5filename):
"""
Open an existing H5 in read mode.
"""
return tables.openFile(h5filename, mode='r')
def open_h5_file_append(h5filename):
"""
Open an existing H5 in append mode.
"""
return tables.openFile(h5filename, mode='a')
################################################ MAIN #####################################
def die_with_usage():
""" HELP MENU """
print 'hdf5_utils.py'
print 'by T. Bertin-Mahieux (2010) Columbia University'
print ''
print 'should be used as a library, contains functions to create'
print 'HDF5 files for the Million Song Dataset project'
sys.exit(0)
if __name__ == '__main__':
# help menu
die_with_usage() | apache-2.0 | 7,055,382,129,776,609,000 | 52.304267 | 137 | 0.664636 | false |
openstack/python-magnumclient | magnumclient/tests/osc/unit/v1/test_clusters.py | 1 | 17895 | # Copyright 2016 Easystack. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import os
import sys
import tempfile
from unittest import mock
from contextlib import contextmanager
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from unittest.mock import call
from magnumclient import exceptions
from magnumclient.osc.v1 import clusters as osc_clusters
from magnumclient.tests.osc.unit.v1 import fakes as magnum_fakes
class TestCluster(magnum_fakes.TestMagnumClientOSCV1):
def setUp(self):
super(TestCluster, self).setUp()
self.clusters_mock = self.app.client_manager.container_infra.clusters
self.certificates_mock = \
self.app.client_manager.container_infra.certificates
class TestClusterCreate(TestCluster):
def setUp(self):
super(TestClusterCreate, self).setUp()
attr = dict()
attr['name'] = 'fake-cluster-1'
self._cluster = magnum_fakes.FakeCluster.create_one_cluster(attr)
self._default_args = {
'cluster_template_id': 'fake-ct',
'create_timeout': 60,
'discovery_url': None,
'keypair': None,
'master_count': 1,
'name': 'fake-cluster-1',
'node_count': 1,
}
self.clusters_mock.create = mock.Mock()
self.clusters_mock.create.return_value = self._cluster
self.clusters_mock.get = mock.Mock()
self.clusters_mock.get.return_value = copy.deepcopy(self._cluster)
self.clusters_mock.update = mock.Mock()
self.clusters_mock.update.return_value = self._cluster
# Get the command object to test
self.cmd = osc_clusters.CreateCluster(self.app, None)
self.data = tuple(map(lambda x: getattr(self._cluster, x),
osc_clusters.CLUSTER_ATTRIBUTES))
def test_cluster_create_required_args_pass(self):
"""Verifies required arguments."""
arglist = [
'--cluster-template', self._cluster.cluster_template_id,
self._cluster.name
]
verifylist = [
('cluster_template', self._cluster.cluster_template_id),
('name', self._cluster.name)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.create.assert_called_with(**self._default_args)
def test_cluster_create_missing_required_arg(self):
"""Verifies missing required arguments."""
arglist = [
self._cluster.name
]
verifylist = [
('name', self._cluster.name)
]
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
def test_cluster_create_with_labels(self):
"""Verifies labels are properly parsed when given as argument."""
expected_args = self._default_args
expected_args['labels'] = {
'arg1': 'value1', 'arg2': 'value2'
}
arglist = [
'--cluster-template', self._cluster.cluster_template_id,
'--labels', 'arg1=value1',
'--labels', 'arg2=value2',
self._cluster.name
]
verifylist = [
('cluster_template', self._cluster.cluster_template_id),
('labels', ['arg1=value1', 'arg2=value2']),
('name', self._cluster.name)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.create.assert_called_with(**expected_args)
def test_cluster_create_with_lb_disabled(self):
"""Verifies master lb disabled properly parsed."""
expected_args = self._default_args
expected_args['master_lb_enabled'] = False
arglist = [
'--cluster-template', self._cluster.cluster_template_id,
'--master-lb-disabled',
self._cluster.name
]
verifylist = [
('cluster_template', self._cluster.cluster_template_id),
('master_lb_enabled', [False]),
('name', self._cluster.name)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.create.assert_called_with(**expected_args)
class TestClusterDelete(TestCluster):
def setUp(self):
super(TestClusterDelete, self).setUp()
self.clusters_mock.delete = mock.Mock()
self.clusters_mock.delete.return_value = None
# Get the command object to test
self.cmd = osc_clusters.DeleteCluster(self.app, None)
def test_cluster_delete_one(self):
arglist = ['foo']
verifylist = [('cluster', ['foo'])]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.delete.assert_called_with('foo')
def test_cluster_delete_multiple(self):
arglist = ['foo', 'bar']
verifylist = [('cluster', ['foo', 'bar'])]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.delete.assert_has_calls([call('foo'), call('bar')])
def test_cluster_delete_bad_uuid(self):
arglist = ['foo']
verifylist = [('cluster', ['foo'])]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
returns = self.cmd.take_action(parsed_args)
self.assertEqual(returns, None)
def test_cluster_delete_no_uuid(self):
arglist = []
verifylist = [('cluster', [])]
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
class TestClusterList(TestCluster):
attr = dict()
attr['name'] = 'fake-cluster-1'
_cluster = magnum_fakes.FakeCluster.create_one_cluster(attr)
columns = [
'uuid',
'name',
'keypair',
'node_count',
'master_count',
'status',
'health_status'
]
datalist = (
(
_cluster.uuid,
_cluster.name,
_cluster.keypair,
_cluster.node_count,
_cluster.master_count,
_cluster.status,
_cluster.health_status,
),
)
def setUp(self):
super(TestClusterList, self).setUp()
self.clusters_mock.list = mock.Mock()
self.clusters_mock.list.return_value = [self._cluster]
# Get the command object to test
self.cmd = osc_clusters.ListCluster(self.app, None)
def test_cluster_list_no_options(self):
arglist = []
verifylist = [
('limit', None),
('sort_key', None),
('sort_dir', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.clusters_mock.list.assert_called_with(
limit=None,
sort_dir=None,
sort_key=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, tuple(data))
def test_cluster_list_options(self):
arglist = [
'--limit', '1',
'--sort-key', 'key',
'--sort-dir', 'asc'
]
verifylist = [
('limit', 1),
('sort_key', 'key'),
('sort_dir', 'asc')
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.list.assert_called_with(
limit=1,
sort_dir='asc',
sort_key='key',
)
def test_cluster_list_bad_sort_dir_fail(self):
arglist = [
'--sort-dir', 'foo'
]
verifylist = [
('limit', None),
('sort_key', None),
('sort_dir', 'foo'),
('fields', None),
]
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
class TestClusterUpdate(TestCluster):
def setUp(self):
super(TestClusterUpdate, self).setUp()
self.clusters_mock.update = mock.Mock()
self.clusters_mock.update.return_value = None
# Get the command object to test
self.cmd = osc_clusters.UpdateCluster(self.app, None)
def test_cluster_update_pass(self):
arglist = ['foo', 'remove', 'bar']
verifylist = [
('cluster', 'foo'),
('op', 'remove'),
('attributes', [['bar']]),
('rollback', False)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.update.assert_called_with(
'foo',
[{'op': 'remove', 'path': '/bar'}]
)
def test_cluster_update_bad_op(self):
arglist = ['foo', 'bar', 'snafu']
verifylist = [
('cluster', 'foo'),
('op', 'bar'),
('attributes', ['snafu']),
('rollback', False)
]
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
class TestClusterShow(TestCluster):
def setUp(self):
super(TestClusterShow, self).setUp()
attr = dict()
attr['name'] = 'fake-cluster-1'
self._cluster = magnum_fakes.FakeCluster.create_one_cluster(attr)
self.clusters_mock.get = mock.Mock()
self.clusters_mock.get.return_value = self._cluster
# Get the command object to test
self.cmd = osc_clusters.ShowCluster(self.app, None)
self.data = tuple(map(lambda x: getattr(self._cluster, x),
osc_clusters.CLUSTER_ATTRIBUTES))
def test_cluster_show_pass(self):
arglist = ['fake-cluster']
verifylist = [
('cluster', 'fake-cluster')
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.clusters_mock.get.assert_called_with('fake-cluster')
self.assertEqual(osc_clusters.CLUSTER_ATTRIBUTES, columns)
self.assertEqual(self.data, data)
def test_cluster_show_no_cluster_fail(self):
arglist = []
verifylist = []
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
@contextmanager
def capture(command, *args, **kwargs):
out, sys.stdout = sys.stdout, StringIO()
try:
command(*args, **kwargs)
sys.stdout.seek(0)
yield sys.stdout.read()
finally:
sys.stdout = out
class TestClusterConfig(TestCluster):
def setUp(self):
super(TestClusterConfig, self).setUp()
attr = dict()
attr['name'] = 'fake-cluster-1'
attr['status'] = 'CREATE_COMPLETE'
self._cluster = magnum_fakes.FakeCluster.create_one_cluster(attr)
self.clusters_mock.get = mock.Mock()
self.clusters_mock.get.return_value = self._cluster
cert = magnum_fakes.FakeCert(pem='foo bar')
self.certificates_mock.create = mock.Mock()
self.certificates_mock.create.return_value = cert
self.certificates_mock.get = mock.Mock()
self.certificates_mock.get.return_value = cert
# Fake the cluster_template
attr = dict()
attr['name'] = 'fake-ct'
self._cluster_template = \
magnum_fakes.FakeClusterTemplate.create_one_cluster_template(attr)
self.cluster_templates_mock = \
self.app.client_manager.container_infra.cluster_templates
self.cluster_templates_mock.get = mock.Mock()
self.cluster_templates_mock.get.return_value = self._cluster_template
# Get the command object to test
self.cmd = osc_clusters.ConfigCluster(self.app, None)
def test_cluster_config_no_cluster_fail(self):
arglist = []
verifylist = []
self.assertRaises(magnum_fakes.MagnumParseException,
self.check_parser, self.cmd, arglist, verifylist)
@mock.patch.dict(os.environ, {'SHELL': '/bin/bash'})
def test_cluster_config_custom_dir_with_config_only_works_if_force(self):
tmp_dir = tempfile.mkdtemp()
open(os.path.join(tmp_dir, 'config'), 'a').close() # touch config
arglist = ['fake-cluster', '--dir', tmp_dir]
verifylist = [
('cluster', 'fake-cluster'),
('force', False),
('dir', tmp_dir),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(exceptions.CommandError,
self.cmd.take_action, parsed_args)
self.clusters_mock.get.assert_called_with('fake-cluster')
arglist = ['fake-cluster', '--force', '--dir', tmp_dir]
verifylist = [
('cluster', 'fake-cluster'),
('force', True),
('dir', tmp_dir),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
expected_value = '''\
export KUBECONFIG={}/config
'''.format(tmp_dir)
with capture(self.cmd.take_action, parsed_args) as output:
self.assertEqual(expected_value, output)
self.clusters_mock.get.assert_called_with('fake-cluster')
@mock.patch.dict(os.environ, {'SHELL': '/bin/bash'})
def test_cluster_config_with_custom_dir(self):
tmp_dir = tempfile.mkdtemp()
arglist = ['fake-cluster', '--dir', tmp_dir]
verifylist = [
('cluster', 'fake-cluster'),
('dir', tmp_dir),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
expected_value = '''\
export KUBECONFIG={}/config
'''.format(tmp_dir)
with capture(self.cmd.take_action, parsed_args) as output:
self.assertEqual(expected_value, output)
self.clusters_mock.get.assert_called_with('fake-cluster')
@mock.patch.dict(os.environ, {'SHELL': '/bin/bash'})
def test_cluster_config_creates_config_in_cwd_if_not_dir_specified(self):
tmp_dir = tempfile.mkdtemp()
os.chdir(tmp_dir)
arglist = ['fake-cluster']
verifylist = [
('cluster', 'fake-cluster'),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
expected_value = '''\
export KUBECONFIG={}/config
'''.format(os.getcwd())
with capture(self.cmd.take_action, parsed_args) as output:
self.assertEqual(expected_value, output)
self.clusters_mock.get.assert_called_with('fake-cluster')
class TestClusterResize(TestCluster):
def setUp(self):
super(TestClusterResize, self).setUp()
self.cluster = mock.Mock()
self.cluster.uuid = "UUID1"
self.clusters_mock.resize = mock.Mock()
self.clusters_mock.resize.return_value = None
self.clusters_mock.get = mock.Mock()
self.clusters_mock.get.return_value = self.cluster
# Get the command object to test
self.cmd = osc_clusters.ResizeCluster(self.app, None)
def test_cluster_resize_pass(self):
arglist = ['foo', '2']
verifylist = [
('cluster', 'foo'),
('node_count', 2),
('nodes_to_remove', None),
('nodegroup', None)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.resize.assert_called_with(
"UUID1", 2, None, None
)
def test_cluster_resize_to_zero_pass(self):
arglist = ['foo', '0']
verifylist = [
('cluster', 'foo'),
('node_count', 0),
('nodes_to_remove', None),
('nodegroup', None)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.resize.assert_called_with(
"UUID1", 0, None, None
)
class TestClusterUpgrade(TestCluster):
def setUp(self):
super(TestClusterUpgrade, self).setUp()
self.cluster = mock.Mock()
self.cluster.uuid = "UUID1"
self.clusters_mock.upgrade = mock.Mock()
self.clusters_mock.upgrade.return_value = None
self.clusters_mock.get = mock.Mock()
self.clusters_mock.get.return_value = self.cluster
# Get the command object to test
self.cmd = osc_clusters.UpgradeCluster(self.app, None)
def test_cluster_upgrade_pass(self):
cluster_template_id = 'TEMPLATE_ID'
arglist = ['foo', cluster_template_id]
verifylist = [
('cluster', 'foo'),
('cluster_template', cluster_template_id),
('max_batch_size', 1),
('nodegroup', None)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.clusters_mock.upgrade.assert_called_with(
"UUID1", cluster_template_id, 1, None
)
| apache-2.0 | -923,871,317,477,197,800 | 30.616608 | 78 | 0.585359 | false |
blab/antibody-response-pulse | bcell-array/code/Virus_Bcell_IgM_IgG_Infection_OAS_new.py | 1 | 13195 |
# coding: utf-8
# # Antibody Response Pulse
# https://github.com/blab/antibody-response-pulse
#
# ### B-cells evolution --- cross-reactive antibody response after influenza virus infection or vaccination
# ### Adaptive immune response for repeated infection
# In[3]:
'''
author: Alvason Zhenhua Li
date: 04/09/2015
'''
get_ipython().magic(u'matplotlib inline')
import numpy as np
import matplotlib.pyplot as plt
import os
from matplotlib.ticker import FuncFormatter
import alva_machinery_event_OAS_new as alva
AlvaFontSize = 23
AlvaFigSize = (15, 5)
numberingFig = 0
# plotting
dir_path = '/Users/al/Desktop/GitHub/antibody-response-pulse/bcell-array/figure'
file_name = 'Virus-Bcell-IgM-IgG'
figure_name = '-equation'
file_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix)
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize=(12, 5))
plt.axis('off')
plt.title(r'$ Virus-Bcell-IgM-IgG \ equations \ (antibody-response \ for \ repeated-infection) $'
, fontsize = AlvaFontSize)
plt.text(0, 7.0/9, r'$ \frac{\partial V_n(t)}{\partial t} = +\mu_{v}V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) - \phi_{m} M_{n}(t) V_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 5.0/9, r'$ \frac{\partial B_n(t)}{\partial t} = +\mu_{b}V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) + (\beta_{m} + \beta_{g}) V_{n}(t) B_{n}(t) - \mu_{b} B_{n}(t) + m_b V_{n}(t)\frac{B_{i-1}(t) - 2B_i(t) + B_{i+1}(t)}{(\Delta i)^2} $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 3.0/9,r'$ \frac{\partial M_n(t)}{\partial t} = +\xi_{m} B_{n}(t) - \phi_{m} M_{n}(t) V_{n}(t) - \mu_{m} M_{n}(t) $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 1.0/9,r'$ \frac{\partial G_n(t)}{\partial t} = +\xi_{g} B_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) - \mu_{g} G_{n}(t) + m_a V_{n}(t)\frac{G_{i-1}(t) - 2G_i(t) + G_{i+1}(t)}{(\Delta i)^2} $'
, fontsize = 1.2*AlvaFontSize)
plt.savefig(save_figure, dpi = 100)
plt.show()
# define the V-M-G partial differential equations
def dVdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dV_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
dV_dt_array[:] = +inRateV*V[:]*(1 - V[:]/maxV) - killRateVm*M[:]*V[:] - killRateVg*G[:]*V[:]
return(dV_dt_array)
def dBdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dB_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
Bcopy = np.copy(B)
centerX = Bcopy[:]
leftX = np.roll(Bcopy[:], 1)
rightX = np.roll(Bcopy[:], -1)
leftX[0] = centerX[0]
rightX[-1] = centerX[-1]
dB_dt_array[:] = +inRateB*V[:]*(1 - V[:]/maxV) + (actRateBm + alva.event_active + alva.event_OAS_B)*V[:]*B[:] - outRateB*B[:] + mutatRateB*V[:]*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2)
return(dB_dt_array)
def dMdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dM_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
dM_dt_array[:] = +inRateM*B[:] - consumeRateM*M[:]*V[:] - outRateM*M[:]
return(dM_dt_array)
def dGdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dG_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
Gcopy = np.copy(G)
centerX = Gcopy[:]
leftX = np.roll(Gcopy[:], 1)
rightX = np.roll(Gcopy[:], -1)
leftX[0] = centerX[0]
rightX[-1] = centerX[-1]
dG_dt_array[:] = +(inRateG + alva.event_OAS)*B[:] - consumeRateG*G[:]*V[:] - outRateG*G[:] + mutatRateA*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2)
return(dG_dt_array)
# In[7]:
# setting parameter
timeUnit = 'day'
if timeUnit == 'hour':
hour = float(1)
day = float(24)
elif timeUnit == 'day':
day = float(1)
hour = float(1)/24
elif timeUnit == 'year':
year = float(1)
day = float(1)/365
hour = float(1)/24/365
maxV = float(50) # max virus/micro-liter
inRateV = 0.2/hour # in-rate of virus
killRateVm = 0.0003/hour # kill-rate of virus by antibody-IgM
killRateVg = killRateVm # kill-rate of virus by antibody-IgG
inRateB = 0.06/hour # in-rate of B-cell
outRateB = inRateB/8 # out-rate of B-cell
actRateBm = killRateVm # activation rate of naive B-cell
inRateM = 0.16/hour # in-rate of antibody-IgM from naive B-cell
outRateM = inRateM/1 # out-rate of antibody-IgM from naive B-cell
consumeRateM = killRateVm # consume-rate of antibody-IgM by cleaning virus
inRateG = inRateM/10 # in-rate of antibody-IgG from memory B-cell
outRateG = outRateM/250 # out-rate of antibody-IgG from memory B-cell
consumeRateG = killRateVg # consume-rate of antibody-IgG by cleaning virus
mutatRateB = 0.00003/hour # B-cell mutation rate
mutatRateA = 0.0001/hour # antibody mutation rate
mutatRateB = 0.0000/hour # B-cell mutation rate
mutatRateA = 0.000/hour # antibody mutation rate
# time boundary and griding condition
minT = float(0)
maxT = float(6*28*day)
totalPoint_T = int(1*10**3 + 1)
gT = np.linspace(minT, maxT, totalPoint_T)
spacingT = np.linspace(minT, maxT, num = totalPoint_T, retstep = True)
gT = spacingT[0]
dt = spacingT[1]
# space boundary and griding condition
minX = float(0)
maxX = float(3)
totalPoint_X = int(maxX - minX + 1)
gX = np.linspace(minX, maxX, totalPoint_X)
gridingX = np.linspace(minX, maxX, num = totalPoint_X, retstep = True)
gX = gridingX[0]
dx = gridingX[1]
gV_array = np.zeros([totalPoint_X, totalPoint_T])
gB_array = np.zeros([totalPoint_X, totalPoint_T])
gM_array = np.zeros([totalPoint_X, totalPoint_T])
gG_array = np.zeros([totalPoint_X, totalPoint_T])
# initial output condition
#gV_array[1, 0] = float(2)
#[pre-parameter, post-parameter, recovered-day, OAS+, OSA-]
actRateBg_1st = 0.0002/hour # activation rate of memory B-cell at 1st time (pre-)
actRateBg_2nd = actRateBg_1st*10 # activation rate of memory B-cell at 2nd time (post-)
origin_virus = int(1)
current_virus = int(2)
event_parameter = np.array([[actRateBg_1st,
actRateBg_2nd,
14*day,
+5/hour,
-actRateBm - actRateBg_1st + (actRateBm + actRateBg_1st)/3,
origin_virus,
current_virus]])
# [viral population, starting time, first]
# [viral population, starting time] ---first
infection_period = 1*28*day
viral_population = np.zeros(int(maxX + 1))
viral_population[origin_virus:current_virus + 1] = 3
infection_starting_time = np.arange(int(maxX + 1))*infection_period
event_1st = np.zeros([int(maxX + 1), 2])
event_1st[:, 0] = viral_population
event_1st[:, 1] = infection_starting_time
print ('event_1st = {:}'.format(event_1st))
# [viral population, starting time] ---2nd]
viral_population = np.zeros(int(maxX + 1))
viral_population[origin_virus:current_virus + 1] = 0
infection_starting_time = np.arange(int(maxX + 1))*0
event_2nd = np.zeros([int(maxX + 1), 2])
event_2nd[:, 0] = viral_population
event_2nd[:, 1] = infection_starting_time
print ('event_2nd = {:}'.format(event_2nd))
event_table = np.array([event_parameter, event_1st, event_2nd])
# Runge Kutta numerical solution
pde_array = np.array([dVdt_array, dBdt_array, dMdt_array, dGdt_array])
initial_Out = np.array([gV_array, gB_array, gM_array, gG_array])
gOut_array = alva.AlvaRungeKutta4XT(pde_array, initial_Out, minX, maxX, totalPoint_X, minT, maxT, totalPoint_T, event_table)
# plotting
gV = gOut_array[0]
gB = gOut_array[1]
gM = gOut_array[2]
gG = gOut_array[3]
numberingFig = numberingFig + 1
for i in range(totalPoint_X):
figure_name = '-response-%i'%(i)
figure_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix)
plt.figure(numberingFig, figsize = AlvaFigSize)
plt.plot(gT, gV[i], color = 'red', label = r'$ V_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gM[i], color = 'blue', label = r'$ IgM_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gG[i], color = 'green', label = r'$ IgG_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gM[i] + gG[i], color = 'gray', linewidth = 5.0, alpha = 0.5, linestyle = 'dashed'
, label = r'$ IgM_{%i}(t) + IgG_{%i}(t) $'%(i, i))
plt.grid(True, which = 'both')
plt.title(r'$ Antibody \ from \ Virus-{%i} $'%(i), fontsize = AlvaFontSize)
plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xlim([minT, maxT])
plt.xticks(fontsize = AlvaFontSize*0.6)
plt.yticks(fontsize = AlvaFontSize*0.6)
plt.ylim([2**0, 2**14])
plt.yscale('log', basey = 2)
plt.legend(loc = (1,0), fontsize = AlvaFontSize)
plt.savefig(save_figure, dpi = 100)
plt.show()
# In[5]:
# Experimental lab data from OAS paper
gT_lab = np.array([28, 28 + 7, 28 + 14, 28 + 28]) + 28
gPR8_lab = np.array([2**(9 + 1.0/10), 2**(13 - 1.0/5), 2**(13 + 1.0/3), 2**(13 - 1.0/4)])
standard_PR8 = gPR8_lab**(3.0/4)
gFM1_lab = np.array([0, 2**(6 - 1.0/5), 2**(7 - 1.0/4), 2**(8 + 1.0/4)])
standard_FM1 = gFM1_lab**(3.0/4)
bar_width = 2.0
# Sequential immunization graph
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize = (12, 6))
plt.subplot(111)
plt.plot(gT, (gM[origin_virus] + gG[origin_virus]), linewidth = 5.0, alpha = 0.5, color = 'gray'
, label = r'$ Origin-virus $')
plt.plot(gT, (gM[origin_virus + 1] + gG[origin_virus + 1]), linewidth = 5.0, alpha = 0.5, color = 'red'
, label = r'$ Subsequence-virus $')
plt.bar(gT_lab - bar_width/2, gPR8_lab, bar_width, alpha = 0.6, color = 'gray', yerr = standard_PR8
, error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ PR8-virus $')
plt.bar(gT_lab + bar_width/2, gFM1_lab, bar_width, alpha = 0.6, color = 'red', yerr = standard_FM1
, error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ FM1-virus $')
plt.grid(True, which = 'both')
plt.title(r'$ Original \ Antigenic \ Sin \ (sequential-infection)$', fontsize = AlvaFontSize)
plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xticks(fontsize = AlvaFontSize*0.6)
plt.yticks(fontsize = AlvaFontSize*0.6)
plt.xlim([minT, 6*30*day])
plt.ylim([2**5, 2**14])
plt.yscale('log', basey = 2)
# gca()---GetCurrentAxis and Format the ticklabel to be 2**x
plt.gca().yaxis.set_major_formatter(FuncFormatter(lambda x, pos: int(2**(np.log(x)/np.log(2)))))
#plt.gca().xaxis.set_major_locator(plt.MultipleLocator(7))
plt.legend(loc = (1, 0), fontsize = AlvaFontSize)
plt.show()
# In[6]:
# Experimental lab data from OAS paper
gT_lab = np.array([28, 28 + 7, 28 + 14, 28 + 28]) + 28
gPR8_lab = np.array([2**(9 + 1.0/10), 2**(13 - 1.0/5), 2**(13 + 1.0/3), 2**(13 - 1.0/4)])
standard_PR8 = gPR8_lab**(3.0/4)
gFM1_lab = np.array([0, 2**(6 - 1.0/5), 2**(7 - 1.0/4), 2**(8 + 1.0/4)])
standard_FM1 = gFM1_lab**(3.0/4)
bar_width = 1.0
# Sequential immunization graph
figure_name = '-Original-Antigenic-Sin-infection'
figure_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix)
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize = (12, 6))
plt.subplot(111)
plt.plot(gT, (gM[origin_virus] + gG[origin_virus]), linewidth = 5.0, alpha = 0.5, color = 'gray'
, label = r'$ Origin-virus $')
plt.plot(gT, (gM[origin_virus + 1] + gG[origin_virus + 1]), linewidth = 5.0, alpha = 0.5, color = 'red'
, label = r'$ Subsequence-virus $')
plt.bar(gT_lab - bar_width/2, gPR8_lab, bar_width, alpha = 0.6, color = 'gray', yerr = standard_PR8
, error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ PR8-virus $')
plt.bar(gT_lab + bar_width/2, gFM1_lab, bar_width, alpha = 0.6, color = 'red', yerr = standard_FM1
, error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ FM1-virus $')
plt.grid(True, which = 'both')
plt.title(r'$ Original \ Antigenic \ Sin \ (sequential-infection)$', fontsize = AlvaFontSize)
plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xticks(fontsize = AlvaFontSize*0.6)
plt.yticks(fontsize = AlvaFontSize*0.6)
plt.xlim([minT, 3*30*day])
plt.ylim([2**5, 2**14])
plt.yscale('log', basey = 2)
# gca()---GetCurrentAxis and Format the ticklabel to be 2**x
plt.gca().yaxis.set_major_formatter(FuncFormatter(lambda x, pos: int(2**(np.log(x)/np.log(2)))))
plt.gca().xaxis.set_major_locator(plt.MultipleLocator(7))
plt.legend(loc = (1, 0), fontsize = AlvaFontSize)
plt.savefig(save_figure, dpi = 100, bbox_inches='tight')
plt.show()
# In[ ]:
| gpl-2.0 | 6,642,361,420,170,856,000 | 38.154303 | 257 | 0.619856 | false |
lucifurtun/myquotes | apps/search/bible/management/commands/zefania_xml_generator.py | 1 | 2231 | import json
from itertools import groupby
import xmltodict
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Generates zefania xml from different formats'
def handle(self, *args, **options):
with open('NTR.json') as f:
data = json.load(f)
current_b_number = 0
for item in data:
if item['chapter'] == 1 and item['verse'] == 1:
current_b_number += 1
item['b_number'] = current_b_number
grouped_books = groupby(data, lambda item: item['b_number'])
books_list = []
for book_grouper, chapters in grouped_books:
chapters_list = []
grouped_chapters = groupby(chapters, lambda item: item['chapter'])
for chapter_grouper, verses in grouped_chapters:
chapters_list.append({
'number': chapter_grouper,
'items': list(verses)
})
books_list.append({
'title': chapters_list[0]['items'][0]['long_name'],
'number': int(book_grouper),
'items': chapters_list
})
with open('NTR.xml', 'w+') as f:
d = {
'XMLBIBLE': {
'BIBLEBOOK': [
{
'@bnumber': book['number'],
'@bname': book['title'],
'CHAPTER': [
{
'@cnumber': chapter['number'],
'VERS': [
{
'@vnumber': verse['verse'],
'#text': verse['text']
} for verse in chapter['items']
]
} for chapter in book['items']
]
} for book in books_list
]
}
}
f.write(xmltodict.unparse(d, pretty=True))
self.stdout.write(self.style.SUCCESS('Imported!'))
| bsd-3-clause | -6,679,120,634,669,536,000 | 32.298507 | 78 | 0.404303 | false |
amchoukir/ycmd | build.py | 1 | 6445 | #!/usr/bin/env python
import os
import os.path as p
import sys
major, minor = sys.version_info[ 0 : 2 ]
if major != 2 or minor < 6:
sys.exit( 'The build script requires Python version >= 2.6 and < 3.0; '
'your version of Python is ' + sys.version )
DIR_OF_THIS_SCRIPT = p.dirname( p.abspath( __file__ ) )
DIR_OF_THIRD_PARTY = p.join( DIR_OF_THIS_SCRIPT, 'third_party' )
for folder in os.listdir( DIR_OF_THIRD_PARTY ):
abs_folder_path = p.join( DIR_OF_THIRD_PARTY, folder )
if p.isdir( abs_folder_path ) and not os.listdir( abs_folder_path ):
sys.exit( 'Some folders in ' + DIR_OF_THIRD_PARTY + ' are empty; '
'you probably forgot to run:'
'\n\tgit submodule update --init --recursive\n\n' )
sys.path.insert( 0, p.abspath( p.join( DIR_OF_THIRD_PARTY, 'sh' ) ) )
sys.path.insert( 0, p.abspath( p.join( DIR_OF_THIRD_PARTY, 'argparse' ) ) )
import sh
import platform
import argparse
import multiprocessing
from distutils.spawn import find_executable
def OnMac():
return platform.system() == 'Darwin'
def PathToFirstExistingExecutable( executable_name_list ):
for executable_name in executable_name_list:
path = find_executable( executable_name )
if path:
return path
return None
def NumCores():
ycm_cores = os.environ.get( 'YCM_CORES' )
if ycm_cores:
return int( ycm_cores )
try:
return multiprocessing.cpu_count()
except NotImplementedError:
return 1
def CheckDeps():
if not PathToFirstExistingExecutable( [ 'cmake' ] ):
sys.exit( 'Please install CMake and retry.')
def CustomPythonCmakeArgs():
# The CMake 'FindPythonLibs' Module does not work properly.
# So we are forced to do its job for it.
python_prefix = sh.python_config( '--prefix' ).strip()
if p.isfile( p.join( python_prefix, '/Python' ) ):
python_library = p.join( python_prefix, '/Python' )
python_include = p.join( python_prefix, '/Headers' )
else:
which_python = sh.python(
'-c',
'import sys;i=sys.version_info;print "python%d.%d" % (i[0], i[1])'
).strip()
lib_python = '{0}/lib/lib{1}'.format( python_prefix, which_python ).strip()
if p.isfile( '{0}.a'.format( lib_python ) ):
python_library = '{0}.a'.format( lib_python )
# This check is for CYGWIN
elif p.isfile( '{0}.dll.a'.format( lib_python ) ):
python_library = '{0}.dll.a'.format( lib_python )
else:
python_library = '{0}.dylib'.format( lib_python )
python_include = '{0}/include/{1}'.format( python_prefix, which_python )
python_executable = '{0}/bin/python'.format( python_prefix )
return [
'-DPYTHON_LIBRARY={0}'.format( python_library ),
'-DPYTHON_INCLUDE_DIR={0}'.format( python_include ),
'-DPYTHON_EXECUTABLE={0}'.format( python_executable )
]
def ParseArguments():
parser = argparse.ArgumentParser()
parser.add_argument( '--clang-completer', action = 'store_true',
help = 'Build C-family semantic completion engine.')
parser.add_argument( '--system-libclang', action = 'store_true',
help = 'Use system libclang instead of downloading one '
'from llvm.org. NOT RECOMMENDED OR SUPPORTED!' )
parser.add_argument( '--omnisharp-completer', action = 'store_true',
help = 'Build C# semantic completion engine.' )
parser.add_argument( '--gocode-completer', action = 'store_true',
help = 'Build Go semantic completion engine.' )
parser.add_argument( '--system-boost', action = 'store_true',
help = 'Use the system boost instead of bundled one. '
'NOT RECOMMENDED OR SUPPORTED!')
args = parser.parse_args()
if args.system_libclang and not args.clang_completer:
sys.exit( "You can't pass --system-libclang without also passing "
"--clang-completer as well." )
return args
def GetCmakeArgs( parsed_args ):
cmake_args = []
if parsed_args.clang_completer:
cmake_args.append( '-DUSE_CLANG_COMPLETER=ON' )
if parsed_args.system_libclang:
cmake_args.append( '-DUSE_SYSTEM_LIBCLANG=ON' )
if parsed_args.system_boost:
cmake_args.append( '-DUSE_SYSTEM_BOOST=ON' )
extra_cmake_args = os.environ.get( 'EXTRA_CMAKE_ARGS', '' )
cmake_args.extend( extra_cmake_args.split() )
return cmake_args
def RunYcmdTests( build_dir ):
tests_dir = p.join( build_dir, 'ycm/tests' )
sh.cd( tests_dir )
new_env = os.environ.copy()
new_env[ 'LD_LIBRARY_PATH' ] = DIR_OF_THIS_SCRIPT
sh.Command( p.join( tests_dir, 'ycm_core_tests' ) )(
_env = new_env, _out = sys.stdout )
def BuildYcmdLibs( cmake_args ):
build_dir = unicode( sh.mktemp( '-d', '-t', 'ycm_build.XXXXXX' ) ).strip()
try:
full_cmake_args = [ '-G', 'Unix Makefiles' ]
if OnMac():
full_cmake_args.extend( CustomPythonCmakeArgs() )
full_cmake_args.extend( cmake_args )
full_cmake_args.append( p.join( DIR_OF_THIS_SCRIPT, 'cpp' ) )
sh.cd( build_dir )
sh.cmake( *full_cmake_args, _out = sys.stdout )
build_target = ( 'ycm_support_libs' if 'YCM_TESTRUN' not in os.environ else
'ycm_core_tests' )
sh.make( '-j', NumCores(), build_target, _out = sys.stdout,
_err = sys.stderr )
if 'YCM_TESTRUN' in os.environ:
RunYcmdTests( build_dir )
finally:
sh.cd( DIR_OF_THIS_SCRIPT )
sh.rm( '-rf', build_dir )
def BuildOmniSharp():
build_command = PathToFirstExistingExecutable(
[ 'msbuild', 'msbuild.exe', 'xbuild' ] )
if not build_command:
sys.exit( 'msbuild or xbuild is required to build Omnisharp' )
sh.cd( p.join( DIR_OF_THIS_SCRIPT, 'third_party/OmniSharpServer' ) )
sh.Command( build_command )( "/property:Configuration=Release", _out = sys.stdout )
def BuildGoCode():
if not find_executable( 'go' ):
sys.exit( 'go is required to build gocode' )
sh.cd( p.join( DIR_OF_THIS_SCRIPT, 'third_party/gocode' ) )
sh.Command( 'go' )( 'build', _out = sys.stdout )
def ApplyWorkarounds():
# Some OSs define a 'make' ENV VAR and this confuses sh when we try to do
# sh.make. See https://github.com/Valloric/YouCompleteMe/issues/1401
os.environ.pop('make', None)
def Main():
ApplyWorkarounds()
CheckDeps()
args = ParseArguments()
BuildYcmdLibs( GetCmakeArgs( args ) )
if args.omnisharp_completer:
BuildOmniSharp()
if args.gocode_completer:
BuildGoCode()
if __name__ == "__main__":
Main()
| gpl-3.0 | 5,089,517,288,401,382,000 | 31.550505 | 85 | 0.638169 | false |
snehasi/servo | components/style/properties/data.py | 1 | 11812 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
PHYSICAL_SIDES = ["top", "left", "bottom", "right"]
LOGICAL_SIDES = ["block-start", "block-end", "inline-start", "inline-end"]
PHYSICAL_SIZES = ["width", "height"]
LOGICAL_SIZES = ["block-size", "inline-size"]
# bool is True when logical
ALL_SIDES = [(side, False) for side in PHYSICAL_SIDES] + [(side, True) for side in LOGICAL_SIDES]
ALL_SIZES = [(size, False) for size in PHYSICAL_SIZES] + [(size, True) for size in LOGICAL_SIZES]
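# e.g. ALL_SIDES contains ("top", False) ... ("inline-end", True) and
# ALL_SIZES contains ("width", False) ... ("inline-size", True).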
def maybe_moz_logical_alias(product, side, prop):
if product == "gecko" and side[1]:
axis, dir = side[0].split("-")
if axis == "inline":
return prop % dir
return None
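# e.g. maybe_moz_logical_alias("gecko", ("inline-end", True), "-moz-margin-%s")
# -> "-moz-margin-end"; physical sides or non-gecko products return None.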
def to_rust_ident(name):
name = name.replace("-", "_")
if name in ["static", "super", "box", "move"]: # Rust keywords
name += "_"
return name
def to_camel_case(ident):
return re.sub("(^|_|-)([a-z])", lambda m: m.group(2).upper(), ident.strip("_").strip("-"))
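# e.g. to_rust_ident("move") -> "move_" (escapes Rust keywords) and
# to_camel_case("border-top-width") -> "BorderTopWidth".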
class Keyword(object):
def __init__(self, name, values, gecko_constant_prefix=None,
gecko_enum_prefix=None, custom_consts=None,
extra_gecko_values=None, extra_servo_values=None,
gecko_strip_moz_prefix=True,
gecko_inexhaustive=None):
self.name = name
self.values = values.split()
if gecko_constant_prefix and gecko_enum_prefix:
raise TypeError("Only one of gecko_constant_prefix and gecko_enum_prefix can be specified")
self.gecko_constant_prefix = gecko_constant_prefix or \
"NS_STYLE_" + self.name.upper().replace("-", "_")
self.gecko_enum_prefix = gecko_enum_prefix
self.extra_gecko_values = (extra_gecko_values or "").split()
self.extra_servo_values = (extra_servo_values or "").split()
self.consts_map = {} if custom_consts is None else custom_consts
self.gecko_strip_moz_prefix = gecko_strip_moz_prefix
self.gecko_inexhaustive = gecko_inexhaustive or (gecko_enum_prefix is None)
def gecko_values(self):
return self.values + self.extra_gecko_values
def servo_values(self):
return self.values + self.extra_servo_values
def values_for(self, product):
if product == "gecko":
return self.gecko_values()
elif product == "servo":
return self.servo_values()
else:
raise Exception("Bad product: " + product)
def gecko_constant(self, value):
moz_stripped = value.replace("-moz-", '') if self.gecko_strip_moz_prefix else value.replace("-moz-", 'moz-')
mapped = self.consts_map.get(value)
if self.gecko_enum_prefix:
parts = moz_stripped.replace('-', '_').split('_')
parts = mapped if mapped else [p.title() for p in parts]
return self.gecko_enum_prefix + "::" + "".join(parts)
else:
suffix = mapped if mapped else moz_stripped.replace("-", "_")
return self.gecko_constant_prefix + "_" + suffix.upper()
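    # e.g. with no enum prefix, Keyword("float", "left right none")
    # .gecko_constant("left") -> "NS_STYLE_FLOAT_LEFT"; with
    # gecko_enum_prefix="StyleFloat" it would be "StyleFloat::Left".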
def needs_cast(self):
return self.gecko_enum_prefix is None
def maybe_cast(self, type_str):
return "as " + type_str if self.needs_cast() else ""
def arg_to_bool(arg):
if isinstance(arg, bool):
return arg
assert arg in ["True", "False"]
return arg == "True"
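# e.g. arg_to_bool("True") -> True; any value other than a bool or the strings
# "True"/"False" trips the assertion.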
class Longhand(object):
def __init__(self, style_struct, name, spec=None, animation_type=None, derived_from=None, keyword=None,
predefined_type=None, custom_cascade=False, experimental=False, internal=False,
need_clone=False, need_index=False, gecko_ffi_name=None, depend_on_viewport_size=False,
allowed_in_keyframe_block=True, complex_color=False, cast_type='u8',
has_uncacheable_values=False, logical=False, alias=None, extra_prefixes=None, boxed=False,
creates_stacking_context=False, fixpos_cb=False, abspos_cb=False):
self.name = name
if not spec:
raise TypeError("Spec should be specified for %s" % name)
self.spec = spec
self.keyword = keyword
self.predefined_type = predefined_type
self.ident = to_rust_ident(name)
self.camel_case = to_camel_case(self.ident)
self.style_struct = style_struct
self.experimental = ("layout.%s.enabled" % name) if experimental else None
self.custom_cascade = custom_cascade
self.internal = internal
self.need_index = need_index
self.has_uncacheable_values = has_uncacheable_values
self.gecko_ffi_name = gecko_ffi_name or "m" + self.camel_case
self.depend_on_viewport_size = depend_on_viewport_size
self.derived_from = (derived_from or "").split()
self.complex_color = complex_color
self.cast_type = cast_type
self.logical = arg_to_bool(logical)
self.alias = alias.split() if alias else []
self.extra_prefixes = extra_prefixes.split() if extra_prefixes else []
self.boxed = arg_to_bool(boxed)
self.creates_stacking_context = arg_to_bool(creates_stacking_context)
self.fixpos_cb = arg_to_bool(fixpos_cb)
self.abspos_cb = arg_to_bool(abspos_cb)
# https://drafts.csswg.org/css-animations/#keyframes
# > The <declaration-list> inside of <keyframe-block> accepts any CSS property
# > except those defined in this specification,
# > but does accept the `animation-play-state` property and interprets it specially.
self.allowed_in_keyframe_block = allowed_in_keyframe_block \
and allowed_in_keyframe_block != "False"
        # animation_type is a string enum rather than a plain bool because a bare
        # True/False argument at the call sites would be too opaque to read.
if animation_type is None:
raise TypeError("animation_type should be specified for (" + name + ")")
animation_types = ["none", "normal", "discrete"]
if animation_type not in animation_types:
raise TypeError("animation_type should be one of (" + str(animation_types) + ")")
self.animation_type = animation_type
self.animatable = animation_type != "none"
if self.logical:
# Logical properties don't animate separately
self.animatable = False
# NB: Animatable implies clone because a property animation requires a
# copy of the computed value.
#
# See components/style/helpers/animated_properties.mako.rs.
self.need_clone = need_clone or self.animatable
class Shorthand(object):
def __init__(self, name, sub_properties, spec=None, experimental=False, internal=False,
allowed_in_keyframe_block=True, alias=None, extra_prefixes=None):
self.name = name
if not spec:
raise TypeError("Spec should be specified for %s" % name)
self.spec = spec
self.ident = to_rust_ident(name)
self.camel_case = to_camel_case(self.ident)
self.derived_from = None
self.experimental = ("layout.%s.enabled" % name) if experimental else None
self.sub_properties = sub_properties
self.internal = internal
self.alias = alias.split() if alias else []
self.extra_prefixes = extra_prefixes.split() if extra_prefixes else []
# https://drafts.csswg.org/css-animations/#keyframes
# > The <declaration-list> inside of <keyframe-block> accepts any CSS property
# > except those defined in this specification,
# > but does accept the `animation-play-state` property and interprets it specially.
self.allowed_in_keyframe_block = allowed_in_keyframe_block \
and allowed_in_keyframe_block != "False"
class Method(object):
def __init__(self, name, return_type=None, arg_types=None, is_mut=False):
self.name = name
self.return_type = return_type
self.arg_types = arg_types or []
self.is_mut = is_mut
def arg_list(self):
args = ["_: " + x for x in self.arg_types]
args = ["&mut self" if self.is_mut else "&self"] + args
return ", ".join(args)
def signature(self):
sig = "fn %s(%s)" % (self.name, self.arg_list())
if self.return_type:
sig = sig + " -> " + self.return_type
return sig
def declare(self):
return self.signature() + ";"
def stub(self):
return self.signature() + "{ unimplemented!() }"
class StyleStruct(object):
def __init__(self, name, inherited, gecko_name=None, additional_methods=None):
self.gecko_struct_name = "Gecko" + name
self.name = name
self.name_lower = name.lower()
self.ident = to_rust_ident(self.name_lower)
self.longhands = []
self.inherited = inherited
self.gecko_name = gecko_name or name
self.gecko_ffi_name = "nsStyle" + self.gecko_name
self.additional_methods = additional_methods or []
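# Illustrative sketch (hypothetical property names, not part of the build data):
# Method models a Rust trait-method signature and StyleStruct groups longhands
# together with any such extra methods the templates should emit.
def _example_style_struct_with_methods():
    clone = Method('clone_display',
                   return_type='longhands::display::computed_value::T')
    box_struct = StyleStruct('Box', inherited=False, additional_methods=[clone])
    assert box_struct.gecko_ffi_name == 'nsStyleBox'
    expected = 'fn clone_display(&self) -> longhands::display::computed_value::T;'
    assert clone.declare() == expected
    return box_struct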
class PropertiesData(object):
"""
The `testing` parameter means that we're running tests.
In this situation, the `product` value is ignored while choosing
    which shorthands and longhands to generate; instead, all properties for
which code exists for either servo or stylo are generated. Note that we skip
this behavior when the style crate is being built in gecko mode, because we
need manual glue for such properties and we don't have it.
"""
def __init__(self, product, testing):
self.product = product
self.testing = testing and product != "gecko"
self.style_structs = []
self.current_style_struct = None
self.longhands = []
self.longhands_by_name = {}
self.derived_longhands = {}
self.shorthands = []
def new_style_struct(self, *args, **kwargs):
style_struct = StyleStruct(*args, **kwargs)
self.style_structs.append(style_struct)
self.current_style_struct = style_struct
def active_style_structs(self):
return [s for s in self.style_structs if s.additional_methods or s.longhands]
def add_prefixed_aliases(self, property):
# FIXME Servo's DOM architecture doesn't support vendor-prefixed properties.
# See servo/servo#14941.
if self.product == "gecko":
for prefix in property.extra_prefixes:
property.alias.append('-%s-%s' % (prefix, property.name))
def declare_longhand(self, name, products="gecko servo", disable_when_testing=False, **kwargs):
products = products.split()
if self.product not in products and not (self.testing and not disable_when_testing):
return
longhand = Longhand(self.current_style_struct, name, **kwargs)
self.add_prefixed_aliases(longhand)
self.current_style_struct.longhands.append(longhand)
self.longhands.append(longhand)
self.longhands_by_name[name] = longhand
for name in longhand.derived_from:
self.derived_longhands.setdefault(name, []).append(longhand)
return longhand
def declare_shorthand(self, name, sub_properties, products="gecko servo",
disable_when_testing=False, *args, **kwargs):
products = products.split()
if self.product not in products and not (self.testing and not disable_when_testing):
return
sub_properties = [self.longhands_by_name[s] for s in sub_properties]
shorthand = Shorthand(name, sub_properties, *args, **kwargs)
self.add_prefixed_aliases(shorthand)
self.shorthands.append(shorthand)
return shorthand
| mpl-2.0 | 6,119,045,123,851,958,000 | 41.489209 | 116 | 0.626143 | false |
vcatechnology/cmake-boilerplate | cmake/pygh/__init__.py | 1 | 25114 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import os
import sys
import json
import errno
import platform
import fileinput
import subprocess
from datetime import datetime, timezone
try:
import requests
except ImportError:
raise ImportError(
'Failed to import \'requests\', run \'pip install requests\'')
try:
import pystache
except ImportError:
raise ImportError(
'Failed to import \'pystache\', run \'pip install pystache\'')
class ReleaseError(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
class ExecuteCommandError(Exception):
def __init__(self, message, cmd, code, out, err):
self.message = message
self.cmd = cmd
self.code = code
self.out = out
self.err = err
def __str__(self):
return self.message
class EmptyLogger(object):
'''Provides an implementation of an empty logging function'''
def debug(self, *k, **kw):
pass
def info(self, *k, **kw):
pass
def warn(self, *k, **kw):
pass
def error(self, *k, **kw):
pass
def critical(self, *k, **kw):
pass
def setLevel(self, *k, **kw):
pass
class Version(object):
'''Represents a version number'''
def __init__(self, *k, **kw):
'''
A version number can be instantiate with:
- a dot-separated string
- Version('1.2.3')
- an iterable
- Version([1, 2, 3])
- seperate arguments
- `Version(1, 2, 3)`
- another version class
- `Version(Version(1, 2, 3))`
- a dictionary
- `Version({'minor':2,'major':1,'patch':3})`
- keywords
- `Version(minor = 2,major = 1, patch = 3)`
'''
try:
version = (k[0].major, k[0].minor, k[0].patch)
except (AttributeError, TypeError):
try:
version = (kw['major'], kw['minor'], kw['patch'])
except (KeyError, TypeError):
try:
version = (k[0]['major'], k[0]['minor'], k[0]['patch'])
except (KeyError, TypeError):
if isinstance(k[0], str):
version = k[0].split('.')
else:
try:
version = (k[0][0], k[0][1], k[0][2])
except (IndexError, TypeError):
version = k
self.major = int(version[0])
self.minor = int(version[1])
self.patch = int(version[2])
def bump(self, category):
'''
Bumps the version number depending on the category
'''
setattr(self, category, getattr(self, category) + 1)
if category == 'major':
self.minor = 0
self.patch = 0
elif category == 'minor':
self.patch = 0
def __gt__(self, other):
return tuple(self) > tuple(other)
def __ge__(self, other):
return tuple(self) >= tuple(other)
def __lt__(self, other):
return tuple(self) < tuple(other)
def __le__(self, other):
return tuple(self) <= tuple(other)
def __eq__(self, other):
return tuple(self) == tuple(other)
def __ne__(self, other):
return tuple(self) != tuple(other)
def __getitem__(self, index):
'''
Allows iteration of the version number
'''
if index == 0:
return self.major
elif index == 1:
return self.minor
elif index == 2:
return self.patch
else:
raise IndexError('version index out of range')
def __repr__(self):
'''
Provides a dot-separated string representation of the version number
'''
return '%i.%i.%i' % (self.major, self.minor, self.patch)
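# Illustrative sketch (hypothetical helper): the constructor forms below are
# equivalent, and bump() resets the lower-order components.
def _example_version_usage():
    v = Version('1.2.3')
    assert v == Version(1, 2, 3) == Version({'major': 1, 'minor': 2, 'patch': 3})
    v.bump('minor')
    assert str(v) == '1.3.0'  # the patch number resets on a minor bump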
class GitVersion(Version):
'''A git repository version number'''
def __init__(self, *k, **kw):
'''
A git version number can be instantiate with:
- a dot-separated string
- Version('1.2.3.ef3aa43d-dirty')
- an iterable
- Version([1, 2, 3, 'ef3aa43d', True])
- seperate arguments
- `Version(1, 2, 3, 'ef3aa43d', True)`
- another version class
- `Version(Version(1, 2, 3, 'ef3aa43d', True))`
- a dictionary
- `Version({'minor':2,'major':1,'patch':3, 'commit': 'ef3aa43d', 'dirty', True})`
- keywords
- `Version(minor = 2,major = 1, patch = 3, commit ='ef3aa43d', dirty =True)`
'''
super(GitVersion, self).__init__(*k, **kw)
try:
version = (k[0].commit, k[0].dirty)
except (AttributeError, TypeError):
try:
version = (kw['commit'], kw['dirty'])
except (KeyError, TypeError):
try:
version = (k[0]['commit'], k[0]['dirty'])
except (KeyError, TypeError):
if isinstance(k[0], str):
                    # Keep a 1-tuple so that self.commit below receives the
                    # whole commit component rather than its first character.
                    version = (k[0].split('.')[3],)
else:
try:
version = (k[0][3], k[0][4])
except (IndexError, TypeError):
version = k[3:]
self.commit = str(version[0])
try:
self.dirty = bool(version[1])
        except IndexError:
try:
split = self.commit.split('-')
self.dirty = (split[1] == 'dirty')
self.commit = split[0]
            except IndexError:
self.dirty = False
try:
int(self.commit, 16)
except ValueError:
            raise ValueError('The git commit string is not hexadecimal: %s'
% self.commit)
def __repr__(self):
'''
Provides a dot-separated string representation of the version number
'''
string = '%s.%s' % (super(GitVersion, self).__repr__(),
self.commit[:8])
if self.dirty:
string += '-dirty'
return string
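# Illustrative sketch (hypothetical helper): a GitVersion also carries the
# commit hash and dirty flag, which only influence the string form.
def _example_git_version_usage():
    gv = GitVersion(1, 2, 3, 'ef3aa43d', True)
    assert str(gv) == '1.2.3.ef3aa43d-dirty'
    assert gv > Version(1, 2, 2)  # comparisons use only the numeric triple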
def find_exe_in_path(filename, path=None):
'''
Finds an executable in the PATH environment variable
'''
if platform.system() == 'Windows':
filename += '.exe'
if path is None:
path = os.environ.get('PATH', '')
if type(path) is type(''):
pathlist = path.split(os.pathsep)
    return [os.path.join(directory, filename)
            for directory in pathlist
            if os.path.exists(os.path.join(directory, filename))]
def execute_command(cmd,
error_message='Failed to run external program',
expected=0,
cwd=os.getcwd()):
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
cwd=cwd)
(out, err) = p.communicate()
if expected != None and p.returncode != expected:
raise ExecuteCommandError(error_message, cmd, p.returncode, out, err)
return (p.returncode, out, err)
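# Illustrative usage sketch (hypothetical command): execute_command() returns a
# (returncode, stdout, stderr) triple and raises ExecuteCommandError when the
# exit code differs from `expected`.
def _example_execute_command():
    code, out, err = execute_command(['git', '--version'],
                                     'Failed to query the git version')
    return out.strip()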
def close_milestone(number, repo, token, logger=EmptyLogger()):
logger.debug('Closing milestone #%d for %s' % (number, repo))
number = int(number)
r = requests.patch('https://api.github.com/repos/%s/milestones/%d' %
(repo, number),
params={
'access_token': token,
},
json={
'state': 'closed',
})
if r.status_code != 200:
json = r.json()
message = json['message']
errors = json.get('errors', [])
for e in errors:
message += '\n - %s: %s: %s' % (e.get('resource', 'unknown'),
e.get('field', 'unknown'),
e.get('code', 'unknown'))
raise ReleaseError('Failed to close github milestone #%d: %s' %
(number, message))
logger.info('Closed milestone #%d' % number)
return r.json()
def get_milestones(repo, token, logger=EmptyLogger()):
logger.debug('Retrieving milestones for %s' % repo)
r = requests.get('https://api.github.com/repos/%s/milestones' % repo,
params={
'access_token': token,
})
if r.status_code != 200:
raise ReleaseError('Failed to retrieve github milestones from %s: %s' %
(repo, r.json()['message']))
return r.json()
def get_git_tag_version(path,
git_executable=find_exe_in_path('git'),
logger=EmptyLogger()):
if isinstance(git_executable, list):
git_executable = git_executable[0]
logger.debug('Getting latest git tag version')
# Get the head commit
cmd = [git_executable, 'rev-parse', 'HEAD']
_, out, _ = execute_command(cmd,
'Failed to get HEAD revision of repository',
cwd=path)
commit = out.split('\n')[0].strip()
if commit == 'HEAD' or not commit:
commit = '0000000000000000000000000000000000000000'
# Check if dirty
dirty = False
cmd = [git_executable, 'diff-index', '--name-only', 'HEAD']
if execute_command(
cmd,
'Failed to check if the project had local modifications',
cwd=path)[1]:
dirty = True
cmd = [git_executable, 'status', '--porcelain']
if '?? ' in execute_command(
cmd,
'Failed to check if the project had local modifications',
cwd=path)[1]:
dirty = True
# Find the latest tag
cmd = [git_executable, 'describe', '--match=v[0-9]*', 'HEAD']
code, out, _ = execute_command(cmd, expected=None, cwd=path)
if code:
return GitVersion(0, 0, 0, commit, dirty)
# Parse the tag
re_tag = re.compile('^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[0-9]+-g[a-f0-9]+)?')
matches = re_tag.match(out)
major = int(matches.group(1))
minor = int(matches.group(2))
revision = int(matches.group(3))
version = GitVersion(major, minor, revision, commit, dirty)
logger.info('Latest git tag version %s' % version)
return version
re_remote_fetch_url = re.compile(
r'Fetch URL: (?:(?:(git)(?:@))|(?:(https)(?:://)))([^:/]+)[:/]([^/]+/[^.]+)(?:\.git)?')
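# Illustrative fetch URLs the pattern above is intended to match (assumption
# based on its capture groups): the protocol lands in group 1 or 2, the server
# in group 3 and the '<owner>/<repo>' path in group 4, e.g.
#   'Fetch URL: git@github.com:owner/project.git'
#   'Fetch URL: https://github.com/owner/project.git'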
def get_repo(path=os.getcwd(), git_executable=find_exe_in_path('git')):
if isinstance(git_executable, list):
git_executable = git_executable[0]
cmd = [git_executable, 'remote', 'show', '-n', 'origin']
code, out, err = execute_command(
cmd,
'Failed to get repository remote information',
cwd=path)
match = re_remote_fetch_url.search(out)
if not match:
raise ExecuteCommandError('Failed to match fetch url', cmd, code, out,
err)
protocol = match.group(1) or match.group(2)
server = match.group(3)
if server != 'github.com':
raise ExecuteCommandError('Repository is not from github', cmd, code,
out, err)
repo = match.group(4)
return repo
def get_git_version(git_executable=find_exe_in_path('git'),
logger=EmptyLogger()):
if isinstance(git_executable, list):
git_executable = git_executable[0]
logger.debug('Getting git version')
_, out, _ = execute_command([git_executable, '--version'])
git_version = Version(out.replace('git version ', ''))
logger.debug('Using git %s' % git_version)
return git_version
changelog_template = \
'## [v{{version.to}}](https://github.com/{{repo}}/tree/v{{version.to}}) ({{date}})\n' \
'{{#version.from}}' \
'[Full Changelog](https://github.com/{{repo}}/compare/v{{version.from}}...v{{version.to}})' \
'{{/version.from}}' \
'{{#milestone}}' \
'{{#version.from}} {{/version.from}}' \
'[Milestone]({{html_url}})' \
'{{/milestone}}\n' \
'\n' \
'{{description}}\n' \
'\n' \
'**Closed issues:**\n' \
'{{#issues}}\n' \
'\n' \
' - {{title}} [\#{{number}}]({{html_url}})\n' \
'{{/issues}}\n' \
'{{^issues}}\n' \
'\n' \
'_None_\n' \
'{{/issues}}\n' \
'\n' \
'**Merged pull requests:**\n' \
'{{#pullrequests}}\n' \
'\n' \
' - {{title}} [\#{{number}}]({{pull_request.html_url}})\n' \
'{{/pullrequests}}\n' \
'{{^pullrequests}}\n' \
'\n' \
'_None_\n' \
'{{/pullrequests}}\n'
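# Illustrative sketch (hypothetical helper): the template above is rendered
# with pystache using a context shaped like the one built in create_changelog()
# below.
def _example_render_changelog():
    data = {
        'version': {'from': '1.0.0', 'to': '1.1.0'},
        'milestone': None,
        'date': '2016-01-01',
        'repo': 'owner/project',
        'description': 'The v1.1.0 release of project',
        'issues': [],
        'pullrequests': [],
    }
    return pystache.Renderer().render(pystache.parse(changelog_template), data)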
def get_closed_issues(repo,
token=os.environ.get('GITHUB_TOKEN', None),
since=None,
logger=EmptyLogger()):
logger.debug('Getting issues for %s' % (repo))
if not token:
raise ReleaseError('Must provide a valid GitHub API token')
issues = []
params = {'state': 'closed', 'sort': 'asc', 'access_token': token, }
if since:
since = since.astimezone(timezone.utc)
params['since'] = since.isoformat()[:19] + 'Z'
r = requests.get('https://api.github.com/repos/%s/issues' % repo,
params=params)
if r.status_code != 200:
raise ReleaseError('Failed to retrieve github issues from %s: %s' %
(repo, r.json()['message']))
issues = r.json()
logger.debug('Retrieved %i closed issues for %s' % (len(issues), repo))
return issues
def create_changelog(current_version,
previous_version,
repo,
milestone=None,
token=os.environ.get('GITHUB_TOKEN', None),
description=None,
since=None,
date=datetime.utcnow(),
template=changelog_template,
logger=EmptyLogger()):
logger.debug('Creating changelog for %s from %s' % (current_version, repo))
description = description or 'The v%s release of %s' % (current_version,
repo.split('/')[1])
issues = get_closed_issues(repo=repo,
token=token,
since=since,
logger=logger)
if milestone:
milestone[
'html_url'] = 'https://github.com/%s/issues?q=milestone%%3Av%s+is%%3Aall' % (
repo, current_version)
data = {
'version': {
'from': str(previous_version)
if previous_version > (0, 0, 0) else None,
'to': str(current_version),
},
'milestone': milestone,
'date': date.isoformat()[:10],
'repo': repo,
'description': description,
'issues': [i for i in issues if not i.get('pull_request', None)],
'pullrequests': [i for i in issues if i.get('pull_request', None)],
}
renderer = pystache.Renderer()
parsed = pystache.parse(template)
changelog = renderer.render(parsed, data)
logger.info('Rendered changelog')
return changelog
def write_version(path, version, logger=EmptyLogger()):
if not isinstance(version, Version):
raise ValueError('must provide a version class')
version = Version(version)
with open(path, 'w') as f:
f.write('%s' % version)
logger.info('Wrote %s' % os.path.basename(path))
def write_changelog(path, changelog, logger=EmptyLogger()):
try:
for line in fileinput.input(path, inplace=True):
sys.stdout.write(line)
if line.startswith('# Changelog'):
print()
sys.stdout.write(changelog)
logger.info('Updated %s' % os.path.basename(path))
except EnvironmentError as e:
if e.errno == errno.ENOENT:
with open(path, 'w') as f:
f.write('# Changelog\n\n')
f.write(changelog)
logger.info('Created %s' % os.path.basename(path))
else:
raise
def get_git_root(path, git_executable=find_exe_in_path('git')):
abspath = os.path.abspath(path)
if os.path.isfile(abspath):
abspath = os.path.dirname(abspath)
cmd = [git_executable, 'rev-parse', '--show-toplevel']
_, out, _ = execute_command(cmd,
'Failed to find root of repository',
cwd=abspath)
return out.strip()
def commit_file(path,
message,
git_executable=find_exe_in_path('git'),
logger=EmptyLogger()):
if isinstance(git_executable, list):
git_executable = git_executable[0]
logger.debug('Commiting %s' % path)
cwd = get_git_root(path, git_executable=git_executable)
path = os.path.relpath(path, cwd)
cmd = [git_executable, 'add', path]
execute_command(cmd, 'Failed to add file %s' % path, cwd=cwd)
cmd = [git_executable, 'commit', '-m', message]
execute_command(cmd, 'Failed to commit file %s' % path, cwd=cwd)
logger.info('Committed %s' % path)
def get_tag_date(tag,
path=os.getcwd(),
git_executable=find_exe_in_path('git')):
if isinstance(git_executable, list):
git_executable = git_executable[0]
cwd = get_git_root(path, git_executable=git_executable)
cmd = [git_executable, 'log', '-1', '--format=%ai', tag]
_, out, _ = execute_command(cmd,
'Failed to get tag date: %s' % tag,
cwd=cwd)
out = out.strip()
return datetime.strptime(out, '%Y-%m-%d %H:%M:%S %z')
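# Example of the date string parsed above (illustrative): the '%ai' format
# prints dates such as '2016-01-01 12:34:56 +0000'.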
def create_git_version_tag(version,
message=None,
path=os.getcwd(),
git_executable=find_exe_in_path('git'),
logger=EmptyLogger()):
if isinstance(git_executable, list):
git_executable = git_executable[0]
if not isinstance(version, Version):
raise ValueError('must provide a version class')
version = Version(version)
logger.debug('Tagging %s' % version)
message = message or 'The v%s release of the project' % version
cwd = get_git_root(path, git_executable=git_executable)
cmd = [git_executable, 'tag', '-a', 'v%s' % version, '-m', message]
execute_command(cmd, 'Failed to create version tag %s' % version, cwd=cwd)
logger.info('Tagged %s' % version)
def create_release(repo,
version,
description,
token=os.environ.get('GITHUB_TOKEN', None),
files=[],
path=os.getcwd(),
git_executable=find_exe_in_path('git'),
logger=EmptyLogger()):
if isinstance(git_executable, list):
git_executable = git_executable[0]
if not isinstance(version, Version):
raise ValueError('must provide a version class')
logger.debug('Creating github release %s' % version)
r = requests.post('https://api.github.com/repos/%s/releases' % repo,
params={
'access_token': token,
},
json={
'tag_name': 'v%s' % version,
'name': str(version),
'body': description,
})
if r.status_code != 201:
json = r.json()
message = json['message']
errors = json.get('errors', [])
for e in errors:
message += '\n - %s: %s: %s' % (e.get('resource', 'unknown'),
e.get('field', 'unknown'),
e.get('code', 'unknown'))
raise ReleaseError('Failed to create github release %s: %s' %
(repo, message))
logger.info('Created GitHub release')
def release(category='patch',
path=os.getcwd(),
git_executable=find_exe_in_path('git'),
token=os.environ.get('GITHUB_TOKEN', None),
repo=None,
date=datetime.utcnow(),
description=None,
changelog='CHANGELOG.md',
version='VERSION',
template=changelog_template,
logger=EmptyLogger(),
hooks={}):
'''
Performs the release of a repository on GitHub.
'''
if isinstance(git_executable, list):
git_executable = git_executable[0]
logger.debug('Starting %r release' % category)
git_version = get_git_version(git_executable=git_executable, logger=logger)
if git_version < (1, 0, 0):
raise ReleaseError('The version of git is too old %s' % git_version)
previous_version = get_git_tag_version(path=path,
git_executable=git_executable,
logger=logger)
if previous_version.dirty:
raise ReleaseError(
'Cannot release a dirty repository. Make sure all files are committed')
current_version = Version(previous_version)
previous_version = Version(current_version)
current_version.bump(category)
logger.debug('Previous version %r' % previous_version)
logger.debug('Bumped version %r' % current_version)
repo = repo or get_repo(path=path, git_executable=git_executable)
description = description or 'The v%s release of %s' % (current_version,
repo.split('/')[1])
milestones = get_milestones(repo=repo, token=token, logger=logger)
try:
milestone = [
m
for m in milestones
if m['title'] == ('v%s' % current_version) and m['state'] == 'open'
][0]
open_issues = milestone['open_issues']
if open_issues:
raise ReleaseError('The v%s milestone has %d open issues' %
(current_version, open_issues))
except IndexError:
milestone = None
try:
previous_date = get_tag_date('v%s' % previous_version,
path=path,
git_executable=git_executable)
except ExecuteCommandError:
previous_date = None
changelog_data = create_changelog(description=description,
repo=repo,
date=date,
token=token,
current_version=current_version,
previous_version=previous_version,
template=template,
since=previous_date,
logger=logger,
milestone=milestone)
changelog_data = hooks.get('changelog', lambda d: d)(changelog_data)
write_changelog(path=os.path.join(path, changelog),
changelog=changelog_data,
logger=logger)
commit_file(changelog,
'Updated changelog for v%s' % current_version,
git_executable=git_executable,
logger=logger)
write_version(path=os.path.join(path, version),
version=current_version,
logger=logger)
commit_file(version,
'Updated version to v%s' % current_version,
git_executable=git_executable,
logger=logger)
create_git_version_tag(current_version,
message=description,
path=path,
git_executable=git_executable,
logger=logger)
logger.debug('Pushing branch to remote')
cwd = get_git_root(path, git_executable=git_executable)
cmd = [git_executable, 'push']
execute_command(cmd, 'Failed to push to remote', cwd=cwd)
logger.info('Pushed branch to remote')
logger.debug('Pushing tags to remote')
cwd = get_git_root(path, git_executable=git_executable)
cmd = [git_executable, 'push', '--tags']
execute_command(cmd, 'Failed to push tags to remote', cwd=cwd)
logger.info('Pushed tags to remote')
files = []
create_release(path=path,
version=current_version,
description=changelog_data,
git_executable=git_executable,
repo=repo,
logger=logger,
files=files,
token=token)
if milestone:
close_milestone(number=milestone['number'],
repo=repo,
token=token,
logger=logger)
logger.info('Released %s' % current_version)
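# Illustrative usage sketch (hypothetical values): a typical invocation bumps
# the patch number, updates CHANGELOG.md and VERSION, tags and publishes.
def _example_release():
    import logging
    logging.basicConfig(level=logging.INFO)
    release(category='patch',
            path=os.getcwd(),
            token=os.environ.get('GITHUB_TOKEN'),
            logger=logging.getLogger('pygh'))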
| bsd-3-clause | -6,535,361,961,749,887,000 | 34.026499 | 114 | 0.510472 | false |
Nexenta/cinder | cinder/tests/unit/backup/drivers/test_backup_ceph.py | 1 | 50666 | # Copyright 2013 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Tests for Ceph backup service."""
import hashlib
import os
import tempfile
import uuid
import mock
from oslo_concurrency import processutils
from oslo_serialization import jsonutils
import six
from six.moves import range
from cinder.backup import driver
from cinder.backup.drivers import ceph
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder import test
from cinder.volume.drivers import rbd as rbddriver
# This is used to collect raised exceptions so that tests may check what was
# raised.
# NOTE: this must be initialised in test setUp().
RAISED_EXCEPTIONS = []
class MockException(Exception):
def __init__(self, *args, **kwargs):
RAISED_EXCEPTIONS.append(self.__class__)
class MockImageNotFoundException(MockException):
"""Used as mock for rbd.ImageNotFound."""
class MockImageBusyException(MockException):
"""Used as mock for rbd.ImageBusy."""
class MockObjectNotFoundException(MockException):
"""Used as mock for rados.MockObjectNotFoundException."""
def common_mocks(f):
"""Decorator to set mocks common to all tests.
The point of doing these mocks here is so that we don't accidentally set
mocks that can't/don't get unset.
"""
def _common_inner_inner1(inst, *args, **kwargs):
# NOTE(dosaboy): mock Popen to, by default, raise Exception in order to
# ensure that any test ending up in a subprocess fails
# if not properly mocked.
@mock.patch('subprocess.Popen', spec=True)
# NOTE(dosaboy): mock out eventlet.sleep() so that it does nothing.
@mock.patch('eventlet.sleep', spec=True)
@mock.patch('time.time', spec=True)
# NOTE(dosaboy): set spec to empty object so that hasattr calls return
# False by default.
@mock.patch('cinder.backup.drivers.ceph.rbd')
@mock.patch('cinder.backup.drivers.ceph.rados')
def _common_inner_inner2(mock_rados, mock_rbd, mock_time, mock_sleep,
mock_popen):
mock_time.side_effect = inst.time_inc
mock_popen.side_effect = Exception
inst.mock_rados = mock_rados
inst.mock_rbd = mock_rbd
inst.mock_rbd.ImageBusy = MockImageBusyException
inst.mock_rbd.ImageNotFound = MockImageNotFoundException
inst.service.rbd = inst.mock_rbd
inst.service.rados = inst.mock_rados
return f(inst, *args, **kwargs)
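        # NOTE: the decorated inner function is called immediately so that the
        # mock.patch decorators above take effect for this single test call.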
return _common_inner_inner2()
return _common_inner_inner1
class BackupCephTestCase(test.TestCase):
"""Test case for ceph backup driver."""
def _create_volume_db_entry(self, id, size):
vol = {'id': id, 'size': size, 'status': 'available'}
return db.volume_create(self.ctxt, vol)['id']
def _create_backup_db_entry(self, backupid, volid, size,
userid=str(uuid.uuid4()),
projectid=str(uuid.uuid4())):
backup = {'id': backupid, 'size': size, 'volume_id': volid,
'user_id': userid, 'project_id': projectid}
return db.backup_create(self.ctxt, backup)['id']
def time_inc(self):
self.counter += 1
return self.counter
def _get_wrapped_rbd_io(self, rbd_image):
rbd_meta = rbddriver.RBDImageMetadata(rbd_image, 'pool_foo',
'user_foo', 'conf_foo')
return rbddriver.RBDImageIOWrapper(rbd_meta)
def _setup_mock_popen(self, mock_popen, retval=None, p1hook=None,
p2hook=None):
class MockPopen(object):
hooks = [p2hook, p1hook]
def __init__(mock_inst, cmd, *args, **kwargs):
self.callstack.append('popen_init')
mock_inst.stdout = mock.Mock()
mock_inst.stdout.close = mock.Mock()
mock_inst.stdout.close.side_effect = \
lambda *args: self.callstack.append('stdout_close')
mock_inst.returncode = 0
hook = mock_inst.__class__.hooks.pop()
if hook is not None:
hook()
def communicate(mock_inst):
self.callstack.append('communicate')
return retval
mock_popen.side_effect = MockPopen
def setUp(self):
global RAISED_EXCEPTIONS
RAISED_EXCEPTIONS = []
super(BackupCephTestCase, self).setUp()
self.ctxt = context.get_admin_context()
# Create volume.
self.volume_size = 1
self.volume_id = str(uuid.uuid4())
self._create_volume_db_entry(self.volume_id, self.volume_size)
self.volume = db.volume_get(self.ctxt, self.volume_id)
# Create backup of volume.
self.backup_id = str(uuid.uuid4())
self._create_backup_db_entry(self.backup_id, self.volume_id,
self.volume_size)
self.backup = objects.Backup.get_by_id(self.ctxt, self.backup_id)
# Create alternate volume.
self.alt_volume_id = str(uuid.uuid4())
self._create_volume_db_entry(self.alt_volume_id, self.volume_size)
self.alt_volume = db.volume_get(self.ctxt, self.alt_volume_id)
self.chunk_size = 1024
self.num_chunks = 128
self.data_length = self.num_chunks * self.chunk_size
self.checksum = hashlib.sha256()
# Create a file with some data in it.
self.volume_file = tempfile.NamedTemporaryFile()
self.addCleanup(self.volume_file.close)
for _i in range(0, self.num_chunks):
data = os.urandom(self.chunk_size)
self.checksum.update(data)
self.volume_file.write(data)
self.volume_file.seek(0)
        # Always trigger an exception if a command is executed, since it should
        # always be dealt with gracefully. At the time of writing only rbd
        # export/import-diff is executed and, if it fails, we expect to fall
        # back to an alternative means of backing up.
mock_exec = mock.Mock()
mock_exec.side_effect = processutils.ProcessExecutionError
self.service = ceph.CephBackupDriver(self.ctxt, execute=mock_exec)
# Ensure that time.time() always returns more than the last time it was
# called to avoid div by zero errors.
self.counter = float(0)
self.callstack = []
@common_mocks
def test_get_rbd_support(self):
del self.service.rbd.RBD_FEATURE_LAYERING
del self.service.rbd.RBD_FEATURE_STRIPINGV2
self.assertFalse(hasattr(self.service.rbd, 'RBD_FEATURE_LAYERING'))
self.assertFalse(hasattr(self.service.rbd, 'RBD_FEATURE_STRIPINGV2'))
oldformat, features = self.service._get_rbd_support()
self.assertTrue(oldformat)
self.assertEqual(0, features)
self.service.rbd.RBD_FEATURE_LAYERING = 1
oldformat, features = self.service._get_rbd_support()
self.assertFalse(oldformat)
self.assertEqual(1, features)
self.service.rbd.RBD_FEATURE_STRIPINGV2 = 2
oldformat, features = self.service._get_rbd_support()
self.assertFalse(oldformat)
self.assertEqual(1 | 2, features)
@common_mocks
def test_get_most_recent_snap(self):
last = 'backup.%s.snap.9824923.1212' % (uuid.uuid4())
image = self.mock_rbd.Image.return_value
image.list_snaps.return_value = \
[{'name': 'backup.%s.snap.6423868.2342' % (uuid.uuid4())},
{'name': 'backup.%s.snap.1321319.3235' % (uuid.uuid4())},
{'name': last},
{'name': 'backup.%s.snap.3824923.1412' % (uuid.uuid4())}]
snap = self.service._get_most_recent_snap(image)
self.assertEqual(last, snap)
@common_mocks
def test_get_backup_snap_name(self):
snap_name = 'backup.%s.snap.3824923.1412' % (uuid.uuid4())
def get_backup_snaps(inst, *args):
return [{'name': 'backup.%s.snap.6423868.2342' % (uuid.uuid4()),
'backup_id': str(uuid.uuid4())},
{'name': snap_name,
'backup_id': self.backup_id}]
with mock.patch.object(self.service, 'get_backup_snaps'):
name = self.service._get_backup_snap_name(self.service.rbd.Image(),
'base_foo',
self.backup_id)
self.assertIsNone(name)
with mock.patch.object(self.service, 'get_backup_snaps') as \
mock_get_backup_snaps:
mock_get_backup_snaps.side_effect = get_backup_snaps
name = self.service._get_backup_snap_name(self.service.rbd.Image(),
'base_foo',
self.backup_id)
self.assertEqual(snap_name, name)
self.assertTrue(mock_get_backup_snaps.called)
@common_mocks
def test_get_backup_snaps(self):
image = self.mock_rbd.Image.return_value
image.list_snaps.return_value = [
{'name': 'backup.%s.snap.6423868.2342' % (uuid.uuid4())},
{'name': 'backup.%s.wambam.6423868.2342' % (uuid.uuid4())},
{'name': 'backup.%s.snap.1321319.3235' % (uuid.uuid4())},
{'name': 'bbbackup.%s.snap.1321319.3235' % (uuid.uuid4())},
{'name': 'backup.%s.snap.3824923.1412' % (uuid.uuid4())}]
snaps = self.service.get_backup_snaps(image)
self.assertEqual(3, len(snaps))
@common_mocks
def test_transfer_data_from_rbd_to_file(self):
def fake_read(offset, length):
self.volume_file.seek(offset)
return self.volume_file.read(length)
self.mock_rbd.Image.return_value.read.side_effect = fake_read
self.mock_rbd.Image.return_value.size.return_value = self.data_length
with tempfile.NamedTemporaryFile() as test_file:
self.volume_file.seek(0)
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
self.service._transfer_data(rbd_io, 'src_foo', test_file,
'dest_foo', self.data_length)
checksum = hashlib.sha256()
test_file.seek(0)
for _c in range(0, self.num_chunks):
checksum.update(test_file.read(self.chunk_size))
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
@common_mocks
def test_transfer_data_from_rbd_to_rbd(self):
def fake_read(offset, length):
self.volume_file.seek(offset)
return self.volume_file.read(length)
def mock_write_data(data, offset):
checksum.update(data)
test_file.write(data)
rbd1 = mock.Mock()
rbd1.read.side_effect = fake_read
rbd1.size.return_value = os.fstat(self.volume_file.fileno()).st_size
rbd2 = mock.Mock()
rbd2.write.side_effect = mock_write_data
with tempfile.NamedTemporaryFile() as test_file:
self.volume_file.seek(0)
checksum = hashlib.sha256()
src_rbd_io = self._get_wrapped_rbd_io(rbd1)
dest_rbd_io = self._get_wrapped_rbd_io(rbd2)
self.service._transfer_data(src_rbd_io, 'src_foo', dest_rbd_io,
'dest_foo', self.data_length)
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
@common_mocks
def test_transfer_data_from_file_to_rbd(self):
def mock_write_data(data, offset):
checksum.update(data)
test_file.write(data)
self.mock_rbd.Image.return_value.write.side_effect = mock_write_data
with tempfile.NamedTemporaryFile() as test_file:
self.volume_file.seek(0)
checksum = hashlib.sha256()
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
self.service._transfer_data(self.volume_file, 'src_foo',
rbd_io, 'dest_foo', self.data_length)
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
@common_mocks
def test_transfer_data_from_file_to_file(self):
with tempfile.NamedTemporaryFile() as test_file:
self.volume_file.seek(0)
checksum = hashlib.sha256()
self.service._transfer_data(self.volume_file, 'src_foo', test_file,
'dest_foo', self.data_length)
checksum = hashlib.sha256()
test_file.seek(0)
for _c in range(0, self.num_chunks):
checksum.update(test_file.read(self.chunk_size))
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
@common_mocks
def test_backup_volume_from_file(self):
checksum = hashlib.sha256()
def mock_write_data(data, offset):
checksum.update(data)
test_file.write(data)
self.service.rbd.Image.return_value.write.side_effect = mock_write_data
with mock.patch.object(self.service, '_backup_metadata'):
with mock.patch.object(self.service, '_discard_bytes'):
with tempfile.NamedTemporaryFile() as test_file:
self.service.backup(self.backup, self.volume_file)
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
self.assertTrue(self.service.rbd.Image.return_value.write.called)
@common_mocks
def test_get_backup_base_name(self):
name = self.service._get_backup_base_name(self.volume_id,
diff_format=True)
self.assertEqual("volume-%s.backup.base" % (self.volume_id), name)
self.assertRaises(exception.InvalidParameterValue,
self.service._get_backup_base_name,
self.volume_id)
name = self.service._get_backup_base_name(self.volume_id, '1234')
self.assertEqual("volume-%s.backup.%s" % (self.volume_id, '1234'),
name)
@common_mocks
@mock.patch('fcntl.fcntl', spec=True)
@mock.patch('subprocess.Popen', spec=True)
def test_backup_volume_from_rbd(self, mock_popen, mock_fnctl):
backup_name = self.service._get_backup_base_name(self.backup_id,
diff_format=True)
def mock_write_data():
self.volume_file.seek(0)
data = self.volume_file.read(self.data_length)
self.callstack.append('write')
checksum.update(data)
test_file.write(data)
def mock_read_data():
self.callstack.append('read')
return self.volume_file.read(self.data_length)
self._setup_mock_popen(mock_popen,
['out', 'err'],
p1hook=mock_read_data,
p2hook=mock_write_data)
self.mock_rbd.RBD.list = mock.Mock()
self.mock_rbd.RBD.list.return_value = [backup_name]
with mock.patch.object(self.service, '_backup_metadata'):
with mock.patch.object(self.service, 'get_backup_snaps') as \
mock_get_backup_snaps:
with mock.patch.object(self.service, '_full_backup') as \
mock_full_backup:
with mock.patch.object(self.service,
'_try_delete_base_image'):
with tempfile.NamedTemporaryFile() as test_file:
checksum = hashlib.sha256()
image = self.service.rbd.Image()
meta = rbddriver.RBDImageMetadata(image,
'pool_foo',
'user_foo',
'conf_foo')
rbdio = rbddriver.RBDImageIOWrapper(meta)
self.service.backup(self.backup, rbdio)
self.assertEqual(['popen_init',
'read',
'popen_init',
'write',
'stdout_close',
'communicate'], self.callstack)
self.assertFalse(mock_full_backup.called)
self.assertTrue(mock_get_backup_snaps.called)
# Ensure the files are equal
self.assertEqual(checksum.digest(),
self.checksum.digest())
@common_mocks
@mock.patch('fcntl.fcntl', spec=True)
@mock.patch('subprocess.Popen', spec=True)
def test_backup_volume_from_rbd_fail(self, mock_popen, mock_fnctl):
"""Test of when an exception occurs in an exception handler.
In _backup_rbd(), after an exception.BackupRBDOperationFailed
occurs in self._rbd_diff_transfer(), we want to check the
process when the second exception occurs in
self._try_delete_base_image().
"""
backup_name = self.service._get_backup_base_name(self.backup_id,
diff_format=True)
def mock_write_data():
self.volume_file.seek(0)
data = self.volume_file.read(self.data_length)
self.callstack.append('write')
checksum.update(data)
test_file.write(data)
def mock_read_data():
self.callstack.append('read')
return self.volume_file.read(self.data_length)
self._setup_mock_popen(mock_popen,
['out', 'err'],
p1hook=mock_read_data,
p2hook=mock_write_data)
self.mock_rbd.RBD.list = mock.Mock()
self.mock_rbd.RBD.list.return_value = [backup_name]
with mock.patch.object(self.service, 'get_backup_snaps'), \
mock.patch.object(self.service, '_rbd_diff_transfer') as \
mock_rbd_diff_transfer:
def mock_rbd_diff_transfer_side_effect(src_name, src_pool,
dest_name, dest_pool,
src_user, src_conf,
dest_user, dest_conf,
src_snap, from_snap):
raise exception.BackupRBDOperationFailed(_('mock'))
# Raise a pseudo exception.BackupRBDOperationFailed.
mock_rbd_diff_transfer.side_effect \
= mock_rbd_diff_transfer_side_effect
with mock.patch.object(self.service, '_full_backup'), \
mock.patch.object(self.service,
'_try_delete_base_image') as \
mock_try_delete_base_image:
def mock_try_delete_base_image_side_effect(backup_id,
volume_id,
base_name):
raise self.service.rbd.ImageNotFound(_('mock'))
                # Raise a pseudo exception rbd.ImageNotFound.
mock_try_delete_base_image.side_effect \
= mock_try_delete_base_image_side_effect
with mock.patch.object(self.service, '_backup_metadata'):
with tempfile.NamedTemporaryFile() as test_file:
checksum = hashlib.sha256()
image = self.service.rbd.Image()
meta = rbddriver.RBDImageMetadata(image,
'pool_foo',
'user_foo',
'conf_foo')
rbdio = rbddriver.RBDImageIOWrapper(meta)
# We expect that the second exception is
# notified.
self.assertRaises(
self.service.rbd.ImageNotFound,
self.service.backup,
self.backup, rbdio)
@common_mocks
@mock.patch('fcntl.fcntl', spec=True)
@mock.patch('subprocess.Popen', spec=True)
def test_backup_volume_from_rbd_fail2(self, mock_popen, mock_fnctl):
"""Test of when an exception occurs in an exception handler.
In backup(), after an exception.BackupOperationError occurs in
self._backup_metadata(), we want to check the process when the
second exception occurs in self.delete().
"""
backup_name = self.service._get_backup_base_name(self.backup_id,
diff_format=True)
def mock_write_data():
self.volume_file.seek(0)
data = self.volume_file.read(self.data_length)
self.callstack.append('write')
checksum.update(data)
test_file.write(data)
def mock_read_data():
self.callstack.append('read')
return self.volume_file.read(self.data_length)
self._setup_mock_popen(mock_popen,
['out', 'err'],
p1hook=mock_read_data,
p2hook=mock_write_data)
self.mock_rbd.RBD.list = mock.Mock()
self.mock_rbd.RBD.list.return_value = [backup_name]
with mock.patch.object(self.service, 'get_backup_snaps'), \
mock.patch.object(self.service, '_rbd_diff_transfer'), \
mock.patch.object(self.service, '_full_backup'), \
mock.patch.object(self.service, '_backup_metadata') as \
mock_backup_metadata:
def mock_backup_metadata_side_effect(backup):
raise exception.BackupOperationError(_('mock'))
# Raise a pseudo exception.BackupOperationError.
mock_backup_metadata.side_effect = mock_backup_metadata_side_effect
with mock.patch.object(self.service, 'delete') as mock_delete:
def mock_delete_side_effect(backup):
raise self.service.rbd.ImageBusy()
# Raise a pseudo exception rbd.ImageBusy.
mock_delete.side_effect = mock_delete_side_effect
with tempfile.NamedTemporaryFile() as test_file:
checksum = hashlib.sha256()
image = self.service.rbd.Image()
meta = rbddriver.RBDImageMetadata(image,
'pool_foo',
'user_foo',
'conf_foo')
rbdio = rbddriver.RBDImageIOWrapper(meta)
# We expect that the second exception is
# notified.
self.assertRaises(
self.service.rbd.ImageBusy,
self.service.backup,
self.backup, rbdio)
@common_mocks
def test_backup_vol_length_0(self):
volume_id = str(uuid.uuid4())
self._create_volume_db_entry(volume_id, 0)
backup_id = str(uuid.uuid4())
self._create_backup_db_entry(backup_id, volume_id, 1)
backup = objects.Backup.get_by_id(self.ctxt, backup_id)
self.assertRaises(exception.InvalidParameterValue, self.service.backup,
backup, self.volume_file)
@common_mocks
def test_restore(self):
backup_name = self.service._get_backup_base_name(self.backup_id,
diff_format=True)
self.mock_rbd.RBD.return_value.list.return_value = [backup_name]
def mock_read_data(offset, length):
return self.volume_file.read(self.data_length)
self.mock_rbd.Image.return_value.read.side_effect = mock_read_data
self.mock_rbd.Image.return_value.size.return_value = \
self.chunk_size * self.num_chunks
with mock.patch.object(self.service, '_restore_metadata') as \
mock_restore_metadata:
with mock.patch.object(self.service, '_discard_bytes') as \
mock_discard_bytes:
with tempfile.NamedTemporaryFile() as test_file:
self.volume_file.seek(0)
self.service.restore(self.backup, self.volume_id,
test_file)
checksum = hashlib.sha256()
test_file.seek(0)
for _c in range(0, self.num_chunks):
checksum.update(test_file.read(self.chunk_size))
# Ensure the files are equal
self.assertEqual(checksum.digest(), self.checksum.digest())
self.assertTrue(mock_restore_metadata.called)
self.assertTrue(mock_discard_bytes.called)
self.assertTrue(mock_discard_bytes.called)
self.assertTrue(self.service.rbd.Image.return_value.read.called)
@common_mocks
def test_discard_bytes(self):
# Lower the chunksize to a memory manageable number
self.service.chunk_size = 1024
image = self.mock_rbd.Image.return_value
wrapped_rbd = self._get_wrapped_rbd_io(image)
self.service._discard_bytes(wrapped_rbd, 0, 0)
self.assertEqual(0, image.discard.call_count)
self.service._discard_bytes(wrapped_rbd, 0, 1234)
self.assertEqual(1, image.discard.call_count)
image.reset_mock()
# Test discard with no remainder
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = False
self.service._discard_bytes(wrapped_rbd, 0,
self.service.chunk_size * 2)
self.assertEqual(2, image.write.call_count)
self.assertEqual(2, image.flush.call_count)
self.assertFalse(image.discard.called)
zeroes = '\0' * self.service.chunk_size
image.write.assert_has_calls([mock.call(zeroes, 0),
mock.call(zeroes, self.chunk_size)])
image.reset_mock()
image.write.reset_mock()
# Now test with a remainder.
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = False
self.service._discard_bytes(wrapped_rbd, 0,
(self.service.chunk_size * 2) + 1)
self.assertEqual(3, image.write.call_count)
self.assertEqual(3, image.flush.call_count)
self.assertFalse(image.discard.called)
image.write.assert_has_calls([mock.call(zeroes,
self.chunk_size * 2),
mock.call(zeroes,
self.chunk_size * 3),
mock.call('\0',
self.chunk_size * 4)])
@common_mocks
def test_delete_backup_snapshot(self):
snap_name = 'backup.%s.snap.3824923.1412' % (uuid.uuid4())
base_name = self.service._get_backup_base_name(self.volume_id,
diff_format=True)
self.mock_rbd.RBD.remove_snap = mock.Mock()
with mock.patch.object(self.service, '_get_backup_snap_name') as \
mock_get_backup_snap_name:
mock_get_backup_snap_name.return_value = snap_name
with mock.patch.object(self.service, 'get_backup_snaps') as \
mock_get_backup_snaps:
mock_get_backup_snaps.return_value = None
rem = self.service._delete_backup_snapshot(self.mock_rados,
base_name,
self.backup_id)
self.assertTrue(mock_get_backup_snap_name.called)
self.assertTrue(mock_get_backup_snaps.called)
self.assertEqual((snap_name, 0), rem)
@common_mocks
@mock.patch('cinder.backup.drivers.ceph.VolumeMetadataBackup', spec=True)
def test_try_delete_base_image_diff_format(self, mock_meta_backup):
backup_name = self.service._get_backup_base_name(self.volume_id,
diff_format=True)
self.mock_rbd.RBD.return_value.list.return_value = [backup_name]
with mock.patch.object(self.service, '_delete_backup_snapshot') as \
mock_del_backup_snap:
snap_name = self.service._get_new_snap_name(self.backup_id)
mock_del_backup_snap.return_value = (snap_name, 0)
self.service.delete(self.backup)
self.assertTrue(mock_del_backup_snap.called)
self.assertTrue(self.mock_rbd.RBD.return_value.list.called)
self.assertTrue(self.mock_rbd.RBD.return_value.remove.called)
@common_mocks
@mock.patch('cinder.backup.drivers.ceph.VolumeMetadataBackup', spec=True)
def test_try_delete_base_image(self, mock_meta_backup):
backup_name = self.service._get_backup_base_name(self.volume_id,
self.backup_id)
self.mock_rbd.RBD.return_value.list.return_value = [backup_name]
with mock.patch.object(self.service, 'get_backup_snaps'):
self.service.delete(self.backup)
self.assertTrue(self.mock_rbd.RBD.return_value.remove.called)
@common_mocks
def test_try_delete_base_image_busy(self):
"""This should induce retries then raise rbd.ImageBusy."""
backup_name = self.service._get_backup_base_name(self.volume_id,
self.backup_id)
rbd = self.mock_rbd.RBD.return_value
rbd.list.return_value = [backup_name]
rbd.remove.side_effect = self.mock_rbd.ImageBusy
with mock.patch.object(self.service, 'get_backup_snaps') as \
mock_get_backup_snaps:
self.assertRaises(self.mock_rbd.ImageBusy,
self.service._try_delete_base_image,
self.backup['id'], self.backup['volume_id'])
self.assertTrue(mock_get_backup_snaps.called)
self.assertTrue(rbd.list.called)
self.assertTrue(rbd.remove.called)
self.assertIn(MockImageBusyException, RAISED_EXCEPTIONS)
@common_mocks
@mock.patch('cinder.backup.drivers.ceph.VolumeMetadataBackup', spec=True)
def test_delete(self, mock_meta_backup):
with mock.patch.object(self.service, '_try_delete_base_image'):
self.service.delete(self.backup)
self.assertEqual([], RAISED_EXCEPTIONS)
@common_mocks
@mock.patch('cinder.backup.drivers.ceph.VolumeMetadataBackup', spec=True)
def test_delete_image_not_found(self, mock_meta_backup):
with mock.patch.object(self.service, '_try_delete_base_image') as \
mock_del_base:
mock_del_base.side_effect = self.mock_rbd.ImageNotFound
# ImageNotFound exception is caught so that db entry can be cleared
self.service.delete(self.backup)
self.assertEqual([MockImageNotFoundException], RAISED_EXCEPTIONS)
@common_mocks
def test_diff_restore_allowed_with_image_not_exists(self):
"""Test diff restore not allowed when backup not diff-format."""
not_allowed = (False, None)
backup_base = 'backup.base'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_different = [backup_base, self.backup, self.alt_volume,
rbd_io, self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (False, backup_base)
resp = self.service._diff_restore_allowed(*args_vols_different)
self.assertEqual(not_allowed, resp)
mock_rbd_image_exists.assert_called_once_with(
backup_base,
self.backup['volume_id'],
self.mock_rados)
@common_mocks
def test_diff_restore_allowed_with_no_restore_point(self):
"""Test diff restore not allowed when no restore point found.
Detail conditions:
1. backup base is diff-format
2. restore point does not exist
"""
not_allowed = (False, None)
backup_base = 'backup.base'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_different = [backup_base, self.backup, self.alt_volume,
rbd_io, self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (True, backup_base)
with mock.patch.object(self.service, '_get_restore_point') as \
mock_get_restore_point:
mock_get_restore_point.return_value = None
args = args_vols_different
resp = self.service._diff_restore_allowed(*args)
self.assertEqual(not_allowed, resp)
self.assertTrue(mock_rbd_image_exists.called)
mock_get_restore_point.assert_called_once_with(
backup_base,
self.backup['id'])
@common_mocks
def test_diff_restore_allowed_with_not_rbd(self):
"""Test diff restore not allowed when destination volume is not rbd.
Detail conditions:
1. backup base is diff-format
2. restore point exists
3. destination volume is not an rbd.
"""
backup_base = 'backup.base'
restore_point = 'backup.snap.1'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_different = [backup_base, self.backup, self.alt_volume,
rbd_io, self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (True, backup_base)
with mock.patch.object(self.service, '_get_restore_point') as \
mock_get_restore_point:
mock_get_restore_point.return_value = restore_point
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = False
args = args_vols_different
resp = self.service._diff_restore_allowed(*args)
self.assertEqual((False, restore_point), resp)
self.assertTrue(mock_rbd_image_exists.called)
self.assertTrue(mock_get_restore_point.called)
mock_file_is_rbd.assert_called_once_with(
rbd_io)
@common_mocks
def test_diff_restore_allowed_with_same_volume(self):
"""Test diff restore not allowed when volumes are same.
Detail conditions:
1. backup base is diff-format
2. restore point exists
3. destination volume is an rbd
4. source and destination volumes are the same
"""
backup_base = 'backup.base'
restore_point = 'backup.snap.1'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_same = [backup_base, self.backup, self.volume, rbd_io,
self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (True, backup_base)
with mock.patch.object(self.service, '_get_restore_point') as \
mock_get_restore_point:
mock_get_restore_point.return_value = restore_point
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = True
resp = self.service._diff_restore_allowed(*args_vols_same)
self.assertEqual((False, restore_point), resp)
self.assertTrue(mock_rbd_image_exists.called)
self.assertTrue(mock_get_restore_point.called)
self.assertTrue(mock_file_is_rbd.called)
@common_mocks
def test_diff_restore_allowed_with_has_extents(self):
"""Test diff restore not allowed when destination volume has data.
Detail conditions:
1. backup base is diff-format
2. restore point exists
3. destination volume is an rbd
4. source and destination volumes are different
5. destination volume has data on it - full copy is mandated
"""
backup_base = 'backup.base'
restore_point = 'backup.snap.1'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_different = [backup_base, self.backup, self.alt_volume,
rbd_io, self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (True, backup_base)
with mock.patch.object(self.service, '_get_restore_point') as \
mock_get_restore_point:
mock_get_restore_point.return_value = restore_point
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = True
with mock.patch.object(self.service, '_rbd_has_extents') \
as mock_rbd_has_extents:
mock_rbd_has_extents.return_value = True
args = args_vols_different
resp = self.service._diff_restore_allowed(*args)
self.assertEqual((False, restore_point), resp)
self.assertTrue(mock_rbd_image_exists.called)
self.assertTrue(mock_get_restore_point.called)
self.assertTrue(mock_file_is_rbd.called)
mock_rbd_has_extents.assert_called_once_with(
rbd_io.rbd_image)
@common_mocks
def test_diff_restore_allowed_with_no_extents(self):
"""Test diff restore allowed when no data in destination volume.
Detail conditions:
1. backup base is diff-format
2. restore point exists
3. destination volume is an rbd
4. source and destination volumes are different
5. destination volume no data on it
"""
backup_base = 'backup.base'
restore_point = 'backup.snap.1'
rbd_io = self._get_wrapped_rbd_io(self.service.rbd.Image())
args_vols_different = [backup_base, self.backup, self.alt_volume,
rbd_io, self.mock_rados]
with mock.patch.object(self.service, '_rbd_image_exists') as \
mock_rbd_image_exists:
mock_rbd_image_exists.return_value = (True, backup_base)
with mock.patch.object(self.service, '_get_restore_point') as \
mock_get_restore_point:
mock_get_restore_point.return_value = restore_point
with mock.patch.object(self.service, '_file_is_rbd') as \
mock_file_is_rbd:
mock_file_is_rbd.return_value = True
with mock.patch.object(self.service, '_rbd_has_extents') \
as mock_rbd_has_extents:
mock_rbd_has_extents.return_value = False
args = args_vols_different
resp = self.service._diff_restore_allowed(*args)
self.assertEqual((True, restore_point), resp)
self.assertTrue(mock_rbd_image_exists.called)
self.assertTrue(mock_get_restore_point.called)
self.assertTrue(mock_file_is_rbd.called)
self.assertTrue(mock_rbd_has_extents.called)
@common_mocks
@mock.patch('fcntl.fcntl', spec=True)
@mock.patch('subprocess.Popen', spec=True)
def test_piped_execute(self, mock_popen, mock_fcntl):
mock_fcntl.return_value = 0
self._setup_mock_popen(mock_popen, ['out', 'err'])
self.service._piped_execute(['foo'], ['bar'])
self.assertEqual(['popen_init', 'popen_init',
'stdout_close', 'communicate'], self.callstack)
@common_mocks
    def test_restore_metadata(self):
version = 2
def mock_read(*args):
base_tag = driver.BackupMetadataAPI.TYPE_TAG_VOL_BASE_META
glance_tag = driver.BackupMetadataAPI.TYPE_TAG_VOL_GLANCE_META
return jsonutils.dumps({base_tag: {'image_name': 'image.base'},
glance_tag: {'image_name': 'image.glance'},
'version': version})
self.mock_rados.Object.return_value.read.side_effect = mock_read
self.service._restore_metadata(self.backup, self.volume_id)
self.assertTrue(self.mock_rados.Object.return_value.stat.called)
self.assertTrue(self.mock_rados.Object.return_value.read.called)
version = 3
try:
self.service._restore_metadata(self.backup, self.volume_id)
except exception.BackupOperationError as exc:
msg = _("Metadata restore failed due to incompatible version")
self.assertEqual(msg, six.text_type(exc))
else:
# Force a test failure
self.assertFalse(True)
@common_mocks
@mock.patch('cinder.backup.drivers.ceph.VolumeMetadataBackup', spec=True)
def test_backup_metadata_already_exists(self, mock_meta_backup):
def mock_set(json_meta):
msg = (_("Metadata backup object '%s' already exists") %
("backup.%s.meta" % (self.backup_id)))
raise exception.VolumeMetadataBackupExists(msg)
mock_meta_backup.return_value.set = mock.Mock()
mock_meta_backup.return_value.set.side_effect = mock_set
with mock.patch.object(self.service, 'get_metadata') as \
mock_get_metadata:
mock_get_metadata.return_value = "some.json.metadata"
try:
self.service._backup_metadata(self.backup)
except exception.BackupOperationError as e:
msg = (_("Failed to backup volume metadata - Metadata backup "
"object 'backup.%s.meta' already exists") %
(self.backup_id))
self.assertEqual(msg, six.text_type(e))
else:
# Make the test fail
self.assertFalse(True)
self.assertFalse(mock_meta_backup.set.called)
@common_mocks
    def test_backup_metadata_error(self):
"""Ensure that delete() is called if the metadata backup fails.
Also ensure that the exception is propagated to the caller.
"""
with mock.patch.object(self.service, '_backup_metadata') as \
mock_backup_metadata:
mock_backup_metadata.side_effect = exception.BackupOperationError
with mock.patch.object(self.service, '_get_volume_size_gb'):
with mock.patch.object(self.service, '_file_is_rbd',
return_value=False):
with mock.patch.object(self.service, '_full_backup'):
with mock.patch.object(self.service, 'delete') as \
mock_delete:
self.assertRaises(exception.BackupOperationError,
self.service.backup, self.backup,
mock.Mock(),
backup_metadata=True)
self.assertTrue(mock_delete.called)
@common_mocks
def test_restore_invalid_metadata_version(self):
def mock_read(*args):
base_tag = driver.BackupMetadataAPI.TYPE_TAG_VOL_BASE_META
glance_tag = driver.BackupMetadataAPI.TYPE_TAG_VOL_GLANCE_META
return jsonutils.dumps({base_tag: {'image_name': 'image.base'},
glance_tag: {'image_name': 'image.glance'},
'version': 3})
self.mock_rados.Object.return_value.read.side_effect = mock_read
with mock.patch.object(ceph.VolumeMetadataBackup, '_exists') as \
mock_exists:
mock_exists.return_value = True
self.assertRaises(exception.BackupOperationError,
self.service._restore_metadata,
self.backup, self.volume_id)
self.assertTrue(mock_exists.called)
self.assertTrue(self.mock_rados.Object.return_value.read.called)
def common_meta_backup_mocks(f):
"""Decorator to set mocks common to all metadata backup tests.
The point of doing these mocks here is so that we don't accidentally set
mocks that can't/don't get unset.
"""
def _common_inner_inner1(inst, *args, **kwargs):
@mock.patch('cinder.backup.drivers.ceph.rbd')
@mock.patch('cinder.backup.drivers.ceph.rados')
def _common_inner_inner2(mock_rados, mock_rbd):
inst.mock_rados = mock_rados
inst.mock_rbd = mock_rbd
inst.mock_rados.ObjectNotFound = MockObjectNotFoundException
return f(inst, *args, **kwargs)
return _common_inner_inner2()
return _common_inner_inner1
class VolumeMetadataBackupTestCase(test.TestCase):
def setUp(self):
global RAISED_EXCEPTIONS
RAISED_EXCEPTIONS = []
super(VolumeMetadataBackupTestCase, self).setUp()
self.backup_id = str(uuid.uuid4())
self.mb = ceph.VolumeMetadataBackup(mock.Mock(), self.backup_id)
@common_meta_backup_mocks
def test_name(self):
self.assertEqual('backup.%s.meta' % (self.backup_id), self.mb.name)
@common_meta_backup_mocks
def test_exists(self):
# True
self.assertTrue(self.mb.exists)
self.assertTrue(self.mock_rados.Object.return_value.stat.called)
self.mock_rados.Object.return_value.reset_mock()
# False
self.mock_rados.Object.return_value.stat.side_effect = (
self.mock_rados.ObjectNotFound)
self.assertFalse(self.mb.exists)
self.assertTrue(self.mock_rados.Object.return_value.stat.called)
self.assertEqual([MockObjectNotFoundException], RAISED_EXCEPTIONS)
@common_meta_backup_mocks
def test_set(self):
obj_data = []
called = []
def mock_read(*args):
called.append('read')
self.assertEqual(1, len(obj_data))
return obj_data[0]
def _mock_write(data):
obj_data.append(data)
called.append('write')
self.mb.get = mock.Mock()
self.mb.get.side_effect = mock_read
with mock.patch.object(ceph.VolumeMetadataBackup, 'set') as mock_write:
mock_write.side_effect = _mock_write
self.mb.set({'foo': 'bar'})
self.assertEqual({'foo': 'bar'}, self.mb.get())
self.assertTrue(self.mb.get.called)
self.mb._exists = mock.Mock()
self.mb._exists.return_value = True
# use the unmocked set() method.
self.assertRaises(exception.VolumeMetadataBackupExists, self.mb.set,
{'doo': 'dah'})
# check the meta obj state has not changed.
self.assertEqual({'foo': 'bar'}, self.mb.get())
self.assertEqual(['write', 'read', 'read'], called)
@common_meta_backup_mocks
def test_get(self):
self.mock_rados.Object.return_value.stat.side_effect = (
self.mock_rados.ObjectNotFound)
self.mock_rados.Object.return_value.read.return_value = 'meta'
self.assertIsNone(self.mb.get())
self.mock_rados.Object.return_value.stat.side_effect = None
self.assertEqual('meta', self.mb.get())
@common_meta_backup_mocks
    def test_remove_if_exists(self):
with mock.patch.object(self.mock_rados.Object, 'remove') as \
mock_remove:
mock_remove.side_effect = self.mock_rados.ObjectNotFound
self.mb.remove_if_exists()
self.assertEqual([MockObjectNotFoundException], RAISED_EXCEPTIONS)
self.mock_rados.Object.remove.side_effect = None
self.mb.remove_if_exists()
self.assertEqual([], RAISED_EXCEPTIONS)
| apache-2.0 | -5,105,239,275,316,770,000 | 41.433836 | 79 | 0.559192 | false |
jeroenh/OpenNSA | test/test_topology.py | 1 | 1847 | import StringIO
from twisted.trial import unittest
from opennsa import nsa
from opennsa.topology import gole
from . import topology as testtopology
TEST_PATH_1 = {
'source_stp' : nsa.STP('Aruba', 'A2'),
'dest_stp' : nsa.STP('Curacao', 'C3'),
'paths' : [ [ nsa.Link('Aruba', 'A2', 'A4'), nsa.Link('Bonaire', 'B1', 'B4'), nsa.Link('Curacao', 'C1', 'C3') ],
[ nsa.Link('Aruba', 'A2', 'A1'), nsa.Link('Dominica', 'D4', 'D1'), nsa.Link('Curacao', 'C4', 'C3') ]
]
}
TEST_PATH_2 = {
'source_stp' : nsa.STP('Aruba', 'A2'),
'dest_stp' : nsa.STP('Bonaire', 'B2'),
'paths' : [ [ nsa.Link('Aruba', 'A2', 'A4'), nsa.Link('Bonaire', 'B1', 'B2') ],
[ nsa.Link('Aruba', 'A2', 'A1'), nsa.Link('Dominica', 'D4', 'D1'), nsa.Link('Curacao', 'C4', 'C1'), nsa.Link('Bonaire', 'B4', 'B2') ] ]
}
# Currently we do not have bandwidth, so this is unused
TEST_PATH_3 = {
'source_stp': nsa.STP('Aruba', 'A2'),
'dest_stp' : nsa.STP('Bonaire', 'B3'),
'paths' : [ [ nsa.Link('Aruba', 'A2', 'A1'), nsa.Link('Dominica', 'D4', 'D1'), nsa.Link('Curacao', 'C4', 'C1'), nsa.Link('Bonaire', 'B4', 'B3') ] ],
'bandwidth' : nsa.BandwidthParameters(1000, 1000, 1000)
}
TEST_PATHS = [ TEST_PATH_1, TEST_PATH_2 ]
class GenericTopologyTest:
def testParseAndFindPath(self):
for tp in TEST_PATHS:
paths = self.topo.findPaths(tp['source_stp'], tp['dest_stp'], tp.get('bandwidth'))
for path in paths:
self.assertIn(path.network_links, tp['paths'])
self.assertEquals(len(paths), len(tp['paths']))
class GOLETopologyTest(GenericTopologyTest, unittest.TestCase):
def setUp(self):
f = StringIO.StringIO(testtopology.TEST_TOPOLOGY)
self.topo, _ = gole.parseTopology( [f] )
| bsd-3-clause | 2,879,352,091,860,986,000 | 31.982143 | 157 | 0.559827 | false |
rven/odoo | addons/pad/models/pad.py | 1 | 5592 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import random
import re
import string
import requests
from odoo import api, models, _
from odoo.exceptions import UserError
from ..py_etherpad import EtherpadLiteClient
_logger = logging.getLogger(__name__)
class PadCommon(models.AbstractModel):
_name = 'pad.common'
_description = 'Pad Common'
def _valid_field_parameter(self, field, name):
return name == 'pad_content_field' or super()._valid_field_parameter(field, name)
@api.model
def pad_is_configured(self):
return bool(self.env.company.pad_server)
@api.model
def pad_generate_url(self):
company = self.env.company.sudo()
pad = {
"server": company.pad_server,
"key": company.pad_key,
}
        # make sure the pad server URL is in the form http://hostname
if not pad["server"]:
return pad
if not pad["server"].startswith('http'):
pad["server"] = 'http://' + pad["server"]
pad["server"] = pad["server"].rstrip('/')
# generate a salt
s = string.ascii_uppercase + string.digits
salt = ''.join([s[random.SystemRandom().randint(0, len(s) - 1)] for i in range(10)])
# path
# etherpad hardcodes pad id length limit to 50
path = '-%s-%s' % (self._name, salt)
path = '%s%s' % (self.env.cr.dbname.replace('_', '-')[0:50 - len(path)], path)
        # construct the url
url = '%s/p/%s' % (pad["server"], path)
        # if creating the pad with initial content
if self.env.context.get('field_name') and self.env.context.get('model'):
myPad = EtherpadLiteClient(pad["key"], pad["server"] + '/api')
try:
myPad.createPad(path)
except IOError:
raise UserError(_("Pad creation failed, either there is a problem with your pad server URL or with your connection."))
# get attr on the field model
model = self.env[self.env.context["model"]]
field = model._fields[self.env.context['field_name']]
real_field = field.pad_content_field
res_id = self.env.context.get("object_id")
record = model.browse(res_id)
# get content of the real field
real_field_value = record[real_field] or self.env.context.get('record', {}).get(real_field, '')
if real_field_value:
myPad.setHtmlFallbackText(path, real_field_value)
return {
"server": pad["server"],
"path": path,
"url": url,
}
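    # Illustrative sketch (assumption, not part of the original module): for a database
    # named "mydb", a company pad server "http://pad.example.com" and a model named
    # "project.task" inheriting pad.common, pad_generate_url() would return roughly:
    #
    #   {'server': 'http://pad.example.com',
    #    'path': 'mydb-project.task-XY12AB34CD',
    #    'url': 'http://pad.example.com/p/mydb-project.task-XY12AB34CD'}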
@api.model
def pad_get_content(self, url):
company = self.env.company.sudo()
myPad = EtherpadLiteClient(company.pad_key, (company.pad_server or '') + '/api')
content = ''
if url:
split_url = url.split('/p/')
path = len(split_url) == 2 and split_url[1]
try:
content = myPad.getHtml(path).get('html', '')
except IOError:
_logger.warning('Http Error: the credentials might be absent for url: "%s". Falling back.' % url)
try:
r = requests.get('%s/export/html' % url)
r.raise_for_status()
except Exception:
_logger.warning("No pad found with url '%s'.", url)
else:
mo = re.search('<body>(.*)</body>', r.content.decode(), re.DOTALL)
if mo:
content = mo.group(1)
return content
# TODO
    # reverse engineer the protocol so we can call setHtml without using the api key
def write(self, vals):
self._set_field_to_pad(vals)
self._set_pad_to_field(vals)
return super(PadCommon, self).write(vals)
@api.model
def create(self, vals):
# Case of a regular creation: we receive the pad url, so we need to update the
# corresponding field
self._set_pad_to_field(vals)
pad = super(PadCommon, self).create(vals)
        # Case of a programmatic creation (e.g. copy): we receive the field content, so we need
# to create the corresponding pad
if self.env.context.get('pad_no_create', False):
return pad
for k, field in self._fields.items():
if hasattr(field, 'pad_content_field') and k not in vals:
ctx = {
'model': self._name,
'field_name': k,
'object_id': pad.id,
}
pad_info = self.with_context(**ctx).pad_generate_url()
pad[k] = pad_info.get('url')
return pad
def _set_field_to_pad(self, vals):
# Update the pad if the `pad_content_field` is modified
for k, field in self._fields.items():
if hasattr(field, 'pad_content_field') and vals.get(field.pad_content_field) and self[k]:
company = self.env.user.sudo().company_id
myPad = EtherpadLiteClient(company.pad_key, (company.pad_server or '') + '/api')
path = self[k].split('/p/')[1]
myPad.setHtmlFallbackText(path, vals[field.pad_content_field])
def _set_pad_to_field(self, vals):
# Update the `pad_content_field` if the pad is modified
for k, v in list(vals.items()):
field = self._fields.get(k)
if hasattr(field, 'pad_content_field'):
vals[field.pad_content_field] = self.pad_get_content(v)
| agpl-3.0 | -3,819,001,917,685,144,000 | 36.530201 | 134 | 0.552933 | false |
Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/v2016_10_01/aio/operations/_key_vault_client_operations.py | 1 | 230715 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KeyVaultClientOperationsMixin:
async def create_key(
self,
vault_base_url: str,
key_name: str,
parameters: "_models.KeyCreateParameters",
**kwargs: Any
) -> "_models.KeyBundle":
"""Creates a new key, stores it, then returns key parameters and attributes to the client.
The create key operation can be used to create any key type in Azure Key Vault. If the named
key already exists, Azure Key Vault creates a new version of the key. It requires the
keys/create permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name for the new key. The system will generate the version name for the
new key.
:type key_name: str
:param parameters: The parameters to create a key.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_key.metadata = {'url': '/keys/{key-name}/create'} # type: ignore
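    # Hedged usage sketch (not part of the generated client): assuming an authenticated
    # async client instance, an RSA key could be created roughly like this; the vault
    # URL and key name below are placeholders.
    #
    #   params = _models.KeyCreateParameters(kty="RSA", key_size=2048)
    #   bundle = await client.create_key(
    #       "https://myvault.vault.azure.net", "my-key", params)
    #   print(bundle.key.kid)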
async def import_key(
self,
vault_base_url: str,
key_name: str,
parameters: "_models.KeyImportParameters",
**kwargs: Any
) -> "_models.KeyBundle":
"""Imports an externally created key, stores it, and returns key parameters and attributes to the client.
The import key operation may be used to import any key type into an Azure Key Vault. If the
named key already exists, Azure Key Vault creates a new version of the key. This operation
requires the keys/import permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: Name for the imported key.
:type key_name: str
:param parameters: The parameters to import a key.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyImportParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.import_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyImportParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
import_key.metadata = {'url': '/keys/{key-name}'} # type: ignore
async def delete_key(
self,
vault_base_url: str,
key_name: str,
**kwargs: Any
) -> "_models.DeletedKeyBundle":
"""Deletes a key of any type from storage in Azure Key Vault.
The delete key operation cannot be used to remove individual versions of a key. This operation
removes the cryptographic material associated with the key, which means the key is not usable
for Sign/Verify, Wrap/Unwrap or Encrypt/Decrypt operations. This operation requires the
keys/delete permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key to delete.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedKeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedKeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_key.metadata = {'url': '/keys/{key-name}'} # type: ignore
async def update_key(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyUpdateParameters",
**kwargs: Any
) -> "_models.KeyBundle":
"""The update key operation changes specified attributes of a stored key and can be applied to any key type and key version stored in Azure Key Vault.
In order to perform this operation, the key must already exist in the Key Vault. Note: The
cryptographic material of a key itself cannot be changed. This operation requires the
keys/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of key to update.
:type key_name: str
:param key_version: The version of the key to update.
:type key_version: str
:param parameters: The parameters of the key to update.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_key.metadata = {'url': '/keys/{key-name}/{key-version}'} # type: ignore
async def get_key(
self,
vault_base_url: str,
key_name: str,
key_version: str,
**kwargs: Any
) -> "_models.KeyBundle":
"""Gets the public part of a stored key.
The get key operation is applicable to all key types. If the requested key is symmetric, then
no key material is released in the response. This operation requires the keys/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key to get.
:type key_name: str
:param key_version: Adding the version parameter retrieves a specific version of a key.
:type key_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_key.metadata = {'url': '/keys/{key-name}/{key-version}'} # type: ignore
def get_key_versions(
self,
vault_base_url: str,
key_name: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.KeyListResult"]:
"""Retrieves a list of individual key versions with the same key name.
The full key identifier, attributes, and tags are provided in the response. This operation
requires the keys/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KeyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_key_versions.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('KeyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_key_versions.metadata = {'url': '/keys/{key-name}/versions'} # type: ignore
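    # Hedged usage sketch (not part of the generated client): the returned AsyncItemPaged
    # is consumed with "async for"; the vault URL and key name below are placeholders.
    #
    #   async for item in client.get_key_versions(
    #           "https://myvault.vault.azure.net", "my-key", maxresults=25):
    #       print(item.kid, item.attributes.enabled)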
def get_keys(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.KeyListResult"]:
"""List keys in the specified vault.
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the
public part of a stored key. The LIST operation is applicable to all key types, however only
the base key identifier, attributes, and tags are provided in the response. Individual versions
of a key are not listed in the response. This operation requires the keys/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KeyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_keys.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('KeyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_keys.metadata = {'url': '/keys'} # type: ignore
async def backup_key(
self,
vault_base_url: str,
key_name: str,
**kwargs: Any
) -> "_models.BackupKeyResult":
"""Requests that a backup of the specified key be downloaded to the client.
The Key Backup operation exports a key from Azure Key Vault in a protected form. Note that this
operation does NOT return key material in a form that can be used outside the Azure Key Vault
        system; the returned key material is either protected to an Azure Key Vault HSM or to Azure Key
Vault itself. The intent of this operation is to allow a client to GENERATE a key in one Azure
Key Vault instance, BACKUP the key, and then RESTORE it into another Azure Key Vault instance.
The BACKUP operation may be used to export, in protected form, any key type from Azure Key
Vault. Individual versions of a key cannot be backed up. BACKUP / RESTORE can be performed
within geographical boundaries only; meaning that a BACKUP from one geographical area cannot be
restored to another geographical area. For example, a backup from the US geographical area
cannot be restored in an EU geographical area. This operation requires the key/backup
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BackupKeyResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.BackupKeyResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BackupKeyResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.backup_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('BackupKeyResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
backup_key.metadata = {'url': '/keys/{key-name}/backup'} # type: ignore
async def restore_key(
self,
vault_base_url: str,
parameters: "_models.KeyRestoreParameters",
**kwargs: Any
) -> "_models.KeyBundle":
"""Restores a backed up key to a vault.
Imports a previously backed up key into Azure Key Vault, restoring the key, its key identifier,
attributes and access control policies. The RESTORE operation may be used to import a
previously backed up key. Individual versions of a key cannot be restored. The key is restored
in its entirety with the same key name as it had when it was backed up. If the key name is not
available in the target Key Vault, the RESTORE operation will be rejected. While the key name
is retained during restore, the final key identifier will change if the key is restored to a
different vault. Restore will restore all versions and preserve version identifiers. The
RESTORE operation is subject to security constraints: The target Key Vault must be owned by the
        same Microsoft Azure Subscription as the source Key Vault. The user must have RESTORE permission
in the target Key Vault. This operation requires the keys/restore permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param parameters: The parameters to restore the key.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyRestoreParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.restore_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyRestoreParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
restore_key.metadata = {'url': '/keys/restore'} # type: ignore
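    # Hedged usage sketch (not part of the generated client): a backup taken from one
    # vault can be restored into another vault in the same geography; the vault URLs and
    # key name below are placeholders.
    #
    #   backup = await client.backup_key("https://source.vault.azure.net", "my-key")
    #   restored = await client.restore_key(
    #       "https://target.vault.azure.net",
    #       _models.KeyRestoreParameters(key_bundle_backup=backup.value))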
async def encrypt(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyOperationsParameters",
**kwargs: Any
) -> "_models.KeyOperationResult":
"""Encrypts an arbitrary sequence of bytes using an encryption key that is stored in a key vault.
The ENCRYPT operation encrypts an arbitrary sequence of bytes using an encryption key that is
stored in Azure Key Vault. Note that the ENCRYPT operation only supports a single block of
data, the size of which is dependent on the target key and the encryption algorithm to be used.
The ENCRYPT operation is only strictly necessary for symmetric keys stored in Azure Key Vault
since protection with an asymmetric key can be performed using public portion of the key. This
operation is supported for asymmetric keys as a convenience for callers that have a
key-reference but do not have access to the public key material. This operation requires the
keys/encrypt permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for the encryption operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyOperationResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.encrypt.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyOperationsParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
encrypt.metadata = {'url': '/keys/{key-name}/{key-version}/encrypt'} # type: ignore
async def decrypt(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyOperationsParameters",
**kwargs: Any
) -> "_models.KeyOperationResult":
"""Decrypts a single block of encrypted data.
The DECRYPT operation decrypts a well-formed block of ciphertext using the target encryption
key and specified algorithm. This operation is the reverse of the ENCRYPT operation; only a
single block of data may be decrypted, the size of this block is dependent on the target key
and the algorithm to be used. The DECRYPT operation applies to asymmetric and symmetric keys
stored in Azure Key Vault since it uses the private portion of the key. This operation requires
the keys/decrypt permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for the decryption operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyOperationResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.decrypt.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyOperationsParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
decrypt.metadata = {'url': '/keys/{key-name}/{key-version}/decrypt'} # type: ignore
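    # Hedged usage sketch (not part of the generated client): an RSA-OAEP round trip
    # through encrypt() and decrypt(); the vault URL, key name and key version below are
    # placeholders.
    #
    #   enc = await client.encrypt(vault_url, "my-key", key_version,
    #       _models.KeyOperationsParameters(algorithm="RSA-OAEP", value=b"plaintext"))
    #   dec = await client.decrypt(vault_url, "my-key", key_version,
    #       _models.KeyOperationsParameters(algorithm="RSA-OAEP", value=enc.result))
    #   assert dec.result == b"plaintext"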
async def sign(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeySignParameters",
**kwargs: Any
) -> "_models.KeyOperationResult":
"""Creates a signature from a digest using the specified key.
The SIGN operation is applicable to asymmetric and symmetric keys stored in Azure Key Vault
since this operation uses the private portion of the key. This operation requires the keys/sign
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for the signing operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeySignParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyOperationResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.sign.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeySignParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
sign.metadata = {'url': '/keys/{key-name}/{key-version}/sign'} # type: ignore
async def verify(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyVerifyParameters",
**kwargs: Any
) -> "_models.KeyVerifyResult":
"""Verifies a signature using a specified key.
The VERIFY operation is applicable to symmetric keys stored in Azure Key Vault. VERIFY is not
strictly necessary for asymmetric keys stored in Azure Key Vault since signature verification
can be performed using the public portion of the key but this operation is supported as a
convenience for callers that only have a key-reference and not the public portion of the key.
This operation requires the keys/verify permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for verify operations.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyVerifyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyVerifyResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyVerifyResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyVerifyResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.verify.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyVerifyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyVerifyResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
verify.metadata = {'url': '/keys/{key-name}/{key-version}/verify'} # type: ignore
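    # Hedged usage sketch (not part of the generated client): signing a SHA-256 digest
    # and verifying the signature; the vault URL, key name and key version below are
    # placeholders, and hashlib is assumed to be imported by the caller.
    #
    #   digest = hashlib.sha256(b"payload").digest()
    #   sig = await client.sign(vault_url, "my-key", key_version,
    #       _models.KeySignParameters(algorithm="RS256", value=digest))
    #   ok = await client.verify(vault_url, "my-key", key_version,
    #       _models.KeyVerifyParameters(algorithm="RS256", digest=digest,
    #                                   signature=sig.result))
    #   assert ok.value is True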
async def wrap_key(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyOperationsParameters",
**kwargs: Any
) -> "_models.KeyOperationResult":
"""Wraps a symmetric key using a specified key.
The WRAP operation supports encryption of a symmetric key using a key encryption key that has
previously been stored in an Azure Key Vault. The WRAP operation is only strictly necessary for
symmetric keys stored in Azure Key Vault since protection with an asymmetric key can be
performed using the public portion of the key. This operation is supported for asymmetric keys
as a convenience for callers that have a key-reference but do not have access to the public key
material. This operation requires the keys/wrapKey permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for wrap operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyOperationResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.wrap_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyOperationsParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
wrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/wrapkey'} # type: ignore
async def unwrap_key(
self,
vault_base_url: str,
key_name: str,
key_version: str,
parameters: "_models.KeyOperationsParameters",
**kwargs: Any
) -> "_models.KeyOperationResult":
"""Unwraps a symmetric key using the specified key that was initially used for wrapping that key.
The UNWRAP operation supports decryption of a symmetric key using the target key encryption
key. This operation is the reverse of the WRAP operation. The UNWRAP operation applies to
asymmetric and symmetric keys stored in Azure Key Vault since it uses the private portion of
the key. This operation requires the keys/unwrapKey permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:param key_version: The version of the key.
:type key_version: str
:param parameters: The parameters for the key operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyOperationResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.unwrap_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
'key-version': self._serialize.url("key_version", key_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyOperationsParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
unwrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/unwrapkey'} # type: ignore
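    # Hedged usage sketch (not part of the generated client): wrapping and unwrapping a
    # locally generated symmetric key with RSA-OAEP; the vault URL, key name, key version
    # and the local key material below are placeholders.
    #
    #   wrapped = await client.wrap_key(vault_url, "my-key", key_version,
    #       _models.KeyOperationsParameters(algorithm="RSA-OAEP", value=local_aes_key))
    #   unwrapped = await client.unwrap_key(vault_url, "my-key", key_version,
    #       _models.KeyOperationsParameters(algorithm="RSA-OAEP", value=wrapped.result))
    #   assert unwrapped.result == local_aes_key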
def get_deleted_keys(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.DeletedKeyListResult"]:
"""Lists the deleted keys in the specified vault.
Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the
public part of a deleted key. This operation includes deletion-specific information. The Get
Deleted Keys operation is applicable for vaults enabled for soft-delete. While the operation
can be invoked on any vault, it will return an error if invoked on a non soft-delete enabled
vault. This operation requires the keys/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified, the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeletedKeyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedKeyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_deleted_keys.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DeletedKeyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_deleted_keys.metadata = {'url': '/deletedkeys'} # type: ignore
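# Usage sketch (comments, hedged): get_deleted_keys returns an AsyncItemPaged, so
# callers iterate it with `async for`. The `kid`/`recovery_id` attributes shown on
# each deleted-key item are assumptions based on the model names in the docstring,
# and `client` is an assumed pre-built async KeyVaultClient.
#
#   async def list_deleted_keys(client):
#       async for deleted in client.get_deleted_keys("https://myvault.vault.azure.net"):
#           print(deleted.kid, deleted.recovery_id)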
async def get_deleted_key(
self,
vault_base_url: str,
key_name: str,
**kwargs: Any
) -> "_models.DeletedKeyBundle":
"""Gets the public part of a deleted key.
The Get Deleted Key operation is applicable for soft-delete enabled vaults. While the operation
can be invoked on any vault, it will return an error if invoked on a non soft-delete enabled
vault. This operation requires the keys/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedKeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_deleted_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedKeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'} # type: ignore
async def purge_deleted_key(
self,
vault_base_url: str,
key_name: str,
**kwargs: Any
) -> None:
"""Permanently deletes the specified key.
The Purge Deleted Key operation is applicable for soft-delete enabled vaults. While the
operation can be invoked on any vault, it will return an error if invoked on a non soft-delete
enabled vault. This operation requires the keys/purge permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the key.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.purge_deleted_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
purge_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'} # type: ignore
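# Usage sketch (comments only): purge_deleted_key returns None on the expected 204
# response, so there is nothing to inspect on success; `client` is an assumed
# pre-built async KeyVaultClient instance.
#
#   async def purge_key_example(client):
#       await client.purge_deleted_key("https://myvault.vault.azure.net", "my-key")
#       # success: the deleted key is now unrecoverable; failures raise HttpResponseError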
async def recover_deleted_key(
self,
vault_base_url: str,
key_name: str,
**kwargs: Any
) -> "_models.KeyBundle":
"""Recovers the deleted key to its latest version.
The Recover Deleted Key operation is applicable for deleted keys in soft-delete enabled vaults.
It recovers the deleted key back to its latest version under /keys. An attempt to recover a
non-deleted key will return an error. Consider this the inverse of the delete operation on
soft-delete enabled vaults. This operation requires the keys/recover permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name of the deleted key.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.recover_deleted_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KeyBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
recover_deleted_key.metadata = {'url': '/deletedkeys/{key-name}/recover'} # type: ignore
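# Usage sketch (comments, hedged): recovering a soft-deleted key yields a KeyBundle;
# reading `bundle.key.kid` assumes the usual JsonWebKey shape of that model, and
# `client` is an assumed pre-built async KeyVaultClient.
#
#   async def recover_key_example(client):
#       bundle = await client.recover_deleted_key("https://myvault.vault.azure.net", "my-key")
#       print(bundle.key.kid)  # the key is live again under /keys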
async def set_secret(
self,
vault_base_url: str,
secret_name: str,
parameters: "_models.SecretSetParameters",
**kwargs: Any
) -> "_models.SecretBundle":
"""Sets a secret in a specified key vault.
The SET operation adds a secret to the Azure Key Vault. If the named secret already exists,
Azure Key Vault creates a new version of that secret. This operation requires the secrets/set
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param parameters: The parameters for setting the secret.
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretSetParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecretSetParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_secret.metadata = {'url': '/secrets/{secret-name}'} # type: ignore
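# Usage sketch (comments, hedged): set_secret takes a SecretSetParameters body; the
# `value`/`content_type` fields are assumptions based on the model referenced in the
# docstring, and `client` is an assumed pre-built async KeyVaultClient.
#
#   from azure.keyvault.v2016_10_01 import models
#
#   async def set_secret_example(client):
#       params = models.SecretSetParameters(value="s3cr3t", content_type="text/plain")
#       bundle = await client.set_secret("https://myvault.vault.azure.net", "db-password", params)
#       print(bundle.id)  # full secret identifier including the new version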
async def delete_secret(
self,
vault_base_url: str,
secret_name: str,
**kwargs: Any
) -> "_models.DeletedSecretBundle":
"""Deletes a secret from a specified key vault.
The DELETE operation applies to any secret stored in Azure Key Vault. DELETE cannot be applied
to an individual version of a secret. This operation requires the secrets/delete permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedSecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedSecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_secret.metadata = {'url': '/secrets/{secret-name}'} # type: ignore
async def update_secret(
self,
vault_base_url: str,
secret_name: str,
secret_version: str,
parameters: "_models.SecretUpdateParameters",
**kwargs: Any
) -> "_models.SecretBundle":
"""Updates the attributes associated with a specified secret in a given key vault.
The UPDATE operation changes specified attributes of an existing stored secret. Attributes that
are not specified in the request are left unchanged. The value of a secret itself cannot be
changed. This operation requires the secrets/set permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param secret_version: The version of the secret.
:type secret_version: str
:param parameters: The parameters for update secret operation.
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecretUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'} # type: ignore
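# Usage sketch (comments, hedged): update_secret can only change metadata, never the
# secret value itself; the SecretUpdateParameters/SecretAttributes field names below
# are assumptions based on the models referenced in the docstring.
#
#   from azure.keyvault.v2016_10_01 import models
#
#   async def disable_secret_version(client, version: str):
#       params = models.SecretUpdateParameters(
#           secret_attributes=models.SecretAttributes(enabled=False),
#           tags={"rotated": "true"},
#       )
#       return await client.update_secret(
#           "https://myvault.vault.azure.net", "db-password", version, params)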
async def get_secret(
self,
vault_base_url: str,
secret_name: str,
secret_version: str,
**kwargs: Any
) -> "_models.SecretBundle":
"""Get a specified secret from a given key vault.
The GET operation is applicable to any secret stored in Azure Key Vault. This operation
requires the secrets/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param secret_version: The version of the secret.
:type secret_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'} # type: ignore
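# Usage sketch (comments, hedged): passing an empty string for secret_version is the
# common way the higher-level SDKs request the latest version through this operation;
# treat that and the pre-built `client` as assumptions for illustration.
#
#   async def read_secret_example(client) -> str:
#       bundle = await client.get_secret("https://myvault.vault.azure.net", "db-password", "")
#       return bundle.value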
def get_secrets(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.SecretListResult"]:
"""List secrets in a specified key vault.
The Get Secrets operation is applicable to the entire vault. However, only the base secret
identifier and its attributes are provided in the response. Individual secret versions are not
listed in the response. This operation requires the secrets/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified, the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecretListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_secrets.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecretListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_secrets.metadata = {'url': '/secrets'} # type: ignore
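# Usage sketch (comments, hedged): like the other list operations here, get_secrets
# returns an AsyncItemPaged of secret items (values are never included); the `id`
# attribute and the pre-built `client` are assumptions for illustration.
#
#   async def list_secret_ids(client, page_size: int = 25):
#       async for item in client.get_secrets("https://myvault.vault.azure.net", maxresults=page_size):
#           print(item.id)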
def get_secret_versions(
self,
vault_base_url: str,
secret_name: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.SecretListResult"]:
"""List all versions of the specified secret.
The full secret identifier and attributes are provided in the response. No values are returned
for the secrets. This operation requires the secrets/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param maxresults: Maximum number of results to return in a page. If not specified, the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecretListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_secret_versions.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecretListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_secret_versions.metadata = {'url': '/secrets/{secret-name}/versions'} # type: ignore
def get_deleted_secrets(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.DeletedSecretListResult"]:
"""Lists deleted secrets for the specified vault.
The Get Deleted Secrets operation returns the secrets that have been deleted for a vault
enabled for soft-delete. This operation requires the secrets/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified, the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeletedSecretListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedSecretListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_deleted_secrets.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DeletedSecretListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_deleted_secrets.metadata = {'url': '/deletedsecrets'} # type: ignore
async def get_deleted_secret(
self,
vault_base_url: str,
secret_name: str,
**kwargs: Any
) -> "_models.DeletedSecretBundle":
"""Gets the specified deleted secret.
The Get Deleted Secret operation returns the specified deleted secret along with its
attributes. This operation requires the secrets/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedSecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_deleted_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedSecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'} # type: ignore
async def purge_deleted_secret(
self,
vault_base_url: str,
secret_name: str,
**kwargs: Any
) -> None:
"""Permanently deletes the specified secret.
The purge deleted secret operation removes the secret permanently, without the possibility of
recovery. This operation can only be performed on a soft-delete enabled vault. This operation
requires the secrets/purge permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.purge_deleted_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
purge_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'} # type: ignore
async def recover_deleted_secret(
self,
vault_base_url: str,
secret_name: str,
**kwargs: Any
) -> "_models.SecretBundle":
"""Recovers the deleted secret to the latest version.
Recovers the deleted secret in the specified vault. This operation can only be performed on a
soft-delete enabled vault. This operation requires the secrets/recover permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the deleted secret.
:type secret_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.recover_deleted_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
recover_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}/recover'} # type: ignore
async def backup_secret(
self,
vault_base_url: str,
secret_name: str,
**kwargs: Any
) -> "_models.BackupSecretResult":
"""Backs up the specified secret.
Requests that a backup of the specified secret be downloaded to the client. All versions of the
secret will be downloaded. This operation requires the secrets/backup permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BackupSecretResult, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.BackupSecretResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BackupSecretResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.backup_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('BackupSecretResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
backup_secret.metadata = {'url': '/secrets/{secret-name}/backup'} # type: ignore
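# Usage sketch (comments, hedged): the BackupSecretResult carries an opaque backup
# blob; reading it from `.value` is an assumption based on the model name, and
# `client` is an assumed pre-built async KeyVaultClient.
#
#   async def backup_secret_example(client) -> bytes:
#       backup = await client.backup_secret("https://myvault.vault.azure.net", "db-password")
#       return backup.value  # opaque blob, only restorable via restore_secret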
async def restore_secret(
self,
vault_base_url: str,
parameters: "_models.SecretRestoreParameters",
**kwargs: Any
) -> "_models.SecretBundle":
"""Restores a backed up secret to a vault.
Restores a backed up secret, and all its versions, to a vault. This operation requires the
secrets/restore permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param parameters: The parameters to restore the secret.
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretRestoreParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecretBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.restore_secret.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecretRestoreParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SecretBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
restore_secret.metadata = {'url': '/secrets/restore'} # type: ignore
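# Usage sketch (comments, hedged): restore_secret is the counterpart of backup_secret;
# the `secret_bundle_backup` field name on SecretRestoreParameters is an assumption
# based on the model referenced in the docstring.
#
#   from azure.keyvault.v2016_10_01 import models
#
#   async def restore_secret_example(client, backup_blob: bytes):
#       params = models.SecretRestoreParameters(secret_bundle_backup=backup_blob)
#       return await client.restore_secret("https://myvault.vault.azure.net", params)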
def get_certificates(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.CertificateListResult"]:
"""List certificates in a specified key vault.
The GetCertificates operation returns the set of certificate resources in the specified key
vault. This operation requires the certificates/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified, the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CertificateListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_certificates.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CertificateListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_certificates.metadata = {'url': '/certificates'} # type: ignore
async def delete_certificate(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.DeletedCertificateBundle":
"""Deletes a certificate from a specified key vault.
Deletes all versions of a certificate object along with its associated policy. Delete
certificate cannot be used to remove individual versions of a certificate object. This
operation requires the certificates/delete permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedCertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_certificate.metadata = {'url': '/certificates/{certificate-name}'} # type: ignore
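# Usage sketch (comments, hedged): deleting a certificate removes every version at
# once and, on a soft-delete enabled vault, returns a DeletedCertificateBundle whose
# `recovery_id` (an assumed attribute of that model) can be used to recover it later.
#
#   async def delete_certificate_example(client):
#       deleted = await client.delete_certificate("https://myvault.vault.azure.net", "tls-cert")
#       print(deleted.recovery_id)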
async def set_certificate_contacts(
self,
vault_base_url: str,
contacts: "_models.Contacts",
**kwargs: Any
) -> "_models.Contacts":
"""Sets the certificate contacts for the specified key vault.
Sets the certificate contacts for the specified key vault. This operation requires the
certificates/managecontacts permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param contacts: The contacts for the key vault certificate.
:type contacts: ~azure.keyvault.v2016_10_01.models.Contacts
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Contacts, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.Contacts
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_certificate_contacts.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(contacts, 'Contacts')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('Contacts', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore
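# Usage sketch (comments, hedged): certificate contacts are set vault-wide with a
# Contacts body; the Contact model and its email_address/name/phone fields are
# assumptions based on the models namespace referenced in the docstring.
#
#   from azure.keyvault.v2016_10_01 import models
#
#   async def set_contacts_example(client):
#       contacts = models.Contacts(contact_list=[
#           models.Contact(email_address="pki-admins@contoso.com", name="PKI admins", phone="555-0100"),
#       ])
#       return await client.set_certificate_contacts("https://myvault.vault.azure.net", contacts)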
async def get_certificate_contacts(
self,
vault_base_url: str,
**kwargs: Any
) -> "_models.Contacts":
"""Lists the certificate contacts for a specified key vault.
The GetCertificateContacts operation returns the set of certificate contact resources in the
specified key vault. This operation requires the certificates/managecontacts permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Contacts, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.Contacts
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_certificate_contacts.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('Contacts', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore
async def delete_certificate_contacts(
self,
vault_base_url: str,
**kwargs: Any
) -> "_models.Contacts":
"""Deletes the certificate contacts for a specified key vault.
Deletes the certificate contacts for a specified key vault. This operation requires
the certificates/managecontacts permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Contacts, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.Contacts
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_certificate_contacts.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('Contacts', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore
def get_certificate_issuers(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.CertificateIssuerListResult"]:
"""List certificate issuers for a specified key vault.
The GetCertificateIssuers operation returns the set of certificate issuer resources in the
specified key vault. This operation requires the certificates/manageissuers/getissuers
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CertificateIssuerListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateIssuerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateIssuerListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_certificate_issuers.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CertificateIssuerListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_certificate_issuers.metadata = {'url': '/certificates/issuers'} # type: ignore
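    # Usage sketch (illustrative): the method returns an AsyncItemPaged, so
    # pages are fetched lazily while iterating with ``async for``. ``client``
    # and the ``id`` attribute on each issuer item are assumptions.
    #
    #     async for issuer_item in client.get_certificate_issuers(
    #         "https://myvault.vault.azure.net", maxresults=25
    #     ):
    #         print(issuer_item.id)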
async def set_certificate_issuer(
self,
vault_base_url: str,
issuer_name: str,
parameter: "_models.CertificateIssuerSetParameters",
**kwargs: Any
) -> "_models.IssuerBundle":
"""Sets the specified certificate issuer.
The SetCertificateIssuer operation adds or updates the specified certificate issuer. This
operation requires the certificates/setissuers permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param parameter: Certificate issuer set parameter.
:type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerSetParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IssuerBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_certificate_issuer.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameter, 'CertificateIssuerSetParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IssuerBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore
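    # Usage sketch (illustrative): the issuer is created or replaced via an
    # HTTP PUT with a CertificateIssuerSetParameters body. The ``provider``
    # keyword is an assumption based on the 2016-10-01 models.
    #
    #     issuer = await client.set_certificate_issuer(
    #         "https://myvault.vault.azure.net",
    #         "my-issuer",
    #         _models.CertificateIssuerSetParameters(provider="Test"),
    #     )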
async def update_certificate_issuer(
self,
vault_base_url: str,
issuer_name: str,
parameter: "_models.CertificateIssuerUpdateParameters",
**kwargs: Any
) -> "_models.IssuerBundle":
"""Updates the specified certificate issuer.
The UpdateCertificateIssuer operation performs an update on the specified certificate issuer
entity. This operation requires the certificates/setissuers permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param parameter: Certificate issuer update parameter.
:type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IssuerBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_certificate_issuer.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameter, 'CertificateIssuerUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IssuerBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore
async def get_certificate_issuer(
self,
vault_base_url: str,
issuer_name: str,
**kwargs: Any
) -> "_models.IssuerBundle":
"""Lists the specified certificate issuer.
The GetCertificateIssuer operation returns the specified certificate issuer resources in the
specified key vault. This operation requires the certificates/manageissuers/getissuers
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IssuerBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_certificate_issuer.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IssuerBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore
async def delete_certificate_issuer(
self,
vault_base_url: str,
issuer_name: str,
**kwargs: Any
) -> "_models.IssuerBundle":
"""Deletes the specified certificate issuer.
The DeleteCertificateIssuer operation permanently removes the specified certificate issuer from
the vault. This operation requires the certificates/manageissuers/deleteissuers permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IssuerBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_certificate_issuer.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IssuerBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore
async def create_certificate(
self,
vault_base_url: str,
certificate_name: str,
parameters: "_models.CertificateCreateParameters",
**kwargs: Any
) -> "_models.CertificateOperation":
"""Creates a new certificate.
If this is the first version, the certificate resource is created. This operation requires the
certificates/create permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param parameters: The parameters to create a certificate.
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateOperation, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CertificateCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_certificate.metadata = {'url': '/certificates/{certificate-name}/create'} # type: ignore
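    # Usage sketch (illustrative): create_certificate only starts the creation
    # (HTTP 202) and returns a CertificateOperation, so callers typically poll
    # get_certificate_operation until it leaves the "inProgress" state. The
    # policy keyword names and the status value are assumptions based on the
    # 2016-10-01 models; polling requires ``import asyncio``.
    #
    #     op = await client.create_certificate(
    #         vault_url,
    #         "my-cert",
    #         _models.CertificateCreateParameters(
    #             certificate_policy=_models.CertificatePolicy(
    #                 issuer_parameters=_models.IssuerParameters(name="Self"),
    #                 x509_certificate_properties=_models.X509CertificateProperties(
    #                     subject="CN=example.com", validity_in_months=12
    #                 ),
    #             )
    #         ),
    #     )
    #     while op.status == "inProgress":
    #         await asyncio.sleep(5)
    #         op = await client.get_certificate_operation(vault_url, "my-cert")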
async def import_certificate(
self,
vault_base_url: str,
certificate_name: str,
parameters: "_models.CertificateImportParameters",
**kwargs: Any
) -> "_models.CertificateBundle":
"""Imports a certificate into a specified key vault.
Imports an existing valid certificate, containing a private key, into Azure Key Vault. The
certificate to be imported can be in either PFX or PEM format. If the certificate is in PEM
format the PEM file must contain the key as well as x509 certificates. This operation requires
the certificates/import permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param parameters: The parameters to import the certificate.
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateImportParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.import_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CertificateImportParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
import_certificate.metadata = {'url': '/certificates/{certificate-name}/import'} # type: ignore
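    # Usage sketch (illustrative): the certificate material is supplied base64
    # encoded (PFX, or PEM including the private key). The keyword names
    # ``base64_encoded_certificate`` and ``password`` are assumptions based on
    # the CertificateImportParameters model.
    #
    #     bundle = await client.import_certificate(
    #         vault_url,
    #         "imported-cert",
    #         _models.CertificateImportParameters(
    #             base64_encoded_certificate=pfx_base64_string,
    #             password="pfx-password",
    #         ),
    #     )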
def get_certificate_versions(
self,
vault_base_url: str,
certificate_name: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.CertificateListResult"]:
"""List the versions of a certificate.
The GetCertificateVersions operation returns the versions of a certificate in the specified key
vault. This operation requires the certificates/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CertificateListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_certificate_versions.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CertificateListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_certificate_versions.metadata = {'url': '/certificates/{certificate-name}/versions'} # type: ignore
async def get_certificate_policy(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.CertificatePolicy":
"""Lists the policy for a certificate.
The GetCertificatePolicy operation returns the specified certificate policy resources in the
specified key vault. This operation requires the certificates/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in a given key vault.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificatePolicy, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificatePolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_certificate_policy.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificatePolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'} # type: ignore
async def update_certificate_policy(
self,
vault_base_url: str,
certificate_name: str,
certificate_policy: "_models.CertificatePolicy",
**kwargs: Any
) -> "_models.CertificatePolicy":
"""Updates the policy for a certificate.
Set specified members in the certificate policy. Leave others as null. This operation requires
the certificates/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given vault.
:type certificate_name: str
:param certificate_policy: The policy for the certificate.
:type certificate_policy: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificatePolicy, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificatePolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_certificate_policy.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(certificate_policy, 'CertificatePolicy')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificatePolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'} # type: ignore
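    # Usage sketch (illustrative): only the members set on the supplied policy
    # are updated; everything left as None is preserved. The ``key_properties``
    # keywords are assumptions based on the 2016-10-01 models.
    #
    #     policy = await client.update_certificate_policy(
    #         vault_url,
    #         "my-cert",
    #         _models.CertificatePolicy(
    #             key_properties=_models.KeyProperties(exportable=True, key_size=2048)
    #         ),
    #     )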
async def update_certificate(
self,
vault_base_url: str,
certificate_name: str,
certificate_version: str,
parameters: "_models.CertificateUpdateParameters",
**kwargs: Any
) -> "_models.CertificateBundle":
"""Updates the specified attributes associated with the given certificate.
The UpdateCertificate operation applies the specified update on the given certificate; the only
elements updated are the certificate's attributes. This operation requires the
certificates/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given key vault.
:type certificate_name: str
:param certificate_version: The version of the certificate.
:type certificate_version: str
:param parameters: The parameters for certificate update.
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CertificateUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'} # type: ignore
async def get_certificate(
self,
vault_base_url: str,
certificate_name: str,
certificate_version: str,
**kwargs: Any
) -> "_models.CertificateBundle":
"""Gets information about a certificate.
Gets information about a specific certificate. This operation requires the certificates/get
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given vault.
:type certificate_name: str
:param certificate_version: The version of the certificate.
:type certificate_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'} # type: ignore
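    # Usage sketch (illustrative): a specific version is addressed by its
    # version string; passing an empty string is the usual way to ask the
    # service for the latest version (assumption about service behaviour).
    #
    #     bundle = await client.get_certificate(vault_url, "my-cert", "")
    #     print(bundle.id)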
async def update_certificate_operation(
self,
vault_base_url: str,
certificate_name: str,
certificate_operation: "_models.CertificateOperationUpdateParameter",
**kwargs: Any
) -> "_models.CertificateOperation":
"""Updates a certificate operation.
Updates a certificate creation operation that is already in progress. This operation requires
the certificates/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param certificate_operation: The certificate operation response.
:type certificate_operation: ~azure.keyvault.v2016_10_01.models.CertificateOperationUpdateParameter
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateOperation, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_certificate_operation.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(certificate_operation, 'CertificateOperationUpdateParameter')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore
async def get_certificate_operation(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.CertificateOperation":
"""Gets the creation operation of a certificate.
Gets the creation operation associated with a specified certificate. This operation requires
the certificates/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateOperation, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_certificate_operation.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore
async def delete_certificate_operation(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.CertificateOperation":
"""Deletes the creation operation for a specific certificate.
Deletes the creation operation for a specified certificate that is in the process of being
created. The certificate is no longer created. This operation requires the certificates/update
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateOperation, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_certificate_operation.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore
async def merge_certificate(
self,
vault_base_url: str,
certificate_name: str,
parameters: "_models.CertificateMergeParameters",
**kwargs: Any
) -> "_models.CertificateBundle":
"""Merges a certificate or a certificate chain with a key pair existing on the server.
The MergeCertificate operation performs the merging of a certificate or certificate chain with
a key pair currently available in the service. This operation requires the certificates/create
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param parameters: The parameters to merge certificate.
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateMergeParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.merge_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CertificateMergeParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
merge_certificate.metadata = {'url': '/certificates/{certificate-name}/pending/merge'} # type: ignore
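    # Usage sketch (illustrative): merging completes a pending certificate
    # operation by supplying the externally signed certificate (and optionally
    # its chain) as a list of byte strings. ``x509_certificates`` is an
    # assumption based on the CertificateMergeParameters model.
    #
    #     merged = await client.merge_certificate(
    #         vault_url,
    #         "pending-cert",
    #         _models.CertificateMergeParameters(
    #             x509_certificates=[signed_certificate_bytes]
    #         ),
    #     )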
def get_deleted_certificates(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.DeletedCertificateListResult"]:
"""Lists the deleted certificates in the specified vault currently available for recovery.
The GetDeletedCertificates operation retrieves the certificates in the current vault which are
in a deleted state and ready for recovery or purging. This operation includes deletion-specific
information. This operation requires the certificates/get/list permission. This operation can
only be enabled on soft-delete enabled vaults.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeletedCertificateListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedCertificateListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_deleted_certificates.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DeletedCertificateListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_deleted_certificates.metadata = {'url': '/deletedcertificates'} # type: ignore
async def get_deleted_certificate(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.DeletedCertificateBundle":
"""Retrieves information about the specified deleted certificate.
The GetDeletedCertificate operation retrieves the deleted certificate information plus its
attributes, such as retention interval, scheduled permanent deletion and the current deletion
recovery level. This operation requires the certificates/get permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedCertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_deleted_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'} # type: ignore
async def purge_deleted_certificate(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> None:
"""Permanently deletes the specified deleted certificate.
The PurgeDeletedCertificate operation performs an irreversible deletion of the specified
        certificate, with no possibility of recovery. The operation is not available if the recovery
        level does not specify 'Purgeable'. This operation requires the certificates/purge permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.purge_deleted_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
purge_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'} # type: ignore
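    # Usage sketch (illustrative): purge returns no body (HTTP 204), so the
    # call simply completes or raises HttpResponseError; it only succeeds on
    # soft-delete enabled vaults whose recovery level permits purging.
    #
    #     await client.purge_deleted_certificate(vault_url, "my-cert")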
async def recover_deleted_certificate(
self,
vault_base_url: str,
certificate_name: str,
**kwargs: Any
) -> "_models.CertificateBundle":
"""Recovers the deleted certificate back to its current version under /certificates.
The RecoverDeletedCertificate operation performs the reversal of the Delete operation. The
operation is applicable in vaults enabled for soft-delete, and must be issued during the
retention interval (available in the deleted certificate's attributes). This operation requires
the certificates/recover permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the deleted certificate.
:type certificate_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CertificateBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.recover_deleted_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CertificateBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
recover_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}/recover'} # type: ignore
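    # Usage sketch (illustrative): recovery reverses a soft delete during the
    # retention interval and returns the recovered certificate bundle.
    #
    #     recovered = await client.recover_deleted_certificate(vault_url, "my-cert")
    #     print(recovered.id)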
def get_storage_accounts(
self,
vault_base_url: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.StorageListResult"]:
"""List storage accounts managed by the specified key vault. This operation requires the
storage/list permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.StorageListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_storage_accounts.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StorageListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_storage_accounts.metadata = {'url': '/storage'} # type: ignore
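    # Illustrative-only note (not part of the generated code): get_storage_accounts
    # returns an AsyncItemPaged rather than a coroutine, so results are consumed
    # with async iteration, e.g.
    #     async for account in ops.get_storage_accounts(vault_url, maxresults=25):
    #         ...
    # `ops` and `vault_url` are assumed example names.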
async def delete_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
**kwargs: Any
) -> "_models.StorageBundle":
"""Deletes a storage account. This operation requires the storage/delete permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def get_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
**kwargs: Any
) -> "_models.StorageBundle":
"""Gets information about a specified storage account. This operation requires the storage/get
permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def set_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountCreateParameters",
**kwargs: Any
) -> "_models.StorageBundle":
"""Creates or updates a new storage account. This operation requires the storage/set permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to create a storage account.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def update_storage_account(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountUpdateParameters",
**kwargs: Any
) -> "_models.StorageBundle":
"""Updates the specified attributes associated with the given storage account. This operation
requires the storage/set/update permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to update a storage account.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_storage_account.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore
async def regenerate_storage_account_key(
self,
vault_base_url: str,
storage_account_name: str,
parameters: "_models.StorageAccountRegenerteKeyParameters",
**kwargs: Any
) -> "_models.StorageBundle":
"""Regenerates the specified key value for the given storage account. This operation requires the
storage/regeneratekey permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param parameters: The parameters to regenerate storage account key.
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountRegenerteKeyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.regenerate_storage_account_key.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountRegenerteKeyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('StorageBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_storage_account_key.metadata = {'url': '/storage/{storage-account-name}/regeneratekey'} # type: ignore
def get_sas_definitions(
self,
vault_base_url: str,
storage_account_name: str,
maxresults: Optional[int] = None,
**kwargs: Any
) -> AsyncIterable["_models.SasDefinitionListResult"]:
"""List storage SAS definitions for the given storage account. This operation requires the
storage/listsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param maxresults: Maximum number of results to return in a page. If not specified the service
will return up to 25 results.
:type maxresults: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SasDefinitionListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SasDefinitionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_sas_definitions.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SasDefinitionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_sas_definitions.metadata = {'url': '/storage/{storage-account-name}/sas'} # type: ignore
async def delete_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
**kwargs: Any
) -> "_models.SasDefinitionBundle":
"""Deletes a SAS definition from a specified storage account. This operation requires the
storage/deletesas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.delete_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def get_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
**kwargs: Any
) -> "_models.SasDefinitionBundle":
"""Gets information about a SAS definition for the specified storage account. This operation
requires the storage/getsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
accept = "application/json"
# Construct URL
url = self.get_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def set_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
parameters: "_models.SasDefinitionCreateParameters",
**kwargs: Any
) -> "_models.SasDefinitionBundle":
"""Creates or updates a new SAS definition for the specified storage account. This operation
requires the storage/setsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:param parameters: The parameters to create a SAS definition.
:type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SasDefinitionCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
async def update_sas_definition(
self,
vault_base_url: str,
storage_account_name: str,
sas_definition_name: str,
parameters: "_models.SasDefinitionUpdateParameters",
**kwargs: Any
) -> "_models.SasDefinitionBundle":
"""Updates the specified attributes associated with the given SAS definition. This operation
requires the storage/setsas permission.
:param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param sas_definition_name: The name of the SAS definition.
:type sas_definition_name: str
:param parameters: The parameters to update a SAS definition.
:type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SasDefinitionBundle, or the result of cls(response)
:rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_sas_definition.metadata['url'] # type: ignore
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SasDefinitionUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('SasDefinitionBundle', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
| mit | 5,830,821,531,992,470,000 | 48.203455 | 158 | 0.642438 | false |
platformio/platformio-core | platformio/debug/helpers.py | 1 | 6497 | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import util
from platformio.commands import PlatformioCLI
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.run.command import print_processing_header
from platformio.commands.test.helpers import get_test_names
from platformio.commands.test.processor import TestProcessorBase
from platformio.compat import IS_WINDOWS, is_bytes
from platformio.debug.exception import DebugInvalidOptionsError
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
STDOUT = sys.stdout
def write(self, text):
self.STDOUT.write(escape_gdbmi_stream("~", text))
self.STDOUT.flush()
def is_gdbmi_mode():
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
bytes_stream = False
if is_bytes(stream):
bytes_stream = True
stream = stream.decode()
if not stream:
return b"" if bytes_stream else ""
ends_nl = stream.endswith("\n")
stream = re.sub(r"\\+", "\\\\\\\\", stream)
stream = stream.replace('"', '\\"')
stream = stream.replace("\n", "\\n")
stream = '%s"%s"' % (prefix, stream)
if ends_nl:
stream += "\n"
return stream.encode() if bytes_stream else stream
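# Hypothetical usage sketch (not part of the original module): demonstrates the
# GDB/MI console record produced by escape_gdbmi_stream(). The sample text is an
# assumption chosen only to show the quote and newline escaping.
def _example_escape_gdbmi_stream():
    record = escape_gdbmi_stream("~", 'build "firmware.elf"\n')
    # record is now: ~"build \"firmware.elf\"\n" followed by a literal newline,
    # i.e. a console-stream record in GDB/MI syntax.
    return record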
def get_default_debug_env(config):
default_envs = config.default_envs()
all_envs = config.envs()
for env in default_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
for env in all_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
return default_envs[0] if default_envs else all_envs[0]
def predebug_project(
ctx, project_dir, project_config, env_name, preload, verbose
): # pylint: disable=too-many-arguments
debug_testname = project_config.get("env:" + env_name, "debug_test")
if debug_testname:
test_names = get_test_names(project_config)
if debug_testname not in test_names:
raise DebugInvalidOptionsError(
"Unknown test name `%s`. Valid names are `%s`"
% (debug_testname, ", ".join(test_names))
)
print_processing_header(env_name, project_config, verbose)
tp = TestProcessorBase(
ctx,
debug_testname,
env_name,
dict(
project_config=project_config,
project_dir=project_dir,
without_building=False,
without_uploading=True,
without_testing=True,
verbose=False,
),
)
tp.build_or_upload(["__debug", "__test"] + (["upload"] if preload else []))
else:
ctx.invoke(
cmd_run,
project_dir=project_dir,
project_conf=project_config.path,
environment=[env_name],
target=["__debug"] + (["upload"] if preload else []),
verbose=verbose,
)
if preload:
time.sleep(5)
def has_debug_symbols(prog_path):
if not isfile(prog_path):
return False
matched = {
b".debug_info": False,
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
# b"__PLATFORMIO_BUILD_DEBUG__": False,
}
with open(prog_path, "rb") as fp:
last_data = b""
while True:
data = fp.read(1024)
if not data:
break
for pattern, found in matched.items():
if found:
continue
if pattern in last_data + data:
matched[pattern] = True
last_data = data
return all(matched.values())
def is_prog_obsolete(prog_path):
prog_hash_path = prog_path + ".sha1"
if not isfile(prog_path):
return True
shasum = sha1()
with open(prog_path, "rb") as fp:
while True:
data = fp.read(1024)
if not data:
break
shasum.update(data)
new_digest = shasum.hexdigest()
old_digest = None
if isfile(prog_hash_path):
with open(prog_hash_path) as fp:
old_digest = fp.read()
if new_digest == old_digest:
return False
with open(prog_hash_path, "w") as fp:
fp.write(new_digest)
return True
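# Hypothetical usage sketch (not part of the original module): shows how the two
# helpers above could be combined before starting a debug session. The firmware
# path below is an assumption used only for illustration.
def _example_debug_precheck(prog_path="/tmp/firmware.elf"):
    if not has_debug_symbols(prog_path):
        return "program lacks debug info; rebuild with -g/-Og"
    if is_prog_obsolete(prog_path):
        return "program changed since the last debug session"
    return "ready to debug"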
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
if set(["*", "?", "[", "]"]) & set(env_debug_port):
return env_debug_port
return None
def _is_match_pattern(port):
pattern = _get_pattern()
if not pattern:
return True
return fnmatch(port, pattern)
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
continue
port = item["port"]
if tool_name.startswith("blackmagic"):
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
return port
return None
if env_debug_port and not _get_pattern():
return env_debug_port
if not tool_settings.get("require_debug_port"):
return None
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
return debug_port
| apache-2.0 | 5,665,862,348,788,826,000 | 30.848039 | 85 | 0.587348 | false |
jmchilton/galaxy-central | galaxy/tools/parameters.py | 1 | 19896 | """
Classes encapsulating tool parameters
"""
import logging, string, sys
from galaxy import config, datatypes, util, form_builder
import validation
from elementtree.ElementTree import XML, Element
log = logging.getLogger(__name__)
class ToolParameter( object ):
"""
Describes a parameter accepted by a tool. This is just a simple stub at the
moment but in the future should encapsulate more complex parameters (lists
of valid choices, validation logic, ...)
"""
def __init__( self, tool, param ):
self.tool = tool
self.name = param.get("name")
self.label = util.xml_text(param, "label")
self.help = util.xml_text(param, "help")
self.html = "no html set"
self.validators = []
for elem in param.findall("validator"):
self.validators.append( validation.Validator.from_element( elem ) )
def get_label( self ):
"""Return user friendly name for the parameter"""
if self.label: return self.label
else: return self.name
def get_html( self, trans=None, value=None, other_values={} ):
"""
Returns the html widget corresponding to the paramter.
Optionally attempt to retain the current value specific by 'value'
"""
return self.html
def get_required_enctype( self ):
"""
If this parameter needs the form to have a specific encoding
return it, otherwise return None (indicating compatibility with
any encoding)
"""
return None
def filter_value( self, value, trans=None, other_values={} ):
"""
Parse the value returned by the view into a form usable by the tool OR
raise a ValueError.
"""
return value
def to_string( self, value, app ):
"""Convert a value to a string representation suitable for persisting"""
return str( value )
def to_python( self, value, app ):
"""Convert a value created with to_string back to an object representation"""
return value
def validate( self, value, history=None ):
for validator in self.validators:
validator.validate( value, history )
@classmethod
def build( cls, tool, param ):
"""Factory method to create parameter of correct type"""
param_type = param.get("type")
if not param_type or param_type not in parameter_types:
raise ValueError( "Unknown tool parameter type '%s'" % param_type )
else:
return parameter_types[param_type]( tool, param )
class TextToolParameter( ToolParameter ):
"""
Parameter that can take on any text value.
>>> p = TextToolParameter( None, XML( '<param name="blah" type="text" size="4" value="default" />' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="text" name="blah" size="4" value="default">
>>> print p.get_html( value="meh" )
<input type="text" name="blah" size="4" value="meh">
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
self.name = elem.get( 'name' )
self.size = elem.get( 'size' )
self.value = elem.get( 'value' )
self.area = str_bool( elem.get( 'area', False ) )
def get_html( self, trans=None, value=None, other_values={} ):
if self.area:
return form_builder.TextArea( self.name, self.size, value or self.value ).get_html()
return form_builder.TextField( self.name, self.size, value or self.value ).get_html()
class IntegerToolParameter( TextToolParameter ):
"""
Parameter that takes an integer value.
>>> p = IntegerToolParameter( None, XML( '<param name="blah" type="integer" size="4" value="10" />' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="text" name="blah" size="4" value="10">
>>> type( p.filter_value( "10" ) )
<type 'int'>
>>> type( p.filter_value( "bleh" ) )
Traceback (most recent call last):
...
ValueError: An integer is required
"""
def filter_value( self, value, trans=None, other_values={} ):
try: return int( value )
except: raise ValueError( "An integer is required" )
def to_python( self, value, app ):
return int( value )
class FloatToolParameter( TextToolParameter ):
"""
Parameter that takes a real number value.
    >>> p = FloatToolParameter( None, XML( '<param name="blah" type="float" size="4" value="3.141592" />' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="text" name="blah" size="4" value="3.141592">
>>> type( p.filter_value( "36.1" ) )
<type 'float'>
>>> type( p.filter_value( "bleh" ) )
Traceback (most recent call last):
...
ValueError: A real number is required
"""
def filter_value( self, value, trans=None, other_values={} ):
try: return float( value )
except: raise ValueError( "A real number is required")
def to_python( self, value, app ):
return float( value )
class BooleanToolParameter( ToolParameter ):
"""
Parameter that takes one of two values.
>>> p = BooleanToolParameter( None, XML( '<param name="blah" type="boolean" checked="yes" truevalue="bulletproof vests" falsevalue="cellophane chests" />' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="checkbox" name="blah" value="true" checked><input type="hidden" name="blah" value="true">
>>> print p.filter_value( ["true","true"] )
bulletproof vests
>>> print p.filter_value( ["true"] )
cellophane chests
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
self.truevalue = elem.get( 'truevalue', 'true' )
self.falsevalue = elem.get( 'falsevalue', 'false' )
self.name = elem.get( 'name' )
self.checked = elem.get( 'checked' )
def get_html( self, trans=None, value=None, other_values={} ):
checked = self.checked
if value: checked = form_builder.CheckboxField.is_checked( value )
return form_builder.CheckboxField( self.name, checked ).get_html()
def filter_value( self, value, trans=None, other_values={} ):
if form_builder.CheckboxField.is_checked( value ):
return self.truevalue
else:
return self.falsevalue
def to_python( self, value, app ):
return ( value == 'True' )
class FileToolParameter( ToolParameter ):
"""
Parameter that takes an uploaded file as a value.
>>> p = FileToolParameter( None, XML( '<param name="blah" type="file"/>' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="file" name="blah">
"""
def __init__( self, tool, elem ):
"""
Example: C{<param name="bins" type="file" />}
"""
ToolParameter.__init__( self, tool, elem )
self.html = form_builder.FileField( elem.get( 'name') ).get_html()
def get_required_enctype( self ):
"""
File upload elements require the multipart/form-data encoding
"""
return "multipart/form-data"
def to_string( self, value, app ):
raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
raise Exception( "FileToolParameter cannot be persisted" )
class HiddenToolParameter( ToolParameter ):
"""
    Parameter whose value is fixed by the tool and rendered as a hidden form field.
FIXME: This seems hacky, parameters should only describe things the user
might change. It is used for 'initializing' the UCSC proxy tool
>>> p = HiddenToolParameter( None, XML( '<param name="blah" type="hidden" value="wax so rockin"/>' ) )
>>> print p.name
blah
>>> print p.get_html()
<input type="hidden" name="blah" value="wax so rockin">
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
self.name = elem.get( 'name' )
self.value = elem.get( 'value' )
self.html = form_builder.HiddenField( self.name, self.value ).get_html()
## This is clearly a HACK, parameters should only be used for things the user
## can change, there needs to be a different way to specify this. I'm leaving
## it for now to avoid breaking any tools.
class BaseURLToolParameter( ToolParameter ):
"""
    Returns a parameter whose value is the current server base url prepended
    to the configured value. Used in all redirects.
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
self.name = elem.get( 'name' )
self.value = elem.get( 'value', '' )
def get_html( self, trans=None, value=None, other_values={} ):
return form_builder.HiddenField( self.name, trans.request.base + self.value ).get_html()
class SelectToolParameter( ToolParameter ):
"""
    Parameter that takes on one (or many) of a specific set of values.
TODO: There should be an alternate display that allows single selects to be
displayed as radio buttons and multiple selects as a set of checkboxes
>>> p = SelectToolParameter( None, XML(
... '''
... <param name="blah" type="select">
... <option value="x">I am X</option>
... <option value="y" selected="true">I am Y</option>
... <option value="z">I am Z</option>
... </param>
... ''' ) )
>>> print p.name
blah
>>> print p.get_html()
<select name="blah">
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
</select>
>>> print p.get_html( value="z" )
<select name="blah">
<option value="x">I am X</option>
<option value="y">I am Y</option>
<option value="z" selected>I am Z</option>
</select>
>>> print p.filter_value( "y" )
y
>>> p = SelectToolParameter( None, XML(
... '''
... <param name="blah" type="select" multiple="true">
... <option value="x">I am X</option>
... <option value="y" selected="true">I am Y</option>
... <option value="z" selected="true">I am Z</option>
... </param>
... ''' ) )
>>> print p.name
blah
>>> print p.get_html()
<select name="blah" multiple>
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z" selected>I am Z</option>
</select>
>>> print p.get_html( value=["x","y"])
<select name="blah" multiple>
<option value="x" selected>I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
</select>
>>> print p.filter_value( ["y", "z"] )
y,z
>>> p = SelectToolParameter( None, XML(
... '''
... <param name="blah" type="select" multiple="true" display="checkboxes">
... <option value="x">I am X</option>
... <option value="y" selected="true">I am Y</option>
... <option value="z" selected="true">I am Z</option>
... </param>
... ''' ) )
>>> print p.name
blah
>>> print p.get_html()
<div><input type="checkbox" name="blah" value="x">I am X</div>
<div><input type="checkbox" name="blah" value="y" checked>I am Y</div>
<div><input type="checkbox" name="blah" value="z" checked>I am Z</div>
>>> print p.get_html( value=["x","y"])
<div><input type="checkbox" name="blah" value="x" checked>I am X</div>
<div><input type="checkbox" name="blah" value="y" checked>I am Y</div>
<div><input type="checkbox" name="blah" value="z">I am Z</div>
>>> print p.filter_value( ["y", "z"] )
y,z
"""
def __init__( self, tool, elem):
ToolParameter.__init__( self, tool, elem )
self.multiple = str_bool( elem.get( 'multiple', False ) )
self.display = elem.get( 'display', None )
self.separator = elem.get( 'separator', ',' )
self.legal_values = set()
self.dynamic_options = elem.get( "dynamic_options", None )
if self.dynamic_options is None:
self.options = list()
for index, option in enumerate( elem.findall("option") ):
value = option.get( "value" )
self.legal_values.add( value )
selected = ( option.get( "selected", None ) == "true" )
self.options.append( ( option.text, value, selected ) )
def get_html( self, trans=None, value=None, other_values={} ):
if value is not None:
if not isinstance( value, list ): value = [ value ]
field = form_builder.SelectField( self.name, self.multiple, self.display )
if self.dynamic_options:
options = eval( self.dynamic_options, self.tool.code_namespace, other_values )
else:
options = self.options
for text, optval, selected in options:
if value: selected = ( optval in value )
field.add_option( text, optval, selected )
return field.get_html()
def filter_value( self, value, trans=None, other_values={} ):
if self.dynamic_options:
legal_values = set( v for _, v, _ in eval( self.dynamic_options, self.tool.code_namespace, other_values ) )
else:
legal_values = self.legal_values
if isinstance( value, list ):
assert self.multiple, "Multiple values provided but parameter is not expecting multiple values"
rval = []
for v in value:
v = util.restore_text( v )
assert v in legal_values
rval.append( v )
return self.separator.join( rval )
else:
value = util.restore_text( value )
assert value in legal_values
return value
class DataToolParameter( ToolParameter ):
"""
    Parameter whose value is one (or many) dataset(s) selected from the current history.
TODO: There should be an alternate display that allows single selects to be
displayed as radio buttons and multiple selects as a set of checkboxes
>>> # Mock up a history (not connected to database)
>>> from galaxy.model import History, Dataset
>>> from cookbook.patterns import Bunch
>>> hist = History()
>>> hist.add_dataset( Dataset( id=1, extension='text' ) )
>>> hist.add_dataset( Dataset( id=2, extension='bed' ) )
>>> hist.add_dataset( Dataset( id=3, extension='fasta' ) )
>>> hist.add_dataset( Dataset( id=4, extension='png' ) )
>>> hist.add_dataset( Dataset( id=5, extension='interval' ) )
>>> p = DataToolParameter( None, XML( '<param name="blah" type="data" format="interval"/>' ) )
>>> print p.name
blah
>>> print p.get_html( trans=Bunch( history=hist ) )
<select name="blah">
<option value="2">2: Unnamed dataset</option>
<option value="5" selected>5: Unnamed dataset</option>
</select>
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
self.format = datatypes.get_datatype_by_extension( elem.get( 'format', 'data' ).lower() )
self.multiple = str_bool( elem.get( 'multiple', False ) )
self.optional = str_bool( elem.get( 'optional', False ) )
def get_html( self, trans=None, value=None, other_values={} ):
assert trans is not None, "DataToolParameter requires a trans"
history = trans.history
assert history is not None, "DataToolParameter requires a history"
if value is not None:
if type( value ) != list: value = [ value ]
field = form_builder.SelectField( self.name, self.multiple )
some_data = False
for data in history.datasets:
if isinstance( data.datatype, self.format.__class__ ) and not data.parent_id:
some_data = True
selected = ( value and ( data in value ) )
field.add_option( "%d: %s" % ( data.hid, data.name[:30] ), data.id, selected )
if some_data and value is None:
# Ensure that the last item is always selected
a, b, c = field.options[-1]; field.options[-1] = a, b, True
else:
# HACK: we should just disable the form or something
field.add_option( "no data has the proper type", '' )
if self.optional == True:
field.add_option( "Selection is Optional", 'None', True )
return field.get_html()
def filter_value( self, value, trans, other_values={} ):
if not value:
raise ValueError( "A data of the appropriate type is required" )
if value in [None, "None"]:
temp_data = trans.app.model.Dataset()
temp_data.state = temp_data.states.FAKE
return temp_data
if isinstance( value, list ):
return [ trans.app.model.Dataset.get( v ) for v in value ]
else:
return trans.app.model.Dataset.get( value )
def to_string( self, value, app ):
return value.id
def to_python( self, value, app ):
return app.model.Dataset.get( int( value ) )
class RawToolParameter( ToolParameter ):
"""
Completely nondescript parameter, HTML representation is provided as text
contents.
>>> p = RawToolParameter( None, XML(
... '''
... <param name="blah" type="raw">
... <![CDATA[<span id="$name">Some random stuff</span>]]>
... </param>
... ''' ) )
>>> print p.name
blah
>>> print p.get_html().strip()
<span id="blah">Some random stuff</span>
"""
def __init__( self, tool, elem ):
ToolParameter.__init__( self, tool, elem )
template = string.Template( elem.text )
self.html = template.substitute( self.__dict__ )
# class HistoryIDParameter( ToolParameter ):
# """
# Parameter that takes a name value, makes history.id available.
#
# FIXME: This is a hack (esp. if hidden params are a hack) but in order to
#     have the history accessible at the job level, it is necessary
# I also probably wrote this docstring test thing wrong.
#
# >>> from galaxy.model import History, Dataset
# >>> from cookbook.patterns import Bunch
# >>> hist = History( id=1 )
# >>> p = HistoryIDParameter( None, XML( '<param name="blah" type="history"/>' ) )
# >>> print p.name
# blah
# >>> html_string = '<input type="hidden" name="blah" value="%d">' % hist.id
# >>> assert p.get_html( trans=Bunch( history=hist ) ) == html_string
# """
# def __init__( self, tool, elem ):
# ToolParameter.__init__( self, tool, elem )
# self.name = elem.get('name')
# def get_html( self, trans, value=None, other_values={} ):
# assert trans.history is not None, "HistoryIDParameter requires a history"
# self.html = form_builder.HiddenField( self.name, trans.history.id ).get_html()
# return self.html
parameter_types = dict( text = TextToolParameter,
integer = IntegerToolParameter,
float = FloatToolParameter,
boolean = BooleanToolParameter,
select = SelectToolParameter,
hidden = HiddenToolParameter,
baseurl = BaseURLToolParameter,
file = FileToolParameter,
data = DataToolParameter,
raw = RawToolParameter )
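# Hypothetical sketch (not part of the original module): ToolParameter.build uses
# the parameter_types registry above to dispatch on the XML "type" attribute. The
# XML snippet and the None tool argument are assumptions for illustration only.
def _example_build_parameter():
    elem = XML( '<param name="cutoff" type="float" size="4" value="0.05"/>' )
    param = ToolParameter.build( None, elem )
    assert isinstance( param, FloatToolParameter )
    return param.filter_value( "0.25" )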
def get_suite():
"""Get unittest suite for this module"""
import doctest, sys
return doctest.DocTestSuite( sys.modules[__name__] )
def str_bool(in_str):
"""
    Return True only when the string equals 'true' (case-insensitive). This is
    needed because bool() of any non-empty string is True; the default is False.
"""
if str(in_str).lower() == 'true':
return True
return False | mit | -6,214,418,039,163,646,000 | 39.19596 | 162 | 0.587304 | false |
puttarajubr/commcare-hq | corehq/ex-submodules/couchforms/tests/test_dbaccessors.py | 1 | 2772 | import datetime
from django.test import TestCase
from couchforms.dbaccessors import get_forms_by_type, clear_forms_in_domain, \
get_number_of_forms_by_type, get_number_of_forms_of_all_types, \
get_form_ids_by_type, get_number_of_forms_all_domains_in_couch
from couchforms.models import XFormInstance, XFormError
class TestDBAccessors(TestCase):
@classmethod
def setUpClass(cls):
from casexml.apps.case.tests import delete_all_xforms
delete_all_xforms()
cls.domain = 'evelyn'
cls.now = datetime.datetime.utcnow()
cls.xforms = [
XFormInstance(_id='xform_1',
received_on=cls.now - datetime.timedelta(days=10)),
XFormInstance(_id='xform_2', received_on=cls.now)
]
cls.xform_errors = [XFormError(_id='xform_error_1')]
for form in cls.xforms + cls.xform_errors:
form.domain = cls.domain
form.save()
@classmethod
def tearDownClass(cls):
clear_forms_in_domain(cls.domain)
def test_get_forms_by_type_xforminstance(self):
forms = get_forms_by_type(self.domain, 'XFormInstance', limit=10)
self.assertEqual(len(forms), len(self.xforms))
self.assertEqual({form._id for form in forms},
{form._id for form in self.xforms})
for form in forms:
self.assertIsInstance(form, XFormInstance)
def test_get_forms_by_type_xformerror(self):
forms = get_forms_by_type(self.domain, 'XFormError', limit=10)
self.assertEqual(len(forms), len(self.xform_errors))
self.assertEqual({form._id for form in forms},
{form._id for form in self.xform_errors})
for form in forms:
self.assertIsInstance(form, XFormError)
def test_get_number_of_forms_by_type_xforminstance(self):
self.assertEqual(
get_number_of_forms_by_type(self.domain, 'XFormInstance'),
len(self.xforms)
)
def test_get_number_of_forms_by_type_xformerror(self):
self.assertEqual(
get_number_of_forms_by_type(self.domain, 'XFormError'),
len(self.xform_errors)
)
def test_get_number_of_forms_of_all_types(self):
self.assertEqual(
get_number_of_forms_of_all_types(self.domain),
len(self.xforms) + len(self.xform_errors)
)
def test_get_form_ids_by_type(self):
form_ids = get_form_ids_by_type(self.domain, 'XFormError')
self.assertEqual(form_ids, [form._id for form in self.xform_errors])
def test_get_number_of_forms_all_domains_in_couch(self):
self.assertEqual(
get_number_of_forms_all_domains_in_couch(),
len(self.xforms)
)
| bsd-3-clause | -2,153,954,313,831,242,500 | 36.459459 | 78 | 0.620491 | false |
ashmastaflash/don-bot | app/donlib/halo.py | 1 | 11070 | import cloudpassage
import os
import requests
from formatter import Formatter
from urlparse import urljoin
from utility import Utility as util
from halocelery.apputils import Utility as hc_util
class Halo(object):
"""This contains all Halo interaction logic
    Attributes:
session (cloudpassage.HaloSession): Halo session object
"""
def __init__(self, config, health_string, tasks_obj):
"""Initialization only instantiates the session object."""
self.session = cloudpassage.HaloSession(config.halo_api_key,
config.halo_api_secret_key,
api_host=config.halo_api_host,
api_port=config.halo_api_port,
integration_string=config.ua)
self.product_version = config.product_version
self.monitor_events = config.monitor_events
self.slack_channel = config.slack_channel
self.health_string = health_string
self.tasks = tasks_obj
self.flower_host = config.flower_host
self.config = config
return
def credentials_work(self):
"""Attempts to authenticate against Halo API"""
good = True
try:
self.session.authenticate_client()
except cloudpassage.CloudPassageAuthentication:
good = False
return good
@classmethod
def list_tasks_formatted(cls, flower_host):
"""Gets a formatted list of tasks from Flower"""
report = "Cortex Tasks:\n"
celery_url = urljoin(flower_host, "api/tasks")
try:
response = requests.get(celery_url)
result = response.json()
except (ValueError, requests.exceptions.ConnectionError) as e:
report += "Error: Unable to retrieve task list at this time."
            # Log the exception so that it is retained in the
            # container logs.
hc_util.log_stderr(e)
return report
try:
for task in result.items():
prefmt = {"id": task[0], "name": task[1]["name"],
"args": str(task[1]["args"]),
"kwargs": str(task[1]["kwargs"]),
"started": util.u_to_8601(task[1]["started"]),
"tstamp": util.u_to_8601(task[1]["timestamp"]),
"state": task[1]["state"],
"exception": str(task[1]["exception"])}
report += Formatter.format_item(prefmt, "task")
except AttributeError as e: # Empty set will throw AttributeError
hc_util.log_stderr("Halo.list_tasks_formatted(): AttributeError! %s" % e) # NOQA
pass
return report
def interrogate(self, query_type, target):
"""Entrypoint for report generation
This method is where you start for generating reports. When you add
a new report this is the second place you configure it, right after
you set it up in Lexicals.get_message_type().
Returns a finished report, as a string.
"""
report = "I didn't understand your request. Try asking for help!\n"
if query_type == "server_report":
report = self.tasks.report_server_formatted.delay(target)
elif query_type == "group_report":
report = self.tasks.report_group_formatted.delay(target)
elif query_type == "ip_report":
report = self.get_ip_report(target)
elif query_type == "all_servers":
report = self.tasks.list_all_servers_formatted.delay()
elif query_type == "all_groups":
report = self.tasks.list_all_groups_formatted.delay()
elif query_type == "group_firewall_report":
img_tag = os.getenv('FIREWALL_GRAPH_VERSION', 'v0.2')
image = "docker.io/halotools/firewall-graph:%s" % img_tag
env_literal = {"TARGET": target}
env_expand = {"HALO_API_KEY": "HALO_API_KEY",
"HALO_API_SECRET_KEY": "HALO_API_SECRET_KEY",
"HALO_API_HOSTNAME": "HALO_API_HOSTNAME",
"HTTPS_PROXY": "HTTPS_PROXY"}
report = self.tasks.generic_containerized_task.delay(image,
env_literal,
env_expand,
False)
elif query_type == "servers_in_group":
report = self.tasks.servers_in_group_formatted.delay(target)
elif query_type == "servers_by_cve":
report = self.tasks.search_server_by_cve(target)
elif query_type == "ec2_halo_footprint_csv":
img_tag = os.getenv('EC2_HALO_DELTA_VERSION', 'v0.2')
image = "docker.io/halotools/ec2-halo-delta:%s" % img_tag
env_literal = {"OUTPUT_FORMAT": "csv"}
# Set optional args
optional_fields = ["AWS_ROLE_NAME", "AWS_ACCOUNT_NUMBERS"]
for field in optional_fields:
if os.getenv(field, "") != "":
env_literal[field] = os.getenv(field)
env_expand = {"HALO_API_KEY": "HALO_API_KEY",
"HALO_API_SECRET_KEY": "HALO_API_SECRET_KEY",
"HALO_API_HOSTNAME": "HALO_API_HOSTNAME",
"AWS_ACCESS_KEY_ID": "AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY": "AWS_SECRET_ACCESS_KEY",
"HTTPS_PROXY": "HTTPS_PROXY"}
report = self.tasks.generic_containerized_task.delay(image,
env_literal,
env_expand,
False)
elif query_type == "tasks":
report = self.list_tasks_formatted(self.flower_host)
elif query_type == "selfie":
report = Halo.take_selfie()
elif query_type == "help":
report = Halo.help_text()
elif query_type == "version":
report = Halo.version_info(self.product_version) + "\n"
elif query_type == "config":
report = self.running_config()
elif query_type == "health":
report = self.health_string
        return report
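    # Sketch (illustrative only): a new report is wired in by adding a
    # recognizer in Lexicals.get_message_type() and a matching branch above.
    # The query type and task name below are hypothetical examples, not part
    # of the original bot:
    #
    #   elif query_type == "stale_servers":
    #       report = self.tasks.report_stale_servers_formatted.delay(target)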
@classmethod
def help_text(cls):
"""This is the help output"""
ret = ("I currently answer these burning questions, " +
"but only when you address me by name:\n" +
"\"tell me about server `(server_id|server_name)`\"\n" +
"\"tell me about ip `ip_address`\"\n" +
"\"tell me about group `(group_id|group_name)`\"\n" +
"\"list all servers\"\n" +
"\"list server groups\"\n" +
"\"servers with CVE `cve_id`\"\n" +
"\"servers in group `(group_id|group_name)`\"\n" +
"\"group firewall `(group_id|group_name)`\"\n" +
"\"ec2 halo footprint csv\"\n" +
"\"version\"\n" +
"\"tasks\"\n" +
"\"config\"\n")
return ret
@classmethod
def version_info(cls, product_version):
return "v%s" % product_version
def running_config(self):
if os.getenv("NOSLACK"):
return "Slack integration is disabled. CLI access only."
if self.monitor_events == 'yes':
events = "Monitoring Halo events"
conf = ("IP-Blocker Configuration\n" +
"------------------------\n" +
"IPBLOCKER_ENABLED=%s\n" % (self.config.ipblocker_enable) +
"IPBLOCKER_IP_ZONE_NAME=%s\n" % (self.config.ip_zone_name) + # NOQA
"IPBLOCKER_TRIGGER_EVENTS=%s\n" % (self.config.ipblocker_trigger_events) + # NOQA
"IPBLOCKER_TRIGGER_ONLY_ON_CRITICAL=%s\n\n" % (self.config.ipblocker_trigger_only_on_critical) + # NOQA
"Quarantine Configuration\n" +
"------------------------\n" +
"QUARANTINE_ENABLED=%s\n" % (self.config.quarantine_enable) + # NOQA
"QUARANTINE_TRIGGER_GROUP_NAMES=%s\n" % (self.config.quarantine_trigger_group_names) + # NOQA
"QUARANTINE_TRIGGER_EVENTS=%s\n" % (self.config.quarantine_trigger_events) + # NOQA
"QUARANTINE_TRIGGER_ONLY_ON_CRITICAL=%s\n" % (self.config.quarantine_trigger_only_on_critical) + # NOQA
"QUARANTINE_GROUP_NAME=%s\n\n" % (self.config.quarantine_group_name) + # NOQA
"Event Suppression Configuration\n" +
"-------------------------------\n" +
"SUPPRESS_EVENTS_IN_CHANNEL=%s\n" % (self.config.suppress_events)) # NOQA
        else:
            events = "NOT monitoring Halo events"
            conf = ""
retval = "%s\nHalo channel: #%s\n%s\n" % (events,
self.slack_channel,
conf)
return retval
def get_ip_report(self, target):
"""This wraps the report_server_by_id by accepting IP as target"""
servers = cloudpassage.Server(self.session)
report = "Unknown IP: \n" + target
try:
s_id = servers.list_all(connecting_ip_address=target)[0]["id"]
report = self.tasks.report_server_formatted(s_id)
except:
pass
return report
def quarantine_server(self, event):
server_id = event["server_id"]
quarantine_group_name = event["quarantine_group"]
hc_util.log_stdout("Quarantine %s to group %s" % (server_id,
quarantine_group_name)) # NOQA
return self.tasks.quarantine_server.delay(server_id,
quarantine_group_name)
def add_ip_to_blocklist(self, ip_address, block_list_name):
# We trigger a removal job for one hour out.
hc_util.log_stdout("Add IP %s to blocklist %s" % (ip_address,
block_list_name))
self.tasks.remove_ip_from_list.apply_async(args=[ip_address,
block_list_name],
countdown=3600)
return self.tasks.add_ip_to_list.delay(ip_address, block_list_name)
@classmethod
def take_selfie(cls):
selfie_file_name = "selfie.txt"
heredir = os.path.abspath(os.path.dirname(__file__))
selfie_full_path = os.path.join(heredir, selfie_file_name)
with open(selfie_full_path, 'r') as s_file:
selfie = "```" + s_file.read() + "```"
return selfie
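# Usage sketch (illustrative, not part of the original module). Assumes a
# config object exposing the attributes read in __init__ (halo_api_key,
# halo_api_secret_key, api_host, api_port, ua, ...) and a Celery tasks
# module; both objects below are hypothetical stand-ins.
#
#   halo = Halo(config, health_string="OK", tasks_obj=tasks)
#   if halo.credentials_work():
#       print(halo.interrogate("version", target=None))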
| bsd-3-clause | 3,402,476,530,501,787,600 | 47.76652 | 124 | 0.512285 | false |
toobaz/pandas | ci/print_skipped.py | 1 | 1409 | #!/usr/bin/env python
import os
import sys
import math
import xml.etree.ElementTree as et
def parse_results(filename):
tree = et.parse(filename)
root = tree.getroot()
skipped = []
current_class = ""
i = 1
assert i - 1 == len(skipped)
for el in root.findall("testcase"):
cn = el.attrib["classname"]
for sk in el.findall("skipped"):
old_class = current_class
current_class = cn
name = "{classname}.{name}".format(
classname=current_class, name=el.attrib["name"]
)
msg = sk.attrib["message"]
out = ""
if old_class != current_class:
ndigits = int(math.log(i, 10) + 1)
# 4 for : + space + # + space
out += "-" * (len(name + msg) + 4 + ndigits) + "\n"
out += "#{i} {name}: {msg}".format(i=i, name=name, msg=msg)
skipped.append(out)
i += 1
assert i - 1 == len(skipped)
# assert len(skipped) == int(root.attrib['skip'])
return "\n".join(skipped)
def main():
test_files = ["test-data-single.xml", "test-data-multiple.xml", "test-data.xml"]
print("SKIPPED TESTS:")
for fn in test_files:
if os.path.isfile(fn):
print(parse_results(fn))
return 0
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause | -8,257,308,143,200,926,000 | 26.096154 | 84 | 0.515259 | false |
openstack-infra/shade | shade/tests/unit/test_floating_ip_neutron.py | 1 | 41101 | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_floating_ip_neutron
----------------------------------
Tests Floating IP resource methods for Neutron
"""
import copy
import datetime
import munch
from shade import exc
from shade.tests import fakes
from shade.tests.unit import base
class TestFloatingIP(base.RequestsMockTestCase):
mock_floating_ip_list_rep = {
'floatingips': [
{
'router_id': 'd23abc8d-2991-4a55-ba98-2aaea84cc72f',
'tenant_id': '4969c491a3c74ee4af974e6d800c62de',
'floating_network_id': '376da547-b977-4cfe-9cba-275c80debf57',
'fixed_ip_address': '10.0.0.4',
'floating_ip_address': '172.24.4.229',
'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac',
'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda7',
'status': 'ACTIVE'
},
{
'router_id': None,
'tenant_id': '4969c491a3c74ee4af974e6d800c62de',
'floating_network_id': '376da547-b977-4cfe-9cba-275c80debf57',
'fixed_ip_address': None,
'floating_ip_address': '203.0.113.30',
'port_id': None,
'id': '61cea855-49cb-4846-997d-801b70c71bdd',
'status': 'DOWN'
}
]
}
mock_floating_ip_new_rep = {
'floatingip': {
'fixed_ip_address': '10.0.0.4',
'floating_ip_address': '172.24.4.229',
'floating_network_id': 'my-network-id',
'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8',
'port_id': None,
'router_id': None,
'status': 'ACTIVE',
'tenant_id': '4969c491a3c74ee4af974e6d800c62df'
}
}
mock_floating_ip_port_rep = {
'floatingip': {
'fixed_ip_address': '10.0.0.4',
'floating_ip_address': '172.24.4.229',
'floating_network_id': 'my-network-id',
'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8',
'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac',
'router_id': None,
'status': 'ACTIVE',
'tenant_id': '4969c491a3c74ee4af974e6d800c62df'
}
}
mock_get_network_rep = {
'status': 'ACTIVE',
'subnets': [
'54d6f61d-db07-451c-9ab3-b9609b6b6f0b'
],
'name': 'my-network',
'provider:physical_network': None,
'admin_state_up': True,
'tenant_id': '4fd44f30292945e481c7b8a0c8908869',
'provider:network_type': 'local',
'router:external': True,
'shared': True,
'id': 'my-network-id',
'provider:segmentation_id': None
}
mock_search_ports_rep = [
{
'status': 'ACTIVE',
'binding:host_id': 'devstack',
'name': 'first-port',
'created_at': datetime.datetime.now().isoformat(),
'allowed_address_pairs': [],
'admin_state_up': True,
'network_id': '70c1db1f-b701-45bd-96e0-a313ee3430b3',
'tenant_id': '',
'extra_dhcp_opts': [],
'binding:vif_details': {
'port_filter': True,
'ovs_hybrid_plug': True
},
'binding:vif_type': 'ovs',
'device_owner': 'compute:None',
'mac_address': 'fa:16:3e:58:42:ed',
'binding:profile': {},
'binding:vnic_type': 'normal',
'fixed_ips': [
{
'subnet_id': '008ba151-0b8c-4a67-98b5-0d2b87666062',
'ip_address': u'172.24.4.2'
}
],
'id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac',
'security_groups': [],
'device_id': 'server-id'
}
]
def assertAreInstances(self, elements, elem_type):
for e in elements:
self.assertIsInstance(e, elem_type)
def setUp(self):
super(TestFloatingIP, self).setUp()
self.fake_server = fakes.make_fake_server(
'server-id', '', 'ACTIVE',
addresses={u'test_pnztt_net': [{
u'OS-EXT-IPS:type': u'fixed',
u'addr': '192.0.2.129',
u'version': 4,
u'OS-EXT-IPS-MAC:mac_addr':
u'fa:16:3e:ae:7d:42'}]})
self.floating_ip = self.cloud._normalize_floating_ips(
self.mock_floating_ip_list_rep['floatingips'])[0]
def test_float_no_status(self):
floating_ips = [
{
'fixed_ip_address': '10.0.0.4',
'floating_ip_address': '172.24.4.229',
'floating_network_id': 'my-network-id',
'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8',
'port_id': None,
'router_id': None,
'tenant_id': '4969c491a3c74ee4af974e6d800c62df'
}
]
normalized = self.cloud._normalize_floating_ips(floating_ips)
self.assertEqual('UNKNOWN', normalized[0]['status'])
def test_list_floating_ips(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_list_rep)])
floating_ips = self.cloud.list_floating_ips()
self.assertIsInstance(floating_ips, list)
self.assertAreInstances(floating_ips, dict)
self.assertEqual(2, len(floating_ips))
self.assert_calls()
def test_list_floating_ips_with_filters(self):
self.register_uris([
dict(method='GET',
uri=('https://network.example.com/v2.0/floatingips.json?'
'Foo=42'),
json={'floatingips': []})])
self.cloud.list_floating_ips(filters={'Foo': 42})
self.assert_calls()
def test_search_floating_ips(self):
self.register_uris([
dict(method='GET',
uri=('https://network.example.com/v2.0/floatingips.json'),
json=self.mock_floating_ip_list_rep)])
floating_ips = self.cloud.search_floating_ips(
filters={'attached': False})
self.assertIsInstance(floating_ips, list)
self.assertAreInstances(floating_ips, dict)
self.assertEqual(1, len(floating_ips))
self.assert_calls()
def test_get_floating_ip(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_list_rep)])
floating_ip = self.cloud.get_floating_ip(
id='2f245a7b-796b-4f26-9cf9-9e82d248fda7')
self.assertIsInstance(floating_ip, dict)
self.assertEqual('172.24.4.229', floating_ip['floating_ip_address'])
self.assertEqual(
self.mock_floating_ip_list_rep['floatingips'][0]['tenant_id'],
floating_ip['project_id']
)
self.assertEqual(
self.mock_floating_ip_list_rep['floatingips'][0]['tenant_id'],
floating_ip['tenant_id']
)
self.assertIn('location', floating_ip)
self.assert_calls()
def test_get_floating_ip_not_found(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_list_rep)])
floating_ip = self.cloud.get_floating_ip(id='non-existent')
self.assertIsNone(floating_ip)
self.assert_calls()
def test_get_floating_ip_by_id(self):
fid = self.mock_floating_ip_new_rep['floatingip']['id']
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/floatingips/'
'{id}'.format(id=fid),
json=self.mock_floating_ip_new_rep)])
floating_ip = self.cloud.get_floating_ip_by_id(id=fid)
self.assertIsInstance(floating_ip, dict)
self.assertEqual('172.24.4.229', floating_ip['floating_ip_address'])
self.assertEqual(
self.mock_floating_ip_new_rep['floatingip']['tenant_id'],
floating_ip['project_id']
)
self.assertEqual(
self.mock_floating_ip_new_rep['floatingip']['tenant_id'],
floating_ip['tenant_id']
)
self.assertIn('location', floating_ip)
self.assert_calls()
def test_create_floating_ip(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='POST',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_new_rep,
validate=dict(
json={'floatingip': {
'floating_network_id': 'my-network-id'}}))
])
ip = self.cloud.create_floating_ip(network='my-network')
self.assertEqual(
self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'],
ip['floating_ip_address'])
self.assert_calls()
def test_create_floating_ip_port_bad_response(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='POST',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_new_rep,
validate=dict(
json={'floatingip': {
'floating_network_id': 'my-network-id',
'port_id': u'ce705c24-c1ef-408a-bda3-7bbd946164ab'}}))
])
# Fails because we requested a port and the returned FIP has no port
self.assertRaises(
exc.OpenStackCloudException,
self.cloud.create_floating_ip,
network='my-network', port='ce705c24-c1ef-408a-bda3-7bbd946164ab')
self.assert_calls()
def test_create_floating_ip_port(self):
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='POST',
uri='https://network.example.com/v2.0/floatingips.json',
json=self.mock_floating_ip_port_rep,
validate=dict(
json={'floatingip': {
'floating_network_id': 'my-network-id',
'port_id': u'ce705c24-c1ef-408a-bda3-7bbd946164ac'}}))
])
ip = self.cloud.create_floating_ip(
network='my-network', port='ce705c24-c1ef-408a-bda3-7bbd946164ac')
self.assertEqual(
self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'],
ip['floating_ip_address'])
self.assert_calls()
def test_neutron_available_floating_ips(self):
"""
Test without specifying a network name.
"""
fips_mock_uri = 'https://network.example.com/v2.0/floatingips.json'
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri='https://network.example.com/v2.0/subnets.json',
json={'subnets': []}),
dict(method='GET', uri=fips_mock_uri, json={'floatingips': []}),
dict(method='POST', uri=fips_mock_uri,
json=self.mock_floating_ip_new_rep,
validate=dict(json={
'floatingip': {
'floating_network_id': self.mock_get_network_rep['id']
}}))
])
# Test if first network is selected if no network is given
self.cloud._neutron_available_floating_ips()
self.assert_calls()
def test_neutron_available_floating_ips_network(self):
"""
        Test when specifying a network name.
"""
fips_mock_uri = 'https://network.example.com/v2.0/floatingips.json'
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri='https://network.example.com/v2.0/subnets.json',
json={'subnets': []}),
dict(method='GET', uri=fips_mock_uri, json={'floatingips': []}),
dict(method='POST', uri=fips_mock_uri,
json=self.mock_floating_ip_new_rep,
validate=dict(json={
'floatingip': {
'floating_network_id': self.mock_get_network_rep['id']
}}))
])
        # Test that the named network is used when one is given
self.cloud._neutron_available_floating_ips(
network=self.mock_get_network_rep['name']
)
self.assert_calls()
def test_neutron_available_floating_ips_invalid_network(self):
"""
Test with an invalid network name.
"""
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri='https://network.example.com/v2.0/subnets.json',
json={'subnets': []})
])
self.assertRaises(
exc.OpenStackCloudException,
self.cloud._neutron_available_floating_ips,
network='INVALID')
self.assert_calls()
def test_auto_ip_pool_no_reuse(self):
# payloads taken from citycloud
self.register_uris([
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={"networks": [{
"status": "ACTIVE",
"subnets": [
"df3e17fa-a4b2-47ae-9015-bc93eb076ba2",
"6b0c3dc9-b0b8-4d87-976a-7f2ebf13e7ec",
"fc541f48-fc7f-48c0-a063-18de6ee7bdd7"],
"availability_zone_hints": [],
"availability_zones": ["nova"],
"name": "ext-net",
"admin_state_up": True,
"tenant_id": "a564613210ee43708b8a7fc6274ebd63",
"tags": [],
"ipv6_address_scope": "9f03124f-89af-483a-b6fd-10f08079db4d", # noqa
"mtu": 0,
"is_default": False,
"router:external": True,
"ipv4_address_scope": None,
"shared": False,
"id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf",
"description": None
}, {
"status": "ACTIVE",
"subnets": ["f0ad1df5-53ee-473f-b86b-3604ea5591e9"],
"availability_zone_hints": [],
"availability_zones": ["nova"],
"name": "private",
"admin_state_up": True,
"tenant_id": "65222a4d09ea4c68934fa1028c77f394",
"created_at": "2016-10-22T13:46:26",
"tags": [],
"updated_at": "2016-10-22T13:46:26",
"ipv6_address_scope": None,
"router:external": False,
"ipv4_address_scope": None,
"shared": False,
"mtu": 1450,
"id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f",
"description": ""
}]}),
dict(method='GET',
uri='https://network.example.com/v2.0/ports.json'
'?device_id=f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7',
json={"ports": [{
"status": "ACTIVE",
"created_at": "2017-02-06T20:59:45",
"description": "",
"allowed_address_pairs": [],
"admin_state_up": True,
"network_id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f",
"dns_name": None,
"extra_dhcp_opts": [],
"mac_address": "fa:16:3e:e8:7f:03",
"updated_at": "2017-02-06T20:59:49",
"name": "",
"device_owner": "compute:None",
"tenant_id": "65222a4d09ea4c68934fa1028c77f394",
"binding:vnic_type": "normal",
"fixed_ips": [{
"subnet_id": "f0ad1df5-53ee-473f-b86b-3604ea5591e9",
"ip_address": "10.4.0.16"}],
"id": "a767944e-057a-47d1-a669-824a21b8fb7b",
"security_groups": [
"9fb5ba44-5c46-4357-8e60-8b55526cab54"],
"device_id": "f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7",
}]}),
dict(method='POST',
uri='https://network.example.com/v2.0/floatingips.json',
json={"floatingip": {
"router_id": "9de9c787-8f89-4a53-8468-a5533d6d7fd1",
"status": "DOWN",
"description": "",
"dns_domain": "",
"floating_network_id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", # noqa
"fixed_ip_address": "10.4.0.16",
"floating_ip_address": "89.40.216.153",
"port_id": "a767944e-057a-47d1-a669-824a21b8fb7b",
"id": "e69179dc-a904-4c9a-a4c9-891e2ecb984c",
"dns_name": "",
"tenant_id": "65222a4d09ea4c68934fa1028c77f394"
}},
validate=dict(json={"floatingip": {
"floating_network_id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", # noqa
"fixed_ip_address": "10.4.0.16",
"port_id": "a767944e-057a-47d1-a669-824a21b8fb7b",
}})),
dict(method='GET',
uri='{endpoint}/servers/detail'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={"servers": [{
"status": "ACTIVE",
"updated": "2017-02-06T20:59:49Z",
"addresses": {
"private": [{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03",
"version": 4,
"addr": "10.4.0.16",
"OS-EXT-IPS:type": "fixed"
}, {
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03",
"version": 4,
"addr": "89.40.216.153",
"OS-EXT-IPS:type": "floating"
}]},
"key_name": None,
"image": {"id": "95e4c449-8abf-486e-97d9-dc3f82417d2d"},
"OS-EXT-STS:task_state": None,
"OS-EXT-STS:vm_state": "active",
"OS-SRV-USG:launched_at": "2017-02-06T20:59:48.000000",
"flavor": {"id": "2186bd79-a05e-4953-9dde-ddefb63c88d4"},
"id": "f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7",
"security_groups": [{"name": "default"}],
"OS-SRV-USG:terminated_at": None,
"OS-EXT-AZ:availability_zone": "nova",
"user_id": "c17534835f8f42bf98fc367e0bf35e09",
"name": "testmt",
"created": "2017-02-06T20:59:44Z",
"tenant_id": "65222a4d09ea4c68934fa1028c77f394",
"OS-DCF:diskConfig": "MANUAL",
"os-extended-volumes:volumes_attached": [],
"accessIPv4": "",
"accessIPv6": "",
"progress": 0,
"OS-EXT-STS:power_state": 1,
"config_drive": "",
"metadata": {}
}]}),
dict(method='GET',
uri='https://network.example.com/v2.0/networks.json',
json={"networks": [{
"status": "ACTIVE",
"subnets": [
"df3e17fa-a4b2-47ae-9015-bc93eb076ba2",
"6b0c3dc9-b0b8-4d87-976a-7f2ebf13e7ec",
"fc541f48-fc7f-48c0-a063-18de6ee7bdd7"],
"availability_zone_hints": [],
"availability_zones": ["nova"],
"name": "ext-net",
"admin_state_up": True,
"tenant_id": "a564613210ee43708b8a7fc6274ebd63",
"tags": [],
"ipv6_address_scope": "9f03124f-89af-483a-b6fd-10f08079db4d", # noqa
"mtu": 0,
"is_default": False,
"router:external": True,
"ipv4_address_scope": None,
"shared": False,
"id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf",
"description": None
}, {
"status": "ACTIVE",
"subnets": ["f0ad1df5-53ee-473f-b86b-3604ea5591e9"],
"availability_zone_hints": [],
"availability_zones": ["nova"],
"name": "private",
"admin_state_up": True,
"tenant_id": "65222a4d09ea4c68934fa1028c77f394",
"created_at": "2016-10-22T13:46:26",
"tags": [],
"updated_at": "2016-10-22T13:46:26",
"ipv6_address_scope": None,
"router:external": False,
"ipv4_address_scope": None,
"shared": False,
"mtu": 1450,
"id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f",
"description": ""
}]}),
dict(method='GET',
uri='https://network.example.com/v2.0/subnets.json',
json={"subnets": [{
"description": "",
"enable_dhcp": True,
"network_id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f",
"tenant_id": "65222a4d09ea4c68934fa1028c77f394",
"created_at": "2016-10-22T13:46:26",
"dns_nameservers": [
"89.36.90.101",
"89.36.90.102"],
"updated_at": "2016-10-22T13:46:26",
"gateway_ip": "10.4.0.1",
"ipv6_ra_mode": None,
"allocation_pools": [{
"start": "10.4.0.2",
"end": "10.4.0.200"}],
"host_routes": [],
"ip_version": 4,
"ipv6_address_mode": None,
"cidr": "10.4.0.0/24",
"id": "f0ad1df5-53ee-473f-b86b-3604ea5591e9",
"subnetpool_id": None,
"name": "private-subnet-ipv4",
}]})])
self.cloud.add_ips_to_server(
munch.Munch(
id='f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7',
addresses={
"private": [{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03",
"version": 4,
"addr": "10.4.0.16",
"OS-EXT-IPS:type": "fixed"
}]}),
ip_pool='ext-net', reuse=False)
self.assert_calls()
def test_available_floating_ip_new(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'networks.json']),
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'subnets.json']),
json={'subnets': []}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': []}),
dict(method='POST',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
validate=dict(
json={'floatingip': {
'floating_network_id': 'my-network-id'}}),
json=self.mock_floating_ip_new_rep)
])
ip = self.cloud.available_floating_ip(network='my-network')
self.assertEqual(
self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'],
ip['floating_ip_address'])
self.assert_calls()
def test_delete_floating_ip_existing(self):
fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7'
fake_fip = {
'id': fip_id,
'floating_ip_address': '172.99.106.167',
'status': 'ACTIVE',
}
self.register_uris([
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': []}),
])
self.assertTrue(
self.cloud.delete_floating_ip(floating_ip_id=fip_id, retry=2))
self.assert_calls()
def test_delete_floating_ip_existing_down(self):
fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7'
fake_fip = {
'id': fip_id,
'floating_ip_address': '172.99.106.167',
'status': 'ACTIVE',
}
down_fip = {
'id': fip_id,
'floating_ip_address': '172.99.106.167',
'status': 'DOWN',
}
self.register_uris([
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [down_fip]}),
])
self.assertTrue(
self.cloud.delete_floating_ip(floating_ip_id=fip_id, retry=2))
self.assert_calls()
def test_delete_floating_ip_existing_no_delete(self):
fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7'
fake_fip = {
'id': fip_id,
'floating_ip_address': '172.99.106.167',
'status': 'ACTIVE',
}
self.register_uris([
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fake_fip]}),
])
self.assertRaises(
exc.OpenStackCloudException,
self.cloud.delete_floating_ip,
floating_ip_id=fip_id, retry=2)
self.assert_calls()
def test_delete_floating_ip_not_found(self):
self.register_uris([
dict(method='DELETE',
uri=('https://network.example.com/v2.0/floatingips/'
'a-wild-id-appears.json'),
status_code=404)])
ret = self.cloud.delete_floating_ip(
floating_ip_id='a-wild-id-appears')
self.assertFalse(ret)
self.assert_calls()
def test_attach_ip_to_server(self):
fip = self.mock_floating_ip_list_rep['floatingips'][0]
device_id = self.fake_server['id']
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json'],
qs_elements=["device_id={0}".format(device_id)]),
json={'ports': self.mock_search_ports_rep}),
dict(method='PUT',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(
fip['id'])]),
json={'floatingip': fip},
validate=dict(
json={'floatingip': {
'port_id': self.mock_search_ports_rep[0]['id'],
'fixed_ip_address': self.mock_search_ports_rep[0][
'fixed_ips'][0]['ip_address']}})),
])
self.cloud._attach_ip_to_server(
server=self.fake_server,
floating_ip=self.floating_ip)
self.assert_calls()
def test_add_ip_refresh_timeout(self):
device_id = self.fake_server['id']
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'networks.json']),
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri='https://network.example.com/v2.0/subnets.json',
json={'subnets': []}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json'],
qs_elements=["device_id={0}".format(device_id)]),
json={'ports': self.mock_search_ports_rep}),
dict(method='POST',
uri='https://network.example.com/v2.0/floatingips.json',
json={'floatingip': self.floating_ip},
validate=dict(
json={'floatingip': {
'floating_network_id': 'my-network-id',
'fixed_ip_address': self.mock_search_ports_rep[0][
'fixed_ips'][0]['ip_address'],
'port_id': self.mock_search_ports_rep[0]['id']}})),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [self.floating_ip]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(
self.floating_ip['id'])]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': []}),
])
self.assertRaises(
exc.OpenStackCloudTimeout,
self.cloud._add_auto_ip,
server=self.fake_server,
wait=True, timeout=0.01,
reuse=False)
self.assert_calls()
def test_detach_ip_from_server(self):
fip = self.mock_floating_ip_new_rep['floatingip']
attached_fip = copy.copy(fip)
attached_fip['port_id'] = 'server-port-id'
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [attached_fip]}),
dict(method='PUT',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(
fip['id'])]),
json={'floatingip': fip},
validate=dict(
json={'floatingip': {'port_id': None}}))
])
self.cloud.detach_ip_from_server(
server_id='server-id',
floating_ip_id=fip['id'])
self.assert_calls()
def test_add_ip_from_pool(self):
network = self.mock_get_network_rep
fip = self.mock_floating_ip_new_rep['floatingip']
fixed_ip = self.mock_search_ports_rep[0]['fixed_ips'][0]['ip_address']
port_id = self.mock_search_ports_rep[0]['id']
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'networks.json']),
json={'networks': [network]}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'subnets.json']),
json={'subnets': []}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [fip]}),
dict(method='POST',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingip': fip},
validate=dict(
json={'floatingip': {
'floating_network_id': network['id']}})),
dict(method="GET",
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json'],
qs_elements=[
"device_id={0}".format(self.fake_server['id'])]),
json={'ports': self.mock_search_ports_rep}),
dict(method='PUT',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(
fip['id'])]),
json={'floatingip': fip},
validate=dict(
json={'floatingip': {
'fixed_ip_address': fixed_ip,
'port_id': port_id}})),
])
server = self.cloud._add_ip_from_pool(
server=self.fake_server,
network=network['id'],
fixed_address=fixed_ip)
self.assertEqual(server, self.fake_server)
self.assert_calls()
def test_cleanup_floating_ips(self):
floating_ips = [{
"id": "this-is-a-floating-ip-id",
"fixed_ip_address": None,
"internal_network": None,
"floating_ip_address": "203.0.113.29",
"network": "this-is-a-net-or-pool-id",
"port_id": None,
"status": "ACTIVE"
}, {
"id": "this-is-an-attached-floating-ip-id",
"fixed_ip_address": None,
"internal_network": None,
"floating_ip_address": "203.0.113.29",
"network": "this-is-a-net-or-pool-id",
"attached": True,
"port_id": "this-is-id-of-port-with-fip",
"status": "ACTIVE"
}]
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': floating_ips}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'floatingips/{0}.json'.format(
floating_ips[0]['id'])]),
json={}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingips': [floating_ips[1]]}),
])
self.cloud.delete_unattached_floating_ips()
self.assert_calls()
def test_create_floating_ip_no_port(self):
server_port = {
"id": "port-id",
"device_id": "some-server",
'created_at': datetime.datetime.now().isoformat(),
'fixed_ips': [
{
'subnet_id': 'subnet-id',
'ip_address': '172.24.4.2'
}
],
}
floating_ip = {
"id": "floating-ip-id",
"port_id": None
}
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'networks.json']),
json={'networks': [self.mock_get_network_rep]}),
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'subnets.json']),
json={'subnets': []}),
dict(method="GET",
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json'],
qs_elements=['device_id=some-server']),
json={'ports': [server_port]}),
dict(method='POST',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'floatingips.json']),
json={'floatingip': floating_ip})
])
self.assertRaises(
exc.OpenStackCloudException,
self.cloud._neutron_create_floating_ip,
server=dict(id='some-server'))
self.assert_calls()
| apache-2.0 | -3,525,956,898,842,083,300 | 40.101 | 91 | 0.467069 | false |
bblais/plasticity | setup.py | 1 | 2798 | # this is from https://github.com/cython/cython/wiki/PackageHierarchy
import sys, os, stat, subprocess
from distutils.core import setup
from distutils.extension import Extension

# we'd better have Cython installed, or it's a no-go
# (import it inside the try block so a missing Cython produces the friendly
# message below instead of an immediate ImportError)
try:
    from Cython.Distutils import build_ext
except ImportError:
print("You don't seem to have Cython installed. Please get a")
print("copy from www.cython.org and install it")
sys.exit(1)
import numpy
def get_version(package):
d={}
version_line=''
with open('%s/version.py' % package) as fid:
for line in fid:
if line.startswith('version='):
version_line=line
print(version_line)
exec(version_line,d)
return d['version']
# scan the directory for extension files, converting
# them to extension names in dotted notation
def scandir(dir, files=[]):
for file in os.listdir(dir):
path = os.path.join(dir, file)
if os.path.isfile(path) and path.endswith(".pyx"):
files.append(path.replace(os.path.sep, ".")[:-4])
elif os.path.isdir(path):
scandir(path, files)
return files
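# Example of the dotted-notation conversion performed by scandir() above:
#   "plasticity/dialogs/foo.pyx" -> "plasticity.dialogs.foo"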
def cleanc(dir):
for file in os.listdir(dir):
path = os.path.join(dir, file)
if os.path.isfile(path) and path.endswith(".pyx"):
base,ext=os.path.splitext(path)
cpath=base+'.c'
if os.path.isfile(cpath):
os.remove(cpath)
print("~~",cpath)
elif os.path.isdir(path):
cleanc(path)
# generate an Extension object from its dotted name
def makeExtension(extName):
extPath = extName.replace(".", os.path.sep)+".pyx"
folder=extName.split(".")[0]
return Extension(
extName,
[extPath,'plasticity/randomkit.c'],
include_dirs = [numpy.get_include(), ".", "%s/" % folder], # adding the '.' to include_dirs is CRUCIAL!!
extra_compile_args = ["-O3", "-Wall"],
extra_link_args = ['-g'],
)
# get the list of extensions
extNames = scandir("plasticity")
print(extNames)
cleanc("plasticity")
# and build up the set of Extension objects
print(extNames)
extensions = [makeExtension(name) for name in extNames]
# finally, we can pass all this to distutils
setup(
name="plasticity",
version=get_version('plasticity'),
description="Synaptic Plasticity in Rate-Based Neurons",
author="Brian Blais",
packages=['plasticity',
'plasticity.dialogs',
'plasticity.dialogs.waxy'],
scripts=['plasticity/Plasticity.pyw'],
package_data={'plasticity': ['images/*.*','dialogs/images/*.*',
'dialogs/images/learning_rules/*.*','hdf5/*.*']},
ext_modules=extensions,
cmdclass = {'build_ext': build_ext},
)
| mit | -212,955,220,237,154,800 | 29.086022 | 114 | 0.62366 | false |
dslackw/sbo-templates | sbo_templates/__metadata__.py | 1 | 1203 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# __metadata__.py file is part of sbo-templates.
# Copyright 2015-2021 Dimitris Zlatanidis <[email protected]>
# All rights reserved.
# SBo tool for managing templates.
# https://gitlab.com/dslackw/sbo-templates
# sbo-templates is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__prog__ = "sbo-templates"
__author__ = "dslackw"
__copyright__ = "2015-2021"
__version_info__ = (1, 3, 2)
__version__ = "{0}.{1}.{2}".format(*__version_info__)
__license__ = "GNU General Public License v3 (GPLv3)"
__email__ = "[email protected]"
__website__ = "https://gitlab.com/dslackw/sbo-templates"
| gpl-3.0 | -942,446,830,145,835,600 | 36.59375 | 71 | 0.717373 | false |
mozilla/pto | pto/apps/autocomplete/views.py | 1 | 1912 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
from django import http
from pto.apps.dates.decorators import json_view
from pto.apps.users.models import UserProfile, User
from pto.apps.users.utils import ldap_lookup
@json_view
def cities(request):
if not request.user.is_authenticated():
return http.HttpResponseForbidden('Must be logged in')
data = []
term = request.GET.get('term')
qs = UserProfile.objects.exclude(city='')
if term:
qs = qs.filter(city__istartswith=term)
for each in (qs
.values('city')
.distinct()
.order_by('city')):
city = each['city']
data.append(city)
return data
@json_view
def users(request, known_only=False):
if not request.user.is_authenticated():
return http.HttpResponseForbidden('Must be logged in')
query = request.GET.get('term').strip()
if len(query) < 2:
return []
results = []
# I chose a limit of 30 because there are about 20+ 'peter'
# something in mozilla
for each in ldap_lookup.search_users(query, 30, autocomplete=True):
if not each.get('givenName'):
logging.warn("Skipping LDAP entry %s" % each)
continue
if known_only:
if not User.objects.filter(email__iexact=each['mail']).exists():
continue
full_name_and_email = '%s %s <%s>' % (each['givenName'],
each['sn'],
each['mail'])
result = {'id': each['uid'],
'label': full_name_and_email,
'value': full_name_and_email}
results.append(result)
return results
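# Shape of each autocomplete entry returned above (values are illustrative):
#   {"id": "jdoe", "label": "Jane Doe <[email protected]>",
#    "value": "Jane Doe <[email protected]>"}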
| mpl-2.0 | 9,029,721,242,809,781,000 | 34.407407 | 76 | 0.578452 | false |
ajylee/gpaw-rtxs | gpaw/test/diamond_gllb.py | 1 | 2143 | from ase.structure import bulk
from sys import argv
from ase.dft.kpoints import ibz_points, get_bandpath
from gpaw import *
from ase import *
from gpaw.test import gen
from gpaw import setup_paths
import os
"""This calculation has the following structure.
1) Calculate the ground state of Diamond.
2) Calculate the band structure of diamond in order to obtain accurate KS band gap for Diamond.
3) Calculate ground state again, and calculate the potential discontinuity using accurate band gap.
4) Calculate band structure again, and apply the discontinuity to CBM.
Compare to reference.
"""
xc = 'GLLBSC'
gen('C',xcname=xc)
setup_paths.insert(0, '.')
# Calculate ground state
atoms = bulk('C', 'diamond', a=3.567)
calc = GPAW(h=0.15, kpts=(4,4,4), xc=xc, nbands = 6, eigensolver='cg')
atoms.set_calculator(calc)
atoms.get_potential_energy()
calc.write('Cgs.gpw')
# Calculate accurate KS-band gap from band structure
points = ibz_points['fcc']
# CMB is in G-X
G = points['Gamma']
X = points['X']
#W = points['W']
#K = points['K']
#L = points['L']
#[W, L, G, X, W, K]
kpts, x, X = get_bandpath([G, X], atoms.cell, npoints=12)
calc = GPAW('Cgs.gpw', kpts=kpts, fixdensity=True, usesymm=None, convergence=dict(bands=6))
calc.get_atoms().get_potential_energy()
# Get the accurate KS-band gap
homolumo = calc.occupations.get_homo_lumo(calc.wfs)
homo, lumo = homolumo
print "band gap ",(lumo-homo)*27.2
# Redo the ground state calculation
calc = GPAW(h=0.15, kpts=(4,4,4), xc=xc, nbands = 6, eigensolver='cg')
atoms.set_calculator(calc)
atoms.get_potential_energy()
# And calculate the discontinuity potential with accurate band gap
response = calc.hamiltonian.xc.xcs['RESPONSE']
response.calculate_delta_xc(homolumo=homolumo)
calc.write('CGLLBSC.gpw')
# Redo the band structure calculation
atoms, calc = restart('CGLLBSC.gpw', kpts=kpts, fixdensity=True, usesymm=None, convergence=dict(bands=6))
atoms.get_potential_energy()
response = calc.hamiltonian.xc.xcs['RESPONSE']
KS, dxc = response.calculate_delta_xc_perturbation()
assert abs(KS+dxc-5.41)<0.10
#M. Kuisma et. al, Phys. Rev. B 82, 115106, QP gap for C, 5.41eV, expt. 5.48eV
| gpl-3.0 | 7,119,025,400,674,546,000 | 30.985075 | 105 | 0.728885 | false |
discoapi/discotech | discotech/discoAPI/keywordManager.py | 1 | 3203 | __package__ = 'discotech.discoAPI'
import json

from discotech import discotechError
class KeywordManager(object):
"""
  Simple object to store and queue keywords to search for in social media providers
"""
def __init__(self,keywords = [],convertToSearchPhrases = False):
"""
@type keywords: list
    @param keywords: the keywords you want to search for
    @type convertToSearchPhrases: bool
    @param convertToSearchPhrases: whether keywords should be converted to matching search phrases, for example 'spider man' => ['spider','man','spiderman','spider_man']
"""
if keywords:
self.keywords = self._keyworsToSearchPhrases(keywords) if convertToSearchPhrases else list(keywords)
self._keywordCount = len(self.keywords)
self._headLocation = 0
else:
self.keywords = keywords
def dequque(self):
"""
    Dequeue a keyword from the queue; the keyword is then moved to the end of the queue.
@return: the next keyword in queue
"""
if not self.keywords:
raise discotechError("you don't any keywords")
retValue = self.keywords[self._headLocation]
# move head next
self._headLocation = (self._headLocation + 1) % self._keywordCount
return retValue
def _updateFromList(self,keywords):
self.keywords = list(keywords)
self._keywordCount = len(self.keywords)
self._headLocation = 0
def _updateFromDict(self,config):
if 'keywords' in config:
convertToSearchPhrases = False
if 'search_phrase' in config and config['search_phrase'] is True:
convertToSearchPhrases = True
self.keywords = self._keyworsToSearchPhrases(config['keywords']) if convertToSearchPhrases else list(config['keywords'])
self._keywordCount = len(self.keywords)
self._headLocation = 0
else:
raise discotechError("no keywords were given")
def _keyworToSearchPhrases(self,keyword):
words = keyword.split(' ')
#edge case
if len(words) == 1:
return words
cleanWords = []
#cleanup stage
for word in words:
word = word.strip()
if word != '':
cleanWords.append(word)
#combinator stage
combinators = ['','_']
combinedWords = []
for combinator in combinators:
combinedWords.append(combinator.join(cleanWords))
return cleanWords + combinedWords
def _keyworsToSearchPhrases(self,keywords):
retList = []
for keyword in keywords:
retList += self._keyworToSearchPhrases(keyword)
return retList
def loadConfig(self,config):
"""
    load keywords from a configuration
@type config: list | str
@param config: a list of keywords or a path or address of JSON configuration file
"""
#if it's list
if type(config) is list:
self._updateFromList(config)
#if it's a dict
if type(config) is dict:
self._updateFromDict(config)
#if it's string
if type(config) is str:
#could be an address
if config.startswith('http://') or config.startswith('https://'):
        configFile = getUrlContents(config)  # helper expected to come from the discotech package
        confList = json.loads(configFile['response_text'])
        # recursively call yourself
        return self.loadConfig(confList)
      # could be a file name
      confFile = open(config, 'r')
      confList = json.loads(confFile.read())
      # recursively call yourself
      return self.loadConfig(confList)
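# Usage sketch (illustrative, not part of the original module):
#   km = KeywordManager(['spider man'], convertToSearchPhrases=True)
#   km.dequque()  # -> 'spider'; the keyword is requeued at the back
#   km.dequque()  # -> 'man', then 'spiderman', 'spider_man', and around again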
| gpl-2.0 | -1,382,077,788,170,863,000 | 27.345133 | 163 | 0.70153 | false |
webrecorder/warcio | test/test_capture_http_proxy.py | 1 | 7458 | from warcio.capture_http import capture_http
import threading
from wsgiref.simple_server import make_server, WSGIServer
import time
import requests
from warcio.archiveiterator import ArchiveIterator
from pytest import raises
# ==================================================================
class TestCaptureHttpProxy():
def setup(cls):
def app(env, start_response):
result = ('Proxied: ' + env['PATH_INFO']).encode('utf-8')
headers = [('Content-Length', str(len(result)))]
start_response('200 OK', headers=headers)
return iter([result])
from wsgiprox.wsgiprox import WSGIProxMiddleware
wsgiprox = WSGIProxMiddleware(app, '/')
class NoLogServer(WSGIServer):
def handle_error(self, request, client_address):
pass
server = make_server('localhost', 0, wsgiprox, server_class=NoLogServer)
addr, cls.port = server.socket.getsockname()
cls.proxies = {'https': 'localhost:' + str(cls.port),
'http': 'localhost:' + str(cls.port)
}
def run():
try:
server.serve_forever()
except Exception as e:
print(e)
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
time.sleep(0.1)
def test_capture_http_proxy(self):
with capture_http() as warc_writer:
res = requests.get("http://example.com/test", proxies=self.proxies, verify=False)
ai = ArchiveIterator(warc_writer.get_stream())
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "http://example.com/test"
assert response.content_stream().read().decode('utf-8') == 'Proxied: /http://example.com/test'
assert response.rec_headers['WARC-Proxy-Host'] == 'http://localhost:{0}'.format(self.port)
request = next(ai)
assert request.rec_type == 'request'
assert request.rec_headers['WARC-Target-URI'] == "http://example.com/test"
assert request.rec_headers['WARC-Proxy-Host'] == 'http://localhost:{0}'.format(self.port)
with raises(StopIteration):
assert next(ai)
def test_capture_https_proxy(self):
with capture_http() as warc_writer:
res = requests.get("https://example.com/test", proxies=self.proxies, verify=False)
res = requests.get("https://example.com/foo", proxies=self.proxies, verify=False)
# not recording this request
res = requests.get("https://example.com/skip", proxies=self.proxies, verify=False)
with capture_http(warc_writer):
res = requests.get("https://example.com/bar", proxies=self.proxies, verify=False)
ai = ArchiveIterator(warc_writer.get_stream())
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/test"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/test'
request = next(ai)
assert request.rec_type == 'request'
assert request.rec_headers['WARC-Target-URI'] == "https://example.com/test"
assert request.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/foo"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/foo'
request = next(ai)
assert request.rec_type == 'request'
assert request.rec_headers['WARC-Target-URI'] == "https://example.com/foo"
assert request.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/bar"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/bar'
request = next(ai)
assert request.rec_type == 'request'
with raises(StopIteration):
assert next(ai)
def test_capture_https_proxy_same_session(self):
sesh = requests.session()
with capture_http() as warc_writer:
res = sesh.get("https://example.com/test", proxies=self.proxies, verify=False)
res = sesh.get("https://example.com/foo", proxies=self.proxies, verify=False)
# *will* be captured, as part of same session... (fix this?)
res = sesh.get("https://example.com/skip", proxies=self.proxies, verify=False)
with capture_http(warc_writer):
res = sesh.get("https://example.com/bar", proxies=self.proxies, verify=False)
ai = ArchiveIterator(warc_writer.get_stream())
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/test"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/test'
request = next(ai)
assert request.rec_type == 'request'
assert request.rec_headers['WARC-Target-URI'] == "https://example.com/test"
assert request.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/foo"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/foo'
request = next(ai)
assert request.rec_type == 'request'
assert request.rec_headers['WARC-Target-URI'] == "https://example.com/foo"
assert request.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/skip"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/skip'
request = next(ai)
assert request.rec_type == 'request'
response = next(ai)
assert response.rec_type == 'response'
assert response.rec_headers['WARC-Target-URI'] == "https://example.com/bar"
assert response.rec_headers['WARC-Proxy-Host'] == 'https://localhost:{0}'.format(self.port)
assert response.content_stream().read().decode('utf-8') == 'Proxied: /https://example.com/bar'
request = next(ai)
assert request.rec_type == 'request'
with raises(StopIteration):
assert next(ai)
| apache-2.0 | -5,916,640,460,981,349,000 | 43.658683 | 103 | 0.615581 | false |
fedora-infra/anitya | anitya/tests/lib/backends/test_freshmeat.py | 1 | 4187 | # -*- coding: utf-8 -*-
#
# Copyright © 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
"""
anitya tests for the custom backend.
"""
import unittest
import anitya.lib.backends.freshmeat as backend
from anitya.db import models
from anitya.lib.exceptions import AnityaPluginException
from anitya.tests.base import DatabaseTestCase, create_distro
BACKEND = "Freshmeat"
class FreshmeatBackendtests(DatabaseTestCase):
"""Drupal backend tests."""
def setUp(self):
"""Set up the environnment, ran before every tests."""
super(FreshmeatBackendtests, self).setUp()
create_distro(self.session)
self.create_project()
def create_project(self):
"""Create some basic projects to work with."""
project = models.Project(
name="atmail",
homepage="http://freecode.com/projects/atmail",
backend=BACKEND,
)
self.session.add(project)
self.session.commit()
project = models.Project(
name="foo", homepage="http://freecode.com/projects/foo", backend=BACKEND
)
self.session.add(project)
self.session.commit()
project = models.Project(
name="awstats",
homepage="http://freecode.com/projects/awstats",
backend=BACKEND,
)
self.session.add(project)
self.session.commit()
def test_get_version(self):
"""Test the get_version function of the freshmeat backend."""
pid = 1
project = models.Project.get(self.session, pid)
exp = "7"
obs = backend.FreshmeatBackend.get_version(project)
self.assertEqual(obs, exp)
pid = 2
project = models.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException, backend.FreshmeatBackend.get_version, project
)
pid = 3
project = models.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException, backend.FreshmeatBackend.get_version, project
)
def test_get_version_url(self):
"""Assert that correct url is returned."""
project = models.Project(
name="test", homepage="http://example.org", backend=BACKEND
)
exp = "http://freshmeat.net/projects/test"
obs = backend.FreshmeatBackend.get_version_url(project)
self.assertEqual(obs, exp)
def test_get_versions(self):
"""Test the get_versions function of the debian backend."""
pid = 1
project = models.Project.get(self.session, pid)
exp = ["6.3.5", "6.5.0", "6.6.0", "6.30.3", "7"]
obs = backend.FreshmeatBackend.get_ordered_versions(project)
self.assertEqual(obs, exp)
pid = 2
project = models.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException, backend.FreshmeatBackend.get_versions, project
)
pid = 3
project = models.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException, backend.FreshmeatBackend.get_versions, project
)
if __name__ == "__main__":
SUITE = unittest.TestLoader().loadTestsFromTestCase(FreshmeatBackendtests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| gpl-2.0 | -850,745,508,754,240,800 | 32.488 | 84 | 0.65528 | false |
ioggstream/python-course | ansible-101/notebooks/exercise-05/inventory-docker-solution.py | 1 | 1376 | #!/usr/bin/env python
# List our containers. Note: this only works with docker-compose containers.
from __future__ import print_function
from collections import defaultdict
import json
#
# Manage different docker libraries
#
try:
from docker import Client
except ImportError:
from docker import APIClient as Client
import logging
log = logging.getLogger()
logging.basicConfig(level=logging.DEBUG)
def print_hosts():
    c = Client(base_url="http://172.17.0.1:2375")
container_fmt = lambda x: (
x['Names'][0][1:],
x['NetworkSettings']['Networks']['bridge']['IPAddress'],
)
inventory = dict()
for x in c.containers():
log.debug("Processing entry %r", '\t\t'.join(container_fmt(x)))
try:
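            # docker-compose records the service name in a container label;
            # use it as the ansible group name, keyed by the bridge network IP.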
group_name = x['Labels']['com.docker.compose.service']
ip_address = x['NetworkSettings']['Networks']['bridge']['IPAddress']
if group_name not in inventory:
inventory[group_name] = defaultdict(list)
inventory[group_name]['hosts'].append(ip_address)
except KeyError:
log.warning("Host not run via docker-compose: skipping")
    # only add the ssh group vars when a 'web' group was actually discovered,
    # otherwise a plain dict lookup would raise KeyError
    if 'web' in inventory:
        inventory['web']['host_vars'] = {'ansible_ssh_common_args': ' -o StrictHostKeyChecking=no '}
ret = json.dumps(inventory, indent=True)
return ret
if __name__ == '__main__':
print(print_hosts())
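# Illustrative usage as a dynamic inventory (assumes the docker daemon is
# reachable on 172.17.0.1:2375):
#   ansible -i inventory-docker-solution.py all -m ping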
| agpl-3.0 | -8,088,468,184,198,138,000 | 28.913043 | 96 | 0.634448 | false |
googleapis/googleapis-gen | google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/common/types/ad_type_infos.py | 1 | 46175 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v8.common.types import ad_asset
from google.ads.googleads.v8.enums.types import call_conversion_reporting_state
from google.ads.googleads.v8.enums.types import display_ad_format_setting
from google.ads.googleads.v8.enums.types import display_upload_product_type as gage_display_upload_product_type
from google.ads.googleads.v8.enums.types import legacy_app_install_ad_app_store
from google.ads.googleads.v8.enums.types import mime_type as gage_mime_type
__protobuf__ = proto.module(
package='google.ads.googleads.v8.common',
marshal='google.ads.googleads.v8',
manifest={
'TextAdInfo',
'ExpandedTextAdInfo',
'ExpandedDynamicSearchAdInfo',
'HotelAdInfo',
'ShoppingSmartAdInfo',
'ShoppingProductAdInfo',
'ShoppingComparisonListingAdInfo',
'GmailAdInfo',
'GmailTeaser',
'DisplayCallToAction',
'ProductImage',
'ProductVideo',
'ImageAdInfo',
'VideoBumperInStreamAdInfo',
'VideoNonSkippableInStreamAdInfo',
'VideoTrueViewInStreamAdInfo',
'VideoOutstreamAdInfo',
'VideoTrueViewDiscoveryAdInfo',
'VideoAdInfo',
'VideoResponsiveAdInfo',
'ResponsiveSearchAdInfo',
'LegacyResponsiveDisplayAdInfo',
'AppAdInfo',
'AppEngagementAdInfo',
'LegacyAppInstallAdInfo',
'ResponsiveDisplayAdInfo',
'LocalAdInfo',
'DisplayUploadAdInfo',
'ResponsiveDisplayAdControlSpec',
'SmartCampaignAdInfo',
'CallAdInfo',
},
)
class TextAdInfo(proto.Message):
r"""A text ad.
Attributes:
headline (str):
The headline of the ad.
description1 (str):
The first line of the ad's description.
description2 (str):
The second line of the ad's description.
"""
headline = proto.Field(
proto.STRING,
number=4,
optional=True,
)
description1 = proto.Field(
proto.STRING,
number=5,
optional=True,
)
description2 = proto.Field(
proto.STRING,
number=6,
optional=True,
)
class ExpandedTextAdInfo(proto.Message):
r"""An expanded text ad.
Attributes:
headline_part1 (str):
The first part of the ad's headline.
headline_part2 (str):
The second part of the ad's headline.
headline_part3 (str):
The third part of the ad's headline.
description (str):
The description of the ad.
description2 (str):
The second description of the ad.
path1 (str):
The text that can appear alongside the ad's
displayed URL.
path2 (str):
Additional text that can appear alongside the
ad's displayed URL.
"""
headline_part1 = proto.Field(
proto.STRING,
number=8,
optional=True,
)
headline_part2 = proto.Field(
proto.STRING,
number=9,
optional=True,
)
headline_part3 = proto.Field(
proto.STRING,
number=10,
optional=True,
)
description = proto.Field(
proto.STRING,
number=11,
optional=True,
)
description2 = proto.Field(
proto.STRING,
number=12,
optional=True,
)
path1 = proto.Field(
proto.STRING,
number=13,
optional=True,
)
path2 = proto.Field(
proto.STRING,
number=14,
optional=True,
)
class ExpandedDynamicSearchAdInfo(proto.Message):
r"""An expanded dynamic search ad.
Attributes:
description (str):
The description of the ad.
description2 (str):
The second description of the ad.
"""
description = proto.Field(
proto.STRING,
number=3,
optional=True,
)
description2 = proto.Field(
proto.STRING,
number=4,
optional=True,
)
class HotelAdInfo(proto.Message):
r"""A hotel ad. """
class ShoppingSmartAdInfo(proto.Message):
r"""A Smart Shopping ad. """
class ShoppingProductAdInfo(proto.Message):
r"""A standard Shopping ad. """
class ShoppingComparisonListingAdInfo(proto.Message):
r"""A Shopping Comparison Listing ad.
Attributes:
headline (str):
Headline of the ad. This field is required.
Allowed length is between 25 and 45 characters.
"""
headline = proto.Field(
proto.STRING,
number=2,
optional=True,
)
class GmailAdInfo(proto.Message):
r"""A Gmail ad.
Attributes:
teaser (google.ads.googleads.v8.common.types.GmailTeaser):
The Gmail teaser.
header_image (str):
The MediaFile resource name of the header
image. Valid image types are GIF, JPEG and PNG.
The minimum size is 300x100 pixels and the
aspect ratio must be between 3:1 and 5:1 (+-1%).
marketing_image (str):
The MediaFile resource name of the marketing
image. Valid image types are GIF, JPEG and PNG.
The image must either be landscape with a
minimum size of 600x314 pixels and aspect ratio
of 600:314 (+-1%) or square with a minimum size
of 300x300 pixels and aspect ratio of 1:1 (+-1%)
marketing_image_headline (str):
Headline of the marketing image.
marketing_image_description (str):
Description of the marketing image.
marketing_image_display_call_to_action (google.ads.googleads.v8.common.types.DisplayCallToAction):
Display-call-to-action of the marketing
image.
product_images (Sequence[google.ads.googleads.v8.common.types.ProductImage]):
Product images. Up to 15 images are
supported.
product_videos (Sequence[google.ads.googleads.v8.common.types.ProductVideo]):
Product videos. Up to 7 videos are supported.
At least one product video or a marketing image
must be specified.
"""
teaser = proto.Field(
proto.MESSAGE,
number=1,
message='GmailTeaser',
)
header_image = proto.Field(
proto.STRING,
number=10,
optional=True,
)
marketing_image = proto.Field(
proto.STRING,
number=11,
optional=True,
)
marketing_image_headline = proto.Field(
proto.STRING,
number=12,
optional=True,
)
marketing_image_description = proto.Field(
proto.STRING,
number=13,
optional=True,
)
marketing_image_display_call_to_action = proto.Field(
proto.MESSAGE,
number=6,
message='DisplayCallToAction',
)
product_images = proto.RepeatedField(
proto.MESSAGE,
number=7,
message='ProductImage',
)
product_videos = proto.RepeatedField(
proto.MESSAGE,
number=8,
message='ProductVideo',
)
class GmailTeaser(proto.Message):
r"""Gmail teaser data. The teaser is a small header that acts as
an invitation to view the rest of the ad (the body).
Attributes:
headline (str):
Headline of the teaser.
description (str):
Description of the teaser.
business_name (str):
Business name of the advertiser.
logo_image (str):
The MediaFile resource name of the logo
image. Valid image types are GIF, JPEG and PNG.
The minimum size is 144x144 pixels and the
aspect ratio must be 1:1 (+-1%).
"""
headline = proto.Field(
proto.STRING,
number=5,
optional=True,
)
description = proto.Field(
proto.STRING,
number=6,
optional=True,
)
business_name = proto.Field(
proto.STRING,
number=7,
optional=True,
)
logo_image = proto.Field(
proto.STRING,
number=8,
optional=True,
)
class DisplayCallToAction(proto.Message):
r"""Data for display call to action. The call to action is a
piece of the ad that prompts the user to do something. Like
clicking a link or making a phone call.
Attributes:
text (str):
Text for the display-call-to-action.
text_color (str):
Text color for the display-call-to-action in
hexadecimal, e.g. #ffffff for white.
url_collection_id (str):
Identifies the url collection in the ad.url_collections
field. If not set the url defaults to final_url.
"""
text = proto.Field(
proto.STRING,
number=5,
optional=True,
)
text_color = proto.Field(
proto.STRING,
number=6,
optional=True,
)
url_collection_id = proto.Field(
proto.STRING,
number=7,
optional=True,
)
class ProductImage(proto.Message):
r"""Product image specific data.
Attributes:
product_image (str):
The MediaFile resource name of the product
image. Valid image types are GIF, JPEG and PNG.
The minimum size is 300x300 pixels and the
aspect ratio must be 1:1 (+-1%).
description (str):
Description of the product.
display_call_to_action (google.ads.googleads.v8.common.types.DisplayCallToAction):
Display-call-to-action of the product image.
"""
product_image = proto.Field(
proto.STRING,
number=4,
optional=True,
)
description = proto.Field(
proto.STRING,
number=5,
optional=True,
)
display_call_to_action = proto.Field(
proto.MESSAGE,
number=3,
message='DisplayCallToAction',
)
class ProductVideo(proto.Message):
r"""Product video specific data.
Attributes:
product_video (str):
The MediaFile resource name of a video which
must be hosted on YouTube.
"""
product_video = proto.Field(
proto.STRING,
number=2,
optional=True,
)
class ImageAdInfo(proto.Message):
r"""An image ad.
Attributes:
pixel_width (int):
Width in pixels of the full size image.
pixel_height (int):
Height in pixels of the full size image.
image_url (str):
URL of the full size image.
preview_pixel_width (int):
Width in pixels of the preview size image.
preview_pixel_height (int):
Height in pixels of the preview size image.
preview_image_url (str):
URL of the preview size image.
mime_type (google.ads.googleads.v8.enums.types.MimeTypeEnum.MimeType):
The mime type of the image.
name (str):
The name of the image. If the image was
created from a MediaFile, this is the
MediaFile's name. If the image was created from
bytes, this is empty.
media_file (str):
The MediaFile resource to use for the image.
data (bytes):
Raw image data as bytes.
ad_id_to_copy_image_from (int):
An ad ID to copy the image from.
"""
pixel_width = proto.Field(
proto.INT64,
number=15,
optional=True,
)
pixel_height = proto.Field(
proto.INT64,
number=16,
optional=True,
)
image_url = proto.Field(
proto.STRING,
number=17,
optional=True,
)
preview_pixel_width = proto.Field(
proto.INT64,
number=18,
optional=True,
)
preview_pixel_height = proto.Field(
proto.INT64,
number=19,
optional=True,
)
preview_image_url = proto.Field(
proto.STRING,
number=20,
optional=True,
)
mime_type = proto.Field(
proto.ENUM,
number=10,
enum=gage_mime_type.MimeTypeEnum.MimeType,
)
name = proto.Field(
proto.STRING,
number=21,
optional=True,
)
media_file = proto.Field(
proto.STRING,
number=12,
oneof='image',
)
data = proto.Field(
proto.BYTES,
number=13,
oneof='image',
)
ad_id_to_copy_image_from = proto.Field(
proto.INT64,
number=14,
oneof='image',
)
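    # Note: media_file, data and ad_id_to_copy_image_from share the 'image'
    # oneof, so (per proto oneof semantics) setting one of them clears the
    # others.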
class VideoBumperInStreamAdInfo(proto.Message):
r"""Representation of video bumper in-stream ad format (very
short in-stream non-skippable video ad).
Attributes:
companion_banner (str):
The MediaFile resource name of the companion
banner used with the ad.
"""
companion_banner = proto.Field(
proto.STRING,
number=2,
optional=True,
)
class VideoNonSkippableInStreamAdInfo(proto.Message):
r"""Representation of video non-skippable in-stream ad format (15
second in-stream non-skippable video ad).
Attributes:
companion_banner (str):
The MediaFile resource name of the companion
banner used with the ad.
"""
companion_banner = proto.Field(
proto.STRING,
number=2,
optional=True,
)
class VideoTrueViewInStreamAdInfo(proto.Message):
r"""Representation of video TrueView in-stream ad format (ad
shown during video playback, often at beginning, which displays
a skip button a few seconds into the video).
Attributes:
action_button_label (str):
Label on the CTA (call-to-action) button
taking the user to the video ad's final URL.
Required for TrueView for action campaigns,
optional otherwise.
action_headline (str):
            Additional text displayed with the CTA
            (call-to-action) button to give context and
            encourage clicking on the button.
companion_banner (str):
The MediaFile resource name of the companion
banner used with the ad.
"""
action_button_label = proto.Field(
proto.STRING,
number=4,
optional=True,
)
action_headline = proto.Field(
proto.STRING,
number=5,
optional=True,
)
companion_banner = proto.Field(
proto.STRING,
number=6,
optional=True,
)
class VideoOutstreamAdInfo(proto.Message):
r"""Representation of video out-stream ad format (ad shown
alongside a feed with automatic playback, without sound).
Attributes:
headline (str):
The headline of the ad.
description (str):
The description line.
"""
headline = proto.Field(
proto.STRING,
number=3,
optional=True,
)
description = proto.Field(
proto.STRING,
number=4,
optional=True,
)
class VideoTrueViewDiscoveryAdInfo(proto.Message):
r"""Representation of video TrueView discovery ad format.
Attributes:
headline (str):
The headline of the ad.
description1 (str):
First text line for a TrueView video
discovery ad.
description2 (str):
Second text line for a TrueView video
discovery ad.
"""
headline = proto.Field(
proto.STRING,
number=4,
optional=True,
)
description1 = proto.Field(
proto.STRING,
number=5,
optional=True,
)
description2 = proto.Field(
proto.STRING,
number=6,
optional=True,
)
class VideoAdInfo(proto.Message):
r"""A video ad.
Attributes:
media_file (str):
The MediaFile resource to use for the video.
in_stream (google.ads.googleads.v8.common.types.VideoTrueViewInStreamAdInfo):
Video TrueView in-stream ad format.
bumper (google.ads.googleads.v8.common.types.VideoBumperInStreamAdInfo):
Video bumper in-stream ad format.
out_stream (google.ads.googleads.v8.common.types.VideoOutstreamAdInfo):
Video out-stream ad format.
non_skippable (google.ads.googleads.v8.common.types.VideoNonSkippableInStreamAdInfo):
Video non-skippable in-stream ad format.
discovery (google.ads.googleads.v8.common.types.VideoTrueViewDiscoveryAdInfo):
Video TrueView discovery ad format.
"""
media_file = proto.Field(
proto.STRING,
number=7,
optional=True,
)
in_stream = proto.Field(
proto.MESSAGE,
number=2,
oneof='format',
message='VideoTrueViewInStreamAdInfo',
)
bumper = proto.Field(
proto.MESSAGE,
number=3,
oneof='format',
message='VideoBumperInStreamAdInfo',
)
out_stream = proto.Field(
proto.MESSAGE,
number=4,
oneof='format',
message='VideoOutstreamAdInfo',
)
non_skippable = proto.Field(
proto.MESSAGE,
number=5,
oneof='format',
message='VideoNonSkippableInStreamAdInfo',
)
discovery = proto.Field(
proto.MESSAGE,
number=6,
oneof='format',
message='VideoTrueViewDiscoveryAdInfo',
)
class VideoResponsiveAdInfo(proto.Message):
r"""A video responsive ad.
Attributes:
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets used for the short
headline, e.g. the "Call To Action" banner.
Currently, only a single value for the short
headline is supported.
long_headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets used for the long
headline. Currently, only a single value for the
long headline is supported.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets used for the description.
Currently, only a single value for the
description is supported.
call_to_actions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets used for the button, e.g.
the "Call To Action" button. Currently, only a
single value for the button is supported.
videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]):
List of YouTube video assets used for the ad.
Currently, only a single value for the YouTube
video asset is supported.
companion_banners (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
List of image assets used for the companion
banner. Currently, only a single value for the
companion banner asset is supported.
"""
headlines = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
long_headlines = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=3,
message=ad_asset.AdTextAsset,
)
call_to_actions = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=ad_asset.AdTextAsset,
)
videos = proto.RepeatedField(
proto.MESSAGE,
number=5,
message=ad_asset.AdVideoAsset,
)
companion_banners = proto.RepeatedField(
proto.MESSAGE,
number=6,
message=ad_asset.AdImageAsset,
)
class ResponsiveSearchAdInfo(proto.Message):
r"""A responsive search ad.
Responsive search ads let you create an ad that adapts to show
more text, and more relevant messages, to your customers. Enter
multiple headlines and descriptions when creating a responsive
search ad, and over time, Google Ads will automatically test
different combinations and learn which combinations perform
best. By adapting your ad's content to more closely match
potential customers' search terms, responsive search ads may
improve your campaign's performance.
    More information at
    https://support.google.com/google-ads/answer/7684791
Attributes:
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for headlines. When the
ad serves the headlines will be selected from
this list.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for descriptions. When
the ad serves the descriptions will be selected
from this list.
path1 (str):
First part of text that may appear appended
to the url displayed in the ad.
path2 (str):
Second part of text that may appear appended
to the url displayed in the ad. This field can
only be set when path1 is also set.
"""
headlines = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
path1 = proto.Field(
proto.STRING,
number=5,
optional=True,
)
path2 = proto.Field(
proto.STRING,
number=6,
optional=True,
)
class LegacyResponsiveDisplayAdInfo(proto.Message):
r"""A legacy responsive display ad. Ads of this type are labeled
'Responsive ads' in the Google Ads UI.
Attributes:
short_headline (str):
The short version of the ad's headline.
long_headline (str):
The long version of the ad's headline.
description (str):
The description of the ad.
business_name (str):
The business name in the ad.
allow_flexible_color (bool):
Advertiser's consent to allow flexible color. When true, the
ad may be served with different color if necessary. When
false, the ad will be served with the specified colors or a
neutral color. The default value is true. Must be true if
main_color and accent_color are not set.
accent_color (str):
The accent color of the ad in hexadecimal, e.g. #ffffff for
white. If one of main_color and accent_color is set, the
other is required as well.
main_color (str):
The main color of the ad in hexadecimal, e.g. #ffffff for
white. If one of main_color and accent_color is set, the
other is required as well.
call_to_action_text (str):
The call-to-action text for the ad.
logo_image (str):
The MediaFile resource name of the logo image
used in the ad.
square_logo_image (str):
The MediaFile resource name of the square
logo image used in the ad.
marketing_image (str):
The MediaFile resource name of the marketing
image used in the ad.
square_marketing_image (str):
The MediaFile resource name of the square
marketing image used in the ad.
format_setting (google.ads.googleads.v8.enums.types.DisplayAdFormatSettingEnum.DisplayAdFormatSetting):
Specifies which format the ad will be served in. Default is
ALL_FORMATS.
price_prefix (str):
Prefix before price. E.g. 'as low as'.
promo_text (str):
Promotion text used for dynamic formats of
responsive ads. For example 'Free two-day
shipping'.
"""
short_headline = proto.Field(
proto.STRING,
number=16,
optional=True,
)
long_headline = proto.Field(
proto.STRING,
number=17,
optional=True,
)
description = proto.Field(
proto.STRING,
number=18,
optional=True,
)
business_name = proto.Field(
proto.STRING,
number=19,
optional=True,
)
allow_flexible_color = proto.Field(
proto.BOOL,
number=20,
optional=True,
)
accent_color = proto.Field(
proto.STRING,
number=21,
optional=True,
)
main_color = proto.Field(
proto.STRING,
number=22,
optional=True,
)
call_to_action_text = proto.Field(
proto.STRING,
number=23,
optional=True,
)
logo_image = proto.Field(
proto.STRING,
number=24,
optional=True,
)
square_logo_image = proto.Field(
proto.STRING,
number=25,
optional=True,
)
marketing_image = proto.Field(
proto.STRING,
number=26,
optional=True,
)
square_marketing_image = proto.Field(
proto.STRING,
number=27,
optional=True,
)
format_setting = proto.Field(
proto.ENUM,
number=13,
enum=display_ad_format_setting.DisplayAdFormatSettingEnum.DisplayAdFormatSetting,
)
price_prefix = proto.Field(
proto.STRING,
number=28,
optional=True,
)
promo_text = proto.Field(
proto.STRING,
number=29,
optional=True,
)
class AppAdInfo(proto.Message):
r"""An app ad.
Attributes:
mandatory_ad_text (google.ads.googleads.v8.common.types.AdTextAsset):
Mandatory ad text.
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for headlines. When the
ad serves the headlines will be selected from
this list.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for descriptions. When
the ad serves the descriptions will be selected
from this list.
images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
List of image assets that may be displayed
with the ad.
youtube_videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]):
List of YouTube video assets that may be
displayed with the ad.
html5_media_bundles (Sequence[google.ads.googleads.v8.common.types.AdMediaBundleAsset]):
List of media bundle assets that may be used
with the ad.
"""
mandatory_ad_text = proto.Field(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
headlines = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=3,
message=ad_asset.AdTextAsset,
)
images = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=ad_asset.AdImageAsset,
)
youtube_videos = proto.RepeatedField(
proto.MESSAGE,
number=5,
message=ad_asset.AdVideoAsset,
)
html5_media_bundles = proto.RepeatedField(
proto.MESSAGE,
number=6,
message=ad_asset.AdMediaBundleAsset,
)
class AppEngagementAdInfo(proto.Message):
r"""App engagement ads allow you to write text encouraging a
specific action in the app, like checking in, making a purchase,
or booking a flight. They allow you to send users to a specific
part of your app where they can find what they're looking for
easier and faster.
Attributes:
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for headlines. When the
ad serves the headlines will be selected from
this list.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for descriptions. When
the ad serves the descriptions will be selected
from this list.
images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
List of image assets that may be displayed
with the ad.
videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]):
List of video assets that may be displayed
with the ad.
"""
headlines = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
images = proto.RepeatedField(
proto.MESSAGE,
number=3,
message=ad_asset.AdImageAsset,
)
videos = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=ad_asset.AdVideoAsset,
)
class LegacyAppInstallAdInfo(proto.Message):
r"""A legacy app install ad that only can be used by a few select
customers.
Attributes:
app_id (str):
The id of the mobile app.
app_store (google.ads.googleads.v8.enums.types.LegacyAppInstallAdAppStoreEnum.LegacyAppInstallAdAppStore):
The app store the mobile app is available in.
headline (str):
The headline of the ad.
description1 (str):
The first description line of the ad.
description2 (str):
The second description line of the ad.
"""
app_id = proto.Field(
proto.STRING,
number=6,
optional=True,
)
app_store = proto.Field(
proto.ENUM,
number=2,
enum=legacy_app_install_ad_app_store.LegacyAppInstallAdAppStoreEnum.LegacyAppInstallAdAppStore,
)
headline = proto.Field(
proto.STRING,
number=7,
optional=True,
)
description1 = proto.Field(
proto.STRING,
number=8,
optional=True,
)
description2 = proto.Field(
proto.STRING,
number=9,
optional=True,
)
class ResponsiveDisplayAdInfo(proto.Message):
r"""A responsive display ad.
Attributes:
marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
Marketing images to be used in the ad. Valid image types are
GIF, JPEG, and PNG. The minimum size is 600x314 and the
aspect ratio must be 1.91:1 (+-1%). At least one
marketing_image is required. Combined with
square_marketing_images the maximum is 15.
square_marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
Square marketing images to be used in the ad. Valid image
types are GIF, JPEG, and PNG. The minimum size is 300x300
and the aspect ratio must be 1:1 (+-1%). At least one square
marketing_image is required. Combined with marketing_images
the maximum is 15.
logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
Logo images to be used in the ad. Valid image types are GIF,
JPEG, and PNG. The minimum size is 512x128 and the aspect
ratio must be 4:1 (+-1%). Combined with square_logo_images
the maximum is 5.
square_logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
Square logo images to be used in the ad. Valid image types
are GIF, JPEG, and PNG. The minimum size is 128x128 and the
aspect ratio must be 1:1 (+-1%). Combined with
            logo_images the maximum is 5.
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
Short format headlines for the ad. The
maximum length is 30 characters. At least 1 and
max 5 headlines can be specified.
long_headline (google.ads.googleads.v8.common.types.AdTextAsset):
A required long format headline. The maximum
length is 90 characters.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
Descriptive texts for the ad. The maximum
length is 90 characters. At least 1 and max 5
headlines can be specified.
youtube_videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]):
Optional YouTube videos for the ad. A maximum
of 5 videos can be specified.
business_name (str):
The advertiser/brand name. Maximum display
width is 25.
main_color (str):
The main color of the ad in hexadecimal, e.g. #ffffff for
white. If one of main_color and accent_color is set, the
other is required as well.
accent_color (str):
The accent color of the ad in hexadecimal, e.g. #ffffff for
white. If one of main_color and accent_color is set, the
other is required as well.
allow_flexible_color (bool):
Advertiser's consent to allow flexible color. When true, the
ad may be served with different color if necessary. When
false, the ad will be served with the specified colors or a
neutral color. The default value is true. Must be true if
main_color and accent_color are not set.
call_to_action_text (str):
The call-to-action text for the ad. Maximum
display width is 30.
price_prefix (str):
Prefix before price. E.g. 'as low as'.
promo_text (str):
Promotion text used for dynamic formats of
responsive ads. For example 'Free two-day
shipping'.
format_setting (google.ads.googleads.v8.enums.types.DisplayAdFormatSettingEnum.DisplayAdFormatSetting):
Specifies which format the ad will be served in. Default is
ALL_FORMATS.
control_spec (google.ads.googleads.v8.common.types.ResponsiveDisplayAdControlSpec):
Specification for various creative controls.
"""
marketing_images = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdImageAsset,
)
square_marketing_images = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdImageAsset,
)
logo_images = proto.RepeatedField(
proto.MESSAGE,
number=3,
message=ad_asset.AdImageAsset,
)
square_logo_images = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=ad_asset.AdImageAsset,
)
headlines = proto.RepeatedField(
proto.MESSAGE,
number=5,
message=ad_asset.AdTextAsset,
)
long_headline = proto.Field(
proto.MESSAGE,
number=6,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=7,
message=ad_asset.AdTextAsset,
)
youtube_videos = proto.RepeatedField(
proto.MESSAGE,
number=8,
message=ad_asset.AdVideoAsset,
)
business_name = proto.Field(
proto.STRING,
number=17,
optional=True,
)
main_color = proto.Field(
proto.STRING,
number=18,
optional=True,
)
accent_color = proto.Field(
proto.STRING,
number=19,
optional=True,
)
allow_flexible_color = proto.Field(
proto.BOOL,
number=20,
optional=True,
)
call_to_action_text = proto.Field(
proto.STRING,
number=21,
optional=True,
)
price_prefix = proto.Field(
proto.STRING,
number=22,
optional=True,
)
promo_text = proto.Field(
proto.STRING,
number=23,
optional=True,
)
format_setting = proto.Field(
proto.ENUM,
number=16,
enum=display_ad_format_setting.DisplayAdFormatSettingEnum.DisplayAdFormatSetting,
)
control_spec = proto.Field(
proto.MESSAGE,
number=24,
message='ResponsiveDisplayAdControlSpec',
)
class LocalAdInfo(proto.Message):
r"""A local ad.
Attributes:
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for headlines. When the
ad serves the headlines will be selected from
this list. At least 1 and at most 5 headlines
must be specified.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for descriptions. When
the ad serves the descriptions will be selected
from this list. At least 1 and at most 5
descriptions must be specified.
call_to_actions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for call-to-actions. When
the ad serves the call-to-actions will be
selected from this list. Call-to-actions are
optional and at most 5 can be specified.
marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
List of marketing image assets that may be
displayed with the ad. The images must be
314x600 pixels or 320x320 pixels. At least 1 and
at most 20 image assets must be specified.
logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]):
List of logo image assets that may be
displayed with the ad. The images must be
128x128 pixels and not larger than 120KB. At
least 1 and at most 5 image assets must be
specified.
videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]):
List of YouTube video assets that may be
displayed with the ad. Videos are optional and
at most 20 can be specified.
path1 (str):
First part of optional text that may appear
appended to the url displayed in the ad.
path2 (str):
Second part of optional text that may appear
appended to the url displayed in the ad. This
field can only be set when path1 is also set.
"""
headlines = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
call_to_actions = proto.RepeatedField(
proto.MESSAGE,
number=3,
message=ad_asset.AdTextAsset,
)
marketing_images = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=ad_asset.AdImageAsset,
)
logo_images = proto.RepeatedField(
proto.MESSAGE,
number=5,
message=ad_asset.AdImageAsset,
)
videos = proto.RepeatedField(
proto.MESSAGE,
number=6,
message=ad_asset.AdVideoAsset,
)
path1 = proto.Field(
proto.STRING,
number=9,
optional=True,
)
path2 = proto.Field(
proto.STRING,
number=10,
optional=True,
)
class DisplayUploadAdInfo(proto.Message):
r"""A generic type of display ad. The exact ad format is controlled by
the display_upload_product_type field, which determines what kinds
of data need to be included with the ad.
Attributes:
display_upload_product_type (google.ads.googleads.v8.enums.types.DisplayUploadProductTypeEnum.DisplayUploadProductType):
The product type of this ad. See comments on
the enum for details.
media_bundle (google.ads.googleads.v8.common.types.AdMediaBundleAsset):
A media bundle asset to be used in the ad. For information
about the media bundle for HTML5_UPLOAD_AD see
https://support.google.com/google-ads/answer/1722096 Media
bundles that are part of dynamic product types use a special
format that needs to be created through the Google Web
Designer. See
https://support.google.com/webdesigner/answer/7543898 for
more information.
"""
display_upload_product_type = proto.Field(
proto.ENUM,
number=1,
enum=gage_display_upload_product_type.DisplayUploadProductTypeEnum.DisplayUploadProductType,
)
media_bundle = proto.Field(
proto.MESSAGE,
number=2,
oneof='media_asset',
message=ad_asset.AdMediaBundleAsset,
)
class ResponsiveDisplayAdControlSpec(proto.Message):
r"""Specification for various creative controls for a responsive
display ad.
Attributes:
enable_asset_enhancements (bool):
Whether the advertiser has opted into the
asset enhancements feature.
enable_autogen_video (bool):
            Whether the advertiser has opted into the
            auto-gen video feature.
"""
enable_asset_enhancements = proto.Field(
proto.BOOL,
number=1,
)
enable_autogen_video = proto.Field(
proto.BOOL,
number=2,
)
class SmartCampaignAdInfo(proto.Message):
r"""A Smart campaign ad.
Attributes:
headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for headlines. When the
ad serves the headlines will be selected from
this list. 3 headlines must be specified.
descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]):
List of text assets for descriptions. When
the ad serves the descriptions will be selected
from this list. 2 descriptions must be
specified.
"""
headlines = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=ad_asset.AdTextAsset,
)
descriptions = proto.RepeatedField(
proto.MESSAGE,
number=2,
message=ad_asset.AdTextAsset,
)
class CallAdInfo(proto.Message):
r"""A call ad.
Attributes:
country_code (str):
The country code in the ad.
phone_number (str):
The phone number in the ad.
business_name (str):
The business name in the ad.
headline1 (str):
First headline in the ad.
headline2 (str):
Second headline in the ad.
description1 (str):
The first line of the ad's description.
description2 (str):
The second line of the ad's description.
call_tracked (bool):
Whether to enable call tracking for the
creative. Enabling call tracking also enables
call conversions.
disable_call_conversion (bool):
Whether to disable call conversion for the creative. If set
to ``true``, disables call conversions even when
``call_tracked`` is ``true``. If ``call_tracked`` is
``false``, this field is ignored.
phone_number_verification_url (str):
The URL to be used for phone number
verification.
conversion_action (str):
The conversion action to attribute a call conversion to. If
not set a default conversion action is used. This field only
has effect if call_tracked is set to true. Otherwise this
field is ignored.
conversion_reporting_state (google.ads.googleads.v8.enums.types.CallConversionReportingStateEnum.CallConversionReportingState):
The call conversion behavior of this call ad.
It can use its own call conversion setting,
inherit the account level setting, or be
disabled.
path1 (str):
First part of text that may appear appended
            to the url displayed in the ad. Optional.
path2 (str):
Second part of text that may appear appended
            to the url displayed in the ad. This field
can only be set when path1 is set. Optional.
"""
country_code = proto.Field(
proto.STRING,
number=1,
)
phone_number = proto.Field(
proto.STRING,
number=2,
)
business_name = proto.Field(
proto.STRING,
number=3,
)
headline1 = proto.Field(
proto.STRING,
number=11,
)
headline2 = proto.Field(
proto.STRING,
number=12,
)
description1 = proto.Field(
proto.STRING,
number=4,
)
description2 = proto.Field(
proto.STRING,
number=5,
)
call_tracked = proto.Field(
proto.BOOL,
number=6,
)
disable_call_conversion = proto.Field(
proto.BOOL,
number=7,
)
phone_number_verification_url = proto.Field(
proto.STRING,
number=8,
)
conversion_action = proto.Field(
proto.STRING,
number=9,
)
conversion_reporting_state = proto.Field(
proto.ENUM,
number=10,
enum=call_conversion_reporting_state.CallConversionReportingStateEnum.CallConversionReportingState,
)
path1 = proto.Field(
proto.STRING,
number=13,
)
path2 = proto.Field(
proto.STRING,
number=14,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 1,371,305,617,412,775,000 | 30.178258 | 135 | 0.60667 | false |
mapzen/tilequeue | tilequeue/wof.py | 1 | 46004 | from __future__ import absolute_import
from collections import namedtuple
from contextlib import closing
from cStringIO import StringIO
from datetime import datetime
from edtf import parse_edtf
from operator import attrgetter
from psycopg2.extras import register_hstore
from shapely import geos
from tilequeue.tile import coord_marshall_int
from tilequeue.tile import coord_unmarshall_int
from tilequeue.tile import mercator_point_to_coord
from tilequeue.tile import reproject_lnglat_to_mercator
import csv
import json
import os.path
import psycopg2
import Queue
import requests
import shapely.geometry
import shapely.ops
import shapely.wkb
import threading
DATABASE_SRID = 3857
def generate_csv_lines(requests_result):
for line in requests_result.iter_lines():
if line:
yield line
neighbourhood_placetypes_to_int = dict(
neighbourhood=1,
microhood=2,
macrohood=3,
borough=4,
)
neighbourhood_int_to_placetypes = {
1: 'neighbourhood',
2: 'microhood',
3: 'macrohood',
4: 'borough',
}
NeighbourhoodMeta = namedtuple(
'NeighbourhoodMeta',
'wof_id placetype name hash label_position')
Neighbourhood = namedtuple(
'Neighbourhood',
'wof_id placetype name hash label_position geometry n_photos area '
'min_zoom max_zoom is_landuse_aoi inception cessation l10n_names')
def parse_neighbourhood_meta_csv(csv_line_generator, placetype):
reader = csv.reader(csv_line_generator)
it = iter(reader)
header = it.next()
lbl_lat_idx = header.index('lbl_latitude')
lbl_lng_idx = header.index('lbl_longitude')
name_idx = header.index('name')
wof_id_idx = header.index('id')
hash_idx = header.index('file_hash')
superseded_by_idx = header.index('superseded_by')
min_row_length = (max(
lbl_lat_idx, lbl_lng_idx, name_idx, wof_id_idx, hash_idx,
superseded_by_idx) + 1)
for row in it:
if len(row) < min_row_length:
continue
superseded_by = row[superseded_by_idx]
if superseded_by:
continue
wof_id_str = row[wof_id_idx]
if not wof_id_str:
continue
try:
wof_id = int(wof_id_str)
except ValueError:
continue
name = row[name_idx]
if not name:
continue
lat_str = row[lbl_lat_idx]
lng_str = row[lbl_lng_idx]
try:
lat = float(lat_str)
lng = float(lng_str)
except ValueError:
continue
file_hash = row[hash_idx]
label_x, label_y = reproject_lnglat_to_mercator(lng, lat)
label_position = shapely.geometry.Point(label_x, label_y)
neighbourhood_meta = NeighbourhoodMeta(
wof_id, placetype, name, file_hash, label_position)
yield neighbourhood_meta
def _make_requests_session_with_retries(max_retries):
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util import Retry
s = requests.Session()
a = HTTPAdapter(
max_retries=Retry(
total=max_retries,
status_forcelist=[ # this is a list of statuses to consider to be
# an error and retry.
429, # Too many requests (i.e: back off)
500, # Generic internal server error
502, # Bad Gateway - i.e: upstream failure
503, # Unavailable, temporarily
504, # Gateway timeout
522 # Origin connection timed out
],
backoff_factor=1.0 # back off for 0s, 1s, 3s, 7s, etc... after
# each successive failure. (factor*(2^N-1))
))
# use retry for both HTTP and HTTPS connections.
s.mount('http://', a)
s.mount('https://', a)
return s
def fetch_wof_url_meta_neighbourhoods(url, placetype, max_retries):
s = _make_requests_session_with_retries(max_retries)
r = s.get(url, stream=True)
assert r.status_code == 200, 'Failure requesting: %s' % url
csv_line_generator = generate_csv_lines(r)
return parse_neighbourhood_meta_csv(csv_line_generator, placetype)
class NeighbourhoodFailure(object):
def __init__(self, wof_id, reason, message, halt=False, skipped=False,
funky=False, superseded=False):
# halt is a signal that threads should stop fetching. This
# would happen during a network IO error or when we get an
# unexpected http response when fetching raw json files. In
# some scenarios this could be recoverable, but because that
# isn't always the case we assume that we should stop further
# requests for more raw json files, and just process what we
# have so far.
# skipped means that we won't log this failure, ie there was
# an earlier "halt" error and processing of further records
# has stopped.
# funky is a signal downstream that this is a "soft" or
# expected failure, in the sense that it only means that we
# should skip the record, but we didn't actually detect any
# errors with the processing
# superseded is set when the json has a value for
# wof:superseded. This would indicate a data inconsistency
# because the meta csv file didn't have it set if we're trying
# to fetch the raw json in the first place. But this is meant
# to catch this scenario.
self.wof_id = wof_id
self.reason = reason
self.message = message
self.halt = halt
self.skipped = skipped
self.funky = funky
self.superseded = superseded
# given a string, parse it as EDTF while allowing a single 'u' or None to mean
# completely unknown, and return the EDTF object.
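# e.g. _normalize_edtf('2016-01') parses normally, while _normalize_edtf('u'),
# _normalize_edtf(None) and unparseable strings all fall back to 'uuuu'
# (completely unknown).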
def _normalize_edtf(s):
if s and s != 'u':
try:
return parse_edtf(s)
except Exception:
pass
# when all else fails, return the "most unknown" EDTF.
return parse_edtf('uuuu')
def create_neighbourhood_from_json(json_data, neighbourhood_meta):
def failure(reason):
return NeighbourhoodFailure(
neighbourhood_meta.wof_id, reason, json.dumps(json_data))
if not isinstance(json_data, dict):
return failure('Unexpected json')
props = json_data.get('properties')
if props is None or not isinstance(props, dict):
return failure('Missing properties')
superseded_by = props.get('wof:superseded_by')
# these often show up as empty lists, so we do a truthy test
    # instead of explicitly checking for None
if superseded_by:
return NeighbourhoodFailure(
neighbourhood_meta.wof_id,
'superseded_by: %s' % superseded_by,
json.dumps(json_data), superseded=True)
geometry = json_data.get('geometry')
if geometry is None:
return failure('Missing geometry')
try:
shape_lnglat = shapely.geometry.shape(geometry)
except Exception:
return failure('Unexpected geometry')
shape_mercator = shapely.ops.transform(
reproject_lnglat_to_mercator, shape_lnglat)
# ignore any features that are marked as funky
is_funky = props.get('mz:is_funky')
if is_funky is not None:
try:
is_funky = int(is_funky)
except ValueError:
return failure('Unexpected mz:is_funky value %s' % is_funky)
if is_funky != 0:
return NeighbourhoodFailure(
neighbourhood_meta.wof_id,
'mz:is_funky value is not 0: %s' % is_funky,
json.dumps(json_data), funky=True)
wof_id = props.get('wof:id')
if wof_id is None:
return failure('Missing wof:id')
try:
wof_id = int(wof_id)
except ValueError:
return failure('wof_id is not an int: %s' % wof_id)
name = props.get('wof:name')
if name is None:
return failure('Missing name')
n_photos = props.get('misc:photo_sum')
if n_photos is not None:
try:
n_photos = int(n_photos)
except ValueError:
return failure('misc:photo_sum is not an int: %s' % n_photos)
label_lat = props.get('lbl:latitude')
label_lng = props.get('lbl:longitude')
if label_lat is None or label_lng is None:
# first, try to fall back to geom:* when lbl:* is missing. we'd prefer
# to have lbl:*, but it's better to have _something_ than nothing.
label_lat = props.get('geom:latitude')
label_lng = props.get('geom:longitude')
if label_lat is None or label_lng is None:
return failure('Missing lbl:latitude or lbl:longitude and ' +
'geom:latitude or geom:longitude')
try:
label_lat = float(label_lat)
label_lng = float(label_lng)
except ValueError:
return failure('lbl:latitude or lbl:longitude not float')
label_merc_x, label_merc_y = reproject_lnglat_to_mercator(
label_lng, label_lat)
label_position = shapely.geometry.Point(label_merc_x, label_merc_y)
placetype = props.get('wof:placetype')
if placetype is None:
return failure('Missing wof:placetype')
default_min_zoom = 15
default_max_zoom = 16
min_zoom = props.get('mz:min_zoom')
if min_zoom is None:
min_zoom = default_min_zoom
else:
try:
min_zoom = float(min_zoom)
except ValueError:
return failure('mz:min_zoom not float: %s' % min_zoom)
max_zoom = props.get('mz:max_zoom')
if max_zoom is None:
max_zoom = default_max_zoom
else:
try:
max_zoom = float(max_zoom)
except ValueError:
return failure('mz:max_zoom not float: %s' % max_zoom)
is_landuse_aoi = props.get('mz:is_landuse_aoi')
if is_landuse_aoi is not None:
try:
is_landuse_aoi = int(is_landuse_aoi)
except ValueError:
return failure('is_landuse_aoi not int: %s' % is_landuse_aoi)
is_landuse_aoi = is_landuse_aoi != 0
if shape_mercator.type in ('Polygon', 'MultiPolygon'):
area = int(shape_mercator.area)
else:
area = None
# for the purposes of display, we only care about the times when something
# should first start to be shown, and the time when it should stop
# showing.
edtf_inception = _normalize_edtf(props.get('edtf:inception'))
edtf_cessation = _normalize_edtf(props.get('edtf:cessation'))
edtf_deprecated = _normalize_edtf(props.get('edtf:deprecated'))
# check that the dates are valid first to return back a better error
inception_earliest = edtf_inception.lower_fuzzy()
cessation_latest = edtf_cessation.upper_fuzzy()
deprecated_latest = edtf_deprecated.upper_fuzzy()
if inception_earliest is None:
return failure('invalid edtf:inception: %s' %
props.get('edtf:inception'))
if cessation_latest is None:
return failure('invalid edtf:cessation: %s' %
props.get('edtf:cessation'))
if deprecated_latest is None:
return failure('invalid edtf:deprecated: %s' %
props.get('edtf:deprecated'))
# the 'edtf:inception' property gives us approximately the former and we
# take the earliest date it could mean. the 'edtf:cessation' and
# 'edtf:deprecated' would both stop the item showing, so we take the
# earliest of each's latest possible date.
inception = inception_earliest
cessation = min(cessation_latest, deprecated_latest)
# grab any names in other languages
lang_suffix_size = len('_preferred')
l10n_names = {}
for k, v in props.iteritems():
if not v:
continue
if not k.startswith('name:') or not k.endswith('_preferred'):
continue
if isinstance(v, list):
v = v[0]
lang = k[:-lang_suffix_size]
l10n_names[lang] = v
if not l10n_names:
l10n_names = None
neighbourhood = Neighbourhood(
wof_id, placetype, name, neighbourhood_meta.hash, label_position,
shape_mercator, n_photos, area, min_zoom, max_zoom, is_landuse_aoi,
inception, cessation, l10n_names)
return neighbourhood
def fetch_url_raw_neighbourhood(url, neighbourhood_meta, max_retries):
try:
s = _make_requests_session_with_retries(max_retries)
r = s.get(url)
except Exception, e:
# if there is an IO error when fetching the url itself, we'll
# want to halt too
return NeighbourhoodFailure(
neighbourhood_meta.wof_id, 'IO Error fetching %s' % url, str(e),
halt=True)
if r.status_code != 200:
# once we don't get a 200, signal that we should stop all
# remaining processing
return NeighbourhoodFailure(
neighbourhood_meta.wof_id,
'Invalid response %d for %s' % (r.status_code, url), r.text,
halt=True)
try:
doc = r.json()
except Exception, e:
return NeighbourhoodFailure(
neighbourhood_meta.wof_id, 'Response is not json for %s' % url,
r.text)
try:
neighbourhood = create_neighbourhood_from_json(doc, neighbourhood_meta)
except Exception, e:
return NeighbourhoodFailure(
neighbourhood_meta.wof_id,
'Unexpected exception parsing json',
json.dumps(doc))
return neighbourhood
def fetch_fs_raw_neighbourhood(path, neighbourhood_meta):
with open(path) as fp:
json_data = json.load(fp)
neighbourhood = create_neighbourhood_from_json(json_data,
neighbourhood_meta)
return neighbourhood
def generate_wof_url(url_prefix, wof_id):
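    # WOF ids are sharded into path segments of up to three digits, e.g. an id
    # of 85865587 with prefix 'data' yields 'data/858/655/87/85865587.geojson'.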
wof_id_str = str(wof_id)
grouped = []
grouping = []
for c in wof_id_str:
grouping.append(c)
if len(grouping) == 3:
grouped.append(grouping)
grouping = []
if grouping:
grouped.append(grouping)
grouped_part = '/'.join([''.join(part) for part in grouped])
wof_url = '%s/%s/%s.geojson' % (url_prefix, grouped_part, wof_id_str)
return wof_url
def make_fetch_raw_url_fn(data_url_prefix, max_retries):
def fn(neighbourhood_meta):
wof_url = generate_wof_url(
data_url_prefix, neighbourhood_meta.wof_id)
neighbourhood = fetch_url_raw_neighbourhood(wof_url,
neighbourhood_meta,
max_retries)
return neighbourhood
return fn
def make_fetch_raw_filesystem_fn(data_path):
def fn(neighbourhood_meta):
# this will work for OS's with / separators
wof_path = generate_wof_url(
data_path, neighbourhood_meta.wof_id)
neighbourhood = fetch_fs_raw_neighbourhood(wof_path,
neighbourhood_meta)
return neighbourhood
return fn
def threaded_fetch(neighbourhood_metas, n_threads, fetch_raw_fn):
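    # simple fan-out/fan-in: worker threads consume metas from a bounded input
    # queue, push Neighbourhood or NeighbourhoodFailure results onto the output
    # queue, and stop when they see a None sentinel (one is enqueued per
    # thread). A halting failure sets `stop` so remaining work is skipped.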
queue_size = n_threads * 10
neighbourhood_input_queue = Queue.Queue(queue_size)
neighbourhood_output_queue = Queue.Queue(len(neighbourhood_metas))
stop = threading.Event()
def _fetch_raw_neighbourhood():
while True:
neighbourhood_meta = neighbourhood_input_queue.get()
if neighbourhood_meta is None:
break
if stop.is_set():
# assume all remaining neighbourhoods are failures
# these will get skipped
neighbourhood_output_queue.put(NeighbourhoodFailure(
neighbourhood_meta.wof_id,
'Skipping remaining neighbourhoods',
'Skipping remaining neighbourhoods',
skipped=True))
continue
neighbourhood = fetch_raw_fn(neighbourhood_meta)
if isinstance(neighbourhood, NeighbourhoodFailure):
failure = neighbourhood
# if this is the type of error that should stop all
# processing, notify all other threads
if failure.halt:
stop.set()
neighbourhood_output_queue.put(neighbourhood)
fetch_threads = []
for i in xrange(n_threads):
fetch_thread = threading.Thread(target=_fetch_raw_neighbourhood)
fetch_thread.start()
fetch_threads.append(fetch_thread)
for neighbourhood_meta in neighbourhood_metas:
neighbourhood_input_queue.put(neighbourhood_meta)
for fetch_thread in fetch_threads:
neighbourhood_input_queue.put(None)
neighbourhoods = []
failures = []
for i in xrange(len(neighbourhood_metas)):
neighbourhood = neighbourhood_output_queue.get()
if isinstance(neighbourhood, NeighbourhoodFailure):
failures.append(neighbourhood)
else:
neighbourhoods.append(neighbourhood)
for fetch_thread in fetch_threads:
fetch_thread.join()
return neighbourhoods, failures
class WofUrlNeighbourhoodFetcher(object):
def __init__(self, neighbourhood_url, microhood_url, macrohood_url,
borough_url, data_url_prefix, n_threads, max_retries):
self.neighbourhood_url = neighbourhood_url
self.microhood_url = microhood_url
self.macrohood_url = macrohood_url
self.borough_url = borough_url
self.data_url_prefix = data_url_prefix
self.n_threads = n_threads
self.max_retries = max_retries
def fetch_meta_neighbourhoods(self):
return fetch_wof_url_meta_neighbourhoods(
self.neighbourhood_url, 'neighbourhood', self.max_retries)
def fetch_meta_microhoods(self):
return fetch_wof_url_meta_neighbourhoods(
self.microhood_url, 'microhood', self.max_retries)
def fetch_meta_macrohoods(self):
return fetch_wof_url_meta_neighbourhoods(
self.macrohood_url, 'macrohood', self.max_retries)
def fetch_meta_boroughs(self):
return fetch_wof_url_meta_neighbourhoods(
self.borough_url, 'borough', self.max_retries)
def fetch_raw_neighbourhoods(self, neighbourhood_metas):
url_fetch_fn = make_fetch_raw_url_fn(self.data_url_prefix,
self.max_retries)
neighbourhoods, failures = threaded_fetch(
neighbourhood_metas, self.n_threads, url_fetch_fn)
return neighbourhoods, failures
class WofFilesystemNeighbourhoodFetcher(object):
def __init__(self, wof_data_path, n_threads):
self.wof_data_path = wof_data_path
self.n_threads = n_threads
def _fetch_meta_neighbourhoods(self, placetype):
meta_fs_path = os.path.join(
self.wof_data_path, 'meta', 'wof-%s-latest.csv' % placetype)
with open(meta_fs_path) as fp:
meta_neighbourhoods = list(
parse_neighbourhood_meta_csv(fp, placetype))
return meta_neighbourhoods
def fetch_meta_neighbourhoods(self):
return self._fetch_meta_neighbourhoods('neighbourhood')
def fetch_meta_microhoods(self):
return self._fetch_meta_neighbourhoods('microhood')
def fetch_meta_macrohoods(self):
return self._fetch_meta_neighbourhoods('macrohood')
def fetch_meta_boroughs(self):
return self._fetch_meta_neighbourhoods('borough')
def fetch_raw_neighbourhoods(self, neighbourhood_metas):
data_prefix = os.path.join(
self.wof_data_path, 'data')
fs_fetch_fn = make_fetch_raw_filesystem_fn(data_prefix)
neighbourhoods, failures = threaded_fetch(
neighbourhood_metas, self.n_threads, fs_fetch_fn)
return neighbourhoods, failures
def create_neighbourhood_file_object(neighbourhoods, curdate=None):
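    # builds an in-memory, tab-separated buffer in the format expected by
    # psycopg2's cursor.copy_from() (tab field separator, \N for NULL), with
    # geometries written as hex WKB tagged with the database SRID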
if curdate is None:
curdate = datetime.now().date()
# tell shapely to include the srid when generating WKBs
geos.WKBWriter.defaults['include_srid'] = True
buf = StringIO()
def escape_string(s):
return s.encode('utf-8').replace('\t', ' ').replace('\n', ' ')
def escape_hstore_string(s):
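        # hstore values containing spaces must be double-quoted, with embedded
        # quotes escaped; tabs/newlines were already replaced in escape_string
        # because they act as COPY delimiters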
s = escape_string(s)
if ' ' in s:
s = s.replace('"', '\\\\"')
s = '"%s"' % s
return s
def write_nullable_int(buf, x):
if x is None:
buf.write('\\N\t')
else:
buf.write('%d\t' % x)
for n in neighbourhoods:
buf.write('%d\t' % n.wof_id)
buf.write('%d\t' % neighbourhood_placetypes_to_int[n.placetype])
buf.write('%s\t' % escape_string(n.name))
buf.write('%s\t' % escape_string(n.hash))
write_nullable_int(buf, n.n_photos)
write_nullable_int(buf, n.area)
buf.write('%d\t' % n.min_zoom)
buf.write('%d\t' % n.max_zoom)
if n.is_landuse_aoi is None:
buf.write('\\N\t')
else:
buf.write('%s\t' % ('true' if n.is_landuse_aoi else 'false'))
geos.lgeos.GEOSSetSRID(n.label_position._geom, DATABASE_SRID)
buf.write(n.label_position.wkb_hex)
buf.write('\t')
geos.lgeos.GEOSSetSRID(n.geometry._geom, DATABASE_SRID)
buf.write(n.geometry.wkb_hex)
buf.write('\t')
buf.write('%s\t' % n.inception.isoformat())
buf.write('%s\t' % n.cessation.isoformat())
is_visible = n.inception < curdate and n.cessation >= curdate
is_visible_str = 't' if is_visible else 'f'
buf.write('%s\t' % is_visible_str)
if n.l10n_names:
hstore_items = []
for k, v in n.l10n_names.items():
k = escape_hstore_string(k)
v = escape_hstore_string(v)
hstore_items.append("%s=>%s" % (k, v))
hstore_items_str = ','.join(hstore_items)
buf.write('%s' % hstore_items_str)
else:
buf.write('\\N')
buf.write('\n')
buf.seek(0)
return buf
class WofModel(object):
def __init__(self, postgresql_conn_info):
self.postgresql_conn_info = postgresql_conn_info
self.table = 'wof_neighbourhood'
def _create_conn(self):
conn = psycopg2.connect(**self.postgresql_conn_info)
register_hstore(conn)
conn.set_session(autocommit=False)
return conn
def find_previous_neighbourhood_meta(self):
with closing(self._create_conn()) as conn:
with conn.cursor() as cursor:
cursor.execute(
'SELECT wof_id, placetype, name, hash, '
'ST_AsBinary(label_position) '
'FROM %s ORDER BY wof_id ASC' % self.table)
ns = []
for row in cursor:
wof_id, placetype_int, name, hash, label_bytes = row
wof_id = int(wof_id)
label_bytes = bytes(label_bytes)
label_position = shapely.wkb.loads(label_bytes)
placetype = neighbourhood_int_to_placetypes[placetype_int]
n = NeighbourhoodMeta(
wof_id, placetype, name, hash, label_position)
ns.append(n)
return ns
def sync_neighbourhoods(
self, neighbourhoods_to_add, neighbourhoods_to_update,
ids_to_remove):
geos.WKBWriter.defaults['include_srid'] = True
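# build the bind-parameter dict shared by the UPDATE and INSERT
# statements below; geometries are passed as hex-encoded WKB with the
# SRID stamped on them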
def gen_data(n):
geos.lgeos.GEOSSetSRID(n.label_position._geom, DATABASE_SRID)
geos.lgeos.GEOSSetSRID(n.geometry._geom, DATABASE_SRID)
return dict(
table=self.table,
placetype=neighbourhood_placetypes_to_int[n.placetype],
name=n.name,
hash=n.hash,
n_photos=n.n_photos,
area=n.area,
min_zoom=n.min_zoom,
max_zoom=n.max_zoom,
is_landuse_aoi=n.is_landuse_aoi,
inception=n.inception,
cessation=n.cessation,
label_position=n.label_position.wkb_hex,
geometry=n.geometry.wkb_hex,
wof_id=n.wof_id,
l10n_name=n.l10n_names,
)
if ids_to_remove:
ids_to_remove_str = ', '.join(map(str, ids_to_remove))
if neighbourhoods_to_update:
update_data = map(gen_data, neighbourhoods_to_update)
if neighbourhoods_to_add:
insert_data = map(gen_data, neighbourhoods_to_add)
# this closes the connection
with closing(self._create_conn()) as conn:
# this commits the transaction
with conn as conn:
# this frees any resources associated with the cursor
with conn.cursor() as cursor:
if ids_to_remove:
cursor.execute(
'DELETE FROM %s WHERE wof_id IN (%s)' %
(self.table, ids_to_remove_str))
if neighbourhoods_to_update:
cursor.executemany(
'UPDATE ' + self.table + ' SET '
'placetype=%(placetype)s, '
'name=%(name)s, '
'hash=%(hash)s, '
'n_photos=%(n_photos)s, '
'area=%(area)s, '
'min_zoom=%(min_zoom)s, '
'max_zoom=%(max_zoom)s, '
'is_landuse_aoi=%(is_landuse_aoi)s, '
'inception=%(inception)s, '
'cessation=%(cessation)s, '
'label_position=%(label_position)s, '
'l10n_name=%(l10n_name)s, '
'geometry=%(geometry)s '
'WHERE wof_id=%(wof_id)s',
update_data)
if neighbourhoods_to_add:
cursor.executemany(
'INSERT INTO ' + self.table + ' '
'(wof_id, placetype, name, hash, n_photos, area, '
'min_zoom, max_zoom, is_landuse_aoi, '
'inception, cessation, '
'label_position, geometry, l10n_name) '
'VALUES (%(wof_id)s, %(placetype)s, %(name)s, '
'%(hash)s, %(n_photos)s, %(area)s, %(min_zoom)s, '
'%(max_zoom)s, %(is_landuse_aoi)s, '
'%(inception)s, %(cessation)s, '
'%(label_position)s, %(geometry)s, %(l10n_name)s)',
insert_data)
def insert_neighbourhoods(self, neighbourhoods):
# create this whole input file like object outside of the transaction
nf = create_neighbourhood_file_object(neighbourhoods)
# close the connection
with closing(self._create_conn()) as conn:
# commit the transaction
with conn as conn:
with conn.cursor() as cursor:
cursor.copy_from(nf, self.table)
# update the whole table so that the `is_visible` flag is accurate for the
# `current_date`. this returns a list of coords at `zoom` which have
# changed visibility from true to false or vice-versa.
def update_visible_timestamp(self, zoom, current_date):
coords = set()
def coord_int(row):
x, y = row
return coord_int_at_mercator_point(zoom, x, y)
# close the connection
with closing(self._create_conn()) as conn:
# commit the transaction
with conn as conn:
with conn.cursor() as cursor:
# select the x, y position of the label for each WOF
# neighbourhood that changed visibility when the date
# was updated to `current_date`.
cursor.execute(
'SELECT st_x(n.label_position) as x, '
' st_y(n.label_position) as y '
'FROM ('
' SELECT wof_update_visible_ids(%s::date) AS id '
') u '
'JOIN wof_neighbourhood n '
'ON n.wof_id = u.id',
(current_date.isoformat(),))
for result in cursor:
coords.add(coord_int(result))
return coords
def diff_neighbourhoods(xs, ys):
# NOTE this requires that both xs and ys be sequences of
# neighbourhoods, sorted by wof_id in ascending order
# returns a sequence of tuples:
# (None, x) -> neighbourhoods that have been added
# (x, None) -> neighbourhoods that have been removed
# (x, y) -> neighbourhoods that have been updated
diffs = []
n_xs = len(xs)
n_ys = len(ys)
idx_xs = 0
idx_ys = 0
# iterate through both lists while we still have values for both
while idx_xs < n_xs and idx_ys < n_ys:
x = xs[idx_xs]
y = ys[idx_ys]
if x.wof_id < y.wof_id:
diffs.append((x, None))
idx_xs += 1
continue
if y.wof_id < x.wof_id:
diffs.append((None, y))
idx_ys += 1
continue
if x.hash != y.hash:
# if there are any differences the hash will be different
diffs.append((x, y))
idx_xs += 1
idx_ys += 1
# catch any differences
while idx_xs < n_xs:
x = xs[idx_xs]
diffs.append((x, None))
idx_xs += 1
while idx_ys < n_ys:
y = ys[idx_ys]
diffs.append((None, y))
idx_ys += 1
return diffs
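# convert a point in mercator coordinates into the single-integer tile
# coordinate used to track expired tiles at the given zoom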
def coord_int_at_mercator_point(z, x, y):
coord = mercator_point_to_coord(z, x, y)
coord_int = coord_marshall_int(coord)
return coord_int
def generate_tile_expiry_list(zoom, diffs):
coord_ints = set()
def add_neighbourhood_diff(n):
if n is not None:
x = n.label_position.x
y = n.label_position.y
coord_int = coord_int_at_mercator_point(zoom, x, y)
coord_ints.add(coord_int)
for n1, n2 in diffs:
# for our purposes, we will expire any kind of modification,
# whether the neighbourhoods were added, removed, or updated
add_neighbourhood_diff(n1)
add_neighbourhood_diff(n2)
return coord_ints
def log_failure(logger, failure):
if not (failure.skipped or failure.funky or failure.superseded):
failure_message_one_line = failure.message.replace('\n', ' | ')
logger.error('Neighbourhood failure for %d: %r - %r' % (
failure.wof_id, failure.reason, failure_message_one_line))
class WofProcessor(object):
def __init__(self, fetcher, model, redis_cache_index, intersector,
rawr_enqueuer, logger, current_date):
self.fetcher = fetcher
self.model = model
self.redis_cache_index = redis_cache_index
self.intersector = intersector
self.rawr_enqueuer = rawr_enqueuer
self.logger = logger
self.zoom_expiry = 16
self.zoom_until = 11
self.current_date = current_date
def __call__(self):
# perform IO to get old/new neighbourhoods and tiles of
# interest in parallel
# queues to pass the results through the threads
prev_neighbourhoods_queue = Queue.Queue(1)
meta_neighbourhoods_queue = Queue.Queue(1)
meta_microhoods_queue = Queue.Queue(1)
meta_macrohoods_queue = Queue.Queue(1)
meta_boroughs_queue = Queue.Queue(1)
toi_queue = Queue.Queue(1)
# functions for the threads
def find_prev_neighbourhoods():
prev_neighbourhoods = (
self.model.find_previous_neighbourhood_meta())
prev_neighbourhoods_queue.put(prev_neighbourhoods)
def make_fetch_meta_csv_fn(fn, queue):
    # return a closure so the fetch runs on the worker thread
    def fetch_and_enqueue():
        neighbourhood_metas = list(fn())
        queue.put(neighbourhood_metas)
    return fetch_and_enqueue
def fetch_toi():
toi = self.redis_cache_index.fetch_tiles_of_interest()
toi_queue.put(toi)
self.logger.info('Fetching tiles of interest in background ...')
self.logger.info('Fetching old and new neighbourhoods ...')
# start the threads in parallel
prev_neighbourhoods_thread = threading.Thread(
target=find_prev_neighbourhoods)
prev_neighbourhoods_thread.start()
meta_neighbourhoods_thread = threading.Thread(
target=make_fetch_meta_csv_fn(
self.fetcher.fetch_meta_neighbourhoods,
meta_neighbourhoods_queue))
meta_neighbourhoods_thread.start()
meta_microhoods_thread = threading.Thread(
target=make_fetch_meta_csv_fn(
self.fetcher.fetch_meta_microhoods,
meta_microhoods_queue))
meta_microhoods_thread.start()
meta_macrohoods_thread = threading.Thread(
target=make_fetch_meta_csv_fn(
self.fetcher.fetch_meta_macrohoods,
meta_macrohoods_queue))
meta_macrohoods_thread.start()
meta_boroughs_thread = threading.Thread(
target=make_fetch_meta_csv_fn(
self.fetcher.fetch_meta_boroughs,
meta_boroughs_queue))
meta_boroughs_thread.start()
toi_thread = threading.Thread(target=fetch_toi)
toi_thread.start()
# ensure we're done with finding the next and previous
# neighbourhoods by this point
prev_neighbourhoods_thread.join()
meta_neighbourhoods_thread.join()
meta_microhoods_thread.join()
meta_macrohoods_thread.join()
meta_boroughs_thread.join()
self.logger.info('Fetching old and new neighbourhoods ... done')
prev_neighbourhoods = prev_neighbourhoods_queue.get()
meta_neighbourhoods = meta_neighbourhoods_queue.get()
meta_microhoods = meta_microhoods_queue.get()
meta_macrohoods = meta_macrohoods_queue.get()
meta_boroughs = meta_boroughs_queue.get()
# each of these has the appropriate placetype set now
meta_neighbourhoods = (
meta_neighbourhoods + meta_microhoods + meta_macrohoods +
meta_boroughs)
self.logger.info('Diffing neighbourhoods ...')
by_neighborhood_id = attrgetter('wof_id')
# the model is expected to return records in ascending order by id
# it doesn't seem like the neighbourhoods in the wof csv
# are in ascending order, so we sort explicitly here
meta_neighbourhoods.sort(key=by_neighborhood_id)
# the diff algorithm depends on the neighbourhood lists
# being in sorted order by id
diffs = diff_neighbourhoods(prev_neighbourhoods,
meta_neighbourhoods)
self.logger.info('Diffing neighbourhoods ... done')
# we need to fetch neighbourhoods that have either been
# updated or are new
wof_neighbourhoods_to_fetch = []
# based on the diff, we'll need to keep track of how we'll
# need to update
ids_to_add = set()
ids_to_update = set()
ids_to_remove = set()
for dx, dy in diffs:
if dy is not None:
if dx is None:
ids_to_add.add(dy.wof_id)
else:
ids_to_update.add(dy.wof_id)
wof_neighbourhoods_to_fetch.append(dy)
else:
ids_to_remove.add(dx.wof_id)
if wof_neighbourhoods_to_fetch:
self.logger.info('Fetching %d raw neighbourhoods ...' %
len(wof_neighbourhoods_to_fetch))
raw_neighbourhoods, failures = (
self.fetcher.fetch_raw_neighbourhoods(
wof_neighbourhoods_to_fetch))
self.logger.info('Fetching %d raw neighbourhoods ... done' %
len(wof_neighbourhoods_to_fetch))
else:
self.logger.info('No raw neighbourhoods found to fetch')
raw_neighbourhoods = ()
failures = []
# we should just remove any neighbourhoods from add/update lists
# also keep track of these ids to remove from the diffs too
failed_wof_ids = set()
superseded_by_wof_ids = set()
funky_wof_ids = set()
for failure in failures:
failure_wof_id = failure.wof_id
log_failure(self.logger, failure)
if failure.funky:
# this scenario is triggered for new neighbourhoods,
# or if a neighbourhood became funky
# we handle both of these scenarios in tests later on,
# but for now we just track the id of the funky
# neighbourhoods
funky_wof_ids.add(failure_wof_id)
if failure.superseded:
self.logger.warn(
'superseded_by inconsistency for %s' % failure_wof_id)
# this means that we had a value for superseded_by in
# the raw json, but not in the meta file
# this should get treated as a removal
superseded_by_wof_ids.add(failure_wof_id)
failed_wof_ids.add(failure_wof_id)
ids_to_add.discard(failure_wof_id)
ids_to_update.discard(failure_wof_id)
# we'll only log the number of funky records that we found
if funky_wof_ids:
self.logger.warn('Number of funky neighbourhoods: %d' %
len(funky_wof_ids))
# now we'll want to ensure that the failed ids are not present
# in any additions or updates
new_diffs = []
for n1, n2 in diffs:
if n2 is None or n2.wof_id not in failed_wof_ids:
new_diffs.append((n1, n2))
diffs = new_diffs
# and we'll want to also treat any superseded_by
# inconsistencies as removals
# but we need the original neighbourhood meta object to
# generate the diff, for its label position to expire the
# appropriate tile
if superseded_by_wof_ids:
for n in prev_neighbourhoods:
if n.wof_id in superseded_by_wof_ids:
ids_to_remove.add(n.wof_id)
diffs.append((n, None))
# if the neighbourhood became funky and we had it in our
# existing set, we'll want to remove it
if funky_wof_ids:
for n in prev_neighbourhoods:
if n.wof_id in funky_wof_ids:
ids_to_remove.add(n.wof_id)
diffs.append((n, None))
sync_neighbourhoods_thread = None
if diffs:
self.logger.info("Sync'ing neighbourhoods ...")
# raw_neighbourhoods contains both the neighbourhoods to
# add and update
# we split it up here
neighbourhoods_to_update = []
neighbourhoods_to_add = []
for neighbourhood in raw_neighbourhoods:
if neighbourhood.wof_id in ids_to_add:
neighbourhoods_to_add.append(neighbourhood)
elif neighbourhood.wof_id in ids_to_update:
neighbourhoods_to_update.append(neighbourhood)
else:
assert 0, '%d should have been found to add or update' % (
neighbourhood.wof_id)
if neighbourhoods_to_add:
self.logger.info('Inserting neighbourhoods: %d' %
len(neighbourhoods_to_add))
if neighbourhoods_to_update:
self.logger.info('Updating neighbourhoods: %d' %
len(neighbourhoods_to_update))
if ids_to_remove:
self.logger.info('Removing neighbourhoods: %d' %
len(ids_to_remove))
def _sync_neighbourhoods():
self.model.sync_neighbourhoods(
neighbourhoods_to_add, neighbourhoods_to_update,
ids_to_remove)
sync_neighbourhoods_thread = threading.Thread(
target=_sync_neighbourhoods)
sync_neighbourhoods_thread.start()
else:
self.logger.info('No diffs found, no sync necessary')
if diffs:
self.logger.info('Generating tile expiry list ...')
expired_coord_ints = generate_tile_expiry_list(
self.zoom_expiry, diffs)
self.logger.info(
'Generating tile expiry list ... done - '
'Found %d expired tiles' % len(expired_coord_ints))
else:
self.logger.info('No diffs found, not generating expired coords')
expired_coord_ints = set()
# ensure we're done fetching the tiles of interest by this point
toi_thread.join()
toi = toi_queue.get()
self.logger.info('Have tiles of interest')
# we need to finish sync'ing neighbourhoods before we flip the
# visibility flag and enqueue coordinates
if sync_neighbourhoods_thread is not None:
sync_neighbourhoods_thread.join()
self.logger.info("Sync'ing neighbourhoods ... done")
# update the current timestamp, returning the list of coords that
# have changed visibility.
visibility_updates = \
self.model.update_visible_timestamp(
self.zoom_expiry, self.current_date)
self.logger.info('Have %d tile expiries from visibility changes.'
% len(visibility_updates))
expired_coord_ints.update(visibility_updates)
if diffs:
# intersect the tiles of interest with the expired coords from
# the neighbourhood diff
self.logger.info('Intersecting %d tiles of interest with %d '
'expired tiles' % (
len(toi), len(expired_coord_ints)))
toi_expired_coord_ints, _ = self.intersector(
expired_coord_ints, toi, self.zoom_until)
coords = map(coord_unmarshall_int, toi_expired_coord_ints)
self.logger.info('Intersection complete, will expire %d tiles' %
len(coords))
else:
self.logger.info('No diffs found, no need to intersect')
coords = ()
if coords:
self.logger.info('Asking enqueuer to enqueue %d coords ...' %
len(coords))
self.rawr_enqueuer(coords)
self.logger.info('Asking enqueuer to enqueue %d coords ... done' %
len(coords))
else:
self.logger.info('No expired tiles to enqueue')
class WofInitialLoader(object):
def __init__(self, fetcher, model, logger):
self.fetcher = fetcher
self.model = model
self.logger = logger
def __call__(self):
self.logger.info('Fetching meta neighbourhoods csv ...')
neighbourhood_metas = list(self.fetcher.fetch_meta_neighbourhoods())
self.logger.info('Fetching meta neighbourhoods csv ... done')
self.logger.info('Fetching meta microhoods csv ...')
microhood_metas = list(self.fetcher.fetch_meta_microhoods())
self.logger.info('Fetching meta microhoods csv ... done')
self.logger.info('Fetching meta macrohoods csv ...')
macrohood_metas = list(self.fetcher.fetch_meta_macrohoods())
self.logger.info('Fetching meta macrohoods csv ... done')
self.logger.info('Fetching meta boroughs csv ...')
borough_metas = list(self.fetcher.fetch_meta_boroughs())
self.logger.info('Fetching meta boroughs csv ... done')
neighbourhood_metas = (
neighbourhood_metas + microhood_metas + macrohood_metas +
borough_metas)
self.logger.info('Fetching raw neighbourhoods ...')
neighbourhoods, failures = self.fetcher.fetch_raw_neighbourhoods(
neighbourhood_metas)
for failure in failures:
log_failure(self.logger, failure)
self.logger.info('Fetching raw neighbourhoods ... done')
self.logger.info('Inserting %d neighbourhoods ...' %
len(neighbourhoods))
self.model.insert_neighbourhoods(neighbourhoods)
self.logger.info('Inserting %d neighbourhoods ... done' %
len(neighbourhoods))
def make_wof_url_neighbourhood_fetcher(
neighbourhood_url, microhood_url, macrohood_url, borough_url,
data_prefix_url, n_threads, max_retries):
fetcher = WofUrlNeighbourhoodFetcher(
neighbourhood_url, microhood_url, macrohood_url, borough_url,
data_prefix_url, n_threads, max_retries)
return fetcher
def make_wof_filesystem_neighbourhood_fetcher(wof_data_path, n_threads):
fetcher = WofFilesystemNeighbourhoodFetcher(
wof_data_path, n_threads)
return fetcher
def make_wof_model(postgresql_conn_info):
wof_model = WofModel(postgresql_conn_info)
return wof_model
def make_wof_processor(
fetcher, model, redis_cache_index, rawr_enqueuer, logger,
current_date):
from tilequeue.command import explode_and_intersect
wof_processor = WofProcessor(
fetcher, model, redis_cache_index, explode_and_intersect,
rawr_enqueuer, logger, current_date)
return wof_processor
def make_wof_initial_loader(fetcher, model, logger):
wof_loader = WofInitialLoader(fetcher, model, logger)
return wof_loader
| mit | 9,120,416,503,911,111,000 | 35.280757 | 79 | 0.582449 | false |
Tala/bybop | src/interactive.py | 1 | 1944 | #!/usr/bin/env python
import sys
try:
import readline
except ImportError:
import pyreadline as readline
import os
import code
import rlcompleter
lib_path = os.path.abspath(os.path.join('..', 'src'))
sys.path.append(lib_path)
lib_path = os.path.abspath(os.path.join('..', '..', 'ARSDKBuildUtils', 'Utils', 'Python'))
sys.path.append(lib_path)
from Bybop_Discovery import *
import Bybop_Device
print('Searching for devices')
from zeroconf import ZeroconfServiceTypes
print('\n'.join(ZeroconfServiceTypes.find()))
print('done.')
discovery = Discovery([DeviceID.BEBOP_DRONE, DeviceID.JUMPING_SUMO, DeviceID.AIRBORNE_NIGHT, DeviceID.JUMPING_NIGHT])
discovery.wait_for_change()
devices = discovery.get_devices()
#discovery.stop()
if not devices:
print('Oops ...')
sys.exit(1)
device = devices.itervalues().next()
print('Will connect to ' + get_name(device))
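# local UDP port on which this controller listens for
# device-to-controller (d2c) traffic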
d2c_port = 43210
controller_type = "PC"
controller_name = "bybop shell"
drone = Bybop_Device.create_and_connect(device, d2c_port, controller_type, controller_name)
if drone is None:
print('Unable to connect to a product')
sys.exit(1)
drone.dump_state()
vars = globals().copy()
vars.update(locals())
readline.set_completer(rlcompleter.Completer(vars).complete)
readline.parse_and_bind("tab: complete")
shell = code.InteractiveConsole(vars)
# drone.jump(0) # jump forward
# drone.jump(1) # jump up
# drone.move_forward(20) # move forwards
# drone.move_forward(-20) # move backwards
# drone.move(0,50) # turn right?
# drone.move(0,-50) # turn left?
# drone.spin() # spin around
# drone.simpleAnimation(0)
# drone.simpleAnimation(9)
# Currently known values:
# - 0 : stop
# - 1 : spin
# - 2 : tap
# - 3 : slowshake
# - 4 : metronome
# - 5 : ondulation
# - 6 : spinjump
# - 7 : spintoposture
# - 8 : spiral
# - 9 : slalom
# """
shell.interact()
drone.stop()
| bsd-3-clause | -245,576,819,196,394,050 | 21.870588 | 117 | 0.667181 | false |
status-im/status-react | test/appium/tests/atomic/account_management/test_profile.py | 1 | 71527 | import re
from tests import marks, bootnode_address, mailserver_address, test_dapp_url, test_dapp_name, mailserver_ams, \
mailserver_gc, mailserver_hk, used_fleet, common_password
from tests.base_test_case import SingleDeviceTestCase, MultipleDeviceTestCase
from tests.users import transaction_senders, basic_user, ens_user, ens_user_ropsten
from views.sign_in_view import SignInView
from time import time
class TestProfileSingleDevice(SingleDeviceTestCase):
@marks.testrail_id(6318)
@marks.medium
def test_can_delete_several_multiaccounts(self):
sign_in = SignInView(self.driver)
sign_in.create_user()
delete_alert_warning = sign_in.get_translation_by_key("delete-profile-warning")
profile = sign_in.profile_button.click()
profile.logout()
if sign_in.ok_button.is_element_displayed():
sign_in.ok_button.click()
sign_in.back_button.click()
sign_in.your_keys_more_icon.click()
sign_in.generate_new_key_button.click()
sign_in.next_button.click()
sign_in.next_button.click()
sign_in.create_password_input.set_value(common_password)
sign_in.next_button.click()
sign_in.confirm_your_password_input.set_value(common_password)
sign_in.next_button.click()
sign_in.maybe_later_button.click_until_presence_of_element(sign_in.lets_go_button)
sign_in.lets_go_button.click()
sign_in.just_fyi('Delete 2nd multiaccount')
public_key, username = sign_in.get_public_key_and_username(return_username=True)
profile.privacy_and_security_button.click()
profile.delete_my_profile_button.scroll_and_click()
for text in (username, delete_alert_warning):
if not profile.element_by_text(text).is_element_displayed():
self.errors.append('Required %s is not shown when deleting multiaccount' % text)
profile.delete_profile_button.click()
if profile.element_by_translation_id("profile-deleted-title").is_element_displayed():
self.driver.fail('Profile is deleted without confirmation with password')
profile.delete_my_profile_password_input.set_value(common_password)
profile.delete_profile_button.click_until_presence_of_element(profile.element_by_translation_id("profile-deleted-title"))
profile.ok_button.click()
sign_in.just_fyi('Delete last multiaccount')
sign_in.sign_in()
sign_in.profile_button.click()
profile.privacy_and_security_button.click()
profile.delete_my_profile_button.scroll_and_click()
profile.delete_my_profile_password_input.set_value(common_password)
profile.delete_profile_button.click()
profile.ok_button.click()
if not sign_in.get_started_button.is_element_displayed(20):
self.errors.append('Not redirected to the carousel view after deleting the last multiaccount')
self.errors.verify_no_errors()
@marks.testrail_id(5323)
@marks.critical
def test_share_copy_contact_code_and_wallet_address(self):
home = SignInView(self.driver).create_user()
profile = home.profile_button.click()
home.just_fyi("Copying contact code")
profile.share_my_profile_button.click()
public_key = profile.public_key_text.text
profile.public_key_text.long_press_element()
profile.copy_text()
home.just_fyi("Sharing contact code via messenger")
profile.share_button.click()
profile.share_via_messenger()
if not profile.element_by_text_part(public_key).is_element_present():
self.errors.append("Can't share public key")
[profile.click_system_back_button() for _ in range(2)]
profile.close_share_popup()
home.just_fyi("Check that can paste contact code in chat message input")
home = profile.home_button.click()
chat = home.add_contact(transaction_senders['M']['public_key'])
chat.chat_message_input.click()
chat.paste_text()
input_text = chat.chat_message_input.text
if input_text not in public_key or len(input_text) < 1:
self.errors.append('Public key was not copied')
chat.chat_message_input.clear()
chat.get_back_to_home_view()
home.just_fyi("Copying wallet address")
wallet = profile.wallet_button.click()
wallet.set_up_wallet()
wallet.accounts_status_account.click()
request = wallet.receive_transaction_button.click()
address = wallet.address_text.text
request.share_button.click()
request.element_by_translation_id("sharing-copy-to-clipboard").click()
home.just_fyi("Sharing wallet address via messenger")
request.share_button.click()
wallet.share_via_messenger()
if not wallet.element_by_text_part(address).is_element_present():
self.errors.append("Can't share address")
[wallet.click_system_back_button() for _ in range(2)]
wallet.close_share_popup()
home.just_fyi("Check that can paste wallet address in chat message input")
wallet.home_button.click()
home.get_chat(transaction_senders['M']['username']).click()
chat.chat_message_input.click()
chat.paste_text()
if chat.chat_message_input.text != address:
self.errors.append('Wallet address was not copied')
self.errors.verify_no_errors()
@marks.testrail_id(5502)
@marks.critical
def test_can_add_existing_ens(self):
home = SignInView(self.driver).recover_access(ens_user['passphrase'])
profile = home.profile_button.click()
profile.switch_network('Mainnet with upstream RPC')
home.profile_button.click()
dapp_view = profile.ens_usernames_button.click()
dapp_view.just_fyi('check if your name can be added via "ENS usernames" in Profile')
dapp_view.element_by_text('Get started').click()
dapp_view.ens_name_input.set_value(ens_user['ens'])
dapp_view.check_ens_name.click_until_absense_of_element(dapp_view.check_ens_name)
if not dapp_view.element_by_translation_id('ens-saved-title').is_element_displayed():
self.errors.append('No message "Username added" after resolving own username')
dapp_view.element_by_translation_id("ens-got-it").click()
dapp_view.just_fyi('check that after adding username is shown in "ENS usernames" and profile')
if not dapp_view.element_by_text(ens_user['ens']).is_element_displayed():
self.errors.append('No ENS name is shown in own "ENS usernames" after adding')
dapp_view.back_button.click()
if not dapp_view.element_by_text('@%s' % ens_user['ens']).is_element_displayed():
self.errors.append('No ENS name is shown in own profile after adding')
if not dapp_view.element_by_text('%s.stateofus.eth' % ens_user['ens']).is_element_displayed():
self.errors.append('No ENS name is shown in own profile after adding')
profile.share_my_profile_button.click()
if profile.ens_name_in_share_chat_key_text.text != '%s.stateofus.eth' % ens_user['ens']:
self.errors.append('No ENS name is shown on tapping on share icon in Profile')
profile.close_share_popup()
self.errors.verify_no_errors()
@marks.testrail_id(6296)
@marks.high
def test_recover_account_from_new_user_seedphrase(self):
sign_in_view = SignInView(self.driver)
sign_in_view.create_user()
profile_view = sign_in_view.profile_button.click()
profile_view.privacy_and_security_button.click()
profile_view.backup_recovery_phrase_button.click()
profile_view.ok_continue_button.click()
recovery_phrase = " ".join(profile_view.get_recovery_phrase().values())
profile_view.close_button.click()
profile_view.back_button.click()
public_key = profile_view.get_public_key_and_username()
wallet_view = profile_view.wallet_button.click()
wallet_view.set_up_wallet()
address = wallet_view.get_wallet_address()
sign_in_view.profile_button.click()
profile_view.logout()
self.driver.reset()
sign_in_view.recover_access(recovery_phrase)
wallet_view = sign_in_view.wallet_button.click()
wallet_view.set_up_wallet()
if wallet_view.get_wallet_address() != address:
self.driver.fail("Seed phrase displayed in new accounts for back up does not recover respective address")
profile_view = wallet_view.profile_button.click()
if profile_view.get_public_key_and_username() != public_key:
self.driver.fail("Seed phrase displayed in new accounts for back up does not recover respective public key")
@marks.testrail_id(5433)
@marks.medium
def test_invite_friends(self):
home = SignInView(self.driver).create_user()
self.driver.info("Check it via 'Invite friends' on home view")
home.invite_friends_button.click()
home.share_via_messenger()
home.element_by_text_part("Hey join me on Status: https://join.status.im/u/0x")
home.click_system_back_button()
self.driver.info("Check it via bottom sheet menu")
home.plus_button.click()
home.chats_menu_invite_friends_button.click()
home.share_via_messenger()
home.element_by_text_part("Hey join me on Status: https://join.status.im/u/0x")
@marks.testrail_id(6312)
@marks.medium
def test_add_remove_contact_via_contacts_view(self):
home = SignInView(self.driver).create_user()
home.just_fyi('Check empty contacts view')
profile = home.profile_button.click()
profile.switch_network()
home.profile_button.click()
profile.contacts_button.click()
if not profile.add_new_contact_button.is_element_displayed():
self.driver.fail('No expected element on contacts view')
users = {
'scanning_ens_with_stateofus_domain_deep_link': {
'contact_code': 'https://join.status.im/u/%s.stateofus.eth' % ens_user_ropsten['ens'],
'username': ens_user_ropsten['username']
},
'scanning_public_key': {
'contact_code': transaction_senders['A']['public_key'],
'username': transaction_senders['A']['username'],
},
'pasting_public_key': {
'contact_code': basic_user['public_key'],
'username': basic_user['username'],
},
'pasting_ens_another_domain': {
'contact_code': ens_user['ens_another_domain'],
'username': '@%s' % ens_user['ens_another_domain'],
'nickname': 'my_dear_friend'
},
}
home.just_fyi('Add contact and check that they appear in Contacts view')
chat_view = home.get_chat_view()
for key in users:
profile.add_new_contact_button.click()
home.just_fyi('Checking %s case' % key)
if 'scanning' in key:
chat_view.scan_contact_code_button.click()
if chat_view.allow_button.is_element_displayed():
chat_view.allow_button.click()
chat_view.enter_qr_edit_box.scan_qr(users[key]['contact_code'])
else:
chat_view.public_key_edit_box.click()
chat_view.public_key_edit_box.send_keys(users[key]['contact_code'])
if 'nickname' in users[key]:
chat_view.nickname_input_field.set_value(users[key]['nickname'])
chat_view.confirm_until_presence_of_element(profile.contacts_button)
if not profile.element_by_text(users[key]['username']).is_element_displayed():
self.errors.append('In %s case, username is not shown in contacts view after adding' % key)
if 'nickname' in users[key]:
if not profile.element_by_text(users[key]['nickname']).is_element_displayed():
self.errors.append('In %s case, nickname %s is not shown in contacts view after adding' % (key, users[key]['nickname']))
home.just_fyi('Remove contact and check that it disappeared')
user_to_remove = '@%s' % ens_user['ens_another_domain']
profile.element_by_text(user_to_remove).click()
chat_view.remove_from_contacts.click()
chat_view.close_button.click()
if profile.element_by_text(user_to_remove).is_element_displayed():
self.errors.append('Removed user is still shown in contact view')
home.just_fyi('Relogin and open profile view of the contact removed from Contact list to ensure there is no crash')
profile.profile_button.click()
profile.relogin()
one_to_one_chat = home.add_contact(public_key=ens_user['ens_another_domain'], add_in_contacts=False)
one_to_one_chat.chat_options.click()
profile = one_to_one_chat.view_profile_button.click()
if profile.remove_from_contacts.is_element_displayed():
self.errors.append('User still added in contact after relogin')
self.errors.verify_no_errors()
@marks.testrail_id(5431)
@marks.medium
def test_add_custom_network(self):
sign_in = SignInView(self.driver)
sign_in.create_user()
profile = sign_in.profile_button.click()
profile.add_custom_network()
sign_in.sign_in()
sign_in.profile_button.click()
profile.advanced_button.click()
profile.network_settings_button.scroll_to_element(10, 'up')
if not profile.element_by_text_part('custom_ropsten').is_element_displayed():
self.driver.fail("Network custom_ropsten was not added!")
@marks.critical
@marks.testrail_id(5419)
@marks.flaky
def test_logcat_backup_recovery_phrase(self):
sign_in = SignInView(self.driver)
home = sign_in.create_user()
home.just_fyi("Check that badge on profile about back up seed phrase is presented")
if home.profile_button.counter.text != '1':
self.errors.append('Profile button counter is not shown')
home.just_fyi("Back up seed phrase and check logcat")
profile = home.profile_button.click()
profile.privacy_and_security_button.click()
profile.backup_recovery_phrase_button.click()
profile.ok_continue_button.click()
recovery_phrase = profile.get_recovery_phrase()
profile.next_button.click()
word_number = profile.recovery_phrase_word_number.number
profile.recovery_phrase_word_input.set_value(recovery_phrase[word_number])
profile.next_button.click()
word_number_1 = profile.recovery_phrase_word_number.number
profile.recovery_phrase_word_input.set_value(recovery_phrase[word_number_1])
profile.done_button.click()
profile.yes_button.click()
profile.ok_got_it_button.click()
if home.profile_button.counter.is_element_displayed():
self.errors.append('Profile button counter is shown after recovery phrase backup')
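# the recovery phrase words typed above must never leak into the device
# log; fail the test if logcat contains them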
values_in_logcat = profile.find_values_in_logcat(passphrase1=recovery_phrase[word_number],
passphrase2=recovery_phrase[word_number_1])
if len(values_in_logcat) == 2:
self.driver.fail(values_in_logcat)
profile.profile_button.double_click()
home.just_fyi("Try to restore same account from seed phrase (should be possible only to unlock existing account)")
profile.logout()
sign_in.back_button.click()
sign_in.access_key_button.click()
sign_in.enter_seed_phrase_button.click()
sign_in.seedphrase_input.click()
sign_in.seedphrase_input.set_value(' '.join(recovery_phrase.values()))
sign_in.next_button.click()
sign_in.element_by_translation_id(id="unlock", uppercase=True).click()
sign_in.password_input.set_value(common_password)
chat = sign_in.sign_in_button.click()
chat.plus_button.click()
if not chat.start_new_chat_button.is_element_displayed():
self.errors.append("Can't proceed using account after it's re-recover twice.")
self.errors.verify_no_errors()
@marks.testrail_id(5453)
@marks.medium
@marks.flaky
def test_privacy_policy_terms_of_use_node_version_need_help_in_profile(self):
signin = SignInView(self.driver)
no_link_found_error_msg = 'Could not find privacy policy link at'
no_link_open_error_msg = 'Could not open our privacy policy from'
no_link_tos_error_msg = 'Could not open Terms of Use from'
signin.just_fyi("Checking privacy policy from sign in")
if not signin.privacy_policy_link.is_element_present():
self.driver.fail('%s Sign in view!' % no_link_found_error_msg)
web_page = signin.privacy_policy_link.click()
web_page.open_in_webview()
if not web_page.policy_summary.is_element_displayed():
self.errors.append('%s Sign in view!' % no_link_open_error_msg)
web_page.close_privacy_policy_button.click()
signin.just_fyi("Checking Terms of Use from sign")
if not signin.terms_of_use_link.is_element_displayed():
self.driver.fail("No Terms of Use link on Sign in view!")
web_page = signin.terms_of_use_link.click()
web_page.open_in_webview()
web_page.wait_for_d_aap_to_load()
web_page.swipe_by_custom_coordinates(0.5,0.8,0.5,0.4)
if not web_page.terms_of_use_summary.is_element_displayed():
self.errors.append('%s Sign in view!' % no_link_tos_error_msg)
web_page.close_privacy_policy_button.click()
home = signin.create_user()
profile = home.profile_button.click()
profile.about_button.click()
profile.privacy_policy_button.click()
if not web_page.policy_summary.is_element_displayed():
self.errors.append('%s Profile about view!' % no_link_open_error_msg)
web_page.click_system_back_button()
profile.terms_of_use_button.click()
web_page.wait_for_d_aap_to_load()
web_page.swipe_by_custom_coordinates(0.5,0.8,0.5,0.4)
if not web_page.terms_of_use_summary.is_element_displayed():
self.errors.append('%s Profile about view!' % no_link_tos_error_msg)
web_page.click_system_back_button()
signin.just_fyi("Checking that version match expected format and can be copied")
app_version = profile.app_version_text.text
node_version = profile.node_version_text.text
if not re.search(r'\d{1}[.]\d{1,2}[.]\d{1,2}\s[(]\d*[)]', app_version):
self.errors.append("App version %s didn't match expected format" % app_version)
if not re.search(r'StatusIM\/v.*\/android-\d{3}\/go\d{1}[.]\d{1,}', node_version):
self.errors.append("Node version %s didn't match expected format" % node_version)
profile.app_version_text.click()
profile.back_button.click()
profile.home_button.click()
chat = home.join_public_chat(home.get_random_chat_name())
message_input = chat.chat_message_input
message_input.paste_text_from_clipboard()
if message_input.text != app_version:
self.errors.append('Version number was not copied to clipboard')
signin.just_fyi("Checking Need help section")
home.profile_button.double_click()
profile.help_button.click()
web_page = profile.faq_button.click()
web_page.open_in_webview()
web_page.wait_for_d_aap_to_load()
if not profile.element_by_text_part("F.A.Q").is_element_displayed():
self.errors.append("FAQ is not shown")
profile.click_system_back_button()
profile.submit_bug_button.click()
if not profile.element_by_text_part("Welcome to Gmail").is_element_displayed():
self.errors.append("Mail client is not opened when submitting bug")
profile.click_system_back_button()
profile.request_a_feature_button.click()
if not profile.element_by_text("#support").is_element_displayed():
self.errors.append("Support channel is not suggested for requesting a feature")
self.errors.verify_no_errors()
@marks.testrail_id(5738)
@marks.high
def test_dapps_permissions(self):
home = SignInView(self.driver).create_user()
account_name = home.status_account_name
home.just_fyi('open Status Test Dapp, allow all and check permissions in Profile')
web_view = home.open_status_test_dapp()
dapp_view = home.dapp_tab_button.click()
profile = home.profile_button.click()
profile.privacy_and_security_button.click()
profile.dapp_permissions_button.click()
profile.element_by_text(test_dapp_name).click()
if not profile.element_by_text(account_name).is_element_displayed():
self.errors.append('Wallet permission was not granted')
if not profile.element_by_translation_id("chat-key").is_element_displayed():
self.errors.append('Contact code permission was not granted')
profile.just_fyi('revoke access and check that they are asked second time')
profile.revoke_access_button.click()
profile.back_button.click()
profile.dapp_tab_button.click()
web_view.open_tabs_button.click()
web_view.empty_tab_button.click()
dapp_view.open_url(test_dapp_url)
if not dapp_view.element_by_text_part(account_name).is_element_displayed():
self.errors.append('Wallet permission is not asked')
if dapp_view.allow_button.is_element_displayed():
dapp_view.allow_button.click(times_to_click=1)
if not dapp_view.element_by_translation_id("your-contact-code").is_element_displayed():
self.errors.append('Profile permission is not asked')
self.errors.verify_no_errors()
@marks.testrail_id(5368)
@marks.medium
def test_change_log_level_and_fleet(self):
home = SignInView(self.driver).create_user()
profile = home.profile_button.click()
profile.advanced_button.click()
default_log_level = 'INFO'
for text in default_log_level, used_fleet:
if not profile.element_by_text(text).is_element_displayed():
self.errors.append('%s is not selected by default' % text)
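# geth log lines carry the active level as lvl=trce / lvl=dbug / lvl=info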
if home.find_values_in_geth('lvl=trce', 'lvl=dbug'):
self.errors.append('"%s" is set, but found another entries!' % default_log_level)
if not home.find_values_in_geth('lvl=info'):
self.errors.append('"%s" is set, but no entries are found!' % default_log_level)
home.just_fyi('Set another loglevel and check that changes are applied')
profile.log_level_setting_button.click()
changed_log_level = 'TRACE'
profile.element_by_text(changed_log_level).click_until_presence_of_element(profile.confirm_button)
profile.confirm_button.click()
SignInView(self.driver).sign_in()
home.profile_button.click()
profile.advanced_button.click()
if not profile.element_by_text(changed_log_level).is_element_displayed():
self.errors.append('"%s" is not selected after change' % changed_log_level)
if not home.find_values_in_geth('lvl=trc'):
self.errors.append('"%s" is set, but no entries are found!' % changed_log_level)
home.just_fyi('Set another fleet and check that changes are applied')
profile.fleet_setting_button.click()
changed_fleet = 'eth.prod'
profile.element_by_text(changed_fleet).click_until_presence_of_element(profile.confirm_button)
profile.confirm_button.click()
SignInView(self.driver).sign_in()
home.profile_button.click()
profile.advanced_button.click()
if not profile.element_by_text(changed_fleet).is_element_displayed():
self.errors.append('"%s" fleet is not selected after change' % changed_fleet)
if not home.find_values_in_geth(changed_fleet):
self.errors.append('"%s" is set, but no entry is found!' % changed_fleet)
self.errors.verify_no_errors()
@marks.testrail_id(5766)
@marks.medium
@marks.flaky
def test_use_pinned_mailserver(self):
home = SignInView(self.driver).create_user()
profile = home.profile_button.click()
profile.just_fyi('pin history node')
profile.sync_settings_button.click()
node_gc, node_ams, node_hk = [profile.return_mailserver_name(history_node_name, used_fleet) for history_node_name in (mailserver_gc, mailserver_ams, mailserver_hk)]
h_node = node_ams
profile.mail_server_button.click()
profile.mail_server_auto_selection_button.click()
profile.mail_server_by_name(h_node).click()
profile.confirm_button.click()
if profile.element_by_translation_id("mailserver-error-title").is_element_displayed(10):
h_node = node_hk
profile.element_by_translation_id("mailserver-pick-another", uppercase=True).click()
profile.mail_server_by_name(h_node).click()
profile.confirm_button.click()
if profile.element_by_translation_id("mailserver-error-title").is_element_displayed(10):
self.driver.fail("Couldn't connect to any history node")
profile.just_fyi('check that history node is pinned')
profile.close_button.click()
if not profile.element_by_text(h_node).is_element_displayed():
self.errors.append('"%s" history node is not pinned' % h_node)
profile.home_button.click()
profile.just_fyi('Relogin and check that settings are preserved')
home.relogin()
home.profile_button.click()
profile.sync_settings_button.click()
if not profile.element_by_text(h_node).is_element_displayed():
self.errors.append('"%s" history node is not pinned' % h_node)
self.errors.verify_no_errors()
@marks.testrail_id(6219)
@marks.medium
def test_set_primary_ens_custom_domain(self):
home = SignInView(self.driver).recover_access(ens_user['passphrase'])
ens_not_stateofus = ens_user['ens_another_domain']
ens_stateofus = ens_user['ens']
home.just_fyi('add 2 ENS names in Profile')
profile = home.profile_button.click()
dapp = profile.connect_existing_status_ens(ens_stateofus)
profile.element_by_text("Add username").click()
profile.element_by_text_part("another domain").click()
dapp.ens_name_input.set_value(ens_not_stateofus)
dapp.check_ens_name.click_until_presence_of_element(dapp.element_by_translation_id("ens-got-it"))
dapp.element_by_translation_id("ens-got-it").click()
home.just_fyi('check that by default %s ENS is set' % ens_stateofus)
dapp.element_by_text('Primary username').click()
message_to_check = 'Your messages are displayed to others with'
if not dapp.element_by_text('%s\n@%s.stateofus.eth' % (message_to_check, ens_stateofus)).is_element_displayed():
self.errors.append('%s ENS username is not set as primary by default' % ens_stateofus)
home.just_fyi('check that ENS from another domain (%s) is shown in chat settings after setting a new primary ENS' % ens_not_stateofus)
dapp.set_primary_ens_username(ens_user['ens_another_domain']).click()
if profile.username_in_ens_chat_settings_text.text != '@' + ens_not_stateofus:
self.errors.append('ENS username %s is not shown in ENS chat settings after setting it as primary' % ens_not_stateofus)
self.errors.verify_no_errors()
@marks.testrail_id(5468)
@marks.medium
@marks.skip
# TODO: skip until profile picture change feature is enabled
def test_deny_camera_access_changing_profile_photo(self):
sign_in = SignInView(self.driver)
sign_in.create_user()
profile = sign_in.profile_button.click()
profile.profile_picture.click()
profile.capture_button.click()
for _ in range(2):
profile.deny_button.click()
profile.element_by_translation_id("camera-access-error").wait_for_visibility_of_element(3)
profile.ok_button.click()
profile.profile_picture.click()
profile.capture_button.click()
profile.deny_button.wait_for_visibility_of_element(2)
@marks.testrail_id(5469)
@marks.medium
@marks.skip
# TODO: skip until profile picture change feature is enabled
def test_deny_device_storage_access_changing_profile_photo(self):
sign_in = SignInView(self.driver)
sign_in.create_user()
profile = sign_in.profile_button.click()
profile.profile_picture.click()
profile.select_from_gallery_button.click()
profile.deny_button.click()
profile.element_by_translation_id(id="external-storage-denied", element_type='text').wait_for_visibility_of_element(3)
profile.ok_button.click()
profile.profile_picture.click()
profile.select_from_gallery_button.click()
profile.deny_button.wait_for_visibility_of_element(2)
class TestProfileMultipleDevice(MultipleDeviceTestCase):
@marks.testrail_id(6646)
@marks.high
def test_set_profile_picture(self):
self.create_drivers(2)
home_1, home_2 = SignInView(self.drivers[0]).create_user(), SignInView(self.drivers[1]).create_user()
profile_1 = home_1.profile_button.click()
public_key_1 = profile_1.get_public_key_and_username()
profile_1.just_fyi("Set user Profile image from Gallery")
profile_1.edit_profile_picture(file_name='sauce_logo.png')
home_1.profile_button.click()
profile_1.swipe_down()
if not profile_1.profile_picture.is_element_image_similar_to_template('sauce_logo_profile.png'):
self.drivers[0].fail('Profile picture was not updated')
profile_1.just_fyi("Check user profile updated in chat")
home = profile_1.home_button.click()
message = "Text message"
public_chat_name = home.get_random_chat_name()
home_2.add_contact(public_key=public_key_1)
home_2.home_button.click()
public_chat_2 = home_2.join_public_chat(public_chat_name)
public_chat_1 = home.join_public_chat(public_chat_name)
public_chat_1.chat_message_input.send_keys(message)
public_chat_1.send_message_button.click()
if not public_chat_2.chat_element_by_text(message).member_photo.is_element_image_similar_to_template('sauce_logo.png'):
self.drivers[0].fail('Profile picture was not updated in chat')
profile_1.just_fyi("Set user Profile image by taking Photo")
home_1.profile_button.click()
profile_1.edit_profile_picture(file_name='sauce_logo.png', update_by='Make Photo')
home_1.home_button.click(desired_view='chat')
public_chat_1.chat_message_input.send_keys(message)
public_chat_1.send_message_button.click()
if public_chat_2.chat_element_by_text(message).member_photo.is_element_image_similar_to_template('sauce_logo.png'):
self.drivers[0].fail('Profile picture was not updated in chat after making photo')
@marks.testrail_id(6636)
@marks.medium
def test_show_profile_picture_of_setting(self):
self.create_drivers(2)
home_1, home_2 = SignInView(self.drivers[0]).create_user(), SignInView(self.drivers[1]).create_user()
profile_1 = home_1.profile_button.click()
public_key_1, default_username_1 = profile_1.get_public_key_and_username(return_username=True)
profile_1.just_fyi("Set user Profile image from Gallery")
profile_1.edit_profile_picture(file_name='sauce_logo.png')
home_1.profile_button.click()
profile_1.swipe_down()
profile_1.just_fyi('set status in profile')
device_1_status = 'My new update!'
timeline = profile_1.status_button.click()
timeline.set_new_status(device_1_status)
if not timeline.timeline_own_account_photo.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('Profile picture was not updated in timeline')
profile_1.just_fyi('Check profile image it is not in mentions because user not in contacts yet')
one_to_one_chat_2 = home_2.add_contact(public_key_1, add_in_contacts=False)
one_to_one_chat_2.chat_message_input.set_value('@' + default_username_1)
one_to_one_chat_2.chat_message_input.click()
if one_to_one_chat_2.user_profile_image_in_mentions_list(default_username_1).is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('Profile picture is updated in 1-1 chat mentions list of contact not in Contacts list')
profile_1.just_fyi('Check profile image is in mentions because now user was added in contacts')
one_to_one_chat_2.add_to_contacts.click()
one_to_one_chat_2.chat_message_input.set_value('@' + default_username_1)
one_to_one_chat_2.chat_message_input.click()
if not one_to_one_chat_2.user_profile_image_in_mentions_list(default_username_1).is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('Profile picture was not updated in 1-1 chat mentions list')
profile_1.just_fyi('Check profile image updated in user profile view and on Chats view')
profile_2 = one_to_one_chat_2.profile_button.click()
profile_2.contacts_button.click()
profile_2.element_by_text(default_username_1).click()
if not profile_2.profile_picture.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('Profile picture was not updated on user Profile view')
profile_2.close_button.click()
one_to_one_chat_2.home_button.click(desired_view='home')
if not home_2.get_chat(default_username_1).chat_image.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture was not updated on Chats view')
profile_1.just_fyi('Check profile image updated in user profile view in Group chat views 4')
home_1.home_button.click(desired_view='home')
group_chat_message = 'Trololo'
group_chat_2 = home_2.create_group_chat(user_names_to_add=[default_username_1])
group_chat_2.send_message('Message')
group_chat_1 = home_1.get_chat('new_group_chat').click()
group_chat_1.join_chat_button.click()
group_chat_1.send_message(group_chat_message)
if not group_chat_2.chat_element_by_text(group_chat_message).member_photo.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture was not updated in message Group chat view')
profile_1.just_fyi('Check profile image updated in on login view')
home_1.profile_button.click()
profile_1.logout()
sign_in_1 = home_1.get_sign_in_view()
if not sign_in_1.get_multiaccount_by_position(1).account_logo.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture was not updated on Multiaccounts list select login view')
sign_in_1.element_by_text(default_username_1).click()
if not sign_in_1.get_multiaccount_by_position(1).account_logo.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture was not updated on account login view')
sign_in_1.password_input.set_value(common_password)
sign_in_1.sign_in_button.click()
profile_1.just_fyi('Remove user from contact and check there is no profile image displayed')
group_chat_2.profile_button.click()
profile_2.contacts_button.click()
profile_2.element_by_text(default_username_1).click()
one_to_one_chat_2.remove_from_contacts.click()
# Send message to User 2 so update of profile image picked up
group_chat_1 = home_1.get_chat('new_group_chat').click()
group_chat_1.send_message(group_chat_message)
one_to_one_chat_2.close_button.click()
one_to_one_chat_2.home_button.click(desired_view='home')
if home_2.get_chat(default_username_1).chat_image.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture did not revert to default after user was removed from Contacts')
profile_2.just_fyi('Enable to see profile image from "Everyone" setting')
home_2.profile_button.click()
profile_2.appearance_button.click()
profile_2.show_profile_pictures_of.click()
profile_2.element_by_text('Everyone').click()
group_chat_1.send_message(group_chat_message)
profile_2.home_button.click(desired_view='home')
if not home_2.get_chat(default_username_1).chat_image.is_element_image_similar_to_template('sauce_logo.png'):
self.errors.append('User profile picture is not shown after enabling "Everyone" setting')
self.errors.verify_no_errors()
@marks.testrail_id(5432)
@marks.medium
def test_custom_bootnodes(self):
self.create_drivers(2)
home_1, home_2 = SignInView(self.drivers[0]).create_user(), SignInView(self.drivers[1]).create_user()
public_key = home_2.get_public_key_and_username()
profile_1, profile_2 = home_1.profile_button.click(), home_2.profile_button.click()
username_1, username_2 = profile_1.default_username_text.text, profile_2.default_username_text.text
profile_1.just_fyi('Add custom bootnode, enable bootnodes and check validation')
profile_1.advanced_button.click()
profile_1.bootnodes_button.click()
profile_1.add_bootnode_button.click()
profile_1.specify_name_input.set_value('test')
profile_1.bootnode_address_input.set_value('invalid_bootnode_address')
if not profile_1.element_by_text_part('Invalid format').is_element_displayed():
self.errors.append('Validation message about invalid format of bootnode is not shown')
profile_1.save_button.click()
if profile_1.add_bootnode_button.is_element_displayed():
self.errors.append('User was navigated to another screen after tapping the disabled "Save" button')
profile_1.bootnode_address_input.clear()
profile_1.bootnode_address_input.set_value(bootnode_address)
profile_1.save_button.click()
profile_1.enable_bootnodes.click()
profile_1.home_button.click()
profile_1.just_fyi('Add contact and send first message')
chat_1 = home_1.add_contact(public_key)
message = 'test message'
chat_1.chat_message_input.send_keys(message)
chat_1.send_message_button.click()
profile_2.home_button.click()
chat_2 = home_2.get_chat(username_1).click()
chat_2.chat_element_by_text(message).wait_for_visibility_of_element()
chat_2.add_to_contacts.click()
profile_1.just_fyi('Disable custom bootnodes')
chat_1.profile_button.click()
profile_1.advanced_button.click()
profile_1.bootnodes_button.click()
profile_1.enable_bootnodes.click()
profile_1.home_button.click()
profile_1.just_fyi('Send message and check that it is received after disabling bootnodes')
home_1.get_chat(username_2).click()
message_1 = 'new message'
chat_1.chat_message_input.send_keys(message_1)
chat_1.send_message_button.click()
for chat in chat_1, chat_2:
if not chat.chat_element_by_text(message_1).is_element_displayed():
self.errors.append('Message was not received after disabling bootnodes!')
self.errors.verify_no_errors()
@marks.testrail_id(5436)
@marks.medium
@marks.flaky
def test_add_switch_delete_custom_mailserver(self):
self.create_drivers(2)
sign_in_1, sign_in_2 = SignInView(self.drivers[0]), SignInView(self.drivers[1])
home_1, home_2 = sign_in_1.create_user(), sign_in_2.create_user()
public_key = home_2.get_public_key_and_username()
home_2.home_button.click()
profile_1 = home_1.profile_button.click()
username_1 = profile_1.default_username_text.text
profile_1.just_fyi('disable autoselection')
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
mailserver = profile_1.return_mailserver_name(mailserver_hk, used_fleet)
profile_1.mail_server_auto_selection_button.click()
profile_1.mail_server_by_name(mailserver).click()
profile_1.confirm_button.click()
profile_1.just_fyi('add custom mailserver (check address/name validation) and connect to it')
profile_1.plus_button.click()
server_name = 'test'
profile_1.save_button.click()
if profile_1.element_by_text(mailserver).is_element_displayed():
self.errors.append('Could add custom mailserver with empty address and name')
profile_1.specify_name_input.set_value(server_name)
profile_1.mail_server_address_input.set_value(mailserver_address[:-3])
profile_1.save_button.click()
if not profile_1.element_by_text_part("Invalid format").is_element_displayed():
self.errors.append('could add custom mailserver with invalid address')
profile_1.mail_server_address_input.clear()
profile_1.mail_server_address_input.set_value(mailserver_address)
profile_1.save_button.click()
profile_1.mail_server_by_name(server_name).click()
profile_1.mail_server_connect_button.click()
profile_1.confirm_button.click()
if profile_1.element_by_text_part("Error connecting").is_element_displayed(40):
profile_1.retry_to_connect_to_mailserver()
profile_1.get_back_to_home_view()
profile_1.home_button.click()
profile_1.just_fyi('start chat with user2 and check that all messages are delivered')
chat_1 = home_1.add_contact(public_key)
message = 'test message'
chat_1.chat_message_input.send_keys(message)
chat_1.send_message_button.click()
chat_2 = home_2.get_chat(username_1).click()
chat_2.chat_element_by_text(message).wait_for_visibility_of_element()
message_1 = 'new message'
chat_2.chat_message_input.send_keys(message_1)
chat_2.send_message_button.click()
chat_1.chat_element_by_text(message_1).wait_for_visibility_of_element()
profile_1.just_fyi('delete custom mailserver')
chat_1.profile_button.click()
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
profile_1.element_by_text(mailserver).scroll_to_element()
profile_1.element_by_text(mailserver).click()
profile_1.confirm_button.click()
profile_1.element_by_text(server_name).scroll_to_element()
profile_1.element_by_text(server_name).click()
profile_1.mail_server_delete_button.scroll_to_element()
profile_1.mail_server_delete_button.click()
profile_1.mail_server_confirm_delete_button.click()
if profile_1.element_by_text(server_name).is_element_displayed():
self.errors.append('Deleted custom mailserver is shown')
profile_1.get_back_to_home_view()
profile_1.relogin()
chat_1.profile_button.click()
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
if profile_1.element_by_text(server_name).is_element_displayed():
self.errors.append('Deleted custom mailserver is shown after relogin')
self.errors.verify_no_errors()
@marks.testrail_id(5767)
@marks.medium
@marks.flaky
def test_can_not_connect_to_mailserver(self):
self.create_drivers(2)
home_1, home_2= SignInView(self.drivers[0]).create_user(), SignInView(self.drivers[1]).create_user()
profile_1 = home_1.profile_button.click()
profile_1.just_fyi('add non-working mailserver and connect to it')
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
profile_1.mail_server_auto_selection_button.click()
profile_1.plus_button.click()
server_name = 'test'
profile_1.specify_name_input.set_value(server_name)
profile_1.mail_server_address_input.set_value(mailserver_address.replace('4','5'))
profile_1.save_button.click()
profile_1.mail_server_by_name(server_name).click()
profile_1.mail_server_connect_button.click()
profile_1.confirm_button.click()
profile_1.just_fyi('check that popup "Error connecting" will not reappear if tap on "Cancel"')
profile_1.element_by_translation_id(id='mailserver-error-title').wait_for_element(60)
profile_1.cancel_button.click()
profile_1.home_button.click()
home_2.just_fyi('send several messages to public channel')
public_chat_name = home_2.get_random_chat_name()
message = 'test_message'
public_chat_2 = home_2.join_public_chat(public_chat_name)
public_chat_2.chat_message_input.send_keys(message)
public_chat_2.send_message_button.click()
public_chat_2.back_button.click()
profile_1.just_fyi('join same public chat and try to reconnect via "Tap to reconnect" and check "Connecting"')
profile_1.home_button.click()
public_chat_1 = home_1.join_public_chat(public_chat_name)
public_chat_1.relogin()
profile_1.just_fyi('check that still connected to custom mailserver after relogin')
home_1.profile_button.click()
profile_1.sync_settings_button.click()
if not profile_1.element_by_text(server_name).is_element_displayed():
self.drivers[0].fail("Not connected to custom mailserver after re-login")
profile_1.just_fyi('check that can RETRY to connect')
profile_1.element_by_translation_id(id='mailserver-error-title').wait_for_element(60)
public_chat_1.element_by_translation_id(id='mailserver-retry', uppercase=True).wait_and_click(60)
profile_1.just_fyi('check that can pick another mailserver and receive messages')
profile_1.element_by_translation_id(id='mailserver-error-title').wait_for_element(60)
profile_1.element_by_translation_id(id='mailserver-pick-another', uppercase=True).wait_and_click(120)
mailserver = profile_1.return_mailserver_name(mailserver_ams, used_fleet)
profile_1.element_by_text(mailserver).click()
profile_1.confirm_button.click()
profile_1.home_button.click()
home_1.get_chat('#%s' % public_chat_name).click()
if not public_chat_1.chat_element_by_text(message).is_element_displayed(60):
self.errors.append("Chat history wasn't fetched")
self.errors.verify_no_errors()
@marks.testrail_id(6332)
@marks.medium
def test_disable_use_history_node(self):
self.create_drivers(2)
home_1, home_2 = SignInView(self.drivers[0]).create_user(), SignInView(self.drivers[1]).create_user()
profile_1 = home_1.profile_button.click()
home_2.just_fyi('send several messages to public channel')
public_chat_name = home_2.get_random_chat_name()
message, message_no_history = 'test_message', 'history node is disabled'
public_chat_2 = home_2.join_public_chat(public_chat_name)
public_chat_2.send_message(message)
profile_1.just_fyi('disable use_history_node and check that no history is fetched but you can still send messages')
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
profile_1.use_history_node_button.click()
profile_1.home_button.click()
public_chat_1 = home_1.join_public_chat(public_chat_name)
if public_chat_1.chat_element_by_text(message).is_element_displayed(30):
self.errors.append('Chat history was fetched when use_history_node is disabled')
public_chat_1.send_message(message_no_history)
if not public_chat_2.chat_element_by_text(message_no_history).is_element_displayed(30):
self.errors.append('Message sent when use_history_node is disabled was not received')
public_chat_1.profile_button.click()
profile_1.relogin()
home_1.get_chat('#%s'%public_chat_name).click()
if public_chat_1.chat_element_by_text(message).is_element_displayed(30):
self.drivers[0].fail('History was fetched after relogin when use_history_node is disabled')
profile_1.just_fyi('enable use_history_node and check that history is fetched')
home_1.profile_button.click()
profile_1.sync_settings_button.click()
profile_1.mail_server_button.click()
profile_1.use_history_node_button.click()
profile_1.home_button.click(desired_view='chat')
if not public_chat_1.chat_element_by_text(message).is_element_displayed(30):
self.errors.append('History was not fetched after enabling use_history_node')
self.errors.verify_no_errors()
@marks.testrail_id(5762)
@marks.high
def test_pair_devices_sync_one_to_one_contacts_nicknames_public_chat(self):
self.create_drivers(2)
device_1, device_2 = SignInView(self.drivers[0]), SignInView(self.drivers[1])
device_1_home = device_1.create_user()
device_1_home.profile_button.click()
device_1_profile = device_1_home.get_profile_view()
device_1_profile.privacy_and_security_button.click()
device_1_profile.backup_recovery_phrase_button.click()
device_1_profile.ok_continue_button.click()
recovery_phrase = device_1_profile.get_recovery_phrase()
device_1_profile.close_button.click()
device_1_profile.home_button.click()
device_1_name = 'device_%s' % device_1.driver.number
device_2_name = 'device_%s' % device_2.driver.number
message_before_sync = 'sent before sync'
message_after_sync = 'sent after sync'
public_chat_before_sync = 'before-pairing'
public_chat_after_sync = 'after-pairing'
device_1.just_fyi('add contact, start 1-1 chat with basic user')
device_1_chat = device_1_home.add_contact(basic_user['public_key'])
device_1_chat.chat_message_input.send_keys(message_before_sync)
device_1_chat.send_message_button.click()
device_1.just_fyi('join public chat')
device_1_chat.get_back_to_home_view()
device_1_public_chat = device_1_home.join_public_chat(public_chat_before_sync)
device_2_home = device_2.recover_access(passphrase=' '.join(recovery_phrase.values()))
device_1_profile, device_2_profile = device_1_home.profile_button.click(), device_2_home.profile_button.click()
device_2.just_fyi('go to profile and set nickname for contact')
device_1_profile.open_contact_from_profile(basic_user['username'])
nickname = 'my_basic_user'
device_1_chat.set_nickname(nickname)
device_1_profile.close_button.click()
device_1.back_button.click()
device_2.just_fyi('go to profile > Devices, set device name, discover device 2 to device 1')
device_2_profile.discover_and_advertise_device(device_2_name)
device_1_profile.discover_and_advertise_device(device_1_name)
device_1_profile.get_toggle_device_by_name(device_2_name).wait_and_click()
device_1_profile.sync_all_button.click()
device_1_profile.sync_all_button.wait_for_visibility_of_element(15)
[device.profile_button.click() for device in (device_1_profile, device_2_profile)]
device_2.just_fyi('check that contact with nickname is appeared in Contact list')
device_2_profile.contacts_button.scroll_to_element(9, 'up')
device_2_profile.contacts_button.click()
for name in (basic_user['username'], nickname):
if not device_2_profile.element_by_text(name).is_element_displayed():
self.errors.append('"%s" is not found in Contacts after initial sync' % name)
device_1.just_fyi('send message to 1-1 chat with basic user and add another contact')
device_1_profile.home_button.click(desired_view='chat')
device_1_public_chat.back_button.click()
device_1_home.get_chat(nickname).click()
device_1_chat.chat_message_input.send_keys(message_after_sync)
device_1_chat.send_message_button.click()
device_1_chat.back_button.click()
device_1_home.add_contact(transaction_senders['A']['public_key'])
device_2.just_fyi('check that messages appeared in 1-1 chat, public chats and new contacts are synced')
if not device_2_profile.element_by_text(transaction_senders['A']['username']).is_element_displayed(60):
self.errors.append(
'"%s" is not found in Contacts after adding when devices are paired' % transaction_senders['A'][
'username'])
device_1.just_fyi('Set nickname for added contact and check that it will be synced')
device_1_home.profile_button.click()
device_1_profile.contacts_button.scroll_to_element(9, 'up')
device_1_profile.open_contact_from_profile(transaction_senders['A']['username'])
nickname_after_sync = 'my_transaction sender'
device_1_chat.set_nickname(nickname_after_sync)
device_1_profile.close_button.click()
device_1.home_button.click(desired_view='chat')
if not device_2_profile.element_by_text(nickname_after_sync).is_element_displayed(60):
self.errors.append(
'"%s" is not updated in Contacts after setting nickname when devices are paired' % nickname_after_sync)
device_2_profile.home_button.click()
if not device_2_home.element_by_text_part(public_chat_before_sync).is_element_displayed():
self.errors.append(
'"%s" is not found in Home after initial sync when devices are paired' % public_chat_before_sync)
chat = device_2_home.get_chat(nickname).click()
if chat.chat_element_by_text(message_before_sync).is_element_displayed():
self.errors.append('"%s" message sent before pairing is synced' % message_before_sync)
if not chat.chat_element_by_text(message_after_sync).is_element_displayed(60):
self.errors.append('"%s" message in 1-1 is not synced' % message_after_sync)
device_1.just_fyi('add new public chat and check that it will be synced with device2')
device_1_chat.get_back_to_home_view()
device_1_home.join_public_chat(public_chat_after_sync)
device_2_home = chat.get_back_to_home_view()
if not device_2_home.element_by_text_part(public_chat_after_sync).is_element_displayed(20):
self.errors.append(
'"%s" public chat is not synced after adding when devices are paired' % public_chat_after_sync)
self.errors.verify_no_errors()
@marks.testrail_id(6226)
@marks.critical
def test_ens_mentions_pn_and_nickname_in_public_and_1_1_chats(self):
self.create_drivers(2)
device_1, device_2 = self.drivers[0], self.drivers[1]
sign_in_1, sign_in_2 = SignInView(device_1), SignInView(device_2)
user_1 = ens_user
home_1 = sign_in_1.recover_access(user_1['passphrase'], enable_notifications=True)
home_2 = sign_in_2.create_user()
publuc_key_2, username_2 = home_2.get_public_key_and_username(return_username=True)
home_2.home_button.double_click()
home_1.just_fyi('switching to mainnet and add ENS')
profile_1 = sign_in_1.profile_button.click()
profile_1.switch_network('Mainnet with upstream RPC')
home_1.profile_button.click()
dapp_view_1 = profile_1.ens_usernames_button.click()
dapp_view_1.element_by_text('Get started').click()
dapp_view_1.ens_name_input.set_value(ens_user['ens'])
expected_text = 'This user name is owned by you and connected with your chat key.'
if not dapp_view_1.element_by_text_part(expected_text).is_element_displayed():
dapp_view_1.click_system_back_button()
dapp_view_1.wait_for_element_starts_with_text(expected_text)
dapp_view_1.check_ens_name.click_until_presence_of_element(dapp_view_1.element_by_text('Ok, got it'))
dapp_view_1.element_by_text('Ok, got it').click()
home_1.just_fyi('check ENS name wallet address and public key')
profile_1.element_by_text(user_1['ens']).click()
for text in ('10 SNT, deposit unlocked', user_1['address'].lower(), user_1['public_key'] ):
if not profile_1.element_by_text_part(text).is_element_displayed(40):
self.errors.append('%s text is not shown' % text)
profile_1.home_button.click()
home_2.just_fyi('joining same public chat, set ENS name and check it in chat from device2')
chat_name = home_1.get_random_chat_name()
chat_2 = home_2.join_public_chat(chat_name)
chat_1 = home_1.join_public_chat(chat_name)
chat_1.get_back_to_home_view()
home_1.profile_button.click()
ens_name = '@' + user_1['ens']
profile_1.element_by_text('Your ENS name').click()
if profile_1.username_in_ens_chat_settings_text.text != ens_name:
self.errors.append('ENS username is not shown in ENS usernames Chat Settings after enabling')
profile_1.back_button.click()
profile_1.home_button.click()
home_1.get_chat('#' + chat_name).click()
message_text_2 = 'message test text 1'
chat_1.send_message(message_text_2)
if not chat_2.wait_for_element_starts_with_text(ens_name):
self.errors.append('ENS username is not shown in public chat')
home_1.put_app_to_background()
home_2.just_fyi('check that can mention user with ENS name')
chat_2.select_mention_from_suggestion_list(user_1['ens'])
if chat_2.chat_message_input.text != ens_name + ' ':
self.errors.append('ENS username is not resolved in chat input after selecting it in mention suggestions list!')
chat_2.send_message_button.click()
chat_2.element_starts_with_text(ens_name,'button').click()
for element in (chat_2.element_by_text(user_1['username']), chat_2.profile_add_to_contacts):
if not element.is_element_displayed():
self.errors.append('Was not redirected to user profile after tapping on mention!')
home_1.just_fyi('check that PN is received and after tap you are redirected to public chat, mention is highligted')
home_1.open_notification_bar()
home_1.element_by_text_part(username_2).click()
if home_1.element_starts_with_text(user_1['ens'] +'.stateofus.eth').is_element_differs_from_template('mentioned.png', 2):
self.errors.append('Mention is not highlighted!')
# Close Device1 driver session since it's not needed anymore
self.drivers[0].quit()
home_2.just_fyi('check that ENS name is shown in 1-1 chat without adding user as contact in header, profile, options')
chat_2_one_to_one = chat_2.profile_send_message.click()
if chat_2_one_to_one.user_name_text.text != ens_name:
self.errors.append('ENS username is not shown in 1-1 chat header')
chat_2_one_to_one.chat_options.click()
if not chat_2_one_to_one.element_by_text(ens_name).is_element_displayed():
self.errors.append('ENS username is not shown in 1-1 chat options')
chat_2_one_to_one.view_profile_button.click()
if not chat_2_one_to_one.element_by_text(ens_name).is_element_displayed():
self.errors.append('ENS username is not shown in user profile')
home_2.just_fyi('add user to contacts and check that ENS name is shown in contact')
chat_2_one_to_one.profile_add_to_contacts.click()
chat_2.close_button.click()
profile_2 = chat_2_one_to_one.profile_button.click()
profile_2.open_contact_from_profile(ens_name)
home_2.just_fyi('set nickname and recheck username in 1-1 header, profile, options, contacts')
nickname = 'test user' + str(round(time()))
chat_2.set_nickname(nickname)
profile_2.close_button.click()
for name in (nickname, ens_name):
if not profile_2.element_by_text(name).is_element_displayed():
self.errors.append('%s is not shown in contact list' % name)
profile_2.home_button.click(desired_view='chat')
if chat_2_one_to_one.user_name_text.text != nickname:
self.errors.append('Nickname for user with ENS is not shown in 1-1 chat header')
chat_2_one_to_one.chat_options.click()
if not chat_2_one_to_one.element_by_text(nickname).is_element_displayed():
self.errors.append('Nickname for user with ENS is not shown in 1-1 chat options')
home_2.just_fyi('check nickname in public chat')
chat_2.get_back_to_home_view()
home_2.get_chat('#' + chat_name).click()
chat_element = chat_2.chat_element_by_text(message_text_2)
chat_element.find_element()
if chat_element.username.text != '%s %s' % (nickname, ens_name):
self.errors.append('Nickname for user with ENS is not shown in public chat')
self.errors.verify_no_errors()
@marks.testrail_id(6228)
@marks.high
def test_mobile_data_usage_complex_settings(self):
self.create_drivers(2)
device_1, device_2 = SignInView(self.drivers[0]), SignInView(self.drivers[1])
device_1_home = device_1.create_user()
public_chat_name, public_chat_message = 'e2e-started-before', 'message to pub chat'
device_1_public = device_1_home.join_public_chat(public_chat_name)
device_1_public.send_message(public_chat_message)
device_1_home.just_fyi('set mobile data to "OFF" and check that peer-to-peer connection is still working')
device_2_home = device_2.create_user()
device_2_home.toggle_mobile_data()
device_2_home.mobile_connection_off_icon.wait_for_visibility_of_element(20)
for element in device_2_home.continue_syncing_button, device_2_home.stop_syncing_button, device_2_home.remember_my_choice_checkbox:
if not element.is_element_displayed(10):
self.drivers[0].fail('Element %s is not not shown in "Syncing mobile" bottom sheet' % element.locator)
device_2_home.stop_syncing_button.click()
if not device_2_home.mobile_connection_off_icon.is_element_displayed():
self.drivers[0].fail('No mobile connection OFF icon is shown')
device_2_home.mobile_connection_off_icon.click()
for element in device_2_home.connected_to_n_peers_text, device_2_home.waiting_for_wi_fi:
if not element.is_element_displayed():
self.errors.append("Element '%s' is not shown in Connection status bottom sheet" % element.locator)
device_2_home.click_system_back_button()
device_2_public = device_2_home.join_public_chat(public_chat_name)
if device_2_public.chat_element_by_text(public_chat_message).is_element_displayed(30):
self.errors.append("Chat history was fetched with mobile data fetching off")
public_chat_new_message = 'new message'
device_1_public.send_message(public_chat_new_message)
if not device_2_public.chat_element_by_text(public_chat_new_message).is_element_displayed(30):
self.errors.append("Peer-to-peer connection is not working when mobile data fetching is off")
device_2_home.just_fyi('set mobile data to "ON"')
device_2_home.home_button.click()
device_2_home.mobile_connection_off_icon.click()
device_2_home.use_mobile_data_switch.click()
if not device_2_home.connected_to_node_text.is_element_displayed(10):
self.errors.append("Not connected to history node after enabling fetching on mobile data")
device_2_home.click_system_back_button()
device_2_home.mobile_connection_on_icon.wait_for_visibility_of_element(10)
if not device_2_home.mobile_connection_on_icon.is_element_displayed():
self.errors.append('No mobile connection ON icon is shown')
device_2_home.get_chat('#%s'% public_chat_name).click()
if not device_2_public.chat_element_by_text(public_chat_message).is_element_displayed(90):
self.errors.append("Chat history was not fetched with mobile data fetching ON")
device_2_home.just_fyi('check redirect to sync settings by tappin "Sync" in connection status bottom sheet')
device_2_home.home_button.click()
device_2_home.mobile_connection_on_icon.click()
device_2_home.connection_settings_button.click()
if not device_2_home.element_by_translation_id("mobile-network-use-mobile").is_element_displayed():
self.errors.append("Was not redirected to sync settings after tapping on Settings in connection bottom sheet")
device_1_home.just_fyi("Check default preferences in Sync settings")
device_1_profile = device_1_home.profile_button.click()
device_1_profile.sync_settings_button.click()
if not device_1_profile.element_by_translation_id("mobile-network-use-wifi").is_element_displayed():
self.errors.append("Mobile data is enabled by default")
device_1_profile.element_by_translation_id("mobile-network-use-wifi").click()
if device_1_profile.ask_me_when_on_mobile_network.text != "ON":
self.errors.append("'Ask me when on mobile network' is not enabled by default")
device_1_profile.just_fyi("Disable 'ask me when on mobile network' and check that it is not shown")
device_1_profile.ask_me_when_on_mobile_network.click()
device_1_profile.toggle_mobile_data()
if device_1_profile.element_by_translation_id("mobile-network-start-syncing").is_element_displayed(20):
self.errors.append("Popup is shown, but 'ask me when on mobile network' is disabled")
device_1_profile.just_fyi("Check 'Restore default' setting")
device_1_profile.element_by_text('Restore Defaults').click()
if device_1_profile.use_mobile_data.attribute_value("checked"):
self.errors.append("Mobile data is enabled by default")
if not device_1_profile.ask_me_when_on_mobile_network.attribute_value("checked"):
self.errors.append("'Ask me when on mobile network' is not enabled by default")
self.errors.verify_no_errors()
@marks.testrail_id(5680)
@marks.high
@marks.skip
# TODO: skip until edit userpic is enabled back
def test_pair_devices_sync_name_photo_public_group_chats(self):
self.create_drivers(2)
device_1, device_2 = SignInView(self.drivers[0]), SignInView(self.drivers[1])
device_1_home = device_1.create_user()
device_1_home.profile_button.click()
device_1_profile = device_1_home.get_profile_view()
device_1_profile.privacy_and_security_button.click()
device_1_profile.backup_recovery_phrase_button.click()
recovery_phrase = device_1_profile.backup_recovery_phrase()
device_1_profile.back_button.click()
device_1_profile.get_back_to_home_view()
device_1_name = 'device_%s' % device_1.driver.number
device_2_name = 'device_%s' % device_2.driver.number
public_chat_before_sync_name = 'b-public-%s' % device_1_home.get_random_chat_name()
public_chat_after_sync_name = 'a-public-%s' % device_1_home.get_random_chat_name()
group_chat_name = 'group-%s' % device_1_home.get_random_chat_name()
message_after_sync = 'sent after sync'
device_1.just_fyi('join public chat, create group chat, edit user picture')
device_1_public_chat = device_1_home.join_public_chat(public_chat_before_sync_name)
device_1_public_chat.back_button.click()
device_1_one_to_one = device_1_home.add_contact(basic_user['public_key'])
device_1_one_to_one.back_button.click()
device_1_group_chat = device_1_home.create_group_chat([basic_user['username']], group_chat_name)
device_1_group_chat.back_button.click()
device_1_home.profile_button.click()
device_1_profile = device_1_home.get_profile_view()
device_1_profile.edit_profile_picture('sauce_logo.png')
device_2.just_fyi('go to profile > Devices, set device name, discover device 2 to device 1')
device_2_home = device_2.recover_access(passphrase=' '.join(recovery_phrase.values()))
device_2_profile = device_2_home.get_profile_view()
device_2_profile.discover_and_advertise_device(device_2_name)
device_1.just_fyi('enable pairing of `device 2` and sync')
device_1_profile.discover_and_advertise_device(device_1_name)
device_1_profile.get_toggle_device_by_name(device_2_name).click()
device_1_profile.sync_all_button.click()
device_1_profile.sync_all_button.wait_for_visibility_of_element(15)
device_2.just_fyi('check that public chat and profile details are updated')
device_2_home = device_2_profile.home_button.click()
if not device_2_home.element_by_text('#%s' % public_chat_before_sync_name).is_element_displayed():
self.errors.append('Public chat "%s" doesn\'t appear after initial sync'
% public_chat_before_sync_name)
device_2_home.home_button.click()
device_2_home.profile_button.click()
if not device_2_profile.profile_picture.is_element_image_equals_template('sauce_logo_profile.png'):
self.errors.append('Profile picture was not updated after initial sync')
device_2_profile.home_button.click()
device_1.just_fyi('send message to group chat, and join to new public chat')
device_1_home = device_1_profile.home_button.click()
device_1_public_chat = device_1_home.join_public_chat(public_chat_after_sync_name)
device_1_public_chat.back_button.click()
device_1_home.element_by_text(group_chat_name).click()
device_1_group_chat.chat_message_input.send_keys(message_after_sync)
device_1_group_chat.send_message_button.click()
device_1_group_chat.back_button.click()
device_2.just_fyi('check that message in group chat is shown, public chats are synced')
if not device_2_home.element_by_text('#%s' % public_chat_after_sync_name).is_element_displayed():
self.errors.append('Public chat "%s" doesn\'t appear on other device when devices are paired'
% public_chat_before_sync_name)
device_2_home.element_by_text(group_chat_name).click()
device_2_group_chat = device_2_home.get_chat_view()
if not device_2_group_chat.chat_element_by_text(message_after_sync).is_element_displayed():
self.errors.append('"%s" message in group chat is not synced' % message_after_sync)
self.errors.verify_no_errors()
| mpl-2.0 | 5,973,305,768,616,145,000 | 52.820166 | 172 | 0.66031 | false |
ttthy1/2017sejongAI | week14/Mnist.py | 1 | 2273 | # Lab 7 Learning rate and Evaluation
import tensorflow as tf
import random
import matplotlib.pyplot as plt
tf.set_random_seed(777) # for reproducibility
from tensorflow.examples.tutorials.mnist import input_data
# Check out https://www.tensorflow.org/get_started/mnist/beginners for
# more information about the mnist dataset
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
nb_classes = 10
# MNIST data image of shape 28 * 28 = 784
X = tf.placeholder(tf.float32, [None, 784])
# 0 - 9 digits recognition = 10 classes
Y = tf.placeholder(tf.float32, [None, nb_classes])
W = tf.Variable(tf.random_normal([784, nb_classes]))
b = tf.Variable(tf.random_normal([nb_classes]))
# Hypothesis (using softmax)
hypothesis = tf.nn.softmax(tf.matmul(X, W) + b)
cost = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(hypothesis), axis=1))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)
# Test model
is_correct = tf.equal(tf.arg_max(hypothesis, 1), tf.arg_max(Y, 1))
# Calculate accuracy
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
# parameters
training_epochs = 15
batch_size = 100
with tf.Session() as sess:
# Initialize TensorFlow variables
sess.run(tf.global_variables_initializer())
# Training cycle
for epoch in range(training_epochs):
avg_cost = 0
total_batch = int(mnist.train.num_examples / batch_size)
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
c, _ = sess.run([cost, optimizer], feed_dict={
X: batch_xs, Y: batch_ys})
avg_cost += c / total_batch
print('Epoch:', '%04d' % (epoch + 1),
'cost =', '{:.9f}'.format(avg_cost))
print("Learning finished")
# Test the model using test sets
print("Accuracy: ", accuracy.eval(session=sess, feed_dict={
X: mnist.test.images, Y: mnist.test.labels}))
# Get one and predict
r = random.randint(0, mnist.test.num_examples - 1)
print("Label: ", sess.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
print("Prediction: ", sess.run(
tf.argmax(hypothesis, 1), feed_dict={X: mnist.test.images[r:r + 1]}))
plt.imshow(
mnist.test.images[r:r + 1].reshape(28, 28),
cmap='Greys',
interpolation='nearest')
plt.show()
| gpl-3.0 | -1,161,395,743,003,919,600 | 31.471429 | 79 | 0.675759 | false |
JDongian/LangGrind | src/parse_raw.py | 1 | 1957 | """Parse human data into JSON"""
import string
def parse_file(filename="../data/RAW.txt"):
"""Parse human readable file into JSON."""
entries = []
with open(filename) as f_in:
next_line = f_in.readline()
data = {}
state = "section"
while next_line:
if state == "section":
line = next_line.split(" ")
if line[0] == "Chapter":
data = {'section': {'chapter': int(line[1]),
'part': line[4].strip()}}
state = "term"
elif state == "term":
if not next_line.strip():
state = "section"
next_line = f_in.readline()
continue
entry = data.copy()
term, definition = next_line.split(";")
#print("'{}'".format(next_line))
entry['term'] = term.strip()
entry['definitions'] = [_.strip() for\
_ in definition.split(",")]
entry['class'] = []
# Determine the lexical class of the word.
if "(be)" in "".join(entry['definitions']):
entry['class'].append("adjective")
for _ in entry['definitions']:
initial = _.split(" ")[0]
end = _[-1]
if initial in ["a", "an"]:
entry['class'].append("noun")
if initial in ["to"]:
entry['class'].append("verb")
if end in ".!?":
entry['class'].append("phrase")
# Proper nouns
elif initial[0] in string.ascii_uppercase:
entry['class'].append("noun")
entries.append(entry)
next_line = f_in.readline()
return entries
| gpl-3.0 | 6,378,814,179,138,106,000 | 38.14 | 67 | 0.406234 | false |
prateeksan/python-design-patterns | structural/adapter.py | 1 | 3485 | """ The Adapter Pattern
Notes:
If the interface of an object does not match the interface required by the
client code, this pattern recommends using an 'adapter' that can create a proxy
interface. It is particularly useful in homogenizing interfaces of
non-homogenous objects.
The following example represents a use case for adapting various resource
types to be readable as text resources. We assume that the client programmer
works with resource objects that wrap binary, web-based or textual data. Each
of the aforementioned has its own type and interface but we need to read them
all as text type objects. Since every resource type can be represented as text
(albeit the method calls to do so vary), we use the TextResourceAdapter to
homogenize the interface and output the textual representation using a common
read() method (set to behave like the read() method for TextResource).
"""
class TextResource:
"""We assume that our server can only read text. Therefore this resource is
the only resource the server knows how to interpret.
"""
def read(self):
return "Sample plain text."
class BinaryResource:
"""An instance of this class wraps binary data. While it has many output
formats, the server can only read the plain-text output.
"""
def read_plain_text(self):
return "Sample plain text from binary."
def read_raw(self):
pass
def read_interactive(self):
pass
class WebResource:
"""An instance of this class wraps web data. While it has many output
formats, the server can only read the json output.
"""
def read_json(self):
return "Sample plain text as json."
def read_html(self):
pass
class IncompatibleResourceError(Exception):
pass
class TextResourceAdapter:
"""Acts as an adapter that uses the read() method to return a textual
representation of the client_resource.
"""
convertibles = ("TextResource", "BinaryResource", "WebResource")
def __init__(self, client_resource):
self._verify_compatibility(client_resource)
self._client_resource = client_resource
def read(self):
"""Note that for a resource to use the adapter, it needs to be
configured beforehand in this method. Your implementation may be
modified to change this (depending on your use case).
"""
if self._client_resource.__class__ is BinaryResource:
return self._client_resource.read_plain_text()
elif self._client_resource.__class__ is WebResource:
return self._client_resource.read_json()
return self._client_resource.read()
def _verify_compatibility(self, resource):
"""Since we need to pre-configure the adapter to handle various resource
types, we raise an error if the client_resource is not pre-configured.
"""
if resource.__class__.__name__ not in self.__class__.convertibles:
raise IncompatibleResourceError("{} cannot be adapted.".format(
resource.__class__.__name__))
if __name__ == "__main__":
client_resources = [BinaryResource(), WebResource(), TextResource()]
for resource in client_resources:
print("Adapting {} as a text resource...".format(
resource.__class__.__name__))
adapted_resource = TextResourceAdapter(resource)
# Note how the read interface has been homogenized.
print(adapted_resource.read() + "\n") | mit | 5,776,622,469,529,610,000 | 32.84466 | 80 | 0.68637 | false |
3dfxsoftware/cbss-addons | account_aged_partner_balance_report/report/account_aged_partner_balance_report.py | 1 | 10928 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
from report import report_sxw
from tools.translate import _
from openerp.osv import fields, osv
from openerp.addons.account_report_lib.account_report_base import accountReportbase
class Parser(accountReportbase):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'cr' : cr,
'uid': uid,
'storage':{},
'set_data_template': self.set_data_template,
'set_data_move_lines': self.set_data_move_lines,
'get_data':self.get_data,
'get_data_by_partner':self.get_data_by_partner,
'get_period_length': self.get_period_length,
'get_direction_selection': self.get_direction_selection,
'display_account_type':self.display_account_type,
'display_direction_selection': self.display_direction_selection,
'display_period_length': self.display_period_length,
'process_lines_period':self.process_lines_period,
})
#====Extract data from wizard==============================================
def get_period_length(self, data):
return self._get_form_param('period_length', data)
def get_direction_selection(self, data):
return self._get_form_param('direction_selection', data)
def get_account_type(self, data):
return self._get_form_param('account_type', data)
"""
Return a dictionary, with this structure:
result[account_type][move_list_lines] (a dictionary)
"""
def get_data_by_partner(self, partner_id):
return self.localcontext['storage']['result'][partner_id]
#==========================================================================
#====Display data==========================================================
def display_account_type(self, data=None, account_type=None):
#if it's necessary to display in report's header
if data:
account_type = self.get_account_type(data)
##Options for report information (keys are different)
if account_type == 'receivable':
return _('Receivable Accounts')
elif account_type == 'payable':
return _('Payable Accounts')
###Options for header
if account_type == 'customer':
return _('Receivable accounts')
elif account_type == 'supplier':
return _('Payable accounts')
elif account_type == 'customer_supplier':
return _('Payable and Receivable accounts')
return ''
def display_direction_selection(self, data):
direction_selection = self.get_direction_selection(data)
if direction_selection == 'past':
return _('Past')
elif direction_selection == 'future':
return _('Future')
return ''
def display_period_length(self, data):
return self.get_period_length(data)
#===== Set data =========================================================
#set data to use in odt template.
def set_data_template(self, cr, uid, data):
result, partner_ids_order = self.get_data(cr, uid, data)
dict_update = {'result': result, 'partner_ids_order': partner_ids_order,}
self.localcontext['storage'].update(dict_update)
return False
def set_data_move_lines(self, data, move_lines):
#move_lines is a dictionary
move_lines, partner_total = self.process_lines_period(data, move_lines)
dict_update = {'move_lines':move_lines, 'partner_total':partner_total}
self.localcontext['storage'].update(dict_update)
return False
#==========================================================================
def get_move_lines(self, data):
account_account_obj = self.pool.get('account.account')
account_move_line_obj = self.pool.get('account.move.line')
account_type_domain = []
#Get parameters
date_from = str(self.get_date_from(data))
direction_selection = str(self.get_direction_selection(data))
account_type = self.get_account_type(data)
if account_type == 'customer':
account_type_domain.append('receivable')
if account_type == 'supplier':
account_type_domain.append('payable')
if account_type == 'customer_supplier':
account_type_domain.append('receivable')
account_type_domain.append('payable')
#Build domains
account_account_ids = account_account_obj.search(self.cr, self.uid, [('type', 'in', account_type_domain), ('active','=',True)])
account_move_line_domain = [('state', '=', 'valid'), ('reconcile_id', '=', False), ('account_id', 'in', account_account_ids)]
#=====Build a account move lines domain
#Date
tuple_date = ()
if direction_selection == 'past':
tuple_date = ('date','<=', date_from)
account_move_line_domain.append(tuple_date)
else:
tuple_date = ('date','>=', date_from)
account_move_line_domain.append(tuple_date)
#Get move_lines based on previous domain
account_move_line_ids = account_move_line_obj.search(self.cr, self.uid, account_move_line_domain, order='date_maturity desc')
account_move_lines = account_move_line_obj.browse(self.cr, self.uid, account_move_line_ids)
return account_move_lines
def get_data(self, cr, uid, data):
partner_ids = []
res = {}
""" 1. Extract move lines """
move_lines = self.get_move_lines(data)
""" 2. Classified move_lines by partner and account_type """
for line in move_lines:
if line.partner_id:
partner_id = line.partner_id.id
else:
partner_id = 0 #key for lines that don't have partner_id
#== Create a list, them order it by name ============
if partner_id not in partner_ids:
partner_ids.append(partner_id)
#====================================================
if partner_id not in res.keys():
res[partner_id] = {}
if line.account_id.type not in res[partner_id].keys():
res[line.partner_id.id][line.account_id.type] = []
res[partner_id][line.account_id.type].append(line)
#Sort by partner's name (alphabetically)
partner_ids_order = self.pool.get('res.partner').search(cr, uid, [('id','in', partner_ids)], order='name ASC')
partner_list = self.pool.get('res.partner').browse(self.cr, self.uid, partner_ids_order)
return res, partner_list
#Process each column for line.
def process_lines_period(self, data, move_lines):
res = {}
partner_total = 0.0
result_list = [7]
#Get parameters
date_from = str(self.get_date_from(data))
direction_selection = str(self.get_direction_selection(data))
for line in move_lines:
result_list = map(float, result_list)
#initialize list
result_list = [0.0 for i in range(7)]
if not line.date_maturity or direction_selection == 'past' and line.date_maturity > date_from \
or direction_selection == 'future' and line.date_maturity < date_from:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[0] = value
if line.date_maturity >= data['form']['4']['start'] and line.date_maturity <= data['form']['4']['stop']:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[1] = value
if line.date_maturity >= data['form']['3']['start'] and line.date_maturity <= data['form']['3']['stop']:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[2] = value
if line.date_maturity >= data['form']['2']['start'] and line.date_maturity <= data['form']['2']['stop']:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[3] = value
if line.date_maturity >= data['form']['1']['start'] and line.date_maturity <= data['form']['1']['stop']:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[4] = value
if line.date_maturity and data['form']['0']['stop'] and line.date_maturity <= data['form']['0']['stop'] or line.date_maturity and data['form']['0']['start'] and line.date_maturity >= data['form']['0']['start']:
if line.debit:
value = line.debit
else:
value = line.credit
result_list[5] = value
#Total by partner
partner_total += line.debit if line.debit else line.credit * -1
result_list[6] = partner_total
res[line.id] = result_list
return res, partner_total
| gpl-2.0 | 284,019,789,491,869,440 | 41.858824 | 221 | 0.522053 | false |
nosuchtim/VizBench | src/jsonrpc/jsonrpc.py | 1 | 1044 | # Utility to send JSON RPC messages
# Avoid the requests module to reduce installation hassles
import urllib
import urllib2
import json
import sys
verbose = False
def dorpc(port,meth,params):
url = 'http://127.0.0.1:%d/api' % (port)
id = '12345'
data = '{ "jsonrpc": "2.0", "method": "'+meth+'", "params": '+params+', "id":"'+id+'" }\n'
req = urllib2.Request(url,data)
r = urllib2.urlopen(req)
response = r.read()
if verbose:
print "HTTP status code = ",r.getcode()
print "HTTP url = ",r.geturl()
print "HTTP info = ",r.info()
print "response is ",response
j = json.loads(response)
if "error" in j:
print "ERROR: "+str(j["error"]["message"])
elif "result" in j:
print "RESULT: "+str(j["result"])
else:
print "No error or result in JSON response!? r="+r
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: jsonrpc {port} {meth} [ {params} ]"
else:
port = int(sys.argv[1])
meth = sys.argv[2]
if len(sys.argv) < 4:
params = "{}"
else:
params = sys.argv[3]
dorpc(port,meth,params)
| mit | 6,698,785,829,892,262,000 | 22.2 | 91 | 0.6159 | false |
pombreda/ruffus | ruffus/test/test_tutorial7.py | 1 | 6147 | #!/usr/bin/env python
from __future__ import print_function
NUMBER_OF_RANDOMS = 10000
CHUNK_SIZE = 1000
working_dir = "temp_tutorial7/"
import os
import sys
# add grandparent to search path for testing
grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
sys.path.insert(0, grandparent_dir)
# module name = script name without extension
module_name = os.path.splitext(os.path.basename(__file__))[0]
# funky code to import by file name
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
ruffus_name = os.path.basename(parent_dir)
ruffus = __import__ (ruffus_name)
for attr in "follows", "split", "mkdir", "files", "transform", "suffix", "posttask", "touch_file", "merge", "Pipeline":
globals()[attr] = getattr (ruffus, attr)
import random
import glob
#---------------------------------------------------------------
#
# Create random numbers
#
@follows(mkdir(working_dir))
@files(None, working_dir + "random_numbers.list")
def create_random_numbers(input_file_name, output_file_name):
f = open(output_file_name, "w")
for i in range(NUMBER_OF_RANDOMS):
f.write("%g\n" % (random.random() * 100.0))
f.close()
#---------------------------------------------------------------
#
# Split initial file
#
@follows(create_random_numbers)
@split(working_dir + "random_numbers.list", working_dir + "*.chunks")
def step_4_split_numbers_into_chunks (input_file_name, output_files):
"""
Splits random numbers file into XXX files of CHUNK_SIZE each
"""
#
# clean up files from previous runs
#
for f in glob.glob("*.chunks"):
os.unlink(f)
#
# create new file every CHUNK_SIZE lines and
# copy each line into current file
#
output_file = None
cnt_files = 0
with open(input_file_name) as ii:
for i, line in enumerate(ii):
if i % CHUNK_SIZE == 0:
cnt_files += 1
if output_file:
output_file.close()
output_file = open(working_dir + "%d.chunks" % cnt_files, "w")
output_file.write(line)
if output_file:
output_file.close()
#---------------------------------------------------------------
#
# Calculate sum and sum of squares for each chunk file
#
@transform(step_4_split_numbers_into_chunks, suffix(".chunks"), ".sums")
def step_5_calculate_sum_of_squares (input_file_name, output_file_name):
with open(output_file_name, "w") as oo:
sum_squared, sum = [0.0, 0.0]
cnt_values = 0
with open(input_file_name) as ii:
for line in ii:
cnt_values += 1
val = float(line.rstrip())
sum_squared += val * val
sum += val
oo.write("%s\n%s\n%d\n" % (repr(sum_squared), repr(sum), cnt_values))
def print_hooray_again():
print(" hooray again")
def print_whoppee_again():
print(" whoppee again")
#---------------------------------------------------------------
#
# Calculate sum and sum of squares for each chunk
#
@posttask(lambda: sys.stdout.write(" hooray\n"))
@posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(working_dir, "done")))
@merge(step_5_calculate_sum_of_squares, os.path.join(working_dir, "variance.result"))
def step_6_calculate_variance (input_file_names, output_file_name):
"""
Calculate variance naively
"""
output = open(output_file_name, "w")
#
# initialise variables
#
all_sum_squared = 0.0
all_sum = 0.0
all_cnt_values = 0.0
#
# added up all the sum_squared, and sum and cnt_values from all the chunks
#
for input_file_name in input_file_names:
with open(input_file_name) as ii:
sum_squared, sum, cnt_values = list(map(float, ii.readlines()))
all_sum_squared += sum_squared
all_sum += sum
all_cnt_values += cnt_values
all_mean = all_sum / all_cnt_values
variance = (all_sum_squared - all_sum * all_mean)/(all_cnt_values)
#
# print output
#
print(variance, file=output)
output.close()
import unittest, shutil
try:
from StringIO import StringIO
except:
from io import StringIO
class Test_ruffus(unittest.TestCase):
def setUp(self):
try:
shutil.rmtree(working_dir)
except:
pass
def tearDown(self):
try:
shutil.rmtree(working_dir)
pass
except:
pass
def atest_ruffus (self):
pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
output_file = os.path.join(working_dir, "variance.result")
if not os.path.exists (output_file):
raise Exception("Missing %s" % output_file)
def test_newstyle_ruffus (self):
test_pipeline = Pipeline("test")
test_pipeline.files(create_random_numbers, None, working_dir + "random_numbers.list")\
.follows(mkdir(working_dir))
test_pipeline.split(task_func = step_4_split_numbers_into_chunks,
input = working_dir + "random_numbers.list",
output = working_dir + "*.chunks")\
.follows(create_random_numbers)
test_pipeline.transform(task_func = step_5_calculate_sum_of_squares,
input = step_4_split_numbers_into_chunks,
filter = suffix(".chunks"),
output = ".sums")
test_pipeline.merge(task_func = step_6_calculate_variance, input = step_5_calculate_sum_of_squares, output = os.path.join(working_dir, "variance.result"))\
.posttask(lambda: sys.stdout.write(" hooray\n"))\
.posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(working_dir, "done")))
test_pipeline.run(multiprocess = 50, verbose = 0)
output_file = os.path.join(working_dir, "variance.result")
if not os.path.exists (output_file):
raise Exception("Missing %s" % output_file)
if __name__ == '__main__':
unittest.main()
| mit | 6,303,239,535,678,823,000 | 30.203046 | 163 | 0.576216 | false |
azheng51714/PYLearning | scrapy_project/ELEProject/ELEProject/middlewares.py | 1 | 1884 | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class EleprojectSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
| mit | 7,885,680,243,928,233,000 | 32.607143 | 78 | 0.664187 | false |
h4wldev/Frest | app/routes/api/v1/users/user.py | 1 | 6676 | # # -*- coding: utf-8 -*-
import re
import datetime
from flask import request
from flask_api import status
from flask_restful import Resource
from sqlalchemy.exc import IntegrityError
from werkzeug.security import generate_password_hash
from app import db, token_auth
from app.models.user_model import UserModel, get_user
from app.models.user_token_model import token_is_auth, token_load_with_auth, token_expire_all, token_delete_all
from app.modules import frest
from app.modules.frest.api.error import get_exists_error
from app.modules.frest.validate import user as userValidate
from app.modules.frest.serialize.user import serialize_user
_URL = '/users/<prefix>'
class User(Resource):
"""
@api {get} /users/:prefix Get particular user's info
@apiName User Info
@apiGroup Users
@apiHeader {String} Authorization Access token.
@apiHeaderExample {json} Header-Example:
{
"Authorization": "304924"
}
@apiParam {String} prefix user's prefix
@apiSuccess (200) {String} data Users data.
@apiError (401) UnAuthorized You don't have permission.
@apiError (400) ValueError Prefix can only be me or number
"""
@frest.API
@token_auth.login_required
def get(self, prefix):
try:
if prefix == 'me':
user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
else:
user_id = int(prefix)
if token_is_auth(request.headers['Authorization'], user_id):
user = get_user(user_id)
return serialize_user(user), status.HTTP_200_OK
else:
return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
except ValueError:
return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST
"""
@api {put} /users/:prefix Update user info
@apiName Update user info
@apiGroup Users
@apiPermission Admin
@apiHeader {String} Authorization Access token.
@apiHeaderExample {json} Header-Example:
{
"Authorization": "304924"
}
@apiParam {String} prefix user's prefix
@apiSuccess (200) None
@apiError (400) BadRequest Invalid input - Prefix can only be me or a number.
@apiError (401) UnAuthorized You don't have permission - Should be admin.
@apiError (404) NotFound User not found.
"""
@frest.API
@token_auth.login_required
def put(self, prefix):
try:
if prefix == 'me':
user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
else:
user_id = int(prefix)
user_query = UserModel.query \
.filter(UserModel.id == user_id)
if token_is_auth(request.headers['Authorization'], user_id):
user_permission = token_load_with_auth(request.headers['Authorization'])['permission']
if user_permission != 'ADMIN' and request.form.get('permission') is not None:
return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
form = userValidate.modificationForm(request.form)
if form.validate():
if user_query.count():
user = user_query.first()
try:
for key, value in request.form.items():
if value is not None and value != '':
if key == 'password':
value = generate_password_hash(value)
token_expire_all(user.id)
setattr(user, key, value)
user.updated_at = datetime.datetime.now()
db.session.commit()
except IntegrityError as e:
field, value = get_exists_error(e)
_return = {
'message': "'" + value + "' is already exists.",
'field': {
'label': getattr(form, field).label.text,
'name': field
}
}
return _return, status.HTTP_400_BAD_REQUEST
return None, status.HTTP_200_OK
else:
return "The user does not exist.", status.HTTP_404_NOT_FOUND
for field, errors in form.errors.items():
for error in errors:
_return = {
'message': error,
'field': getattr(form, field).label.text
}
return _return, status.HTTP_400_BAD_REQUEST
else:
return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
except ValueError:
return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST
"""
@api {delete} /users/:prefix Delete user
@apiName User Delete
@apiGroup Users
@apiHeader {String} Authorization Access token.
@apiHeaderExample {json} Header-Example:
{
"Authorization": "304924"
}
@apiParam {String} prefix user's prefix
@apiSuccess (200) None
@apiError (404) NotFound User not found.
@apiError (401) UnAuthorized You don't have permission.
@apiError (400) ValueError Prefix can only be me or number
"""
@frest.API
@token_auth.login_required
def delete(self, prefix):
try:
if prefix == 'me':
user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
else:
user_id = int(prefix)
user_query = UserModel.query \
.filter(UserModel.id == user_id)
if token_is_auth(request.headers['Authorization'], user_id):
if user_query.count():
token_delete_all(user_id)
user = user_query.first()
db.session.delete(user)
db.session.commit()
return None, status.HTTP_200_OK
else:
return "The user does not exist.", status.HTTP_404_NOT_FOUND
else:
return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
except ValueError:
return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST
| mit | 2,234,537,345,375,100,000 | 33.770833 | 111 | 0.539994 | false |
Nikola-K/django_reddit | users/models.py | 1 | 1565 | from hashlib import md5
import mistune
from django.contrib.auth.models import User
from django.db import models
class RedditUser(models.Model):
user = models.OneToOneField(User)
first_name = models.CharField(max_length=35, null=True, default=None,
blank=True)
last_name = models.CharField(max_length=35, null=True, default=None,
blank=True)
email = models.EmailField(null=True, blank=True, default=None)
about_text = models.TextField(blank=True, null=True, max_length=500,
default=None)
about_html = models.TextField(blank=True, null=True, default=None)
gravatar_hash = models.CharField(max_length=32, null=True, blank=True,
default=None)
display_picture = models.NullBooleanField(default=False)
homepage = models.URLField(null=True, blank=True, default=None)
twitter = models.CharField(null=True, blank=True, max_length=15,
default=None)
github = models.CharField(null=True, blank=True, max_length=39,
default=None)
comment_karma = models.IntegerField(default=0)
link_karma = models.IntegerField(default=0)
def update_profile_data(self):
self.about_html = mistune.markdown(self.about_text)
if self.display_picture:
self.gravatar_hash = md5(self.email.lower().encode('utf-8')).hexdigest()
def __unicode__(self):
return "<RedditUser:{}>".format(self.user.username)
| apache-2.0 | -6,009,276,806,158,894,000 | 42.472222 | 84 | 0.628754 | false |
selentd/pythontools | pytools/src/oldsrc/addindex.py | 1 | 2335 |
import datetime
import pymongo
from pymongo.mongo_client import MongoClient
import indexdata
def getIndexEntry( indexData ):
return indexData.getDictionary()
def getIndexDateEntry( indexData ):
return { "date": datetime.datetime(indexData.date.year,
indexData.date.month,
indexData.date.day,
0,
0)
}
def getIndexHistory( source, size = 10000000 ):
indexHistory = indexdata.IndexHistory(source, size)
indexHistory.readIndex()
return indexHistory
def addIndex( source, dbName, indexName ):
#client = MongoClient("192.168.81.147")
client = MongoClient("127.0.0.1")
database = client[dbName]
collection = database[indexName]
collection.create_index([("date", pymongo.ASCENDING)],
name="date",
unique=True)
indexHistory = getIndexHistory(source)
for indexData in indexHistory.indexHistory:
indexEntry = getIndexEntry(indexData)
indexDate = getIndexDateEntry(indexData)
if collection.find_one(indexDate) == None:
collection.insert(indexEntry)
def addIndizes():
'''
addIndex('../../data/sp500.csv', 'stockdb', 'sp500')
addIndex('../../data/tecdax.csv', 'stockdb', 'tecdax')
addIndex('../../data/mdax.csv', 'stockdb', 'mdax')
addIndex('../../data/nasdaq100.csv', 'stockdb', 'nasdaq100')
addIndex('../../data/smi.csv', 'stockdb', 'smi')
addIndex('../../data/tecdax.csv', 'stockdb', 'tecdax')
'''
indexList = ['atx',
'brent',
'cac',
'dax',
'dowjones',
'estoxx50',
'ftse100',
'ftsemib',
'gold',
'hangseng',
'hscei',
'ibex',
'mdax',
'nasdaq100',
'nikkei',
'sdax',
'smi',
'sp500',
'tecdax']
for index in indexList:
print '../../data/'+index+'.csv'
addIndex('../../data/'+index+'.csv', 'stockdb', index)
if __name__ == '__main__':
addIndizes()
| apache-2.0 | 2,964,568,120,293,750,000 | 28.556962 | 64 | 0.494647 | false |
Rezzie/Batcher | generators/g_randomchoice.py | 1 | 2268 | #!/usr/bin/env python
# Copyright (c) 2011, The University of York
# All rights reserved.
# Author(s):
# James Arnold <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the The University of York nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF YORK BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from generator import Generator
import random
class random_choice(Generator):
def __init__(self, choices, seed=None):
assert len(choices) > 0
self.__choices = choices
if seed is not None:
random.seed(seed)
def Generate(self):
"""Return one of the choices at random."""
while True:
yield random.choice(self.__choices)
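# Note: Generate() is an infinite generator, so callers are expected to take a
# finite slice, e.g. itertools.islice(gen.Generate(), 5) (illustrative usage,
# not part of this module's API).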
if __name__ == "__main__":
from generator import PrintExamples
options = {'choices': ["James", "Ralph", "Rob", "Mike", "Harry"],
'seed': 2398639}
gen = random_choice(**options)
PrintExamples(gen)
| bsd-3-clause | -5,633,717,101,477,794,000 | 37.440678 | 80 | 0.713845 | false |
openvenues/address_normalizer | address_normalizer/deduping/near_duplicates.py | 1 | 6806 | import geohash
import logging
import operator
from functools import partial
from itertools import chain, product, combinations, imap
from address_normalizer.deduping.duplicates import *
from address_normalizer.deduping.storage.base import *
from address_normalizer.text.gazetteers import *
from address_normalizer.text.normalize import *
from address_normalizer.models.address import *
from address_normalizer.models.venue import *
near_dupe_registry = {}
# Two lat/longs sharing a geohash prefix of 6 characters are within about 610 meters of each other
DEFAULT_GEOHASH_PRECISION = 6
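# Rough geohash cell sizes for context: 5 characters ~ 2.4 km, 6 ~ 610 m,
# 7 ~ 76 m; raising the precision therefore tightens the near-duplicate radius.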
logger = logging.getLogger('near_dupes')
class NearDupeMeta(type):
def __init__(cls, name, bases, dict_):
if 'abstract' not in dict_:
near_dupe_registry[cls.__entity_type__] = cls
super(NearDupeMeta, cls).__init__(name, bases, dict_)
dupe_cache = {}
class NearDupe(object):
abstract = True
__metaclass__ = NearDupeMeta
key_generators = ()
configured = False
storage = NopStorage()
@classmethod
def configure(cls, storage):
cls.storage = storage
@classmethod
def find_dupes(cls, ents):
if not ents:
return {}, {}, {}
entity_dict = {e.guid: e for e in ents}
clusters = defaultdict(set)
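        # Block candidates into buckets: every entity is added to one cluster per
        # generated key (for addresses, a geohash cell combined with a normalized
        # street form -- see gen_keys), and only entities sharing a bucket are
        # compared pairwise further down.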
_ = [clusters[safe_encode(c)].add(ent.guid) for ent in ents for c in cls.gen_keys(ent)]
clusters = dict(clusters)
logger.info('{} clusters found'.format(len(clusters)))
logger.info('Checking for local dupes')
local_guid_pairs = set()
local_dupes = {}
for cluster_id, guids in clusters.iteritems():
if len(guids) < 2:
continue
local_guid_pairs.update(combinations(guids, 2))
for g1, g2 in local_guid_pairs:
ent1 = entity_dict[g1]
ent2 = entity_dict[g2]
if cls.exact_dupe.is_dupe(ent1, ent2):
cls.assign_local_dupe(local_dupes, ent1, ent2)
logger.info('Checking global dupes')
existing_clusters = defaultdict(list)
if clusters:
_ = [existing_clusters[c].append(guid) for c, guid in cls.storage.search(clusters.keys()).iteritems() if guid]
existing_guids = set()
existing_ents = {}
if existing_clusters:
existing_guids = set.union(*(set(v) for v in existing_clusters.itervalues()))
existing_ents = {guid: cls.model(json.loads(e)) for guid, e in cls.storage.multiget(list(existing_guids)).iteritems() if e}
global_dupes = {}
global_guid_pairs = set([(new_guid, existing_guid) for cluster_id, existing in existing_clusters.iteritems() for new_guid, existing_guid in product(clusters[cluster_id], existing)])
for new_guid, existing_guid in global_guid_pairs:
local_ent = entity_dict[new_guid]
existing_ent = existing_ents[existing_guid]
if cls.exact_dupe.is_dupe(existing_ent, local_ent):
cls.assign_global_dupe(global_dupes, existing_ent, local_ent)
logger.info('Done with global dupe checking')
return clusters, local_dupes, global_dupes
@classmethod
def check(cls, objects, add=True):
object_dict = {o.guid: o for o in objects}
clusters, local_dupes, global_dupes = cls.find_dupes(objects)
new_clusters = {}
new_objects = {}
dupes = local_dupes.copy()
dupes.update(global_dupes)
if add:
for k, guids in clusters.iteritems():
non_dupes = [g for g in guids if g not in dupes]
if non_dupes:
guid = non_dupes[0]
new_clusters[k] = guid
new_objects[guid] = object_dict[guid]
cls.add({guid: json.dumps(obj.to_primitive()) for guid, obj in new_objects.iteritems()})
cls.add_clusters(new_clusters)
return [(obj, (dupes.get(obj.guid, obj.guid), obj.guid in dupes)) for obj in objects]
@classmethod
def assign_local_dupe(cls, dupes, existing, new):
guid1 = existing.guid
guid2 = new.guid
guid1_existing = dupes.get(guid1)
guid2_existing = dupes.get(guid2)
if not guid1_existing and not guid2_existing:
dupes[guid1] = guid2
elif guid1_existing:
dupes[guid2] = guid1_existing
elif guid2_existing:
dupes[guid1] = guid2_existing
@classmethod
def assign_global_dupe(cls, dupes, existing, new):
dupes[new.guid] = existing.guid
@classmethod
def add(cls, kvs):
cls.storage.multiput(kvs)
@classmethod
def add_clusters(cls, kvs):
cls.storage.multiput(kvs)
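# Typical flow (sketch inferred from the methods above): a NearDupe subclass is
# configure()d with a storage backend once; check() then clusters incoming
# objects, detects local and global duplicates, optionally registers the new
# canonical records, and returns (object, (canonical_guid, is_dupe)) pairs.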
class AddressNearDupe(NearDupe):
__entity_type__ = Address.entity_type
model = Address
exact_dupe = AddressDupe
geohash_precision = DEFAULT_GEOHASH_PRECISION
street_gazetteers = list(chain(*[gazette_field_registry[f] for f in (address_fields.NAME, address_fields.HOUSE_NUMBER, address_fields.STREET)]))
all_gazetteers = list(chain(*gazette_field_registry.values()))
@classmethod
def configure(cls, storage, bloom_filter=None, geohash_precision=DEFAULT_GEOHASH_PRECISION):
cls.storage = storage
if bloom_filter:
cls.bloom_filter = bloom_filter
cls.geohash_precision = geohash_precision
@classmethod
def expanded_street_address(cls, address):
street_address_components = []
house_number = (address.house_number or '').strip()
if house_number:
street_address_components.append(house_number)
street = (address.street or '').strip()
if street:
street_address_components.append(street)
surface_forms = set()
if street_address_components:
street_address = u' '.join(street_address_components)
            # expand_street_address returns the set of normalized surface forms
return address_phrase_filter.expand_street_address(street_address)
@classmethod
def geohash(cls, address):
geo = geohash.encode(address.latitude, address.longitude, cls.geohash_precision)
neighbors = geohash.neighbors(geo)
all_geo = [geo] + neighbors
return all_geo
@classmethod
def gen_keys(cls, address):
street_surface_forms = cls.expanded_street_address(address)
if address.latitude and address.longitude:
all_geo = cls.geohash(address)
for geo, norm_address in product(all_geo, street_surface_forms):
key = '|'.join([geo, norm_address])
yield key
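# Illustrative example of the blocking keys yielded above (made-up values): an
# address in geohash cell "u9f2m4" whose street expands to "123 main street"
# produces the key "u9f2m4|123 main street", plus one key for each of the eight
# neighboring cells, so near-identical addresses across a cell boundary still
# share at least one bucket.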
class VenueNearDupe(NearDupe):
__entity_type__ = Venue.entity_type
model = Venue | mit | 7,338,996,294,223,279,000 | 31.108491 | 189 | 0.618425 | false |
miyanishi2/caffe-rpc | caffe_extractor.py | 1 | 1439 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'miyanishi'
import caffe
import numpy as np
class CaffeExtractor():
def __init__(self, caffe_root=None, feature_layers=["fc6"], gpu=True):
self.feature_layers = feature_layers
MODEL_FILE = caffe_root + 'examples/imagenet/imagenet_deploy.prototxt'
PRETRAINED = caffe_root + 'examples/imagenet/caffe_reference_imagenet_model'
MEAN_FILE = caffe_root + 'python/caffe/imagenet/ilsvrc_2012_mean.npy'
self.net = caffe.Classifier(MODEL_FILE, PRETRAINED, mean=np.load(MEAN_FILE),
channel_swap=(2,1,0),
raw_scale=255,
image_dims=(256, 256))
#self.net.set_phase_test()
if gpu:
self.net.set_mode_gpu()
else:
self.net.set_mode_cpu()
imagenet_labels_filename = caffe_root + 'data/ilsvrc12/synset_words.txt'
self.labels = np.loadtxt(imagenet_labels_filename, str, delimiter='\t')
def getImageFeatures(self, image):
score = self.net.predict([image])
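        # predict() runs the forward pass over 10 oversampled crops; its return
        # value is unused here because the features are read from the blobs below.
        # Index 4 is assumed to be the center crop, and [:, 0, 0] flattens the
        # fc-layer blob into a plain feature vector.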
feature_dic = {layer:np.copy(self.net.blobs[layer].data[4][:,0,0]) for layer in self.feature_layers}
return feature_dic
def getImageLabels(self):
top_k = self.net.blobs['prob'].data[4].flatten().argsort()[-1:-6:-1]
labels = self.labels[top_k].tolist()
return labels
| bsd-2-clause | -3,062,253,772,976,018,400 | 36.868421 | 108 | 0.587908 | false |
Donkyhotay/MoonPy | twisted/internet/posixbase.py | 1 | 14121 | # -*- test-case-name: twisted.test.test_internet -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Posix reactor base class
"""
import warnings
import socket
import errno
import os
from zope.interface import implements, classImplements
from twisted.python.compat import set
from twisted.internet.interfaces import IReactorUNIX, IReactorUNIXDatagram
from twisted.internet.interfaces import IReactorTCP, IReactorUDP, IReactorSSL, IReactorArbitrary
from twisted.internet.interfaces import IReactorProcess, IReactorMulticast
from twisted.internet.interfaces import IHalfCloseableDescriptor
from twisted.internet import error
from twisted.internet import tcp, udp
from twisted.python import log, failure, util
from twisted.persisted import styles
from twisted.python.runtime import platformType, platform
from twisted.internet.base import ReactorBase, _SignalReactorMixin
try:
from twisted.internet import ssl
sslEnabled = True
except ImportError:
sslEnabled = False
try:
from twisted.internet import unix
unixEnabled = True
except ImportError:
unixEnabled = False
processEnabled = False
if platformType == 'posix':
from twisted.internet import fdesc
import process
processEnabled = True
if platform.isWindows():
try:
import win32process
processEnabled = True
except ImportError:
win32process = None
class _Win32Waker(log.Logger, styles.Ephemeral):
"""I am a workaround for the lack of pipes on win32.
I am a pair of connected sockets which can wake up the main loop
from another thread.
"""
disconnected = 0
def __init__(self, reactor):
"""Initialize.
"""
self.reactor = reactor
# Following select_trigger (from asyncore)'s example;
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.setsockopt(socket.IPPROTO_TCP, 1, 1)
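        # Option constant 1 is TCP_NODELAY: disabling Nagle's algorithm lets the
        # single wake-up byte be sent immediately instead of being buffered.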
server.bind(('127.0.0.1', 0))
server.listen(1)
client.connect(server.getsockname())
reader, clientaddr = server.accept()
client.setblocking(0)
reader.setblocking(0)
self.r = reader
self.w = client
self.fileno = self.r.fileno
def wakeUp(self):
"""Send a byte to my connection.
"""
try:
util.untilConcludes(self.w.send, 'x')
except socket.error, (err, msg):
if err != errno.WSAEWOULDBLOCK:
raise
def doRead(self):
"""Read some data from my connection.
"""
try:
self.r.recv(8192)
except socket.error:
pass
def connectionLost(self, reason):
self.r.close()
self.w.close()
class _UnixWaker(log.Logger, styles.Ephemeral):
"""This class provides a simple interface to wake up the event loop.
This is used by threads or signals to wake up the event loop.
"""
disconnected = 0
i = None
o = None
def __init__(self, reactor):
"""Initialize.
"""
self.reactor = reactor
self.i, self.o = os.pipe()
fdesc.setNonBlocking(self.i)
fdesc._setCloseOnExec(self.i)
fdesc.setNonBlocking(self.o)
fdesc._setCloseOnExec(self.o)
self.fileno = lambda: self.i
def doRead(self):
"""Read some bytes from the pipe.
"""
fdesc.readFromFD(self.fileno(), lambda data: None)
def wakeUp(self):
"""Write one byte to the pipe, and flush it.
"""
# We don't use fdesc.writeToFD since we need to distinguish
# between EINTR (try again) and EAGAIN (do nothing).
if self.o is not None:
try:
util.untilConcludes(os.write, self.o, 'x')
except OSError, e:
if e.errno != errno.EAGAIN:
raise
def connectionLost(self, reason):
"""Close both ends of my pipe.
"""
if not hasattr(self, "o"):
return
for fd in self.i, self.o:
try:
os.close(fd)
except IOError:
pass
del self.i, self.o
if platformType == 'posix':
_Waker = _UnixWaker
elif platformType == 'win32':
_Waker = _Win32Waker
class PosixReactorBase(_SignalReactorMixin, ReactorBase):
"""
A basis for reactors that use file descriptors.
"""
implements(IReactorArbitrary, IReactorTCP, IReactorUDP, IReactorMulticast)
def __init__(self):
ReactorBase.__init__(self)
if self.usingThreads or platformType == "posix":
self.installWaker()
def _disconnectSelectable(self, selectable, why, isRead, faildict={
error.ConnectionDone: failure.Failure(error.ConnectionDone()),
error.ConnectionLost: failure.Failure(error.ConnectionLost())
}):
"""
Utility function for disconnecting a selectable.
        Supports half-close notification; isRead should be a boolean indicating
        whether the error resulted from doRead().
"""
self.removeReader(selectable)
f = faildict.get(why.__class__)
if f:
if (isRead and why.__class__ == error.ConnectionDone
and IHalfCloseableDescriptor.providedBy(selectable)):
selectable.readConnectionLost(f)
else:
self.removeWriter(selectable)
selectable.connectionLost(f)
else:
self.removeWriter(selectable)
selectable.connectionLost(failure.Failure(why))
def installWaker(self):
"""
Install a `waker' to allow threads and signals to wake up the IO thread.
We use the self-pipe trick (http://cr.yp.to/docs/selfpipe.html) to wake
the reactor. On Windows we use a pair of sockets.
"""
if not self.waker:
self.waker = _Waker(self)
self._internalReaders.add(self.waker)
self.addReader(self.waker)
# IReactorProcess
def spawnProcess(self, processProtocol, executable, args=(),
env={}, path=None,
uid=None, gid=None, usePTY=0, childFDs=None):
args, env = self._checkProcessArgs(args, env)
if platformType == 'posix':
if usePTY:
if childFDs is not None:
raise ValueError("Using childFDs is not supported with usePTY=True.")
return process.PTYProcess(self, executable, args, env, path,
processProtocol, uid, gid, usePTY)
else:
return process.Process(self, executable, args, env, path,
processProtocol, uid, gid, childFDs)
elif platformType == "win32":
if uid is not None or gid is not None:
raise ValueError("The uid and gid parameters are not supported on Windows.")
if usePTY:
raise ValueError("The usePTY parameter is not supported on Windows.")
if childFDs:
raise ValueError("Customizing childFDs is not supported on Windows.")
if win32process:
from twisted.internet._dumbwin32proc import Process
return Process(self, processProtocol, executable, args, env, path)
else:
raise NotImplementedError, "spawnProcess not available since pywin32 is not installed."
else:
raise NotImplementedError, "spawnProcess only available on Windows or POSIX."
# IReactorUDP
def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
"""Connects a given L{DatagramProtocol} to the given numeric UDP port.
@returns: object conforming to L{IListeningPort}.
"""
p = udp.Port(port, protocol, interface, maxPacketSize, self)
p.startListening()
return p
def connectUDP(self, remotehost, remoteport, protocol, localport=0,
interface='', maxPacketSize=8192):
"""DEPRECATED.
Connects a L{ConnectedDatagramProtocol} instance to a UDP port.
"""
warnings.warn("use listenUDP and then transport.connect().", DeprecationWarning, stacklevel=2)
p = udp.ConnectedPort((remotehost, remoteport), localport, protocol, interface, maxPacketSize, self)
p.startListening()
return p
# IReactorMulticast
def listenMulticast(self, port, protocol, interface='', maxPacketSize=8192, listenMultiple=False):
"""Connects a given DatagramProtocol to the given numeric UDP port.
EXPERIMENTAL.
@returns: object conforming to IListeningPort.
"""
p = udp.MulticastPort(port, protocol, interface, maxPacketSize, self, listenMultiple)
p.startListening()
return p
# IReactorUNIX
def connectUNIX(self, address, factory, timeout=30, checkPID=0):
"""@see: twisted.internet.interfaces.IReactorUNIX.connectUNIX
"""
assert unixEnabled, "UNIX support is not present"
c = unix.Connector(address, factory, timeout, self, checkPID)
c.connect()
return c
_unspecified = object()
def _checkMode(self, name, mode):
"""
Check C{mode} to see if a value was specified for it and emit a
deprecation warning if so. Return the default value if none was
specified, otherwise return C{mode}.
"""
if mode is not self._unspecified:
warnings.warn(
'The mode parameter of %(name)s will be removed. Do not pass '
'a value for it. Set permissions on the containing directory '
'before calling %(name)s, instead.' % dict(name=name),
category=DeprecationWarning,
stacklevel=3)
else:
mode = 0666
return mode
def listenUNIX(self, address, factory, backlog=50, mode=_unspecified,
wantPID=0):
"""
@see: twisted.internet.interfaces.IReactorUNIX.listenUNIX
"""
assert unixEnabled, "UNIX support is not present"
mode = self._checkMode('IReactorUNIX.listenUNIX', mode)
p = unix.Port(address, factory, backlog, mode, self, wantPID)
p.startListening()
return p
# IReactorUNIXDatagram
def listenUNIXDatagram(self, address, protocol, maxPacketSize=8192,
mode=_unspecified):
"""
Connects a given L{DatagramProtocol} to the given path.
EXPERIMENTAL.
@returns: object conforming to L{IListeningPort}.
"""
assert unixEnabled, "UNIX support is not present"
mode = self._checkMode('IReactorUNIXDatagram.listenUNIXDatagram', mode)
p = unix.DatagramPort(address, protocol, maxPacketSize, mode, self)
p.startListening()
return p
def connectUNIXDatagram(self, address, protocol, maxPacketSize=8192,
mode=_unspecified, bindAddress=None):
"""
Connects a L{ConnectedDatagramProtocol} instance to a path.
EXPERIMENTAL.
"""
assert unixEnabled, "UNIX support is not present"
        mode = self._checkMode('IReactorUNIXDatagram.connectUNIXDatagram', mode)
p = unix.ConnectedDatagramPort(address, protocol, maxPacketSize, mode, bindAddress, self)
p.startListening()
return p
# IReactorTCP
def listenTCP(self, port, factory, backlog=50, interface=''):
"""@see: twisted.internet.interfaces.IReactorTCP.listenTCP
"""
p = tcp.Port(port, factory, backlog, interface, self)
p.startListening()
return p
def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
"""@see: twisted.internet.interfaces.IReactorTCP.connectTCP
"""
c = tcp.Connector(host, port, factory, timeout, bindAddress, self)
c.connect()
return c
# IReactorSSL (sometimes, not implemented)
def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
"""@see: twisted.internet.interfaces.IReactorSSL.connectSSL
"""
assert sslEnabled, "SSL support is not present"
c = ssl.Connector(host, port, factory, contextFactory, timeout, bindAddress, self)
c.connect()
return c
def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
"""@see: twisted.internet.interfaces.IReactorSSL.listenSSL
"""
assert sslEnabled, "SSL support is not present"
p = ssl.Port(port, factory, contextFactory, backlog, interface, self)
p.startListening()
return p
# IReactorArbitrary
def listenWith(self, portType, *args, **kw):
kw['reactor'] = self
p = portType(*args, **kw)
p.startListening()
return p
def connectWith(self, connectorType, *args, **kw):
kw['reactor'] = self
c = connectorType(*args, **kw)
c.connect()
return c
def _removeAll(self, readers, writers):
"""
        Remove all readers and writers, and return the list of removed L{IReadDescriptor}s
and L{IWriteDescriptor}s.
Meant for calling from subclasses, to implement removeAll, like::
def removeAll(self):
return self._removeAll(self._reads, self._writes)
where C{self._reads} and C{self._writes} are iterables.
"""
removedReaders = set(readers) - self._internalReaders
for reader in removedReaders:
self.removeReader(reader)
removedWriters = set(writers)
for writer in removedWriters:
self.removeWriter(writer)
return list(removedReaders | removedWriters)
if sslEnabled:
classImplements(PosixReactorBase, IReactorSSL)
if unixEnabled:
classImplements(PosixReactorBase, IReactorUNIX, IReactorUNIXDatagram)
if processEnabled:
classImplements(PosixReactorBase, IReactorProcess)
__all__ = ["PosixReactorBase"]
| gpl-3.0 | 8,697,282,309,824,158,000 | 32.147887 | 108 | 0.622477 | false |
unfoldingWord-dev/uwadmin | uwadmin/migrations/0005_auto_20150524_1534.py | 1 | 1202 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('uwadmin', '0004_auto_20150318_0034'),
]
operations = [
migrations.AddField(
model_name='publishrequest',
name='approved_at',
field=models.DateTimeField(default=None, null=True, db_index=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='publishrequest',
name='source_text',
field=models.ForeignKey(related_name='source_publish_requests', to='uwadmin.LangCode', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='publishrequest',
name='source_version',
field=models.CharField(max_length=10, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='publishrequest',
name='language',
field=models.ForeignKey(related_name='publish_requests', to='uwadmin.LangCode'),
preserve_default=True,
),
]
| mit | 8,915,345,621,021,246,000 | 30.631579 | 110 | 0.583195 | false |