_id (stringlengths 2–7) | title (stringlengths 1–88) | partition (stringclasses 3 values) | text (stringlengths 75–19.8k) | language (stringclasses 1 value) | meta_information (dict) |
---|---|---|---|---|---|
q279000
|
RPCSystem.create_function_stub
|
test
|
def create_function_stub(self, url):
    """
    Create a callable that will invoke the given remote function.
    The stub will return a deferred even if the remote function does not.
    """
    assert self._opened, "RPC System is not opened"
    logging.debug("create_function_stub(%s)" % repr(url))
    parseresult = urlparse.urlparse(url)
    scheme = parseresult.scheme
    path = parseresult.path.split("/")
    if scheme != "anycall":
        raise ValueError("Not an anycall URL: %s" % repr(url))
    if len(path) != 3 or path[0] != "" or path[1] != "functions":
        raise ValueError("Not an URL for a remote function: %s" % repr(url))
    try:
        functionid = uuid.UUID(path[2])
    except ValueError:
        raise ValueError("Not a valid URL for a remote function: %s" % repr(url))
    return _RPCFunctionStub(parseresult.netloc, functionid, self)
|
python
|
{
"resource": ""
}
|
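A minimal usage sketch for the entry above. The URL shape (`anycall://<netloc>/functions/<uuid>`) is inferred from the parsing logic; the opened `rpc` system instance and the host are assumptions.

```python
# Hypothetical call; `rpc` is an opened RPCSystem (assumption).
url = ("anycall://127.0.0.1:12345/functions/"
       "12345678-1234-5678-1234-567812345678")
stub = rpc.create_function_stub(url)
d = stub()  # always a deferred, even if the remote function is synchronous
```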
q279001
|
RPCSystem._ping
|
test
|
def _ping(self, peerid, callid):
    """
    Called from remote to ask if a call made to here is still in progress.
    """
    if not (peerid, callid) in self._remote_to_local:
        logger.warn("No remote call %s from %s. Might just be unfortunate timing." % (callid, peerid))
|
python
|
{
"resource": ""
}
|
q279002
|
_CommandCompleterMixin._cmdRegex
|
test
|
def _cmdRegex(self, cmd_grp=None):
    """Get command regex string and completer dict."""
    cmd_grp = cmd_grp or "cmd"
    help_opts = ("-h", "--help")
    cmd = self.name()
    names = "|".join([re.escape(cmd)] +
                     [re.escape(a) for a in self.aliases()])
    opts = []
    for action in self.parser._actions:
        opts += [a for a in action.option_strings
                 if a not in help_opts]
    opts_re = "|".join([re.escape(o) for o in opts])
    if opts_re:
        opts_re = rf"(\s+(?P<{cmd_grp}_opts>{opts_re}))*"
    help_re = "|".join([re.escape(o) for o in help_opts])
    help_re = rf"(\s+(?P<HELP_OPTS>{help_re}))*"
    completers = {}
    if opts_re:
        completers[f"{cmd_grp}_opts"] = WordCompleter(opts)
    # Single Help completer added elsewhere
    return tuple([
        rf"""(?P<{cmd_grp}>{names}){opts_re}{help_re}""",
        completers
    ])
|
python
|
{
"resource": ""
}
|
q279003
|
NestedAMPBox.fromStringProto
|
test
|
def fromStringProto(self, inString, proto):
    """
    Defers to `amp.AmpList`, then gets the element from the list.
    """
    value, = amp.AmpList.fromStringProto(self, inString, proto)
    return value
|
python
|
{
"resource": ""
}
|
q279004
|
NestedAMPBox.toStringProto
|
test
|
def toStringProto(self, inObject, proto):
    """
    Wraps the object in a list, and then defers to ``amp.AmpList``.
    """
    return amp.AmpList.toStringProto(self, [inObject], proto)
|
python
|
{
"resource": ""
}
|
q279005
|
MetadataStatement.verify
|
test
|
def verify(self, **kwargs):
    """
    Verifies that an instance of this class adheres to the given
    restrictions.
    :param kwargs: A set of keyword arguments
    :return: True if it verifies OK otherwise False.
    """
    super(MetadataStatement, self).verify(**kwargs)
    if "signing_keys" in self:
        if 'signing_keys_uri' in self:
            raise VerificationError(
                'You can only have one of "signing_keys" and '
                '"signing_keys_uri" in a metadata statement')
        else:
            # signing_keys MUST be a JWKS
            kj = KeyJar()
            try:
                kj.import_jwks(self['signing_keys'], '')
            except Exception:
                raise VerificationError('"signing_keys" not a proper JWKS')
    if "metadata_statements" in self and "metadata_statement_uris" in self:
        s = set(self['metadata_statements'].keys())
        t = set(self['metadata_statement_uris'].keys())
        if s.intersection(t):
            raise VerificationError(
                'You should not have the same key in "metadata_statements" '
                'and in "metadata_statement_uris"')
    return True
|
python
|
{
"resource": ""
}
|
q279006
|
KeyBundle._parse_remote_response
|
test
|
def _parse_remote_response(self, response):
    """
    Parse simple JWKS or signed JWKS from the HTTP response.
    :param response: HTTP response from the 'jwks_uri' or 'signed_jwks_uri'
        endpoint
    :return: response parsed as JSON or None
    """
    # Check if the content type is the right one.
    try:
        if response.headers["Content-Type"] == 'application/json':
            logger.debug(
                "Loaded JWKS: %s from %s" % (response.text, self.source))
            try:
                return json.loads(response.text)
            except ValueError:
                return None
        elif response.headers["Content-Type"] == 'application/jwt':
            logger.debug(
                "Signed JWKS: %s from %s" % (response.text, self.source))
            _jws = factory(response.text)
            _resp = _jws.verify_compact(
                response.text, keys=self.verify_keys.get_signing_key())
            return _resp
        else:
            logger.error('Wrong content type: {}'.format(
                response.headers['Content-Type']))
            raise ValueError('Content-type mismatch')
    except KeyError:
        pass
|
python
|
{
"resource": ""
}
|
q279007
|
dump
|
test
|
def dump(filename, dbname, username=None, password=None, host=None,
         port=None, tempdir='/tmp', pg_dump_path='pg_dump', format='p'):
    """Performs a pg_dump backup.
    It runs with the current system user's privileges, unless you specify
    username and password.
    By default pg_dump connects to the value given in the PGHOST environment
    variable.
    You can either specify "hostname" and "port" or a socket path.
    pg_dump expects the pg_dump utility to be on $PATH.
    Should that not be the case, you are allowed to specify a custom location
    with "pg_dump_path".
    Format is p (plain / default), c = custom, d = directory, t = tar.
    Returns statuscode and shell output.
    """
    filepath = os.path.join(tempdir, filename)
    cmd = pg_dump_path
    cmd += ' --format %s' % format
    cmd += ' --file ' + os.path.join(tempdir, filename)
    if username:
        cmd += ' --username %s' % username
    if host:
        cmd += ' --host %s' % host
    if port:
        cmd += ' --port %s' % port
    cmd += ' ' + dbname
    ## export pgpasswd
    if password:
        os.environ["PGPASSWORD"] = password
    ## run pgdump
    return sh(cmd)
|
python
|
{
"resource": ""
}
|
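A hedged invocation sketch for the `dump` entry above; the database name and credentials are placeholders, and the docstring's claim that `sh` returns a status code and shell output is taken at face value.

```python
# Hypothetical backup of database 'mydb' to /tmp/mydb.dump (custom format).
status, output = dump('mydb.dump', 'mydb',
                      username='postgres', password='secret',
                      host='localhost', port=5432, format='c')
```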
q279008
|
db_list
|
test
|
def db_list(username=None, password=None, host=None, port=None,
            maintain_db='postgres'):
    "returns a list of all databases on this server"
    conn = _connection(username=username, password=password, host=host,
                       port=port, db=maintain_db)
    cur = conn.cursor()
    cur.execute('SELECT DATNAME from pg_database')
    rows = cur.fetchall()
    conn.close()
    result = []
    for row in rows:
        result.append(row[0])
    return result
|
python
|
{
"resource": ""
}
|
q279009
|
Tigre._get_local_files
|
test
|
def _get_local_files(self, path):
    """Returns a dictionary of all the files under a path."""
    if not path:
        raise ValueError("No path specified")
    files = defaultdict(lambda: None)
    path_len = len(path) + 1
    for root, dirs, filenames in os.walk(path):
        for name in filenames:
            full_path = join(root, name)
            files[full_path[path_len:]] = compute_md5(full_path)
    return files
|
python
|
{
"resource": ""
}
|
q279010
|
Tigre.sync_folder
|
test
|
def sync_folder(self, path, bucket):
    """Syncs a local directory with an S3 bucket.
    Currently does not delete files from S3 that are not in the local directory.
    path: The path to the directory to sync to S3
    bucket: The name of the bucket on S3
    """
    bucket = self.conn.get_bucket(bucket)
    local_files = self._get_local_files(path)
    s3_files = self._get_s3_files(bucket)
    for filename, hash in local_files.iteritems():
        s3_key = s3_files[filename]
        if s3_key is None:
            s3_key = Key(bucket)
            s3_key.key = filename
            s3_key.etag = '"!"'
        if s3_key.etag[1:-1] != hash[0]:
            s3_key.set_contents_from_filename(join(path, filename), md5=hash)
|
python
|
{
"resource": ""
}
|
q279011
|
tokens_required
|
test
|
def tokens_required(service_list):
    """
    Ensure the user has the necessary tokens for the specified services
    """
    def decorator(func):
        @wraps(func)
        def inner(request, *args, **kwargs):
            for service in service_list:
                if service not in request.session["user_tokens"]:
                    return redirect('denied')
            return func(request, *args, **kwargs)
        return inner
    return decorator
|
python
|
{
"resource": ""
}
|
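A usage sketch for the decorator above in a Django view; the `"github"` service name is hypothetical, and the session layout follows the `login` view later in this table.

```python
from django.http import HttpResponse

@tokens_required(["github"])
def repo_list(request):
    # Reached only if request.session["user_tokens"] contains "github";
    # otherwise the decorator redirects to the 'denied' route.
    return HttpResponse("ok")
```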
q279012
|
login
|
test
|
def login(request, template_name='ci/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm):
    """
    Displays the login form and handles the login action.
    """
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name, ''))
    if request.method == "POST":
        form = authentication_form(request, data=request.POST)
        if form.is_valid():
            # Ensure the user-originating redirection url is safe.
            if not is_safe_url(url=redirect_to, host=request.get_host()):
                redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
            # Okay, security check complete. Get the user object from auth api.
            user = form.get_user()
            request.session['user_token'] = user["token"]
            request.session['user_email'] = user["email"]
            request.session['user_permissions'] = user["permissions"]
            request.session['user_id'] = user["id"]
            request.session['user_list'] = user["user_list"]
            if not settings.HIDE_DASHBOARDS:
                # Set user dashboards because they are slow to change
                dashboards = ciApi.get_user_dashboards(user["id"])
                dashboard_list = list(dashboards['results'])
                if len(dashboard_list) > 0:
                    request.session['user_dashboards'] = \
                        dashboard_list[0]["dashboards"]
                    request.session['user_default_dashboard'] = \
                        dashboard_list[0]["default_dashboard"]["id"]
                else:
                    request.session['user_dashboards'] = []
                    request.session['user_default_dashboard'] = None
            # Get the user access tokens too and format for easy access
            tokens = ciApi.get_user_service_tokens(
                params={"user_id": user["id"]})
            token_list = list(tokens['results'])
            user_tokens = {}
            if len(token_list) > 0:
                for token in token_list:
                    user_tokens[token["service"]["name"]] = {
                        "token": token["token"],
                        "url": token["service"]["url"] + "/api/v1"
                    }
            request.session['user_tokens'] = user_tokens
            return HttpResponseRedirect(redirect_to)
    else:
        form = authentication_form(request)
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    return TemplateResponse(request, template_name, context)
|
python
|
{
"resource": ""
}
|
q279013
|
build
|
test
|
def build(cli, path, package):
    """Build CLI dynamically based on the package structure.
    """
    for _, name, ispkg in iter_modules(path):
        module = import_module(f'.{name}', package)
        if ispkg:
            build(cli.group(name)(module.group),
                  module.__path__,
                  module.__package__)
        else:
            cli.command(name)(module.command)
|
python
|
{
"resource": ""
}
|
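The builder above mirrors a package tree onto a command tree: subpackages must expose a `group`, leaf modules a `command`. A hypothetical entry point, assuming a click-style CLI object and a package named `mycli.commands`:

```python
import click
import mycli.commands as commands  # hypothetical package

@click.group()
def cli():
    """Root command group."""

# Each subpackage of mycli.commands becomes a nested group,
# each module becomes a command under its parent group.
build(cli, commands.__path__, commands.__package__)

if __name__ == '__main__':
    cli()
```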
q279014
|
Fridge.readonly
|
test
|
def readonly(cls, *args, **kwargs):
    """
    Return an already closed read-only instance of Fridge.
    Arguments are the same as for the constructor.
    """
    fridge = cls(*args, **kwargs)
    fridge.close()
    return fridge
|
python
|
{
"resource": ""
}
|
q279015
|
Fridge.load
|
test
|
def load(self):
    """
    Force reloading the data from the file.
    All data in the in-memory dictionary is discarded.
    This method is called automatically by the constructor, normally you
    don't need to call it.
    """
    self._check_open()
    try:
        data = json.load(self.file, **self.load_args)
    except ValueError:
        data = {}
    if not isinstance(data, dict):
        raise ValueError('Root JSON type must be dictionary')
    self.clear()
    self.update(data)
|
python
|
{
"resource": ""
}
|
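`Fridge` reads like a dict backed by a JSON file, with `load` discarding unsaved in-memory state; a sketch assuming the constructor takes a file path:

```python
fridge = Fridge('state.json')        # constructor signature is an assumption
fridge['counter'] = 1
fridge.load()                        # re-reads state.json, dropping the unsaved value
ro = Fridge.readonly('state.json')   # already closed, read-only instance
```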
q279016
|
self_sign_jwks
|
test
|
def self_sign_jwks(keyjar, iss, kid='', lifetime=3600):
    """
    Create a signed JWT containing a JWKS. The JWT is signed by one of the
    keys in the JWKS.
    :param keyjar: A KeyJar instance with at least one private signing key
    :param iss: issuer of the JWT, should be the owner of the keys
    :param kid: A key ID if a special key should be used otherwise one
        is picked at random.
    :param lifetime: The lifetime of the signed JWT
    :return: A signed JWT
    """
    # _json = json.dumps(jwks)
    _jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
    jwks = keyjar.export_jwks(issuer=iss)
    return _jwt.pack(payload={'jwks': jwks}, owner=iss, kid=kid)
|
python
|
{
"resource": ""
}
|
q279017
|
request_signed_by_signing_keys
|
test
|
def request_signed_by_signing_keys(keyjar, msreq, iss, lifetime, kid=''):
    """
    A metadata statement signing request with 'signing_keys' signed by one
    of the keys in 'signing_keys'.
    :param keyjar: A KeyJar instance with the private signing key
    :param msreq: Metadata statement signing request. A MetadataStatement
        instance.
    :param iss: Issuer of the signing request also the owner of the signing
        keys.
    :return: Signed JWT where the body is the metadata statement
    """
    try:
        jwks_to_keyjar(msreq['signing_keys'], iss)
    except KeyError:
        jwks = keyjar.export_jwks(issuer=iss)
        msreq['signing_keys'] = jwks
    _jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
    return _jwt.pack(owner=iss, kid=kid, payload=msreq.to_dict())
|
python
|
{
"resource": ""
}
|
q279018
|
library
|
test
|
def library(func):
    """
    A decorator for providing a unittest with a library and having it called
    only once.
    """
    @wraps(func)
    def wrapped(*args, **kwargs):
        """Transparent wrapper."""
        return func(*args, **kwargs)
    SINGLES.append(wrapped)
    return wrapped
|
python
|
{
"resource": ""
}
|
q279019
|
descovery
|
test
|
def descovery(testdir):
    """Discover and load greencard tests."""
    from os.path import join, exists, isdir, splitext, basename, sep
    if not testdir or not exists(testdir) or not isdir(testdir):
        return None
    from os import walk
    import fnmatch
    import imp
    for root, _, filenames in walk(testdir):
        for filename in fnmatch.filter(filenames, '*.py'):
            path = join(root, filename)
            modulepath = splitext(root)[0].replace(sep, '.')
            imp.load_source(modulepath, path)
|
python
|
{
"resource": ""
}
|
q279020
|
main
|
test
|
def main(clargs=None):
    """Command line entry point."""
    from argparse import ArgumentParser
    from librarian.library import Library
    import sys
    parser = ArgumentParser(
        description="A test runner for each card in a librarian library.")
    parser.add_argument("library", help="Library database")
    parser.add_argument("-t", "--tests", default="test/",
                        help="Test directory")
    args = parser.parse_args(clargs)
    descovery(args.tests)
    library = Library(args.library)
    cardcount, passes, failures = execute_tests(library)
    print(RESULTS.format(len(SINGLES), len(TESTS), cardcount, passes,
                         failures))
    sys.exit(failures)
|
python
|
{
"resource": ""
}
|
q279021
|
letter_score
|
test
|
def letter_score(letter):
    """Returns the Scrabble score of a letter.
    Args:
        letter: a single character string
    Raises:
        TypeError if a non-Scrabble character is supplied
    """
    score_map = {
        1: ["a", "e", "i", "o", "u", "l", "n", "r", "s", "t"],
        2: ["d", "g"],
        3: ["b", "c", "m", "p"],
        4: ["f", "h", "v", "w", "y"],
        5: ["k"],
        8: ["j", "x"],
        10: ["q", "z"],
    }
    for score, letters in score_map.items():
        if letter.lower() in letters:
            return score
    else:
        raise TypeError("Invalid letter: %s", letter)
|
python
|
{
"resource": ""
}
|
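Expected behaviour of `letter_score`, worked out from its `score_map` (doctest-style sketch):

```python
>>> letter_score("q")
10
>>> letter_score("E")  # lookup is case-insensitive via letter.lower()
1
>>> letter_score("@")  # falls through the loop and raises
Traceback (most recent call last):
    ...
TypeError: ('Invalid letter: %s', '@')
```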
q279022
|
word_score
|
test
|
def word_score(word, input_letters, questions=0):
    """Checks the Scrabble score of a single word.
    Args:
        word: a string to check the Scrabble score of
        input_letters: the letters in our rack
        questions: integer of the tiles already on the board to build on
    Returns:
        an integer Scrabble score amount for the word
    """
    score = 0
    bingo = 0
    filled_by_blanks = []
    rack = list(input_letters)  # make a copy to speed up find_anagrams()
    for letter in word:
        if letter in rack:
            bingo += 1
            score += letter_score(letter)
            rack.remove(letter)
        else:
            filled_by_blanks.append(letter_score(letter))
    # we can have both ?'s and _'s in the word. this will apply the ?s to the
    # highest scrabble score value letters and leave the blanks for low points.
    for blank_score in sorted(filled_by_blanks, reverse=True):
        if questions > 0:
            score += blank_score
            questions -= 1
    # 50 bonus points for using all the tiles in your rack
    if bingo > 6:
        score += 50
    return score
|
python
|
{
"resource": ""
}
|
q279023
|
word_list
|
test
|
def word_list(sowpods=False, start="", end=""):
"""Opens the word list file.
Args:
sowpods: a boolean to declare using the sowpods list or TWL (default)
start: a string of starting characters to find anagrams based on
end: a string of ending characters to find anagrams based on
Yeilds:
a word at a time out of 178691 words for TWL, 267751 for sowpods. Much
less if either start or end are used (filtering is applied here)
"""
location = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"wordlists",
)
if sowpods:
filename = "sowpods.txt"
else:
filename = "twl.txt"
filepath = os.path.join(location, filename)
with open(filepath) as wordfile:
for word in wordfile.readlines():
word = word.strip()
if start and end and word.startswith(start) and word.endswith(end):
yield word
elif start and word.startswith(start) and not end:
yield word
elif end and word.endswith(end) and not start:
yield word
elif not start and not end:
yield word
|
python
|
{
"resource": ""
}
|
q279024
|
valid_scrabble_word
|
test
|
def valid_scrabble_word(word):
    """Checks if the input word could be played with a full bag of tiles.
    Returns:
        True or False
    """
    letters_in_bag = {
        "a": 9,
        "b": 2,
        "c": 2,
        "d": 4,
        "e": 12,
        "f": 2,
        "g": 3,
        "h": 2,
        "i": 9,
        "j": 1,
        "k": 1,
        "l": 4,
        "m": 2,
        "n": 6,
        "o": 8,
        "p": 2,
        "q": 1,
        "r": 6,
        "s": 4,
        "t": 6,
        "u": 4,
        "v": 2,
        "w": 2,
        "x": 1,
        "y": 2,
        "z": 1,
        "_": 2,
    }
    for letter in word:
        if letter == "?":
            continue
        try:
            letters_in_bag[letter] -= 1
        except KeyError:
            return False
        if letters_in_bag[letter] < 0:
            letters_in_bag["_"] -= 1
            if letters_in_bag["_"] < 0:
                return False
    return True
|
python
|
{
"resource": ""
}
|
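A few outcomes worked out from the tile counts above (doctest-style sketch): repeated letters spill over into the two `"_"` blanks, and `"?"` is skipped entirely.

```python
>>> valid_scrabble_word("jazz")      # second 'z' consumes one blank
True
>>> valid_scrabble_word("pizzazz")   # four 'z's exhaust the tile and both blanks
False
>>> valid_scrabble_word("qi?")       # '?' is not drawn from the bag
True
```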
q279025
|
main
|
test
|
def main(args):
    """docstring for main"""
    try:
        args.query = ' '.join(args.query).replace('?', '')
        so = SOSearch(args.query, args.tags)
        result = so.first_q().best_answer.code
        if result != None:
            print result
        else:
            print("Sorry I can't find your answer, try adding tags")
    except NoResult, e:
        print("Sorry I can't find your answer, try adding tags")
|
python
|
{
"resource": ""
}
|
q279026
|
cli_run
|
test
|
def cli_run():
    """docstring for argparse"""
    parser = argparse.ArgumentParser(description='Stupidly simple code answers from StackOverflow')
    parser.add_argument('query', help="What's the problem ?", type=str, nargs='+')
    parser.add_argument('-t', '--tags', help='semicolon separated tags -> python;lambda')
    args = parser.parse_args()
    main(args)
|
python
|
{
"resource": ""
}
|
q279027
|
JSONAMPDialectReceiver.stringReceived
|
test
|
def stringReceived(self, string):
    """Handle a JSON AMP dialect request.
    First, the JSON is parsed. Then, all JSON dialect specific
    values in the request are turned into the correct objects.
    Then, finds the correct responder function, calls it, and
    serializes the result (or error).
    """
    request = loads(string)
    identifier = request.pop("_ask")
    commandName = request.pop("_command")
    command, responder = self._getCommandAndResponder(commandName)
    self._parseRequestValues(request, command)
    d = self._runResponder(responder, request, command, identifier)
    d.addCallback(self._writeResponse)
|
python
|
{
"resource": ""
}
|
q279028
|
JSONAMPDialectReceiver._getCommandAndResponder
|
test
|
def _getCommandAndResponder(self, commandName):
    """Gets the command class and matching responder function for the
    given command name.
    """
    # DISGUSTING IMPLEMENTATION DETAIL EXPLOITING HACK
    locator = self._remote.boxReceiver.locator
    responder = locator.locateResponder(commandName)
    responderFunction = responder.func_closure[1].cell_contents
    command = responder.func_closure[2].cell_contents
    return command, responderFunction
|
python
|
{
"resource": ""
}
|
q279029
|
JSONAMPDialectReceiver._parseRequestValues
|
test
|
def _parseRequestValues(self, request, command):
    """Parses all the values in the request that are in a form specific
    to the JSON AMP dialect.
    """
    for key, ampType in command.arguments:
        ampClass = ampType.__class__
        if ampClass is exposed.ExposedResponderLocator:
            request[key] = self._remote
            continue
        decoder = _decoders.get(ampClass)
        if decoder is not None:
            value = request.get(key)
            request[key] = decoder(value, self)
|
python
|
{
"resource": ""
}
|
q279030
|
JSONAMPDialectReceiver._runResponder
|
test
|
def _runResponder(self, responder, request, command, identifier):
    """Run the responder function. If it succeeds, add the _answer key.
    If it fails with an error known to the command, serialize the
    error.
    """
    d = defer.maybeDeferred(responder, **request)
    def _addIdentifier(response):
        """Return the response with an ``_answer`` key.
        """
        response["_answer"] = identifier
        return response
    def _serializeFailure(failure):
        """
        If the failure is serializable by this AMP command, serialize it.
        """
        key = failure.trap(*command.allErrors)
        response = {
            "_error_code": command.allErrors[key],
            "_error_description": str(failure.value),
            "_error": identifier
        }
        return response
    d.addCallbacks(_addIdentifier, _serializeFailure)
    return d
|
python
|
{
"resource": ""
}
|
q279031
|
JSONAMPDialectReceiver._writeResponse
|
test
|
def _writeResponse(self, response):
    """
    Serializes the response to JSON, and writes it to the transport.
    """
    encoded = dumps(response, default=_default)
    self.transport.write(encoded)
|
python
|
{
"resource": ""
}
|
q279032
|
JSONAMPDialectReceiver.connectionLost
|
test
|
def connectionLost(self, reason):
    """
    Tells the box receiver to stop receiving boxes.
    """
    self._remote.boxReceiver.stopReceivingBoxes(reason)
    return basic.NetstringReceiver.connectionLost(self, reason)
|
python
|
{
"resource": ""
}
|
q279033
|
JSONAMPDialectFactory.buildProtocol
|
test
|
def buildProtocol(self, addr):
    """
    Builds a bridge and associates it with an AMP protocol instance.
    """
    proto = self._factory.buildProtocol(addr)
    return JSONAMPDialectReceiver(proto)
|
python
|
{
"resource": ""
}
|
q279034
|
jwks_to_keyjar
|
test
|
def jwks_to_keyjar(jwks, iss=''):
    """
    Convert a JWKS to a KeyJar instance.
    :param jwks: String representation of a JWKS
    :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
    """
    if not isinstance(jwks, dict):
        try:
            jwks = json.loads(jwks)
        except json.JSONDecodeError:
            raise ValueError('No proper JSON')
    kj = KeyJar()
    kj.import_jwks(jwks, issuer=iss)
    return kj
|
python
|
{
"resource": ""
}
|
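A sketch of the two input paths through the helper above; the issuer URL is a placeholder, and the error message comes straight from the code.

```python
import json

# Accepts a dict or a JSON string:
kj = jwks_to_keyjar({"keys": []}, iss='https://op.example.com')
kj = jwks_to_keyjar(json.dumps({"keys": []}), iss='https://op.example.com')

# Anything that is neither raises ValueError('No proper JSON'):
# jwks_to_keyjar('not json')
```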
q279035
|
JWKSBundle.loads
|
test
|
def loads(self, jstr):
    """
    Load a bundle from an unsigned JSON document.
    :param jstr: A bundle as a dictionary or a JSON document
    """
    if isinstance(jstr, dict):
        _info = jstr
    else:
        _info = json.loads(jstr)
    for iss, jwks in _info.items():
        kj = KeyJar()
        if isinstance(jwks, dict):
            kj.import_jwks(jwks, issuer=iss)
        else:
            kj.import_jwks_as_json(jwks, issuer=iss)
        self.bundle[iss] = kj
    return self
|
python
|
{
"resource": ""
}
|
q279036
|
nova_process
|
test
|
def nova_process(body, message):
    """
    This function deals with the nova notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = nova_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in nova_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = nova_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
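`nova_process` consults two module-level registries, with exact `event_type` matches winning over regex-keyed wildcards. A hypothetical registration; in ternya these dicts are normally populated by the framework rather than written to directly.

```python
import re

def on_instance_create(body, message):
    # placeholder handler
    print(body['event_type'])

nova_customer_process['compute.instance.create.end'] = on_instance_create
nova_customer_process_wildcard[re.compile(r'compute\.instance\..*')] = \
    on_instance_create
```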
q279037
|
cinder_process
|
test
|
def cinder_process(body, message):
    """
    This function deals with the cinder notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = cinder_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in cinder_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = cinder_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279038
|
neutron_process
|
test
|
def neutron_process(body, message):
    """
    This function deals with the neutron notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = neutron_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in neutron_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = neutron_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279039
|
glance_process
|
test
|
def glance_process(body, message):
    """
    This function deals with the glance notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = glance_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in glance_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = glance_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279040
|
swift_process
|
test
|
def swift_process(body, message):
    """
    This function deals with the swift notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = swift_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in swift_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = swift_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279041
|
keystone_process
|
test
|
def keystone_process(body, message):
    """
    This function deals with the keystone notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = keystone_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in keystone_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = keystone_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279042
|
heat_process
|
test
|
def heat_process(body, message):
    """
    This function deals with the heat notification.
    First, look for a process in customer_process keyed without a wildcard.
    If none is found, look in customer_process_wildcard.
    If that also fails, use the ternya default process.
    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    process = heat_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in heat_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = heat_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
|
python
|
{
"resource": ""
}
|
q279043
|
App.serve
|
test
|
def serve(self, server=None):
    """Serve app using wsgiref or provided server.
    Args:
        - server (callable): A callable
    """
    if server is None:
        from wsgiref.simple_server import make_server
        server = lambda app: make_server('', 8000, app).serve_forever()
        print('Listening on 0.0.0.0:8000')
    try:
        server(self)
    finally:
        server.socket.close()
|
python
|
{
"resource": ""
}
|
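`serve` falls back to a wsgiref one-liner, and its `finally` clause expects the server callable to expose a `socket` attribute. A hypothetical custom server that satisfies that expectation:

```python
from wsgiref.simple_server import make_server

def server(app):
    httpd = make_server('', 8080, app)
    server.socket = httpd.socket  # so `finally: server.socket.close()` works
    httpd.serve_forever()

app.serve(server)  # `app` is an App instance (assumption)
```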
q279044
|
pout
|
test
|
def pout(msg, log=None):
    """Print 'msg' to stdout, and optionally to 'log' at info level."""
    _print(msg, sys.stdout, log_func=log.info if log else None)
|
python
|
{
"resource": ""
}
|
q279045
|
perr
|
test
|
def perr(msg, log=None):
    """Print 'msg' to stderr, and optionally to 'log' at error level."""
    _print(msg, sys.stderr, log_func=log.error if log else None)
|
python
|
{
"resource": ""
}
|
q279046
|
register
|
test
|
def register(CommandSubClass):
    """A class decorator for Command classes to register in the default set."""
    name = CommandSubClass.name()
    if name in Command._all_commands:
        raise ValueError("Command already exists: " + name)
    Command._all_commands[name] = CommandSubClass
    return CommandSubClass
|
python
|
{
"resource": ""
}
|
q279047
|
Command.register
|
test
|
def register(Class, CommandSubClass):
    """A class decorator for Command classes to register."""
    for name in [CommandSubClass.name()] + CommandSubClass.aliases():
        if name in Class._registered_commands[Class]:
            raise ValueError("Command already exists: " + name)
        Class._registered_commands[Class][name] = CommandSubClass
    return CommandSubClass
|
python
|
{
"resource": ""
}
|
q279048
|
ConstrainedArgument.toString
|
test
|
def toString(self, value):
    """
    If all of the constraints are satisfied with the given value, defers
    to the composed AMP argument's ``toString`` method.
    """
    self._checkConstraints(value)
    return self.baseArgument.toString(value)
|
python
|
{
"resource": ""
}
|
q279049
|
ConstrainedArgument.fromString
|
test
|
def fromString(self, string):
    """
    Converts the string to a value using the composed AMP argument, then
    checks all the constraints against that value.
    """
    value = self.baseArgument.fromString(string)
    self._checkConstraints(value)
    return value
|
python
|
{
"resource": ""
}
|
q279050
|
_updateCompleterDict
|
test
|
def _updateCompleterDict(completers, cdict, regex=None):
    """Merges ``cdict`` into ``completers``. In the event that a key
    in cdict already exists in the completers dict, a ValueError is raised
    iff ``regex`` is falsy. If a regex str is provided, it and the duplicate
    key are updated to be unique, and the updated regex is returned.
    """
    for key in cdict:
        if key in completers and not regex:
            raise ValueError(f"Duplicate completion key: {key}")
        if key in completers:
            uniq = "_".join([key, str(uuid.uuid4()).replace("-", "")])
            regex = regex.replace(f"P<{key}>", f"P<{uniq}>")
            completers[uniq] = cdict[key]
        else:
            completers[key] = cdict[key]
    return regex
|
python
|
{
"resource": ""
}
|
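Behaviour of the merge helper above on a duplicate key, sketched from its own logic; the regex and completer values are placeholders.

```python
completers = {"cmd_opts": "completer-a"}
cdict = {"cmd_opts": "completer-b"}

# With no regex this would raise ValueError; with one, the duplicate key
# and the regex's named group are renamed with a unique uuid suffix:
new_regex = _updateCompleterDict(completers, cdict,
                                 regex=r"(?P<cmd_opts>--verbose)")
# completers now holds 'cmd_opts' plus a renamed 'cmd_opts_<hex>' entry,
# and new_regex refers to the renamed group.
```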
q279051
|
Ternya.work
|
test
|
def work(self):
    """
    Start ternya work.
    First, import customer's service modules.
    Second, init openstack mq.
    Third, keep a ternya connection that can auto-reconnect.
    """
    self.init_modules()
    connection = self.init_mq()
    TernyaConnection(self, connection).connect()
|
python
|
{
"resource": ""
}
|
q279052
|
Ternya.init_mq
|
test
|
def init_mq(self):
    """Init connection and consumer with openstack mq."""
    mq = self.init_connection()
    self.init_consumer(mq)
    return mq.connection
|
python
|
{
"resource": ""
}
|
q279053
|
Ternya.init_modules
|
test
|
def init_modules(self):
    """Import customer's service modules."""
    if not self.config:
        raise ValueError("please read your config file.")
    log.debug("begin to import customer's service modules.")
    modules = ServiceModules(self.config)
    modules.import_modules()
    log.debug("end to import customer's service modules.")
|
python
|
{
"resource": ""
}
|
q279054
|
Ternya.init_nova_consumer
|
test
|
def init_nova_consumer(self, mq):
    """
    Init openstack nova mq
    1. Check whether listening for nova notifications is enabled
    2. Create consumer
    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Nova):
        log.debug("disable listening nova notification")
        return
    for i in range(self.config.nova_mq_consumer_count):
        mq.create_consumer(self.config.nova_mq_exchange,
                           self.config.nova_mq_queue,
                           ProcessFactory.process(Openstack.Nova))
    log.debug("enable listening openstack nova notification.")
|
python
|
{
"resource": ""
}
|
q279055
|
Ternya.init_cinder_consumer
|
test
|
def init_cinder_consumer(self, mq):
    """
    Init openstack cinder mq
    1. Check whether listening for cinder notifications is enabled
    2. Create consumer
    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Cinder):
        log.debug("disable listening cinder notification")
        return
    for i in range(self.config.cinder_mq_consumer_count):
        mq.create_consumer(self.config.cinder_mq_exchange,
                           self.config.cinder_mq_queue,
                           ProcessFactory.process(Openstack.Cinder))
    log.debug("enable listening openstack cinder notification.")
|
python
|
{
"resource": ""
}
|
q279056
|
Ternya.init_neutron_consumer
|
test
|
def init_neutron_consumer(self, mq):
    """
    Init openstack neutron mq
    1. Check whether listening for neutron notifications is enabled
    2. Create consumer
    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Neutron):
        log.debug("disable listening neutron notification")
        return
    for i in range(self.config.neutron_mq_consumer_count):
        mq.create_consumer(self.config.neutron_mq_exchange,
                           self.config.neutron_mq_queue,
                           ProcessFactory.process(Openstack.Neutron))
    log.debug("enable listening openstack neutron notification.")
|
python
|
{
"resource": ""
}
|
q279057
|
Ternya.init_glance_consumer
|
test
|
def init_glance_consumer(self, mq):
    """
    Init openstack glance mq
    1. Check whether listening for glance notifications is enabled
    2. Create consumer
    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Glance):
        log.debug("disable listening glance notification")
        return
    for i in range(self.config.glance_mq_consumer_count):
        mq.create_consumer(self.config.glance_mq_exchange,
                           self.config.glance_mq_queue,
                           ProcessFactory.process(Openstack.Glance))
    log.debug("enable listening openstack glance notification.")
|
python
|
{
"resource": ""
}
|
q279058
|
Ternya.init_heat_consumer
|
test
|
def init_heat_consumer(self, mq):
    """
    Init openstack heat mq
    1. Check whether listening for heat notifications is enabled
    2. Create consumer
    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Heat):
        log.debug("disable listening heat notification")
        return
    for i in range(self.config.heat_mq_consumer_count):
        mq.create_consumer(self.config.heat_mq_exchange,
                           self.config.heat_mq_queue,
                           ProcessFactory.process(Openstack.Heat))
    log.debug("enable listening openstack heat notification.")
|
python
|
{
"resource": ""
}
|
q279059
|
Ternya.enable_component_notification
|
test
|
def enable_component_notification(self, openstack_component):
    """
    Check whether the customer enabled notifications for an openstack component.
    :param openstack_component: Openstack component type.
    """
    openstack_component_mapping = {
        Openstack.Nova: self.config.listen_nova_notification,
        Openstack.Cinder: self.config.listen_cinder_notification,
        Openstack.Neutron: self.config.listen_neutron_notification,
        Openstack.Glance: self.config.listen_glance_notification,
        Openstack.Swift: self.config.listen_swift_notification,
        Openstack.Keystone: self.config.listen_keystone_notification,
        Openstack.Heat: self.config.listen_heat_notification
    }
    return openstack_component_mapping[openstack_component]
|
python
|
{
"resource": ""
}
|
q279060
|
music_info
|
test
|
def music_info(songid):
    """
    Get music info from baidu music api
    """
    if isinstance(songid, list):
        songid = ','.join(songid)
    data = {
        "hq": 1,
        "songIds": songid
    }
    res = requests.post(MUSIC_INFO_URL, data=data)
    info = res.json()
    music_data = info["data"]
    songs = []
    for song in music_data["songList"]:
        song_link, size = _song_link(song, music_data["xcode"])
        songs.append({
            "name": song["songName"],
            "singer": song["artistName"],
            "lrc_link": song["lrcLink"],
            "song_link": song_link,
            "size": size
        })
    return songs
|
python
|
{
"resource": ""
}
|
q279061
|
download_music
|
test
|
def download_music(song, thread_num=4):
    """
    Download music with multiple threads.
    """
    filename = "{}.mp3".format(song["name"])
    if os.path.exists(filename):
        os.remove(filename)
    part = int(song["size"] / thread_num)
    if part <= 1024:
        thread_num = 1
    _id = uuid.uuid4().hex
    logger.info("downloading '{}'...".format(song["name"]))
    threads = []
    for i in range(thread_num):
        if i == thread_num - 1:
            end = ''
        else:
            end = (i + 1) * part - 1
        thread = Worker((i * part, end), song, _id)
        thread.start()
        threads.append(thread)
    for t in threads:
        t.join()
    fileParts = glob.glob("part-{}-*".format(_id))
    fileParts.sort(key=lambda e: e.split('-')[-1])
    logger.info("'{}' combine parts...".format(song["name"]))
    with open(filename, "ab") as f:
        for part in fileParts:
            with open(part, "rb") as d:
                shutil.copyfileobj(d, f)
            os.remove(part)
    logger.info("'{}' finished".format(song["name"]))
|
python
|
{
"resource": ""
}
|
q279062
|
Machine.execute
|
test
|
def execute(self, globals_=None, _locals=None):
    """
    Execute a code object.
    The inputs and behavior of this function should match those of
    eval_ and exec_.
    .. _eval: https://docs.python.org/3/library/functions.html?highlight=eval#eval
    .. _exec: https://docs.python.org/3/library/functions.html?highlight=exec#exec
    .. note:: Need to figure out how the internals of this function must change for
       ``eval`` or ``exec``.
    :param code: a python code object
    :param globals_: optional globals dictionary
    :param _locals: optional locals dictionary
    """
    if globals_ is None:
        globals_ = globals()
    if _locals is None:
        self._locals = globals_
    else:
        self._locals = _locals
    self.globals_ = globals_
    if self.contains_op("YIELD_VALUE"):
        return self.iterate_instructions()
    else:
        return self.execute_instructions()
|
python
|
{
"resource": ""
}
|
q279063
|
Machine.load_name
|
test
|
def load_name(self, name):
    """
    Implementation of the LOAD_NAME operation
    """
    if name in self.globals_:
        return self.globals_[name]
    b = self.globals_['__builtins__']
    if isinstance(b, dict):
        return b[name]
    else:
        return getattr(b, name)
|
python
|
{
"resource": ""
}
|
q279064
|
Machine.call_function
|
test
|
def call_function(self, c, i):
    """
    Implement the CALL_FUNCTION_ operation.
    .. _CALL_FUNCTION: https://docs.python.org/3/library/dis.html#opcode-CALL_FUNCTION
    """
    callable_ = self.__stack[-1 - i.arg]
    args = tuple(self.__stack[len(self.__stack) - i.arg:])
    self._print('call function')
    self._print('\tfunction ', callable_)
    self._print('\ti.arg ', i.arg)
    self._print('\targs ', args)
    self.call_callbacks('CALL_FUNCTION', callable_, *args)
    if isinstance(callable_, FunctionType):
        ret = callable_(*args)
    elif callable_ is builtins.__build_class__:
        ret = self.build_class(callable_, args)
    elif callable_ is builtins.globals:
        ret = self.builtins_globals()
    else:
        ret = callable_(*args)
    self.pop(1 + i.arg)
    self.__stack.append(ret)
|
python
|
{
"resource": ""
}
|
q279065
|
dump
|
test
|
def dump(filename, dbname, username=None, password=None, host=None,
         port=None, tempdir='/tmp', mysqldump_path='mysqldump'):
    """Performs a mysqldump backup.
    Create a database dump for the given database.
    Returns statuscode and shell output.
    """
    filepath = os.path.join(tempdir, filename)
    cmd = mysqldump_path
    cmd += ' --result-file=' + os.path.join(tempdir, filename)
    if username:
        cmd += ' --user=%s' % username
    if host:
        cmd += ' --host=%s' % host
    if port:
        cmd += ' --port=%s' % port
    if password:
        cmd += ' --password=%s' % password
    cmd += ' ' + dbname
    ## run mysqldump
    return sh(cmd)
|
python
|
{
"resource": ""
}
|
q279066
|
render_ditaa
|
test
|
def render_ditaa(self, code, options, prefix='ditaa'):
    """Render ditaa code into a PNG output file."""
    hashkey = code.encode('utf-8') + str(options) + \
              str(self.builder.config.ditaa) + \
              str(self.builder.config.ditaa_args)
    infname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "ditaa")
    outfname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "png")
    inrelfn = posixpath.join(self.builder.imgpath, infname)
    infullfn = path.join(self.builder.outdir, '_images', infname)
    outrelfn = posixpath.join(self.builder.imgpath, outfname)
    outfullfn = path.join(self.builder.outdir, '_images', outfname)
    if path.isfile(outfullfn):
        return outrelfn, outfullfn
    ensuredir(path.dirname(outfullfn))
    # ditaa expects UTF-8 by default
    if isinstance(code, unicode):
        code = code.encode('utf-8')
    ditaa_args = [self.builder.config.ditaa]
    ditaa_args.extend(self.builder.config.ditaa_args)
    ditaa_args.extend(options)
    ditaa_args.extend([infullfn])
    ditaa_args.extend([outfullfn])
    f = open(infullfn, 'w')
    f.write(code)
    f.close()
    try:
        self.builder.warn(ditaa_args)
        p = Popen(ditaa_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError, err:
        if err.errno != ENOENT:   # No such file or directory
            raise
        self.builder.warn('ditaa command %r cannot be run (needed for ditaa '
                          'output), check the ditaa setting' %
                          self.builder.config.ditaa)
        self.builder._ditaa_warned_dot = True
        return None, None
    wentWrong = False
    try:
        # Ditaa may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except OSError, err:
        if err.errno != EPIPE:
            raise
        wentWrong = True
    except IOError, err:
        if err.errno != EINVAL:
            raise
        wentWrong = True
    if wentWrong:
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise DitaaError('ditaa exited with error:\n[stderr]\n%s\n'
                         '[stdout]\n%s' % (stderr, stdout))
    return outrelfn, outfullfn
|
python
|
{
"resource": ""
}
|
q279067
|
Application._atexit
|
test
|
def _atexit(self):
    """Invoked in the 'finally' block of Application.run."""
    self.log.debug("Application._atexit")
    if self._atexit_func:
        self._atexit_func(self)
|
python
|
{
"resource": ""
}
|
q279068
|
Application.run
|
test
|
def run(self, args_list=None):
    """Run Application.main and exit with the return value."""
    self.log.debug("Application.run: {args_list}".format(**locals()))
    retval = None
    try:
        retval = self._run(args_list=args_list)
    except KeyboardInterrupt:
        self.log.verbose("Interrupted")  # pragma: nocover
    except SystemExit as exit:
        self.log.verbose("Exited")
        retval = exit.code
    except Exception:
        print("Uncaught exception", file=sys.stderr)
        traceback.print_exc()
        if "debug_pdb" in self.args and self.args.debug_pdb:
            debugger()
        retval = Application.UNCAUGHT_EXCEPTION_EXIT
        raise
    finally:
        try:
            self._atexit()
        finally:
            sys.stderr.flush()
            sys.stdout.flush()
            sys.exit(retval)
|
python
|
{
"resource": ""
}
|
q279069
|
cd
|
test
|
def cd(path):
    """Context manager that changes to directory `path` and returns to the
    original CWD when exited.
    """
    old_path = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(old_path)
|
python
|
{
"resource": ""
}
|
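A minimal usage sketch for the `cd` context manager above; the directory is restored even if the body raises.

```python
import os

with cd('/tmp'):
    print(os.getcwd())  # inside /tmp
# back in the original working directory here
```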
q279070
|
copytree
|
test
|
def copytree(src, dst, symlinks=True):
    """
    Modified from shutil.copytree docs code sample, merges files rather than
    requiring dst to not exist.
    """
    from shutil import copy2, Error, copystat
    names = os.listdir(src)
    if not Path(dst).exists():
        os.makedirs(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
    try:
        copystat(src, dst)
    except OSError as why:
        # can't copy file access times on Windows
        if why.winerror is None:
            errors.extend((src, dst, str(why)))
    if errors:
        raise Error(errors)
|
python
|
{
"resource": ""
}
|
q279071
|
debugger
|
test
|
def debugger():
    """If called in the context of an exception, calls post_mortem; otherwise
    set_trace.
    ``ipdb`` is preferred over ``pdb`` if installed.
    """
    e, m, tb = sys.exc_info()
    if tb is not None:
        _debugger.post_mortem(tb)
    else:
        _debugger.set_trace()
|
python
|
{
"resource": ""
}
|
q279072
|
FileSystem.get_mtime
|
test
|
def get_mtime(fname):
    """
    Find the time this file was last modified.
    :param fname: File name
    :return: The last time the file was modified.
    """
    try:
        mtime = os.stat(fname).st_mtime_ns
    except OSError:
        # The file might be right in the middle of being written
        # so sleep
        time.sleep(1)
        mtime = os.stat(fname).st_mtime_ns
    return mtime
|
python
|
{
"resource": ""
}
|
q279073
|
FileSystem.is_changed
|
test
|
def is_changed(self, item):
    """
    Find out if this item has been modified since the last check.
    :param item: A key
    :return: True/False
    """
    fname = os.path.join(self.fdir, item)
    if os.path.isfile(fname):
        mtime = self.get_mtime(fname)
        try:
            _ftime = self.fmtime[item]
        except KeyError:  # Never been seen before
            self.fmtime[item] = mtime
            return True
        if mtime > _ftime:  # has changed
            self.fmtime[item] = mtime
            return True
        else:
            return False
    else:
        logger.error('Could not access {}'.format(fname))
        raise KeyError(item)
|
python
|
{
"resource": ""
}
|
q279074
|
FileSystem.sync
|
test
|
def sync(self):
    """
    Goes through the directory and builds a local cache based on
    the content of the directory.
    """
    if not os.path.isdir(self.fdir):
        os.makedirs(self.fdir)
    for f in os.listdir(self.fdir):
        fname = os.path.join(self.fdir, f)
        if not os.path.isfile(fname):
            continue
        if f in self.fmtime:
            if self.is_changed(f):
                self.db[f] = self._read_info(fname)
        else:
            mtime = self.get_mtime(fname)
            self.db[f] = self._read_info(fname)
            self.fmtime[f] = mtime
|
python
|
{
"resource": ""
}
|
q279075
|
FileSystem.clear
|
test
|
def clear(self):
    """
    Completely resets the database. This means that all information in
    the local cache and on disc will be erased.
    """
    if not os.path.isdir(self.fdir):
        os.makedirs(self.fdir, exist_ok=True)
        return
    for f in os.listdir(self.fdir):
        del self[f]
|
python
|
{
"resource": ""
}
|
q279076
|
scrape
|
test
|
def scrape(ctx, url):
    """
    Rip the events from a given rss feed, normalize the data and store.
    """
    data = load_feed(url)
    feed = data['feed']
    entries = data['entries']
    # THIS IS SPECIFIC TO http://konfery.cz/rss/
    _type = 'community'
    country = 'Czech Republic'
    # title, title_detail, links, link, published, summary, tags
    # unused: summary_detail, guidislink, published_parsed
    for entry in entries:
        _id = sluggify(entry['id'])
        city = entry['tags'][0]['term']
        landing = entry['link']
        start_time = dt_normalize(entry['published_parsed'], local_tz=True)
        title = entry['title']
        summary = entry['summary']
        link = entry['link']
        ipdb.set_trace()
|
python
|
{
"resource": ""
}
|
q279077
|
Camera.download_image
|
test
|
def download_image(self):
    """
    Download the image and return the
    local path to the image file.
    """
    split = urlsplit(self.url)
    filename = split.path.split("/")[-1]
    # Ensure the directory to store the image cache exists
    if not os.path.exists(self.cache_directory):
        os.makedirs(self.cache_directory)
    filepath = os.path.join(self.cache_directory, filename)
    data = urllib_request.urlopen(self.url)
    with open(filepath, "wb") as image:
        image.write(data.read())
    return filepath
|
python
|
{
"resource": ""
}
|
q279078
|
Camera.has_changed
|
test
|
def has_changed(self):
    """
    Method to check if an image has changed
    since it was last downloaded. By making
    a head request, this check can be done
    quicker than downloading and processing
    the whole file.
    """
    request = urllib_request.Request(self.url)
    request.get_method = lambda: 'HEAD'
    response = urllib_request.urlopen(request)
    information = response.info()
    if 'Last-Modified' in information:
        last_modified = information['Last-Modified']
        # Return False if the image has not been modified
        if last_modified == self.image_last_modified:
            return False
        self.image_last_modified = last_modified
    # Return True if the image has been modified
    # or if the image has no last-modified header
    return True
|
python
|
{
"resource": ""
}
|
q279079
|
fancy_tag_compiler
|
test
|
def fancy_tag_compiler(params, defaults, takes_var_args, takes_var_kwargs, takes_context, name, node_class, parser, token):
    "Returns a template.Node subclass."
    bits = token.split_contents()[1:]
    if takes_context:
        if 'context' in params[:1]:
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "Any tag function decorated with takes_context=True "
                "must have a first argument of 'context'")
    # Split args and kwargs
    args = []
    kwargs = {}
    kwarg_found = False
    unhandled_params = list(params)
    handled_params = []
    if len(bits) > 1 and bits[-2] == 'as':
        output_var = bits[-1]
        if len(set(output_var) - set(ALLOWED_VARIABLE_CHARS)) > 0:
            raise TemplateSyntaxError("%s got output var name with forbidden chars: '%s'" % (name, output_var))
        bits = bits[:-2]
    else:
        output_var = None
    for bit in bits:
        kwarg_match = kwarg_re.match(bit)
        if kwarg_match:
            kw, var = kwarg_match.groups()
            if kw not in params and not takes_var_kwargs:
                raise TemplateSyntaxError("%s got unknown keyword argument '%s'" % (name, kw))
            elif kw in handled_params:
                raise TemplateSyntaxError("%s got multiple values for keyword argument '%s'" % (name, kw))
            else:
                kwargs[str(kw)] = var
                kwarg_found = True
                handled_params.append(kw)
        else:
            if kwarg_found:
                raise TemplateSyntaxError("%s got non-keyword arg after keyword arg" % name)
            else:
                args.append(bit)
                try:
                    handled_params.append(unhandled_params.pop(0))
                except IndexError:
                    if not takes_var_args:
                        raise TemplateSyntaxError("%s got too many arguments" % name)
    # Consider the last n params handled, where n is the number of defaults.
    if defaults is not None:
        unhandled_params = unhandled_params[:-len(defaults)]
    if len(unhandled_params) == 1:
        raise TemplateSyntaxError("%s didn't get a value for argument '%s'" % (name, unhandled_params[0]))
    elif len(unhandled_params) > 1:
        raise TemplateSyntaxError("%s didn't get values for arguments: %s" % (
            name, ', '.join(["'%s'" % p for p in unhandled_params])))
    return node_class(args, kwargs, output_var, takes_context)
|
python
|
{
"resource": ""
}
|
q279080
|
SeabornLogger.findCaller
|
test
|
def findCaller(self, stack_info=False):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = logging.currentframe()
    # On some versions of IronPython, currentframe() returns None if
    # IronPython isn't run with -X:Frames.
    if f is not None:
        f = f.f_back
    rv = "(unknown file)", 0, "(unknown function)"
    while hasattr(f, "f_code"):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        if filename == logging._srcfile or filename == self._srcfile:
            f = f.f_back
            continue
        rv = (co.co_filename, f.f_lineno, co.co_name)
        if stack_info:
            sio = io.StringIO()
            sio.write('Stack (most recent call last):\n')
            traceback.print_stack(f, file=sio)
            sinfo = sio.getvalue()
            if sinfo[-1] == '\n':
                sinfo = sinfo[:-1]
            sio.close()
        break
    return rv
|
python
|
{
"resource": ""
}
|
q279081
|
get_defining_component
|
test
|
def get_defining_component(pe_pe):
    '''
    get the C_C in which pe_pe is defined
    '''
    if pe_pe is None:
        return None
    if pe_pe.__class__.__name__ != 'PE_PE':
        pe_pe = xtuml.navigate_one(pe_pe).PE_PE[8001]()
    ep_pkg = xtuml.navigate_one(pe_pe).EP_PKG[8000]()
    if ep_pkg:
        return get_defining_component(ep_pkg)
    return xtuml.navigate_one(pe_pe).C_C[8003]()
|
python
|
{
"resource": ""
}
|
q279082
|
main
|
test
|
def main():
    '''
    Parse command line options and launch the prebuilder.
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path..]",
                                   version=xtuml.version.complete_string,
                                   formatter=optparse.TitledHelpFormatter())
    parser.add_option("-v", "--verbosity", dest='verbosity',
                      action="count",
                      help="increase debug logging level",
                      default=1)
    parser.add_option("-o", "--output", dest="output", metavar="PATH",
                      help="set output to PATH",
                      action="store",
                      default=None)
    (opts, args) = parser.parse_args()
    if len(args) == 0 or opts.output is None:
        parser.print_help()
        sys.exit(1)
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))
    m = ooaofooa.load_metamodel(args)
    prebuild_model(m)
    xtuml.persist_instances(m, opts.output)
|
python
|
{
"resource": ""
}
|
q279083
|
SymbolTable.find_symbol
|
test
|
def find_symbol(self, name=None, kind=None):
    '''
    Find a symbol in the symbol table by name, kind, or both.
    '''
    for s in reversed(self.stack):
        for symbol_name, handle in s.symbols.items():
            symbol_kind = handle.__class__.__name__
            if name == symbol_name and kind == symbol_kind:
                return handle
            elif name is None and kind == handle.__class__.__name__:
                return handle
            elif name == symbol_name and kind is None:
                return handle
        if name is None and kind == s.handle.__class__.__name__:
            return s.handle
|
python
|
{
"resource": ""
}
|
q279084
|
is_contained_in
|
test
|
def is_contained_in(pe_pe, root):
    '''
    Determine if a PE_PE is contained within an EP_PKG or a C_C.
    '''
    if not pe_pe:
        return False
    if type(pe_pe).__name__ != 'PE_PE':
        pe_pe = one(pe_pe).PE_PE[8001]()
    ep_pkg = one(pe_pe).EP_PKG[8000]()
    c_c = one(pe_pe).C_C[8003]()
    if root in [ep_pkg, c_c]:
        return True
    elif is_contained_in(ep_pkg, root):
        return True
    elif is_contained_in(c_c, root):
        return True
    else:
        return False
|
python
|
{
"resource": ""
}
|
q279085
|
is_global
|
test
|
def is_global(pe_pe):
    '''
    Check if a PE_PE is globally defined, i.e. not inside a C_C
    '''
    if type(pe_pe).__name__ != 'PE_PE':
        pe_pe = one(pe_pe).PE_PE[8001]()
    if one(pe_pe).C_C[8003]():
        return False
    pe_pe = one(pe_pe).EP_PKG[8000].PE_PE[8001]()
    if not pe_pe:
        return True
    return is_global(pe_pe)
|
python
|
{
"resource": ""
}
|
q279086
|
_get_data_type_name
|
test
|
def _get_data_type_name(s_dt):
    '''
    Convert a BridgePoint data type to a pyxtuml meta model type.
    '''
    s_cdt = one(s_dt).S_CDT[17]()
    if s_cdt and s_cdt.Core_Typ in range(1, 6):
        return s_dt.Name.upper()
    if one(s_dt).S_EDT[17]():
        return 'INTEGER'
    s_dt = one(s_dt).S_UDT[17].S_DT[18]()
    if s_dt:
        return _get_data_type_name(s_dt)
|
python
|
{
"resource": ""
}
|
q279087
|
_get_related_attributes
|
test
|
def _get_related_attributes(r_rgo, r_rto):
    '''
    The two lists of attributes that relate two classes in an association.
    '''
    l1 = list()
    l2 = list()
    ref_filter = lambda ref: ref.OIR_ID == r_rgo.OIR_ID
    for o_ref in many(r_rto).O_RTIDA[110].O_REF[111](ref_filter):
        o_attr = one(o_ref).O_RATTR[108].O_ATTR[106]()
        l1.append(o_attr.Name)
        o_attr = one(o_ref).O_RTIDA[111].O_OIDA[110].O_ATTR[105]()
        l2.append(o_attr.Name)
    return l1, l2
|
python
|
{
"resource": ""
}
|
q279088
|
mk_enum
|
test
|
def mk_enum(s_edt):
    '''
    Create a named tuple from a BridgePoint enumeration.
    '''
    s_dt = one(s_edt).S_DT[17]()
    enums = list()
    kwlist = ['False', 'None', 'True'] + keyword.kwlist
    for enum in many(s_edt).S_ENUM[27]():
        if enum.Name in kwlist:
            enums.append(enum.Name + '_')
        else:
            enums.append(enum.Name)
    Enum = collections.namedtuple(s_dt.Name, enums)
    return Enum(*range(len(enums)))
|
python
|
{
"resource": ""
}
|
q279089
|
mk_bridge
|
test
|
def mk_bridge(metamodel, s_brg):
    '''
    Create a python function from a BridgePoint bridge.
    '''
    action = s_brg.Action_Semantics_internal
    label = s_brg.Name
    return lambda **kwargs: interpret.run_function(metamodel, label,
                                                   action, kwargs)
|
python
|
{
"resource": ""
}
|
q279090
|
mk_external_entity
|
test
|
def mk_external_entity(metamodel, s_ee):
    '''
    Create a python object from a BridgePoint external entity with bridges
    realized as python member functions.
    '''
    bridges = many(s_ee).S_BRG[19]()
    names = [brg.Name for brg in bridges]
    EE = collections.namedtuple(s_ee.Key_Lett, names)
    funcs = list()
    for s_brg in many(s_ee).S_BRG[19]():
        fn = mk_bridge(metamodel, s_brg)
        funcs.append(fn)
    return EE(*funcs)
|
python
|
{
"resource": ""
}
|
q279091
|
mk_function
|
test
|
def mk_function(metamodel, s_sync):
    '''
    Create a python function from a BridgePoint function.
    '''
    action = s_sync.Action_Semantics_internal
    label = s_sync.Name
    return lambda **kwargs: interpret.run_function(metamodel, label,
                                                   action, kwargs)
|
python
|
{
"resource": ""
}
|
q279092
|
mk_constant
|
test
|
def mk_constant(cnst_syc):
    '''
    Create a python value from a BridgePoint constant.
    '''
    s_dt = one(cnst_syc).S_DT[1500]()
    cnst_lsc = one(cnst_syc).CNST_LFSC[1502].CNST_LSC[1503]()
    if s_dt.Name == 'boolean':
        return cnst_lsc.Value.lower() == 'true'
    if s_dt.Name == 'integer':
        return int(cnst_lsc.Value)
    if s_dt.Name == 'real':
        return float(cnst_lsc.Value)
    if s_dt.Name == 'string':
        return str(cnst_lsc.Value)
|
python
|
{
"resource": ""
}
|
q279093
|
mk_operation
|
test
|
def mk_operation(metaclass, o_tfr):
    '''
    Create a python function that interprets the action of a BridgePoint class
    operation.
    '''
    o_obj = one(o_tfr).O_OBJ[115]()
    action = o_tfr.Action_Semantics_internal
    label = '%s::%s' % (o_obj.Name, o_tfr.Name)
    run = interpret.run_operation
    if o_tfr.Instance_Based:
        return lambda self, **kwargs: run(metaclass, label, action, kwargs, self)
    else:
        fn = lambda cls, **kwargs: run(metaclass, label, action, kwargs, None)
        return classmethod(fn)
|
python
|
{
"resource": ""
}
|
q279094
|
mk_derived_attribute
|
test
|
def mk_derived_attribute(metaclass, o_dbattr):
    '''
    Create a python property that interprets the action of a BridgePoint
    derived attribute.
    '''
    o_attr = one(o_dbattr).O_BATTR[107].O_ATTR[106]()
    o_obj = one(o_attr).O_OBJ[102]()
    action = o_dbattr.Action_Semantics_internal
    label = '%s::%s' % (o_obj.Name, o_attr.Name)
    fget = functools.partial(interpret.run_derived_attribute, metaclass,
                             label, action, o_attr.Name)
    return property(fget)
|
python
|
{
"resource": ""
}
|
q279095
|
mk_class
|
test
|
def mk_class(m, o_obj, derived_attributes=False):
    '''
    Create a pyxtuml class from a BridgePoint class.
    '''
    first_filter = lambda selected: not one(selected).O_ATTR[103, 'succeeds']()
    o_attr = one(o_obj).O_ATTR[102](first_filter)
    attributes = list()
    while o_attr:
        s_dt = get_attribute_type(o_attr)
        ty = _get_data_type_name(s_dt)
        if not derived_attributes and one(o_attr).O_BATTR[106].O_DBATTR[107]():
            pass
            # logger.warning('Omitting derived attribute %s.%s ' %
            #                (o_obj.Key_Lett, o_attr.Name))
        elif not ty:
            logger.warning('Omitting unsupported attribute %s.%s ' %
                           (o_obj.Key_Lett, o_attr.Name))
        else:
            attributes.append((o_attr.Name, ty))
        o_attr = one(o_attr).O_ATTR[103, 'precedes']()
    metaclass = m.define_class(o_obj.Key_Lett, list(attributes), o_obj.Descrip)
    for o_id in many(o_obj).O_ID[104]():
        o_oida = many(o_id).O_OIDA[105]()
        o_attrs = many(o_oida).O_ATTR[105]()
        if not derived_attributes and one(o_attrs).O_BATTR[106].O_DBATTR[107]():
            logger.warning('Omitting unique identifier %s.I%d' %
                           (o_obj.Key_Lett, o_id.Oid_ID + 1))
            continue
        names = [o_attr.Name for o_attr in o_attrs]
        m.define_unique_identifier(o_obj.Key_Lett, o_id.Oid_ID + 1, *names)
    for o_tfr in many(o_obj).O_TFR[115]():
        fn = mk_operation(metaclass, o_tfr)
        setattr(metaclass.clazz, o_tfr.Name, fn)
    for o_dbattr in many(o_obj).O_ATTR[102].O_BATTR[106].O_DBATTR[107]():
        o_attr = one(o_dbattr).O_BATTR[107].O_ATTR[106]()
        fn = mk_derived_attribute(metaclass, o_dbattr)
        setattr(metaclass.clazz, o_attr.Name, fn)
    return metaclass
|
python
|
{
"resource": ""
}
|
q279096
|
mk_simple_association
|
test
|
def mk_simple_association(m, r_simp):
    '''
    Create a pyxtuml association from a simple association in BridgePoint.
    '''
    r_rel = one(r_simp).R_REL[206]()
    r_form = one(r_simp).R_FORM[208]()
    r_part = one(r_simp).R_PART[207]()
    r_rgo = one(r_form).R_RGO[205]()
    r_rto = one(r_part).R_RTO[204]()
    if not r_form:
        logger.info('unformalized association R%s' % (r_rel.Numb))
        r_form = one(r_simp).R_PART[207](lambda sel: sel != r_part)
        r_rgo = one(r_form).R_RTO[204]()
    source_o_obj = one(r_rgo).R_OIR[203].O_OBJ[201]()
    target_o_obj = one(r_rto).R_OIR[203].O_OBJ[201]()
    source_ids, target_ids = _get_related_attributes(r_rgo, r_rto)
    if source_o_obj.Obj_ID != target_o_obj.Obj_ID:
        source_phrase = target_phrase = ''
    else:
        source_phrase = r_part.Txt_Phrs
        target_phrase = r_form.Txt_Phrs
    m.define_association(rel_id=r_rel.Numb,
                         source_kind=source_o_obj.Key_Lett,
                         target_kind=target_o_obj.Key_Lett,
                         source_keys=source_ids,
                         target_keys=target_ids,
                         source_conditional=r_form.Cond,
                         target_conditional=r_part.Cond,
                         source_phrase=source_phrase,
                         target_phrase=target_phrase,
                         source_many=r_form.Mult,
                         target_many=r_part.Mult)
|
python
|
{
"resource": ""
}
|
q279097
|
mk_linked_association
|
test
|
def mk_linked_association(m, r_assoc):
    '''
    Create pyxtuml associations from a linked association in BridgePoint.
    '''
    r_rel = one(r_assoc).R_REL[206]()
    r_rgo = one(r_assoc).R_ASSR[211].R_RGO[205]()
    source_o_obj = one(r_rgo).R_OIR[203].O_OBJ[201]()
    def _mk_assoc(side1, side2):
        r_rto = one(side1).R_RTO[204]()
        target_o_obj = one(r_rto).R_OIR[203].O_OBJ[201]()
        source_ids, target_ids = _get_related_attributes(r_rgo, r_rto)
        if side1.Obj_ID != side2.Obj_ID:
            source_phrase = target_phrase = ''
        else:
            source_phrase = side1.Txt_Phrs
            target_phrase = side2.Txt_Phrs
        m.define_association(rel_id=r_rel.Numb,
                             source_kind=source_o_obj.Key_Lett,
                             target_kind=target_o_obj.Key_Lett,
                             source_keys=source_ids,
                             target_keys=target_ids,
                             source_conditional=side2.Cond,
                             target_conditional=False,
                             source_phrase=source_phrase,
                             target_phrase=target_phrase,
                             source_many=side2.Mult,
                             target_many=False)
    r_aone = one(r_assoc).R_AONE[209]()
    r_aoth = one(r_assoc).R_AOTH[210]()
    _mk_assoc(r_aone, r_aoth)
    _mk_assoc(r_aoth, r_aone)
|
python
|
{
"resource": ""
}
|
q279098
|
mk_association
|
test
|
def mk_association(m, r_rel):
    '''
    Create a pyxtuml association from a R_REL in ooaofooa.
    '''
    handler = {
        'R_SIMP': mk_simple_association,
        'R_ASSOC': mk_linked_association,
        'R_SUBSUP': mk_subsuper_association,
        'R_COMP': mk_derived_association,
    }
    inst = subtype(r_rel, 206)
    fn = handler.get(type(inst).__name__)
    return fn(m, inst)
|
python
|
{
"resource": ""
}
|
q279099
|
mk_component
|
test
|
def mk_component(bp_model, c_c=None, derived_attributes=False):
    '''
    Create a pyxtuml meta model from a BridgePoint model.
    Optionally, restrict to classes and associations contained in the
    component c_c.
    '''
    target = Domain()
    c_c_filt = lambda sel: c_c is None or is_contained_in(sel, c_c)
    for o_obj in bp_model.select_many('O_OBJ', c_c_filt):
        mk_class(target, o_obj, derived_attributes)
    for r_rel in bp_model.select_many('R_REL', c_c_filt):
        mk_association(target, r_rel)
    for s_sync in bp_model.select_many('S_SYNC', c_c_filt):
        fn = mk_function(target, s_sync)
        target.add_symbol(s_sync.Name, fn)
    for s_dt in bp_model.select_many('S_DT', c_c_filt):
        s_edt = one(s_dt).S_EDT[17]()
        if s_edt:
            enum = mk_enum(s_edt)
            target.add_symbol(s_dt.Name, enum)
    for cnst_csp in bp_model.select_many('CNST_CSP', c_c_filt):
        for cnst_syc in many(cnst_csp).CNST_SYC[1504]():
            value = mk_constant(cnst_syc)
            target.add_symbol(cnst_syc.Name, value)
    for ass in target.associations:
        ass.formalize()
    for s_ee in bp_model.select_many('S_EE', c_c_filt):
        if s_ee.Key_Lett in ['LOG', 'ARCH', 'TIM', 'NVS', 'PERSIST']:
            target.add_symbol(s_ee.Key_Lett, getattr(builtin_ee, s_ee.Key_Lett))
        else:
            ee = mk_external_entity(target, s_ee)
            target.add_symbol(s_ee.Key_Lett, ee)
    return target
|
python
|
{
"resource": ""
}
|