Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars)
---|---|---|---|
3,200 | def belong_to(self, pos):
"""
given a cursor position, return the deepest header
that contains it
"""
match = []
for h in self.headers:
start, end = self.region(h)
if start <= pos <= end:
match.append(h)
try:
return sorted(match, key=lambda h: h.level, reverse=True)[0]
except __HOLE__:
return None | IndexError | dataset/ETHPy150Open mgaitan/sublime-rst-completion/headers.py/RstHeaderTree.belong_to |
3,201 | def region(self, header):
"""
determines the (start, end) region under the given header
A region ends when a header of the same or higher level
(i.e. a lower number) is found, or at EOF
"""
try:
index = self.headers.index(header)
except __HOLE__:
return
start = header.start
if index == len(self.headers) - 1: # last header
return (start, self._text_lenght)
for next_h in self.headers[index + 1:]:
if next_h.level <= header.level:
return (start, next_h.start - 1)
return (start, self._text_lenght) | ValueError | dataset/ETHPy150Open mgaitan/sublime-rst-completion/headers.py/RstHeaderTree.region |
3,202 | def next(self, header, same_or_high=False):
"""
given a header, returns the closest header
(in the downward direction)
"""
index, headers = self._index(header, same_or_high)
try:
return headers[index + 1]
except __HOLE__:
return None | IndexError | dataset/ETHPy150Open mgaitan/sublime-rst-completion/headers.py/RstHeaderTree.next |
3,203 | def run(self, edit, offset=-1):
vid = self.view.id()
HeaderChangeLevelEvent.listen.pop(vid, None)
cursor_pos = self.view.sel()[0].begin()
region = sublime.Region(0, self.view.size())
tree = RstHeaderTree(self.view.substr(region))
parent = tree.belong_to(cursor_pos)
is_in_header = parent.start <= cursor_pos <= parent.end
if not is_in_header:
return
idx, levels = HeaderChangeLevelCommand.views.get(vid, (None, None))
if idx != parent.idx:
levels = tree.levels()
HeaderChangeLevelCommand.views[vid] = (parent.idx, levels)
try:
level = levels.index(parent.adornment)
if level + offset < 0:
return
adornment = levels[level + offset]
except __HOLE__:
return
new_header = RstHeaderTree.make_header(parent.title, adornment)
hregion = sublime.Region(parent.start, parent.end + 1)
try:
self.view.replace(edit, hregion, new_header)
finally:
def callback():
HeaderChangeLevelEvent.listen[vid] = True
sublime.set_timeout(callback, 0) | IndexError | dataset/ETHPy150Open mgaitan/sublime-rst-completion/headers.py/HeaderChangeLevelCommand.run |
3,204 | def dequeue(self):
"""Dequeues the front-most job from this queue.
Returns a Job instance, which can be executed or inspected.
Does not respect serial queue locks
"""
with transaction.commit_on_success(using=self.connection):
try:
job = Job.objects.using(self.connection).select_for_update().filter(
queue=self, status=Job.QUEUED,
scheduled_for__lte=now()).order_by('scheduled_for')[0]
job.queue = None
job.save()
except __HOLE__:
job = None
if job and job.repeat:
self.enqueue_next(job)
return job | IndexError | dataset/ETHPy150Open bretth/django-pq/pq/queue.py/Queue.dequeue |
3,205 | def load_dataset(name):
"""
Load dataset defined with name in SETTINGS json from cache. If not in cache
create the dataset.
Args:
name - a string of a valid dataset defined in SETTINGS
Returns:
the dataset
"""
try:
dataset = utils.load_from_cache(name)
logger.info('Loaded dataset %s from cache' % name)
except __HOLE__:
dataset = make_dataset(name)
utils.save_to_cache(dataset, name)
return dataset | IOError | dataset/ETHPy150Open theusual/kaggle-seeclickfix-ensemble/Miroslaw/datasets.py/load_dataset |
3,206 | def make_dataset(name):
"""
Create the dataset defined with name in SETTINGS json. A dataset definition
in SETTINGS takes the form:
{ "DatasetName": {
"input_data": ["NamesOfAnyDatasetsRequiredAsInput", ...],
"transforms": [[ "TrainsformName1", { "TransformArg1": Value1,
"TransformArg2": Value2, ... }].
... ]
}
}
"DatasetName" is the name that will be used for the dataset throughout the model
this name is used for accessing configuration information, and when passing the
dataset as input into other dataset definitions or model definitions
"input_data" is a list of other dataset names that are required as input for
the creation of the dataset
"transforms" is a list of lists. Each sub-list must have length 2 and contain
a transform name, which is a valid name of a function defined in data_transforms.py
and a dict of arguments required to pass into the transform. All transforms defined
in data_transforms.py must have the structure:
def transform_name(input_data, **args):
...
The "transforms" list allows for chaining of transforms for creating complex
datasets. The input for the first transform in the chain is always defined in
"input_data", the next transform in the chain takes the output of the previous
transform as input_data.
"""
cfgs = config.dataset_configs(name)
data = [load_dataset(ds) for ds in cfgs['input_data']]
if len(data) == 1:
data = data[0]
logger.info('Creating dataset %s' % name)
for tname, args in cfgs['transforms']:
try:
transform = getattr(data_transforms, tname)
except __HOLE__:
raise AttributeError('Unable to find transform \
%s in data_transforms.py' % tname)
logger.info('Applying %s on %s' % (tname, name))
data = transform(data, **args)
return data | AttributeError | dataset/ETHPy150Open theusual/kaggle-seeclickfix-ensemble/Miroslaw/datasets.py/make_dataset |
3,207 | def _wrappedTest(self):
strargs = dict(testmodule=self.testalias,
testcase=self.__class__.__name__,
testfunc=self._testMethodName)
teststr = """tc = {testmodule}.{testcase}("{testfunc}")
try:
tc.setUp()
tc.{testfunc}()
finally:
tc.tearDown()""".format(**strargs)
try:
sendrecv(self.reqsock, ('exec', teststr))
except __HOLE__ as ex:
if 'AssertionError' in str(ex):
raise AssertionError(*ex.args)
raise | RuntimeError | dataset/ETHPy150Open rgalanakis/practicalmayapython/src/chapter6/mayatestcase.py/MayaTestCase._wrappedTest |
3,208 | def get_is_old(self, obj):
try:
return obj.age > 80
except __HOLE__ as te:
raise ValidationError(text_type(te)) | TypeError | dataset/ETHPy150Open marshmallow-code/marshmallow/tests/base.py/UserSchema.get_is_old |
3,209 | def get_is_old(self, obj):
try:
return obj.age > 80
except __HOLE__ as te:
raise ValidationError(te) | TypeError | dataset/ETHPy150Open marshmallow-code/marshmallow/tests/base.py/UserMetaSchema.get_is_old |
3,210 | def handle(self, *args, **options):
if not len(args):
raise CommandError('This command accepts space delimited list of application names.')
if not set(args).issubset(settings.INSTALLED_APPS):
raise CommandError('One or more application names issued to the command are not in INSTALLED_APPS.')
for app_name in args:
decorated_views_count = 0
self.stdout.write('Working on "%s" application ...\n' % app_name)
try:
app_views = __import__('%s.views' % app_name)
except __HOLE__:
raise CommandError('No views.py found in the application.')
app_views_substr = path.join('oauthost', 'decorators.py')
for func_name in dir(app_views.views):
if '__' not in func_name:
func = getattr(app_views.views, func_name)
# That's how we find decorated views.
if func_name != 'oauth_required' and app_views_substr in getfile(func):
decorated_views_count += 1
# TODO: It would be nice to have here the value of the `scope` parameter of @oauth_required, if it is set.
# That is, of course, only if we can trace it at a low cost.
scope_name = '%(app_name)s:%(view_name)s' % {'app_name': app_name, 'view_name': func_name}
self.stdout.write(' Found "%s" view. Syncing "%s" scope ... ' % (func_name, scope_name))
# A try to give our scope a pretty name.
scope_title = '%s %s' % (app_name.capitalize(), ' '.join([word.capitalize() for word in func_name.split('_')]))
scope = Scope(identifier=scope_name, title=scope_title)
try:
scope.save()
except IntegrityError:
self.stdout.write('WARNING: Scope skipped as already exists\n')
else:
self.stdout.write('Done\n')
if not decorated_views_count:
self.stdout.write('NOTE: No views decorated with "@oauth_required" are found in the application.\n')
self.stdout.write('\n') | ImportError | dataset/ETHPy150Open idlesign/django-oauthost/oauthost/management/commands/syncscopes.py/Command.handle |
3,211 | def get_user(request):
try:
user_id = request.session[auth.SESSION_KEY]
backend_path = request.session[auth.BACKEND_SESSION_KEY]
backend = auth.load_backend(backend_path)
backend.request = request
user = backend.get_user(user_id) or models.AnonymousUser()
except __HOLE__:
user = models.AnonymousUser()
return user | KeyError | dataset/ETHPy150Open openstack/django_openstack_auth/openstack_auth/utils.py/get_user |
3,212 | def _id_handler(self, f):
"""
Given a Feature from self.iterator, figure out what the ID should be.
This uses `self.id_spec` identify the ID.
"""
# If id_spec is a string, convert to iterable for later
if isinstance(self.id_spec, six.string_types):
id_key = [self.id_spec]
elif hasattr(self.id_spec, '__call__'):
id_key = [self.id_spec]
# If dict, then assume it's a feature -> attribute mapping, e.g.,
# {'gene': 'gene_id'} for GTF
elif isinstance(self.id_spec, dict):
try:
id_key = self.id_spec[f.featuretype]
if isinstance(id_key, six.string_types):
id_key = [id_key]
# Otherwise, use default auto-increment.
except __HOLE__:
return self._increment_featuretype_autoid(f.featuretype)
# Otherwise assume it's an iterable.
else:
id_key = self.id_spec
# Then try them in order, returning the first one that works:
for k in id_key:
if hasattr(k, '__call__'):
_id = k(f)
if _id:
if _id.startswith('autoincrement:'):
return self._increment_featuretype_autoid(_id[14:])
return _id
else:
# use GFF fields rather than attributes for cases like :seqid:
# or :strand:
if (len(k) > 3) and (k[0] == ':') and (k[-1] == ':'):
# No [0] here -- only attributes key/vals are forced into
# lists, not standard GFF fields.
return getattr(f, k[1:-1])
else:
try:
return f.attributes[k][0]
except (KeyError, IndexError):
pass
# If we get here, then default autoincrement
return self._increment_featuretype_autoid(f.featuretype) | KeyError | dataset/ETHPy150Open daler/gffutils/gffutils/create.py/_DBCreator._id_handler |
3,213 | def _longest_val_in_column(self, col):
"""
get size of longest value in specific column
:param col: str, column name
:return int
"""
try:
# +2 is for implicit separator
return max([len(x[col]) for x in self.table if x[col]]) + 2
except __HOLE__:
logger.error("there is no column %r", col)
raise | KeyError | dataset/ETHPy150Open projectatomic/osbs-client/osbs/cli/render.py/TableFormatter._longest_val_in_column |
3,214 | def code_token_response(**kwargs):
_areq = kwargs["areq"]
_scode = kwargs["scode"]
_sdb = kwargs["sdb"]
aresp = AuthorizationResponse()
for key in ["state", "nonce", "scope"]:
try:
aresp[key] = _areq[key]
except KeyError:
pass
aresp["code"] = _scode
_dic = _sdb.upgrade_to_token(_scode, issue_refresh=False)
for prop in AccessTokenResponse.c_param.keys():
try:
aresp[prop] = _dic[prop]
except __HOLE__:
pass
return aresp | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/code_token_response |
3,215 | def build_jwx_def(self):
self.jwx_def = {}
for _typ in ["signing_alg", "encryption_alg", "encryption_enc"]:
self.jwx_def[_typ] = {}
for item in ["id_token", "userinfo"]:
cap_param = '{}_{}_values_supported'.format(item, _typ)
try:
self.jwx_def[_typ][item] = self.capabilities[cap_param][0]
except __HOLE__:
self.jwx_def[_typ][item] = "" | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.build_jwx_def |
3,216 | def set_mode(self, mode):
"""
The mode is a set of parameters that govern how this OP will behave.
:param mode:
:return:
"""
# Is there a signing algorithm I should use
try:
self.jwx_def["signing_alg"]["id_token"] = mode["sign"]
self.jwx_def["signing_alg"]["userinfo"] = mode["sign"]
except KeyError:
pass
else:
# make sure id_token_signed_response_alg is set in client register
# response. This will make it happen in match_preferences()
for val in PREFERENCE2PROVIDER.values():
if val.endswith("signing_alg_values_supported"):
self.capabilities[val] = [mode["sign"]]
# Is there a encryption algorithm I should use
try:
_enc_alg = mode["enc_alg"]
except __HOLE__:
pass
else:
# make sure id_token_signed_response_alg is set in client register
# response. This will make it happen in match_preferences()
for val in PREFERENCE2PROVIDER.values():
if val.endswith("encryption_alg_values_supported"):
self.capabilities[val] = [_enc_alg]
# Is there a encryption enc algorithm I should use
try:
_enc_enc = mode["enc_enc"]
except KeyError:
pass
else:
# make sure id_token_signed_response_alg is set in client register
# response. This will make it happen in match_preferences()
for val in PREFERENCE2PROVIDER.values():
if val.endswith("encryption_enc_values_supported"):
self.capabilities[val] = [_enc_enc] | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.set_mode |
3,217 | @staticmethod
def get_sector_id(redirect_uri, client_info):
"""
Pick the sector id given a number of factors
:param redirect_uri: The redirect_uri used
:param client_info: Information provided by the client in the
client registration
:return: A sector_id or None
"""
_redirect_uri = unquote(redirect_uri)
part = urlparse(_redirect_uri)
if part.fragment:
raise ValueError
(_base, _query) = splitquery(_redirect_uri)
sid = ""
try:
if _base in client_info["si_redirects"]:
sid = client_info["sector_id"]
except __HOLE__:
try:
uit = client_info["subject_type"]
if uit == "pairwise":
sid = _base
except KeyError:
pass
return sid | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.get_sector_id |
3,218 | def required_user(self, areq):
req_user = ""
try:
_req = areq["request"]
except KeyError:
_req = areq
if "id_token_hint" in _req:
try:
req_user = _req["id_token_hint"]["sub"]
aud = _req["id_token_hint"]["aud"]
except (KeyError, __HOLE__):
# A signed jwt, should verify signature if I can
jso = json.loads(b64d(str(_req["id_token_hint"].split(".")[1])))
req_user = jso["sub"]
aud = jso["aud"]
if not self._verify_client(areq, aud):
req_user = ""
return req_user | TypeError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.required_user |
3,219 | def pick_auth(self, areq, comparision_type=""):
"""
:param areq: AuthorizationRequest instance
:param comparision_type: How to pick the authentication method
:return: An authentication method and its authn class ref
"""
if comparision_type == "any":
return self.authn_broker[0]
try:
if "acr_values" in areq:
if not comparision_type:
comparision_type = "exact"
if not isinstance(areq["acr_values"], list):
areq["acr_values"] = [areq["acr_values"]]
for acr in areq["acr_values"]:
res = self.authn_broker.pick(acr, comparision_type)
logger.debug("Picked AuthN broker for ACR %s: %s" % (
str(acr), str(res)))
if res:
# Return the best guess by pick.
return res[0]
else: # same as any
return self.authn_broker[0]
except __HOLE__ as exc:
logger.debug(
"An error occured while picking the authN broker: %s" % str(
exc))
# return the best I have
return None, None | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.pick_auth |
3,220 | def verify_post_logout_redirect_uri(self, esreq, cookie):
"""
:param esreq: End session request
:param cookie:
:return:
"""
try:
redirect_uri = esreq["post_logout_redirect_uri"]
except __HOLE__:
logger.debug("Missing post_logout_redirect_uri parameter")
return None
try:
authn, acr = self.pick_auth(esreq)
except Exception as err:
logger.exception("%s", err)
raise
try:
uid, _ts = authn.authenticated_as(cookie)
client_ids = self.sdb.get_client_ids_for_uid(uid["uid"])
accepted_urls = [self.cdb[cid]["post_logout_redirect_uris"] for cid
in client_ids]
if self._verify_url(redirect_uri,
itertools.chain.from_iterable(accepted_urls)):
return redirect_uri
except Exception as exc:
logger.debug(
"An error occurred while verifying redirect URI: %s" % str(
exc))
return None | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.verify_post_logout_redirect_uri |
3,221 | def end_session_endpoint(self, request="", cookie=None, **kwargs):
esr = EndSessionRequest().from_urlencoded(request)
logger.debug("End session request: {}".format(esr.to_dict()))
redirect_uri = None
if "post_logout_redirect_uri" in esr:
redirect_uri = self.verify_post_logout_redirect_uri(esr, cookie)
if not redirect_uri:
return self._error_response(
"Not allowed (Post logout redirect URI verification "
"failed)!")
authn, acr = self.pick_auth(esr)
sid = None
if "id_token_hint" in esr:
id_token_hint = OpenIDRequest().from_jwt(esr["id_token_hint"],
keyjar=self.keyjar,
verify=True)
sub = id_token_hint["sub"]
try:
sid = self.sdb.get_sids_by_sub(sub)[
0] # any sid will do, choose the first
except __HOLE__:
pass
else:
identity, _ts = authn.authenticated_as(cookie)
if identity:
uid = identity["uid"]
try:
# any sid will do, choose the first
sid = self.sdb.uid2sid[uid][0]
except (KeyError, IndexError):
pass
else:
return self._error_response(
"Not allowed (UID could not be retrieved)!")
# if self.sdb.get_verified_logout(uid):
# return self.let_user_verify_logout(uid, esr, cookie, redirect_uri)
if sid is not None:
del self.sdb[sid]
# Delete cookies
headers = [authn.delete_cookie(), self.delete_session_cookie()]
if redirect_uri is not None:
return SeeOther(str(redirect_uri), headers=headers)
return Response("Successful logout", headers=headers) | IndexError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.end_session_endpoint |
3,222 | def verify_endpoint(self, request="", cookie=None, **kwargs):
"""
:param request:
:param cookie:
:param kwargs:
:return:
"""
logger.debug("verify request: %s" % request)
_req = parse_qs(request)
if "query" in _req:
try:
# TODO FIX THIS !!! Why query ?
areq = parse_qs(_req["query"][0])
except __HOLE__:
return BadRequest()
else:
areq = _req
logger.debug("REQ: %s" % areq)
try:
authn, acr = self.pick_auth(areq, "exact")
except Exception as err:
logger.exception("%s", err)
raise
kwargs["cookie"] = cookie
return authn.verify(_req, **kwargs) | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.verify_endpoint |
3,223 | def setup_session(self, areq, authn_event, cinfo):
try:
oidc_req = areq["request"]
except __HOLE__:
oidc_req = None
sid = self.sdb.create_authz_session(authn_event, areq, oidreq=oidc_req)
kwargs = {}
for param in ["sector_id", "subject_type"]:
try:
kwargs[param] = cinfo[param]
except KeyError:
pass
self.sdb.do_sub(sid, cinfo['client_salt'], **kwargs)
return sid | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.setup_session |
3,224 | def encrypt(self, payload, client_info, cid, val_type="id_token", cty=""):
"""
Handles the encryption of a payload.
Shouldn't get here unless there are encrypt parameters in client info
:param payload: The information to be encrypted
:param client_info: Client information
:param cid: Client id
:return: The encrypted information as a JWT
"""
try:
alg = client_info["%s_encrypted_response_alg" % val_type]
enc = client_info["%s_encrypted_response_enc" % val_type]
except KeyError as err: # both must be defined
logger.warning("undefined parameter: %s" % err)
raise JWEException("%s undefined" % err)
logger.debug("alg=%s, enc=%s, val_type=%s" % (alg, enc, val_type))
keys = self.keyjar.get_encrypt_key(owner=cid)
logger.debug("Encryption keys for %s: %s" % (cid, keys))
try:
_ckeys = self.keyjar[cid]
except __HOLE__:
# Weird, but try to recuperate
logger.warning(
"Lost keys for {} trying to recuperate!!".format(cid))
self.keyjar.issuer_keys[cid] = []
self.keyjar.add(cid, client_info["jwks_uri"])
_ckeys = self.keyjar[cid]
logger.debug("keys for %s: %s" % (
cid, "[" + ", ".join([str(x) for x in _ckeys])) + "]")
kwargs = {"alg": alg, "enc": enc}
if cty:
kwargs["cty"] = cty
# use the clients public key for encryption
_jwe = JWE(payload, **kwargs)
return _jwe.encrypt(keys, context="public") | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.encrypt |
3,225 | def sign_encrypt_id_token(self, sinfo, client_info, areq, code=None,
access_token=None, user_info=None):
"""
Sign and/or encrypt an IDToken
:param sinfo: Session information
:param client_info: Client information
:param areq: The request
:param code: Access grant
:param access_token: Access Token
:param user_info: User information
:return: IDToken instance
"""
try:
alg = client_info["id_token_signed_response_alg"]
except __HOLE__:
try:
alg = self.jwx_def["signing_alg"]["id_token"]
except KeyError:
alg = PROVIDER_DEFAULT["id_token_signed_response_alg"]
else:
if not alg:
alg = PROVIDER_DEFAULT["id_token_signed_response_alg"]
_authn_event = sinfo["authn_event"]
id_token = self.id_token_as_signed_jwt(
sinfo, loa=_authn_event.authn_info, alg=alg, code=code,
access_token=access_token, user_info=user_info,
auth_time=_authn_event.authn_time)
# Then encrypt
if "id_token_encrypted_response_alg" in client_info:
id_token = self.encrypt(id_token, client_info, areq["client_id"],
"id_token", "JWT")
return id_token | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.sign_encrypt_id_token |
3,226 | def _access_token_endpoint(self, req, **kwargs):
_sdb = self.sdb
_log_debug = logger.debug
client_info = self.cdb[str(req["client_id"])]
assert req["grant_type"] == "authorization_code"
_access_code = req["code"].replace(' ', '+')
# assert that the code is valid
if self.sdb.is_revoked(_access_code):
return self._error(error="access_denied", descr="Token is revoked")
# Session might not exist or _access_code malformed
try:
_info = _sdb[_access_code]
except KeyError:
return self._error(error="access_denied", descr="Token is invalid")
# If redirect_uri was in the initial authorization request
# verify that the one given here is the correct one.
if "redirect_uri" in _info:
try:
assert req["redirect_uri"] == _info["redirect_uri"]
except __HOLE__:
return self._error(error="access_denied",
descr="redirect_uri mismatch")
_log_debug("All checks OK")
issue_refresh = False
if "issue_refresh" in kwargs:
issue_refresh = kwargs["issue_refresh"]
permissions = _info.get('permission', ['offline_access']) or [
'offline_access']
if 'offline_access' in _info[
'scope'] and 'offline_access' in permissions:
issue_refresh = True
try:
_tinfo = _sdb.upgrade_to_token(_access_code,
issue_refresh=issue_refresh)
except AccessCodeUsed as err:
logger.error("%s" % err)
# Should revoke the token issued to this access code
_sdb.revoke_all_tokens(_access_code)
return self._error(error="access_denied", descr="%s" % err)
if "openid" in _info["scope"]:
userinfo = self.userinfo_in_id_token_claims(_info)
# _authn_event = _info["authn_event"]
try:
_idtoken = self.sign_encrypt_id_token(
_info, client_info, req, user_info=userinfo)
except (JWEException, NoSuitableSigningKeys) as err:
logger.warning(str(err))
return self._error(error="access_denied",
descr="Could not sign/encrypt id_token")
_sdb.update_by_token(_access_code, "id_token", _idtoken)
# Refresh the _tinfo
_tinfo = _sdb[_access_code]
_log_debug("_tinfo: %s" % _tinfo)
atr = AccessTokenResponse(**by_schema(AccessTokenResponse, **_tinfo))
logger.info("access_token_response: %s" % atr.to_dict())
return Response(atr.to_json(), content="application/json") | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider._access_token_endpoint |
3,227 | def signed_userinfo(self, client_info, userinfo, session):
"""
Will create a JWS with the userinfo as payload.
:param client_info: Client registration information
:param userinfo: An OpenIDSchema instance
:param session: Session information
:return: A JWS containing the userinfo as a JWT
"""
try:
algo = client_info["userinfo_signed_response_alg"]
except __HOLE__: # Fall back to default
algo = self.jwx_def["signing_alg"]["userinfo"]
if algo == "none":
key = []
else:
if algo.startswith("HS"):
key = self.keyjar.get_signing_key(alg2keytype(algo),
client_info["client_id"],
alg=algo)
else:
# Use my key for signing
key = self.keyjar.get_signing_key(alg2keytype(algo), "",
alg=algo)
if not key:
return self._error(error="access_denied",
descr="Missing signing key")
jinfo = userinfo.to_jwt(key, algo)
if "userinfo_encrypted_response_alg" in client_info:
# encrypt with clients public key
jinfo = self.encrypt(jinfo, client_info, session["client_id"],
"userinfo", "JWT")
return jinfo
# noinspection PyUnusedLocal | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.signed_userinfo |
3,228 | def _do_user_info(self, token, **kwargs):
try:
_log_debug = kwargs["logger"].debug
except KeyError:
_log_debug = logger.debug
_sdb = self.sdb
# should be an access token
typ, key = _sdb.access_token.type_and_key(token)
_log_debug("access_token type: '%s'" % (typ,))
try:
assert typ == "T"
except __HOLE__:
raise FailedAuthentication("Wrong type of token")
# _log_info("keys: %s" % self.sdb.keys())
if _sdb.is_revoked(key):
return self._error(error="access_denied", descr="Token is revoked")
session = _sdb[key]
# Scope can translate to userinfo_claims
info = self.schema(**self._collect_user_info(session))
# Should I return a JSON or a JWT ?
_cinfo = self.cdb[session["client_id"]]
try:
if "userinfo_signed_response_alg" in _cinfo:
# Will also encrypt if defined in cinfo
jinfo = self.signed_userinfo(_cinfo, info, session)
content_type = "application/jwt"
elif "userinfo_encrypted_response_alg" in _cinfo:
jinfo = info.to_json()
jinfo = self.encrypt(jinfo, _cinfo, session["client_id"],
"userinfo", "")
content_type = "application/jwt"
else:
jinfo = info.to_json()
content_type = "application/json"
except NotSupportedAlgorithm as err:
return self._error(error="access_denied",
descr="Not supported algorithm: {}".format(
err.args[0]))
except JWEException:
return self._error(error="access_denied",
descr="Could not encrypt")
return Response(jinfo, content=content_type)
# noinspection PyUnusedLocal | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider._do_user_info |
3,229 | def check_session_endpoint(self, request, **kwargs):
"""
"""
try:
_log_debug = kwargs["logger"].debug
_log_info = kwargs["logger"].info
except __HOLE__:
_log_debug = logger.debug
_log_info = logger.info
if not request:
_tok = kwargs["authn"]
if not _tok:
return self._error(error="access_denied", descr="Illegal token")
else:
info = "id_token=%s" % _tok
if self.test_mode:
_log_info("check_session_request: %s" % request)
idt = self.server.parse_check_session_request(query=request)
if self.test_mode:
_log_info("check_session_response: %s" % idt.to_dict())
return Response(idt.to_json(), content="application/json") | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.check_session_endpoint |
3,230 | def match_client_request(self, request):
for _pref, _prov in PREFERENCE2PROVIDER.items():
if _pref in request:
if _pref == "response_types":
if not self.match_sp_sep(request[_pref],
self.capabilities[_prov]):
raise CapabilitiesMisMatch(_pref)
else:
if isinstance(request[_pref], six.string_types):
try:
assert request[_pref] in self.capabilities[_prov]
except __HOLE__:
raise CapabilitiesMisMatch(_pref)
else:
if not set(request[_pref]).issubset(
set(self.capabilities[_prov])):
raise CapabilitiesMisMatch(_pref) | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.match_client_request |
3,231 | def do_client_registration(self, request, client_id, ignore=None):
if ignore is None:
ignore = []
_cinfo = self.cdb[client_id].copy()
logger.debug("_cinfo: %s" % _cinfo)
for key, val in request.items():
if key not in ignore:
_cinfo[key] = val
if "post_logout_redirect_uris" in request:
plruri = []
for uri in request["post_logout_redirect_uris"]:
if urlparse(uri).fragment:
err = ClientRegistrationErrorResponse(
error="invalid_configuration_parameter",
error_description="post_logout_redirect_uris "
"contains "
"fragment")
return Response(err.to_json(),
content="application/json",
status="400 Bad Request")
base, query = splitquery(uri)
if query:
plruri.append((base, parse_qs(query)))
else:
plruri.append((base, query))
_cinfo["post_logout_redirect_uris"] = plruri
if "redirect_uris" in request:
try:
ruri = self._verify_redirect_uris(request)
_cinfo["redirect_uris"] = ruri
except InvalidRedirectURIError as e:
err = ClientRegistrationErrorResponse(
error="invalid_redirect_uri",
error_description=str(e))
return Response(err.to_json(),
content="application/json",
status="400 Bad Request")
if "sector_identifier_uri" in request:
si_url = request["sector_identifier_uri"]
try:
res = self.server.http_request(si_url)
except ConnectionError as err:
logger.error("%s" % err)
return self._error_response(
"invalid_configuration_parameter",
descr="Couldn't open sector_identifier_uri")
if not res:
return self._error_response(
"invalid_configuration_parameter",
descr="Couldn't open sector_identifier_uri")
logger.debug("sector_identifier_uri => %s" % res.text)
try:
si_redirects = json.loads(res.text)
except ValueError:
return self._error_response(
"invalid_configuration_parameter",
descr="Error deserializing sector_identifier_uri "
"content")
if "redirect_uris" in request:
logger.debug("redirect_uris: %s" % request["redirect_uris"])
for uri in request["redirect_uris"]:
try:
assert uri in si_redirects
except __HOLE__:
return self._error_response(
"invalid_configuration_parameter",
descr="redirect_uri missing from "
"sector_identifiers"
)
_cinfo["si_redirects"] = si_redirects
_cinfo["sector_id"] = si_url
elif "redirect_uris" in request:
if len(request["redirect_uris"]) > 1:
# check that the hostnames are the same
host = ""
for url in request["redirect_uris"]:
part = urlparse(url)
_host = part.netloc.split(":")[0]
if not host:
host = _host
else:
try:
assert host == _host
except AssertionError:
return self._error_response(
"invalid_configuration_parameter",
descr="'sector_identifier_uri' must be "
"registered")
for item in ["policy_uri", "logo_uri", "tos_uri"]:
if item in request:
if self._verify_url(request[item], _cinfo["redirect_uris"]):
_cinfo[item] = request[item]
else:
return self._error_response(
"invalid_configuration_parameter",
descr="%s pointed to illegal URL" % item)
# Do I have the necessary keys
for item in ["id_token_signed_response_alg",
"userinfo_signed_response_alg"]:
if item in request:
if request[item] in self.capabilities[
PREFERENCE2PROVIDER[item]]:
ktyp = jws.alg2keytype(request[item])
# do I have this ktyp and for EC type keys the curve
if ktyp not in ["none", "oct"]:
_k = self.keyjar.get_signing_key(ktyp,
alg=request[item])
if not _k:
del _cinfo[item]
try:
self.keyjar.load_keys(request, client_id)
try:
logger.debug("keys for %s: [%s]" % (
client_id,
",".join(["%s" % x for x in self.keyjar[client_id]])))
except KeyError:
pass
except Exception as err:
logger.error("Failed to load client keys: %s" % request.to_dict())
logger.error("%s", err)
err = ClientRegistrationErrorResponse(
error="invalid_configuration_parameter",
error_description="%s" % err)
return Response(err.to_json(), content="application/json",
status="400 Bad Request")
return _cinfo | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.do_client_registration |
3,232 | @staticmethod
def _verify_redirect_uris(registration_request):
verified_redirect_uris = []
try:
client_type = registration_request["application_type"]
except __HOLE__: # default
client_type = "web"
if client_type == "web":
try:
if registration_request["response_types"] == ["code"]:
must_https = False
else: # one has to be implicit or hybrid
must_https = True
except KeyError:
must_https = True
else:
must_https = False
for uri in registration_request["redirect_uris"]:
p = urlparse(uri)
if client_type == "native" and p.scheme == "http":
if p.hostname != "localhost":
raise InvalidRedirectURIError(
"Http redirect_uri must use localhost")
elif must_https and p.scheme != "https":
raise InvalidRedirectURIError(
"None https redirect_uri not allowed")
elif p.fragment:
raise InvalidRedirectURIError(
"redirect_uri contains fragment")
# This rule will break local testing.
# elif must_https and p.hostname == "localhost":
# err = InvalidRedirectURIError(
# "https redirect_uri with host localhost")
base, query = splitquery(uri)
if query:
verified_redirect_uris.append((base, parse_qs(query)))
else:
verified_redirect_uris.append((base, query))
return verified_redirect_uris | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider._verify_redirect_uris |
3,233 | def l_registration_endpoint(self, request, authn=None, **kwargs):
logger.debug("@registration_endpoint: <<%s>>" % request)
try:
request = RegistrationRequest().deserialize(request, "json")
except __HOLE__:
request = RegistrationRequest().deserialize(request)
logger.info("registration_request:%s" % request.to_dict())
result = self.client_registration_setup(request)
if isinstance(result, Response):
return result
return Created(result.to_json(), content="application/json",
headers=[("Cache-Control", "no-store")]) | ValueError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.l_registration_endpoint |
3,234 | def client_registration_setup(self, request):
try:
request.verify()
except MessageException as err:
if "type" not in request:
return self._error(error="invalid_type",
descr="%s" % err)
else:
return self._error(error="invalid_configuration_parameter",
descr="%s" % err)
request.rm_blanks()
try:
self.match_client_request(request)
except CapabilitiesMisMatch as err:
return self._error(error="invalid_request",
descr="Don't support proposed %s" % err)
# create new id och secret
client_id = rndstr(12)
while client_id in self.cdb:
client_id = rndstr(12)
client_secret = secret(self.seed, client_id)
_rat = rndstr(32)
reg_enp = ""
for endp in self.endp:
if endp.etype == 'registration':
reg_enp = urljoin(self.baseurl, endp.url)
break
self.cdb[client_id] = {
"client_id": client_id,
"client_secret": client_secret,
"registration_access_token": _rat,
"registration_client_uri": "%s?client_id=%s" % (reg_enp, client_id),
"client_secret_expires_at": utc_time_sans_frac() + 86400,
"client_id_issued_at": utc_time_sans_frac(),
"client_salt": rndstr(8)}
self.cdb[_rat] = client_id
_cinfo = self.do_client_registration(request, client_id,
ignore=["redirect_uris",
"policy_uri", "logo_uri",
"tos_uri"])
if isinstance(_cinfo, Response):
return _cinfo
args = dict([(k, v) for k, v in _cinfo.items()
if k in RegistrationResponse.c_param])
self.comb_uri(args)
response = RegistrationResponse(**args)
# Add the client_secret as a symmetric key to the keyjar
if client_secret:
_kc = KeyBundle([{"kty": "oct", "key": client_secret,
"use": "ver"},
{"kty": "oct", "key": client_secret,
"use": "sig"}])
try:
self.keyjar[client_id].append(_kc)
except __HOLE__:
self.keyjar[client_id] = [_kc]
self.cdb[client_id] = _cinfo
try:
self.cdb.sync()
except AttributeError: # Not all databases can be sync'ed
pass
logger.info("registration_response: %s" % response.to_dict())
return response | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.client_registration_setup |
3,235 | def verify_capabilities(self, capabilities):
"""
Verify that what the admin wants the server to do actually
can be done by this implementation.
:param capabilities: The asked for capabilities as a dictionary
or a ProviderConfigurationResponse instance. The latter can be
treated as a dictionary.
:return: True or False
"""
_pinfo = self.provider_features()
not_supported = {}
for key, val in capabilities.items():
if isinstance(val, six.string_types):
try:
if val in _pinfo[key]:
continue
else:
not_supported[key] = val
except KeyError:
not_supported[key] = ''
elif isinstance(val, bool):
if not _pinfo[key] and val:
not_supported[key] = ''
elif isinstance(val, list):
for v in val:
try:
if v in _pinfo[key]:
continue
else:
try:
not_supported[key].append(v)
except KeyError:
not_supported[key] = [v]
except __HOLE__:
not_supported[key] = ''
if not_supported:
logging.error(
"Server doesn't support the following features: {}".format(
not_supported))
return False
return True
# noinspection PyUnusedLocal | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.verify_capabilities |
3,236 | def discovery_endpoint(self, request, handle=None, **kwargs):
"""
:param request:
:param handle:
"""
_log_debug = logger.debug
_log_debug("@discovery_endpoint")
request = DiscoveryRequest().deserialize(request, "urlencoded")
_log_debug("discovery_request:%s" % (request.to_dict(),))
try:
assert request["service"] == SWD_ISSUER
except __HOLE__:
return BadRequest("Unsupported service")
# verify that the principal is one of mine
_response = DiscoveryResponse(locations=[self.baseurl])
_log_debug("discovery_response:%s" % (_response.to_dict(),))
headers = [("Cache-Control", "no-store")]
(key, timestamp) = handle
if key.startswith(STR) and key.endswith(STR):
cookie = self.cookie_func(key, self.cookie_name, "disc",
self.sso_ttl)
headers.append(cookie)
return Response(_response.to_json(), content="application/json",
headers=headers) | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.discovery_endpoint |
3,237 | def auth_resp_extension(self, aresp, areq, sid, rtype):
if "id_token" in areq["response_type"]:
_sinfo = self.sdb[sid]
if "code" in areq["response_type"]:
_code = aresp["code"] = _sinfo["code"]
rtype.remove("code")
else:
_sinfo[sid]["code"] = None
_code = None
try:
_access_token = aresp["access_token"]
except __HOLE__:
_access_token = None
user_info = self.userinfo_in_id_token_claims(_sinfo)
client_info = self.cdb[str(areq["client_id"])]
hargs = {}
if set(areq["response_type"]) == {'code', 'id_token', 'token'}:
hargs = {"code": _code, "access_token": _access_token}
elif set(areq["response_type"]) == {'code', 'id_token'}:
hargs = {"code": _code}
elif set(areq["response_type"]) == {'id_token', 'token'}:
hargs = {"access_token": _access_token}
# or 'code id_token'
id_token = self.sign_encrypt_id_token(
_sinfo, client_info, areq, user_info=user_info, **hargs)
aresp["id_token"] = id_token
_sinfo["id_token"] = id_token
rtype.remove("id_token")
return aresp | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.auth_resp_extension |
3,238 | def aresp_check(self, aresp, areq):
# Use of the nonce is REQUIRED for all requests where an ID Token is
# returned directly from the Authorization Endpoint
if "id_token" in aresp:
try:
assert "nonce" in areq
except __HOLE__:
return self._error("invalid_request", "Missing nonce value")
return None | AssertionError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.aresp_check |
3,239 | def create_authn_response(self, areq, sid):
# create the response
aresp = AuthorizationResponse()
try:
aresp["state"] = areq["state"]
except KeyError:
pass
if "response_type" in areq and areq["response_type"] == ["none"]:
fragment_enc = False
else:
_sinfo = self.sdb[sid]
try:
aresp["scope"] = areq["scope"]
except __HOLE__:
pass
rtype = set(areq["response_type"][:])
if len(rtype) == 1 and "code" in rtype:
fragment_enc = False
else:
fragment_enc = True
if "code" in areq["response_type"]:
_code = aresp["code"] = self.sdb[sid]["code"]
rtype.remove("code")
else:
self.sdb[sid]["code"] = None
_code = None
if "token" in rtype:
_dic = self.sdb.upgrade_to_token(issue_refresh=False, key=sid)
logger.debug("_dic: %s" % _dic)
for key, val in _dic.items():
if key in aresp.parameters() and val is not None:
aresp[key] = val
rtype.remove("token")
try:
_access_token = aresp["access_token"]
except KeyError:
_access_token = None
if "id_token" in areq["response_type"]:
user_info = self.userinfo_in_id_token_claims(_sinfo)
if areq["response_type"] == ["id_token"]:
# scopes should be returned here
info = self._collect_user_info(_sinfo)
if user_info is None:
user_info = info
else:
user_info.update(info)
client_info = self.cdb[str(areq["client_id"])]
hargs = {}
if set(areq["response_type"]) == {'code', 'id_token', 'token'}:
hargs = {"code": _code, "access_token": _access_token}
elif set(areq["response_type"]) == {'code', 'id_token'}:
hargs = {"code": _code}
elif set(areq["response_type"]) == {'id_token', 'token'}:
hargs = {"access_token": _access_token}
# or 'code id_token'
try:
id_token = self.sign_encrypt_id_token(
_sinfo, client_info, areq, user_info=user_info,
**hargs)
except (JWEException, NoSuitableSigningKeys) as err:
logger.warning(str(err))
return self._error(error="access_denied",
descr="Could not sign/encrypt id_token")
aresp["id_token"] = id_token
_sinfo["id_token"] = id_token
rtype.remove("id_token")
if len(rtype):
return BadRequest("Unknown response type: %s" % rtype)
return aresp, fragment_enc | KeyError | dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/provider.py/Provider.create_authn_response |
3,240 | def stopupdateloop(self, wlan):
''' Kill the update loop thread if there are no more WLANs using it.
'''
self.updatelock.acquire()
try:
del self.last_update_time[wlan]
except __HOLE__:
pass
self.updatelock.release()
if len(self.last_update_time) > 0:
return
self.doupdateloop = False
if self.updatethread:
self.updatethread.join()
self.updatethread = None | KeyError | dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnet.py/EbtablesQueue.stopupdateloop |
3,241 | def lastupdate(self, wlan):
''' Return the time elapsed since this WLAN was last updated.
'''
try:
elapsed = time.time() - self.last_update_time[wlan]
except __HOLE__:
self.last_update_time[wlan] = time.time()
elapsed = 0.0
return elapsed | KeyError | dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnet.py/EbtablesQueue.lastupdate |
3,242 | def linked(self, netif1, netif2):
# check if the network interfaces are attached to this network
if self._netif[netif1.netifi] != netif1:
raise ValueError, "inconsistency for netif %s" % netif1.name
if self._netif[netif2.netifi] != netif2:
raise ValueError, "inconsistency for netif %s" % netif2.name
try:
linked = self._linked[netif1][netif2]
except __HOLE__:
if self.policy == "ACCEPT":
linked = True
elif self.policy == "DROP":
linked = False
else:
raise Exception, "unknown policy: %s" % self.policy
self._linked[netif1][netif2] = linked
return linked | KeyError | dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnet.py/LxBrNet.linked |
3,243 | def linknet(self, net):
''' Link this bridge with another by creating a veth pair and installing
each device into each bridge.
'''
sessionid = self.session.shortsessionid()
try:
self_objid = '%x' % self.objid
except TypeError:
self_objid = '%s' % self.objid
try:
net_objid = '%x' % net.objid
except __HOLE__:
net_objid = '%s' % net.objid
localname = 'veth%s.%s.%s' % (self_objid, net_objid, sessionid)
if len(localname) >= 16:
raise ValueError, "interface local name '%s' too long" % \
localname
name = 'veth%s.%s.%s' % (net_objid, self_objid, sessionid)
if len(name) >= 16:
raise ValueError, "interface name '%s' too long" % name
netif = VEth(node = None, name = name, localname = localname,
mtu = 1500, net = self, start = self.up)
self.attach(netif)
if net.up:
# this is similar to net.attach() but uses netif.name instead
# of localname
check_call([BRCTL_BIN, "addif", net.brname, netif.name])
check_call([IP_BIN, "link", "set", netif.name, "up"])
i = net.newifindex()
net._netif[i] = netif
with net._linked_lock:
net._linked[netif] = {}
netif.net = self
netif.othernet = net
return netif | TypeError | dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnet.py/LxBrNet.linknet |
3,244 | def get_module_class(class_path):
"""
imports and returns module class from ``path.to.module.Class``
argument
"""
mod_name, cls_name = class_path.rsplit('.', 1)
try:
mod = import_module(mod_name)
except __HOLE__ as e:
raise ImproperlyConfigured(('Error importing module %s: "%s"' % (mod_name, e)))
return getattr(mod, cls_name) | ImportError | dataset/ETHPy150Open mariocesar/sorl-thumbnail/sorl/thumbnail/helpers.py/get_module_class |
3,245 | def recv(self):
try:
return uwsgi.websocket_recv()
except __HOLE__:
return None | IOError | dataset/ETHPy150Open zeekay/flask-uwsgi-websocket/flask_uwsgi_websocket/websocket.py/WebSocketClient.recv |
3,246 | def _word_tokenizer_re(self):
"""Compiles and returns a regular expression for word tokenization"""
try:
return self._re_word_tokenizer
except __HOLE__:
self._re_word_tokenizer = re.compile(
self._word_tokenize_fmt %
{
'NonWord': self._re_non_word_chars,
'MultiChar': self._re_multi_char_punct,
'WordStart': self._re_word_start,
},
re.UNICODE | re.VERBOSE
)
return self._re_word_tokenizer | AttributeError | dataset/ETHPy150Open nltk/nltk/nltk/tokenize/punkt.py/PunktLanguageVars._word_tokenizer_re |
3,247 | def _find_collocations(self):
"""
Generates likely collocations and their log-likelihood.
"""
for types in self._collocation_fdist:
try:
typ1, typ2 = types
except __HOLE__:
# types may be None after calling freq_threshold()
continue
if typ2 in self._params.sent_starters:
continue
col_count = self._collocation_fdist[types]
typ1_count = self._type_fdist[typ1]+self._type_fdist[typ1+'.']
typ2_count = self._type_fdist[typ2]+self._type_fdist[typ2+'.']
if (typ1_count > 1 and typ2_count > 1
and self.MIN_COLLOC_FREQ <
col_count <= min(typ1_count, typ2_count)):
ll = self._col_log_likelihood(typ1_count, typ2_count,
col_count, self._type_fdist.N())
# Filter out the not-so-collocative
if (ll >= self.COLLOCATION and
(self._type_fdist.N()/typ1_count >
typ2_count/col_count)):
yield (typ1, typ2), ll
#////////////////////////////////////////////////////////////
#{ Sentence-Starter Finder
#//////////////////////////////////////////////////////////// | TypeError | dataset/ETHPy150Open nltk/nltk/nltk/tokenize/punkt.py/PunktTrainer._find_collocations |
3,248 | @property
def masks(self):
masks = self.context.config[self.key]
if hasattr(self.context, 'db'):
# update config with storage values
try:
value = self.context.db[self]
except __HOLE__:
pass
else:
if isinstance(value, dict):
masks.update(value)
return masks | KeyError | dataset/ETHPy150Open gawel/irc3/irc3/plugins/command.py/mask_based_policy.masks |
3,249 | def _is_pingable(ip):
"""Checks whether an IP address is reachable by pinging.
Use linux utils to execute the ping (ICMP ECHO) command.
Sends 5 packets with an interval of 0.2 seconds and a timeout of 1
second. A runtime error implies unreachability; otherwise the IP is pingable.
:param ip: IP to check
:return: bool - True or False depending on pingability.
"""
ping_cmd = ['ping',
'-c', '5',
'-W', '1',
'-i', '0.2',
ip]
try:
linux_utils.execute(ping_cmd, check_exit_code=True)
return True
except __HOLE__:
LOG.warning(_LW("Cannot ping ip address: %s"), ip)
return False | RuntimeError | dataset/ETHPy150Open openstack/networking-cisco/networking_cisco/plugins/cisco/cfg_agent/device_status.py/_is_pingable |
3,250 | def _new_queue(self, queue, **kwargs):
"""Ensures a queue exists in SLQS."""
queue = self.entity_name(self.queue_name_prefix + queue)
try:
return self._queue_cache[queue]
except __HOLE__:
try:
self.slmq.create_queue(
queue, visibility_timeout=self.visibility_timeout)
except ResponseError:
pass
q = self._queue_cache[queue] = self.slmq.queue(queue)
return q | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/SLMQ.py/Channel._new_queue |
3,251 | def basic_ack(self, delivery_tag):
delivery_info = self.qos.get(delivery_tag).delivery_info
try:
queue = delivery_info['slmq_queue_name']
except __HOLE__:
pass
else:
self.delete_message(queue, delivery_info['slmq_message_id'])
super(Channel, self).basic_ack(delivery_tag) | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/SLMQ.py/Channel.basic_ack |
3,252 | def test():
"""Test program for telnetlib.
Usage: python telnetlib.py [-d] ... [host [port]]
Default host is localhost; default port is 23.
"""
debuglevel = 0
while sys.argv[1:] and sys.argv[1] == '-d':
debuglevel = debuglevel+1
del sys.argv[1]
host = 'localhost'
if sys.argv[1:]:
host = sys.argv[1]
port = 0
if sys.argv[2:]:
portstr = sys.argv[2]
try:
port = int(portstr)
except __HOLE__:
port = socket.getservbyname(portstr, 'tcp')
tn = Telnet()
tn.set_debuglevel(debuglevel)
tn.open(host, port, timeout=0.5)
tn.interact()
tn.close() | ValueError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/telnetlib.py/test |
3,253 | def default(self, o):
# Convert any iterable to list
try:
iterable = iter(o)
except __HOLE__:
pass
else:
return list(iterable)
if isinstance(o, (dict, list, basestring, int, long, float, bool, type(None))):
# Let the base class handle it
return json.JSONEncoder.default(self, o)
else:
# Convert it to a string
return unicode(str(o)) | TypeError | dataset/ETHPy150Open EricssonResearch/calvin-base/calvin/utilities/calvinlogger.py/JSONEncoderIters.default |
3,254 | @expose('error.html')
def error(self, status):
try:
status = int(status)
except __HOLE__: # pragma: no cover
status = 500
message = getattr(status_map.get(status), 'explanation', '')
return dict(status=status, message=message) | ValueError | dataset/ETHPy150Open pecan/pecan/pecan/scaffolds/base/+package+/controllers/root.py/RootController.error |
3,255 | def test_warn_wrong_warning(self):
def f():
warnings.warn("yo", DeprecationWarning)
failed = False
filters = sys.modules['warnings'].filters[:]
try:
try:
# Should raise an AssertionError
assert_warns(UserWarning, f)
failed = True
except __HOLE__:
pass
finally:
sys.modules['warnings'].filters = filters
if failed:
raise AssertionError("wrong warning caught by assert_warn") | AssertionError | dataset/ETHPy150Open trevorstephens/gplearn/gplearn/skutils/tests/test_testing.py/TestWarns.test_warn_wrong_warning |
3,256 | def test_set_xattr_native(self):
if not samba.xattr_native.is_xattr_supported():
raise TestSkipped()
ntacl = xattr.NTACL()
ntacl.version = 1
tempf = self._tmpfilename()
open(tempf, 'w').write("empty")
try:
samba.xattr_native.wrap_setxattr(tempf, "user.unittests",
ndr_pack(ntacl))
except __HOLE__:
raise TestSkipped("the filesystem where the tests are runned do not support XATTR")
os.unlink(tempf) | IOError | dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/tests/xattr.py/XattrTests.test_set_xattr_native |
3,257 | def test_set_and_get_native(self):
if not samba.xattr_native.is_xattr_supported():
raise TestSkipped()
tempf = self._tmpfilename()
reftxt = "this is a test"
open(tempf, 'w').write("empty")
try:
samba.xattr_native.wrap_setxattr(tempf, "user.unittests", reftxt)
text = samba.xattr_native.wrap_getxattr(tempf, "user.unittests")
self.assertEquals(text, reftxt)
except __HOLE__:
raise TestSkipped("the filesystem where the tests are runned do not support XATTR")
os.unlink(tempf) | IOError | dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/tests/xattr.py/XattrTests.test_set_and_get_native |
3,258 | def register(self, bdb):
with bdb.savepoint():
schema_sql = 'SELECT version FROM bayesdb_metamodel WHERE name = ?'
cursor = bdb.sql_execute(schema_sql, (self.name(),))
version = None
try:
row = cursor.next()
except __HOLE__:
version = 0
else:
version = row[0]
assert version is not None
if version == 0:
# XXX WHATTAKLUDGE!
for stmt in nig_normal_schema_1.split(';'):
bdb.sql_execute(stmt)
version = 1
if version != 1:
raise BQLError(bdb, 'NIG-Normal already installed'
' with unknown schema version: %d' % (version,)) | StopIteration | dataset/ETHPy150Open probcomp/bayeslite/src/metamodels/nig_normal.py/NIGNormalMetamodel.register |
3,259 | def get_version(self, filename=None, version=-1, **kwargs):
"""Get a file from GridFS by ``"filename"`` or metadata fields.
Returns a version of the file in GridFS whose filename matches
`filename` and whose metadata fields match the supplied keyword
arguments, as an instance of :class:`~gridfs.grid_file.GridOut`.
Version numbering is a convenience atop the GridFS API provided
by MongoDB. If more than one file matches the query (either by
`filename` alone, by metadata fields, or by a combination of
both), then version ``-1`` will be the most recently uploaded
matching file, ``-2`` the second most recently
uploaded, etc. Version ``0`` will be the first version
uploaded, ``1`` the second version, etc. So if three versions
have been uploaded, then version ``0`` is the same as version
``-3``, version ``1`` is the same as version ``-2``, and
version ``2`` is the same as version ``-1``.
Raises :class:`~gridfs.errors.NoFile` if no such version of
that file exists.
An index on ``{filename: 1, uploadDate: -1}`` will
automatically be created when this method is called the first
time.
:Parameters:
- `filename`: ``"filename"`` of the file to get, or `None`
- `version` (optional): version of the file to get (defaults
to -1, the most recent version uploaded)
- `**kwargs` (optional): find files by custom metadata.
.. versionchanged:: 1.11
`filename` defaults to None;
.. versionadded:: 1.11
Accept keyword arguments to find files by custom metadata.
.. versionadded:: 1.9
"""
self.__ensure_index_filename()
query = kwargs
if filename is not None:
query["filename"] = filename
cursor = self.__files.find(query)
if version < 0:
skip = abs(version) - 1
cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
else:
cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING)
try:
grid_file = cursor.next()
return GridOut(self.__collection, file_document=grid_file)
except __HOLE__:
raise NoFile("no version %d for filename %r" % (version, filename)) | StopIteration | dataset/ETHPy150Open blynch/CloudMemeBackend/gridfs/__init__.py/GridFS.get_version |
3,260 | def executemany(self, sql, param_list):
start = time()
try:
return super(CursorDebugWrapper, self).executemany(sql, param_list)
finally:
stop = time()
duration = stop - start
try:
times = len(param_list)
except __HOLE__: # param_list could be an iterator
times = '?'
self.db.queries_log.append({
'sql': '%s times: %s' % (times, sql),
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, param_list,
extra={'duration': duration, 'sql': sql, 'params': param_list}
)
###############################################
# Converters from database (string) to Python #
############################################### | TypeError | dataset/ETHPy150Open django/django/django/db/backends/utils.py/CursorDebugWrapper.executemany |
3,261 | def _get(self, queue):
if queue in self._fanout_queues:
try:
msg = next(self._get_broadcast_cursor(queue))
except __HOLE__:
msg = None
else:
msg = self.messages.find_and_modify(
query={'queue': queue},
sort=[('priority', pymongo.ASCENDING),
('$natural', pymongo.ASCENDING)],
remove=True,
)
if self.ttl:
self._update_queues_expire(queue)
if msg is None:
raise Empty()
return loads(bytes_to_str(msg['payload'])) | StopIteration | dataset/ETHPy150Open celery/kombu/kombu/transport/mongodb.py/Channel._get |
3,262 | def queue_delete(self, queue, **kwargs):
self.routing.remove({'queue': queue})
if self.ttl:
self.queues.remove({'_id': queue})
super(Channel, self).queue_delete(queue, **kwargs)
if queue in self._fanout_queues:
try:
cursor = self._broadcast_cursors.pop(queue)
except __HOLE__:
pass
else:
cursor.close()
self._fanout_queues.pop(queue)
# Implementation details | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/mongodb.py/Channel.queue_delete |
3,263 | def _get_broadcast_cursor(self, queue):
try:
return self._broadcast_cursors[queue]
except __HOLE__:
# Cursor may be absent when Channel created more than once.
# _fanout_queues is a class-level mutable attribute so it's
# shared over all Channel instances.
return self._create_broadcast_cursor(
self._fanout_queues[queue], None, None, queue,
) | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/mongodb.py/Channel._get_broadcast_cursor |
3,264 | def _get_expire(self, queue, argument):
"""Gets expiration header named `argument` of queue definition.
`queue` must be either queue name or options itself."""
if isinstance(queue, basestring):
doc = self.queues.find_one({'_id': queue})
if not doc:
return
data = doc['options']
else:
data = queue
try:
value = data['arguments'][argument]
except (KeyError, __HOLE__):
return
return self.get_now() + datetime.timedelta(milliseconds=value) | TypeError | dataset/ETHPy150Open celery/kombu/kombu/transport/mongodb.py/Channel._get_expire |
3,265 | def _get_openssl_crypto_module():
try:
from OpenSSL import crypto
except __HOLE__:
raise TypeError('Using ad-hoc certificates requires the pyOpenSSL '
'library.')
else:
return crypto | ImportError | dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/serving.py/_get_openssl_crypto_module |
3,266 | def run_wsgi(self):
if self.headers.get('Expect', '').lower().strip() == '100-continue':
self.wfile.write(b'HTTP/1.1 100 Continue\r\n\r\n')
self.environ = environ = self.make_environ()
headers_set = []
headers_sent = []
def write(data):
assert headers_set, 'write() before start_response'
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except __HOLE__:
code, msg = status, ""
self.send_response(int(code), msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if 'content-length' not in header_keys:
self.close_connection = True
self.send_header('Connection', 'close')
if 'server' not in header_keys:
self.send_header('Server', self.version_string())
if 'date' not in header_keys:
self.send_header('Date', self.date_time_string())
self.end_headers()
assert isinstance(data, bytes), 'applications must write bytes'
self.wfile.write(data)
self.wfile.flush()
def start_response(status, response_headers, exc_info=None):
if exc_info:
try:
if headers_sent:
reraise(*exc_info)
finally:
exc_info = None
elif headers_set:
raise AssertionError('Headers already set')
headers_set[:] = [status, response_headers]
return write
def execute(app):
application_iter = app(environ, start_response)
try:
for data in application_iter:
write(data)
if not headers_sent:
write(b'')
finally:
if hasattr(application_iter, 'close'):
application_iter.close()
application_iter = None
try:
execute(self.server.app)
except (socket.error, socket.timeout) as e:
self.connection_dropped(e, environ)
except Exception:
if self.server.passthrough_errors:
raise
from werkzeug.debug.tbtools import get_current_traceback
traceback = get_current_traceback(ignore_system_exceptions=True)
try:
# if we haven't yet sent the headers but they are set
# we roll back to be able to set them again.
if not headers_sent:
del headers_set[:]
execute(InternalServerError())
except Exception:
pass
self.server.log('error', 'Error on request:\n%s',
traceback.plaintext) | ValueError | dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/serving.py/WSGIRequestHandler.run_wsgi |
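For context, a minimal WSGI application of the shape this handler drives (purely illustrative; not part of the werkzeug source) looks like:
def simple_app(environ, start_response):
    # start_response takes the status line and a list of (name, value) header
    # tuples, matching the start_response closure defined in run_wsgi above
    start_response('200 OK', [('Content-Type', 'text/plain')])
    # the returned iterable yields bytes objects, which write() streams out
    return [b'Hello from run_wsgi']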
3,267 | def is_ssl_error(error=None):
"""Checks if the given error (or the current one) is an SSL error."""
exc_types = (ssl.SSLError,)
try:
from OpenSSL.SSL import Error
exc_types += (Error,)
except __HOLE__:
pass
if error is None:
error = sys.exc_info()[1]
return isinstance(error, exc_types) | ImportError | dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/serving.py/is_ssl_error |
3,268 | def serve_forever(self):
self.shutdown_signal = False
try:
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
display_hostname = self.host != '*' and self.host or 'localhost'
if ':' in display_hostname:
display_hostname = '[%s]' % display_hostname
quit_msg = '(Press CTRL+C to quit)'
_log('info', ' * Running on %s://%s:%d/ %s',
self.ssl_context is None and 'http' or 'https',
display_hostname, self.port, quit_msg)
HTTPServer.serve_forever(self)
except __HOLE__:
pass
finally:
self.server_close() | KeyboardInterrupt | dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/serving.py/BaseWSGIServer.serve_forever |
3,269 | def run_simple(hostname, port, application, use_reloader=False,
use_debugger=False, use_evalex=True,
extra_files=None, reloader_interval=1,
reloader_type='auto', threaded=False,
processes=1, request_handler=None, static_files=None,
passthrough_errors=False, ssl_context=None):
"""Start a WSGI application. Optional features include a reloader,
multithreading and fork support.
This function has a command-line interface too::
python -m werkzeug.serving --help
.. versionadded:: 0.5
`static_files` was added to simplify serving of static files as well
as `passthrough_errors`.
.. versionadded:: 0.6
support for SSL was added.
.. versionadded:: 0.8
Added support for automatically loading a SSL context from certificate
file and private key.
.. versionadded:: 0.9
Added command-line interface.
.. versionadded:: 0.10
Improved the reloader and added support for changing the backend
through the `reloader_type` parameter. See :ref:`reloader`
for more information.
:param hostname: The host for the application. eg: ``'localhost'``
:param port: The port for the server. eg: ``8080``
:param application: the WSGI application to execute
:param use_reloader: should the server automatically restart the python
process if modules were changed?
:param use_debugger: should the werkzeug debugging system be used?
:param use_evalex: should the exception evaluation feature be enabled?
:param extra_files: a list of files the reloader should watch
additionally to the modules. For example configuration
files.
:param reloader_interval: the interval for the reloader in seconds.
:param reloader_type: the type of reloader to use. The default is
auto detection. Valid values are ``'stat'`` and
``'watchdog'``. See :ref:`reloader` for more
information.
:param threaded: should the process handle each request in a separate
thread?
:param processes: if greater than 1 then handle each request in a new process
up to this maximum number of concurrent processes.
:param request_handler: optional parameter that can be used to replace
the default one. You can use this to replace it
with a different
:class:`~BaseHTTPServer.BaseHTTPRequestHandler`
subclass.
:param static_files: a dict of paths for static files. This works exactly
like :class:`SharedDataMiddleware`, it's actually
just wrapping the application in that middleware before
serving.
:param passthrough_errors: set this to `True` to disable the error catching.
This means that the server will die on errors but
it can be useful to hook debuggers in (pdb etc.)
:param ssl_context: an SSL context for the connection. Either an
:class:`ssl.SSLContext`, a tuple in the form
``(cert_file, pkey_file)``, the string ``'adhoc'`` if
the server should automatically create one, or ``None``
to disable SSL (which is the default).
"""
if use_debugger:
from werkzeug.debug import DebuggedApplication
application = DebuggedApplication(application, use_evalex)
if static_files:
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(application, static_files)
def inner():
try:
fd = int(os.environ['WERKZEUG_SERVER_FD'])
except (LookupError, __HOLE__):
fd = None
make_server(hostname, port, application, threaded,
processes, request_handler,
passthrough_errors, ssl_context,
fd=fd).serve_forever()
if use_reloader:
# If we're not running already in the subprocess that is the
# reloader we want to open up a socket early to make sure the
# port is actually available.
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
if port == 0 and not can_open_by_fd:
raise ValueError('Cannot bind to a random port with enabled '
'reloader if the Python interpreter does '
'not support socket opening by fd.')
# Create and destroy a socket so that any exceptions are
# raised before we spawn a separate Python interpreter and
# lose this ability.
address_family = select_ip_version(hostname, port)
s = socket.socket(address_family, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((hostname, port))
if hasattr(os, 'set_inheritable'):
os.set_inheritable(s.fileno(), True)
# If we can open the socket by file descriptor, then we can just
# reuse this one and our socket will survive the restarts.
if can_open_by_fd:
os.environ['WERKZEUG_SERVER_FD'] = str(s.fileno())
s.listen(LISTEN_QUEUE)
else:
s.close()
from ._reloader import run_with_reloader
run_with_reloader(inner, extra_files, reloader_interval,
reloader_type)
else:
inner() | ValueError | dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/serving.py/run_simple |
3,270 | def interact(locals=None, plain=False):
locals = locals or inspect.currentframe().f_back.f_locals
try:
if plain:
raise ImportError
from IPython import embed
embed(user_ns=locals, banner1='')
except __HOLE__:
code.interact(local=locals) | ImportError | dataset/ETHPy150Open fusic-com/flask-todo/utils/pyutils.py/interact |
3,271 | @property
def unit_of_measurement(self):
"""Return the defined units of measurement or None."""
try:
return self.node.units
except __HOLE__:
return None | AttributeError | dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/isy994.py/ISYDeviceABC.unit_of_measurement |
3,272 | def collect(self):
paths = self.config.get('paths')
if isinstance(paths, basestring):
paths = [paths]
for path in paths:
matches = re.search('((.+)\@)?(.+)?', path)
alias = matches.group(2)
myfile = matches.group(3)
if alias is None:
try:
filename = os.path.basename(path)
alias = filename.replace(".", "_")
except Exception, e:
self.log.error('Could not derive bucket name: %s', e)
continue
try:
test = open(myfile)
except __HOLE__:
self.log.error('Unable to access file: %s', myfile)
continue
stats = os.stat(myfile)
fileage = (time.time()-stats.st_mtime)
self.publish(alias, fileage)
alias = None | IOError | dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/fileage/fileage.py/FileAgeCollector.collect |
3,273 | def run(self):
for row in self.input.rows():
for i in self.string_indexes:
value = row[i]
if type(value) == str or type(value) == unicode:
value = value.strip()
elif value:
value = unicode(value)
if not value:
value = self.string_none
row[i] = value
for i in self.integer_indexes:
value = row[i]
if type(value) == str or type(value) == unicode:
value = re.sub(r"\s", "", value.strip())
if value is None:
value = self.integer_none
else:
try:
value = int(value)
except __HOLE__:
value = self.integer_none
row[i] = value
for i in self.float_indexes:
value = row[i]
if type(value) == str or type(value) == unicode:
value = re.sub(r"\s", "", value.strip())
if value is None:
value = self.float_none
else:
try:
value = float(value)
except ValueError:
value = self.float_none
row[i] = value
self.put(row) | ValueError | dataset/ETHPy150Open Stiivi/brewery/brewery/nodes/field_nodes.py/CoalesceValueToTypeNode.run |
3,274 | def test_temporary_commit(self):
self.mount()
git = self.git_repo()
try:
git.refs['refs/heads/mounted']
except __HOLE__:
self.fail('branch "mounted" does not exist')
initial_heads = {
"master": git.refs['refs/heads/master'],
"mounted": git.refs['refs/heads/mounted'],
}
self.assertNotEqual(initial_heads['master'], initial_heads['mounted'])
with open(path.join(self.mount_point, 'bigfile'), 'wb') as f:
filesize = filesystem.WRITE_BUFFER_SIZE * 3
for c in xrange(filesize / 4096 + 1):
f.write('asdf' * 1024)
git = self.git_repo()
current_heads = {
"master": git.refs['refs/heads/master'],
"mounted": git.refs['refs/heads/mounted'],
}
self.assertEqual(initial_heads['master'], current_heads['master'])
self.assertNotEqual(current_heads['master'], current_heads['mounted'])
self.assertNotEqual(initial_heads['mounted'], current_heads['mounted'])
self.umount()
git = self.git_repo()
final_heads = {
"master": git.refs['refs/heads/master'],
}
self.assertRaises(KeyError, lambda: git.refs['refs/heads/mounted'])
self.assertNotEqual(final_heads['master'], current_heads['master']) | KeyError | dataset/ETHPy150Open mgax/SpaghettiFS/spaghettifs/tests/test_mount_commits.py/MountCommits.test_temporary_commit |
3,275 | def open(self):
"""\
Open port with current settings. This may throw a SerialException
if the port cannot be opened.
"""
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
if self.is_open:
raise SerialException("Port is already open.")
# if RTS and/or DTR are not set before open, they default to True
if self._rts_state is None:
self._rts_state = True
if self._dtr_state is None:
self._dtr_state = True
# the "\\.\COMx" format is required for devices other than COM1-COM8
# not all versions of windows seem to support this properly
# so that the first few ports are used with the DOS device name
port = self.name
try:
if port.upper().startswith('COM') and int(port[3:]) > 8:
port = '\\\\.\\' + port
except __HOLE__:
# for like COMnotanumber
pass
self._port_handle = win32.CreateFile(
port,
win32.GENERIC_READ | win32.GENERIC_WRITE,
0, # exclusive access
None, # no security
win32.OPEN_EXISTING,
win32.FILE_ATTRIBUTE_NORMAL | win32.FILE_FLAG_OVERLAPPED,
0)
if self._port_handle == win32.INVALID_HANDLE_VALUE:
self._port_handle = None # 'cause __del__ is called anyway
raise SerialException("could not open port {!r}: {!r}".format(self.portstr, ctypes.WinError()))
try:
self._overlapped_read = win32.OVERLAPPED()
self._overlapped_read.hEvent = win32.CreateEvent(None, 1, 0, None)
self._overlapped_write = win32.OVERLAPPED()
#~ self._overlapped_write.hEvent = win32.CreateEvent(None, 1, 0, None)
self._overlapped_write.hEvent = win32.CreateEvent(None, 0, 0, None)
# Setup a 4k buffer
win32.SetupComm(self._port_handle, 4096, 4096)
# Save original timeout values:
self._orgTimeouts = win32.COMMTIMEOUTS()
win32.GetCommTimeouts(self._port_handle, ctypes.byref(self._orgTimeouts))
self._reconfigure_port()
# Clear buffers:
# Remove anything that was there
win32.PurgeComm(
self._port_handle,
win32.PURGE_TXCLEAR | win32.PURGE_TXABORT |
win32.PURGE_RXCLEAR | win32.PURGE_RXABORT)
except:
try:
self._close()
except:
# ignore any exception when closing the port
# also to keep original exception that happened when setting up
pass
self._port_handle = None
raise
else:
self.is_open = True | ValueError | dataset/ETHPy150Open pyserial/pyserial/serial/serialwin32.py/Serial.open |
3,276 | def get_last_post(self):
try:
return self.post_set.order_by('-date')[0]
except __HOLE__:
return None | IndexError | dataset/ETHPy150Open johnboxall/snapboard/snapboard/models.py/Thread.get_last_post |
3,277 | def getlines():
try:
f = open(os.path.join(os.path.dirname(cffi.__file__),
'..', 'c', 'commontypes.c'))
except __HOLE__:
py.test.skip("cannot find ../c/commontypes.c")
lines = [line for line in f.readlines() if line.strip().startswith('EQ(')]
f.close()
return lines | IOError | dataset/ETHPy150Open johncsnyder/SwiftKitten/cffi/testing/cffi1/test_commontypes.py/getlines |
3,278 | def handle_filing(this_filing):
try:
co = Committee_Overlay.objects.get(fec_id=this_filing.fec_id, cycle=this_filing.cycle)
this_filing.committee_designation = co.designation
this_filing.committee_name = co.name
this_filing.committee_type = co.ctype
this_filing.committee_slug = co.slug
this_filing.party = co.party
# mark that the committee is dirty
co.is_dirty=True
co.save()
except Committee_Overlay.DoesNotExist:
try:
## remember that ftpdata committees have cycles as ints, not strings. Not ideal.
if not this_filing.cycle:
this_filing.cycle = CURRENT_CYCLE
co = Committee.objects.get(cmte_id=this_filing.fec_id, cycle=int(this_filing.cycle))
this_filing.committee_designation = co.cmte_dsgn
this_filing.committee_type = co.cmte_tp
this_filing.committee_name = co.cmte_name
this_filing.party = get_party_from_pty(co.cmte_pty_affiliation)
except Committee.DoesNotExist:
pass
header_data = this_filing.header_data
form_type = this_filing.form_type
parsed_data = {'coh_start':None, 'coh_end':None, 'new_loans':None,'tot_raised':None,'tot_spent':None}
if form_type in ['F3A', 'F3N', 'F3T','F3PA', 'F3PN', 'F3PT', 'F3', 'F3P']:
parsed_data = process_f3_header(header_data)
print "got data %s" % (parsed_data)
this_filing.coh_end = parsed_data['coh_end'] if parsed_data['coh_end'] else 0
this_filing.tot_raised = parsed_data['tot_raised'] if parsed_data['tot_raised'] else 0
this_filing.tot_spent = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
this_filing.new_loans = parsed_data['new_loans'] if parsed_data['new_loans'] else 0
this_filing.new_filing_details_set = True
elif form_type in ['F3X', 'F3XA', 'F3XN', 'F3XT']:
parsed_data = process_f3x_header(header_data)
print "got data %s" % (parsed_data)
this_filing.coh_end = parsed_data['coh_end'] if parsed_data['coh_end'] else 0
this_filing.tot_raised = parsed_data['tot_raised'] if parsed_data['tot_raised'] else 0
this_filing.tot_spent = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
this_filing.new_loans = parsed_data['new_loans'] if parsed_data['new_loans'] else 0
this_filing.tot_coordinated = parsed_data['tot_coordinated'] if parsed_data['tot_coordinated'] else 0
this_filing.tot_ies = parsed_data['tot_ies'] if parsed_data['tot_ies'] else 0
this_filing.new_filing_details_set = True
elif form_type in ['F5', 'F5A', 'F5N']:
parsed_data = process_f5_header(header_data)
this_filing.tot_raised = parsed_data['tot_raised'] if parsed_data['tot_raised'] else 0
this_filing.tot_spent = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
# total spending is total ies
this_filing.tot_ies = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
this_filing.coverage_from_date = parsed_data['coverage_from_date']
this_filing.coverage_to_date = parsed_data['coverage_to_date']
try:
this_filing.is_f5_quarterly = header_data['report_code'] in ['Q1', 'Q2', 'Q3', 'Q4', 'YE']
except __HOLE__:
# this is probably a problem.
pass
this_filing.new_filing_details_set = True
elif form_type in ['F7', 'F7A', 'F7N']:
parsed_data = process_f7_header(header_data)
#print "got data %s" % (parsed_data)
this_filing.tot_raised = 0
this_filing.tot_spent = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
this_filing.coverage_from_date = parsed_data['coverage_from_date'] if parsed_data['coverage_from_date'] else None
this_filing.coverage_to_date = parsed_data['coverage_to_date'] if parsed_data['coverage_to_date'] else None
this_filing.new_filing_details_set = True
elif form_type in ['F9', 'F9A', 'F9N']:
parsed_data = process_f9_header(header_data)
this_filing.tot_raised = parsed_data['tot_raised'] if parsed_data['tot_raised'] else 0
this_filing.tot_spent = parsed_data['tot_spent'] if parsed_data['tot_spent'] else 0
this_filing.coverage_from_date = parsed_data['coverage_from_date'] if parsed_data['coverage_from_date'] else None
this_filing.coverage_to_date = parsed_data['coverage_to_date'] if parsed_data['coverage_to_date'] else None
this_filing.new_filing_details_set = True
elif form_type in ['F13', 'F13A', 'F13N']:
parsed_data = process_f13_header(header_data)
#print "got data %s" % (parsed_data)
this_filing.tot_raised = parsed_data['tot_raised'] if parsed_data['tot_raised'] else 0
this_filing.coverage_from_date = parsed_data['coverage_from_date'] if parsed_data['coverage_from_date'] else None
this_filing.coverage_to_date = parsed_data['coverage_to_date'] if parsed_data['coverage_to_date'] else None
this_filing.new_filing_details_set = True
else:
# Nothing to be done, but mark this step as done.
this_filing.new_filing_details_set = True
this_filing.save() | KeyError | dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/fec_alerts/management/commands/set_new_filing_details.py/handle_filing |
3,279 | def get_queryset(self, request, *args, **kwargs):
try:
draft = resources.review_request_draft.get_object(
request, *args, **kwargs)
except __HOLE__:
raise self.model.DoesNotExist
return self.model.objects.filter(review_request_draft=draft) | ObjectDoesNotExist | dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/draft_diff.py/DraftDiffResource.get_queryset |
3,280 | def enable(self):
self.options = {}
for name, operations in self.operations:
try:
# When called from SimpleTestCase._pre_setup, values may be
# overridden several times; cumulate changes.
value = self.options[name]
except __HOLE__:
value = list(getattr(settings, name, []))
for action, items in operations.items():
# items may be a single value or an iterable.
if isinstance(items, six.string_types):
items = [items]
if action == 'append':
value = value + [item for item in items if item not in value]
elif action == 'prepend':
value = [item for item in items if item not in value] + value
elif action == 'remove':
value = [item for item in value if item not in items]
else:
raise ValueError("Unsupported action: %s" % action)
self.options[name] = value
super(modify_settings, self).enable() | KeyError | dataset/ETHPy150Open st4lk/django-rest-social-auth/tests/utils.py/modify_settings.enable |
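A small sketch of how the append/prepend/remove actions above combine, using plain lists instead of Django settings (names are illustrative; relies on Python 3.7+ dict ordering):
value = ['a', 'b']
operations = {'append': ['c', 'a'], 'prepend': ['z'], 'remove': 'b'}
for action, items in operations.items():
    if isinstance(items, str):  # items may be a single value or an iterable
        items = [items]
    if action == 'append':
        value = value + [item for item in items if item not in value]
    elif action == 'prepend':
        value = [item for item in items if item not in value] + value
    elif action == 'remove':
        value = [item for item in value if item not in items]
# value is now ['z', 'a', 'c']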
3,281 | def loadBiblioDataFile(lines, storage):
try:
while True:
fullKey = lines.next()
prefix, key = fullKey[0], fullKey[2:].strip()
if prefix == "d":
b = {
"linkText": lines.next(),
"date": lines.next(),
"status": lines.next(),
"title": lines.next(),
"dated_url": lines.next(),
"current_url": lines.next(),
"other": lines.next(),
"etAl": lines.next() != "\n",
"order": 3,
"biblioFormat": "dict",
"authors": []
}
while True:
line = lines.next()
if line == b"-\n":
break
b['authors'].append(line)
elif prefix == "s":
b = {
"linkText": lines.next(),
"data": lines.next(),
"biblioFormat": "string",
"order": 3
}
line = lines.next() # Eat the -
elif prefix == "a":
b = {
"linkText": lines.next(),
"aliasOf": lines.next(),
"biblioFormat": "alias",
"order": 3
}
line = lines.next() # Eat the -
else:
die("Unknown biblio prefix '{0}' on key '{1}'", prefix, fullKey)
continue
storage[key].append(b)
except __HOLE__:
pass | StopIteration | dataset/ETHPy150Open tabatkins/bikeshed/bikeshed/biblio.py/loadBiblioDataFile |
3,282 | def get_parameter_intervals_exception_test(self):
"""Testing for NotImplementedError."""
class IllegalMethodErrorClass(BaseMethod):
pass
try:
IllegalMethodErrorClass()
except __HOLE__:
pass
else:
assert False # pragma: no cover | NotImplementedError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/BaseMethodTest.get_parameter_intervals_exception_test |
3,283 | def parameter_get_test(self):
"""Test the parameter set function."""
b = BaseMethod()
b.set_parameter("param1", 42.23)
param1 = b.get_parameter("param1")
assert param1 == 42.23
try:
b.get_parameter("param2")
except __HOLE__:
pass
else:
assert False # pragma: no cover | KeyError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/BaseMethodTest.parameter_get_test |
3,284 | def execute_not_implemented_exception_test(self):
"""Test the correct interface of BaseMethod."""
b = BaseMethod(["param1", "param2"])
data = [[0.0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]]
ts = TimeSeries.from_twodim_list(data)
ts.normalize("second")
try:
b.execute(ts)
except __HOLE__:
pass
else:
assert False # pragma: no cover | NotImplementedError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/BaseMethodTest.execute_not_implemented_exception_test |
3,285 | def initialization_exception_test(self):
"""Test BaseForecastingMethod initialization for ValueError."""
for valuesToForecast in xrange(-10,0):
try:
BaseForecastingMethod(valuesToForecast=valuesToForecast)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/BaseForecastingMethodTest.initialization_exception_test |
3,286 | def calculate_values_to_forecast_exception_test(self):
"""Test for correct handling of illegal TimeSeries instances.
@todo remove NotImplementedError Catch."""
data = [[1.5, 152.0],[2.5, 172.8],[3.5, 195.07200000000003],[4.5, 218.30528000000004]]
ts = TimeSeries.from_twodim_list(data)
ts.add_entry(3, 1343)
bfm = BaseForecastingMethod()
# nothing has to be done, because forecast_until was never called
bfm._calculate_values_to_forecast(ts)
bfm.forecast_until(134)
try:
bfm._calculate_values_to_forecast(ts)
except ValueError:
pass
else:
assert False # pragma: no cover
ts.sort_timeseries()
try:
bfm._calculate_values_to_forecast(ts)
except __HOLE__:
pass
else:
assert False # pragma: no cover
ts.normalize("second")
bfm._calculate_values_to_forecast(ts) | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/BaseForecastingMethodTest.calculate_values_to_forecast_exception_test |
3,287 | def initialization_exception_Test(self):
"""Test the exeptions of SimpleMovingAverage's __init__."""
for invalidWindowSize in xrange(-5, 1):
try:
SimpleMovingAverage(invalidWindowSize)
except ValueError:
pass
else:
assert False # pragma: no cover
for invalidWindowSize in xrange(2, 10, 2):
try:
SimpleMovingAverage(invalidWindowSize)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/SimpleMovingAverageTest.initialization_exception_Test |
3,288 | def execute_value_error_test(self):
"""Test for the ValueError in SimpleMovingAverage.execute()."""
tsOne = TimeSeries()
data = [[1.5, 10.0],[2.5, 12.4],[3.5, 17.380000000000003],[4.5, 16.666],[5.5, 20.6662],[6.5, 23.46634],[7.5, 20.026438]]
tsTwo = TimeSeries.from_twodim_list(data)
sma = SimpleMovingAverage(3)
tsOne.normalize("second")
res = tsTwo.apply(sma)
try:
tsOne.apply(sma)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/SimpleMovingAverageTest.execute_value_error_test |
3,289 | def initialization_test(self):
"""Test the initialization of the ExponentialSmoothing method."""
ExponentialSmoothing(0.2, 0)
for alpha in [-42.23, -0.1, 0.0, 1.0, 1.1, 3.1, 4.2]:
try:
ExponentialSmoothing(alpha)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/ExponentialSmoothingTest.initialization_test |
3,290 | def initialization_test(self):
"""Test the initialization of the HoltMethod method."""
HoltMethod(0.2, 0.3)
for alpha in [-0.1, 0.45, 1.1]:
for beta in [-1.4, 3.2]:
try:
HoltMethod(alpha, beta)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/HoltMethodTest.initialization_test |
3,291 | def initialization_test(self):
"""Test the initialization of the HoltWintersMethod method."""
HoltWintersMethod(0.2, 0.3, 0.4, 5)
for alpha in [-0.1, 0.81, 1.1]:
for beta in [-1.4, 0.12, 3.2]:
for gamma in [-0.05, 1.3]:
try:
HoltWintersMethod(alpha, beta, gamma)
except __HOLE__:
pass
else:
assert False # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/HoltWintersMethodTest.initialization_test |
3,292 | def sanity_test(self):
"""HoltWinters should throw an Exception if applied to a Time Series shorter than the season length"""
hwm = HoltWintersMethod(seasonLength = 2)
data = [[0.0, 152]]
tsSrc = TimeSeries.from_twodim_list(data)
try:
tsSrc.apply(hwm)
except __HOLE__:
pass
else:
assert False, "HoltWinters should throw an Exception if applied to a Time Series shorter than the season length" # pragma: no cover | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/HoltWintersMethodTest.sanity_test |
3,293 | def preset_season_factor_test(self):
"""Initial Season Factors should be presetable"""
hwm = HoltWintersMethod(seasonLength=4)
factors = [0,1,2,3]
hwm.set_parameter("seasonValues", factors)
data = [[0, 362.0], [1,385.0], [2, 432.0], [3, 341.0], [4, 382.0], [5, 409.0], [6, 498.0], [7, 387.0], [8, 473.0], [9, 513.0], [10, 582.0], [11, 474.0]]
tsSrc = TimeSeries.from_twodim_list(data)
seasonValues = hwm.initSeasonFactors(tsSrc)
assert seasonValues == factors, "Preset Season Factors are not returned by initSeasonFactors"
hwm.set_parameter("seasonValues", factors[:2])
try:
hwm.initSeasonFactors(tsSrc)
except __HOLE__:
pass
else:
assert False, "If preset season factors and season length do not comply, initSeasonFactors should throw an AssertionError" # pragma: no cover | AssertionError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/HoltWintersMethodTest.preset_season_factor_test |
3,294 | def season_length_test(self):
"""Test that the season length has to be greater than 0."""
for seasonLength in xrange(-4, 1):
try:
HoltWintersMethod(seasonLength=seasonLength)
except __HOLE__:
pass
else:
assert False # pragma: no cover
for seasonLength in xrange(1,12414, 412):
HoltWintersMethod(seasonLength=seasonLength) | ValueError | dataset/ETHPy150Open T-002/pycast/pycast/tests/methodtest.py/HoltWintersMethodTest.season_length_test |
3,295 | def _is_payed_admin(self):
try:
self.payment
return True
except __HOLE__:
return False | ObjectDoesNotExist | dataset/ETHPy150Open oxyum/django-payment-webmoney/webmoney/models.py/Invoice._is_payed_admin |
3,296 | def __call__(self, request):
request.register_hook('response', self.handle_response)
try:
self.pos = request.body.tell()
except __HOLE__:
# In the case of HTTPKerberosAuth being reused and the body
# of the previous request was a file-like object, pos has
# the file position of the previous body. Ensure it's set to
# None.
self.pos = None
return request | AttributeError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/requests-kerberos-0.6.1/requests_kerberos/kerberos_.py/HTTPKerberosAuth.__call__ |
3,297 | def _calculate_log_likelihood(self):
"""
Calculates the log-likelihood (up to a constant) for a given
self.theta.
"""
R = zeros((self.n, self.n))
X, Y = self.X, self.Y
thetas = power(10., self.thetas)
# exponentially weighted distance formula
for i in range(self.n):
R[i, i+1:self.n] = exp(-thetas.dot(square(X[i, ...] - X[i+1:self.n, ...]).T))
R *= (1.0 - self.nugget)
R += R.T + eye(self.n)
self.R = R
one = ones(self.n)
rhs = column_stack([Y, one])
try:
# Cholesky Decomposition
self.R_fact = cho_factor(R)
sol = cho_solve(self.R_fact, rhs)
solve = lambda x: cho_solve(self.R_fact, x)
det_factor = log(abs(prod(diagonal(self.R_fact[0])) ** 2) + 1.e-16)
except (linalg.LinAlgError, __HOLE__):
# Since Cholesky failed, try linear least squares
self.R_fact = None # reset this to none, so we know not to use Cholesky
sol = lstsq(self.R, rhs)[0]
solve = lambda x: lstsq(self.R, x)[0]
det_factor = slogdet(self.R)[1]
self.mu = dot(one, sol[:, :-1]) / dot(one, sol[:, -1])
y_minus_mu = Y - self.mu
self.R_solve_ymu = solve(y_minus_mu)
self.R_solve_one = sol[:, -1]
self.sig2 = dot(y_minus_mu.T, self.R_solve_ymu) / self.n
if isinstance(self.sig2, ndarray):
self.log_likelihood = -self.n/2. * slogdet(self.sig2)[1] \
- 1./2.*det_factor
else:
self.log_likelihood = -self.n/2. * log(self.sig2) \
- 1./2.*det_factor | ValueError | dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/surrogate_models/kriging.py/KrigingSurrogate._calculate_log_likelihood |
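In math terms, the quantities assembled above are the standard concentrated log-likelihood of a Kriging model; a sketch of the formulas the code evaluates (up to additive constants), with R the correlation matrix built from exp(-theta_l (x_il - x_jl)^2):
\hat{\mu} = \frac{\mathbf{1}^\top R^{-1} y}{\mathbf{1}^\top R^{-1} \mathbf{1}}, \qquad
\hat{\sigma}^2 = \frac{(y - \hat{\mu})^\top R^{-1} (y - \hat{\mu})}{n}, \qquad
\ln L \propto -\frac{n}{2} \ln \hat{\sigma}^2 - \frac{1}{2} \ln \lvert R \rvert
The Cholesky factorization (with a linear least-squares fallback) supplies both the R^{-1} solves and the ln|R| term (det_factor) used in the expression.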
3,298 | def testunsupportedpagebreak():
"""Ensure unsupported page break types are trapped"""
document = newdocument()
docbody = document.xpath('/w:document/w:body', namespaces=nsprefixes)[0]
try:
docbody.append(pagebreak(type='unsup'))
except __HOLE__:
return # passed
assert False # failed | ValueError | dataset/ETHPy150Open mikemaccana/python-docx/tests/test_docx.py/testunsupportedpagebreak |
3,299 | def scan(fn,
sequences=None,
outputs_info=None,
non_sequences=None,
n_steps=None,
truncate_gradient=-1,
go_backwards=False,
mode=None,
name=None,
profile=False,
allow_gc=None,
strict=False):
"""
This function constructs and applies a Scan op to the provided
arguments.
Parameters
----------
fn
``fn`` is a function that describes the operations involved in one
step of ``scan``. ``fn`` should construct variables describing the
output of one iteration step. It should expect as input theano
variables representing all the slices of the input sequences
and previous values of the outputs, as well as all other arguments
given to scan as ``non_sequences``. The order in which scan passes
these variables to ``fn`` is the following :
* all time slices of the first sequence
* all time slices of the second sequence
* ...
* all time slices of the last sequence
* all past slices of the first output
* all past slices of the second output
* ...
* all past slices of the last output
* all other arguments (the list given as `non_sequences` to
scan)
The order of the sequences is the same as the one in the list
`sequences` given to scan. The order of the outputs is the same
as the order of ``outputs_info``. For any sequence or output the
order of the time slices is the same as the one in which they have
been given as taps. For example if one writes the following :
.. code-block:: python
scan(fn, sequences = [ dict(input= Sequence1, taps = [-3,2,-1])
, Sequence2
, dict(input = Sequence3, taps = 3) ]
, outputs_info = [ dict(initial = Output1, taps = [-3,-5])
, dict(initial = Output2, taps = None)
, Output3 ]
, non_sequences = [ Argument1, Argument2])
``fn`` should expect the following arguments in this given order:
#. ``Sequence1[t-3]``
#. ``Sequence1[t+2]``
#. ``Sequence1[t-1]``
#. ``Sequence2[t]``
#. ``Sequence3[t+3]``
#. ``Output1[t-3]``
#. ``Output1[t-5]``
#. ``Output3[t-1]``
#. ``Argument1``
#. ``Argument2``
The list of ``non_sequences`` can also contain shared variables
used in the function, though ``scan`` is able to figure those
out on its own so they can be skipped. For the clarity of the
code we recommend though to provide them to scan. To some extend
``scan`` can also figure out other ``non sequences`` (not shared)
even if not passed to scan (but used by `fn`). A simple example of
this would be :
.. code-block:: python
import theano.tensor as TT
W = TT.matrix()
W_2 = W**2
def f(x):
return TT.dot(x,W_2)
The function is expected to return two things. One is a list of
outputs ordered in the same order as ``outputs_info``, with the
difference that there should be only one output variable per
output initial state (even if no tap value is used). Secondly
`fn` should return an update dictionary (that tells how to
update any shared variable after each iteration step). The
dictionary can optionally be given as a list of tuples. There is
no constraint on the order of these two list, ``fn`` can return
either ``(outputs_list, update_dictionary)`` or
``(update_dictionary, outputs_list)`` or just one of the two (in
case the other is empty).
To use ``scan`` as a while loop, the user needs to change the
function ``fn`` such that also a stopping condition is returned.
To do so, he/she needs to wrap the condition in an ``until`` class.
The condition should be returned as a third element, for example:
.. code-block:: python
...
return [y1_t, y2_t], {x:x+1}, theano.scan_module.until(x < 50)
Note that a number of steps (considered here as the maximum
number of steps) is still required even though a condition is
passed (and it is used to allocate memory if needed).
sequences
``sequences`` is the list of Theano variables or dictionaries
describing the sequences ``scan`` has to iterate over. If a
sequence is given as wrapped in a dictionary, then a set of optional
information can be provided about the sequence. The dictionary
should have the following keys:
* ``input`` (*mandatory*) -- Theano variable representing the
sequence.
* ``taps`` -- Temporal taps of the sequence required by ``fn``.
They are provided as a list of integers, where a value ``k``
implies that at iteration step ``t`` scan will pass to ``fn``
the slice ``t+k``. Default value is ``[0]``
Any Theano variable in the list ``sequences`` is automatically
wrapped into a dictionary where ``taps`` is set to ``[0]``
outputs_info
``outputs_info`` is the list of Theano variables or dictionaries
describing the initial state of the outputs computed
recurrently. When these initial states are given as dictionaries,
optional information can be provided about the output corresponding
to these initial states. The dictionary should have the following
keys:
* ``initial`` -- Theano variable that represents the initial
state of a given output. In case the output is not computed
recursively (think of a map) and does not require an initial
state this field can be skipped. Given that (only) the previous
time step of the output is used by ``fn``, the initial state
**should have the same shape** as the output and **should not
involve a downcast** of the data type of the output. If multiple
time taps are used, the initial state should have one extra
dimension that should cover all the possible taps. For example
if we use ``-5``, ``-2`` and ``-1`` as past taps, at step 0,
``fn`` will require (by an abuse of notation) ``output[-5]``,
``output[-2]`` and ``output[-1]``. This will be given by
the initial state, which in this case should have the shape
(5,)+output.shape. If this variable containing the initial
state is called ``init_y`` then ``init_y[0]`` *corresponds to*
``output[-5]``. ``init_y[1]`` *corresponds to* ``output[-4]``,
``init_y[2]`` corresponds to ``output[-3]``, ``init_y[3]``
corresponds to ``output[-2]``, ``init_y[4]`` corresponds to
``output[-1]``. While this order might seem strange, it comes
natural from splitting an array at a given point. Assume that
we have an array ``x``, and we choose ``k`` to be time step
``0``. Then our initial state would be ``x[:k]``, while the
output will be ``x[k:]``. Looking at this split, elements in
``x[:k]`` are ordered exactly like those in ``init_y``.
* ``taps`` -- Temporal taps of the output that will be pass to
``fn``. They are provided as a list of *negative* integers,
where a value ``k`` implies that at iteration step ``t`` scan
will pass to ``fn`` the slice ``t+k``.
``scan`` will follow this logic if partial information is given:
* If an output is not wrapped in a dictionary, ``scan`` will wrap
it in one assuming that you use only the last step of the output
(i.e. it makes your tap value list equal to [-1]).
* If you wrap an output in a dictionary and you do not provide any
taps but you provide an initial state it will assume that you are
using only a tap value of -1.
* If you wrap an output in a dictionary but you do not provide any
initial state, it assumes that you are not using any form of
taps.
* If you provide a ``None`` instead of a variable or an empty
dictionary ``scan`` assumes that you will not use any taps for
this output (like for example in case of a map)
If ``outputs_info`` is an empty list or None, ``scan`` assumes
that no tap is used for any of the outputs. If information is
provided just for a subset of the outputs an exception is
raised (because there is no convention on how scan should map
the provided information to the outputs of ``fn``)
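As a minimal sketch of the shape convention described above (the variable
names here are only illustrative), an output that uses the past taps
``[-5, -2, -1]`` could be declared as:
.. code-block:: python
import theano.tensor as TT
init_y = TT.matrix('init_y')   # first dimension covers all 5 back-steps,
# i.e. shape == (5,) + shape_of_one_output_step
outputs_info = [dict(initial=init_y, taps=[-5, -2, -1])]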
non_sequences
``non_sequences`` is the list of arguments that are passed to
``fn`` at each step. One can opt to exclude variables
used in ``fn`` from this list as long as they are part of the
computational graph, though for clarity we encourage not to do so.
n_steps
``n_steps`` is the number of steps to iterate given as an int
or Theano scalar. If any of the input sequences do not have
enough elements, scan will raise an error. If the *value is 0* the
outputs will have *0 rows*. If the value is negative, ``scan``
will run backwards in time. If the ``go_backwards`` flag is already
set and also ``n_steps`` is negative, ``scan`` will run forward
in time. If n_steps is not provided, ``scan`` will figure
out the amount of steps it should run given its input sequences.
truncate_gradient
``truncate_gradient`` is the number of steps to use in truncated
BPTT. If you compute gradients through a scan op, they are
computed using backpropagation through time. By providing a
different value than -1, you choose to use truncated BPTT instead
of classical BPTT, where you go for only ``truncate_gradient``
number of steps back in time.
go_backwards
``go_backwards`` is a flag indicating if ``scan`` should go
backwards through the sequences. If you think of each sequence
as indexed by time, making this flag True would mean that
``scan`` goes back in time, namely that for any sequence it
starts from the end and goes towards 0.
name
When profiling ``scan``, it is crucial to provide a name for any
instance of ``scan``. The profiler will produce an overall
profile of your code as well as profiles for the computation of
one step of each instance of ``scan``. The ``name`` of the instance
appears in those profiles and can greatly help to disambiguate
information.
mode
It is recommended to leave this argument to None, especially
when profiling ``scan`` (otherwise the results are not going to
be accurate). If you prefer the computations of one step of
``scan`` to be done differently than the entire function, you
can use this parameter to describe how the computations in this
loop are done (see ``theano.function`` for details about
possible values and their meaning).
profile
Flag or string. If true, or different from the empty string, a
profile object will be created and attached to the inner graph of
scan. In case ``profile`` is True, the profile object will have the
name of the scan instance, otherwise it will have the passed string.
The profile object collects (and prints) information only when running
the inner graph with the new cvm linker (with default modes or
other linkers this argument is useless).
allow_gc
Set the value of allow gc for the internal graph of scan. If
set to None, this will use the value of config.scan.allow_gc.
strict
If true, all the shared variables used in ``fn`` must be provided as a
part of ``non_sequences`` or ``sequences``.
Returns
-------
tuple
Tuple of the form (outputs, updates); ``outputs`` is either a
Theano variable or a list of Theano variables representing the
outputs of ``scan`` (in the same order as in ``outputs_info``).
``updates`` is a subclass of dictionary specifying the update rules for
all shared variables used in scan.
This dictionary should be passed to ``theano.function`` when you compile
your function. The change compared to a normal dictionary is that we
validate that keys are SharedVariables and that the addition of those
dictionaries is consistent.
"""
# General observation : this code is executed only once, at creation
# of the computational graph, so we don't yet need to be smart about
# anything (to speed things up)
##
# Step 1. Wrap all inputs in dictionaries and add default values
##
# check if inputs are just single variables instead of lists
def wrap_into_list(x):
"""
Wrap the input into a list if it is not already a list.
"""
if x is None:
return []
elif not isinstance(x, (list, tuple)):
return [x]
else:
return list(x)
seqs = wrap_into_list(sequences)
outs_info = wrap_into_list(outputs_info)
# Make sure we get rid of numpy arrays or ints or anything like that
# passed as inputs to scan
non_seqs = []
for elem in wrap_into_list(non_sequences):
if not isinstance(elem, gof.Variable):
non_seqs.append(tensor.as_tensor_variable(elem))
else:
non_seqs.append(elem)
# If we provided a known number of steps ( before compilation)
# and if that number is 1 or -1, then we can skip the Scan Op,
# and just apply the inner function once
# To do that we check here to see the nature of n_steps
n_fixed_steps = None
if isinstance(n_steps, (float, int)):
n_fixed_steps = int(n_steps)
else:
try:
n_fixed_steps = opt.get_scalar_constant_value(n_steps)
except tensor.basic.NotScalarConstantError:
n_fixed_steps = None
# Check n_steps is an int
if (hasattr(n_steps, 'dtype') and
str(n_steps.dtype)[:3] not in ('uin', 'int')):
raise ValueError(' n_steps must be an int. dtype provided '
'is %s' % n_steps.dtype)
# compute number of sequences and number of outputs
n_seqs = len(seqs)
n_outs = len(outs_info)
return_steps = OrderedDict()
# wrap sequences in a dictionary if they are not already dictionaries
for i in xrange(n_seqs):
if not isinstance(seqs[i], dict):
seqs[i] = OrderedDict([('input', seqs[i]), ('taps', [0])])
elif seqs[i].get('taps', None) is not None:
seqs[i]['taps'] = wrap_into_list(seqs[i]['taps'])
elif seqs[i].get('taps', None) is None:
# seqs dictionary does not have the ``taps`` key
seqs[i]['taps'] = [0]
# wrap outputs info in a dictionary if they are not already in one
for i in xrange(n_outs):
if outs_info[i] is not None:
if isinstance(outs_info[i], dict):
# DEPRECATED :
if outs_info[i].get('return_steps', None) is not None:
raise ValueError(
"Using `return_steps` has been deprecated. "
"Simply select the entries you need using a "
"subtensor. Scan will optimize memory "
"consumption, so do not worry about that.")
# END
if not isinstance(outs_info[i], dict):
# by default any output has a tap value of -1
outs_info[i] = OrderedDict([('initial', outs_info[i]), ('taps', [-1])])
elif (outs_info[i].get('initial', None) is None and
outs_info[i].get('taps', None) is not None):
# ^ no initial state but taps provided
raise ValueError(('If you are using slices of an output '
'you need to provide an initial state '
'for it'), outs_info[i])
elif (outs_info[i].get('initial', None) is not None and
outs_info[i].get('taps', None) is None):
# ^ initial state but taps not provided
if 'taps' in outs_info[i]:
# ^ explicitly provided a None for taps
_logger.warning('Output %s (index %d) has an initial '
'state but taps is explicitly set to None ',
getattr(outs_info[i]['initial'], 'name', 'None'),
i)
outs_info[i]['taps'] = [-1]
else:
# if a None is provided as the output info we replace it
# with an empty OrderedDict() to simplify handling
outs_info[i] = OrderedDict()
##
# Step 2. Generate inputs and outputs of the inner functions
# for compiling a dummy function (Iteration #1)
##
# create theano inputs for the recursive function
# note : this is a first batch of possible inputs that will
# be compiled in a dummy function; we used this dummy
# function to detect shared variables and their updates
# and to construct a new and complete list of inputs and
# outputs
n_seqs = 0
scan_seqs = [] # Variables passed as inputs to the scan op
inner_seqs = [] # Variables passed as inputs to the inner function
inner_slices = [] # Actual slices if scan is removed from the picture
# go through sequences picking up time slices as needed
for i, seq in enumerate(seqs):
# Note that you can have something like no taps for
# a sequence, though it is highly unlikely in practice
if 'taps' in seq:
# go through the indicated slice
mintap = numpy.min(seq['taps'])
maxtap = numpy.max(seq['taps'])
for k in seq['taps']:
# create one slice of the input
# Later on, if we decide not to use scan because we are
# going for just one step, it makes things easier if we
# compute the correct outputs here. This way we can use
# the output of the lambda expression directly to replace
# the output of scan.
# If not we need to use copies, that will be replaced at
# each frame by the corresponding slice
actual_slice = seq['input'][k - mintap]
_seq_val = tensor.as_tensor_variable(seq['input'])
_seq_val_slice = _seq_val[k - mintap]
nw_slice = _seq_val_slice.type()
# Try to transfer test_value to the new variable
if config.compute_test_value != 'off':
try:
nw_slice.tag.test_value = gof.Op._get_test_value(
_seq_val_slice)
except AttributeError as e:
if config.compute_test_value != 'ignore':
# No need to print a warning or raise an error now,
# it will be done when fn will be called.
_logger.info(('Cannot compute test value for '
'the inner function of scan, input value '
'missing %s'), e)
# Add names to slices for debugging and pretty printing ..
# that is if the input already has a name
if getattr(seq['input'], 'name', None) is not None:
if k > 0:
nw_name = seq['input'].name + '[t+%d]' % k
elif k == 0:
nw_name = seq['input'].name + '[t]'
else:
nw_name = seq['input'].name + '[t%d]' % k
nw_slice.name = nw_name
# We cut the sequence such that seq[i] corresponds to
# seq[i-k]. For the purposes of cutting the sequences, we
# need to pretend tap 0 is used to avoid cutting the sequences
# too long if the taps are all lower or all higher than 0.
maxtap_proxy = max(maxtap, 0)
mintap_proxy = min(mintap, 0)
start = (k - mintap_proxy)
if k == maxtap_proxy:
nw_seq = seq['input'][start:]
else:
end = -(maxtap_proxy - k)
nw_seq = seq['input'][start:end]
if go_backwards:
nw_seq = nw_seq[::-1]
scan_seqs.append(nw_seq)
inner_seqs.append(nw_slice)
inner_slices.append(actual_slice)
n_seqs += 1
# Since we've added all sequences now we need to level them up based on
# n_steps or their different shapes
lengths_vec = []
for seq in scan_seqs:
lengths_vec.append(seq.shape[0])
if not scan_utils.isNaN_or_Inf_or_None(n_steps):
# ^ N_steps should also be considered
lengths_vec.append(tensor.as_tensor(n_steps))
if len(lengths_vec) == 0:
# ^ No information about the number of steps
raise ValueError('No information about the number of steps '
'provided. Either provide a value for '
'n_steps argument of scan or provide an input '
'sequence')
# If the user has provided the number of steps, do that regardless ( and
# raise an error if the sequences are not long enough )
if scan_utils.isNaN_or_Inf_or_None(n_steps):
actual_n_steps = lengths_vec[0]
for contestant in lengths_vec[1:]:
actual_n_steps = tensor.minimum(actual_n_steps, contestant)
else:
actual_n_steps = tensor.as_tensor(n_steps)
# Add names -- it helps a lot when debugging
for (nw_seq, seq) in zip(scan_seqs, seqs):
if getattr(seq['input'], 'name', None) is not None:
nw_seq.name = seq['input'].name + '[%d:]' % k
scan_seqs = [seq[:actual_n_steps] for seq in scan_seqs]
# Conventions :
# mit_mot = multiple input taps, multiple output taps ( only provided
# by the gradient function )
# mit_sot = multiple input taps, single output tap (t + 0)
# sit_sot = single input tap, single output tap (t + 0)
# nit_sot = no input tap, single output tap (t + 0)
# MIT_MOT -- not provided by the user only by the grad function
n_mit_mot = 0
n_mit_mot_outs = 0
mit_mot_scan_inputs = []
mit_mot_inner_inputs = []
mit_mot_inner_outputs = []
mit_mot_out_slices = []
mit_mot_rightOrder = []
# SIT_SOT -- provided by the user
n_mit_sot = 0
mit_sot_scan_inputs = []
mit_sot_inner_inputs = []
mit_sot_inner_slices = []
mit_sot_inner_outputs = []
mit_sot_return_steps = OrderedDict()
mit_sot_tap_array = []
mit_sot_rightOrder = []
n_sit_sot = 0
sit_sot_scan_inputs = []
sit_sot_inner_inputs = []
sit_sot_inner_slices = []
sit_sot_inner_outputs = []
sit_sot_return_steps = OrderedDict()
sit_sot_rightOrder = []
# go through outputs picking up time slices as needed
for i, init_out in enumerate(outs_info):
# Note that our convention dictates that if an output uses
# just the previous time step, as an initial state we will only
# provide a tensor of the same dimension as one time step; this
# makes code much cleaner for those who do not use taps. Otherwise
# they would always have to shape_padleft the initial state,
# which is ugly.
if init_out.get('taps', None) == [-1]:
actual_arg = init_out['initial']
if not isinstance(actual_arg, tensor.Variable):
actual_arg = tensor.as_tensor_variable(actual_arg)
arg = safe_new(actual_arg)
if isinstance(arg, tensor.Constant):
# safe new returns a clone of the constants, but that is not
# what we need for initial states
arg = arg.type()
# Try to transfer test_value to the new variable
if config.compute_test_value != 'off':
try:
arg.tag.test_value = gof.Op._get_test_value(actual_arg)
except __HOLE__ as e:
if config.compute_test_value != 'ignore':
# No need to print a warning or raise an error now,
# it will be done when fn will be called.
_logger.info(('Cannot compute test value for the '
'inner function of scan, input value missing %s'),
e)
if getattr(init_out['initial'], 'name', None) is not None:
arg.name = init_out['initial'].name + '[t-1]'
# We need now to allocate space for storing the output and copy
# the initial state over. We do this using the expand function
# defined in scan utils
sit_sot_scan_inputs.append(
scan_utils.expand_empty(
tensor.unbroadcast(
tensor.shape_padleft(actual_arg), 0),
actual_n_steps
))
sit_sot_inner_slices.append(actual_arg)
if i in return_steps:
sit_sot_return_steps[n_sit_sot] = return_steps[i]
sit_sot_inner_inputs.append(arg)
sit_sot_rightOrder.append(i)
n_sit_sot += 1
elif init_out.get('taps', None):
if numpy.any(numpy.array(init_out.get('taps', [])) > 0):
# Make sure we do not have requests for future values of a
# sequence we can not provide such values
raise ValueError('Can not use future taps of outputs',
init_out)
# go through the taps
mintap = abs(numpy.min(init_out['taps']))
mit_sot_tap_array.append(init_out['taps'])
idx_offset = abs(numpy.min(init_out['taps']))
# Sequence
mit_sot_scan_inputs.append(
scan_utils.expand_empty(init_out['initial'][:mintap],
actual_n_steps))
if i in return_steps:
mit_sot_return_steps[n_mit_sot] = return_steps[i]
mit_sot_rightOrder.append(i)
n_mit_sot += 1
for k in init_out['taps']:
# create a new slice
actual_nw_slice = init_out['initial'][k + mintap]
_init_out_var = tensor.as_tensor_variable(init_out['initial'])
_init_out_var_slice = _init_out_var[k + mintap]
nw_slice = _init_out_var_slice.type()
# Try to transfer test_value to the new variable
if config.compute_test_value != 'off':
try:
nw_slice.tag.test_value = gof.Op._get_test_value(
_init_out_var_slice)
except AttributeError as e:
if config.compute_test_value != 'ignore':
# No need to print a warning or raise an error now,
# it will be done when fn will be called.
_logger.info(('Cannot compute test value for '
'the inner function of scan, input value '
'missing. %s'), e)
# give it a name for debugging and pretty printing
if getattr(init_out['initial'], 'name', None) is not None:
if k > 0:
nw_slice.name = (init_out['initial'].name +
'[t+%d]' % k)
elif k == 0:
nw_slice.name = init_out['initial'].name + '[t]'
else:
nw_slice.name = (init_out['initial'].name +
'[t%d]' % k)
mit_sot_inner_inputs.append(nw_slice)
mit_sot_inner_slices.append(actual_nw_slice)
# NOTE: there is another case, in which we do not want to provide
# any previous value of the output to the inner function (i.e.
# a map); in that case we do not have to do anything ..
# Re-order args
max_mit_sot = numpy.max([-1] + mit_sot_rightOrder) + 1
max_sit_sot = numpy.max([-1] + sit_sot_rightOrder) + 1
n_elems = numpy.max([max_mit_sot, max_sit_sot])
_ordered_args = [[] for x in xrange(n_elems)]
offset = 0
for idx in xrange(n_mit_sot):
n_inputs = len(mit_sot_tap_array[idx])
if n_fixed_steps in [1, -1]:
_ordered_args[mit_sot_rightOrder[idx]] = \
mit_sot_inner_slices[offset:offset + n_inputs]
else:
_ordered_args[mit_sot_rightOrder[idx]] = \
mit_sot_inner_inputs[offset:offset + n_inputs]
offset += n_inputs
for idx in xrange(n_sit_sot):
if n_fixed_steps in [1, -1]:
_ordered_args[sit_sot_rightOrder[idx]] = \
[sit_sot_inner_slices[idx]]
else:
_ordered_args[sit_sot_rightOrder[idx]] = \
[sit_sot_inner_inputs[idx]]
ordered_args = []
for ls in _ordered_args:
ordered_args += ls
if n_fixed_steps in [1, -1]:
args = (inner_slices +
ordered_args +
non_seqs)
else:
args = (inner_seqs +
ordered_args +
non_seqs)
# add only the non-shared variables and non-constants to the arguments of
# the dummy function [ a function should not get shared variables or
# constants as input ]
dummy_args = [arg for arg in args
if (not isinstance(arg, SharedVariable) and
not isinstance(arg, tensor.Constant))]
# when we apply the lambda expression we get a mixture of update rules
# and outputs that needs to be separated
condition, outputs, updates = scan_utils.get_updates_and_outputs(fn(*args))
if condition is not None:
as_while = True
else:
as_while = False
##
# Step 3. Check if we actually need scan and remove it if we don't
##
if n_fixed_steps in [1, -1]:
# We do not need to use the scan op anymore, so we can just return
# the outputs and updates we have
if condition is not None:
_logger.warning(('When the number of steps is fixed and equal '
'to 1, the provided stopping condition, ',
str(condition), ' is ignored'))
for pos, inner_out in enumerate(outputs):
# we need to see if we need to pad our sequences with an
# unbroadcastable dimension; case example : we return an
# output for which we want all intermediate. If n_steps is 1
# then, if we return the output as given by the inner function
# this will represent only a slice and it will have one
# dimension less.
if (isinstance(inner_out.type, tensor.TensorType) and
return_steps.get(pos, 0) != 1):
outputs[pos] = tensor.unbroadcast(
tensor.shape_padleft(inner_out), 0)
if len(outputs) == 1:
outputs = outputs[0]
return (outputs, updates)
##
# Step 4. Compile the dummy function
##
# We can now compile a dummy function just to see what shared variable
# we have and what are their update rules (note that the user has
# the option not to pass the shared variable to scan, so we need to
# pick them manually and add them to scan)
# make the compilation as fast as possible by not applying any
# optimization or conversion to C [ note this region is not important
# for performance so we can do stuff as unoptimal as we wish ]
# extract still missing inputs (there still might be so) and add them
# as non sequences at the end of our args
fake_nonseqs = [x.type() for x in non_seqs]
fake_outputs = scan_utils.clone(outputs,
replace=OrderedDict(izip(non_seqs,
fake_nonseqs)))
all_inputs = ifilter(
lambda x: (isinstance(x, gof.Variable) and
not isinstance(x, SharedVariable) and
not isinstance(x, gof.Constant)),
gof.graph.inputs(fake_outputs))
extra_inputs = [x for x in all_inputs if x not in args + fake_nonseqs]
non_seqs += extra_inputs
# Note we do not use all_inputs directly since the order of variables
# in args is quite important
dummy_args += extra_inputs
dummy_outs = outputs
if condition is not None:
dummy_outs.append(condition)
dummy_f = function(dummy_args,
dummy_outs,
updates=updates,
mode=compile.mode.Mode(linker='py',
optimizer=None),
on_unused_input='ignore',
profile=False)
##
# Step 5. Re-arrange inputs of scan into a stricter order
##
# Step 5.0 Check the outputs of the dummy function to see if they
# match with user provided data
# if the number of outputs to the function does not match the number of
# assumed outputs until now (provided by the user) there can be
# only one explanation: No information is provided for any of the
# outputs (i.e. we are dealing with a map)
tmp_dummy_f_outs = len(dummy_f.maker.outputs)
if as_while:
tmp_dummy_f_outs -= 1
if not (tmp_dummy_f_outs == n_outs or outs_info == []):
raise ValueError('Please provide None as outputs_info for '
'any output that does not feed back into '
'scan (i.e. it behaves like a map) ')
if outs_info == []:
n_outs = len(dummy_f.maker.outputs)
if as_while:
n_outs = n_outs - 1
outs_info = [OrderedDict() for x in xrange(n_outs)]
    # Step 5.1 Outputs with taps other than -1
for i, out in enumerate(outs_info):
if 'taps' in out and out['taps'] != [-1]:
mit_sot_inner_outputs.append(outputs[i])
# Step 5.2 Outputs with tap equal to -1
for i, out in enumerate(outs_info):
if 'taps' in out and out['taps'] == [-1]:
sit_sot_inner_outputs.append(outputs[i])
# Step 5.3 Outputs that correspond to update rules of shared variables
givens = OrderedDict()
n_shared_outs = 0
shared_scan_inputs = []
shared_inner_inputs = []
shared_inner_outputs = []
sit_sot_shared = []
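    # shared variables that have an update rule become either sit-sot states
    # (when their type supports an extra leading time dimension) or plain
    # shared outputs of the scan op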
for input in dummy_f.maker.expanded_inputs:
if isinstance(input.variable, SharedVariable) and input.update:
new_var = safe_new(input.variable)
if getattr(input.variable, 'name', None) is not None:
new_var.name = input.variable.name + '_copy'
if isinstance(new_var.type, ops.expandable_types):
sit_sot_inner_inputs.append(new_var)
sit_sot_scan_inputs.append(
scan_utils.expand_empty(
tensor.unbroadcast(
tensor.shape_padleft(input.variable), 0),
actual_n_steps))
tensor_update = tensor.as_tensor_variable(input.update)
sit_sot_inner_outputs.append(tensor_update)
                # Note that `pos` is not used as a plain index here; the
                # sign of `pos` is a flag indicating whether this output is
                # part of the update rules or part of the standard outputs
                # of scan.
                # If `pos` is positive it corresponds to a standard output
                # of scan and refers to the output of index `pos`. If `pos`
                # is negative it corresponds to an update rule and refers
                # to the update rule of index -1 - `pos`.
sit_sot_rightOrder.append(-1 - len(sit_sot_shared))
sit_sot_shared.append(input.variable)
givens[input.variable] = new_var
else:
shared_inner_inputs.append(new_var)
shared_scan_inputs.append(input.variable)
shared_inner_outputs.append(input.update)
givens[input.variable] = new_var
n_shared_outs += 1
n_sit_sot = len(sit_sot_inner_inputs)
# Step 5.4 Outputs with no taps used in the input
n_nit_sot = 0
nit_sot_inner_outputs = []
nit_sot_return_steps = OrderedDict()
nit_sot_rightOrder = []
for i, out in enumerate(outs_info):
        if 'taps' not in out:
nit_sot_inner_outputs.append(outputs[i])
if i in return_steps:
nit_sot_return_steps[n_nit_sot] = return_steps[i]
nit_sot_rightOrder.append(i)
n_nit_sot += 1
# Step 5.5 all other arguments including extra inputs
other_scan_args = []
other_inner_args = []
other_scan_args += [arg for arg in non_seqs
if (not isinstance(arg, SharedVariable) and
not isinstance(arg, tensor.Constant))]
# Step 5.6 all shared variables with no update rules
other_inner_args += [safe_new(arg, '_copy') for arg in non_seqs
if (not isinstance(arg, SharedVariable) and
not isinstance(arg, tensor.Constant))]
givens.update(OrderedDict(izip(other_scan_args, other_inner_args)))
if strict:
non_seqs_set = set(non_sequences if non_sequences is not None else [])
other_shared_scan_args = [arg.variable for arg
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update and
arg.variable in non_seqs_set)]
other_shared_inner_args = [safe_new(arg.variable, '_copy') for arg
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update and
arg.variable in non_seqs_set)]
else:
other_shared_scan_args = [arg.variable for arg
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update)]
other_shared_inner_args = [safe_new(arg.variable, '_copy') for arg
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update)]
givens.update(OrderedDict(izip(other_shared_scan_args,
other_shared_inner_args)))
##
# Step 6. Re-order the outputs and clone them replacing things
# using the givens
##
inner_inputs = (inner_seqs +
mit_mot_inner_inputs +
mit_sot_inner_inputs +
sit_sot_inner_inputs +
shared_inner_inputs +
other_shared_inner_args +
other_inner_args)
inner_outs = (mit_mot_inner_outputs +
mit_sot_inner_outputs +
sit_sot_inner_outputs +
nit_sot_inner_outputs +
shared_inner_outputs)
if condition is not None:
inner_outs.append(condition)
    # Cuda and Gpuarray are imported here, instead of at the top of
    # the file, because that would force dependencies on the user that we
    # might not want to impose. Currently we are working on removing the
    # dependencies on sandbox code completely.
from theano.sandbox import cuda, gpuarray
if cuda.cuda_available or gpuarray.pygpu_activated:
        # very often we end up in this situation when we want to
        # replace w with w_copy, where w is a GPU variable
        # and w_copy is a TensorType variable. This happens because shared
        # variables are placed on the GPU right away.
new_givens = OrderedDict()
for w, w_copy in iteritems(givens):
if ((isinstance(w.type, cuda.CudaNdarrayType) or
isinstance(w.type, gpuarray.GpuArrayType)) and
isinstance(w_copy.type, tensor.TensorType)):
for o in inner_outs:
new_givens = traverse(o, w, w_copy, new_givens)
else:
new_givens[w] = w_copy
else:
new_givens = givens
new_outs = scan_utils.clone(inner_outs, replace=new_givens)
##
# Step 7. Create the Scan Op
##
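    # sit-sot outputs always use a single tap of -1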
tap_array = mit_sot_tap_array + [[-1] for x in xrange(n_sit_sot)]
if allow_gc is None:
allow_gc = config.scan.allow_gc
info = OrderedDict()
info['tap_array'] = tap_array
info['n_seqs'] = n_seqs
info['n_mit_mot'] = n_mit_mot
info['n_mit_mot_outs'] = n_mit_mot_outs
info['mit_mot_out_slices'] = mit_mot_out_slices
info['n_mit_sot'] = n_mit_sot
info['n_sit_sot'] = n_sit_sot
info['n_shared_outs'] = n_shared_outs
info['n_nit_sot'] = n_nit_sot
info['truncate_gradient'] = truncate_gradient
info['name'] = name
info['mode'] = mode
info['destroy_map'] = OrderedDict()
info['gpu'] = False
info['as_while'] = as_while
info['profile'] = profile
info['allow_gc'] = allow_gc
info['strict'] = strict
local_op = scan_op.Scan(inner_inputs, new_outs, info)
##
# Step 8. Compute the outputs using the scan op
##
_scan_inputs = (scan_seqs +
mit_mot_scan_inputs +
mit_sot_scan_inputs +
sit_sot_scan_inputs +
shared_scan_inputs +
[actual_n_steps for x in xrange(n_nit_sot)] +
other_shared_scan_args +
other_scan_args)
scan_inputs = []
for arg in [actual_n_steps] + _scan_inputs:
try:
arg = tensor.as_tensor_variable(arg)
except TypeError:
            # This happens e.g. for random states, but it is also a good way
            # to make sure that no input is a CUDA ndarray
pass
scan_inputs += [arg]
scan_outs = local_op(*scan_inputs)
if type(scan_outs) not in (list, tuple):
scan_outs = [scan_outs]
##
# Step 9. Figure out which outs are update rules for shared variables
# and so on ...
##
update_map = OrderedUpdates()
def remove_dimensions(outs, steps_return, offsets=None):
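        # keep only the last `steps_return[idx]` steps when the user asked
        # for a fixed number of return steps; otherwise strip the leading
        # rows that hold the initial states (given by `offsets`)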
out_ls = []
for idx, out in enumerate(outs):
if idx in steps_return:
if steps_return[idx] > 1:
out_ls.append(out[-steps_return[idx]:])
else:
out_ls.append(out[-1])
else:
if offsets is None:
out_ls.append(out)
else:
out_ls.append(out[offsets[idx]:])
return out_ls
offset = n_mit_mot
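    # mit-sot outputs are stored with abs(min(taps)) rows of initial state
    # at the front; those rows are stripped before being returned to the user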
offsets = [abs(numpy.min(x)) for x in mit_sot_tap_array]
mit_sot_outs = remove_dimensions(
scan_outs[offset:offset + n_mit_sot],
mit_sot_return_steps,
offsets)
offset += n_mit_sot
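    # sit-sot outputs carry exactly one leading row of initial state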
offsets = [1 for x in xrange(n_sit_sot)]
sit_sot_outs = remove_dimensions(
scan_outs[offset:offset + n_sit_sot],
sit_sot_return_steps,
offsets)
offset += n_sit_sot
nit_sot_outs = remove_dimensions(
scan_outs[offset:offset + n_nit_sot],
nit_sot_return_steps)
offset += n_nit_sot
for idx, update_rule in enumerate(
scan_outs[offset:offset + n_shared_outs]):
update_map[shared_scan_inputs[idx]] = update_rule
_scan_out_list = (mit_sot_outs +
sit_sot_outs +
nit_sot_outs)
# Step 10. I need to reorder the outputs to be in the order expected by
# the user
rightOrder = (mit_sot_rightOrder +
sit_sot_rightOrder +
nit_sot_rightOrder)
scan_out_list = [None] * len(rightOrder)
for idx, pos in enumerate(rightOrder):
if pos >= 0:
scan_out_list[pos] = _scan_out_list[idx]
else:
            # Note that `pos` is not used as a plain index here; the sign of
            # `pos` is a flag indicating whether this output is part of the
            # update rules or part of the standard outputs of scan.
            # If `pos` is positive it corresponds to a standard output of
            # scan and refers to the output of index `pos`. If `pos` is
            # negative it corresponds to an update rule and refers to the
            # update rule of index -1 - `pos`.
update_map[sit_sot_shared[abs(pos) - 1]] = _scan_out_list[idx][-1]
scan_out_list = [x for x in scan_out_list if x is not None]
if len(scan_out_list) == 1:
scan_out_list = scan_out_list[0]
elif len(scan_out_list) == 0:
scan_out_list = None
return (scan_out_list, update_map) | AttributeError | dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/scan_module/scan.py/scan |