_id (string, 2–7 chars) | title (string, 1–88 chars) | partition (string, 3 classes) | text (string, 75–19.8k chars) | language (string, 1 class) | meta_information (dict)
---|---|---|---|---|---|
q4100
|
LZMAFile.peek
|
train
|
def peek(self, size=-1):
"""Return buffered data without advancing the file position.
Always returns at least one byte of data, unless at EOF.
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
if self._mode == _MODE_READ_EOF or not self._fill_buffer():
return b""
return self._buffer
|
python
|
{
"resource": ""
}
|
q4101
|
LZMAFile.read
|
train
|
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
If size is negative or omitted, read until EOF is reached.
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
raise TypeError("Read size should be an integer, not None")
if self._mode == _MODE_READ_EOF or size == 0:
return b""
elif size < 0:
return self._read_all()
else:
return self._read_block(size)
|
python
|
{
"resource": ""
}
|
q4102
|
LZMAFile.read1
|
train
|
def read1(self, size=-1):
"""Read up to size uncompressed bytes, while trying to avoid
making multiple reads from the underlying stream.
Returns b"" if the file is at EOF.
"""
# Usually, read1() calls _fp.read() at most once. However, sometimes
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
raise TypeError("Read size should be an integer, not None")
if (size == 0 or self._mode == _MODE_READ_EOF or
not self._fill_buffer()):
return b""
if 0 < size < len(self._buffer):
data = self._buffer[:size]
self._buffer = self._buffer[size:]
else:
data = self._buffer
self._buffer = None
self._pos += len(data)
return data
|
python
|
{
"resource": ""
}
|
q4103
|
LZMAFile.write
|
train
|
def write(self, data):
"""Write a bytes object to the file.
Returns the number of uncompressed bytes written, which is
always len(data). Note that due to buffering, the file on disk
may not reflect the data written until close() is called.
"""
self._check_can_write()
compressed = self._compressor.compress(data)
self._fp.write(compressed)
self._pos += len(data)
return len(data)
|
python
|
{
"resource": ""
}
|
q4104
|
LZMAFile.seek
|
train
|
def seek(self, offset, whence=0):
"""Change the file position.
The new position is specified by offset, relative to the
position indicated by whence. Possible values for whence are:
0: start of stream (default): offset must not be negative
1: current stream position
2: end of stream; offset must not be positive
Returns the new file position.
    Note that seeking is emulated, so depending on the parameters,
this operation may be extremely slow.
"""
self._check_can_seek()
# Recalculate offset as an absolute file position.
if whence == 0:
pass
elif whence == 1:
offset = self._pos + offset
elif whence == 2:
# Seeking relative to EOF - we need to know the file's size.
if self._size < 0:
self._read_all(return_data=False)
offset = self._size + offset
else:
raise ValueError("Invalid value for whence: {}".format(whence))
# Make it so that offset is the number of bytes to skip forward.
if offset is None:
        # This is not needed on Python 3 where the comparison to self._pos
        # will fail with a TypeError.
raise TypeError("Seek offset should be an integer, not None")
if offset < self._pos:
self._rewind()
else:
offset -= self._pos
# Read and discard data until we reach the desired position.
if self._mode != _MODE_READ_EOF:
self._read_block(offset, return_data=False)
return self._pos
|
python
|
{
"resource": ""
}
|
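A minimal usage sketch for the LZMAFile methods above, written against the stdlib lzma API; "example.xz" is a scratch file created by the example itself.

import lzma

with lzma.open("example.xz", "wb") as f:
    assert f.write(b"hello world") == 11   # write() returns len(data)

with lzma.LZMAFile("example.xz", "rb") as f:
    f.peek()         # returns buffered bytes without moving the position
    f.seek(6)        # emulated seek: rewinds and/or decompresses forward
    print(f.read())  # b'world'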
q4105
|
Repository.create
|
train
|
def create(cls, user_id, github_id=None, name=None, **kwargs):
"""Create the repository."""
with db.session.begin_nested():
obj = cls(user_id=user_id, github_id=github_id, name=name,
**kwargs)
db.session.add(obj)
return obj
|
python
|
{
"resource": ""
}
|
q4106
|
Repository.get
|
train
|
def get(cls, user_id, github_id=None, name=None, check_owner=True):
"""Return a repository.
:param integer user_id: User identifier.
:param integer github_id: GitHub repository identifier.
:param str name: GitHub repository full name.
:returns: The repository object.
:raises: :py:exc:`~sqlalchemy.orm.exc.NoResultFound`: if the repository
doesn't exist.
:raises: :py:exc:`~sqlalchemy.orm.exc.MultipleResultsFound`: if
multiple repositories with the specified GitHub id and/or name
exist.
:raises: :py:exc:`RepositoryAccessError`: if the user is not the owner
of the repository.
"""
repo = cls.query.filter((Repository.github_id == github_id) |
(Repository.name == name)).one()
if (check_owner and repo and repo.user_id and
repo.user_id != int(user_id)):
raise RepositoryAccessError(
u'User {user} cannot access repository {repo}({repo_id}).'
.format(user=user_id, repo=name, repo_id=github_id)
)
return repo
|
python
|
{
"resource": ""
}
|
q4107
|
Repository.enable
|
train
|
def enable(cls, user_id, github_id, name, hook):
"""Enable webhooks for a repository.
    If the repository does not exist, it will be created.
    :param user_id: User identifier.
    :param github_id: GitHub repository identifier.
:param name: Fully qualified name of the repository.
:param hook: GitHub hook identifier.
"""
try:
repo = cls.get(user_id, github_id=github_id, name=name)
except NoResultFound:
repo = cls.create(user_id=user_id, github_id=github_id, name=name)
repo.hook = hook
repo.user_id = user_id
return repo
|
python
|
{
"resource": ""
}
|
q4108
|
Repository.disable
|
train
|
def disable(cls, user_id, github_id, name):
"""Disable webhooks for a repository.
Disables the webhook from a repository if it exists in the DB.
:param user_id: User identifier.
    :param github_id: GitHub identifier of the repository.
:param name: Fully qualified name of the repository.
"""
repo = cls.get(user_id, github_id=github_id, name=name)
repo.hook = None
repo.user_id = None
return repo
|
python
|
{
"resource": ""
}
|
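Taken together, create/get/enable/disable handle the hook bookkeeping for a repository row. A sketch of the lifecycle, assuming a Flask application context with invenio_db configured; all identifiers below are hypothetical.

# Hypothetical IDs; assumes an app context with invenio_db set up.
repo = Repository.enable(user_id=1, github_id=4242,
                         name='org/repo', hook=1001)  # creates the row if missing
db.session.commit()

repo = Repository.disable(user_id=1, github_id=4242, name='org/repo')
db.session.commit()  # hook and ownership cleared; the row itself is kept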
q4109
|
Release.create
|
train
|
def create(cls, event):
"""Create a new Release model."""
# Check if the release has already been received
release_id = event.payload['release']['id']
existing_release = Release.query.filter_by(
release_id=release_id,
).first()
if existing_release:
raise ReleaseAlreadyReceivedError(
u'{release} has already been received.'
.format(release=existing_release)
)
# Create the Release
repo_id = event.payload['repository']['id']
repo = Repository.get(user_id=event.user_id, github_id=repo_id)
if repo.enabled:
with db.session.begin_nested():
release = cls(
release_id=release_id,
tag=event.payload['release']['tag_name'],
repository=repo,
event=event,
status=ReleaseStatus.RECEIVED,
)
db.session.add(release)
return release
else:
current_app.logger.warning(
u'Release creation attempt on disabled {repo}.'
.format(repo=repo)
)
raise RepositoryDisabledError(
u'{repo} is not enabled for webhooks.'.format(repo=repo)
)
|
python
|
{
"resource": ""
}
|
q4110
|
Release.record
|
train
|
def record(self):
"""Get Record object."""
if self.recordmetadata:
return Record(self.recordmetadata.json, model=self.recordmetadata)
else:
return None
|
python
|
{
"resource": ""
}
|
q4111
|
get_badge_image_url
|
train
|
def get_badge_image_url(pid, ext='svg'):
"""Return the badge for a DOI."""
return url_for('invenio_formatter_badges.badge',
title=pid.pid_type, value=pid.pid_value, ext=ext)
|
python
|
{
"resource": ""
}
|
q4112
|
index_old
|
train
|
def index_old(user_id, repo_name):
"""Generate a badge for a specific GitHub repository."""
pid = get_pid_of_latest_release_or_404(name=repo_name)
return redirect(get_badge_image_url(pid))
|
python
|
{
"resource": ""
}
|
q4113
|
disconnect_github
|
train
|
def disconnect_github(access_token, repo_hooks):
"""Uninstall webhooks."""
# Note at this point the remote account and all associated data have
    # already been deleted. The Celery task is passed the access_token to
    # perform a final cleanup and afterwards revoke the token remotely.
import github3
from .api import GitHubAPI
try:
gh = github3.login(token=access_token)
for repo_id, repo_hook in repo_hooks:
ghrepo = gh.repository_with_id(repo_id)
if ghrepo:
hook = ghrepo.hook(repo_hook)
if hook and hook.delete():
info_msg = u'Deleted hook {hook} from {repo}'.format(
hook=hook.id, repo=ghrepo.full_name)
current_app.logger.info(info_msg)
# If we finished our clean-up successfully, we can revoke the token
GitHubAPI.revoke_token(access_token)
except Exception as exc:
# Retry in case GitHub may be down...
disconnect_github.retry(exc=exc)
|
python
|
{
"resource": ""
}
|
q4114
|
sync_hooks
|
train
|
def sync_hooks(user_id, repositories):
"""Sync repository hooks for a user."""
from .api import GitHubAPI
try:
# Sync hooks
gh = GitHubAPI(user_id=user_id)
for repo_id in repositories:
try:
with db.session.begin_nested():
gh.sync_repo_hook(repo_id)
# We commit per repository, because while the task is running
# the user might enable/disable a hook.
db.session.commit()
except RepositoryAccessError as e:
current_app.logger.warning(e.message, exc_info=True)
except NoResultFound:
pass # Repository not in DB yet
except Exception as exc:
sync_hooks.retry(exc=exc)
|
python
|
{
"resource": ""
}
|
q4115
|
process_release
|
train
|
def process_release(release_id, verify_sender=False):
"""Process a received Release."""
from invenio_db import db
from invenio_rest.errors import RESTException
from .errors import InvalidSenderError
from .models import Release, ReleaseStatus
from .proxies import current_github
release_model = Release.query.filter(
Release.release_id == release_id,
Release.status.in_([ReleaseStatus.RECEIVED, ReleaseStatus.FAILED]),
).one()
release_model.status = ReleaseStatus.PROCESSING
db.session.commit()
release = current_github.release_api_class(release_model)
if verify_sender and not release.verify_sender():
raise InvalidSenderError(
u'Invalid sender for event {event} for user {user}'
.format(event=release.event.id, user=release.event.user_id)
)
def _get_err_obj(msg):
"""Generate the error entry with a Sentry ID."""
err = {'errors': msg}
if hasattr(g, 'sentry_event_id'):
err['error_id'] = str(g.sentry_event_id)
return err
try:
release.publish()
release.model.status = ReleaseStatus.PUBLISHED
except RESTException as rest_ex:
release.model.errors = json.loads(rest_ex.get_body())
release.model.status = ReleaseStatus.FAILED
current_app.logger.exception(
u'Error while processing {release}'.format(release=release.model))
# TODO: We may want to handle GitHub errors differently in the future
# except GitHubError as github_ex:
# release.model.errors = {'error': str(e)}
# release.model.status = ReleaseStatus.FAILED
# current_app.logger.exception(
# 'Error while processing {release}'
# .format(release=release.model))
except CustomGitHubMetadataError as e:
release.model.errors = _get_err_obj(str(e))
release.model.status = ReleaseStatus.FAILED
current_app.logger.exception(
u'Error while processing {release}'.format(release=release.model))
except Exception:
        release.model.errors = _get_err_obj('Unknown error occurred.')
release.model.status = ReleaseStatus.FAILED
current_app.logger.exception(
u'Error while processing {release}'.format(release=release.model))
finally:
db.session.commit()
|
python
|
{
"resource": ""
}
|
q4116
|
naturaltime
|
train
|
def naturaltime(val):
"""Get humanized version of time."""
val = val.replace(tzinfo=pytz.utc) \
if isinstance(val, datetime) else parse(val)
now = datetime.utcnow().replace(tzinfo=pytz.utc)
return humanize.naturaltime(now - val)
|
python
|
{
"resource": ""
}
|
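A quick demonstration of naturaltime() as defined above; it accepts either a datetime (treated as UTC) or a parseable timestamp string.

from datetime import datetime, timedelta

print(naturaltime(datetime.utcnow() - timedelta(hours=3)))  # '3 hours ago'
print(naturaltime('2015-01-01T00:00:00+00:00'))             # e.g. '4 years ago'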
q4117
|
index
|
train
|
def index():
"""Display list of the user's repositories."""
github = GitHubAPI(user_id=current_user.id)
token = github.session_token
ctx = dict(connected=False)
if token:
# The user is authenticated and the token we have is still valid.
if github.account.extra_data.get('login') is None:
github.init_account()
db.session.commit()
# Sync if needed
if request.method == 'POST' or github.check_sync():
# When we're in an XHR request, we want to synchronously sync hooks
github.sync(async_hooks=(not request.is_xhr))
db.session.commit()
# Generate the repositories view object
extra_data = github.account.extra_data
repos = extra_data['repos']
if repos:
# 'Enhance' our repos dict, from our database model
db_repos = Repository.query.filter(
Repository.github_id.in_([int(k) for k in repos.keys()]),
).all()
for repo in db_repos:
repos[str(repo.github_id)]['instance'] = repo
repos[str(repo.github_id)]['latest'] = GitHubRelease(
repo.latest_release())
last_sync = humanize.naturaltime(
(utcnow() - parse_timestamp(extra_data['last_sync'])))
ctx.update({
'connected': True,
'repos': sorted(repos.items(), key=lambda x: x[1]['full_name']),
'last_sync': last_sync,
})
return render_template(current_app.config['GITHUB_TEMPLATE_INDEX'], **ctx)
|
python
|
{
"resource": ""
}
|
q4118
|
hook
|
train
|
def hook():
"""Install or remove GitHub webhook."""
repo_id = request.json['id']
github = GitHubAPI(user_id=current_user.id)
repos = github.account.extra_data['repos']
if repo_id not in repos:
abort(404)
if request.method == 'DELETE':
try:
if github.remove_hook(repo_id, repos[repo_id]['full_name']):
db.session.commit()
return '', 204
else:
abort(400)
except Exception:
abort(403)
elif request.method == 'POST':
try:
if github.create_hook(repo_id, repos[repo_id]['full_name']):
db.session.commit()
return '', 201
else:
abort(400)
except Exception:
abort(403)
else:
abort(400)
|
python
|
{
"resource": ""
}
|
q4119
|
GitHubAPI.access_token
|
train
|
def access_token(self):
"""Return OAuth access token."""
if self.user_id:
return RemoteToken.get(
self.user_id, self.remote.consumer_key
).access_token
return self.remote.get_request_token()[0]
|
python
|
{
"resource": ""
}
|
q4120
|
GitHubAPI.session_token
|
train
|
def session_token(self):
"""Return OAuth session token."""
session_token = None
if self.user_id is not None:
session_token = token_getter(self.remote)
if session_token:
token = RemoteToken.get(
self.user_id, self.remote.consumer_key,
access_token=session_token[0]
)
return token
return None
|
python
|
{
"resource": ""
}
|
q4121
|
GitHubAPI.webhook_url
|
train
|
def webhook_url(self):
"""Return the url to be used by a GitHub webhook."""
webhook_token = ProviderToken.query.filter_by(
id=self.account.extra_data['tokens']['webhook']
).first()
if webhook_token:
wh_url = current_app.config.get('GITHUB_WEBHOOK_RECEIVER_URL')
if wh_url:
return wh_url.format(token=webhook_token.access_token)
else:
raise RuntimeError('You must set GITHUB_WEBHOOK_RECEIVER_URL.')
|
python
|
{
"resource": ""
}
|
q4122
|
GitHubAPI.init_account
|
train
|
def init_account(self):
"""Setup a new GitHub account."""
ghuser = self.api.me()
# Setup local access tokens to be used by the webhooks
hook_token = ProviderToken.create_personal(
'github-webhook',
self.user_id,
scopes=['webhooks:event'],
is_internal=True,
)
# Initial structure of extra data
self.account.extra_data = dict(
id=ghuser.id,
login=ghuser.login,
name=ghuser.name,
tokens=dict(
webhook=hook_token.id,
),
repos=dict(),
last_sync=iso_utcnow(),
)
db.session.add(self.account)
# Sync data from GitHub, but don't check repository hooks yet.
self.sync(hooks=False)
|
python
|
{
"resource": ""
}
|
q4123
|
GitHubAPI.sync
|
train
|
def sync(self, hooks=True, async_hooks=True):
"""Synchronize user repositories.
:param bool hooks: True for syncing hooks.
:param bool async_hooks: True for sending of an asynchronous task to
sync hooks.
.. note::
Syncing happens from GitHub's direction only. This means that we
consider the information on GitHub as valid, and we overwrite our
own state based on this information.
"""
active_repos = {}
github_repos = {repo.id: repo for repo in self.api.repositories()
if repo.permissions['admin']}
for gh_repo_id, gh_repo in github_repos.items():
active_repos[gh_repo_id] = {
'id': gh_repo_id,
'full_name': gh_repo.full_name,
'description': gh_repo.description,
}
if hooks:
self._sync_hooks(list(active_repos.keys()),
asynchronous=async_hooks)
# Update changed names for repositories stored in DB
db_repos = Repository.query.filter(
Repository.user_id == self.user_id,
Repository.github_id.in_(github_repos.keys())
)
for repo in db_repos:
gh_repo = github_repos.get(repo.github_id)
if gh_repo and repo.name != gh_repo.full_name:
repo.name = gh_repo.full_name
db.session.add(repo)
# Remove ownership from repositories that the user has no longer
# 'admin' permissions, or have been deleted.
Repository.query.filter(
Repository.user_id == self.user_id,
~Repository.github_id.in_(github_repos.keys())
).update(dict(user_id=None, hook=None), synchronize_session=False)
# Update repos and last sync
self.account.extra_data.update(dict(
repos=active_repos,
last_sync=iso_utcnow(),
))
self.account.extra_data.changed()
db.session.add(self.account)
|
python
|
{
"resource": ""
}
|
q4124
|
GitHubAPI._sync_hooks
|
train
|
def _sync_hooks(self, repos, asynchronous=True):
"""Check if a hooks sync task needs to be started."""
if not asynchronous:
for repo_id in repos:
try:
with db.session.begin_nested():
self.sync_repo_hook(repo_id)
db.session.commit()
except RepositoryAccessError as e:
current_app.logger.warning(e.message, exc_info=True)
except NoResultFound:
pass # Repository not in DB yet
else:
# FIXME: We have to commit, in order to have all necessary data?
db.session.commit()
sync_hooks.delay(self.user_id, repos)
|
python
|
{
"resource": ""
}
|
q4125
|
GitHubAPI.sync_repo_hook
|
train
|
def sync_repo_hook(self, repo_id):
"""Sync a GitHub repo's hook with the locally stored repo."""
# Get the hook that we may have set in the past
gh_repo = self.api.repository_with_id(repo_id)
hooks = (hook.id for hook in gh_repo.hooks()
if hook.config.get('url', '') == self.webhook_url)
hook_id = next(hooks, None)
# If hook on GitHub exists, get or create corresponding db object and
# enable the hook. Otherwise remove the old hook information.
if hook_id:
Repository.enable(user_id=self.user_id,
github_id=gh_repo.id,
name=gh_repo.full_name,
hook=hook_id)
else:
Repository.disable(user_id=self.user_id,
github_id=gh_repo.id,
name=gh_repo.full_name)
|
python
|
{
"resource": ""
}
|
q4126
|
GitHubAPI.check_sync
|
train
|
def check_sync(self):
"""Check if sync is required based on last sync date."""
# If refresh interval is not specified, we should refresh every time.
expiration = utcnow()
refresh_td = current_app.config.get('GITHUB_REFRESH_TIMEDELTA')
if refresh_td:
expiration -= refresh_td
last_sync = parse_timestamp(self.account.extra_data['last_sync'])
return last_sync < expiration
|
python
|
{
"resource": ""
}
|
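check_sync() compares the stored last_sync timestamp against now minus a configured window. A sketch of the setting it reads, assuming a Flask app object named app:

from datetime import timedelta

# With the setting unset (falsy), expiration stays at utcnow(), so a
# sync is considered due on every call.
app.config['GITHUB_REFRESH_TIMEDELTA'] = timedelta(minutes=60)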
q4127
|
GitHubAPI.create_hook
|
train
|
def create_hook(self, repo_id, repo_name):
"""Create repository hook."""
config = dict(
url=self.webhook_url,
content_type='json',
secret=current_app.config['GITHUB_SHARED_SECRET'],
insecure_ssl='1' if current_app.config['GITHUB_INSECURE_SSL']
else '0',
)
ghrepo = self.api.repository_with_id(repo_id)
    if ghrepo:
        hook = None  # ensure 'hook' is defined for the finally block below
        try:
hook = ghrepo.create_hook(
'web', # GitHub identifier for webhook service
config,
events=['release'],
)
except github3.GitHubError as e:
# Check if hook is already installed
hook_errors = (m for m in e.errors
if m['code'] == 'custom' and
m['resource'] == 'Hook')
if next(hook_errors, None):
hooks = (h for h in ghrepo.hooks()
if h.config.get('url', '') == config['url'])
hook = next(hooks, None)
if hook:
hook.edit(config=config, events=['release'])
finally:
if hook:
Repository.enable(user_id=self.user_id,
github_id=repo_id,
name=repo_name,
hook=hook.id)
return True
return False
|
python
|
{
"resource": ""
}
|
q4128
|
GitHubAPI.remove_hook
|
train
|
def remove_hook(self, repo_id, name):
"""Remove repository hook."""
ghrepo = self.api.repository_with_id(repo_id)
if ghrepo:
hooks = (h for h in ghrepo.hooks()
if h.config.get('url', '') == self.webhook_url)
hook = next(hooks, None)
if not hook or hook.delete():
Repository.disable(user_id=self.user_id,
github_id=repo_id,
name=name)
return True
return False
|
python
|
{
"resource": ""
}
|
q4129
|
GitHubAPI._dev_api
|
train
|
def _dev_api(cls):
"""Get a developer instance for GitHub API access."""
gh = github3.GitHub()
gh.set_client_id(cls.remote.consumer_key, cls.remote.consumer_secret)
return gh
|
python
|
{
"resource": ""
}
|
q4130
|
GitHubRelease.deposit_class
|
train
|
def deposit_class(self):
"""Return a class implementing `publish` method."""
cls = current_app.config['GITHUB_DEPOSIT_CLASS']
if isinstance(cls, string_types):
cls = import_string(cls)
assert isinstance(cls, type)
return cls
|
python
|
{
"resource": ""
}
|
q4131
|
GitHubRelease.repo_model
|
train
|
def repo_model(self):
"""Return repository model from database."""
return Repository.query.filter_by(
user_id=self.event.user_id,
github_id=self.repository['id'],
).one()
|
python
|
{
"resource": ""
}
|
q4132
|
GitHubRelease.title
|
train
|
def title(self):
"""Extract title from a release."""
if self.event:
if self.release['name']:
return u'{0}: {1}'.format(
self.repository['full_name'], self.release['name']
)
return u'{0} {1}'.format(self.repo_model.name, self.model.tag)
|
python
|
{
"resource": ""
}
|
q4133
|
GitHubRelease.description
|
train
|
def description(self):
"""Extract description from a release."""
if self.release.get('body'):
return markdown(self.release['body'])
elif self.repository.get('description'):
return self.repository['description']
return 'No description provided.'
|
python
|
{
"resource": ""
}
|
q4134
|
GitHubRelease.related_identifiers
|
train
|
def related_identifiers(self):
"""Yield related identifiers."""
yield dict(
identifier=u'https://github.com/{0}/tree/{1}'.format(
self.repository['full_name'], self.release['tag_name']
),
relation='isSupplementTo',
)
|
python
|
{
"resource": ""
}
|
q4135
|
GitHubRelease.defaults
|
train
|
def defaults(self):
"""Return default metadata."""
return dict(
access_right='open',
description=self.description,
license='other-open',
publication_date=self.release['published_at'][:10],
related_identifiers=list(self.related_identifiers),
version=self.version,
title=self.title,
upload_type='software',
)
|
python
|
{
"resource": ""
}
|
q4136
|
GitHubRelease.extra_metadata
|
train
|
def extra_metadata(self):
"""Get extra metadata for file in repository."""
return get_extra_metadata(
self.gh.api,
self.repository['owner']['login'],
self.repository['name'],
self.release['tag_name'],
)
|
python
|
{
"resource": ""
}
|
q4137
|
GitHubRelease.files
|
train
|
def files(self):
"""Extract files to download from GitHub payload."""
tag_name = self.release['tag_name']
repo_name = self.repository['full_name']
zipball_url = self.release['zipball_url']
filename = u'{name}-{tag}.zip'.format(name=repo_name, tag=tag_name)
response = self.gh.api.session.head(zipball_url)
assert response.status_code == 302, \
u'Could not retrieve archive from GitHub: {0}'.format(zipball_url)
yield filename, zipball_url
|
python
|
{
"resource": ""
}
|
q4138
|
GitHubRelease.metadata
|
train
|
def metadata(self):
"""Return extracted metadata."""
output = dict(self.defaults)
output.update(self.extra_metadata)
return output
|
python
|
{
"resource": ""
}
|
q4139
|
GitHubRelease.pid
|
train
|
def pid(self):
"""Get PID object for the Release record."""
if self.model.status == ReleaseStatus.PUBLISHED and self.record:
fetcher = current_pidstore.fetchers[
current_app.config.get('GITHUB_PID_FETCHER')]
return fetcher(self.record.id, self.record)
|
python
|
{
"resource": ""
}
|
q4140
|
GitHubRelease.publish
|
train
|
def publish(self):
"""Publish GitHub release as record."""
with db.session.begin_nested():
deposit = self.deposit_class.create(self.metadata)
deposit['_deposit']['created_by'] = self.event.user_id
deposit['_deposit']['owners'] = [self.event.user_id]
# Fetch the deposit files
for key, url in self.files:
deposit.files[key] = self.gh.api.session.get(
url, stream=True).raw
deposit.publish()
recid, record = deposit.fetch_published()
self.model.recordmetadata = record.model
|
python
|
{
"resource": ""
}
|
q4141
|
GitHubReceiver.run
|
train
|
def run(self, event):
"""Process an event.
.. note::
We should only do basic server side operation here, since we send
the rest of the processing to a Celery task which will be mainly
accessing the GitHub API.
"""
repo_id = event.payload['repository']['id']
# Ping event - update the ping timestamp of the repository
if 'hook_id' in event.payload and 'zen' in event.payload:
repository = Repository.query.filter_by(
github_id=repo_id
).one()
repository.ping = datetime.utcnow()
db.session.commit()
return
# Release event
if 'release' in event.payload and \
event.payload.get('action') == 'published':
try:
release = Release.create(event)
db.session.commit()
# FIXME: If we want to skip the processing, we should do it
# here (eg. We're in the middle of a migration).
# if current_app.config['GITHUB_PROCESS_RELEASES']:
process_release.delay(
release.release_id,
verify_sender=self.verify_sender
)
except (ReleaseAlreadyReceivedError, RepositoryDisabledError) as e:
event.response_code = 409
event.response = dict(message=str(e), status=409)
except RepositoryAccessError as e:
            event.response_code = 403
event.response = dict(message=str(e), status=403)
|
python
|
{
"resource": ""
}
|
q4142
|
parse_timestamp
|
train
|
def parse_timestamp(x):
"""Parse ISO8601 formatted timestamp."""
dt = dateutil.parser.parse(x)
if dt.tzinfo is None:
dt = dt.replace(tzinfo=pytz.utc)
return dt
|
python
|
{
"resource": ""
}
|
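parse_timestamp() treats naive timestamps as UTC and preserves explicit offsets; a quick check using the function above.

print(parse_timestamp('2019-01-01T12:00:00'))
# 2019-01-01 12:00:00+00:00 (naive input assumed to be UTC)
print(parse_timestamp('2019-01-01T12:00:00+02:00'))
# 2019-01-01 12:00:00+02:00 (explicit offset preserved)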
q4143
|
get_extra_metadata
|
train
|
def get_extra_metadata(gh, owner, repo_name, ref):
"""Get the metadata file."""
try:
content = gh.repository(owner, repo_name).file_contents(
path=current_app.config['GITHUB_METADATA_FILE'], ref=ref
)
if not content:
            # File does not exist in the given ref
return {}
return json.loads(content.decoded.decode('utf-8'))
except ValueError:
raise CustomGitHubMetadataError(
u'Metadata file "{file}" is not valid JSON.'
.format(file=current_app.config['GITHUB_METADATA_FILE'])
)
|
python
|
{
"resource": ""
}
|
q4144
|
get_owner
|
train
|
def get_owner(gh, owner):
"""Get owner of repository as a creator."""
try:
u = gh.user(owner)
name = u.name or u.login
company = u.company or ''
return [dict(name=name, affiliation=company)]
except Exception:
return None
|
python
|
{
"resource": ""
}
|
q4145
|
get_contributors
|
train
|
def get_contributors(gh, repo_id):
"""Get list of contributors to a repository."""
try:
# FIXME: Use `github3.Repository.contributors` to get this information
contrib_url = gh.repository_with_id(repo_id).contributors_url
r = requests.get(contrib_url)
if r.status_code == 200:
contributors = r.json()
def get_author(contributor):
r = requests.get(contributor['url'])
if r.status_code == 200:
data = r.json()
return dict(
name=(data['name'] if 'name' in data and data['name']
else data['login']),
affiliation=data.get('company') or '',
)
# Sort according to number of contributions
contributors.sort(key=itemgetter('contributions'))
contributors = [get_author(x) for x in reversed(contributors)
if x['type'] == 'User']
contributors = filter(lambda x: x is not None, contributors)
return contributors
except Exception:
return None
|
python
|
{
"resource": ""
}
|
q4146
|
account_setup
|
train
|
def account_setup(remote, token=None, response=None,
account_setup=None):
"""Setup user account."""
gh = GitHubAPI(user_id=token.remote_account.user_id)
with db.session.begin_nested():
gh.init_account()
# Create user <-> external id link.
oauth_link_external_id(
token.remote_account.user,
dict(id=str(gh.account.extra_data['id']), method="github")
)
|
python
|
{
"resource": ""
}
|
q4147
|
account_post_init
|
train
|
def account_post_init(remote, token=None):
"""Perform post initialization."""
gh = GitHubAPI(user_id=token.remote_account.user_id)
repos = [r.id for r in gh.api.repositories() if r.permissions['admin']]
sync_hooks.delay(token.remote_account.user_id, repos)
|
python
|
{
"resource": ""
}
|
q4148
|
disconnect
|
train
|
def disconnect(remote):
"""Disconnect callback handler for GitHub."""
# User must be authenticated
if not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
external_method = 'github'
external_ids = [i.id for i in current_user.external_identifiers
if i.method == external_method]
if external_ids:
oauth_unlink_external_id(dict(id=external_ids[0],
method=external_method))
user_id = int(current_user.get_id())
token = RemoteToken.get(user_id, remote.consumer_key)
if token:
extra_data = token.remote_account.extra_data
# Delete the token that we issued for GitHub to deliver webhooks
webhook_token_id = extra_data.get('tokens', {}).get('webhook')
ProviderToken.query.filter_by(id=webhook_token_id).delete()
# Disable GitHub webhooks from our side
db_repos = Repository.query.filter_by(user_id=user_id).all()
# Keep repositories with hooks to pass to the celery task later on
repos_with_hooks = [(r.github_id, r.hook) for r in db_repos if r.hook]
for repo in db_repos:
try:
Repository.disable(user_id=user_id,
github_id=repo.github_id,
name=repo.name)
except NoResultFound:
# If the repository doesn't exist, no action is necessary
pass
db.session.commit()
# Send Celery task for webhooks removal and token revocation
disconnect_github.delay(token.access_token, repos_with_hooks)
# Delete the RemoteAccount (along with the associated RemoteToken)
token.remote_account.delete()
return redirect(url_for('invenio_oauthclient_settings.index'))
|
python
|
{
"resource": ""
}
|
q4149
|
comments_are_open
|
train
|
def comments_are_open(content_object):
"""
Return whether comments are still open for a given target object.
"""
moderator = get_model_moderator(content_object.__class__)
if moderator is None:
return True
# Check the 'enable_field', 'auto_close_field' and 'close_after',
# by reusing the basic Django policies.
return CommentModerator.allow(moderator, None, content_object, None)
|
python
|
{
"resource": ""
}
|
q4150
|
comments_are_moderated
|
train
|
def comments_are_moderated(content_object):
"""
Return whether comments are moderated for a given target object.
"""
moderator = get_model_moderator(content_object.__class__)
if moderator is None:
return False
# Check the 'auto_moderate_field', 'moderate_after',
# by reusing the basic Django policies.
return CommentModerator.moderate(moderator, None, content_object, None)
|
python
|
{
"resource": ""
}
|
q4151
|
FluentCommentsModerator.email
|
train
|
def email(self, comment, content_object, request):
"""
Overwritten for a better email notification.
"""
if not self.email_notification:
return
send_comment_posted(comment, request)
|
python
|
{
"resource": ""
}
|
q4152
|
send_comment_posted
|
train
|
def send_comment_posted(comment, request):
"""
    Send the email to staff that a comment was posted.
While the django_comments module has email support,
it doesn't pass the 'request' to the context.
This also changes the subject to show the page title.
"""
recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS]
site = get_current_site(request)
content_object = comment.content_object
content_title = force_text(content_object)
if comment.is_removed:
subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title)
elif not comment.is_public:
subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title)
else:
subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title)
context = {
'site': site,
'comment': comment,
'content_object': content_object
}
message = render_to_string("comments/comment_notification_email.txt", context, request=request)
if appsettings.FLUENT_COMMENTS_MULTIPART_EMAILS:
html_message = render_to_string("comments/comment_notification_email.html", context, request=request)
else:
html_message = None
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
recipient_list, fail_silently=True, html_message=html_message)
|
python
|
{
"resource": ""
}
|
q4153
|
AjaxCommentTags.get_context_data
|
train
|
def get_context_data(self, parent_context, *tag_args, **tag_kwargs):
"""
The main logic for the inclusion node, analogous to ``@register.inclusion_node``.
"""
target_object = tag_args[0] # moved one spot due to .pop(0)
new_context = {
'STATIC_URL': parent_context.get('STATIC_URL', None),
'USE_THREADEDCOMMENTS': appsettings.USE_THREADEDCOMMENTS,
'target_object': target_object,
}
# Be configuration independent:
if new_context['STATIC_URL'] is None:
try:
request = parent_context['request']
except KeyError:
new_context.update({'STATIC_URL': settings.STATIC_URL})
else:
new_context.update(context_processors.static(request))
return new_context
|
python
|
{
"resource": ""
}
|
q4154
|
get_comment_template_name
|
train
|
def get_comment_template_name(comment):
"""
Internal function for the rendering of comments.
"""
ctype = ContentType.objects.get_for_id(comment.content_type_id)
return [
"comments/%s/%s/comment.html" % (ctype.app_label, ctype.model),
"comments/%s/comment.html" % ctype.app_label,
"comments/comment.html"
]
|
python
|
{
"resource": ""
}
|
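Django's template loader tries the candidate names in order and uses the first one that exists, so app- and model-specific templates override the generic one. A sketch assuming a configured Django project and a comment instance:

from django.template.loader import select_template

template = select_template(get_comment_template_name(comment))
html = template.render({'comment': comment})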
q4155
|
get_comments_for_model
|
train
|
def get_comments_for_model(content_object, include_moderated=False):
"""
Return the QuerySet with all comments for a given model.
"""
qs = get_comments_model().objects.for_model(content_object)
if not include_moderated:
qs = qs.filter(is_public=True, is_removed=False)
return qs
|
python
|
{
"resource": ""
}
|
q4156
|
CaptchaFormMixin._reorder_fields
|
train
|
def _reorder_fields(self, ordering):
"""
Test that the 'captcha' field is really present.
This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration.
"""
if 'captcha' not in ordering:
raise ImproperlyConfigured(
"When using 'FLUENT_COMMENTS_FIELD_ORDER', "
"make sure the 'captcha' field included too to use '{}' form. ".format(
self.__class__.__name__
)
)
super(CaptchaFormMixin, self)._reorder_fields(ordering)
# Avoid making captcha required for previews.
if self.is_preview:
self.fields.pop('captcha')
|
python
|
{
"resource": ""
}
|
q4157
|
_render_errors
|
train
|
def _render_errors(field):
"""
Render form errors in crispy-forms style.
"""
template = '{0}/layout/field_errors.html'.format(appsettings.CRISPY_TEMPLATE_PACK)
return render_to_string(template, {
'field': field,
'form_show_errors': True,
})
|
python
|
{
"resource": ""
}
|
q4158
|
get_form
|
train
|
def get_form():
"""
Return the form to use for commenting.
"""
global form_class
from fluent_comments import appsettings
if form_class is None:
if appsettings.FLUENT_COMMENTS_FORM_CLASS:
from django.utils.module_loading import import_string
form_class = import_string(appsettings.FLUENT_COMMENTS_FORM_CLASS)
else:
from fluent_comments.forms import FluentCommentForm
form_class = FluentCommentForm
return form_class
|
python
|
{
"resource": ""
}
|
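The module-level form_class acts as a cache: the setting is read and the import performed only on the first call. A small check of that behaviour:

form = get_form()
assert get_form() is form  # later calls return the cached class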
q4159
|
load_default_moderator
|
train
|
def load_default_moderator():
"""
Find a moderator object
"""
if appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'default':
# Perform spam checks
return moderation.FluentCommentsModerator(None)
elif appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'deny':
# Deny all comments not from known registered models.
return moderation.AlwaysDeny(None)
elif str(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR).lower() == 'none':
# Disables default moderator
return moderation.NullModerator(None)
elif '.' in appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR:
return import_string(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR)(None)
else:
raise ImproperlyConfigured(
"Bad FLUENT_COMMENTS_DEFAULT_MODERATOR value. Provide default/deny/none or a dotted path"
)
|
python
|
{
"resource": ""
}
|
q4160
|
on_comment_will_be_posted
|
train
|
def on_comment_will_be_posted(sender, comment, request, **kwargs):
"""
Make sure both the Ajax and regular comments are checked for moderation.
This signal is also used to link moderators to the comment posting.
"""
content_object = comment.content_object
moderator = moderation.get_model_moderator(content_object.__class__)
if moderator and comment.__class__ is not CommentModel:
# Help with some hard to diagnose problems. The default Django moderator connects
# to the configured comment model. When this model differs from the signal sender,
# the the form stores a different model then COMMENTS_APP provides.
moderator = None
logger.warning(
"Comment of type '%s' was not moderated by '%s', "
"because the parent '%s' has a moderator installed for '%s' instead",
comment.__class__.__name__, moderator.__class__.__name__,
content_object.__class__.__name__, CommentModel.__name__
)
if moderator is None:
logger.info(
"Using default moderator for comment '%s' on parent '%s'",
comment.__class__.__name__, content_object.__class__.__name__
)
_run_default_moderator(comment, content_object, request)
|
python
|
{
"resource": ""
}
|
q4161
|
_run_default_moderator
|
train
|
def _run_default_moderator(comment, content_object, request):
"""
Run the default moderator
"""
# The default moderator will likely not check things like "auto close".
# It can still provide akismet and bad word checking.
if not default_moderator.allow(comment, content_object, request):
# Comment will be disallowed outright (HTTP 403 response)
return False
if default_moderator.moderate(comment, content_object, request):
comment.is_public = False
|
python
|
{
"resource": ""
}
|
q4162
|
akismet_check
|
train
|
def akismet_check(comment, content_object, request):
"""
    Connects to Akismet; the returned status evaluates to True if Akismet marks this comment as spam.
:rtype: akismet.SpamStatus
"""
# Return previously cached response
akismet_result = getattr(comment, '_akismet_result_', None)
if akismet_result is not None:
return akismet_result
# Get Akismet data
AKISMET_API_KEY = appsettings.AKISMET_API_KEY
if not AKISMET_API_KEY:
raise ImproperlyConfigured('You must set AKISMET_API_KEY to use comment moderation with Akismet.')
current_domain = get_current_site(request).domain
auto_blog_url = '{0}://{1}/'.format(request.is_secure() and 'https' or 'http', current_domain)
blog_url = appsettings.AKISMET_BLOG_URL or auto_blog_url
akismet = Akismet(
AKISMET_API_KEY,
blog=blog_url,
is_test=int(bool(appsettings.AKISMET_IS_TEST)),
application_user_agent='django-fluent-comments/{0}'.format(fluent_comments.__version__),
)
akismet_data = _get_akismet_data(blog_url, comment, content_object, request)
akismet_result = akismet.check(**akismet_data) # raises AkismetServerError when key is invalid
setattr(comment, "_akismet_result_", akismet_result)
return akismet_result
|
python
|
{
"resource": ""
}
|
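A sketch of the settings akismet_check() consumes, assuming the django-fluent-comments appsettings module; the values are placeholders.

AKISMET_API_KEY = 'your-api-key'  # required, else ImproperlyConfigured is raised
AKISMET_BLOG_URL = ''             # optional; derived from the request if empty
AKISMET_IS_TEST = True            # forwarded to Akismet as is_test=1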
q4163
|
Overpass._handle_remark_msg
|
train
|
def _handle_remark_msg(self, msg):
"""
Try to parse the message provided with the remark tag or element.
:param str msg: The message
:raises overpy.exception.OverpassRuntimeError: If message starts with 'runtime error:'
:raises overpy.exception.OverpassRuntimeRemark: If message starts with 'runtime remark:'
:raises overpy.exception.OverpassUnknownError: If we are unable to identify the error
"""
msg = msg.strip()
if msg.startswith("runtime error:"):
raise exception.OverpassRuntimeError(msg=msg)
elif msg.startswith("runtime remark:"):
raise exception.OverpassRuntimeRemark(msg=msg)
raise exception.OverpassUnknownError(msg=msg)
|
python
|
{
"resource": ""
}
|
q4164
|
Overpass.query
|
train
|
def query(self, query):
"""
Query the Overpass API
:param String|Bytes query: The query string in Overpass QL
:return: The parsed result
:rtype: overpy.Result
"""
if not isinstance(query, bytes):
query = query.encode("utf-8")
retry_num = 0
retry_exceptions = []
do_retry = True if self.max_retry_count > 0 else False
while retry_num <= self.max_retry_count:
if retry_num > 0:
time.sleep(self.retry_timeout)
retry_num += 1
try:
f = urlopen(self.url, query)
except HTTPError as e:
f = e
response = f.read(self.read_chunk_size)
while True:
data = f.read(self.read_chunk_size)
if len(data) == 0:
break
response = response + data
f.close()
if f.code == 200:
if PY2:
http_info = f.info()
content_type = http_info.getheader("content-type")
else:
content_type = f.getheader("Content-Type")
if content_type == "application/json":
return self.parse_json(response)
if content_type == "application/osm3s+xml":
return self.parse_xml(response)
e = exception.OverpassUnknownContentType(content_type)
if not do_retry:
raise e
retry_exceptions.append(e)
continue
if f.code == 400:
msgs = []
for msg in self._regex_extract_error_msg.finditer(response):
tmp = self._regex_remove_tag.sub(b"", msg.group("msg"))
try:
tmp = tmp.decode("utf-8")
except UnicodeDecodeError:
tmp = repr(tmp)
msgs.append(tmp)
e = exception.OverpassBadRequest(
query,
msgs=msgs
)
if not do_retry:
raise e
retry_exceptions.append(e)
continue
if f.code == 429:
            e = exception.OverpassTooManyRequests()
if not do_retry:
raise e
retry_exceptions.append(e)
continue
if f.code == 504:
            e = exception.OverpassGatewayTimeout()
if not do_retry:
raise e
retry_exceptions.append(e)
continue
e = exception.OverpassUnknownHTTPStatusCode(f.code)
if not do_retry:
raise e
retry_exceptions.append(e)
continue
raise exception.MaxRetriesReached(retry_count=retry_num, exceptions=retry_exceptions)
|
python
|
{
"resource": ""
}
|
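Typical use of Overpass.query(), assuming network access to the public Overpass endpoint; the bounding box is arbitrary.

import overpy

api = overpy.Overpass()
result = api.query("node(50.745, 7.17, 50.75, 7.18); out;")
print(len(result.nodes), "nodes")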
q4165
|
Overpass.parse_json
|
train
|
def parse_json(self, data, encoding="utf-8"):
"""
Parse raw response from Overpass service.
:param data: Raw JSON Data
:type data: String or Bytes
:param encoding: Encoding to decode byte string
:type encoding: String
:return: Result object
:rtype: overpy.Result
"""
if isinstance(data, bytes):
data = data.decode(encoding)
data = json.loads(data, parse_float=Decimal)
if "remark" in data:
self._handle_remark_msg(msg=data.get("remark"))
return Result.from_json(data, api=self)
|
python
|
{
"resource": ""
}
|
q4166
|
Result.expand
|
train
|
def expand(self, other):
"""
    Add all elements from another result to the list of elements of this result object.
It is used by the auto resolve feature.
:param other: Expand the result with the elements from this result.
:type other: overpy.Result
:raises ValueError: If provided parameter is not instance of :class:`overpy.Result`
"""
if not isinstance(other, Result):
raise ValueError("Provided argument has to be instance of overpy:Result()")
other_collection_map = {Node: other.nodes, Way: other.ways, Relation: other.relations, Area: other.areas}
for element_type, own_collection in self._class_collection_map.items():
for element in other_collection_map[element_type]:
if is_valid_type(element, element_type) and element.id not in own_collection:
own_collection[element.id] = element
|
python
|
{
"resource": ""
}
|
q4167
|
Result.append
|
train
|
def append(self, element):
"""
Append a new element to the result.
:param element: The element to append
:type element: overpy.Element
"""
if is_valid_type(element, Element):
self._class_collection_map[element.__class__].setdefault(element.id, element)
|
python
|
{
"resource": ""
}
|
q4168
|
Result.get_elements
|
train
|
def get_elements(self, filter_cls, elem_id=None):
"""
Get a list of elements from the result and filter the element type by a class.
:param filter_cls:
:param elem_id: ID of the object
:type elem_id: Integer
:return: List of available elements
:rtype: List
"""
result = []
if elem_id is not None:
try:
result = [self._class_collection_map[filter_cls][elem_id]]
except KeyError:
result = []
else:
for e in self._class_collection_map[filter_cls].values():
result.append(e)
return result
|
python
|
{
"resource": ""
}
|
q4169
|
Result.from_json
|
train
|
def from_json(cls, data, api=None):
"""
Create a new instance and load data from json object.
:param data: JSON data returned by the Overpass API
:type data: Dict
:param api:
:type api: overpy.Overpass
:return: New instance of Result object
:rtype: overpy.Result
"""
result = cls(api=api)
for elem_cls in [Node, Way, Relation, Area]:
for element in data.get("elements", []):
e_type = element.get("type")
if hasattr(e_type, "lower") and e_type.lower() == elem_cls._type_value:
result.append(elem_cls.from_json(element, result=result))
return result
|
python
|
{
"resource": ""
}
|
q4170
|
Result.get_area
|
train
|
def get_area(self, area_id, resolve_missing=False):
"""
Get an area by its ID.
:param area_id: The area ID
:type area_id: Integer
:param resolve_missing: Query the Overpass API if the area is missing in the result set.
:return: The area
:rtype: overpy.Area
    :raises overpy.exception.DataIncomplete: The requested area is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the area can't be resolved.
"""
areas = self.get_areas(area_id=area_id)
if len(areas) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing area is disabled")
query = ("\n"
"[out:json];\n"
"area({area_id});\n"
"out body;\n"
)
query = query.format(
area_id=area_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
areas = self.get_areas(area_id=area_id)
if len(areas) == 0:
raise exception.DataIncomplete("Unable to resolve requested areas")
return areas[0]
|
python
|
{
"resource": ""
}
|
q4171
|
Result.get_node
|
train
|
def get_node(self, node_id, resolve_missing=False):
"""
Get a node by its ID.
:param node_id: The node ID
:type node_id: Integer
:param resolve_missing: Query the Overpass API if the node is missing in the result set.
:return: The node
:rtype: overpy.Node
:raises overpy.exception.DataIncomplete: At least one referenced node is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and at least one node can't be resolved.
"""
nodes = self.get_nodes(node_id=node_id)
if len(nodes) == 0:
if not resolve_missing:
raise exception.DataIncomplete("Resolve missing nodes is disabled")
query = ("\n"
"[out:json];\n"
"node({node_id});\n"
"out body;\n"
)
query = query.format(
node_id=node_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
nodes = self.get_nodes(node_id=node_id)
if len(nodes) == 0:
raise exception.DataIncomplete("Unable to resolve all nodes")
return nodes[0]
|
python
|
{
"resource": ""
}
|
q4172
|
Result.get_relation
|
train
|
def get_relation(self, rel_id, resolve_missing=False):
"""
Get a relation by its ID.
:param rel_id: The relation ID
:type rel_id: Integer
:param resolve_missing: Query the Overpass API if the relation is missing in the result set.
:return: The relation
:rtype: overpy.Relation
:raises overpy.exception.DataIncomplete: The requested relation is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the relation can't be resolved.
"""
relations = self.get_relations(rel_id=rel_id)
if len(relations) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing relations is disabled")
query = ("\n"
"[out:json];\n"
"relation({relation_id});\n"
"out body;\n"
)
query = query.format(
relation_id=rel_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
relations = self.get_relations(rel_id=rel_id)
if len(relations) == 0:
raise exception.DataIncomplete("Unable to resolve requested reference")
return relations[0]
|
python
|
{
"resource": ""
}
|
q4173
|
Result.get_way
|
train
|
def get_way(self, way_id, resolve_missing=False):
"""
Get a way by its ID.
:param way_id: The way ID
:type way_id: Integer
:param resolve_missing: Query the Overpass API if the way is missing in the result set.
:return: The way
:rtype: overpy.Way
:raises overpy.exception.DataIncomplete: The requested way is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the way can't be resolved.
"""
ways = self.get_ways(way_id=way_id)
if len(ways) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing way is disabled")
query = ("\n"
"[out:json];\n"
"way({way_id});\n"
"out body;\n"
)
query = query.format(
way_id=way_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
ways = self.get_ways(way_id=way_id)
if len(ways) == 0:
raise exception.DataIncomplete("Unable to resolve requested way")
return ways[0]
|
python
|
{
"resource": ""
}
|
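The get_node/get_way/get_relation/get_area family can lazily fetch missing objects: with resolve_missing=True a follow-up query is issued and merged into this result via expand(). The ID below is hypothetical.

way = result.get_way(4080250, resolve_missing=True)  # hypothetical way ID
print(way.tags.get("highway"))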
q4174
|
Element.get_center_from_json
|
train
|
def get_center_from_json(cls, data):
"""
Get center information from json data
:param data: json data
:return: tuple with two elements: lat and lon
:rtype: tuple
"""
center_lat = None
center_lon = None
center = data.get("center")
if isinstance(center, dict):
center_lat = center.get("lat")
center_lon = center.get("lon")
if center_lat is None or center_lon is None:
raise ValueError("Unable to get lat or lon of way center.")
center_lat = Decimal(center_lat)
center_lon = Decimal(center_lon)
return (center_lat, center_lon)
|
python
|
{
"resource": ""
}
|
q4175
|
Node.from_json
|
train
|
def from_json(cls, data, result=None):
"""
Create new Node element from JSON data
:param data: Element data from JSON
:type data: Dict
:param result: The result this element belongs to
:type result: overpy.Result
:return: New instance of Node
:rtype: overpy.Node
:raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
"""
if data.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=data.get("type")
)
tags = data.get("tags", {})
node_id = data.get("id")
lat = data.get("lat")
lon = data.get("lon")
attributes = {}
ignore = ["type", "id", "lat", "lon", "tags"]
for n, v in data.items():
if n in ignore:
continue
attributes[n] = v
return cls(node_id=node_id, lat=lat, lon=lon, tags=tags, attributes=attributes, result=result)
|
python
|
{
"resource": ""
}
|
q4176
|
Way.get_nodes
|
train
|
def get_nodes(self, resolve_missing=False):
"""
Get the nodes defining the geometry of the way
:param resolve_missing: Try to resolve missing nodes.
:type resolve_missing: Boolean
:return: List of nodes
:rtype: List of overpy.Node
:raises overpy.exception.DataIncomplete: At least one referenced node is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and at least one node can't be resolved.
"""
result = []
resolved = False
for node_id in self._node_ids:
try:
node = self._result.get_node(node_id)
except exception.DataIncomplete:
node = None
if node is not None:
result.append(node)
continue
if not resolve_missing:
raise exception.DataIncomplete("Resolve missing nodes is disabled")
# We tried to resolve the data but some nodes are still missing
if resolved:
raise exception.DataIncomplete("Unable to resolve all nodes")
query = ("\n"
"[out:json];\n"
"way({way_id});\n"
"node(w);\n"
"out body;\n"
)
query = query.format(
way_id=self.id
)
tmp_result = self._result.api.query(query)
self._result.expand(tmp_result)
resolved = True
try:
node = self._result.get_node(node_id)
except exception.DataIncomplete:
node = None
if node is None:
raise exception.DataIncomplete("Unable to resolve all nodes")
result.append(node)
return result
|
python
|
{
"resource": ""
}
|
q4177
|
Way.from_json
|
train
|
def from_json(cls, data, result=None):
"""
Create new Way element from JSON data
:param data: Element data from JSON
:type data: Dict
:param result: The result this element belongs to
:type result: overpy.Result
:return: New instance of Way
:rtype: overpy.Way
:raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
"""
if data.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=data.get("type")
)
tags = data.get("tags", {})
way_id = data.get("id")
node_ids = data.get("nodes")
(center_lat, center_lon) = cls.get_center_from_json(data=data)
attributes = {}
ignore = ["center", "id", "nodes", "tags", "type"]
for n, v in data.items():
if n in ignore:
continue
attributes[n] = v
return cls(
attributes=attributes,
center_lat=center_lat,
center_lon=center_lon,
node_ids=node_ids,
tags=tags,
result=result,
way_id=way_id
)
|
python
|
{
"resource": ""
}
|
q4178
|
Relation.from_json
|
train
|
def from_json(cls, data, result=None):
"""
Create new Relation element from JSON data
:param data: Element data from JSON
:type data: Dict
:param result: The result this element belongs to
:type result: overpy.Result
:return: New instance of Relation
:rtype: overpy.Relation
:raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
"""
if data.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=data.get("type")
)
tags = data.get("tags", {})
rel_id = data.get("id")
(center_lat, center_lon) = cls.get_center_from_json(data=data)
members = []
supported_members = [RelationNode, RelationWay, RelationRelation]
for member in data.get("members", []):
type_value = member.get("type")
for member_cls in supported_members:
if member_cls._type_value == type_value:
members.append(
member_cls.from_json(
member,
result=result
)
)
attributes = {}
ignore = ["id", "members", "tags", "type"]
for n, v in data.items():
if n in ignore:
continue
attributes[n] = v
return cls(
rel_id=rel_id,
attributes=attributes,
center_lat=center_lat,
center_lon=center_lon,
members=members,
tags=tags,
result=result
)
|
python
|
{
"resource": ""
}
|
q4179
|
RelationMember.from_json
|
train
|
def from_json(cls, data, result=None):
"""
Create new RelationMember element from JSON data
    :param data: Element data from JSON
    :type data: Dict
:param result: The result this element belongs to
:type result: overpy.Result
:return: New instance of RelationMember
:rtype: overpy.RelationMember
:raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
"""
if data.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=data.get("type")
)
ref = data.get("ref")
role = data.get("role")
attributes = {}
ignore = ["geometry", "type", "ref", "role"]
for n, v in data.items():
if n in ignore:
continue
attributes[n] = v
geometry = data.get("geometry")
if isinstance(geometry, list):
geometry_orig = geometry
geometry = []
for v in geometry_orig:
geometry.append(
RelationWayGeometryValue(
lat=v.get("lat"),
lon=v.get("lon")
)
)
else:
geometry = None
return cls(
attributes=attributes,
geometry=geometry,
ref=ref,
role=role,
result=result
)
|
python
|
{
"resource": ""
}
|
q4180
|
RelationMember.from_xml
|
train
|
def from_xml(cls, child, result=None):
"""
Create new RelationMember from XML data
:param child: XML node to be parsed
:type child: xml.etree.ElementTree.Element
:param result: The result this element belongs to
:type result: overpy.Result
    :return: New relation member object
:rtype: overpy.RelationMember
:raises overpy.exception.ElementDataWrongType: If name of the xml child node doesn't match
"""
if child.attrib.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=child.tag.lower()
)
ref = child.attrib.get("ref")
if ref is not None:
ref = int(ref)
role = child.attrib.get("role")
attributes = {}
ignore = ["geometry", "ref", "role", "type"]
for n, v in child.attrib.items():
if n in ignore:
continue
attributes[n] = v
geometry = None
for sub_child in child:
if sub_child.tag.lower() == "nd":
if geometry is None:
geometry = []
geometry.append(
RelationWayGeometryValue(
lat=Decimal(sub_child.attrib["lat"]),
lon=Decimal(sub_child.attrib["lon"])
)
)
return cls(
attributes=attributes,
geometry=geometry,
ref=ref,
role=role,
result=result
)
|
python
|
{
"resource": ""
}
|
q4181
|
OSMSAXHandler.startElement
|
train
|
def startElement(self, name, attrs):
"""
Handle opening elements.
:param name: Name of the element
:type name: String
:param attrs: Attributes of the element
:type attrs: Dict
"""
if name in self.ignore_start:
return
try:
handler = getattr(self, '_handle_start_%s' % name)
except AttributeError:
raise KeyError("Unknown element start '%s'" % name)
handler(attrs)
|
python
|
{
"resource": ""
}
|
q4182
|
OSMSAXHandler.endElement
|
train
|
def endElement(self, name):
"""
Handle closing elements
:param name: Name of the element
:type name: String
"""
if name in self.ignore_end:
return
try:
handler = getattr(self, '_handle_end_%s' % name)
except AttributeError:
raise KeyError("Unknown element end '%s'" % name)
handler()
|
python
|
{
"resource": ""
}
|
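startElement/endElement dispatch on the tag name via getattr; a tiny standalone illustration of the same idiom.

class Dispatcher:
    def handle_start(self, name, attrs):
        try:
            handler = getattr(self, '_handle_start_%s' % name)
        except AttributeError:
            raise KeyError("Unknown element start '%s'" % name)
        handler(attrs)

    def _handle_start_node(self, attrs):
        print("node", attrs)

Dispatcher().handle_start("node", {"id": "1"})  # node {'id': '1'}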
q4183
|
OSMSAXHandler._handle_start_center
|
train
|
def _handle_start_center(self, attrs):
"""
Handle opening center element
:param attrs: Attributes of the element
:type attrs: Dict
"""
center_lat = attrs.get("lat")
center_lon = attrs.get("lon")
if center_lat is None or center_lon is None:
raise ValueError("Unable to get lat or lon of way center.")
self._curr["center_lat"] = Decimal(center_lat)
self._curr["center_lon"] = Decimal(center_lon)
|
python
|
{
"resource": ""
}
|
q4184
|
OSMSAXHandler._handle_start_tag
|
train
|
def _handle_start_tag(self, attrs):
"""
Handle opening tag element
:param attrs: Attributes of the element
:type attrs: Dict
"""
try:
tag_key = attrs['k']
except KeyError:
raise ValueError("Tag without name/key.")
self._curr['tags'][tag_key] = attrs.get('v')
|
python
|
{
"resource": ""
}
|
q4185
|
OSMSAXHandler._handle_start_node
|
train
|
def _handle_start_node(self, attrs):
"""
Handle opening node element
:param attrs: Attributes of the element
:type attrs: Dict
"""
self._curr = {
'attributes': dict(attrs),
'lat': None,
'lon': None,
'node_id': None,
'tags': {}
}
if attrs.get('id', None) is not None:
self._curr['node_id'] = int(attrs['id'])
del self._curr['attributes']['id']
if attrs.get('lat', None) is not None:
self._curr['lat'] = Decimal(attrs['lat'])
del self._curr['attributes']['lat']
if attrs.get('lon', None) is not None:
self._curr['lon'] = Decimal(attrs['lon'])
del self._curr['attributes']['lon']
|
python
|
{
"resource": ""
}
|
q4186
|
OSMSAXHandler._handle_end_node
|
train
|
def _handle_end_node(self):
"""
Handle closing node element
"""
self._result.append(Node(result=self._result, **self._curr))
self._curr = {}
|
python
|
{
"resource": ""
}
|
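Editor's note: taken together, _handle_start_node and _handle_end_node accumulate a keyword dict and then expand it into a Node. A worked illustration of that transformation (attribute values are hypothetical):

from decimal import Decimal

# SAX attributes for <node id="1" lat="50.11" lon="8.68" version="2">
attrs = {"id": "1", "lat": "50.11", "lon": "8.68", "version": "2"}

# _handle_start_node pops the typed fields out of 'attributes':
curr = {
    'attributes': {"version": "2"},  # everything not consumed below
    'lat': Decimal("50.11"),
    'lon': Decimal("8.68"),
    'node_id': 1,
    'tags': {}
}
# _handle_end_node then calls Node(result=..., **curr), so these dict keys
# must match the Node constructor's keyword arguments.
|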
q4187
|
OSMSAXHandler._handle_start_way
|
train
|
def _handle_start_way(self, attrs):
"""
Handle opening way element
:param attrs: Attributes of the element
:type attrs: Dict
"""
self._curr = {
'center_lat': None,
'center_lon': None,
'attributes': dict(attrs),
'node_ids': [],
'tags': {},
'way_id': None
}
if attrs.get('id', None) is not None:
self._curr['way_id'] = int(attrs['id'])
del self._curr['attributes']['id']
|
python
|
{
"resource": ""
}
|
q4188
|
OSMSAXHandler._handle_end_way
|
train
|
def _handle_end_way(self):
"""
Handle closing way element
"""
self._result.append(Way(result=self._result, **self._curr))
self._curr = {}
|
python
|
{
"resource": ""
}
|
q4189
|
OSMSAXHandler._handle_start_area
|
train
|
def _handle_start_area(self, attrs):
"""
Handle opening area element
:param attrs: Attributes of the element
:type attrs: Dict
"""
self._curr = {
'attributes': dict(attrs),
'tags': {},
'area_id': None
}
if attrs.get('id', None) is not None:
self._curr['area_id'] = int(attrs['id'])
del self._curr['attributes']['id']
|
python
|
{
"resource": ""
}
|
q4190
|
OSMSAXHandler._handle_end_area
|
train
|
def _handle_end_area(self):
"""
Handle closing area element
"""
self._result.append(Area(result=self._result, **self._curr))
self._curr = {}
|
python
|
{
"resource": ""
}
|
q4191
|
OSMSAXHandler._handle_start_nd
|
train
|
def _handle_start_nd(self, attrs):
"""
Handle opening nd element
:param attrs: Attributes of the element
:type attrs: Dict
"""
if isinstance(self.cur_relation_member, RelationWay):
if self.cur_relation_member.geometry is None:
self.cur_relation_member.geometry = []
self.cur_relation_member.geometry.append(
RelationWayGeometryValue(
lat=Decimal(attrs["lat"]),
lon=Decimal(attrs["lon"])
)
)
else:
try:
node_ref = attrs['ref']
except KeyError:
raise ValueError("Unable to find required ref value.")
self._curr['node_ids'].append(int(node_ref))
|
python
|
{
"resource": ""
}
|
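Editor's note: <nd> plays a dual role here. Inside a plain way it only carries a ref that lands in node_ids; inside a relation's way member it carries coordinates that become RelationWayGeometryValue entries. Constructing one directly (coordinate values hypothetical):

from decimal import Decimal

import overpy

geom = overpy.RelationWayGeometryValue(lat=Decimal("50.11"), lon=Decimal("8.68"))
print(geom.lat, geom.lon)
|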
q4192
|
OSMSAXHandler._handle_start_relation
|
train
|
def _handle_start_relation(self, attrs):
"""
Handle opening relation element
:param attrs: Attributes of the element
:type attrs: Dict
"""
self._curr = {
'attributes': dict(attrs),
'members': [],
'rel_id': None,
'tags': {}
}
if attrs.get('id', None) is not None:
self._curr['rel_id'] = int(attrs['id'])
del self._curr['attributes']['id']
|
python
|
{
"resource": ""
}
|
q4193
|
OSMSAXHandler._handle_end_relation
|
train
|
def _handle_end_relation(self):
"""
Handle closing relation element
"""
self._result.append(Relation(result=self._result, **self._curr))
self._curr = {}
|
python
|
{
"resource": ""
}
|
q4194
|
OSMSAXHandler._handle_start_member
|
train
|
def _handle_start_member(self, attrs):
"""
Handle opening member element
:param attrs: Attributes of the element
:type attrs: Dict
"""
params = {
# ToDo: Parse attributes
'attributes': {},
'ref': None,
'result': self._result,
'role': None
}
if attrs.get('ref', None):
params['ref'] = int(attrs['ref'])
if attrs.get('role', None):
params['role'] = attrs['role']
cls_map = {
"area": RelationArea,
"node": RelationNode,
"relation": RelationRelation,
"way": RelationWay
}
cls = cls_map.get(attrs["type"])
if cls is None:
raise ValueError("Undefined type for member: '%s'" % attrs['type'])
self.cur_relation_member = cls(**params)
self._curr['members'].append(self.cur_relation_member)
|
python
|
{
"resource": ""
}
|
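Editor's note: cls_map is an ordinary dict dispatch keyed on the member's type attribute. A reduced sketch of the same pattern, using the RelationMember constructor signature shown earlier (values hypothetical):

import overpy

cls_map = {
    "node": overpy.RelationNode,
    "way": overpy.RelationWay,
}
member_cls = cls_map.get("node")
if member_cls is None:
    raise ValueError("Undefined type for member")
member = member_cls(attributes={}, ref=42, role="stop", result=None)
print(type(member).__name__, member.ref)  # RelationNode 42
|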
q4195
|
get_street
|
train
|
def get_street(street, areacode, api=None):
"""
Retrieve streets in a given bounding area
    :param overpy.Overpass api: Overpass API instance to use; a new one is created if None
:param String street: Name of street
:param String areacode: The OSM id of the bounding area
:return: Parsed result
:raises overpy.exception.OverPyException: If something bad happens.
"""
if api is None:
api = overpy.Overpass()
query = """
area(%s)->.location;
(
way[highway][name="%s"](area.location);
- (
way[highway=service](area.location);
way[highway=track](area.location);
);
);
out body;
>;
out skel qt;
"""
data = api.query(query % (areacode, street))
return data
|
python
|
{
"resource": ""
}
|
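Editor's note: a hedged usage sketch for get_street. The street name and area id are example values; Overpass area ids are 3600000000 plus the OSM relation id of the boundary (here Berlin's relation 62422).

import overpy

api = overpy.Overpass()
result = get_street("Unter den Linden", "3600062422", api=api)
for way in result.ways:
    print(way.id, way.tags.get("name"))
|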
q4196
|
get_intersection
|
train
|
def get_intersection(street1, street2, areacode, api=None):
"""
Retrieve intersection of two streets in a given bounding area
    :param overpy.Overpass api: Overpass API instance to use; a new one is created if None
:param String street1: Name of first street of intersection
:param String street2: Name of second street of intersection
:param String areacode: The OSM id of the bounding area
:return: List of intersections
:raises overpy.exception.OverPyException: If something bad happens.
"""
if api is None:
api = overpy.Overpass()
query = """
area(%s)->.location;
(
way[highway][name="%s"](area.location); node(w)->.n1;
way[highway][name="%s"](area.location); node(w)->.n2;
);
node.n1.n2;
out meta;
"""
data = api.query(query % (areacode, street1, street2))
return data.get_nodes()
|
python
|
{
"resource": ""
}
|
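Editor's note: usage follows the same shape as get_street, assuming the function above is in scope; the query returns the nodes shared by both named ways (names and area id are example values):

nodes = get_intersection("Friedrichstraße", "Unter den Linden", "3600062422")
for node in nodes:
    print(node.id, node.lat, node.lon)
|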
q4197
|
check_platforms
|
train
|
def check_platforms(platforms):
"""Checks if the platforms have a valid platform code"""
    # all() of an empty iterable is True, so an empty filter is always valid
    return all(platform in PLATFORM_IDS for platform in platforms)
|
python
|
{
"resource": ""
}
|
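Editor's note: a behavior sketch with a hypothetical PLATFORM_IDS (the real list lives at module level in the source):

PLATFORM_IDS = ["codechef", "codeforces", "hackerrank"]  # hypothetical subset

def check_platforms(platforms):
    return all(platform in PLATFORM_IDS for platform in platforms)

assert check_platforms(["codechef"])
assert check_platforms([])  # empty filter is valid: all() of nothing is True
assert not check_platforms(["unknown"])
|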
q4198
|
main
|
train
|
def main(active, upcoming, hiring, short, goto, platforms, time):
"""A CLI for active and upcoming programming challenges from various platforms"""
if not check_platforms(platforms):
        raise IncorrectParametersException('Invalid code for platform. Please check the platform ids')
    if goto:
        goto = int(goto)  # normalize once, so every branch below can index with it
    try:
if active:
active_challenges = active_contests(platforms)
if goto:
webbrowser.open(active_challenges[goto - 1]["contest_url"], new=2)
else:
writers.write_contests(active_challenges, "active")
return
if upcoming:
upcoming_challenges = upcoming_contests(platforms, time)
            if goto:
                webbrowser.open(upcoming_challenges[goto - 1]["contest_url"], new=2)
else:
writers.write_contests(upcoming_challenges, "upcoming")
return
if hiring:
hiring_challenges = hiring_contests()
if goto:
webbrowser.open(hiring_challenges[goto - 1]["contest_url"], new=2)
else:
writers.write_contests(hiring_challenges, "hiring")
return
if short:
short_challenges = short_contests(platforms)
            if goto:
                webbrowser.open(short_challenges[goto - 1]["contest_url"], new=2)
else:
writers.write_contests(short_challenges, "short")
return
all_contests = get_all_contests(platforms, time)
if goto:
webbrowser.open(all_contests[goto - 1]["contest_url"], new=2)
else:
writers.write_contests(all_contests, "all")
except IncorrectParametersException as e:
click.secho(e.message, fg="red", bold=True)
|
python
|
{
"resource": ""
}
|
q4199
|
colors
|
train
|
def colors():
"""Creates an enum for colors"""
enums = dict(
TIME_LEFT="red",
CONTEST_NAME="yellow",
HOST="green",
MISC="blue",
TIME_TO_START="green",
)
return type('Enum', (), enums)
|
python
|
{
"resource": ""
}
|
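Editor's note: colors() builds a lightweight enum via type(); attribute access returns the click color strings. Assuming the factory above is importable:

Colors = colors()
print(Colors.TIME_LEFT)  # red
print(Colors.HOST)       # green
# Typical call site: click.secho(name, fg=Colors.CONTEST_NAME)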