text_prompt (string, 157 – 13.1k chars) | code_prompt (string, 7 – 19.8k chars, may be null) |
---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def by_works(self, kind=None, role_name=None):
""" Get the Creators involved in the most Works. kind - If supplied, only Works with that `kind` value will be counted. role_name - If supplied, only Works on which the role is that will be counted. e.g. To get all 'movie' Works on which the Creators had the role 'Director': Creator.objects.by_works(kind='movie', role_name='Director') """ |
if not spectator_apps.is_enabled('events'):
raise ImproperlyConfigured("To use the CreatorManager.by_works() method, 'spectator.events' must be in INSTALLED_APPS.")
qs = self.get_queryset()
filter_kwargs = {}
if kind is not None:
filter_kwargs['works__kind'] = kind
if role_name is not None:
filter_kwargs['work_roles__role_name'] = role_name
if filter_kwargs:
qs = qs.filter(**filter_kwargs)
qs = qs.annotate(num_works=Count('works', distinct=True)) \
.order_by('-num_works', 'name_sort')
return qs |
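For context, a minimal usage sketch of the manager method above; it assumes django-spectator's Creator model is importable from spectator.core.models and that 'spectator.events' is installed, so treat the import path and data as assumptions:

# Hypothetical usage sketch; assumes django-spectator models are installed
# and some Works/Roles already exist in the database.
from spectator.core.models import Creator  # import path is an assumption

# Creators ordered by how many 'movie' Works they directed:
directors = Creator.objects.by_works(kind='movie', role_name='Director')
for creator in directors[:5]:
    print(creator.name_sort, creator.num_works)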
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def index():
"""Query Elasticsearch using Invenio query syntax.""" |
page = request.values.get('page', 1, type=int)
size = request.values.get('size', 2, type=int)
search = ExampleSearch()[(page - 1) * size:page * size]
if 'q' in request.values:
search = search.query(QueryString(query=request.values.get('q')))
search = search.sort(
request.values.get('sort', 'title')
)
search = ExampleSearch.faceted_search(search=search)
results = search.execute().to_dict()
return jsonify({'hits': results.get('hits')}) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clean_options(self, using_keytab=False, principal=None, keytab_file=None, ccache_file=None, password=None):
"""Clean argument to related object :param bool using_keytab: refer to ``krbContext.__init__``. :param str principal: refer to ``krbContext.__init__``. :param str keytab_file: refer to ``krbContext.__init__``. :param str ccache_file: refer to ``krbContext.__init__``. :param str password: refer to ``krbContext.__init__``. :return: a mapping containing cleaned names and values, which are used internally. :rtype: dict :raises ValueError: principal is missing or given keytab file does not exist, when initialize from a keytab. """ |
cleaned = {}
if using_keytab:
if principal is None:
raise ValueError('Principal is required when using key table.')
princ_name = gssapi.names.Name(
principal, gssapi.names.NameType.kerberos_principal)
if keytab_file is None:
cleaned['keytab'] = DEFAULT_KEYTAB
elif not os.path.exists(keytab_file):
raise ValueError(
'Keytab file {0} does not exist.'.format(keytab_file))
else:
cleaned['keytab'] = keytab_file
else:
if principal is None:
principal = get_login()
princ_name = gssapi.names.Name(principal,
gssapi.names.NameType.user)
cleaned['using_keytab'] = using_keytab
cleaned['principal'] = princ_name
cleaned['ccache'] = ccache_file or DEFAULT_CCACHE
cleaned['password'] = password
return cleaned |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_with_keytab(self):
"""Initialize credential cache with keytab""" |
creds_opts = {
'usage': 'initiate',
'name': self._cleaned_options['principal'],
}
store = {}
if self._cleaned_options['keytab'] != DEFAULT_KEYTAB:
store['client_keytab'] = self._cleaned_options['keytab']
if self._cleaned_options['ccache'] != DEFAULT_CCACHE:
store['ccache'] = self._cleaned_options['ccache']
if store:
creds_opts['store'] = store
creds = gssapi.creds.Credentials(**creds_opts)
try:
creds.lifetime
except gssapi.exceptions.ExpiredCredentialsError:
new_creds_opts = copy.deepcopy(creds_opts)
# Get new credential and put it into a temporary ccache
if 'store' in new_creds_opts:
new_creds_opts['store']['ccache'] = _get_temp_ccache()
else:
new_creds_opts['store'] = {'ccache': _get_temp_ccache()}
creds = gssapi.creds.Credentials(**new_creds_opts)
# Then, store the new credential back into the originally specified
# ccache, whether that is a given ccache file or the default one.
_store = None
# If the default ccache is used, there is no need to specify ccache in
# the store parameter passed to ``creds.store``.
if self._cleaned_options['ccache'] != DEFAULT_CCACHE:
_store = {'ccache': store['ccache']}
creds.store(usage='initiate', store=_store, overwrite=True) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_with_password(self):
"""Initialize credential cache with password **Causion:** once you enter password from command line, or pass it to API directly, the given password is not encrypted always. Although getting credential with password works, from security point of view, it is strongly recommended **NOT** use it in any formal production environment. If you need to initialize credential in an application to application Kerberos authentication context, keytab has to be used. :raises IOError: when trying to prompt to input password from command line but no attry is available. """ |
creds_opts = {
'usage': 'initiate',
'name': self._cleaned_options['principal'],
}
if self._cleaned_options['ccache'] != DEFAULT_CCACHE:
creds_opts['store'] = {'ccache': self._cleaned_options['ccache']}
cred = gssapi.creds.Credentials(**creds_opts)
try:
cred.lifetime
except gssapi.exceptions.ExpiredCredentialsError:
password = self._cleaned_options['password']
if not password:
if not sys.stdin.isatty():
raise IOError(
'krbContext is not running from a terminal. So, you '
'need to run kinit with your principal manually before'
' anything goes.')
# If there is no password specified via API call, prompt to
# enter one in order to continue to get credential. BUT, in
# some cases, blocking program and waiting for input of
# password is really bad, which may be only suitable for some
# simple use cases, for example, writing some scripts to test
# something that needs Kerberos authentication. Anyway, whether
# it is acceptable to prompt for a password on the command line
# depends entirely on the concrete use case.
password = getpass.getpass()
cred = gssapi.raw.acquire_cred_with_password(
self._cleaned_options['principal'], password)
ccache = self._cleaned_options['ccache']
if ccache == DEFAULT_CCACHE:
gssapi.raw.store_cred(cred.creds,
usage='initiate',
overwrite=True)
else:
gssapi.raw.store_cred_into({'ccache': ccache},
cred.creds,
usage='initiate',
overwrite=True) |
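The two initializers above are normally driven indirectly through the library's context manager. A hedged usage sketch follows; the module path, principal, and keytab location are assumptions for illustration:

# Hedged usage sketch; principal and paths are placeholders.
from krbcontext.context import krbContext  # import path is an assumption

with krbContext(using_keytab=True,
                principal='service/host.example.com@EXAMPLE.COM',
                keytab_file='/etc/app/app.keytab'):
    # Inside the block the credential cache has been initialized (via
    # init_with_keytab above when a keytab is used); Kerberos-authenticated
    # calls can be made here.
    pass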
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def templates(self):
"""Generate a dictionary with template names and file paths.""" |
templates = {}
result = []
if self.entry_point_group_templates:
result = self.load_entry_point_group_templates(
self.entry_point_group_templates) or []
for template in result:
for name, path in template.items():
templates[name] = path
return templates |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_mappings(self, alias, package_name):
"""Register mappings from a package under given alias. :param alias: The alias. :param package_name: The package name. """ |
# For backwards compatibility, we also allow for ES2 mappings to be
# placed at the root level of the specified package path, and not in
# the `<package-path>/v2` directory.
if ES_VERSION[0] == 2:
try:
resource_listdir(package_name, 'v2')
package_name += '.v2'
except (OSError, IOError) as ex:
if getattr(ex, 'errno', 0) != errno.ENOENT:
raise
warnings.warn(
"Having mappings in a path which doesn't specify the "
"Elasticsearch version is deprecated. Please move your "
"mappings to a subfolder named according to the "
"Elasticsearch version which your mappings are intended "
"for. (e.g. '{}/v2/{}')".format(
package_name, alias),
PendingDeprecationWarning)
else:
package_name = '{}.v{}'.format(package_name, ES_VERSION[0])
def _walk_dir(aliases, *parts):
root_name = build_index_name(self.app, *parts)
resource_name = os.path.join(*parts)
if root_name not in aliases:
self.number_of_indexes += 1
data = aliases.get(root_name, {})
for filename in resource_listdir(package_name, resource_name):
index_name = build_index_name(
self.app,
*(parts + (filename, ))
)
file_path = os.path.join(resource_name, filename)
if resource_isdir(package_name, file_path):
_walk_dir(data, *(parts + (filename, )))
continue
ext = os.path.splitext(filename)[1]
if ext not in {'.json', }:
continue
assert index_name not in data, 'Duplicate index'
data[index_name] = self.mappings[index_name] = \
resource_filename(
package_name, os.path.join(resource_name, filename))
self.number_of_indexes += 1
aliases[root_name] = data
# Start the recursion here:
_walk_dir(self.aliases, alias) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_templates(self, directory):
"""Register templates from the provided directory. :param directory: The templates directory. """ |
try:
resource_listdir(directory, 'v{}'.format(ES_VERSION[0]))
directory = '{}/v{}'.format(directory, ES_VERSION[0])
except (OSError, IOError) as ex:
if getattr(ex, 'errno', 0) == errno.ENOENT:
raise OSError(
"Please move your templates to a subfolder named "
"according to the Elasticsearch version "
"which your templates are intended "
"for. (e.g. '{}.v{}')".format(directory,
ES_VERSION[0]))
result = {}
module_name, parts = directory.split('.')[0], directory.split('.')[1:]
parts = tuple(parts)
def _walk_dir(parts):
resource_name = os.path.join(*parts)
for filename in resource_listdir(module_name, resource_name):
template_name = build_index_name(
self.app,
*(parts[1:] + (filename, ))
)
file_path = os.path.join(resource_name, filename)
if resource_isdir(module_name, file_path):
_walk_dir((parts + (filename, )))
continue
ext = os.path.splitext(filename)[1]
if ext not in {'.json', }:
continue
result[template_name] = resource_filename(
module_name, os.path.join(resource_name, filename))
# Start the recursion here:
_walk_dir(parts)
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _client_builder(self):
"""Build Elasticsearch client.""" |
client_config = self.app.config.get('SEARCH_CLIENT_CONFIG') or {}
client_config.setdefault(
'hosts', self.app.config.get('SEARCH_ELASTIC_HOSTS'))
client_config.setdefault('connection_class', RequestsHttpConnection)
return Elasticsearch(**client_config) |
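A short configuration sketch showing the two config keys the builder above reads; the host list and the extra http_auth entry are placeholder values, not required settings:

# Minimal configuration sketch for the client builder above.
from flask import Flask

app = Flask(__name__)
app.config['SEARCH_ELASTIC_HOSTS'] = ['es01:9200', 'es02:9200']
# Anything in SEARCH_CLIENT_CONFIG is passed straight through to
# Elasticsearch(); http_auth is just one example keyword argument.
app.config['SEARCH_CLIENT_CONFIG'] = {'http_auth': ('user', 'secret')}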
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def client(self):
"""Return client for current application.""" |
if self._client is None:
self._client = self._client_builder()
return self._client |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flush_and_refresh(self, index):
"""Flush and refresh one or more indices. .. warning:: Do not call this method unless you know what you are doing. This method is only intended to be called during tests. """ |
self.client.indices.flush(wait_if_ongoing=True, index=index)
self.client.indices.refresh(index=index)
self.client.cluster.health(
wait_for_status='yellow', request_timeout=30)
return True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cluster_version(self):
"""Get version of Elasticsearch running on the cluster.""" |
versionstr = self.client.info()['version']['number']
return [int(x) for x in versionstr.split('.')] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def active_aliases(self):
"""Get a filtered list of aliases based on configuration. Returns aliases and their mappings that are defined in the `SEARCH_MAPPINGS` config variable. If the `SEARCH_MAPPINGS` is set to `None` (the default), all aliases are included. """ |
whitelisted_aliases = self.app.config.get('SEARCH_MAPPINGS')
if whitelisted_aliases is None:
return self.aliases
else:
return {k: v for k, v in self.aliases.items()
if k in whitelisted_aliases} |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(self, ignore=None):
"""Yield tuple with created index name and responses from a client.""" |
ignore = ignore or []
def _create(tree_or_filename, alias=None):
"""Create indices and aliases by walking DFS."""
# Iterate over aliases:
for name, value in tree_or_filename.items():
if isinstance(value, dict):
for result in _create(value, alias=name):
yield result
else:
with open(value, 'r') as body:
yield name, self.client.indices.create(
index=name,
body=json.load(body),
ignore=ignore,
)
if alias:
yield alias, self.client.indices.put_alias(
index=list(_get_indices(tree_or_filename)),
name=alias,
ignore=ignore,
)
for result in _create(self.active_aliases):
yield result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def put_templates(self, ignore=None):
"""Yield tuple with registered template and response from client.""" |
ignore = ignore or []
def _replace_prefix(template_path, body):
"""Replace index prefix in template request body."""
pattern = '__SEARCH_INDEX_PREFIX__'
prefix = self.app.config['SEARCH_INDEX_PREFIX'] or ''
if prefix:
assert pattern in body, (
"You are using the prefix `{0}`, "
"but the template `{1}` does not contain the "
"pattern `{2}`.".format(prefix, template_path, pattern))
return body.replace(pattern, prefix)
def _put_template(template):
"""Put template in search client."""
with open(self.templates[template], 'r') as fp:
body = fp.read()
replaced_body = _replace_prefix(self.templates[template], body)
return self.templates[template],\
current_search_client.indices.put_template(
name=template,
body=json.loads(replaced_body),
ignore=ignore,
)
for template in self.templates:
yield _put_template(template) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self, ignore=None):
"""Yield tuple with deleted index name and responses from a client.""" |
ignore = ignore or []
def _delete(tree_or_filename, alias=None):
"""Delete indexes and aliases by walking DFS."""
if alias:
yield alias, self.client.indices.delete_alias(
index=list(_get_indices(tree_or_filename)),
name=alias,
ignore=ignore,
)
# Iterate over aliases:
for name, value in tree_or_filename.items():
if isinstance(value, dict):
for result in _delete(value, alias=name):
yield result
else:
yield name, self.client.indices.delete(
index=name,
ignore=ignore,
)
for result in _delete(self.active_aliases):
yield result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
"""Start the poor_consumer.""" |
try:
opts, args = getopt.getopt(sys.argv[1:], "hv", ["help", "nack=",
"servers=", "queues="])
except getopt.GetoptError as err:
print(str(err))
usage()
sys.exit()
# defaults
nack = 0.0
verbose = False
servers = "localhost:7712,localhost:7711"
queues = "test"
for o, a in opts:
if o == "-v":
verbose = True
elif o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("--nack"):
nack = float(a)
elif o in ("--servers"):
servers = a
elif o in ("--queues"):
queues = a
else:
assert False, "unhandled option"
# prepare servers and queus for pydisque
servers = servers.split(",")
queues = queues.split(",")
c = Client(servers)
c.connect()
while True:
jobs = c.get_job(queues)
for queue_name, job_id, job in jobs:
rnd = random.random()
# as this is a test processor, we don't do any validation on
# the actual job body, so lets just pay attention to id's
if rnd >= nack:
print ">>> received job:", job_id
c.ack_job(job_id)
else:
print ">>> bouncing job:", job_id
c.nack_job(job_id) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
""" Connect to one of the Disque nodes. You can get current connection with connected_node property :returns: nothing """ |
self.connected_node = None
for i, node in self.nodes.items():
host, port = i.split(':')
port = int(port)
redis_client = redis.Redis(host, port, **self.client_kw_args)
try:
ret = redis_client.execute_command('HELLO')
format_version, node_id = ret[0], ret[1]
others = ret[2:]
self.nodes[i] = Node(node_id, host, port, redis_client)
self.connected_node = self.nodes[i]
except redis.exceptions.ConnectionError:
pass
if not self.connected_node:
raise ConnectionError("couldn't connect to any nodes")
logger.info("connected to node %s" % self.connected_node) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute_command(self, *args, **kwargs):
"""Execute a command on the connected server.""" |
try:
return self.get_connection().execute_command(*args, **kwargs)
except ConnectionError as e:
logger.warn('trying to reconnect')
self.connect()
logger.warn('connected')
raise |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_job(self, queue_name, job, timeout=200, replicate=None, delay=None, retry=None, ttl=None, maxlen=None, asynchronous=None):
""" Add a job to a queue. ADDJOB queue_name job <ms-timeout> [REPLICATE <count>] [DELAY <sec>] [RETRY <sec>] [TTL <sec>] [MAXLEN <count>] [ASYNC] :param queue_name: is the name of the queue, any string, basically. :param job: is a string representing the job. :param timeout: is the command timeout in milliseconds. :param replicate: count is the number of nodes the job should be replicated to. :param delay: sec is the number of seconds that should elapse before the job is queued by any server. :param retry: sec period after which, if no ACK is received, the job is put again into the queue for delivery. If RETRY is 0, the job has an at-most-once delivery semantics. :param ttl: sec is the max job life in seconds. After this time, the job is deleted even if it was not successfully delivered. :param maxlen: count specifies that if there are already count messages queued for the specified queue name, the message is refused and an error reported to the client. :param asynchronous: asks the server to let the command return ASAP and replicate the job to other nodes in the background. The job gets queued ASAP, while normally the job is put into the queue only when the client gets a positive reply. Changing the name of this argument as async is reserved keyword in python 3.7 :returns: job_id """ |
command = ['ADDJOB', queue_name, job, timeout]
if replicate:
command += ['REPLICATE', replicate]
if delay:
command += ['DELAY', delay]
if retry is not None:
command += ['RETRY', retry]
if ttl:
command += ['TTL', ttl]
if maxlen:
command += ['MAXLEN', maxlen]
if asynchronous:
command += ['ASYNC']
# TODO(canardleteer): we need to handle "-PAUSE" messages more
# appropriately, for now it's up to the person using the library
# to handle a generic ResponseError on their own.
logger.debug("sending job - %s", command)
job_id = self.execute_command(*command)
logger.debug("sent job - %s", command)
logger.debug("job_id: %s " % job_id)
return job_id |
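A hedged usage sketch of the producer/consumer round trip; it assumes a Disque node is listening on localhost:7711 and that Client is importable from pydisque.client, as in the pydisque package:

# Hedged usage sketch; server address and queue name are placeholders.
import json
from pydisque.client import Client  # import path is an assumption

client = Client(['localhost:7711'])
client.connect()
job_id = client.add_job('test', json.dumps({'hello': 'world'}),
                        retry=60, ttl=3600)
for queue_name, jid, body in client.get_job(['test'], timeout=1000):
    client.ack_job(jid)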
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_job(self, queues, timeout=None, count=None, nohang=False, withcounters=False):
""" Return some number of jobs from specified queues. GETJOB [NOHANG] [TIMEOUT <ms-timeout>] [COUNT <count>] [WITHCOUNTERS] FROM :param queues: name of queues :returns: list of tuple(job_id, queue_name, job), tuple(job_id, queue_name, job, nacks, additional_deliveries) or empty list :rtype: list """ |
assert queues
command = ['GETJOB']
if nohang:
command += ['NOHANG']
if timeout:
command += ['TIMEOUT', timeout]
if count:
command += ['COUNT', count]
if withcounters:
command += ['WITHCOUNTERS']
command += ['FROM'] + queues
results = self.execute_command(*command)
if not results:
return []
if withcounters:
return [(job_id, queue_name, job, nacks, additional_deliveries) for
job_id, queue_name, job, _, nacks, _, additional_deliveries in results]
else:
return [(job_id, queue_name, job) for
job_id, queue_name, job in results] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def show(self, job_id, return_dict=False):
""" Describe the job. :param job_id: """ |
rtn = self.execute_command('SHOW', job_id)
if return_dict:
grouped = self._grouper(rtn, 2)
rtn = dict((a, b) for a, b in grouped)
return rtn |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pause(self, queue_name, kw_in=None, kw_out=None, kw_all=None, kw_none=None, kw_state=None, kw_bcast=None):
""" Pause a queue. Unfortunately, the PAUSE keywords are mostly reserved words in Python, so I've been a little creative in the function variable names. Open to suggestions to change it (canardleteer) :param queue_name: The job queue we are modifying. :param kw_in: pause the queue in input. :param kw_out: pause the queue in output. :param kw_all: pause the queue in input and output (same as specifying both the in and out options). :param kw_none: clear the paused state in input and output. :param kw_state: just report the current queue state. :param kw_bcast: send a PAUSE command to all the reachable nodes of the cluster to set the same queue in the other nodes to the same state. """ |
command = ["PAUSE", queue_name]
if kw_in:
command += ["in"]
if kw_out:
command += ["out"]
if kw_all:
command += ["all"]
if kw_none:
command += ["none"]
if kw_state:
command += ["state"]
if kw_bcast:
command += ["bcast"]
return self.execute_command(*command) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def qscan(self, cursor=0, count=None, busyloop=None, minlen=None, maxlen=None, importrate=None):
""" Iterate all the existing queues in the local node. :param count: An hint about how much work to do per iteration. :param busyloop: Block and return all the elements in a busy loop. :param minlen: Don't return elements with less than count jobs queued. :param maxlen: Don't return elements with more than count jobs queued. :param importrate: Only return elements with an job import rate (from other nodes) >= rate. """ |
command = ["QSCAN", cursor]
if count:
command += ["COUNT", count]
if busyloop:
command += ["BUSYLOOP"]
if minlen:
command += ["MINLEN", minlen]
if maxlen:
command += ["MAXLEN", maxlen]
if importrate:
command += ["IMPORTRATE", importrate]
return self.execute_command(*command) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def jscan(self, cursor=0, count=None, busyloop=None, queue=None, state=None, reply=None):
"""Iterate all the existing jobs in the local node. :param count: An hint about how much work to do per iteration. :param busyloop: Block and return all the elements in a busy loop. :param queue: Return only jobs in the specified queue. :param state: Must be a list - Return jobs in the specified state. Can be used multiple times for a logic OR. :param reply: None or string {"all", "id"} - Job reply type. Type can be all or id. Default is to report just the job ID. If all is specified the full job state is returned like for the SHOW command. """ |
command = ["JSCAN", cursor]
if count:
command += ["COUNT", count]
if busyloop:
command += ["BUSYLOOP"]
if queue:
command += ["QUEUE", queue]
if type(state) is list:
for s in state:
command += ["STATE", s]
if reply:
command += ["REPLY", reply]
return self.execute_command(*command) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_index_name(app, *parts):
"""Build an index name from parts. :param parts: Parts that should be combined to make an index name. """ |
base_index = os.path.splitext(
'-'.join([part for part in parts if part])
)[0]
return prefix_index(app=app, index=base_index) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def es_version_check(f):
"""Decorator to check Elasticsearch version.""" |
@wraps(f)
def inner(*args, **kwargs):
cluster_ver = current_search.cluster_version[0]
client_ver = ES_VERSION[0]
if cluster_ver != client_ver:
raise click.ClickException(
'Elasticsearch version mismatch. Invenio was installed with '
'Elasticsearch v{client_ver}.x support, but the cluster runs '
'Elasticsearch v{cluster_ver}.x.'.format(
client_ver=client_ver,
cluster_ver=cluster_ver,
))
return f(*args, **kwargs)
return inner |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init(force):
"""Initialize registered aliases and mappings.""" |
click.secho('Creating indexes...', fg='green', bold=True, file=sys.stderr)
with click.progressbar(
current_search.create(ignore=[400] if force else None),
length=current_search.number_of_indexes) as bar:
for name, response in bar:
bar.label = name
click.secho('Putting templates...', fg='green', bold=True, file=sys.stderr)
with click.progressbar(
current_search.put_templates(ignore=[400] if force else None),
length=len(current_search.templates.keys())) as bar:
for response in bar:
bar.label = response |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def destroy(force):
"""Destroy all indexes.""" |
click.secho('Destroying indexes...', fg='red', bold=True, file=sys.stderr)
with click.progressbar(
current_search.delete(ignore=[400, 404] if force else None),
length=current_search.number_of_indexes) as bar:
for name, response in bar:
bar.label = name |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(index_name, force, verbose):
"""Delete index by its name.""" |
result = current_search_client.indices.delete(
index=index_name,
ignore=[400, 404] if force else None,
)
if verbose:
click.echo(json.dumps(result)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def put(index_name, doc_type, identifier, body, force, verbose):
"""Index input data.""" |
result = current_search_client.index(
index=index_name,
doc_type=doc_type or index_name,
id=identifier,
body=json.load(body),
op_type='index' if force or identifier is None else 'create',
)
if verbose:
click.echo(json.dumps(result)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_records(self, ids):
"""Return records by their identifiers. :param ids: A list of record identifier. :returns: A list of records. """ |
return self.query(Ids(values=[str(id_) for id_ in ids])) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def faceted_search(cls, query=None, filters=None, search=None):
"""Return faceted search instance with defaults set. :param query: Elastic DSL query object (``Q``). :param filters: Dictionary with selected facet values. :param search: An instance of ``Search`` class. (default: ``cls()``). """ |
search_ = search or cls()
class RecordsFacetedSearch(FacetedSearch):
"""Pass defaults from ``cls.Meta`` object."""
index = prefix_index(app=current_app, index=search_._index[0])
doc_types = getattr(search_.Meta, 'doc_types', ['_all'])
fields = getattr(search_.Meta, 'fields', ('*', ))
facets = getattr(search_.Meta, 'facets', {})
def search(self):
"""Use ``search`` or ``cls()`` instead of default Search."""
# Later versions of `elasticsearch-dsl` (>=5.1.0) changed the
# Elasticsearch FacetedResponse class constructor signature.
if ES_VERSION[0] > 2:
return search_.response_class(FacetedResponse)
return search_.response_class(partial(FacetedResponse, self))
return RecordsFacetedSearch(query=query, filters=filters or {}) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def with_preference_param(self):
"""Add the preference param to the ES request and return a new Search. The preference param avoids the bouncing effect with multiple replicas, documented on ES documentation. See: https://www.elastic.co/guide/en/elasticsearch/guide/current /_search_options.html#_preference for more information. """ |
user_hash = self._get_user_hash()
if user_hash:
return self.params(preference=user_hash)
return self |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_user_agent(self):
"""Retrieve the request's User-Agent, if available. Taken from Flask Login utils.py. """ |
user_agent = request.headers.get('User-Agent')
if user_agent:
user_agent = user_agent.encode('utf-8')
return user_agent or '' |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_user_hash(self):
"""Calculate a digest based on request's User-Agent and IP address.""" |
if request:
user_hash = '{ip}-{ua}'.format(ip=request.remote_addr,
ua=self._get_user_agent())
alg = hashlib.md5()
alg.update(user_hash.encode('utf8'))
return alg.hexdigest()
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def beautify(filename=None, json_str=None):
"""Beautify JSON string or file. Keyword arguments: :param filename: use its contents as json string instead of json_str param. :param json_str: json string to be beautified. """ |
if filename is not None:
with open(filename) as json_file:
json_str = json.load(json_file)
return json.dumps(json_str, indent=4, sort_keys=True) |
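For reference, the core call the helper above wraps, applied to an inline object rather than a file:

import json

# Keys come back sorted and indented, mirroring beautify()'s output.
print(json.dumps({'b': 1, 'a': [1, 2]}, indent=4, sort_keys=True))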
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace(pretty, old_str, new_str):
""" Replace strings giving some info on where the replacement was done """ |
out_str = ''
line_number = 1
changes = 0
for line in pretty.splitlines(keepends=True):
new_line = line.replace(old_str, new_str)
if line.find(old_str) != -1:
logging.debug('%s', line_number)
logging.debug('< %s', line)
logging.debug('> %s', new_line)
changes += 1
out_str += new_line
line_number += 1
logging.info('Total changes(%s): %s', old_str, changes)
return out_str |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def receive_connection():
"""Wait for and then return a connected socket.. Opens a TCP connection on port 8080, and waits for a single client. """ |
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("localhost", 8080))
server.listen(1)
client = server.accept()[0]
server.close()
return client |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_message(client, message):
"""Send message to client and close the connection.""" |
print(message)
client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
client.close() |
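A hedged sketch of how the two helpers above are typically combined to catch an OAuth redirect on localhost; it assumes both functions are in scope and that the redirect carries state and code query parameters:

# Hedged sketch; naive parsing of the HTTP request line, for illustration only.
client = receive_connection()
data = client.recv(1024).decode("utf-8")
# "GET /?state=...&code=... HTTP/1.1" -> query string -> dict of params
param_tokens = data.split(" ", 2)[1].split("?", 1)[1].split("&")
params = dict(token.split("=") for token in param_tokens)
send_message(client, "Received code: {}".format(params.get("code", "")))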
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def watch(logger_name, level=DEBUG, out=stdout):
""" Quick wrapper for using the Watcher. :param logger_name: name of logger to watch :param level: minimum log level to show (default INFO) :param out: where to send output (default stdout) :return: Watcher instance """ |
watcher = Watcher(logger_name)
watcher.watch(level, out)
return watcher |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_user_agent():
""" Obtain the default user agent string sent to the server after a successful handshake. """ |
from sys import platform, version_info
template = "neobolt/{} Python/{}.{}.{}-{}-{} ({})"
fields = (version,) + tuple(version_info) + (platform,)
return template.format(*fields) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def import_best(c_module, py_module):
""" Import the best available module, with C preferred to pure Python. """ |
from importlib import import_module
from os import getenv
pure_python = getenv("PURE_PYTHON", "")
if pure_python:
return import_module(py_module)
else:
try:
return import_module(c_module)
except ImportError:
return import_module(py_module) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hydrate(self, values):
""" Convert PackStream values into native values. """ |
def hydrate_(obj):
if isinstance(obj, Structure):
try:
f = self.hydration_functions[obj.tag]
except KeyError:
# If we don't recognise the structure type, just return it as-is
return obj
else:
return f(*map(hydrate_, obj.fields))
elif isinstance(obj, list):
return list(map(hydrate_, obj))
elif isinstance(obj, dict):
return {key: hydrate_(value) for key, value in obj.items()}
else:
return obj
return tuple(map(hydrate_, values)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorize_url(self, duration, scopes, state, implicit=False):
"""Return the URL used out-of-band to grant access to your application. :param duration: Either ``permanent`` or ``temporary``. ``temporary`` authorizations generate access tokens that last only 1 hour. ``permanent`` authorizations additionally generate a refresh token that can be indefinitely used to generate new hour-long access tokens. Only ``temporary`` can be specified if ``implicit`` is set to ``True``. :param scopes: A list of OAuth scopes to request authorization for. :param state: A string that will be reflected in the callback to ``redirect_uri``. This value should be temporarily unique to the client for whom the URL was generated for. :param implicit: (optional) Use the implicit grant flow (default: False). This flow is only available for UntrustedAuthenticators. """ |
if self.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
if implicit and not isinstance(self, UntrustedAuthenticator):
raise InvalidInvocation(
"Only UntrustedAuthentictor instances can "
"use the implicit grant flow."
)
if implicit and duration != "temporary":
raise InvalidInvocation(
"The implicit grant flow only supports "
"temporary access tokens."
)
params = {
"client_id": self.client_id,
"duration": duration,
"redirect_uri": self.redirect_uri,
"response_type": "token" if implicit else "code",
"scope": " ".join(scopes),
"state": state,
}
url = self._requestor.reddit_url + const.AUTHORIZATION_PATH
request = Request("GET", url, params=params)
return request.prepare().url |
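A hedged usage sketch with prawcore's public classes; the constructor signatures shown are assumptions based on typical usage, and the client credentials are placeholders:

# Hedged usage sketch; credentials and redirect URI are placeholders.
import prawcore

requestor = prawcore.Requestor("oauth-url-demo by u/example")
authenticator = prawcore.TrustedAuthenticator(
    requestor, "CLIENT_ID", "CLIENT_SECRET",
    redirect_uri="http://localhost:8080")
print(authenticator.authorize_url("permanent", ["identity"], "unique-state"))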
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def revoke_token(self, token, token_type=None):
"""Ask Reddit to revoke the provided token. :param token: The access or refresh token to revoke. :param token_type: (Optional) When provided, hint to Reddit what the token type is for a possible efficiency gain. The value can be either ``access_token`` or ``refresh_token``. """ |
data = {"token": token}
if token_type is not None:
data["token_type_hint"] = token_type
url = self._requestor.reddit_url + const.REVOKE_TOKEN_PATH
self._post(url, success_status=codes["no_content"], **data) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorize(self, code):
"""Obtain and set authorization tokens based on ``code``. :param code: The code obtained by an out-of-band authorization request to Reddit. """ |
if self._authenticator.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
self._request_token(
code=code,
grant_type="authorization_code",
redirect_uri=self._authenticator.redirect_uri,
) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def refresh(self):
"""Obtain a new access token from the refresh_token.""" |
if self.refresh_token is None:
raise InvalidInvocation("refresh token not provided")
self._request_token(
grant_type="refresh_token", refresh_token=self.refresh_token
) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def refresh(self):
"""Obtain a new access token.""" |
grant_type = "https://oauth.reddit.com/grants/installed_client"
self._request_token(grant_type=grant_type, device_id=self._device_id) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def refresh(self):
"""Obtain a new personal-use script type access token.""" |
self._request_token(
grant_type="password",
username=self._username,
password=self._password,
) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def request(self, *args, **kwargs):
"""Issue the HTTP request capturing any errors that may occur.""" |
try:
return self._http.request(*args, timeout=TIMEOUT, **kwargs)
except Exception as exc:
raise RequestException(exc, args, kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _jamo_to_hangul_char(lead, vowel, tail=0):
"""Return the Hangul character for the given jamo characters. """ |
lead = ord(lead) - _JAMO_LEAD_OFFSET
vowel = ord(vowel) - _JAMO_VOWEL_OFFSET
tail = ord(tail) - _JAMO_TAIL_OFFSET if tail else 0
return chr(tail + (vowel - 1) * 28 + (lead - 1) * 588 + _JAMO_OFFSET) |
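A self-contained worked example of the standard Hangul composition arithmetic that the function above implements, written with 0-based lead/vowel/tail indices:

# syllable = 0xAC00 + lead_index * 588 + vowel_index * 28 + tail_index
# For HAN (U+D55C): lead ㅎ (index 18), vowel ㅏ (index 0), tail ㄴ (index 4)
assert chr(0xAC00 + 18 * 588 + 0 * 28 + 4) == '한'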
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_unicode_name(char):
"""Fetch the unicode name for jamo characters. """ |
if char not in _JAMO_TO_NAME.keys() and char not in _HCJ_TO_NAME.keys():
raise InvalidJamoError("Not jamo or nameless jamo character", char)
else:
if is_hcj(char):
return _HCJ_TO_NAME[char]
return _JAMO_TO_NAME[char] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_jamo(character):
"""Test if a single character is a jamo character. Valid jamo includes all modern and archaic jamo, as well as all HCJ. Non-assigned code points are invalid. """ |
code = ord(character)
return 0x1100 <= code <= 0x11FF or\
0xA960 <= code <= 0xA97C or\
0xD7B0 <= code <= 0xD7C6 or 0xD7CB <= code <= 0xD7FB or\
is_hcj(character) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_jamo_compound(character):
"""Test if a single character is a compound, i.e., a consonant cluster, double consonant, or dipthong. """ |
if len(character) != 1:
return False
# Consider instead:
# raise TypeError('is_jamo_compound() expected a single character')
if is_jamo(character):
return character in JAMO_COMPOUNDS
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_jamo_class(jamo):
"""Determine if a jamo character is a lead, vowel, or tail. Integers and U+11xx characters are valid arguments. HCJ consonants are not valid here. get_jamo_class should return the class ["lead" | "vowel" | "tail"] of a given character or integer. Note: jamo class directly corresponds to the Unicode 7.0 specification, thus includes filler characters as having a class. """ |
# TODO: Perhaps raise a separate error for U+3xxx jamo.
if jamo in JAMO_LEADS or jamo == chr(0x115F):
return "lead"
if jamo in JAMO_VOWELS or jamo == chr(0x1160) or\
0x314F <= ord(jamo) <= 0x3163:
return "vowel"
if jamo in JAMO_TAILS:
return "tail"
else:
raise InvalidJamoError("Invalid or classless jamo argument.", jamo) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hangul_to_jamo(hangul_string):
"""Convert a string of Hangul to jamo. Arguments may be iterables of characters. hangul_to_jamo should split every Hangul character into U+11xx jamo characters for any given string. Non-hangul characters are not changed. hangul_to_jamo is the generator version of h2j, the string version. """ |
return (_ for _ in
chain.from_iterable(_hangul_char_to_jamo(_) for _ in
hangul_string)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def jamo_to_hangul(lead, vowel, tail=''):
"""Return the Hangul character for the given jamo input. Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters, or HCJ are valid inputs. Outputs a one-character Hangul string. This function is identical to j2h. """ |
# Internally, we convert everything to a jamo char,
# then pass it to _jamo_to_hangul_char
lead = hcj_to_jamo(lead, "lead")
vowel = hcj_to_jamo(vowel, "vowel")
if not tail or ord(tail) == 0:
tail = None
elif is_hcj(tail):
tail = hcj_to_jamo(tail, "tail")
if (is_jamo(lead) and get_jamo_class(lead) == "lead") and\
(is_jamo(vowel) and get_jamo_class(vowel) == "vowel") and\
((not tail) or (is_jamo(tail) and get_jamo_class(tail) == "tail")):
result = _jamo_to_hangul_char(lead, vowel, tail)
if is_hangul_char(result):
return result
raise InvalidJamoError("Could not synthesize characters to Hangul.",
'\x00') |
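A hedged usage sketch via the public jamo package, where this function is exposed as j2h and the inverse as the h2j string helper; the import names are believed correct but should be treated as assumptions:

# Hedged usage sketch; codepoints spelled out to avoid lookalike characters.
from jamo import h2j, j2h  # assumed public API of the jamo package

assert j2h('ㅎ', 'ㅏ', 'ㄴ') == '한'
assert h2j('한') == chr(0x1112) + chr(0x1161) + chr(0x11AB)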
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compose_jamo(*parts):
"""Return the compound jamo for the given jamo input. Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters, or HCJ are valid inputs. Outputs a one-character jamo string. """ |
# Internally, we convert everything to a jamo char,
# then pass it to _jamo_to_hangul_char
# NOTE: Relies on hcj_to_jamo not strictly requiring "position" arg.
for p in parts:
if not (type(p) == str and len(p) == 1 and 2 <= len(parts) <= 3):
raise TypeError("compose_jamo() expected 2-3 single characters " +
"but received " + str(parts),
'\x00')
hcparts = [j2hcj(_) for _ in parts]
hcparts = tuple(hcparts)
if hcparts in _COMPONENTS_REVERSE_LOOKUP:
return _COMPONENTS_REVERSE_LOOKUP[hcparts]
raise InvalidJamoError(
"Could not synthesize characters to compound: " + ", ".join(
str(_) + "(U+" + str(hex(ord(_)))[2:] +
")" for _ in parts), '\x00') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def synth_hangul(string):
"""Convert jamo characters in a string into hcj as much as possible.""" |
raise NotImplementedError
return ''.join([''.join(''.join(jamo_to_hcj(_)) for _ in string)]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorization_error_class(response):
"""Return an exception instance that maps to the OAuth Error. :param response: The HTTP response containing a www-authenticate error. """ |
message = response.headers.get("www-authenticate")
if message:
error = message.replace('"', "").rsplit("=", 1)[1]
else:
error = response.status_code
return _auth_error_mapping[error](response) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _last_bookmark(b0, b1):
""" Return the latest of two bookmarks by looking for the maximum integer value following the last colon in the bookmark string. """ |
n = [None, None]
_, _, n[0] = b0.rpartition(":")
_, _, n[1] = b1.rpartition(":")
for i in range(2):
try:
n[i] = int(n[i])
except ValueError:
raise ValueError("Invalid bookmark: {}".format(b0))
return b0 if n[0] > n[1] else b1 |
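A self-contained illustration of the comparison rule above, using made-up bookmark strings whose final colon-separated field is an integer:

# Made-up bookmarks; only the integer after the last ':' matters here.
b0, b1 = "example:bookmark:12", "example:bookmark:7"
assert int(b0.rpartition(":")[2]) > int(b1.rpartition(":")[2])  # b0 is "latest"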
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(address, **config):
""" Connect and perform a handshake and return a valid Connection object, assuming a protocol version can be agreed. """ |
ssl_context = make_ssl_context(**config)
last_error = None
# Establish a connection to the host and port specified
# Catches refused connections see:
# https://docs.python.org/2/library/errno.html
log_debug("[#0000] C: <RESOLVE> %s", address)
resolver = Resolver(custom_resolver=config.get("resolver"))
resolver.addresses.append(address)
resolver.custom_resolve()
resolver.dns_resolve()
for resolved_address in resolver.addresses:
try:
s = _connect(resolved_address, **config)
s, der_encoded_server_certificate = _secure(s, address[0], ssl_context)
connection = _handshake(s, resolved_address, der_encoded_server_certificate, **config)
except Exception as error:
last_error = error
else:
return connection
if last_error is None:
raise ServiceUnavailable("Failed to resolve addresses for %s" % address)
else:
raise last_error |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _append(self, signature, fields=(), response=None):
""" Add a message to the outgoing queue. :arg signature: the signature of the message :arg fields: the fields of the message as a tuple :arg response: a response object to handle callbacks """ |
self.packer.pack_struct(signature, fields)
self.output_buffer.chunk()
self.output_buffer.chunk()
self.responses.append(response) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
""" Add a RESET message to the outgoing queue, send it and consume all remaining messages. """ |
def fail(metadata):
raise ProtocolError("RESET failed %r" % metadata)
log_debug("[#%04X] C: RESET", self.local_port)
self._append(b"\x0F", response=Response(self, on_failure=fail))
self.sync() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send(self):
""" Send all queued messages to the server. """ |
data = self.output_buffer.view()
if not data:
return
if self.closed():
raise self.Error("Failed to write to closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to write to defunct connection {!r}".format(self.server.address))
self.socket.sendall(data)
self.output_buffer.clear() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _fetch(self):
""" Receive at least one message from the server, if available. :return: 2-tuple of number of detail messages and number of summary messages fetched """ |
if self.closed():
raise self.Error("Failed to read from closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to read from defunct connection {!r}".format(self.server.address))
if not self.responses:
return 0, 0
self._receive()
details, summary_signature, summary_metadata = self._unpack()
if details:
log_debug("[#%04X] S: RECORD * %d", self.local_port, len(details)) # TODO
self.responses[0].on_records(details)
if summary_signature is None:
return len(details), 0
response = self.responses.popleft()
response.complete = True
if summary_signature == b"\x70":
log_debug("[#%04X] S: SUCCESS %r", self.local_port, summary_metadata)
response.on_success(summary_metadata or {})
elif summary_signature == b"\x7E":
self._last_run_statement = None
log_debug("[#%04X] S: IGNORED", self.local_port)
response.on_ignored(summary_metadata or {})
elif summary_signature == b"\x7F":
self._last_run_statement = None
log_debug("[#%04X] S: FAILURE %r", self.local_port, summary_metadata)
response.on_failure(summary_metadata or {})
else:
self._last_run_statement = None
raise ProtocolError("Unexpected response message with signature %02X" % summary_signature)
return len(details), 1 |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sync(self):
""" Send and fetch all outstanding messages. :return: 2-tuple of number of detail messages and number of summary messages fetched """ |
self.send()
detail_count = summary_count = 0
while self.responses:
response = self.responses[0]
while not response.complete:
detail_delta, summary_delta = self.fetch()
detail_count += detail_delta
summary_count += summary_delta
return detail_count, summary_count |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def acquire_direct(self, address):
""" Acquire a connection to a given address from the pool. The address supplied should always be an IP address, not a host name. This method is thread safe. """ |
if self.closed():
raise ServiceUnavailable("Connection pool closed")
with self.lock:
try:
connections = self.connections[address]
except KeyError:
connections = self.connections[address] = deque()
connection_acquisition_start_timestamp = perf_counter()
while True:
# try to find a free connection in pool
for connection in list(connections):
if connection.closed() or connection.defunct() or connection.timedout():
connections.remove(connection)
continue
if not connection.in_use:
connection.in_use = True
return connection
# all connections in pool are in-use
infinite_connection_pool = (self._max_connection_pool_size < 0 or
self._max_connection_pool_size == float("inf"))
can_create_new_connection = infinite_connection_pool or len(connections) < self._max_connection_pool_size
if can_create_new_connection:
try:
connection = self.connector(address)
except ServiceUnavailable:
self.remove(address)
raise
else:
connection.pool = self
connection.in_use = True
connections.append(connection)
return connection
# failed to obtain a connection because the pool is full and has no free connections
span_timeout = self._connection_acquisition_timeout - (perf_counter() - connection_acquisition_start_timestamp)
if span_timeout > 0:
self.cond.wait(span_timeout)
# if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot
# tell if the condition is notified or timed out when we come to this line
if self._connection_acquisition_timeout <= (perf_counter() - connection_acquisition_start_timestamp):
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(
self._connection_acquisition_timeout))
else:
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(self._connection_acquisition_timeout)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def release(self, connection):
""" Release a connection back into the pool. This method is thread safe. """ |
with self.lock:
connection.in_use = False
self.cond.notify_all() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def in_use_connection_count(self, address):
""" Count the number of connections currently in use to a given address. """ |
try:
connections = self.connections[address]
except KeyError:
return 0
else:
return sum(1 if connection.in_use else 0 for connection in connections) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deactivate(self, address):
""" Deactivate an address from the connection pool, if present, closing all idle connection to that address """ |
with self.lock:
try:
connections = self.connections[address]
except KeyError: # already removed from the connection pool
return
for conn in list(connections):
if not conn.in_use:
connections.remove(conn)
try:
conn.close()
except IOError:
pass
if not connections:
self.remove(address) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove(self, address):
""" Remove an address from the connection pool, if present, closing all connections to that address. """ |
with self.lock:
for connection in self.connections.pop(address, ()):
try:
connection.close()
except IOError:
pass |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self):
""" Close all connections and empty the pool. This method is thread safe. """ |
if self._closed:
return
try:
with self.lock:
if not self._closed:
self._closed = True
for address in list(self.connections):
self.remove(address)
except TypeError as e:
pass |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_records(self, records):
""" Called when one or more RECORD messages have been received. """ |
handler = self.handlers.get("on_records")
if callable(handler):
handler(records) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_success(self, metadata):
""" Called when a SUCCESS message has been received. """ |
handler = self.handlers.get("on_success")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_failure(self, metadata):
""" Called when a FAILURE message has been received. """ |
self.connection.reset()
handler = self.handlers.get("on_failure")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
raise CypherError.hydrate(**metadata) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_ignored(self, metadata=None):
""" Called when an IGNORED message has been received. """ |
handler = self.handlers.get("on_ignored")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler() |
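A hedged sketch of how a handlers mapping for these callbacks might be assembled; the keys follow the names looked up above, while the callables themselves are placeholders.

handlers = {
    "on_records": lambda records: [print(r) for r in records],
    "on_success": lambda metadata: print("success:", metadata),
    "on_failure": lambda metadata: print("failure:", metadata),
    "on_ignored": lambda metadata=None: print("ignored"),
    "on_summary": lambda: print("summary received"),
}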
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cached_property(prop):
""" A replacement for the property decorator that will only compute the attribute's value on the first call and serve a cached copy from then on. """ |
def cache_wrapper(self):
if not hasattr(self, "_cache"):
self._cache = {}
if prop.__name__ not in self._cache:
return_value = prop(self)
if isgenerator(return_value):
return_value = tuple(return_value)
self._cache[prop.__name__] = return_value
return self._cache[prop.__name__]
return property(cache_wrapper) |
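A small usage sketch for the decorator above, assuming cached_property (and its isgenerator import from inspect) is in scope; the Report class is made up for illustration.

class Report:
    @cached_property
    def lines(self):
        print("computing once")
        return (n * n for n in range(3))  # generator gets materialised to a tuple

r = Report()
print(r.lines)  # prints "computing once", then (0, 1, 4)
print(r.lines)  # served from r._cache, no recomputation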
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _convert_value_to_native(value):
""" Converts pysnmp objects into native Python objects. """ |
if isinstance(value, Counter32):
return int(value.prettyPrint())
if isinstance(value, Counter64):
return int(value.prettyPrint())
if isinstance(value, Gauge32):
return int(value.prettyPrint())
if isinstance(value, Integer):
return int(value.prettyPrint())
if isinstance(value, Integer32):
return int(value.prettyPrint())
if isinstance(value, Unsigned32):
return int(value.prettyPrint())
if isinstance(value, IpAddress):
return str(value.prettyPrint())
if isinstance(value, OctetString):
try:
return value.asOctets().decode(value.encoding)
except UnicodeDecodeError:
return value.asOctets()
if isinstance(value, TimeTicks):
return timedelta(seconds=int(value.prettyPrint()) / 100.0)
return value |
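A hedged usage sketch, assuming pysnmp is installed and the helper above is importable; exact prettyPrint behaviour can vary between pysnmp versions.

from pysnmp.proto.rfc1902 import Counter32, OctetString

print(_convert_value_to_native(Counter32(42)))        # -> 42 (int)
print(_convert_value_to_native(OctetString("eth0")))  # -> 'eth0' (str)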
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, oid):
""" Get a single OID value. """ |
snmpsecurity = self._get_snmp_security()
try:
engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.getCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
oid,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
_, value = objects[0]
value = _convert_value_to_native(value)
return value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def table(self, oid, columns=None, column_value_mapping=None, non_repeaters=0, max_repetitions=20, fetch_all_columns=True):
""" Get a table of values with the given OID prefix. """ |
snmpsecurity = self._get_snmp_security()
base_oid = oid.strip(".")
if not fetch_all_columns and not columns:
raise ValueError("please use the columns argument to "
"indicate which columns to fetch")
if fetch_all_columns:
columns_to_fetch = [""]
else:
columns_to_fetch = ["." + str(col_id) for col_id in columns.keys()]
full_obj_table = []
for col in columns_to_fetch:
try:
engine_error, pdu_error, pdu_error_index, obj_table = self._cmdgen.bulkCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
non_repeaters,
max_repetitions,
oid + col,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
# remove any trailing rows from the next subtree
try:
while not str(obj_table[-1][0][0].getOid()).lstrip(".").startswith(
base_oid + col + "."
):
obj_table.pop()
except IndexError:
pass
# append this column to full result
full_obj_table += obj_table
t = Table(columns=columns, column_value_mapping=column_value_mapping)
for row in full_obj_table:
for name, value in row:
oid = str(name.getOid()).strip(".")
value = _convert_value_to_native(value)
column, row_id = oid[len(base_oid) + 1:].split(".", 1)
t._add_value(int(column), row_id, value)
return t |
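A hedged sketch of a call to table(); the instance construction, the ifTable OID and the column ids are assumptions chosen for illustration, not documented defaults.

# `snmp` is assumed to be an instance of the surrounding class, already
# configured with host, port, credentials, timeout and retries.
interfaces = snmp.table(
    "1.3.6.1.2.1.2.2.1",                      # ifTable (illustrative OID)
    columns={2: "name", 8: "oper_status"},    # ifDescr, ifOperStatus
    column_value_mapping={"oper_status": {1: "up", 2: "down"}},
    fetch_all_columns=False,                  # only walk the two listed columns
)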
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_parser():
"""Load parser for command line arguments. It parses argv/input into args variable. """ |
desc = Colors.LIGHTBLUE + textwrap.dedent(
'''\
Welcome to
_ _ _
__ _ _ _| |_ ___ _ __ _ _ | |_ ___ _ __ _ __ ___ _ __ | |_
/ _` | | | | __/ _ \ | '_ \| | | | | __/ _ \| '__| '__/ _ \ '_ \| __|
| (_| | |_| | || (_) | | |_) | |_| | | || (_) | | | | | __/ | | | |_
\__,_|\__,_|\__\___/____| .__/ \__, |___\__\___/|_| |_| \___|_| |_|\__|
|_____|_| |___/_____|
------------------------------------
auto_py_torrent is an automated tool for downloading files by obtaining
torrents or magnets from any of the supported tracker pages that the
user can choose from.
Its goal is to make it easier for users to find the files they want
and download them instantly.
The auto_py_torrent command lets the user choose between two modes,
best_rated and list, select one of the torrent tracking pages for
multimedia content, and finally enter the text to search for.
------------------------------------
''') + Colors.ENDC
usage_info = Colors.LGREEN + textwrap.dedent(
'''\
Use "%(prog)s --help" for more information.
Examples:
use "%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH # generic.
use "%(prog)s 0 0 "The simpsons" # best rated.
use "%(prog)s 1 0 "The simpsons" # list rated.
Mode options:
0: best_rated. # Download the most rated file.
1: list. # Get a list, and select one of them.
Page list options:
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
''') + Colors.ENDC
epi = Colors.LIGHTPURPLE + textwrap.dedent(
'''\
-> Thanks for using auto_py_torrent!
''') + Colors.ENDC
# Parent and only parser.
parser = argparse.ArgumentParser(
add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
usage=usage_info,
description=desc,
epilog=epi)
parser.add_argument('mode', action='store',
choices=range(len(MODES)),
type=int,
help='Select mode of file download.\n'
' e.g: 0(rated) or 1(list).')
parser.add_argument('torr_page', action='store',
choices=range(len(TORRENTS)),
type=int,
help='Select tracking page to download from.\n'
' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.')
parser.add_argument('str_search', action='store',
type=str,
help='Input torrent string to search.\n'
' e.g: "String search"')
return(parser) |
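A quick smoke test of the parser, assuming MODES and TORRENTS are defined at module level as the choices ranges imply.

parser = get_parser()
args = parser.parse_args(["1", "0", "The simpsons"])
print(args.mode, args.torr_page, args.str_search)  # -> 1 0 The simpsons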
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def insert(args):
"""Insert args values into instance variables.""" |
string_search = args.str_search
mode_search = MODES[args.mode]
page = list(TORRENTS[args.torr_page].keys())[0]
key_search = TORRENTS[args.torr_page][page]['key_search']
torrent_page = TORRENTS[args.torr_page][page]['page']
domain = TORRENTS[args.torr_page][page]['domain']
return([args, string_search, mode_search, page,
key_search, torrent_page, domain]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run_it():
"""Search and download torrents until the user says it so.""" |
initialize()
parser = get_parser()
args = None
first_parse = True
while(True):
if first_parse is True:
first_parse = False
args = parser.parse_args()
else:
print(textwrap.dedent(
'''\
Search again, just as at the beginning.
-- You can choose either best rated or list mode.
-- This time you can enter the search string without double quotes.
Remember the list mode options!
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
'''))
print('Or.. if you want to exit just write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
input_parse = input('>> ').replace("'", "").replace('"', '')
if input_parse in ['Q', 'q']:
sys.exit(1)
args = parser.parse_args(input_parse.split(' ', 2))
if args.str_search.strip() == "":
print('Please insert an appropriate non-empty string.')
else:
auto = AutoPy(*insert(args))
auto.get_content()
auto.select_torrent()
auto.download_torrent() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def open_magnet(self):
"""Open magnet according to os.""" |
if sys.platform.startswith('linux'):
subprocess.Popen(['xdg-open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
elif sys.platform.startswith('win32'):
os.startfile(self.magnet)
elif sys.platform.startswith('cygwin'):
os.startfile(self.magnet)
elif sys.platform.startswith('darwin'):
subprocess.Popen(['open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
subprocess.Popen(['xdg-open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_magnet(self, url):
"""Get magnet from torrent page. Url already got domain.""" |
content_most_rated = requests.get(url)
rated_soup = BeautifulSoup(content_most_rated.content, 'lxml')
if self.page == 'torrent_project':
self.magnet = rated_soup.find(
'a', href=True, text=re.compile('Download'))['href']
elif self.page == 'the_pirate_bay':
self.magnet = rated_soup.find(
'a', href=True, text=re.compile('Get this torrent'))['href']
elif self.page == '1337x':
div1337 = rated_soup.find(
'div', {'class': 'torrent-category-detail'})
self.magnet = div1337.find('a', href=re.compile('magnet'))['href']
elif self.page == 'isohunt':
self.magnet = rated_soup.find(
'a', href=re.compile('magnet'))['href']
else:
print('Wrong page to get magnet!')
sys.exit(1) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def download_torrent(self):
"""Download torrent. Rated implies download the unique best rated torrent found. Otherwise: get the magnet and download it. """ |
try:
if self.back_to_menu is True:
return
if self.found_torrents is False:
print('Nothing found.')
return
if self.mode_search == 'best_rated':
print('Downloading..')
self.open_magnet()
elif self.mode_search == 'list':
if self.selected is not None:
# t_p, pirate and 1337x got magnet inside, else direct.
if self.page in ['eztv', 'limetorrents']:
self.magnet = self.hrefs[int(self.selected)]
print('Downloading..')
self.open_magnet()
elif self.page in ['the_pirate_bay',
'torrent_project',
'1337x',
'isohunt']:
url = self.hrefs[int(self.selected)]
self.get_magnet(url)
print('Downloading..')
self.open_magnet()
else:
print('Bad selected page.')
else:
print('Nothing selected.')
sys.exit(1)
except Exception:
print(traceback.format_exc())
sys.exit(0) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def handle_select(self):
"""Handle user's input in list mode.""" |
self.selected = input('>> ')
if self.selected in ['Q', 'q']:
sys.exit(1)
elif self.selected in ['B', 'b']:
self.back_to_menu = True
return True
elif is_num(self.selected):
if 0 <= int(self.selected) <= len(self.hrefs) - 1:
self.back_to_menu = False
return True
else:
print(Colors.FAIL +
'Wrong index. ' +
'Please select an appropriate index or another option.' +
Colors.ENDC)
return False
else:
print(Colors.FAIL +
'Invalid input. ' +
'Please enter an appropriate value or another option.' +
Colors.ENDC)
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def select_torrent(self):
"""Select torrent. First check if specific element/info is obtained in content_page. Specify to user if it wants best rated torrent or select one from list. If the user wants best rated: Directly obtain magnet/torrent. Else: build table with all data and enable the user select the torrent. """ |
try:
self.found_torrents = not bool(self.key_search in
self.content_page.text)
if not self.found_torrents:
print('No torrents found.')
sys.exit(1)
self.soupify()
if self.mode_search == 'list':
self.build_table()
if len(self.hrefs) == 1:
print('Press "0" to download it.')
elif len(self.hrefs) >= 2:
print('\nSelect one of the following torrents. ' +
'Enter a number between: 0 and ' +
str(len(self.hrefs) - 1))
print('If you want to exit write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
print('If you want to go back to menu and search again write "' +
Colors.LGREEN + 'B' + Colors.ENDC + '" or "' +
Colors.LGREEN + 'b' + Colors.ENDC + '".')
while not(self.picked_choice):
self.picked_choice = self.handle_select()
except Exception:
print('ERROR select_torrent: ')
logging.error(traceback.format_exc())
sys.exit(0) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_url(self):
"""Build appropiate encoded URL. This implies the same way of searching a torrent as in the page itself. """ |
url = requests.utils.requote_uri(
self.torrent_page + self.string_search)
if self.page == '1337x':
return(url + '/1/')
elif self.page == 'limetorrents':
return(url + '/')
else:
return(url) |
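A small illustration of the encoding step; the page URL is a hypothetical example.

import requests

print(requests.utils.requote_uri("https://1337x.to/search/" + "The simpsons"))
# -> https://1337x.to/search/The%20simpsons  (the 1337x branch then appends '/1/')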
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_content(self):
"""Get content of the page through url.""" |
url = self.build_url()
try:
self.content_page = requests.get(url)
if self.content_page.status_code != requests.codes.ok:
self.content_page.raise_for_status()
except requests.exceptions.RequestException as ex:
logging.info('A requests exception has occurred: ' + str(ex))
logging.error(traceback.format_exc())
sys.exit(0) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _recycle(self):
""" Reclaim buffer space before the origin. Note: modifies buffer size """ |
origin = self._origin
if origin == 0:
return False
available = self._extent - origin
self._data[:available] = self._data[origin:self._extent]
self._extent = available
self._origin = 0
#log_debug("Recycled %d bytes" % origin)
return True |
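A stand-alone illustration of the compaction idea outside the class; the byte values are made up.

data = bytearray(b"xxxxABCDEF")  # 4 already-consumed bytes sit before the origin
origin, extent = 4, 10
available = extent - origin
data[:available] = data[origin:extent]  # shift the live bytes to the front
origin, extent = 0, available
print(bytes(data[:extent]))  # b'ABCDEF' now starts at offset 0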
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def frame_message(self):
""" Construct a frame around the first complete message in the buffer. """ |
if self._frame is not None:
self.discard_message()
panes = []
p = origin = self._origin
extent = self._extent
while p < extent:
available = extent - p
if available < 2:
break
chunk_size, = struct_unpack(">H", self._view[p:(p + 2)])
p += 2
if chunk_size == 0:
self._limit = p
self._frame = MessageFrame(memoryview(self._view[origin:self._limit]), panes)
return True
q = p + chunk_size
panes.append((p - origin, q - origin))
p = q
return False |
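A worked example of the chunk layout the loop above expects (Bolt-style framing): each chunk is prefixed by a 2-byte big-endian size and the message ends with a zero-size chunk.

import struct

payload = b"hello world"
buf = struct.pack(">H", len(payload)) + payload + struct.pack(">H", 0)
# Scanning buf from origin 0, the loop reads size 11, records the pane
# (2, 13), then hits the zero-size chunk and frames the message, so
# panes == [(2, 13)] and the frame spans the full 15 bytes.
print(len(buf))  # 15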
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def call(self, request_function, set_header_callback, *args, **kwargs):
"""Rate limit the call to request_function. :param request_function: A function call that returns an HTTP response object. :param set_header_callback: A callback function used to set the request headers. This callback is called after any necessary sleep time occurs. :param *args: The positional arguments to ``request_function``. :param **kwargs: The keyword arguments to ``request_function``. """ |
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delay(self):
"""Sleep for an amount of time to remain under the rate limit.""" |
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(self, response_headers):
"""Update the state of the rate limiter based on the response headers. This method should only be called following a HTTP request to reddit. Response headers that do not contain x-ratelimit fields will be treated as a single request. This behavior is to error on the safe-side as such responses should trigger exceptions that indicate invalid behavior. """ |
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
) |
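A worked example of the pacing arithmetic above; the header values are made up.

# Suppose the previous update left remaining == 300.0 and the new response
# reports 297 remaining, 303 used and 60 seconds until reset:
prev_remaining, remaining, seconds_to_reset = 300.0, 297.0, 60
estimated_clients = prev_remaining - remaining            # 3.0
pace = estimated_clients * seconds_to_reset / remaining   # ~0.606 s
print(round(pace, 3))  # next_request_timestamp ≈ now + 0.606, capped at the reset time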
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def custom_resolve(self):
""" If a custom resolver is defined, perform custom resolution on the contained addresses. :return: """ |
if not callable(self.custom_resolver):
return
new_addresses = []
for address in self.addresses:
for new_address in self.custom_resolver(address):
new_addresses.append(new_address)
self.addresses = new_addresses |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dns_resolve(self):
""" Perform DNS resolution on the contained addresses. :return: """ |
new_addresses = []
for address in self.addresses:
try:
info = getaddrinfo(address[0], address[1], 0, SOCK_STREAM, IPPROTO_TCP)
except gaierror:
raise AddressError("Cannot resolve address {!r}".format(address))
else:
for _, _, _, _, address in info:
if len(address) == 4 and address[3] != 0:
# skip any IPv6 addresses with a non-zero scope id
# as these appear to cause problems on some platforms
continue
new_addresses.append(address)
self.addresses = new_addresses |
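A stand-alone sketch of the same getaddrinfo call; host and port are placeholders.

from socket import getaddrinfo, SOCK_STREAM, IPPROTO_TCP

for family, _, _, _, sockaddr in getaddrinfo("localhost", 7687, 0, SOCK_STREAM, IPPROTO_TCP):
    print(family, sockaddr)  # e.g. AddressFamily.AF_INET ('127.0.0.1', 7687)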
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def matching_line(lines, keyword):
""" Returns the first matching line in a list of lines. @see match() """ |
for line in lines:
matching = match(line, keyword)
if matching is not None:
return matching
return None |
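A hypothetical usage, assuming the match() helper referenced by the docstring returns the text following the keyword when the line contains it, and None otherwise.

lines = ['ESSID:"home"', "Mode:Managed", "Frequency:2.462 GHz"]
print(matching_line(lines, "Mode:"))  # -> 'Managed' under that assumption about match()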