text_prompt
stringlengths 157
13.1k
| code_prompt
stringlengths 7
19.8k
⌀ |
---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_class(self, ioclass):
    """Attach a VNXIOClass instance to this policy.

    Silently does nothing when an IO class with the same name is
    already part of the policy.

    .. note:: due to a VNX limitation, the policy needs to be stopped
       before it can be modified.
    """
    existing = self.ioclasses
    # skip duplicates -- the policy cannot hold two classes of one name
    if ioclass.name not in existing.name:
        existing.append(ioclass)
        self.modify(new_ioclasses=existing)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_class(self, ioclass):
    """Remove a VNXIOClass instance from this policy.

    Parameters
    ----------
    ioclass : VNXIOClass
        The IO class to drop; matching is done by name.
    """
    current_ioclasses = self.ioclasses
    # Materialize to a list: on Python 3 ``filter`` returns a lazy,
    # one-shot iterator, which would break consumers that iterate the
    # new class list more than once.
    new_ioclasses = [c for c in current_ioclasses if c.name != ioclass.name]
    self.modify(new_ioclasses=new_ioclasses)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace_lun(self, *lun_list):
    """Replace the existing LUNs with ``lun_list``.

    LUNs not in ``lun_list`` are scheduled for removal, new ones for
    addition; returns whatever ``modify`` returns.
    """
    to_add = self._prepare_luns_add(lun_list)
    to_remove = self._prepare_luns_remove(lun_list, False)
    return self.modify(lun_add=to_add, lun_remove=to_remove)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_lun(self, add_luns=None, remove_luns=None):
    """Update the LUNs in the CG: add ``add_luns``, drop ``remove_luns``.

    Returns RESP_OK without calling ``modify`` when both inputs are
    empty or None.
    """
    if add_luns or remove_luns:
        return self.modify(
            lun_add=self._prepare_luns_add(add_luns),
            lun_remove=self._prepare_luns_remove(remove_luns, True))
    log.debug("Empty add_luns and remove_luns passed in, "
              "skip update_lun.")
    return RESP_OK
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clean(inst):
    """Replace FPMU fill values with NaN, in place.

    Parameters
    ----------
    inst : pysat.Instrument
        Instrument whose loaded data is modified in place.

    Returns
    -------
    None
    """
    # known fill values: -999. (Te) and -9.9999998e+30 (Ni)
    for fill_value in (-999., -9.9999998e+30):
        inst.data.replace(fill_value, np.nan, inplace=True)
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _attach_files(self, files_info):
"""Attaches info returned by instrument list_files routine to Instrument object. """ |
if not files_info.empty:
if (len(files_info.index.unique()) != len(files_info)):
estr = 'WARNING! Duplicate datetimes in provided file '
estr = '{:s}information.\nKeeping one of each '.format(estr)
estr = '{:s}of the duplicates, dropping the rest.'.format(estr)
print(estr)
print(files_info.index.get_duplicates())
idx = np.unique(files_info.index, return_index=True)
files_info = files_info.ix[idx[1]]
#raise ValueError('List of files must have unique datetimes.')
self.files = files_info.sort_index()
date = files_info.index[0]
self.start_date = pds.datetime(date.year, date.month, date.day)
date = files_info.index[-1]
self.stop_date = pds.datetime(date.year, date.month, date.day)
else:
self.start_date = None
self.stop_date = None
# convert to object type
# necessary if Series is empty, enables == checks with strings
self.files = files_info.astype(np.dtype('O')) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _store(self):
"""Store currently loaded filelist for instrument onto filesystem""" |
name = self.stored_file_name
# check if current file data is different than stored file list
# if so, move file list to previous file list, store current to file
# if not, do nothing
stored_files = self._load()
if len(stored_files) != len(self.files):
# # of items is different, things are new
new_flag = True
elif len(stored_files) == len(self.files):
# # of items equal, check specifically for equality
if stored_files.eq(self.files).all():
new_flag = False
else:
# not equal, there are new files
new_flag = True
if new_flag:
if self.write_to_disk:
stored_files.to_csv(os.path.join(self.home_path,
'previous_'+name),
date_format='%Y-%m-%d %H:%M:%S.%f')
self.files.to_csv(os.path.join(self.home_path, name),
date_format='%Y-%m-%d %H:%M:%S.%f')
else:
self._previous_file_list = stored_files
self._current_file_list = self.files.copy()
return |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _load(self, prev_version=False):
"""Load stored filelist and return as Pandas Series Parameters prev_version : boolean if True, will load previous version of file list Returns ------- pandas.Series Full path file names are indexed by datetime Series is empty if there is no file list to load """ |
fname = self.stored_file_name
if prev_version:
fname = os.path.join(self.home_path, 'previous_'+fname)
else:
fname = os.path.join(self.home_path, fname)
if os.path.isfile(fname) and (os.path.getsize(fname) > 0):
if self.write_to_disk:
return pds.read_csv(fname, index_col=0, parse_dates=True,
squeeze=True, header=None)
else:
# grab files from memory
if prev_version:
return self._previous_file_list
else:
return self._current_file_list
else:
return pds.Series([], dtype='a') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _remove_data_dir_path(self, inp=None):
# import string """Remove the data directory path from filenames""" |
# need to add a check in here to make sure data_dir path is actually in
# the filename
if inp is not None:
split_str = os.path.join(self.data_path, '')
return inp.apply(lambda x: x.split(split_str)[-1]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def concat(self, other, strict=False):
    """Concatenate metadata from another Meta object into a copy of self.

    Parameters
    ----------
    other : Meta
        Meta object to be concatenated onto a copy of this object.
    strict : bool
        If True, raise RuntimeError when `other` contains a variable
        name (1D or nD) already present here.

    Returns
    -------
    Meta
        New Meta object with the merged metadata; self is unchanged.

    Notes
    -----
    Units and name labels of self are applied to `other` before merging.
    """
    mdata = self.copy()
    # checks
    if strict:
        for key in other.keys():
            if key in mdata:
                raise RuntimeError('Duplicated keys (variable names) ' +
                                   'across Meta objects in keys().')
        for key in other.keys_nD():
            if key in mdata:
                raise RuntimeError('Duplicated keys (variable names) across '
                                   'Meta objects in keys_nD().')
    # make sure labels between the two objects are the same
    other_updated = self.apply_default_labels(other)
    # concat 1D metadata in data frames to copy of
    # current metadata
    # <<<<<<< ho_meta_fix
    # NOTE(review): keys are taken from `other_updated` but values are
    # read from the original `other` -- together with the conflict
    # markers below this looks like merge-conflict residue; confirm
    # which object should supply the values.
    for key in other_updated.keys():
        mdata.data.loc[key] = other.data.loc[key]
    # add together higher order data
    for key in other_updated.keys_nD():
        mdata.ho_data[key] = other.ho_data[key]
    # =======
    # for key in other_updated.keys():
    #     mdata[key] = other_updated[key]
    # # add together higher order data
    # for key in other_updated.keys_nD():
    #     mdata[key] = other_updated[key]
    return mdata
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pop(self, name):
    """Remove metadata for ``name`` and return it.

    Parameters
    ----------
    name : str
        Variable name.

    Returns
    -------
    pandas.Series or higher-order metadata
        The metadata removed for the variable.

    Raises
    ------
    KeyError
        If ``name`` is not present in the metadata.
    """
    if name not in self:
        raise KeyError('Key not present in metadata variables')
    # recover the stored (case-preserved) spelling of the variable name
    case_name = self.var_case_name(name)
    if case_name in self.keys():
        # standard (1D) metadata lives in self.data
        result = self[case_name]
        self.data.drop(case_name, inplace=True, axis=0)
    else:
        # higher-order metadata lives in the ho_data dict
        result = self.ho_data.pop(case_name)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def transfer_attributes_to_instrument(self, inst, strict_names=False):
    """Transfer non-standard attributes in Meta to an Instrument object.

    pysat's load_netCDF and similar routines are only able to attach
    netCDF4 attributes to a Meta object.  This routine copies those
    extra attributes onto the supplied Instrument.  Names belonging to
    the base Meta or Instrument classes, and names starting with an
    underscore, are never transferred.

    Parameters
    ----------
    inst : pysat.Instrument
        Instrument object to transfer attributes to (modified in place).
    strict_names : bool
        If True, raise RuntimeError when ``inst`` already has an
        attribute of the same name; if False, overwrite it.

    Raises
    ------
    RuntimeError
        When ``strict_names`` is True and a name collision occurs.
    """
    # attributes that belong to the base Instrument class
    banned = inst._base_attr
    # attributes that belong to the base Meta class
    base_attrb = self._base_attr
    # collect candidate attributes attached to this Meta instance
    adict = {}
    transfer_key = []
    for key in dir(self):
        if (key not in banned and key not in base_attrb
                and not key.startswith('_')):
            adict[key] = self.__getattribute__(key)
            transfer_key.append(key)
    # snapshot of the Instrument's attributes, used to detect collisions
    inst_attr = dir(inst)
    for key in transfer_key:
        if key in banned:
            continue
        if key not in inst_attr or not strict_names:
            # no collision, or collisions explicitly allowed: copy over
            inst.__setattr__(key, adict[key])
        else:
            # fixed: the original message was missing the space before
            # 'attached', producing e.g. "Attribute fooattached ..."
            raise RuntimeError('Attribute ' + key +
                               ' attached to Meta object can not be '
                               + 'transferred as it already exists'
                               + ' in the Instrument object.')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_csv(cls, name=None, col_names=None, sep=None, **kwargs):
    """Create an instrument metadata object from a csv file.

    Parameters
    ----------
    name : str
        Absolute filename of a csv file, or the name of a file stored
        in the pysat instruments location.
    col_names : list-like or None
        Column names in the csv and resulting meta object; must contain
        at least 'name', 'long_name' and 'units'.  Those three are
        assumed when None.
    sep : str or None
        Column separator for the csv file; ',' when None.
    **kwargs
        Passed through to ``pandas.read_csv``.

    Raises
    ------
    ValueError
        On bad ``col_names``/``name`` input, an unresolvable path, or
        an empty csv file.
    """
    import pysat
    req_names = ['name', 'long_name', 'units']
    if col_names is None:
        col_names = req_names
    elif not all([i in col_names for i in req_names]):
        raise ValueError('col_names must include name, long_name, units.')
    if sep is None:
        sep = ','
    if name is None:
        raise ValueError('Must supply an instrument name or file path.')
    elif not isinstance(name, str):
        raise ValueError('keyword name must be related to a string')
    elif not os.path.isfile(name):
        # not an existing file: first look in the standard pysat
        # instruments directory ...
        candidate = os.path.join(pysat.__path__[0], 'instruments', name)
        if os.path.isfile(candidate):
            name = candidate
        else:
            # ... then fall back to resolving an absolute path
            candidate = os.path.abspath(name)
            if not os.path.isfile(candidate):
                raise ValueError("Unable to create valid file path.")
            name = candidate
    mdata = pds.read_csv(name, names=col_names, sep=sep, **kwargs)
    if mdata.empty:
        raise ValueError('Unable to retrieve information from ' + name)
    # make sure the data name is the index
    mdata.index = mdata['name']
    del mdata['name']
    return cls(metadata=mdata)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nonraw_instance(receiver):
    """Signal-receiver decorator that replaces a raw instance with the
    complete instance fetched from the database.

    When the signal fires with ``raw=True`` (e.g. during fixture
    loading) the instance is re-fetched by primary key on the correct
    database before the wrapped receiver runs.
    """
    @wraps(receiver)
    def wrapper(sender, instance, raw, using, **kwargs):
        obj = instance
        if raw:
            # fixture-provided instances are incomplete: reload from db
            obj = sender._default_manager.using(using).get(pk=obj.pk)
        return receiver(sender=sender, raw=raw, instance=obj, using=using,
                        **kwargs)
    return wrapper
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def base_definition_pre_delete(sender, instance, **kwargs):
    """Stash data needed by the post_delete handler before it is lost.

    When the deleted base is an abstract model, the rendered model
    class is stored on ``instance._state`` so the matching post_delete
    handler can drop the inherited columns.
    """
    # see CASCADE_MARK_ORIGIN's docstring: skip the incremental column
    # removal when the whole model definition is being deleted anyway
    origin = popattr(instance._state, '_cascade_deletion_origin', None)
    if origin == 'model_def':
        return
    base = instance.base
    if base and issubclass(base, models.Model) and base._meta.abstract:
        instance._state._deletion = instance.model_def.model_class().render_state()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def base_definition_post_delete(sender, instance, **kwargs):
    """Drop columns inherited from a deleted abstract model base."""
    if hasattr(instance._state, '_deletion'):
        # The rendered model class was stashed by the pre_delete
        # handler; flatten the abstract base since Django migrations
        # can't deal with abstract bases directly.
        model = popattr(instance._state, '_deletion')
        for inherited_field in instance.base._meta.fields:
            perform_ddl('remove_field', model, inherited_field)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def raw_field_definition_proxy_post_save(sender, instance, raw, **kwargs):
    """Forward `field_definition_post_save` for proxy definitions
    loaded from fixtures.

    Proxy field definitions loaded raw bypass the usual
    `field_definition_post_save` signal; dispatch it explicitly here.
    """
    if not raw:
        return
    model_class = instance.content_type.model_class()
    opts = model_class._meta
    if opts.proxy and opts.concrete_model is sender:
        field_definition_post_save(
            sender=model_class, instance=instance.type_cast(), raw=raw,
            **kwargs
        )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def field_definition_post_save(sender, instance, created, raw, **kwargs):
    """Apply schema changes after a FieldDefinition is saved.

    Connected by all FieldDefinition subclasses (see the comment in
    FieldDefinitionBase for details).  On creation the column is added
    (unless suppressed via ``_add_column``); on update the column is
    altered from its pre-save state.
    """
    model_class = instance.model_def.model_class().render_state()
    field = instance.construct_for_migrate()
    field.model = model_class
    if created:
        # honor a default value stashed on _state by earlier machinery
        if hasattr(instance._state, '_creation_default_value'):
            field.default = instance._state._creation_default_value
            delattr(instance._state, '_creation_default_value')
        add_column = popattr(instance._state, '_add_column', True)
        if add_column:
            perform_ddl('add_field', model_class, field)
        # If the field definition is raw we must re-create the model class
        # since ModelDefinitionAttribute.save won't be called
        if raw:
            instance.model_def.model_class().mark_as_obsolete()
    else:
        # altering: compare against the field captured before save
        old_field = instance._state._pre_save_field
        delattr(instance._state, '_pre_save_field')
        perform_ddl('alter_field', model_class, old_field, field, strict=True)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _app_cache_deepcopy(obj):
    """Recursively deepcopy Django's model-cache state.

    defaultdicts are copied wholesale; plain dicts and lists are
    rebuilt element by element; AppConfig instances are cloned without
    running their constructor; everything else is returned as-is.
    """
    if isinstance(obj, defaultdict):
        return deepcopy(obj)
    if isinstance(obj, dict):
        pairs = ((_app_cache_deepcopy(key), _app_cache_deepcopy(val))
                 for key, val in obj.items())
        return type(obj)(pairs)
    if isinstance(obj, list):
        return [_app_cache_deepcopy(item) for item in obj]
    if isinstance(obj, AppConfig):
        # clone without invoking AppConfig.__init__
        clone = Empty()
        clone.__class__ = AppConfig
        clone.__dict__ = _app_cache_deepcopy(obj.__dict__)
        return clone
    return obj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def app_cache_restorer():
    """Context manager that restores the app registry's model-cache
    state to what it was on entry.

    Yields the deep-copied ``apps.__dict__`` snapshot taken on entry.

    NOTE(review): no @contextmanager decorator is visible in this
    chunk; presumably it is applied where this generator is used.
    """
    state = _app_cache_deepcopy(apps.__dict__)
    try:
        yield state
    finally:
        with apps_lock():
            # restore the snapshot wholesale, then ...
            apps.__dict__ = state
            # Rebind the app registry models cache to
            # individual app config ones.
            for app_conf in apps.get_app_configs():
                app_conf.models = apps.all_models[app_conf.label]
            apps.clear_cache()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def CASCADE_MARK_ORIGIN(collector, field, sub_objs, using):
    """Custom on_delete handler that tags cascading deletions with
    their origin field.

    Performs the normal CASCADE collection, then sets
    ``_cascade_deletion_origin`` on the ``_state`` of every related
    object that will be deleted.  Used on
    ModelDefinitionAttribute.model_def so that deleting a
    ModelDefinition can skip field_definition_post_delete /
    base_definition_post_delete and avoid incremental column removal
    before the whole table is dropped.
    """
    CASCADE(collector, field, sub_objs, using)
    for related_obj in (sub_objs or ()):
        related_obj._state._cascade_deletion_origin = field.name
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mutable_model_prepared(signal, sender, definition, existing_model_class, **kwargs):
    """Wire up dependencies when a mutable model class is prepared.

    Collects every concrete model the obsolete and the new model class
    refer to, registers ``sender`` as a dependency of the mutable
    models it points at, marks definitions referring to this one as
    dependencies, and finally clears stale related-object caches.
    """
    referenced_models = set()
    # models the obsolete model class was referring to
    if existing_model_class:
        for old_field in existing_model_class._meta.local_fields:
            if isinstance(old_field, RelatedField):
                target = get_remote_field_model(old_field)
                if not isinstance(target, string_types):
                    referenced_models.add(target)
    # register sender as a dependency of every mutable model it refers to
    for new_field in sender._meta.local_fields:
        if isinstance(new_field, RelatedField):
            target = get_remote_field_model(new_field)
            if not isinstance(target, string_types):
                referenced_models.add(target)
                if (issubclass(target, MutableModel) and
                        target._definition != sender._definition):
                    target._dependencies.add(sender._definition)
    # definitions referring to this one become dependencies of sender
    referring_defs = ModelDefinition.objects.filter(
        Q(fielddefinitions__foreignkeydefinition__to=definition) |
        Q(fielddefinitions__manytomanyfielddefinition__to=definition)
    ).distinct()
    for model_def in referring_defs:
        if model_def != definition:
            # generate the model class from its definition and depend on it
            sender._dependencies.add(model_def.model_class()._definition)
    # drop the stale related-object caches of everything we touched
    for model_class in referenced_models:
        clear_opts_related_cache(model_class)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _model_class_from_pk(definition_cls, definition_pk):
""" Helper used to unpickle MutableModel model class from their definition pk. """ |
try:
return definition_cls.objects.get(pk=definition_pk).model_class()
except definition_cls.DoesNotExist:
pass |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clean(self):
    """Validate that ``self.lookup`` is a resolvable ordering lookup.

    '?' (random ordering) is always valid.  Otherwise each
    LOOKUP_SEP-separated part must name a field on the model; only a
    ForeignKey may be traversed further.

    Raises
    ------
    ValidationError
        Keyed on 'lookup' when any part of the lookup is invalid.
    """
    if self.lookup == '?':  # Randomly sort
        return
    else:
        lookups = self.lookup.split(LOOKUP_SEP)
        opts = self.model_def.model_class()._meta
        valid = True
        while len(lookups):
            lookup = lookups.pop(0)
            try:
                field = opts.get_field(lookup)
            except FieldDoesNotExist:
                valid = False
            else:
                if isinstance(field, models.ForeignKey):
                    # descend into the related model for the next part
                    opts = get_remote_field_model(field)._meta
                elif len(lookups):  # Cannot go any deeper
                    valid = False
            finally:
                # the finally clause raises on the first invalid part,
                # aborting the walk immediately
                if not valid:
                    msg = _("This field doesn't exist")
                    raise ValidationError({'lookup': [msg]})
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lorentz_deriv(xyz, t0, sigma=10., beta=8./3, rho=28.0):
    """Compute the time-derivative of a Lorenz system.

    Parameters
    ----------
    xyz : sequence of 3 floats
        Current (x, y, z) state.
    t0 : float
        Current time (unused; kept for ODE-integrator compatibility).
    sigma, beta, rho : float
        Lorenz system parameters.

    Returns
    -------
    list of 3 floats
        [dx/dt, dy/dt, dz/dt].
    """
    # Tuple-parameter unpacking ``def f((x, y, z), ...)`` is Python 2
    # only syntax (removed by PEP 3113 -- SyntaxError on Python 3);
    # unpack explicitly instead.  Callers pass the same tuple as before.
    x, y, z = xyz
    return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_series_args(topics, fields):
    '''Map each plot field to the (topic, field-key) pair to examine.

    A field matches a topic when it starts with the topic name; the
    stored key is the remainder of the field past the topic and the
    separator character.
    '''
    return {
        field: (topic, field[len(topic) + 1:])
        for field in fields
        for topic in topics
        if field.startswith(topic)
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bag_to_dataframe(bag_name, include=None, exclude=None, parse_header=False, seconds=False):
    '''
    Read in a rosbag file and create a pandas data frame that
    is indexed by the time the message was recorded in the bag.

    :bag_name: String name for the bag file
    :include: None, String, or List Topics to include in the dataframe
    if None all topics added, if string it is used as regular
    expression, if list that list is used.
    :exclude: None, String, or List Topics to be removed from those added
    using the include option using set difference. If None no topics
    removed. If String it is treated as a regular expression. A list
    removes those in the list.
    :parse_header: Bool, if True the message header fields are parsed
    into columns as well.
    :seconds: time index is in seconds
    :returns: a pandas dataframe object

    .. note:: NOTE(review): uses ``dict.iteritems`` -- this is
       Python 2 only code; port to ``items()`` for Python 3.
    '''
    # get list of topics to parse
    yaml_info = get_bag_info(bag_name)
    bag_topics = get_topics(yaml_info)
    bag_topics = prune_topics(bag_topics, include, exclude)
    length = get_length(bag_topics, yaml_info)
    msgs_to_read, msg_type = get_msg_info(yaml_info, bag_topics, parse_header)
    bag = rosbag.Bag(bag_name)
    dmap = create_data_map(msgs_to_read)
    # create datastore: one pre-sized column array per message field
    datastore = {}
    for topic in dmap.keys():
        for f, key in dmap[topic].iteritems():
            t = msg_type[topic][f]
            if isinstance(t, int) or isinstance(t, float):
                # numeric scalar field: NaN-filled float column
                arr = np.empty(length)
                arr.fill(np.NAN)
            elif isinstance(t, list):
                # list field: expand into one numbered column per element
                arr = np.empty(length)
                arr.fill(np.NAN)
                for i in range(len(t)):
                    key_i = '{0}{1}'.format(key, i)
                    datastore[key_i] = arr.copy()
                continue
            else:
                # anything else (strings, nested types) stored as objects
                arr = np.empty(length, dtype=np.object)
            datastore[key] = arr
    # create the index
    index = np.empty(length)
    index.fill(np.NAN)
    # all of the data is loaded
    for idx, (topic, msg, mt) in enumerate(bag.read_messages(topics=bag_topics)):
        try:
            # prefer the message header stamp when the message has one
            if seconds:
                index[idx] = msg.header.stamp.to_sec()
            else:
                index[idx] = msg.header.stamp.to_nsec()
        except:
            # no header: fall back to the bag record time
            if seconds:
                index[idx] = mt.to_sec()
            else:
                index[idx] = mt.to_nsec()
        fields = dmap[topic]
        for f, key in fields.iteritems():
            try:
                d = get_message_data(msg, f)
                if isinstance(d, tuple):
                    # tuple data goes into the numbered columns
                    for i, val in enumerate(d):
                        key_i = '{0}{1}'.format(key, i)
                        datastore[key_i][idx] = val
                else:
                    datastore[key][idx] = d
            except:
                # NOTE(review): silently drops fields that fail to read;
                # consider at least logging here
                pass
    bag.close()
    # convert the index
    if not seconds:
        index = pd.to_datetime(index, unit='ns')
    # now we have read all of the messages its time to assemble the dataframe
    return pd.DataFrame(data=datastore, index=index)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_data_map(msgs_to_read):
    '''
    Build the topic -> {message field: dataframe column key} map used
    while parsing the bag.  Column keys are the topic key name plus the
    field path, with dots replaced by underscores.
    '''
    dmap = {}
    for topic, field_list in msgs_to_read.items():
        prefix = get_key_name(topic) + '__'
        dmap[topic] = {
            field: (prefix + field).replace('.', '_')
            for field in field_list
        }
    return dmap
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prune_topics(bag_topics, include, exclude):
    '''prune the topics. If include is None add all to the set of topics to
    use if include is a string regex match that string,
    if it is a list use the list
    If exclude is None do nothing, if string remove the topics with regex,
    if it is a list remove those topics'''
    # ``basestring`` only exists on Python 2; fall back to str so this
    # also runs on Python 3 without changing Python 2 behavior
    try:
        string_types = basestring  # noqa: F821 - Python 2
    except NameError:
        string_types = str
    topics_to_use = set()
    # add all of the topics
    if include is None:
        topics_to_use.update(bag_topics)
    elif isinstance(include, string_types):
        check = re.compile(include)
        for t in bag_topics:
            if re.match(check, t) is not None:
                topics_to_use.add(t)
    else:
        try:
            # add all of the includes if it is in the topic
            for topic in include:
                if topic in bag_topics:
                    topics_to_use.add(topic)
        except Exception:
            warnings.warn('Error in topic selection Using All!')
            topics_to_use = set(bag_topics)
    to_remove = set()
    # now exclude the exclusions
    if exclude is None:
        pass
    elif isinstance(exclude, string_types):
        check = re.compile(exclude)
        for t in list(topics_to_use):
            if re.match(check, t) is not None:
                to_remove.add(t)
    else:
        for remove in exclude:
            # fixed: the original tested ``remove in exclude``, which is
            # always true; only drop topics that were actually selected
            if remove in topics_to_use:
                to_remove.add(remove)
    # final set stuff to get topics to use
    topics_to_use = topics_to_use - to_remove
    # return a list for the results
    return list(topics_to_use)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_msg_info(yaml_info, topics, parse_header=True):
    '''
    Get info from all of the messages about what they contain
    and will be added to the dataframe

    :yaml_info: dict (from get_bag_info) with a 'topics' entry
    :topics: iterable of topic names to inspect
    :parse_header: if True the header fields are included
    :returns: (msgs, classes) -- per-topic field path lists and
        per-topic {field path: type} dicts
    '''
    topic_info = yaml_info['topics']
    msgs = {}
    classes = {}
    for topic in topics:
        # removed: unused ``base_key = get_key_name(topic)`` local
        msg_paths = []
        msg_types = {}
        for info in topic_info:
            if info['topic'] == topic:
                msg_class = get_message_class(info['type'])
                if msg_class is None:
                    # fixed typo in user-facing warning: 'skpping'
                    warnings.warn(
                        'Could not find types for ' + topic + ' skipping ')
                else:
                    (msg_paths, msg_types) = get_base_fields(msg_class(), "",
                                                             parse_header)
                msgs[topic] = msg_paths
                classes[topic] = msg_types
    return (msgs, classes)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_topics(yaml_info):
    '''Return the names of all topics recorded in the bag's yaml info.'''
    return [entry['topic'] for entry in yaml_info['topics']]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_base_fields(msg, prefix='', parse_header=True):
    '''Return the full dotted names of every field in the message.

    :msg: a ROS message instance (anything exposing __slots__)
    :prefix: dotted prefix accumulated while recursing into sub-messages
    :parse_header: when False the top-level 'header' slot is skipped
    :returns: (list of field names, {field name: current slot value})
    '''
    field_names = []
    field_types = dict()
    for slot in msg.__slots__:
        value = getattr(msg, slot)
        if not parse_header and slot == 'header':
            continue
        if hasattr(value, '__slots__'):
            # nested message: recurse and merge its fields
            sub_names, sub_types = get_base_fields(
                value, prefix=prefix + slot + '.',
                parse_header=parse_header,
            )
            field_names.extend(sub_names)
            field_types.update(sub_types)
        else:
            field_names.append(prefix + slot)
            field_types[prefix + slot] = value
    return (field_names, field_types)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def jsonify_payload(self):
    """Return the payload serialized as JSON.

    Strings are assumed to be JSON already and returned untouched;
    everything else is dumped with StandardJSONEncoder.
    """
    payload = self.payload
    if isinstance(payload, string_types):
        # Assume already json serialized
        return payload
    return json.dumps(payload, cls=StandardJSONEncoder)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send(self):
    """Send the webhook with retries and return sending metadata.

    POSTs the formatted payload to ``self.url``; any 2xx response is a
    success and stops the retry loop.

    Returns
    -------
    dict
        Sending metadata (attempt count, status code, response body,
        success flag, error, post attributes) merged with the payload
        (and its hash, when present).

    NOTE(review): ``for i, wait in enumerate(range(len(self.attempts) - 1))``
    makes ``wait`` take the values 0, 1, 2, ... seconds -- presumably
    ``self.attempts`` was meant to supply the wait durations themselves;
    confirm the intent before changing it.
    """
    payload = self.payload
    sending_metadata = {'success': False}
    post_attributes = {'timeout': self.timeout}
    if self.custom_headers:
        post_attributes['headers'] = self.custom_headers
    if not post_attributes.get('headers', None):
        post_attributes['headers'] = {}
    post_attributes['headers']['Content-Type'] = self.encoding
    post_attributes['data'] = self.format_payload()
    if self.signing_secret:
        # sign the body so receivers can verify its authenticity
        post_attributes['headers']['x-hub-signature'] = self.create_signature(post_attributes['data'], \
            self.signing_secret)
    for i, wait in enumerate(range(len(self.attempts) - 1)):
        self.attempt = i + 1
        sending_metadata['attempt'] = self.attempt
        try:
            # NOTE(review): debugging print left in -- consider removing
            print(self.url)
            self.response = requests.post(self.url, **post_attributes)
            if sys.version > '3':
                # Converts bytes object to str object in Python 3+
                self.response_content = self.response.content.decode('utf-8')
            else:
                self.response_content = self.response.content
            sending_metadata['status_code'] = self.response.status_code
            # anything with a 200 status code is a success
            if self.response.status_code >= 200 and self.response.status_code < 300:
                # Exit the sender method. Here we provide the payload as a result.
                # This is useful for reporting.
                self.notify("Attempt {}: Successfully sent webhook {}".format(
                    self.attempt, self.hash_value)
                )
                sending_metadata['response'] = self.response_content
                sending_metadata['success'] = True
                break
            else:
                self.error = "Status code (%d). Message: %s" % (self.response.status_code, self.response.text)
        except Exception as ex:
            # record a synthetic 500-style JSON body for the failure
            err_formatted = str(ex).replace('"',"'")
            sending_metadata['response'] = '{"status_code": 500, "status":"failure","error":"'+err_formatted+'"}'
            self.error = err_formatted
            self.notify("Attempt {}: Could not send webhook {}".format(
                self.attempt, self.hash_value)
            )
        self.notify_debug("Webhook {}. Body: {}".format(
            self.hash_value, self.payload)
        )
        # If last attempt
        if self.attempt == (len(self.attempts) - 1):
            self.notify_error("Failed to send webhook {}. Body: {}".format(
                self.hash_value, self.payload)
            )
        else:
            # Wait a bit before the next attempt
            sleep(wait)
    sending_metadata['error'] = None if sending_metadata['success'] or not self.error else self.error
    sending_metadata['post_attributes'] = post_attributes
    merged_dict = sending_metadata.copy()
    if isinstance(payload, string_types):
        payload = {'payload': payload}
    # Add the hash value if there is one.
    if self.hash_value is not None and len(self.hash_value) > 0:
        payload['hash'] = self.hash_value
    merged_dict.update(payload)
    return merged_dict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generateIdentityKeyPair():
    """Generate an identity key pair.

    Clients should only do this once, at install time.

    @return the generated IdentityKeyPair.
    """
    keyPair = Curve.generateKeyPair()
    publicKey = IdentityKey(keyPair.getPublicKey())
    # removed: the original computed and unhexlified a hard-coded
    # 'serialized' hex blob that was never used (dead code)
    return IdentityKeyPair(publicKey, keyPair.getPrivateKey())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generatePreKeys(start, count):
    """Generate a list of PreKeys.

    Clients should do this at install time, and whenever the list of
    PreKeys stored on the server runs low.  PreKey IDs are shorts, so
    they wrap around ``Medium.MAX_VALUE - 1``; clients should store
    PreKeys in a circular buffer so repeats are as infrequent as
    possible.

    @param start The starting PreKey ID, inclusive.
    @param count The number of PreKeys to generate.
    @return the list of generated PreKeyRecords.
    """
    first = start - 1
    return [
        PreKeyRecord(((first + offset) % (Medium.MAX_VALUE - 1)) + 1,
                     Curve.generateKeyPair())
        for offset in range(count)
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_valid_transition(enum, from_value, to_value):
    """Validate that *to_value* is a valid choice and that moving from
    *from_value* to *to_value* is an allowed transition for *enum*.
    """
    validate_available_choice(enum, to_value)
    # Only enums that declare _transitions restrict transitions at all.
    if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
        template = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
        raise InvalidStatusOperationError(template.format(
            enum=enum.__name__,
            from_value=enum.name(from_value),
            to_value=enum.name(to_value) or to_value,
        ))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_available_choice(enum, to_value):
    """Validate that *to_value* is defined as a value in *enum*.

    None is always accepted; non-int values are coerced with int() first.
    """
    if to_value is None:
        return
    # type(...) check (not isinstance) deliberately matches the original
    # semantics, e.g. bools still go through the int() coercion path.
    if type(to_value) is not int:
        try:
            to_value = int(to_value)
        except ValueError:
            message_str = "'{value}' cannot be converted to int"
            message = _(six.text_type(message_str))
            raise InvalidStatusOperationError(message.format(value=to_value))
    valid_values = dict(enum.choices())
    if to_value not in valid_values:
        message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
        raise InvalidStatusOperationError(message.format(value=to_value))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def open(self):
    """Open the connection with the device.

    Establishes the NETCONF session, applies the configured timeout and
    keepalive, binds a fresh Config utility as ``cu``, and optionally
    takes the configuration lock.
    """
    try:
        self.device.open()
    except ConnectTimeoutError as cte:
        # NOTE(review): exceptions have no .message attribute on Python 3 —
        # confirm this runs on Python 2 or that ConnectTimeoutError defines it.
        raise ConnectionException(cte.message)
    self.device.timeout = self.timeout
    # Keepalive is set on the underlying transport of the NETCONF session.
    self.device._conn._session.transport.set_keepalive(self.keepalive)
    if hasattr(self.device, "cu"):
        # make sure to remove the cu attr from previous session
        # ValueError: requested attribute name cu already exists
        del self.device.cu
    self.device.bind(cu=Config)
    if self.config_lock:
        self._lock()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compare_config(self):
    """Return the diff between candidate and running config ('' when none)."""
    diff = self.device.cu.diff()
    return '' if diff is None else diff.strip()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_facts(self):
    """Return facts of the device (vendor, model, serial, OS, uptime, interfaces)."""
    facts = self.device.facts
    # -1 signals "uptime unknown" when the device reports nothing.
    uptime = self.device.uptime or -1
    iface_table = junos_views.junos_iface_table(self.device)
    iface_table.get()
    return {
        'vendor': u'Juniper',
        'model': py23_compat.text_type(facts['model']),
        'serial_number': py23_compat.text_type(facts['serialnumber']),
        'os_version': py23_compat.text_type(facts['version']),
        'hostname': py23_compat.text_type(facts['hostname']),
        'fqdn': py23_compat.text_type(facts['fqdn']),
        'uptime': uptime,
        'interface_list': iface_table.keys(),
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_ntp_peers(self):
    """Return the NTP peers configured on the device as {ip: {}}."""
    table = junos_views.junos_ntp_peers_config_table(self.device)
    table.get()
    entries = table.items()
    if not entries:
        return {}
    return dict((napalm_base.helpers.ip(entry[0]), {}) for entry in entries)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_ntp_servers(self):
    """Return the NTP servers configured on the device as {ip: {}}."""
    table = junos_views.junos_ntp_servers_config_table(self.device)
    table.get()
    entries = table.items()
    if not entries:
        return {}
    return dict((napalm_base.helpers.ip(entry[0]), {}) for entry in entries)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_probes_config(self):
    """Return the configuration of the RPM probes.

    Returns a dict keyed by probe name; each value maps test names to a
    dict with 'probe_type', 'target', 'source', 'probe_count' and
    'test_interval' keys.
    """
    probes = {}
    probes_table = junos_views.junos_rpm_probes_config_table(self.device)
    probes_table.get()
    probes_table_items = probes_table.items()
    for probe_test in probes_table_items:
        # Each entry is (test-name, [(field, value), ...]).
        test_name = py23_compat.text_type(probe_test[0])
        test_details = {
            p[0]: p[1] for p in probe_test[1]
        }
        # Normalise field types; pop() so only the extracted fields remain used.
        probe_name = napalm_base.helpers.convert(
            py23_compat.text_type, test_details.pop('probe_name'))
        target = napalm_base.helpers.convert(
            py23_compat.text_type, test_details.pop('target', ''))
        test_interval = napalm_base.helpers.convert(int, test_details.pop('test_interval', '0'))
        probe_count = napalm_base.helpers.convert(int, test_details.pop('probe_count', '0'))
        probe_type = napalm_base.helpers.convert(
            py23_compat.text_type, test_details.pop('probe_type', ''))
        source = napalm_base.helpers.convert(
            py23_compat.text_type, test_details.pop('source_address', ''))
        if probe_name not in probes.keys():
            probes[probe_name] = {}
        probes[probe_name][test_name] = {
            'probe_type': probe_type,
            'target': target,
            'source': source,
            'probe_count': probe_count,
            'test_interval': test_interval
        }
    return probes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_probes_results(self):
    """Return the results of the RPM probes.

    Returns a dict keyed by probe name; each value maps test names to the
    raw result fields (float timings converted to milliseconds).
    """
    probes_results = {}
    probes_results_table = junos_views.junos_rpm_probes_results_table(self.device)
    probes_results_table.get()
    probes_results_items = probes_results_table.items()
    for probe_result in probes_results_items:
        probe_name = py23_compat.text_type(probe_result[0])
        test_results = {
            p[0]: p[1] for p in probe_result[1]
        }
        test_results['last_test_loss'] = napalm_base.helpers.convert(
            int, test_results.pop('last_test_loss'), 0)
        # Scale every float field; only values change, so mutating while
        # iterating items() is safe (no keys added or removed).
        for test_param_name, test_param_value in test_results.items():
            if isinstance(test_param_value, float):
                test_results[test_param_name] = test_param_value * 1e-3
            # convert from useconds to mseconds
        test_name = test_results.pop('test_name', '')
        source = test_results.get('source', u'')
        if source is None:
            test_results['source'] = u''
        if probe_name not in probes_results.keys():
            probes_results[probe_name] = {}
        probes_results[probe_name][test_name] = test_results
    return probes_results
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def traceroute(self, destination, source=C.TRACEROUTE_SOURCE, ttl=C.TRACEROUTE_TTL, timeout=C.TRACEROUTE_TIMEOUT, vrf=C.TRACEROUTE_VRF):
    """Execute traceroute on the device and return the results.

    On failure returns {'error': ...}; on success returns
    {'success': {ttl: {'probes': {index: {'ip_address', 'host_name', 'rtt'}}}}}
    with rtt in milliseconds.
    """
    traceroute_result = {}
    # calling form RPC does not work properly :(
    # but defined junos_route_instance_table just in case
    # Build the CLI command piece by piece from the optional arguments.
    source_str = ''
    maxttl_str = ''
    wait_str = ''
    vrf_str = ''
    if source:
        source_str = ' source {source}'.format(source=source)
    if ttl:
        maxttl_str = ' ttl {ttl}'.format(ttl=ttl)
    if timeout:
        wait_str = ' wait {timeout}'.format(timeout=timeout)
    if vrf:
        vrf_str = ' routing-instance {vrf}'.format(vrf=vrf)
    traceroute_command = 'traceroute {destination}{source}{maxttl}{wait}{vrf}'.format(
        destination=destination,
        source=source_str,
        maxttl=maxttl_str,
        wait=wait_str,
        vrf=vrf_str
    )
    # make direct RPC call via NETCONF
    traceroute_rpc = E('command', traceroute_command)
    rpc_reply = self.device._conn.rpc(traceroute_rpc)._NCElement__doc
    traceroute_results = rpc_reply.find('.//traceroute-results')
    traceroute_failure = napalm_base.helpers.find_txt(
        traceroute_results, 'traceroute-failure', '')
    error_message = napalm_base.helpers.find_txt(
        traceroute_results, 'rpc-error/error-message', '')
    if traceroute_failure and error_message:
        return {'error': '{}: {}'.format(traceroute_failure, error_message)}
    traceroute_result['success'] = {}
    for hop in traceroute_results.findall('hop'):
        ttl_value = napalm_base.helpers.convert(
            int, napalm_base.helpers.find_txt(hop, 'ttl-value'), 1)
        if ttl_value not in traceroute_result['success']:
            traceroute_result['success'][ttl_value] = {'probes': {}}
        for probe in hop.findall('probe-result'):
            probe_index = napalm_base.helpers.convert(
                int, napalm_base.helpers.find_txt(probe, 'probe-index'), 0)
            # '*' marks an unanswered probe, mirroring CLI output.
            ip_address = napalm_base.helpers.convert(
                napalm_base.helpers.ip, napalm_base.helpers.find_txt(probe, 'ip-address'), '*')
            host_name = py23_compat.text_type(
                napalm_base.helpers.find_txt(probe, 'host-name', '*'))
            rtt = napalm_base.helpers.convert(
                float, napalm_base.helpers.find_txt(probe, 'rtt'), 0) * 1e-3  # ms
            traceroute_result['success'][ttl_value]['probes'][probe_index] = {
                'ip_address': ip_address,
                'host_name': host_name,
                'rtt': rtt
            }
    return traceroute_result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, request, format=None):
    """Remove all auth tokens owned by request.user (log the user out).

    :param request: the authenticated DRF request.
    :param format: DRF content-negotiation suffix (unused).
    :return: 200 Response with a success message.
    """
    # Bulk-delete in one query instead of issuing a DELETE per token.
    Token.objects.filter(user=request.user).delete()
    content = {'success': _('User logged out.')}
    return Response(content, status=status.HTTP_200_OK)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_attrs_to_values(self, response={}):
""" Set attributes to dictionary values so can access via dot notation. """ |
for key in response.keys():
setattr(self, key, response[key]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def doc(self, groups=None, set_location=True, **properties):
    """Add a flask route to autodoc for automatic documentation.

    Any route decorated with this method is added to the list of routes
    documented by the generate() or html() methods. The route always joins
    the 'all' group; *groups* (str or list) adds it to further groups.

    If *set_location* is True the decorated function's source location is
    recorded — this assumes the decorator sits directly above the function.

    Extra keyword *properties* are stored with the route and override any
    non-reserved keys of the dict described in generate()'s docstring.
    """
    def decorator(f):
        # Get previous group list (if any)
        if f in self.func_groups:
            groupset = self.func_groups[f]
        else:
            groupset = set()
        # Set group[s]
        if type(groups) is list:
            groupset.update(groups)
        elif type(groups) is str:
            groupset.add(groups)
        groupset.add('all')
        self.func_groups[f] = groupset
        self.func_props[f] = properties
        # Set location
        if set_location:
            # stack()[1] is the caller's frame: the line where @doc appears.
            caller_frame = inspect.stack()[1]
            self.func_locations[f] = {
                'filename': caller_frame[1],
                'line': caller_frame[2],
            }
        return f
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unsaved_files_dialog( self, all_files=False, with_cancel=True, with_discard=True):
    """Ask the user what to do about images with unsaved metadata.

    Returns True if it is OK to continue with close/quit/etc. (either
    nothing needed saving, the user chose Save, or chose Discard).
    """
    # Only show the dialog if at least one relevant image has changes.
    for image in self.images:
        if image.metadata.changed() and (all_files or image.selected):
            break
    else:
        return True
    dialog = QtWidgets.QMessageBox()
    dialog.setWindowTitle(self.tr('Photini: unsaved data'))
    dialog.setText(self.tr('<h3>Some images have unsaved metadata.</h3>'))
    dialog.setInformativeText(self.tr('Do you want to save your changes?'))
    dialog.setIcon(QtWidgets.QMessageBox.Warning)
    buttons = QtWidgets.QMessageBox.Save
    if with_cancel:
        buttons |= QtWidgets.QMessageBox.Cancel
    if with_discard:
        buttons |= QtWidgets.QMessageBox.Discard
    dialog.setStandardButtons(buttons)
    dialog.setDefaultButton(QtWidgets.QMessageBox.Save)
    result = dialog.exec_()
    if result == QtWidgets.QMessageBox.Save:
        self._save_files()
        return True
    return result == QtWidgets.QMessageBox.Discard
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_ISO_8601(cls, date_string, time_string, tz_string):
    """Sufficiently general ISO 8601 parser.

    Inputs must be in "basic" format, i.e. no '-' or ':' separators.
    See https://en.wikipedia.org/wiki/ISO_8601

    Returns cls((datetime, precision, tz_offset_minutes_or_None)),
    or None when there is not even a complete year.
    """
    # Decode the timezone offset in minutes east of UTC, if present.
    tz_offset = None
    if tz_string:
        minutes = int(tz_string[1:3]) * 60 + int(tz_string[3:])
        tz_offset = -minutes if tz_string[0] == '-' else minutes
    # A time of exactly midnight is treated as "date only, no time".
    if time_string == '000000':
        time_string = ''
        tz_offset = None
    combined = date_string + time_string[:13]
    # Precision counts how many of Y/M/D/H/M/S/fraction are present (max 7).
    precision = min((len(combined) - 2) // 2, 7)
    if precision <= 0:
        return None
    fmt = ''.join(('%Y', '%m', '%d', '%H', '%M', '%S', '.%f')[:precision])
    return cls((datetime.strptime(combined, fmt), precision, tz_offset))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def xpath(self, xpath):
    """Find other nodes by an XPath expression rooted at this node."""
    nodes = []
    # The server returns a comma-separated id list; empty ids are skipped.
    for node_id in self._get_xpath_ids(xpath).split(","):
        if node_id:
            nodes.append(self.get_node_factory().create(node_id))
    return nodes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def css(self, css):
    """Find other nodes by a CSS selector relative to this node."""
    matches = []
    # The server returns a comma-separated id list; empty ids are skipped.
    for node_id in self._get_css_ids(css).split(","):
        if node_id:
            matches.append(self.get_node_factory().create(node_id))
    return matches
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_bool_attr(self, name):
    """Return the value of a boolean HTML attribute like `checked` or `disabled`.

    True when the attribute is present and equals "true" or its own name
    (case-insensitively); False otherwise.
    """
    raw = self.get_attr(name)
    if raw is None:
        return False
    return raw.lower() in ("true", name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_attr(self, name, value):
    """Set the value of an attribute on this node via injected Javascript."""
    # repr() quotes the Python strings so they arrive as JS string literals.
    script = "node.setAttribute({}, {})".format(repr(name), repr(value))
    self.exec_script(script)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def value(self):
    """Return the node's value.

    For multi-selects this is the list of selected option values; for any
    other node it is the server-side "value" invocation.
    """
    if not self.is_multi_select():
        return self._invoke("value")
    selected = []
    for option in self.xpath(".//option"):
        if option["selected"]:
            selected.append(option.value())
    return selected
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_header(self, key, value):
    """Set a HTTP header for future requests."""
    # Header names are normalized (e.g. to User-Agent capitalization).
    normalized = _normalize_header(key)
    self.conn.issue_command("Header", normalized, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def headers(self):
    """Return the last HTTP response headers as (key, value) tuples.

    Header keys are normalized to capitalized form, as in `User-Agent`;
    multi-line values yield one tuple per line.
    """
    raw = self.conn.issue_command("Headers")
    result = []
    for header in raw.split("\r"):
        key, value = header.split(": ", 1)
        normalized = _normalize_header(key)
        for line in value.split("\n"):
            result.append((normalized, line))
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def eval_script(self, expr):
    """Evaluate *expr* as Javascript in the current page and return its value."""
    raw = self.conn.issue_command("Evaluate", expr)
    # Wrap in a JSON array so bare literals parse, then unwrap the result.
    return json.loads("[{}]".format(raw))[0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cookies(self):
    """Return all cookies in cookie string format, one entry per cookie."""
    raw = self.conn.issue_command("GetCookies")
    result = []
    for line in raw.split("\n"):
        stripped = line.strip()
        if stripped:
            result.append(stripped)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_html(self, html, url = None):
    """Load custom HTML into the Webkit session, optionally under a fake URL.

    Scripts and CSS are fetched as if the HTML had been loaded from *url*.
    """
    args = ["SetHtml", html]
    if url:
        args.append(url)
    self.conn.issue_command(*args)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_proxy(self, host = "localhost", port = 0, user = "", password = ""):
    """Use a custom HTTP proxy (with optional credentials) for future requests."""
    self.conn.issue_command("SetProxy", host, port, user, password)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
    """Open and return a new TCP socket connected to this server."""
    # create_connection yields a connected AF_INET/SOCK_STREAM socket for
    # the loopback address, equivalent to manual socket() + connect().
    return socket.create_connection(("127.0.0.1", self._port))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_line(self):
    """Consume and return one line from the stream (newline stripped)."""
    # Keep pulling data until the buffer holds a complete line.
    while b"\n" not in self.buf:
        chunk = self.f.recv(4096)
        if not chunk:
            raise EndOfStreamError()
        self.buf += chunk
    line, _, self.buf = self.buf.partition(b"\n")
    return line
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, n):
    """Consume and return exactly *n* bytes from the stream."""
    # Fill the buffer until it holds at least n bytes.
    while len(self.buf) < n:
        chunk = self.f.recv(4096)
        if not chunk:
            raise EndOfStreamError()
        self.buf += chunk
    result = self.buf[:n]
    self.buf = self.buf[n:]
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_response(self):
""" Reads a complete response packet from the server """ |
result = self.buf.read_line().decode("utf-8")
if not result:
raise NoResponseError("No response received from server.")
msg = self._read_message()
if result != "ok":
raise InvalidResponseError(msg)
return msg |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_message(self):
""" Reads a single size-annotated message from the server """ |
size = int(self.buf.read_line().decode("utf-8"))
return self.buf.read(size).decode("utf-8") |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def log_error(self, error, message, detail=None, strip=4):
    """Append an error entry (message, trimmed traceback, detail) to self.errors."""
    msg = message + ": " + error if message else error
    stack = traceback.format_stack()
    # Trim the frames that belong to this logging machinery itself;
    # the slice direction differs between Python 2 and 3.
    if sys.version_info >= (3, 0):
        stack = stack[:-strip]
    else:
        stack = stack[strip:]
    self.errors.append({
        'message': msg,
        'traceback': stack,
        'detail': detail,
    })
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def equal(self, a, b, message=None):
    """Return True when a == b; otherwise log an error and return False."""
    if a == b:
        return True
    self.log_error("{} != {}".format(str(a), str(b)), message)
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_not_none(self, a, message=None):
    """Return True when *a* is not None; otherwise log an error and return False."""
    if a is not None:
        return True
    self.log_error("{} is None".format(str(a)), message)
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def enable(self):
    """Power on the TCS34725 chip and turn on its RGBC ADC."""
    self._write8(TCS34725_ENABLE, TCS34725_ENABLE_PON)
    # Short warm-up delay between power-on and enabling the ADC.
    time.sleep(0.01)
    self._write8(TCS34725_ENABLE, TCS34725_ENABLE_PON | TCS34725_ENABLE_AEN)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_interrupt(self, enabled):
    """Enable or disable interrupts by setting *enabled* to True or False."""
    flags = self._readU8(TCS34725_ENABLE)
    if enabled:
        flags |= TCS34725_ENABLE_AIEN
    else:
        flags &= ~TCS34725_ENABLE_AIEN
    self._write8(TCS34725_ENABLE, flags)
    time.sleep(1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _dict_to_html_attributes(d):
""" Converts a dictionary to a string of ``key=\"value\"`` pairs. If ``None`` is provided as the dictionary an empty string is returned, i.e. no html attributes are generated. Parameters d : dict Dictionary to convert to html attributes. Returns ------- str where ``N`` is the total number of ``(key, value)`` pairs. """ |
if d is None:
return ""
return "".join(" {}=\"{}\"".format(key, value) for key, value in iter(d.items())) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _list_of_dicts_to_column_headers(list_of_dicts):
""" Detects if all entries in an list of ``dict``'s have identical keys. Returns the keys if all keys are the same and ``None`` otherwise. Parameters list_of_dicts : list List of dictionaries to test for identical keys. Returns ------- list or None List of column headers if all dictionary posessed the same keys. Returns ``None`` otherwise. """ |
if len(list_of_dicts) < 2 or not all(isinstance(item, dict) for item in list_of_dicts):
return None
column_headers = list_of_dicts[0].keys()
for d in list_of_dicts[1:]:
if len(d.keys()) != len(column_headers) or not all(header in d for header in column_headers):
return None
return column_headers |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _markup(self, entry):
""" Recursively generates HTML for the current entry. Parameters entry : object Object to convert to HTML. Maybe be a single entity or contain multiple and/or nested objects. Returns ------- str String of HTML formatted json. """ |
if entry is None:
return ""
if isinstance(entry, list):
list_markup = "<ul>"
for item in entry:
list_markup += "<li>{:s}</li>".format(self._markup(item))
list_markup += "</ul>"
return list_markup
if isinstance(entry, dict):
return self.convert(entry)
# default to stringifying entry
return str(entry) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _maybe_club(self, list_of_dicts):
    """Club a list of dicts into one table when all keys are identical.

    If every dict in *list_of_dicts* has the same keys, their values are
    rendered under a common header row; otherwise the list falls back to
    the normal recursive markup. Either way the result is wrapped as a
    table cell. (Adapted from a contribution from @muellermichel to
    ``json2html``; see the original docstring's example table.)
    """
    column_headers = JsonConverter._list_of_dicts_to_column_headers(list_of_dicts)
    if column_headers is None:
        # common headers not found, return normal markup
        html_output = self._markup(list_of_dicts)
    else:
        html_output = self._table_opening_tag
        html_output += self._markup_header_row(column_headers)
        # One table row per dict, cells ordered by the shared headers.
        for list_entry in list_of_dicts:
            html_output += "<tr><td>"
            html_output += "</td><td>".join(self._markup(list_entry[column_header]) for column_header in column_headers)
            html_output += "</td></tr>"
        html_output += "</table>"
    return self._markup_table_cell(html_output)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def msg(self, msg=None, ret_r=False):
    """Get or set this code's message.

    With no arguments returns the stored message; otherwise stores *msg*
    and returns self for chaining (ret_r forces setter mode even for None).
    """
    if not (msg or ret_r):
        return self._msg
    self._msg = msg
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def detail(self, detail=None, ret_r=False):
    """Get or set this code's detail.

    With no arguments returns the stored detail; otherwise stores *detail*
    and returns self for chaining (ret_r forces setter mode even for None).
    """
    if not (detail or ret_r):
        return self._detail
    self._detail = detail
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _init(self, clnt):
    """Bind this API to a YunpianClient and cache common configuration."""
    assert clnt, "clnt is None"
    self._clnt = clnt
    self._apikey = clnt.apikey()
    self._version = clnt.conf(YP_VERSION, defval=VERSION_V2)
    self._charset = clnt.conf(HTTP_CHARSET, defval=CHARSET_UTF8)
    # The API name is the last component of the defining module's path,
    # e.g. 'sms' for '...apis.sms'.
    self._name = self.__class__.__module__.rsplit('.', 1)[-1]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_param(self, param, must=(), r=None):
    """Validate that every key in *must* is present in *param*.

    Injects this API's apikey into *param* when missing. On the first
    missing key, sets Code.ARGUMENT_MISSING with a 'missing-<key>' detail
    on the result.

    :param param: request parameter dict (mutated to add APIKEY).
    :param must: iterable of required keys (default changed from a shared
        mutable list ``[]`` to an immutable empty tuple; semantics identical).
    :param r: optional Result to populate; a new one is created when None.
    :return: the Result.
    """
    if APIKEY not in param:
        param[APIKEY] = self.apikey()
    if r is None:
        r = Result()
    for key in must:
        if key not in param:
            r.code(Code.ARGUMENT_MISSING).detail('missing-' + key)
            break
    return r
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def custom_conf(self, conf):
    """Merge custom apikey/http parameters into this client's configuration.

    Returns self for chaining; a falsy *conf* is a no-op.
    """
    if conf:
        self.__conf.update(conf)
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def api(self, name):
    """Return the special API implementation registered under *name*.

    *name* must equal one of the api modules' names (flow, sign, sms,
    tpl, user, voice); the returned instance is bound to this client.

    :raises AssertionError: when *name* is empty or matches no known API.
    """
    assert name, 'name is none'
    # Initialize to None so an unknown name reaches the assert below with
    # an AssertionError, instead of raising UnboundLocalError.
    api = None
    if flow.__name__ == name:
        api = flow.FlowApi()
    elif sign.__name__ == name:
        api = sign.SignApi()
    elif sms.__name__ == name:
        api = sms.SmsApi()
    elif tpl.__name__ == name:
        api = tpl.TplApi()
    elif user.__name__ == name:
        api = user.UserApi()
    elif voice.__name__ == name:
        api = voice.VoiceApi()
    assert api, "not found api-" + name
    api._init(self._clnt)
    return api
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def conf(self, key=None, defval=None):
    """Return the whole YunpianConf when *key* is None, else the value for *key*.

    Falls back to *defval* when the configured value is None/absent.
    """
    if key is None:
        return self._ypconf
    value = self._ypconf.conf(key)
    if value is None:
        return defval
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post(self, url, data, charset=CHARSET_UTF8, headers=None):
    """POST *data* to *url* and return the decoded JSON response body.

    :param url: target URL.
    :param data: form payload.
    :param charset: charset used in the default Content-Type header.
    :param headers: optional extra HTTP headers; the caller's dict is not
        mutated (previously a shared mutable default ``{}`` was updated
        in place, which also mutated any dict the caller passed in).
    :return: the response parsed with json.loads.
    """
    # Copy to avoid the mutable-default pitfall and caller-dict mutation.
    headers = dict(headers) if headers else {}
    headers.setdefault('Api-Lang', 'python')
    headers.setdefault('Content-Type', "application/x-www-form-urlencoded;charset=" + charset)
    rsp = requests.post(url, data, headers=headers,
                        timeout=(int(self.conf(HTTP_CONN_TIMEOUT, '10')),
                                 int(self.conf(HTTP_SO_TIMEOUT, '30'))))
    return json.loads(rsp.text)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def app_resolver(app_name=None, pattern_kwargs=None, name=None):
    '''
    Registers the given app_name with DMP and adds convention-based
    url patterns for it.

    This function is meant to be called in a project's urls.py.
    '''
    urlconf = URLConf(app_name, pattern_kwargs)
    # app_name of None registers the default (prefix-less) app resolver.
    resolver = re_path(
        '^{}/?'.format(app_name) if app_name is not None else '',
        include(urlconf),
        name=urlconf.app_name,
    )
    # this next line is a workaround for Django's URLResolver class not having
    # a `name` attribute, which is expected in Django's technical_404.html.
    resolver.name = getattr(resolver, 'name', name or app_name)
    return resolver
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dmp_paths_for_app(app_name, pattern_kwargs=None, pretty_app_name=None):
    '''Utility function that creates the default convention-based URL
    patterns for an app: page.function/urlparams, page.function,
    page/urlparams, page, and the empty (default page) pattern.'''
    dmp = apps.get_app_config('django_mako_plus')
    # Because these patterns are subpatterns within the app's resolver,
    # we don't include the /app/ in the pattern -- it's already been
    # handled by the app's resolver.
    #
    # Also note how the each pattern below defines the four kwargs--
    # either as 1) a regex named group or 2) in kwargs.
    return [
        # page.function/urlparams
        dmp_path(
            r'^(?P<dmp_page>[_a-zA-Z0-9\-]+)\.(?P<dmp_function>[_a-zA-Z0-9\.\-]+)/(?P<dmp_urlparams>.+?)/?$',
            merge_dicts({
                'dmp_app': app_name or dmp.options['DEFAULT_APP'],
            }, pattern_kwargs),
            'DMP /{}/page.function/urlparams'.format(pretty_app_name),
            app_name,
        ),
        # page.function
        dmp_path(
            r'^(?P<dmp_page>[_a-zA-Z0-9\-]+)\.(?P<dmp_function>[_a-zA-Z0-9\.\-]+)/?$',
            merge_dicts({
                'dmp_app': app_name or dmp.options['DEFAULT_APP'],
                'dmp_urlparams': '',
            }, pattern_kwargs),
            'DMP /{}/page.function'.format(pretty_app_name),
            app_name,
        ),
        # page/urlparams
        dmp_path(
            r'^(?P<dmp_page>[_a-zA-Z0-9\-]+)/(?P<dmp_urlparams>.+?)/?$',
            merge_dicts({
                'dmp_app': app_name or dmp.options['DEFAULT_APP'],
                'dmp_function': 'process_request',
            }, pattern_kwargs),
            'DMP /{}/page/urlparams'.format(pretty_app_name),
            app_name,
        ),
        # page
        dmp_path(
            r'^(?P<dmp_page>[_a-zA-Z0-9\-]+)/?$',
            merge_dicts({
                'dmp_app': app_name or dmp.options['DEFAULT_APP'],
                'dmp_function': 'process_request',
                'dmp_urlparams': '',
            }, pattern_kwargs),
            'DMP /{}/page'.format(pretty_app_name),
            app_name,
        ),
        # empty
        dmp_path(
            r'^$',
            merge_dicts({
                'dmp_app': app_name or dmp.options['DEFAULT_APP'],
                'dmp_function': 'process_request',
                'dmp_urlparams': '',
                'dmp_page': dmp.options['DEFAULT_PAGE'],
            }, pattern_kwargs),
            'DMP /{}'.format(pretty_app_name),
            app_name,
        ),
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dmp_path(regex, kwargs=None, name=None, app_name=None):
    '''
    Creates a DMP-style, convention-based pattern that resolves
    to various view functions based on the 'dmp_page' value.

    The following should exist as 1) regex named groups or
    2) items in the kwargs dict:

        dmp_app         Should resolve to a name in INSTALLED_APPS.
                        If missing, defaults to DEFAULT_APP.
        dmp_page        The page name, which should resolve to a module:
                        project_dir/{dmp_app}/views/{dmp_page}.py
                        If missing, defaults to DEFAULT_PAGE.
        dmp_function    The function name (or View class name) within the module.
                        If missing, defaults to 'process_request'.
        dmp_urlparams   The urlparams string to parse.
                        If missing, defaults to ''.

    The reason for this convenience function is to be similar to
    Django functions like url(), re_path(), and path().
    '''
    return PagePattern(regex, kwargs, name, app_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def alternate_syntax(local, using, **kwargs):
    '''
    A Mako filter that renders a block of text using a different template
    engine than Mako. The named template engine must be listed in
    settings.TEMPLATES. The template context variables are available in the
    embedded template. Specify kwargs to add additional variables created
    within the template.

    This is a kludge that should be used sparingly. The `dmp_include`
    template tag is often a better option.

    Example (assuming django_mustache is installed in settings.py):

        ## Simple expression in Mustache syntax:
        ${ '{{ name }}' | template_syntax(local, 'django_mustache') }

        ## Embedded Mustache code block:
        <%block filter="template_syntax(local, 'django_mustache')">
            {{#repo}}
                <b>{{name}}</b>
            {{/repo}}
        </%block>

    Rendering Django or Jinja2 templates should be done with
    `django_syntax` and `jinja2_syntax` because they don't require the
    `using` parameter.
    '''
    # get the request (the MakoTemplateAdapter above places this in the context)
    request = local.context['request'] if isinstance(local.context, RequestContext) else None
    # get the current Mako template object so we can attach the compiled string for later use
    # Mako caches and automatically recreates this if the file changes
    mako_template = local.template
    if not hasattr(mako_template, '__compiled_template_syntax'):
        mako_template.__compiled_template_syntax = {}
    # create a closure so we can still get to context and using (Mako filters take exactly one parameter: the string to filter)
    def wrap(template_st):
        # get the template object, or create and cache it
        try:
            template = mako_template.__compiled_template_syntax[template_st]
        except KeyError:
            engine = engines[using]
            template = engine.from_string(template_st)
            # using full string, even if long, as the key doesn't really affect performance of python's hash (see http://stackoverflow.com/questions/28150047/efficiency-of-long-str-keys-in-python-dictionary)
            mako_template.__compiled_template_syntax[template_st] = template
        # create a copy the context and add any kwargs to it
        dcontext = dict(local.context)
        dcontext.update(kwargs)
        # print a debug statement to the log
        log.debug('rendering embedded expression or block using %s template engine', using)
        # render the template with the context
        return template.render(context=dcontext, request=request)
    # return the embedded function
    return wrap
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_view_function(module_name, function_name, fallback_app=None, fallback_template=None, verify_decorator=True):
    '''
    Retrieves a view function from the cache, loading and caching it on
    first access. Raises ViewDoesNotExist if not found.
    Called by resolver.py.
    '''
    key = (module_name, function_name)

    # fast path: check the cache without taking the lock
    # (cached values are always callables, never None)
    func = CACHED_VIEW_FUNCTIONS.get(key)
    if func is not None:
        return func

    # slow path: double-checked locking around the load
    with rlock:
        func = CACHED_VIEW_FUNCTIONS.get(key)
        if func is None:
            func = find_view_function(module_name, function_name, fallback_app, fallback_template, verify_decorator)
            # only cache in production mode so edits show up during development
            if not settings.DEBUG:
                CACHED_VIEW_FUNCTIONS[key] = func
        return func
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_view_function(module_name, function_name, fallback_app=None, fallback_template=None, verify_decorator=True):
    '''
    Finds a view function, class-based view, or template view.

    Resolution order:
        1. if the view module doesn't exist, fall back to a view that
           renders `fallback_template` directly;
        2. otherwise import the module and look up `function_name`;
        3. class-based views (subclasses of django.views.generic.View)
           are converted with as_view();
        4. plain functions must carry the @view_function decorator
           (unless `verify_decorator` is False).

    Raises ViewDoesNotExist if none of the above produces a view.
    '''
    dmp = apps.get_app_config('django_mako_plus')

    # call find_spec before import_module so a plain "module not found" can be
    # distinguished from the many other reasons importing can fail
    # NOTE(review): the original comment claimed find_spec raises AttributeError
    # when not found, but only ValueError is caught here -- confirm intended
    try:
        spec = find_spec(module_name)
    except ValueError:
        spec = None
    if spec is None:
        # no view module, so create a view function that directly renders the template
        try:
            return create_view_for_template(fallback_app, fallback_template)
        except TemplateDoesNotExist as e:
            raise ViewDoesNotExist('view module {} not found, and fallback template {} could not be loaded ({})'.format(module_name, fallback_template, e))

    # load the module and pull the named function/class from it
    try:
        module = import_module(module_name)
        func = getattr(module, function_name)
        func.view_type = 'function'  # may be overwritten with 'class' below
    except ImportError as e:
        raise ViewDoesNotExist('module "{}" could not be imported: {}'.format(module_name, e))
    except AttributeError as e:
        raise ViewDoesNotExist('module "{}" found successfully, but "{}" was not found: {}'.format(module_name, function_name, e))

    # if class-based view, call as_view() to get a callable view function
    if inspect.isclass(func) and issubclass(func, View):
        func = func.as_view()
        func.view_type = 'class'

    # if regular view function, verify the @view_function decorator was applied
    elif verify_decorator and not view_function.is_decorated(func):
        raise ViewDoesNotExist("view {}.{} was found successfully, but it must be decorated with @view_function or be a subclass of django.views.generic.View.".format(module_name, function_name))

    # attach a parameter converter to the view function (if one is configured)
    if dmp.options['PARAMETER_CONVERTER'] is not None:
        try:
            converter = import_qualified(dmp.options['PARAMETER_CONVERTER'])(func)
            setattr(func, CONVERTER_ATTRIBUTE_NAME, converter)
        except ImportError as e:
            raise ImproperlyConfigured('Cannot find PARAMETER_CONVERTER: {}'.format(str(e)))

    # return the function/class
    return func
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def iter_related(self):
    '''
    Generator that yields this object's related providers -- the provider
    at this object's index within each template of the provider run.
    The yielded set includes this provider itself.
    '''
    index = self.index
    for template in self.provider_run.templates:
        yield template.providers[index]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_cache_item(self):
    '''Gets the cached item. Raises AttributeError if it hasn't been set.'''
    # no caching during development, so every request sees fresh content
    if not settings.DEBUG:
        return getattr(self.template, self.options['template_cache_key'])
    raise AttributeError('Caching disabled in DEBUG mode')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flatten(*args):
    '''
    Generator that recursively flattens embedded lists, tuples, and other
    iterables. Strings and bytes are treated as atomic values, not iterables.
    '''
    for arg in args:
        # fix: collections.Iterable was a deprecated alias removed in
        # Python 3.10 -- the ABC lives in collections.abc
        if isinstance(arg, collections.abc.Iterable) and not isinstance(arg, (str, bytes)):
            yield from flatten(*arg)
        else:
            yield arg
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def crc32(filename):
    '''
    Calculates the CRC-32 checksum for a file.

    Using CRC32 because security isn't the issue and we don't need perfect
    non-collision -- we just need to know whether a file has changed.
    zlib.crc32 is also far faster than the hashlib algorithms.
    '''
    result = 0
    with open(filename, 'rb') as fin:
        # fix: the original read 48 bytes per iteration, forcing a Python-level
        # loop turn per 48 bytes of file; 64 KiB chunks give identical results
        # (crc32 chaining is chunk-size independent) with far fewer iterations
        for chunk in iter(lambda: fin.read(65536), b''):
            result = zlib.crc32(chunk, result)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compile_mako_files(self, app_config):
    '''
    Compiles the Mako templates within the apps of this system.

    Walks each directory in self.SEARCH_DIRS (formatted with the app's path
    and name), recursively compiling every .htm/.html/.mako file it finds
    by loading it through get_template_for_path (which writes the compiled
    .py file as a side effect).
    '''
    # go through the files in the templates, scripts, and styles directories
    for subdir_name in self.SEARCH_DIRS:
        # SEARCH_DIRS entries are format strings with {app_path}/{app_name} placeholders
        subdir = subdir_name.format(
            app_path=app_config.path,
            app_name=app_config.name,
        )

        def recurse_path(path):
            self.message('searching for Mako templates in {}'.format(path), 1)
            if os.path.exists(path):
                for filename in os.listdir(path):
                    filepath = os.path.join(path, filename)
                    _, ext = os.path.splitext(filename)
                    if filename.startswith('__'):  # skip generated dirs: __dmpcache__, __pycache__
                        continue
                    elif os.path.isdir(filepath):
                        recurse_path(filepath)
                    elif ext.lower() in ( '.htm', '.html', '.mako' ):
                        # create the template object, which creates the compiled .py file
                        self.message('compiling {}'.format(filepath), 2)
                        try:
                            get_template_for_path(filepath)
                        except TemplateSyntaxError:
                            # optionally keep going past broken templates
                            if not self.options.get('ignore_template_errors'):
                                raise

        recurse_path(subdir)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_string(self, template_code):
    '''
    Compiles a Mako template from the given string and wraps it in a
    Django-compatible adapter.
    This is one of the required methods of Django template engines.
    '''
    dmp = apps.get_app_config('django_mako_plus')
    compiled = Template(
        template_code,
        imports=dmp.template_imports,
        input_encoding=dmp.options['DEFAULT_TEMPLATE_ENCODING'],
    )
    return MakoTemplateAdapter(compiled)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _register_converter(cls, conv_func, conv_type):
    '''Triggered by the @converter_function decorator: records the converter and re-sorts.'''
    # the current list length doubles as the new converter's registration order
    info = ConverterFunctionInfo(conv_func, conv_type, len(cls.converters))
    cls.converters.append(info)
    cls._sort_converters()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _sort_converters(cls, app_ready=False):
'''Sorts the converter functions'''
# app_ready is True when called from DMP's AppConfig.ready()
# we can't sort before then because models aren't ready
cls._sorting_enabled = cls._sorting_enabled or app_ready
if cls._sorting_enabled:
for converter in cls.converters:
converter.prepare_sort_key()
cls.converters.sort(key=attrgetter('sort_key')) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def convert_parameters(self, request, *args, **kwargs):
    '''
    Iterates the urlparams and converts them according to the
    type hints in the current view function. This is the primary
    function of the class.

    Returns the (args, kwargs) to call the view function with, where each
    value has been run through convert_value.
    '''
    args = list(args)
    urlparam_i = 0
    # prefer the parameter spec registered for this HTTP method,
    # falling back to the default spec (keyed by None)
    parameters = self.view_parameters.get(request.method.lower()) or self.view_parameters.get(None)
    if parameters is not None:
        # add urlparams into the arguments and convert the values
        for parameter_i, parameter in enumerate(parameters):
            # skip the request object (position 0), *args, **kwargs
            if parameter_i == 0 or parameter.kind is inspect.Parameter.VAR_POSITIONAL or parameter.kind is inspect.Parameter.VAR_KEYWORD:
                pass
            # value supplied in kwargs?
            elif parameter.name in kwargs:
                kwargs[parameter.name] = self.convert_value(kwargs[parameter.name], parameter, request)
            # value supplied positionally? (parameter_i - 1 because args excludes request)
            elif parameter_i - 1 < len(args):
                args[parameter_i - 1] = self.convert_value(args[parameter_i - 1], parameter, request)
            # next unconsumed urlparam value?
            elif urlparam_i < len(request.dmp.urlparams):
                kwargs[parameter.name] = self.convert_value(request.dmp.urlparams[urlparam_i], parameter, request)
                urlparam_i += 1
            # can we assign a default value?
            elif parameter.default is not inspect.Parameter.empty:
                kwargs[parameter.name] = self.convert_value(parameter.default, parameter, request)
            # fallback is None
            else:
                kwargs[parameter.name] = self.convert_value(None, parameter, request)
    return args, kwargs
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def convert_value(self, value, parameter, request):
    '''
    Converts a parameter value in the view function call.

        value: value from request.dmp.urlparams to convert
               The value will always be a string, even if empty '' (never None).
        parameter: an instance of django_mako_plus.ViewParameter that holds this
               parameter's name, type, position, etc.
        request: the current request object.

    "Converter functions" register with this class using the @parameter_converter
    decorator. See converters.py for the built-in converters.

    This function goes through the list of registered converter functions,
    selects the most-specific one that matches the parameter.type, and
    calls it to convert the value.

    If the converter function raises a ValueError, it is caught and
    switched to an Http404 to tell the browser that the requested URL
    doesn't resolve to a page.

    Other useful exceptions that converter functions can raise are:
        Any extension of BaseRedirectException (RedirectException,
        InternalRedirectException, JavascriptRedirectException, ...)
        Http404: returns a Django Http404 response
    '''
    try:
        # we don't convert anything without type hints
        if parameter.type is inspect.Parameter.empty:
            if log.isEnabledFor(logging.DEBUG):
                log.debug('skipping conversion of parameter `%s` because it has no type hint', parameter.name)
            return value

        # find the converter function for this type; the list is sorted by
        # specificity (subclasses before superclasses), so the first
        # issubclass match is the most-specific one
        for ci in self.converters:
            if issubclass(parameter.type, ci.convert_type):
                if log.isEnabledFor(logging.DEBUG):
                    log.debug('converting parameter `%s` using %s', parameter.name, ci.convert_func)
                return ci.convert_func(value, parameter)

        # if we get here, there wasn't a converter for this type
        # (fix: the message was passed as a `message=` keyword, but Exception
        # subclasses take positional args only, so raising it was a TypeError)
        raise ImproperlyConfigured('No parameter converter exists for type: {}. Do you need to add an @parameter_converter function for the type?'.format(parameter.type))

    # (fix: `as e` was missing here, so the log call referenced an unbound
    # name and raised NameError instead of re-raising the redirect/404)
    except (BaseRedirectException, Http404) as e:
        log.info('Exception raised during conversion of parameter %s (%s): %s', parameter.position, parameter.name, e)
        raise  # allow these to pass through to the router
    except ValueError as e:
        log.info('ValueError raised during conversion of parameter %s (%s): %s', parameter.position, parameter.name, e)
        raise ConverterHttp404(value, parameter, 'A parameter could not be converted - see the logs for more detail') from e
    except Exception as e:
        log.info('Exception raised during conversion of parameter %s (%s): %s', parameter.position, parameter.name, e)
        raise ConverterException(value, parameter, 'A parameter could not be converted - see the logs for more detail') from e
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.