<SYSTEM_TASK:>
Read a numpy data array from the zip file
<END_TASK>
<USER_TASK:>
Description:
def read_data(fp, local_files, dir_files, name_bytes):
"""
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name (as bytes) of the data file to read
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
""" |
if name_bytes in dir_files:
fp.seek(local_files[dir_files[name_bytes][1]][1])
return numpy.load(fp)
return None |
<SYSTEM_TASK:>
Read json properties from the zip file
<END_TASK>
<USER_TASK:>
Description:
def read_json(fp, local_files, dir_files, name_bytes):
"""
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name (as bytes) of the json file to read
:return: the json properties as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
""" |
if name_bytes in dir_files:
json_pos = local_files[dir_files[name_bytes][1]][1]
json_len = local_files[dir_files[name_bytes][1]][2]
fp.seek(json_pos)
json_properties = fp.read(json_len)
return json.loads(json_properties.decode("utf-8"))
return None |
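A minimal usage sketch for the two readers above, assuming parse_zip (defined elsewhere in this module) returns the (local_files, dir_files, eocd) triple consumed here and that the archive follows the ndata convention of holding data.npy and metadata.json; the path is a placeholder:

with open("example.ndata", "rb") as fp:
    local_files, dir_files, eocd = parse_zip(fp)
    properties = read_json(fp, local_files, dir_files, b"metadata.json")
    data = read_data(fp, local_files, dir_files, b"data.npy")
    # Both helpers return None when the named entry is absent.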
<SYSTEM_TASK:>
Rewrite the json properties in the zip file
<END_TASK>
<USER_TASK:>
Description:
def rewrite_zip(file_path, properties):
"""
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
""" |
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
# check to make sure directory has two files, named data.npy and metadata.json, and that data.npy is first
# TODO: check compression, etc.
if len(dir_files) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files[b"data.npy"][1] == 0:
fp.seek(dir_files[b"metadata.json"][1])
dir_data_list = list()
local_file_pos = dir_files[b"data.npy"][1]
local_file = local_files[local_file_pos]
dir_data_list.append((local_file_pos, b"data.npy", local_file[2], local_file[3]))
write_zip_fp(fp, None, properties, dir_data_list)
else:
data = None
if b"data.npy" in dir_files:
fp.seek(local_files[dir_files[b"data.npy"][1]][1])
data = numpy.load(fp)
fp.seek(0)
write_zip_fp(fp, data, properties) |
<SYSTEM_TASK:>
Return whether the given absolute file path is an ndata file.
<END_TASK>
<USER_TASK:>
Description:
def is_matching(cls, file_path):
"""
Return whether the given absolute file path is an ndata file.
""" |
if file_path.endswith(".ndata") and os.path.exists(file_path):
try:
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
contains_data = b"data.npy" in dir_files
contains_metadata = b"metadata.json" in dir_files
file_count = contains_data + contains_metadata # use fact that True is 1, False is 0
# TODO: make sure ndata isn't compressed, or handle it
if len(dir_files) != file_count or file_count == 0:
return False
return True
except Exception as e:
logging.error("Exception parsing ndata file: %s", file_path)
logging.error(str(e))
return False |
<SYSTEM_TASK:>
Write data to the ndata file specified by reference.
<END_TASK>
<USER_TASK:>
Description:
def write_data(self, data, file_datetime):
"""
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
""" |
with self.__lock:
assert data is not None
absolute_file_path = self.__file_path
#logging.debug("WRITE data file %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
properties = self.read_properties() if os.path.exists(absolute_file_path) else dict()
write_zip(absolute_file_path, data, properties)
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp)) |
<SYSTEM_TASK:>
Write properties to the ndata file specified by reference.
<END_TASK>
<USER_TASK:>
Description:
def write_properties(self, properties, file_datetime):
"""
Write properties to the ndata file specified by reference.
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
""" |
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("WRITE properties %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
exists = os.path.exists(absolute_file_path)
if exists:
rewrite_zip(absolute_file_path, Utility.clean_dict(properties))
else:
write_zip(absolute_file_path, None, Utility.clean_dict(properties))
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp)) |
<SYSTEM_TASK:>
Read properties from the ndata file reference
<END_TASK>
<USER_TASK:>
Description:
def read_properties(self):
"""
Read properties from the ndata file reference
:return: a dict of the properties
""" |
with self.__lock:
absolute_file_path = self.__file_path
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
properties = read_json(fp, local_files, dir_files, b"metadata.json")
return properties |
<SYSTEM_TASK:>
Read data from the ndata file reference
<END_TASK>
<USER_TASK:>
Description:
def read_data(self):
"""
Read data from the ndata file reference
:return: a numpy array of the data; may be None
""" |
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("READ data file %s", absolute_file_path)
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
return read_data(fp, local_files, dir_files, b"data.npy") |
<SYSTEM_TASK:>
Remove the ndata file reference
<END_TASK>
<USER_TASK:>
Description:
def remove(self):
"""
Remove the ndata file reference
""" |
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("DELETE data file %s", absolute_file_path)
if os.path.isfile(absolute_file_path):
os.remove(absolute_file_path) |
<SYSTEM_TASK:>
Build the dynamic menu for the selected display panel.
<END_TASK>
<USER_TASK:>
Description:
def build_menu(self, display_type_menu, document_controller, display_panel):
"""Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items switch the panel to an empty display panel or one of the browser display panels.
After that, each display controller factory is given a chance to add to the menu. A display
controller (for instance, a scan acquisition controller) may add its own menu items.
""" |
dynamic_live_actions = list()
def switch_to_display_content(display_panel_type):
self.switch_to_display_content(document_controller, display_panel, display_panel_type, display_panel.display_item)
empty_action = display_type_menu.add_menu_item(_("Clear Display Panel"), functools.partial(switch_to_display_content, "empty-display-panel"))
display_type_menu.add_separator()
data_item_display_action = display_type_menu.add_menu_item(_("Display Item"), functools.partial(switch_to_display_content, "data-display-panel"))
thumbnail_browser_action = display_type_menu.add_menu_item(_("Thumbnail Browser"), functools.partial(switch_to_display_content, "thumbnail-browser-display-panel"))
grid_browser_action = display_type_menu.add_menu_item(_("Grid Browser"), functools.partial(switch_to_display_content, "browser-display-panel"))
display_type_menu.add_separator()
display_panel_type = display_panel.display_panel_type
empty_action.checked = display_panel_type == "empty" and display_panel.display_panel_controller is None
data_item_display_action.checked = display_panel_type == "data_item"
thumbnail_browser_action.checked = display_panel_type == "horizontal"
grid_browser_action.checked = display_panel_type == "grid"
dynamic_live_actions.append(empty_action)
dynamic_live_actions.append(data_item_display_action)
dynamic_live_actions.append(thumbnail_browser_action)
dynamic_live_actions.append(grid_browser_action)
for factory in self.__display_controller_factories.values():
dynamic_live_actions.extend(factory.build_menu(display_type_menu, display_panel))
return dynamic_live_actions |
<SYSTEM_TASK:>
Return the bounds property in relative coordinates.
<END_TASK>
<USER_TASK:>
Description:
def bounds(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the bounds property in relative coordinates.
Bounds is a tuple ((top, left), (height, width))""" |
... |
<SYSTEM_TASK:>
Return the vector property in relative coordinates.
<END_TASK>
<USER_TASK:>
Description:
def vector(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the vector property in relative coordinates.
Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end)).""" |
... |
<SYSTEM_TASK:>
Subclasses should call this to add content in the section's top level column.
<END_TASK>
<USER_TASK:>
Description:
def add_widget_to_content(self, widget):
"""Subclasses should call this to add content in the section's top level column.""" |
self.__section_content_column.add_spacing(4)
self.__section_content_column.add(widget) |
<SYSTEM_TASK:>
Set the data and mark the canvas item for updating.
<END_TASK>
<USER_TASK:>
Description:
def color_map_data(self, data: numpy.ndarray) -> None:
"""Set the data and mark the canvas item for updating.
Data should be an ndarray of shape (256, 3) with type uint8
""" |
self.__color_map_data = data
self.update() |
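As a quick illustration of the expected format, a grayscale map could be built like this (a sketch; canvas_item stands in for a hypothetical instance of the surrounding class):

import numpy

# 256 gray levels replicated across the three RGB channels, dtype uint8.
gray = numpy.linspace(0, 255, 256, dtype=numpy.uint8)
color_map = numpy.stack([gray, gray, gray], axis=1)  # shape (256, 3)
# canvas_item.color_map_data = color_map  # canvas_item is hypothetical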
<SYSTEM_TASK:>
In classification this returns the classes, in
<END_TASK>
<USER_TASK:>
Description:
def predict(self, v=None, X=None):
"""In classification this returns the classes, in
regression it is equivalent to the decision function""" |
if X is None:
X = v
v = None
m = self.model(v=v)
return m.predict(X) |
<SYSTEM_TASK:>
Returns the subscribers for a given object.
<END_TASK>
<USER_TASK:>
Description:
def get_subscribers(obj):
"""
Returns the subscribers for a given object.
:param obj: Any object.
""" |
ctype = ContentType.objects.get_for_model(obj)
return Subscription.objects.filter(content_type=ctype, object_id=obj.pk) |
<SYSTEM_TASK:>
Returns ``True`` if the user is subscribed to the given object.
<END_TASK>
<USER_TASK:>
Description:
def is_subscribed(user, obj):
"""
Returns ``True`` if the user is subscribed to the given object.
:param user: A ``User`` instance.
:param obj: Any object.
""" |
if not user.is_authenticated():
return False
ctype = ContentType.objects.get_for_model(obj)
try:
Subscription.objects.get(
user=user, content_type=ctype, object_id=obj.pk)
except Subscription.DoesNotExist:
return False
return True |
<SYSTEM_TASK:>
Create a new subclass of Context which incorporates instance attributes and new descriptors.
<END_TASK>
<USER_TASK:>
Description:
def _promote(self, name, instantiate=True):
"""Create a new subclass of Context which incorporates instance attributes and new descriptors.
This promotes an instance and its instance attributes up to being a class with class attributes, then
returns an instance of that class.
""" |
metaclass = type(self.__class__)
contents = self.__dict__.copy()
cls = metaclass(str(name), (self.__class__, ), contents)
if instantiate:
return cls()
return cls |
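A self-contained sketch of the promotion mechanism itself, stripped of the surrounding framework: copying an instance's __dict__ into a new subclass turns instance attributes into class attributes.

class Context(object):
    pass

instance = Context()
instance.debug = True  # instance attribute

# Promote: build a subclass whose class attributes are the instance's attributes.
PromotedContext = type('PromotedContext', (Context,), instance.__dict__.copy())
promoted = PromotedContext()
assert promoted.debug is True            # now found on the class
assert 'debug' not in promoted.__dict__  # not on the new instance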
<SYSTEM_TASK:>
Run simulation for each candidate
<END_TASK>
<USER_TASK:>
Description:
def run(self,candidates,parameters):
"""
Run simulation for each candidate
This run method will loop through each candidate and run the simulation
corresponding to its parameter values. It will populate an array called
traces with the resulting voltage traces for the simulation and return it.
""" |
traces = []
start_time = time.time()
if self.num_parallel_evaluations == 1:
for candidate_i in range(len(candidates)):
candidate = candidates[candidate_i]
sim_var = dict(zip(parameters,candidate))
pyneuroml.pynml.print_comment_v('\n\n - RUN %i (%i/%i); variables: %s\n'%(self.count,candidate_i+1,len(candidates),sim_var))
self.count+=1
t,v = self.run_individual(sim_var)
traces.append([t,v])
else:
import pp
ppservers = ()
job_server = pp.Server(self.num_parallel_evaluations, ppservers=ppservers)
pyneuroml.pynml.print_comment_v('Running %i candidates across %i local processes'%(len(candidates),job_server.get_ncpus()))
jobs = []
for candidate_i in range(len(candidates)):
candidate = candidates[candidate_i]
sim_var = dict(zip(parameters,candidate))
pyneuroml.pynml.print_comment_v('\n\n - PARALLEL RUN %i (%i/%i of curr candidates); variables: %s\n'%(self.count,candidate_i+1,len(candidates),sim_var))
self.count+=1
cand_dir = self.generate_dir+"/CANDIDATE_%s"%candidate_i
if not os.path.exists(cand_dir):
os.mkdir(cand_dir)
vars = (sim_var,
self.ref,
self.neuroml_file,
self.nml_doc,
self.still_included,
cand_dir,
self.target,
self.sim_time,
self.dt,
self.simulator)
job = job_server.submit(run_individual, vars, (), ("pyneuroml.pynml",'pyneuroml.tune.NeuroMLSimulation','shutil','neuroml'))
jobs.append(job)
for job_i in range(len(jobs)):
job = jobs[job_i]
pyneuroml.pynml.print_comment_v("Checking parallel job %i/%i; set running so far: %i"%(job_i,len(jobs),self.count))
t,v = job()
traces.append([t,v])
#pyneuroml.pynml.print_comment_v("Obtained: %s"%result)
####job_server.print_stats()
job_server.destroy()
print("-------------------------------------------")
end_time = time.time()
tot = (end_time-start_time)
pyneuroml.pynml.print_comment_v('Ran %i candidates in %s seconds (~%ss per job)'%(len(candidates),tot,tot/len(candidates)))
return traces |
<SYSTEM_TASK:>
Executed prior to processing a request.
<END_TASK>
<USER_TASK:>
Description:
def prepare(self, context):
"""Executed prior to processing a request.""" |
if __debug__:
log.debug("Assigning thread local request context.")
self.local.context = context |
<SYSTEM_TASK:>
Python-standard WSGI-HTTP server for testing purposes.
<END_TASK>
<USER_TASK:>
Description:
def simple(application, host='127.0.0.1', port=8080):
"""Python-standard WSGI-HTTP server for testing purposes.
The additional work performed here is to match the default startup output of "waitress".
This is not a production quality interface and will behave badly under load.
""" |
# Try to be handy as many terminals allow clicking links.
print("serving on http://{0}:{1}".format(host, port))
# Bind and launch the server; this is a blocking operation.
make_server(host, int(port), application).serve_forever() |
<SYSTEM_TASK:>
A specialized version of the reference WSGI-CGI server to adapt to Microsoft IIS quirks.
<END_TASK>
<USER_TASK:>
Description:
def iiscgi(application):
"""A specialized version of the reference WSGI-CGI server to adapt to Microsoft IIS quirks.
This is not a production quality interface and will behave badly under load.
""" |
try:
from wsgiref.handlers import IISCGIHandler
except ImportError:
print("Python 3.2 or newer is required.")
if not __debug__:
warnings.warn("Interactive debugging and other persistence-based processes will not work.")
IISCGIHandler().run(application) |
<SYSTEM_TASK:>
Basic FastCGI support via flup.
<END_TASK>
<USER_TASK:>
Description:
def serve(application, host='127.0.0.1', port=8080, socket=None, **options):
"""Basic FastCGI support via flup.
This web server has many, many options. Please see the Flup project documentation for details.
""" |
# Allow either on-disk socket (recommended) or TCP/IP socket use.
if not socket:
bindAddress = (host, int(port))
else:
bindAddress = socket
# Bind and start the blocking web server interface.
WSGIServer(application, bindAddress=bindAddress, **options).run() |
<SYSTEM_TASK:>
Helper method. Returns kwargs needed to filter the correct object.
<END_TASK>
<USER_TASK:>
Description:
def _get_method_kwargs(self):
"""
Helper method. Returns kwargs needed to filter the correct object.
Can also be used to create the correct object.
""" |
method_kwargs = {
'user': self.user,
'content_type': self.ctype,
'object_id': self.content_object.pk,
}
return method_kwargs |
<SYSTEM_TASK:>
Adds a subscription for the given user to the given object.
<END_TASK>
<USER_TASK:>
Description:
def save(self, *args, **kwargs):
"""Adds a subscription for the given user to the given object.""" |
method_kwargs = self._get_method_kwargs()
try:
subscription = Subscription.objects.get(**method_kwargs)
except Subscription.DoesNotExist:
subscription = Subscription.objects.create(**method_kwargs)
return subscription |
<SYSTEM_TASK:>
Add the usual suspects to the context.
<END_TASK>
<USER_TASK:>
Description:
def prepare(self, context):
"""Add the usual suspects to the context.
This adds `request`, `response`, and `path` to the `RequestContext` instance.
""" |
if __debug__:
log.debug("Preparing request context.", extra=dict(request=id(context)))
# Bridge in WebOb `Request` and `Response` objects.
# Extensions shouldn't rely on these, using `environ` where possible instead.
context.request = Request(context.environ)
context.response = Response(request=context.request)
# Record the initial path representing the point where a front-end web server bridged to us.
context.environ['web.base'] = context.request.script_name
# Track the remaining (unprocessed) path elements.
context.request.remainder = context.request.path_info.split('/')
if context.request.remainder and not context.request.remainder[0]:
del context.request.remainder[0]
# Track the "breadcrumb list" of dispatch through distinct controllers.
context.path = Bread() |
<SYSTEM_TASK:>
Called as dispatch descends into a tier.
<END_TASK>
<USER_TASK:>
Description:
def dispatch(self, context, consumed, handler, is_endpoint):
"""Called as dispatch descends into a tier.
The base extension uses this to maintain the "current url".
""" |
request = context.request
if __debug__:
log.debug("Handling dispatch event.", extra=dict(
request = id(context),
consumed = consumed,
handler = safe_name(handler),
endpoint = is_endpoint
))
# The leading path element (leading slash) requires special treatment.
if not consumed and context.request.path_info_peek() == '':
consumed = ['']
nConsumed = 0
if consumed:
# Migrate path elements consumed from the `PATH_INFO` to `SCRIPT_NAME` WSGI environment variables.
if not isinstance(consumed, (list, tuple)):
consumed = consumed.split('/')
for element in consumed:
if element == context.request.path_info_peek():
context.request.path_info_pop()
nConsumed += 1
else:
break
# Update the breadcrumb list.
context.path.append(Crumb(handler, Path(request.script_name)))
if consumed: # Lastly, update the remaining path element list.
request.remainder = request.remainder[nConsumed:] |
<SYSTEM_TASK:>
Perform appropriate metadata wrangling for returned open file handles.
<END_TASK>
<USER_TASK:>
Description:
def render_file(self, context, result):
"""Perform appropriate metadata wrangling for returned open file handles.""" |
if __debug__:
log.debug("Processing file-like object.", extra=dict(request=id(context), result=repr(result)))
response = context.response
response.conditional_response = True
modified = mktime(gmtime(getmtime(result.name)))
response.last_modified = datetime.fromtimestamp(modified)
ct, ce = guess_type(result.name)
if not ct: ct = 'application/octet-stream'
response.content_type, response.content_encoding = ct, ce
response.etag = unicode(modified)
result.seek(0, 2) # Seek to the end of the file.
response.content_length = result.tell()
result.seek(0) # Seek back to the start of the file.
response.body_file = result
return True |
<SYSTEM_TASK:>
Attempt to serve generator responses through stream encoding.
<END_TASK>
<USER_TASK:>
Description:
def render_generator(self, context, result):
"""Attempt to serve generator responses through stream encoding.
This allows for direct use of cinje template functions, which are generators, as returned views.
""" |
context.response.encoding = 'utf8'
context.response.app_iter = (
(i.encode('utf8') if isinstance(i, unicode) else i) # Stream encode unicode chunks.
for i in result if i is not None # Skip None values.
)
return True |
<SYSTEM_TASK:>
Render serialized responses.
<END_TASK>
<USER_TASK:>
Description:
def render_serialization(self, context, result):
"""Render serialized responses.""" |
resp = context.response
serial = context.serialize
match = context.request.accept.best_match(serial.types, default_match=self.default)
result = serial[match](result)
if isinstance(result, str):
result = result.decode('utf-8')
resp.charset = 'utf-8'
resp.content_type = match
resp.text = result
return True |
<SYSTEM_TASK:>
In general if we need to put a file on a folder, we use this method
<END_TASK>
<USER_TASK:>
Description:
def generic_insert_with_folder(folder_name, file_name, template_name, args):
"""
In general, if we need to put a file in a folder, we use this method
""" |
# First we make sure views are a package instead of a file
if not os.path.isdir(
os.path.join(
args['django_application_folder'],
folder_name
)
):
os.mkdir(os.path.join(args['django_application_folder'], folder_name))
codecs.open(
os.path.join(
args['django_application_folder'],
folder_name,
'__init__.py'
),
'w+'
)
view_file = create_or_open(
os.path.join(
folder_name,
'{}.py'.format(file_name)
),
'',
args
)
# Load content from template
render_template_with_args_in_file(
view_file,
os.path.join(
BASE_TEMPLATES_DIR,
template_name
),
model_name=args['model_name'],
model_prefix=args['model_prefix'],
model_name_lower=args['model_name'].lower(),
application_name=args['django_application_folder'].split("/")[-1]
)
view_file.close() |
<SYSTEM_TASK:>
The recommended development HTTP server.
<END_TASK>
<USER_TASK:>
Description:
def serve(application, host='127.0.0.1', port=8080, threads=4, **kw):
"""The recommended development HTTP server.
Note that this server performs additional buffering and will not honour chunked encoding breaks.
""" |
# Bind and start the server; this is a blocking process.
serve_(application, host=host, port=int(port), threads=int(threads), **kw) |
<SYSTEM_TASK:>
Plot the result of the simulation once it's been intialized
<END_TASK>
<USER_TASK:>
Description:
def show(self):
"""
Plot the result of the simulation once it has been initialized
""" |
from matplotlib import pyplot as plt
if self.already_run:
for ref in self.volts.keys():
plt.plot(self.t, self.volts[ref], label=ref)
plt.title("Simulation voltage vs time")
plt.legend()
plt.xlabel("Time [ms]")
plt.ylabel("Voltage [mV]")
else:
pynml.print_comment("First you have to 'go()' the simulation.", True)
plt.show() |
<SYSTEM_TASK:>
Inspect and potentially mutate the given handler's arguments.
<END_TASK>
<USER_TASK:>
Description:
def mutate(self, context, handler, args, kw):
"""Inspect and potentially mutate the given handler's arguments.
The args list and kw dictionary may be freely modified, though invalid arguments to the handler will fail.
""" |
def cast(arg, val):
if arg not in annotations:
return
caster = annotations[arg]  # index by this argument's name, not an outer-scope variable
try:
val = caster(val)
except (ValueError, TypeError) as e:
parts = list(e.args)
parts[0] = parts[0] + " processing argument '{}'".format(arg)
e.args = tuple(parts)
raise
return val
annotations = getattr(handler.__func__ if hasattr(handler, '__func__') else handler, '__annotations__', None)
if not annotations:
return
argspec = getfullargspec(handler)
arglist = list(argspec.args)
if ismethod(handler):
del arglist[0]
for i, value in enumerate(list(args)):
key = arglist[i]
if key in annotations:
args[i] = cast(key, value)
# Convert keyword arguments
for key, value in list(items(kw)):
if key in annotations:
kw[key] = cast(key, value) |
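A sketch of the effect on an annotated endpoint: string values arriving from the request are passed through the annotation callables before invocation. The endpoint below is hypothetical.

def endpoint(page: int, scale: float):
    return page * scale

annotations = endpoint.__annotations__
raw = ['3', '0.5']  # request values always arrive as strings
cast_args = [annotations[name](value)
             for name, value in zip(('page', 'scale'), raw)]
assert endpoint(*cast_args) == 1.5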
<SYSTEM_TASK:>
Transform the value returned by the controller endpoint.
<END_TASK>
<USER_TASK:>
Description:
def transform(self, context, handler, result):
"""Transform the value returned by the controller endpoint.
This extension transforms returned values if the endpoint has a return type annotation.
""" |
handler = handler.__func__ if hasattr(handler, '__func__') else handler
annotation = getattr(handler, '__annotations__', {}).get('return', None)
if annotation:
return (annotation, result)
return result |
<SYSTEM_TASK:>
Execute a command in specific working directory
<END_TASK>
<USER_TASK:>
Description:
def execute_command_in_dir(command, directory, verbose=DEFAULTS['v'],
prefix="Output: ", env=None):
"""Execute a command in specific working directory""" |
if os.name == 'nt':
directory = os.path.normpath(directory)
print_comment("Executing: (%s) in directory: %s" % (command, directory),
verbose)
if env is not None:
print_comment("Extra env variables %s" % (env), verbose)
try:
if os.name == 'nt':
return_string = subprocess.check_output(command,
cwd=directory,
shell=True,
env=env,
close_fds=False)
else:
return_string = subprocess.check_output(command,
cwd=directory,
shell=True,
stderr=subprocess.STDOUT,
env=env,
close_fds=True)
return_string = return_string.decode("utf-8") # For Python 3
print_comment('Command completed. Output: \n %s%s' % \
(prefix,return_string.replace('\n','\n '+prefix)),
verbose)
return return_string
except AttributeError:
# For python 2.6...
print_comment_v('Assuming Python 2.6...')
return_string = subprocess.Popen(command,
cwd=directory,
shell=True,
stdout=subprocess.PIPE).communicate()[0]
return return_string
except subprocess.CalledProcessError as e:
print_comment_v('*** Problem running command: \n %s'%e)
print_comment_v('%s%s'%(prefix,e.output.decode().replace('\n','\n'+prefix)))
return None
except Exception as e:  # `e` was previously undefined under a bare except
print_comment_v('*** Unknown problem running command: %s' % e)
return None
print_comment("Finished execution", verbose) |
<SYSTEM_TASK:>
Executed after dispatch has returned and the response populated, prior to anything being sent to the client.
<END_TASK>
<USER_TASK:>
Description:
def after(self, context, exc=None):
"""Executed after dispatch has returned and the response populated, prior to anything being sent to the client.""" |
duration = context._duration = round((time.time() - context._start_time) * 1000) # Convert to ms.
delta = unicode(duration)
# Default response augmentation.
if self.header:
context.response.headers[self.header] = delta
if self.log:
self.log("Response generated in " + delta + " seconds.", extra=dict(
duration = duration,
request = id(context)
)) |
<SYSTEM_TASK:>
Prepare the incoming configuration and ensure certain expected values are present.
<END_TASK>
<USER_TASK:>
Description:
def _configure(self, config):
"""Prepare the incoming configuration and ensure certain expected values are present.
For example, this ensures BaseExtension is included in the extension list, and populates the logging config.
""" |
config = config or dict()
# We really need this to be there.
if 'extensions' not in config: config['extensions'] = list()
if not any(isinstance(ext, BaseExtension) for ext in config['extensions']):
# Always make sure the BaseExtension is present since request/response objects are handy.
config['extensions'].insert(0, BaseExtension())
if not any(isinstance(ext, arguments.ArgumentExtension) for ext in config['extensions']):
# Prepare a default set of argument mutators.
config['extensions'].extend([
arguments.ValidateArgumentsExtension(),
arguments.ContextArgsExtension(),
arguments.RemainderArgsExtension(),
arguments.QueryStringArgsExtension(),
arguments.FormEncodedKwargsExtension(),
arguments.JSONKwargsExtension(),
])
config['extensions'].append(self) # Allow the application object itself to register callbacks.
try:
addLoggingLevel('trace', logging.DEBUG - 5)
except AttributeError:
pass
# Tests are skipped on these as we have no particular need to test Python's own logging mechanism.
level = config.get('logging', {}).get('level', None)
if level: # pragma: no cover
logging.basicConfig(level=getattr(logging, level.upper()))
elif 'logging' in config: # pragma: no cover
logging.config.dictConfig(config['logging'])
return config |
<SYSTEM_TASK:>
Sphinx role for linking to a user profile. Defaults to linking to
<END_TASK>
<USER_TASK:>
Description:
def user_role(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Sphinx role for linking to a user profile. Defaults to linking to
Github profiles, but the profile URIS can be configured via the
``issues_user_uri`` config value.
Examples: ::
:user:`sloria`
Anchor text also works: ::
:user:`Steven Loria <sloria>`
""" |
options = options or {}
content = content or []
has_explicit_title, title, target = split_explicit_title(text)
target = utils.unescape(target).strip()
title = utils.unescape(title).strip()
config = inliner.document.settings.env.app.config
if config.issues_user_uri:
ref = config.issues_user_uri.format(user=target)
else:
ref = "https://github.com/{0}".format(target)
if has_explicit_title:
text = title
else:
text = "@{0}".format(target)
link = nodes.reference(text=text, refuri=ref, **options)
return [link], [] |
<SYSTEM_TASK:>
Parse metadata to obtain list of mustache templates,
<END_TASK>
<USER_TASK:>
Description:
def prepare(doc):
""" Parse metadata to obtain list of mustache templates,
then load those templates.
""" |
doc.mustache_files = doc.get_metadata('mustache')
if isinstance(doc.mustache_files, basestring): # process single YAML value stored as string
if not doc.mustache_files:
doc.mustache_files = None # switch empty string back to None
else:
doc.mustache_files = [ doc.mustache_files ] # put non-empty string in list
# with open('debug.txt', 'a') as the_file:
# the_file.write(str(doc.mustache_files))
# the_file.write('\n')
if doc.mustache_files is not None:
doc.mustache_hashes = [yaml.load(open(file, 'r').read()) for file in doc.mustache_files]
doc.mhash = { k: v for mdict in doc.mustache_hashes for k, v in mdict.items() } # combine list of dicts into a single dict
doc.mrenderer = pystache.Renderer(escape=lambda u: u, missing_tags='strict')
else:
doc.mhash = None |
<SYSTEM_TASK:>
Apply combined mustache template to all strings in document.
<END_TASK>
<USER_TASK:>
Description:
def action(elem, doc):
""" Apply combined mustache template to all strings in document.
""" |
if type(elem) == Str and doc.mhash is not None:
elem.text = doc.mrenderer.render(elem.text, doc.mhash)
return elem |
<SYSTEM_TASK:>
Determine the name of the callback to wrap around the json output.
<END_TASK>
<USER_TASK:>
Description:
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
""" |
request = renderer_context.get('request', None)
params = request and get_query_params(request) or {}
return params.get(self.callback_parameter, self.default_callback) |
<SYSTEM_TASK:>
Renders into jsonp, wrapping the json output in a callback function.
<END_TASK>
<USER_TASK:>
Description:
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
""" |
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');' |
<SYSTEM_TASK:>
Parse two input arguments and return two lists of file names
<END_TASK>
<USER_TASK:>
Description:
def parse_path(f1, f2):
"""Parse two input arguments and return two lists of file names""" |
import glob
# if second argument is missing or is a wild card, point it
# to the current directory
f2 = f2.strip()
if f2 == '' or f2 == '*':
f2 = './'
# if the first argument is a directory, use all GEIS files
if os.path.isdir(f1):
f1 = os.path.join(f1, '*.??h')
list1 = glob.glob(f1)
list1 = [name for name in list1 if name[-1] == 'h' and name[-4] == '.']
# if the second argument is a directory, use file names in the
# first argument to construct file names, i.e.
# abc.xyh will be converted to abc_xyf.fits
if os.path.isdir(f2):
list2 = []
for file in list1:
name = os.path.split(file)[-1]
fitsname = name[:-4] + '_' + name[-3:-1] + 'f.fits'
list2.append(os.path.join(f2, fitsname))
else:
list2 = [s.strip() for s in f2.split(",")]
if list1 == [] or list2 == []:
err_msg = ""
if list1 == []:
err_msg += "Input files `{:s}` not usable/available. ".format(f1)
if list2 == []:
err_msg += "Input files `{:s}` not usable/available. ".format(f2)
raise IOError(err_msg)
else:
return list1, list2 |
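For example, when the second argument is a directory, a GEIS header name such as abc.xyh is rebuilt into a FITS name with the slicing used above:

name = 'abc.xyh'
fitsname = name[:-4] + '_' + name[-3:-1] + 'f.fits'
assert fitsname == 'abc_xyf.fits'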
<SYSTEM_TASK:>
Determine if the filename provided to the function belongs to
<END_TASK>
<USER_TASK:>
Description:
def checkASN(filename):
"""
Determine if the filename provided to the function belongs to
an association.
Parameters
----------
filename: string
Returns
-------
validASN : boolean value
""" |
# Extract the file extn type:
extnType = filename[filename.rfind('_')+1:filename.rfind('.')]
# Determine if this extn name is valid for an association file
if isValidAssocExtn(extnType):
return True
else:
return False |
<SYSTEM_TASK:>
Determine the number of inputfiles provided by the user and the
<END_TASK>
<USER_TASK:>
Description:
def countinputs(inputlist):
"""
Determine the number of inputfiles provided by the user and the
number of those files that are association tables
Parameters
----------
inputlist : string
the user input
Returns
-------
numInputs: int
number of inputs provided by the user
numASNfiles: int
number of association files provided as input
""" |
# Initialize return values
numInputs = 0
numASNfiles = 0
# Use irafglob to count the number of input files
files = irafglob(inputlist, atfile=None)
# Use the "len" ufunc to count the number of entries in the list
numInputs = len(files)
# Loop over the list and see if any of the entries are association files
for file in files:
if (checkASN(file) == True):
numASNfiles += 1
return numInputs,numASNfiles |
<SYSTEM_TASK:>
Returns a formatted string with the current local time.
<END_TASK>
<USER_TASK:>
Description:
def getLTime():
"""Returns a formatted string with the current local time.""" |
_ltime = _time.localtime(_time.time())
tlm_str = _time.strftime('%H:%M:%S (%d/%m/%Y)', _ltime)
return tlm_str |
<SYSTEM_TASK:>
Returns a formatted string with the current date.
<END_TASK>
<USER_TASK:>
Description:
def getDate():
"""Returns a formatted string with the current date.""" |
_ltime = _time.localtime(_time.time())
date_str = _time.strftime('%Y-%m-%dT%H:%M:%S',_ltime)
return date_str |
<SYSTEM_TASK:>
Converts an integer 'input' into its component bit values as a list of
<END_TASK>
<USER_TASK:>
Description:
def interpretDQvalue(input):
"""
Converts an integer 'input' into its component bit values as a list of
power of 2 integers.
For example, the bit value 1027 would return [1, 2, 1024]
""" |
nbits = 16
# We will only support integer values up to 2**128
for iexp in [16, 32, 64, 128]:
# Find out whether the input value is less than 2**iexp
if (input // (2 ** iexp)) == 0:
# when it finally is, we have identified how many bits can be used to
# describe this input bitvalue
nbits = iexp
break
# Find out how 'dtype' values are described on this machine
a = np.zeros(1, dtype='int16')
atype_descr = a.dtype.descr[0][1]
# Use this description to build the description we need for our input integer
dtype_str = atype_descr[:2] + str(nbits // 8)
result = np.zeros(nbits + 1, dtype=dtype_str)
# For each bit, determine whether it has been set in the input value or not
for n in range(nbits + 1):
i = 2 ** n
if input & i > 0:
# record which bit has been set as the power-of-2 integer
result[n] = i
# Return the non-zero unique values as a Python list
return np.delete(np.unique(result), 0).tolist() |
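A worked example matching the docstring's claim that 1027 decomposes as 1 + 2 + 1024:

>>> interpretDQvalue(1027)
[1, 2, 1024]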
<SYSTEM_TASK:>
Checks whether files are writable. It is up to the calling routine to raise
<END_TASK>
<USER_TASK:>
Description:
def verifyWriteMode(files):
"""
Checks whether files are writable. It is up to the calling routine to raise
an Exception, if desired.
This function returns True, if all files are writable and False, if any are
not writable. In addition, for all files found to not be writable, it will
print out the list of names of affected files.
""" |
# Start by ensuring that input is a list of filenames,
# if only a single filename has been given as input,
# convert it to a list with len == 1.
if not isinstance(files, list):
files = [files]
# Keep track of the name of each file which is not writable
not_writable = []
writable = True
# Check each file in input list
for fname in files:
try:
f = open(fname,'a')
f.close()
del f
except (IOError, OSError):  # opening for append failed, so the file is not writable
not_writable.append(fname)
writable = False
if not writable:
print('The following file(s) do not have write permission!')
for fname in not_writable:
print(' ', fname)
return writable |
<SYSTEM_TASK:>
Build rootname for a new file.
<END_TASK>
<USER_TASK:>
Description:
def buildNewRootname(filename, extn=None, extlist=None):
"""
Build rootname for a new file.
Uses 'extn' for the new filename if given; does NOT append a suffix/extension
at all.
Does NOT check to see if it exists already. Will ALWAYS return a new
filename.
""" |
# Search known suffixes to replace ('_crj.fits',...)
_extlist = copy.deepcopy(EXTLIST)
# Also, add a default where '_dth.fits' replaces
# whatever extension was there ('.fits','.c1h',...)
#_extlist.append('.')
# Also append any user-specified extensions...
if extlist:
_extlist += extlist
if isinstance(filename, fits.HDUList):
try:
filename = filename.filename()
except:
raise ValueError("Can't determine the filename of an waivered HDUList object.")
for suffix in _extlist:
_indx = filename.find(suffix)
if _indx > 0: break
if _indx < 0:
# default to entire rootname
_indx = len(filename)
if extn is None:
extn = ''
return filename[:_indx] + extn |
<SYSTEM_TASK:>
Build a new rootname for an existing file and given extension.
<END_TASK>
<USER_TASK:>
Description:
def buildRootname(filename, ext=None):
"""
Build a new rootname for an existing file and given extension.
Any user supplied extensions to use for searching for file need to be
provided as a list of extensions.
Examples
--------
::
>>> rootname = buildRootname(filename, ext=['_dth.fits']) # doctest: +SKIP
""" |
if filename in ['' ,' ', None]:
return None
fpath, fname = os.path.split(filename)
if ext is not None and '_' in ext[0]:
froot = os.path.splitext(fname)[0].split('_')[0]
else:
froot = fname
if fpath in ['', ' ', None]:
fpath = os.curdir
# Get complete list of filenames from current directory
flist = os.listdir(fpath)
#First, assume given filename is complete and verify
# it exists...
rootname = None
for name in flist:
if name == froot:
rootname = froot
break
elif name == froot + '.fits':
rootname = froot + '.fits'
break
# If we have an incomplete filename, try building a default
# name and seeing if it exists...
#
# Set up default list of suffix/extensions to add to rootname
_extlist = []
for extn in EXTLIST:
_extlist.append(extn)
if rootname is None:
# Add any user-specified extension to list of extensions...
if ext is not None:
for i in ext:
_extlist.insert(0,i)
# loop over all extensions looking for a filename that matches...
for extn in _extlist:
# Start by looking for filename with exactly
# the same case a provided in ASN table...
rname = froot + extn
for name in flist:
if rname == name:
rootname = name
break
if rootname is None:
# Try looking for all lower-case filename
# instead of a mixed-case filename as required
# by the pipeline.
rname = froot.lower() + extn
for name in flist:
if rname == name:
rootname = name
break
if rootname is not None:
break
# If we still haven't found the file, see if we have the
# info to build one...
if rootname is None and ext is not None:
# Check to see if we have a full filename to start with...
_indx = froot.find('.')
if _indx > 0:
rootname = froot[:_indx] + ext[0]
else:
rootname = froot + ext[0]
if fpath not in ['.', '', ' ', None]:
rootname = os.path.join(fpath, rootname)
# It will be up to the calling routine to verify
# that a valid rootname, rather than 'None', was returned.
return rootname |
<SYSTEM_TASK:>
General, write-safe method for returning a keyword value from the header of
<END_TASK>
<USER_TASK:>
Description:
def getKeyword(filename, keyword, default=None, handle=None):
"""
General, write-safe method for returning a keyword value from the header of
an IRAF-recognized image.
Returns the value as a string.
""" |
# Ensure that there is at least 1 extension specified...
if filename.find('[') < 0:
filename += '[0]'
_fname, _extn = parseFilename(filename)
if not handle:
# Open image whether it is FITS or GEIS
_fimg = openImage(_fname)
else:
# Use what the user provides, after ensuring
# that it is a proper PyFITS object.
if isinstance(handle, fits.HDUList):
_fimg = handle
else:
raise ValueError('Handle must be %r object!' % fits.HDUList)
# Address the correct header
_hdr = getExtn(_fimg, _extn).header
try:
value = _hdr[keyword]
except KeyError:
_nextn = findKeywordExtn(_fimg, keyword)
try:
value = _fimg[_nextn].header[keyword]
except KeyError:
value = ''
if not handle:
_fimg.close()
del _fimg
if value == '':
if default is None:
value = None
else:
value = default
# NOTE: Need to clean up the keyword.. Occasionally the keyword value
# goes right up to the "/" FITS delimiter, and iraf.keypar is incapable
# of realizing this, so it incorporates "/" along with the keyword value.
# For example, after running "pydrizzle" on the image "j8e601bkq_flt.fits",
# the CD keywords look like this:
#
# CD1_1 = 9.221627430999639E-06/ partial of first axis coordinate w.r.t. x
# CD1_2 = -1.0346992614799E-05 / partial of first axis coordinate w.r.t. y
#
# so for CD1_1, iraf.keypar returns:
# "9.221627430999639E-06/"
#
# So, the following piece of code CHECKS for this and FIXES the string,
# very simply by removing the last character if it is a "/".
# This fix courtesy of Anton Koekemoer, 2002.
elif isinstance(value, string_types):
if value[-1:] == '/':
value = value[:-1]
return value |
<SYSTEM_TASK:>
Parse out filename from any specified extensions.
<END_TASK>
<USER_TASK:>
Description:
def parseFilename(filename):
"""
Parse out filename from any specified extensions.
Returns rootname and string version of extension name.
""" |
# Parse out any extension specified in filename
_indx = filename.find('[')
if _indx > 0:
# Read extension name provided
_fname = filename[:_indx]
_extn = filename[_indx + 1:-1]
else:
_fname = filename
_extn = None
return _fname, _extn |
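Two quick examples of the split (doctest-style, assuming the function is in scope):

>>> parseFilename('test.fits[sci,1]')
('test.fits', 'sci,1')
>>> parseFilename('test.fits')
('test.fits', None)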
<SYSTEM_TASK:>
Return the number of 'extname' extensions, defaulting to counting the
<END_TASK>
<USER_TASK:>
Description:
def countExtn(fimg, extname='SCI'):
"""
Return the number of 'extname' extensions, defaulting to counting the
number of SCI extensions.
""" |
closefits = False
if isinstance(fimg, string_types):
fimg = fits.open(fimg)
closefits = True
n = 0
for e in fimg:
if 'extname' in e.header and e.header['extname'] == extname:
n += 1
if closefits:
fimg.close()
return n |
<SYSTEM_TASK:>
Returns the PyFITS extension corresponding to extension specified in
<END_TASK>
<USER_TASK:>
Description:
def getExtn(fimg, extn=None):
"""
Returns the PyFITS extension corresponding to extension specified in
filename.
Defaults to returning the first extension with data or the primary
extension, if none have data. If a non-existent extension has been
specified, it raises a `KeyError` exception.
""" |
# If no extension is provided, search for first extension
# in FITS file with data associated with it.
if extn is None:
# Set up default to point to PRIMARY extension.
_extn = fimg[0]
# then look for first extension with data.
for _e in fimg:
if _e.data is not None:
_extn = _e
break
else:
# An extension was provided, so parse it out...
if repr(extn).find(',') > 1:
if isinstance(extn, tuple):
# We have a tuple possibly created by parseExtn(), so
# turn it into a list for easier manipulation.
_extns = list(extn)
if '' in _extns:
_extns.remove('')
else:
_extns = extn.split(',')
# Two values given for extension:
# for example, 'sci,1' or 'dq,1'
try:
_extn = fimg[_extns[0], int(_extns[1])]
except KeyError:
_extn = None
for e in fimg:
hdr = e.header
if ('extname' in hdr and
hdr['extname'].lower() == _extns[0].lower() and
hdr['extver'] == int(_extns[1])):
_extn = e
break
elif repr(extn).find('/') > 1:
# We are working with GEIS group syntax
_indx = str(extn[:extn.find('/')])
_extn = fimg[int(_indx)]
elif isinstance(extn, string_types):
if extn.strip() == '':
_extn = None # force error since invalid name was provided
# Only one extension value specified...
elif extn.isdigit():
# We only have an extension number specified as a string...
_nextn = int(extn)
else:
# We only have EXTNAME specified...
_nextn = None
if extn.lower() == 'primary':
_nextn = 0
else:
i = 0
for hdu in fimg:
isimg = 'extname' in hdu.header
hdr = hdu.header
if isimg and extn.lower() == hdr['extname'].lower():
_nextn = i
break
i += 1
if _nextn < len(fimg):
_extn = fimg[_nextn]
else:
_extn = None
else:
# Only integer extension number given, or default of 0 is used.
if int(extn) < len(fimg):
_extn = fimg[int(extn)]
else:
_extn = None
if _extn is None:
raise KeyError('Extension %s not found' % extn)
return _extn |
<SYSTEM_TASK:>
Search a directory for full filename with optional path.
<END_TASK>
<USER_TASK:>
Description:
def findFile(input):
"""Search a directory for full filename with optional path.""" |
# If no input name is provided, default to returning 'no'(FALSE)
if not input:
return no
# We use 'osfn' here to insure that any IRAF variables are
# expanded out before splitting out the path...
_fdir, _fname = os.path.split(osfn(input))
if _fdir == '':
_fdir = os.curdir
try:
flist = os.listdir(_fdir)
except OSError:
# handle the case when the requested file is on a disconnected network store
return no
_root, _extn = parseFilename(_fname)
found = no
for name in flist:
if name == _root:
# Check to see if given extension, if any, exists
if _extn is None:
found = yes
continue
else:
_split = _extn.split(',')
_extnum = None
_extver = None
if _split[0].isdigit():
_extname = None
_extnum = int(_split[0])
else:
_extname = _split[0]
if len(_split) > 1:
_extver = int(_split[1])
else:
_extver = 1
f = openImage(_root)
f.close()
if _extnum is not None:
if _extnum < len(f):
found = yes
del f
continue
else:
del f
else:
_fext = findExtname(f, _extname, extver=_extver)
if _fext is not None:
found = yes
del f
continue
return found |
<SYSTEM_TASK:>
Checks to see if file specified exists in current or specified directory.
<END_TASK>
<USER_TASK:>
Description:
def checkFileExists(filename, directory=None):
"""
Checks to see if file specified exists in current or specified directory.
Default is current directory. Returns True if it exists, False if not found.
""" |
if directory is not None:
fname = os.path.join(directory,filename)
else:
fname = filename
_exist = os.path.exists(fname)
return _exist |
<SYSTEM_TASK:>
Copy a file whole from input to output.
<END_TASK>
<USER_TASK:>
Description:
def copyFile(input, output, replace=None):
"""Copy a file whole from input to output.""" |
_found = findFile(output)
if not _found or (_found and replace):
shutil.copy2(input, output) |
<SYSTEM_TASK:>
Utility function for deleting a list of files or a single file.
<END_TASK>
<USER_TASK:>
Description:
def removeFile(inlist):
"""
Utility function for deleting a list of files or a single file.
This function will automatically delete both files of a GEIS image, just
like 'iraf.imdelete'.
""" |
if not isinstance(inlist, string_types):
# We do have a list, so delete all filenames in list.
# Treat like a list of full filenames
_ldir = os.listdir('.')
for f in inlist:
# Now, check to see if there are wildcards which need to be expanded
if f.find('*') >= 0 or f.find('?') >= 0:
# We have a wild card specification
regpatt = f.replace('?', '.?')
regpatt = regpatt.replace('*', '.*')
_reg = re.compile(regpatt)
for file in _ldir:
if _reg.match(file):
_remove(file)
else:
# This is just a single filename
_remove(f)
else:
# It must be a string then, so treat as a single filename
_remove(inlist) |
<SYSTEM_TASK:>
This function will return the index of the extension in a multi-extension
<END_TASK>
<USER_TASK:>
Description:
def findKeywordExtn(ft, keyword, value=None):
"""
This function will return the index of the extension in a multi-extension
FITS file which contains the desired keyword with the given value.
""" |
i = 0
extnum = -1
# Search through all the extensions in the FITS object
for chip in ft:
hdr = chip.header
# Check to make sure the extension has the given keyword
if keyword in hdr:
if value is not None:
# If it does, then does the value match the desired value
# MUST use 'str.strip' to match against any input string!
if hdr[keyword].strip() == value:
extnum = i
break
else:
extnum = i
break
i += 1
# Return the index of the extension which contained the
# desired EXTNAME value.
return extnum |
<SYSTEM_TASK:>
Returns the list number of the extension corresponding to EXTNAME given.
<END_TASK>
<USER_TASK:>
Description:
def findExtname(fimg, extname, extver=None):
"""
Returns the list number of the extension corresponding to EXTNAME given.
""" |
i = 0
extnum = None
for chip in fimg:
hdr = chip.header
if 'EXTNAME' in hdr:
if hdr['EXTNAME'].strip() == extname.upper():
if extver is None or hdr['EXTVER'] == extver:
extnum = i
break
i += 1
return extnum |
<SYSTEM_TASK:>
Undo Python conversion of CL parameter or variable name.
<END_TASK>
<USER_TASK:>
Description:
def untranslateName(s):
"""Undo Python conversion of CL parameter or variable name.""" |
s = s.replace('DOT', '.')
s = s.replace('DOLLAR', '$')
# delete 'PY' at start of name components
if s[:2] == 'PY': s = s[2:]
s = s.replace('.PY', '.')
return s |
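For example, a name mangled on the way into Python comes back out in its original CL form:

>>> untranslateName('PYimageDOTfits')
'image.fits'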
<SYSTEM_TASK:>
Returns true if CL variable is defined.
<END_TASK>
<USER_TASK:>
Description:
def defvar(varname):
"""Returns true if CL variable is defined.""" |
if 'pyraf' in sys.modules:
#ONLY if pyraf is already loaded, import iraf into the namespace
from pyraf import iraf
else:
# else set iraf to None so it knows to not use iraf's environment
iraf = None
if iraf:
_irafdef = iraf.envget(varname)
else:
_irafdef = 0
return varname in _varDict or varname in os.environ or _irafdef |
<SYSTEM_TASK:>
Print value of IRAF or OS environment variables.
<END_TASK>
<USER_TASK:>
Description:
def show(*args, **kw):
"""Print value of IRAF or OS environment variables.""" |
if len(kw):
raise TypeError('unexpected keyword argument: %r' % list(kw))
if args:
for arg in args:
print(envget(arg))
else:
# print them all
listVars(prefix=" ", equals="=") |
<SYSTEM_TASK:>
Unset IRAF environment variables.
<END_TASK>
<USER_TASK:>
Description:
def unset(*args, **kw):
"""
Unset IRAF environment variables.
This is not a standard IRAF task, but it is obviously useful. It makes the
resulting variables undefined. It silently ignores variables that are not
defined. It does not change the os environment variables.
""" |
if len(kw) != 0:
raise SyntaxError("unset requires a list of variable names")
for arg in args:
if arg in _varDict:
del _varDict[arg] |
<SYSTEM_TASK:>
Set up connection before executing function, commit and close connection
<END_TASK>
<USER_TASK:>
Description:
def dbcon(func):
"""Set up connection before executing function, commit and close connection
afterwards. Unless a connection already has been created.""" |
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.dbcon is None:
# set up connection
self.dbcon = sqlite3.connect(self.db)
self.dbcur = self.dbcon.cursor()
self.dbcur.execute(SQL_SENSOR_TABLE)
self.dbcur.execute(SQL_TMPO_TABLE)
# execute function
try:
result = func(*args, **kwargs)
except Exception as e:
# on exception, first close connection and then raise
self.dbcon.rollback()
self.dbcon.commit()
self.dbcon.close()
self.dbcon = None
self.dbcur = None
raise e
else:
# commit everything and close connection
self.dbcon.commit()
self.dbcon.close()
self.dbcon = None
self.dbcur = None
else:
result = func(*args, **kwargs)
return result
return wrapper |
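A minimal usage sketch, assuming a class that carries the database path in self.db and initializes self.dbcon and self.dbcur to None, which is the shape the decorator expects (the class name here is hypothetical; SQL_SENSOR_INS mirrors the module constant referenced above):

class SensorStore(object):  # hypothetical name
    def __init__(self, db=":memory:"):
        self.db = db
        self.dbcon = None
        self.dbcur = None

    @dbcon
    def add(self, sid, token):
        self.dbcur.execute(SQL_SENSOR_INS, (sid, token))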
<SYSTEM_TASK:>
Add new sensor to the database
<END_TASK>
<USER_TASK:>
Description:
def add(self, sid, token):
"""
Add new sensor to the database
Parameters
----------
sid : str
SensorId
token : str
""" |
try:
self.dbcur.execute(SQL_SENSOR_INS, (sid, token))
except sqlite3.IntegrityError: # sensor entry exists
pass |
<SYSTEM_TASK:>
Remove sensor from the database
<END_TASK>
<USER_TASK:>
Description:
def remove(self, sid):
"""
Remove sensor from the database
Parameters
----------
sid : str
SensorID
""" |
self.dbcur.execute(SQL_SENSOR_DEL, (sid,))
self.dbcur.execute(SQL_TMPO_DEL, (sid,)) |
<SYSTEM_TASK:>
List all tmpo-blocks in the database
<END_TASK>
<USER_TASK:>
Description:
def list(self, *sids):
"""
List all tmpo-blocks in the database
Parameters
----------
sids : list of str
SensorID's for which to list blocks
Optional, leave empty to get them all
Returns
-------
list[list[tuple]]
""" |
if sids == ():
sids = [sid for (sid,) in self.dbcur.execute(SQL_SENSOR_ALL)]
slist = []
for sid in sids:
tlist = []
for tmpo in self.dbcur.execute(SQL_TMPO_ALL, (sid,)):
tlist.append(tmpo)
sid, rid, lvl, bid, ext, ctd, blk = tmpo
self._dprintf(DBG_TMPO_WRITE, ctd, sid, rid, lvl, bid, len(blk))
slist.append(tlist)
return slist |
<SYSTEM_TASK:>
Create data Series
<END_TASK>
<USER_TASK:>
Description:
def series(self, sid, recycle_id=None, head=None, tail=None,
datetime=True):
"""
Create data Series
Parameters
----------
sid : str
recycle_id : optional
head : int | pandas.Timestamp, optional
Start of the interval
default earliest available
tail : int | pandas.Timestamp, optional
End of the interval
default max epoch
datetime : bool
convert index to datetime
default True
Returns
-------
pandas.Series
""" |
if head is None:
head = 0
else:
head = self._2epochs(head)
if tail is None:
tail = EPOCHS_MAX
else:
tail = self._2epochs(tail)
if recycle_id is None:
self.dbcur.execute(SQL_TMPO_RID_MAX, (sid,))
recycle_id = self.dbcur.fetchone()[0]
tlist = self.list(sid)[0]
srlist = []
for _sid, rid, lvl, bid, ext, ctd, blk in tlist:
if (recycle_id == rid
and head < self._blocktail(lvl, bid)
and tail >= bid):
srlist.append(self._blk2series(ext, blk, head, tail))
if len(srlist) > 0:
ts = pd.concat(srlist)
ts.name = sid
if datetime is True:
ts.index = pd.to_datetime(ts.index, unit="s", utc=True)
return ts
else:
return pd.Series([], name=sid) |
<SYSTEM_TASK:>
Create data frame
<END_TASK>
<USER_TASK:>
Description:
def dataframe(self, sids, head=0, tail=EPOCHS_MAX, datetime=True):
"""
Create data frame
Parameters
----------
sids : list[str]
head : int | pandas.Timestamp, optional
Start of the interval
default earliest available
tail : int | pandas.Timestamp, optional
End of the interval
default max epoch
datetime : bool
convert index to datetime
default True
Returns
-------
pandas.DataFrame
""" |
if head is None:
head = 0
else:
head = self._2epochs(head)
if tail is None:
tail = EPOCHS_MAX
else:
tail = self._2epochs(tail)
series = [self.series(sid, head=head, tail=tail, datetime=False)
for sid in sids]
df = pd.concat(series, axis=1)
if datetime is True:
df.index = pd.to_datetime(df.index, unit="s", utc=True)
return df |
<SYSTEM_TASK:>
Alter the passed function signature string to add the given kewords
<END_TASK>
<USER_TASK:>
Description:
def addKwdArgsToSig(sigStr, kwArgsDict):
""" Alter the passed function signature string to add the given kewords """ |
retval = sigStr
if len(kwArgsDict) > 0:
retval = retval.strip(' ,)') # open up the r.h.s. for more args
for k in kwArgsDict:
if retval[-1] != '(': retval += ", "
retval += str(k)+"="+str(kwArgsDict[k])
retval += ')'
return retval |
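For example:

>>> addKwdArgsToSig('foo(a, b)', {'c': 1})
'foo(a, b, c=1)'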
<SYSTEM_TASK:>
Defines the gaussian function to be used as the model.
<END_TASK>
<USER_TASK:>
Description:
def _gauss_funct(p, fjac=None, x=None, y=None, err=None,
weights=None):
"""
Defines the gaussian function to be used as the model.
""" |
if p[2] != 0.0:
Z = (x - p[1]) / p[2]
model = p[0] * np.e ** (-Z ** 2 / 2.0)
else:
model = np.zeros(np.size(x))
status = 0
if weights is not None:
if err is not None:
print("Warning: Ignoring errors and using weights.\n")
return [status, (y - model) * weights]
elif err is not None:
return [status, (y - model) / err]
else:
return [status, y - model] |
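The `[status, residuals]` return value follows the nmpfit user-function convention: a non-negative status signals success, and the fitter minimizes the sum of squared residuals. A hedged standalone check of the model term, with no fitter involved:

import numpy as np

x = np.linspace(-3.0, 3.0, 7)
status, resid = _gauss_funct([1.0, 0.0, 1.0], x=x, y=np.zeros_like(x))
# with y == 0 the residual is just -model, i.e. minus a unit gaussian
print(status)                                    # 0
print(np.allclose(-resid, np.exp(-x**2 / 2.0)))  # True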
<SYSTEM_TASK:>
Return the gaussian fit as an object.
<END_TASK>
<USER_TASK:>
Description:
def gfit1d(y, x=None, err=None, weights=None, par=None, parinfo=None,
maxiter=200, quiet=0):
"""
Return the gaussian fit as an object.
Parameters
----------
y: 1D Numpy array
The data to be fitted
x: 1D Numpy array
(optional) The x values of the y array. x and y must
have the same shape.
err: 1D Numpy array
(optional) 1D array with measurement errors, must be
the same shape as y
weights: 1D Numpy array
(optional) 1D array with weights, must be the same
shape as y
par: List
(optional) Starting values for the parameters to be fitted
parinfo: Dictionary of lists
(optional) provides additional information for the
parameters. For a detailed description see nmpfit.py.
Parinfo can be used to limit parameters or keep
some of them fixed.
maxiter: number
Maximum number of iterations to perform
Default: 200
quiet: number
if set to 1, nmpfit does not print to the screen
Default: 0
Examples
--------
>>> x = np.arange(10,20, 0.1)
>>> y= 10*np.e**(-(x-15)**2/4)
>>> print(gfit1d(y,x=x, maxiter=20,quiet=1).params)
[10. 15. 1.41421356]
""" |
    y = y.astype(float)  # np.float was removed in NumPy 1.24; use builtin float
    if weights is not None:
        weights = weights.astype(float)
    if err is not None:
        err = err.astype(float)
    if x is None and len(y.shape) == 1:
        x = np.arange(len(y)).astype(float)
if x.shape != y.shape:
print("input arrays X and Y must be of equal shape.\n")
return
fa = {'x': x, 'y': y, 'err': err, 'weights': weights}
if par is not None:
p = par
else:
ysigma = y.std()
ind = np.nonzero(y > ysigma)[0]
if len(ind) != 0:
xind = int(ind.mean())
p2 = x[xind]
p1 = y[xind]
p3 = 1.0
else:
            ymax = y.max()
            ymin = y.min()
            ymean = y.mean()
            if (ymax - ymean) > abs(ymin - ymean):
                p1 = ymax
            else:
                p1 = ymin
            p2 = x.mean()
            p3 = 1.0
p = [p1, p2, p3]
    m = nmpfit.mpfit(_gauss_funct, p, parinfo=parinfo, functkw=fa,
                     maxiter=maxiter, quiet=quiet)
    if m.status <= 0:
        print('error message = ', m.errmsg)
return m |
<SYSTEM_TASK:>
filter lets Django managers use `objects.filter` on a hashable object.
<END_TASK>
<USER_TASK:>
Description:
def filter(self, *args, **kwargs):
"""filter lets django managers use `objects.filter` on a hashable object.""" |
obj = kwargs.pop(self.object_property_name, None)
if obj is not None:
kwargs['object_hash'] = self.model._compute_hash(obj)
return super().filter(*args, **kwargs) |
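A hedged usage sketch: `MyModel` and the `estimator` keyword are placeholders (the real keyword is whatever `object_property_name` is configured to be); the manager swaps the object out for its hash before querying.

# sketch only -- model and field names are illustrative
fitted = some_training_step()                  # any picklable object
qs = MyModel.objects.filter(estimator=fitted)  # rewritten to object_hash=<hash>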
<SYSTEM_TASK:>
this method allows Django managers to use `objects.get_or_create` and
<END_TASK>
<USER_TASK:>
Description:
def _extract_model_params(self, defaults, **kwargs):
"""this method allows django managers use `objects.get_or_create` and
`objects.update_or_create` on a hashable object.
""" |
obj = kwargs.pop(self.object_property_name, None)
if obj is not None:
kwargs['object_hash'] = self.model._compute_hash(obj)
lookup, params = super()._extract_model_params(defaults, **kwargs)
if obj is not None:
params[self.object_property_name] = obj
del params['object_hash']
return lookup, params |
<SYSTEM_TASK:>
a private method that persists an estimator object to the filesystem
<END_TASK>
<USER_TASK:>
Description:
def persist(self):
"""a private method that persists an estimator object to the filesystem""" |
if self.object_hash:
data = dill.dumps(self.object_property)
f = ContentFile(data)
self.object_file.save(self.object_hash, f, save=False)
f.close()
self._persisted = True
return self._persisted |
<SYSTEM_TASK:>
a private method that loads an estimator object from the filesystem
<END_TASK>
<USER_TASK:>
Description:
def load(self):
"""a private method that loads an estimator object from the filesystem""" |
if self.is_file_persisted:
self.object_file.open()
temp = dill.loads(self.object_file.read())
self.set_object(temp)
self.object_file.close() |
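Taken together, `persist` and `load` form a dill-based round trip through Django file storage. A hedged sketch with placeholder names (`set_object` and `object_hash` do appear in the code above; the model class is illustrative):

obj = EstimatorRecord()            # illustrative model using persist/load
obj.set_object(fitted_estimator)   # any dill-picklable object
obj.persist()                      # serializes it under its object_hash
obj.save()

fresh = EstimatorRecord.objects.get(object_hash=obj.object_hash)
fresh.load()                       # reads the file back and calls set_object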
<SYSTEM_TASK:>
Return an Estimator object given the path of the file, relative to the MEDIA_ROOT
<END_TASK>
<USER_TASK:>
Description:
def create_from_file(cls, filename):
"""Return an Estimator object given the path of the file, relative to the MEDIA_ROOT""" |
obj = cls()
obj.object_file = filename
obj.load()
return obj |
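Hedged usage: the filename is relative to MEDIA_ROOT and is typically the hash under which `persist` saved the object (the path and class name below are illustrative):

est = EstimatorRecord.create_from_file("estimators/3f2a9c")  # illustrative path
# est now holds the unpickled object, restored by load() above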
<SYSTEM_TASK:>
Return our application dir. Create it if it doesn't exist.
<END_TASK>
<USER_TASK:>
Description:
def getAppDir():
""" Return our application dir. Create it if it doesn't exist. """ |
# Be sure the resource dir exists
theDir = os.path.expanduser('~/.')+APP_NAME.lower()
if not os.path.exists(theDir):
try:
os.mkdir(theDir)
except OSError:
print('Could not create "'+theDir+'" to save GUI settings.')
theDir = "./"+APP_NAME.lower()
return theDir |
<SYSTEM_TASK:>
Read a config file and pull out the value of a given keyword.
<END_TASK>
<USER_TASK:>
Description:
def getEmbeddedKeyVal(cfgFileName, kwdName, dflt=None):
""" Read a config file and pull out the value of a given keyword. """ |
# Assume this is a ConfigObj file. Use that s/w to quickly read it and
# put it in dict format. Assume kwd is at top level (not in a section).
# The input may also be a .cfgspc file.
#
# Only use ConfigObj here as a tool to generate a dict from a file - do
# not use the returned object as a ConfigObj per se. As such, we can call
# with "simple" format, ie. no cfgspc, no val'n, and "list_values"=False.
try:
junkObj = configobj.ConfigObj(cfgFileName, list_values=False)
except:
if kwdName == TASK_NAME_KEY:
raise KeyError('Can not parse as a parameter config file: '+ \
'\n\t'+os.path.realpath(cfgFileName))
else:
raise KeyError('Unfound key "'+kwdName+'" while parsing: '+ \
'\n\t'+os.path.realpath(cfgFileName))
if kwdName in junkObj:
retval = junkObj[kwdName]
del junkObj
return retval
# Not found
if dflt is not None:
del junkObj
return dflt
else:
if kwdName == TASK_NAME_KEY:
raise KeyError('Can not parse as a parameter config file: '+ \
'\n\t'+os.path.realpath(cfgFileName))
else:
raise KeyError('Unfound key "'+kwdName+'" while parsing: '+ \
'\n\t'+os.path.realpath(cfgFileName)) |
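A hedged example of the lookup; the file contents are illustrative, and `_task_name_` is assumed here to be the value of TASK_NAME_KEY (its definition is not shown in this excerpt):

# given mytask.cfg containing, at top level:
#     _task_name_ = mytask
#     infile = ''
tname = getEmbeddedKeyVal('mytask.cfg', '_task_name_')           # 'mytask'
quiet = getEmbeddedKeyVal('mytask.cfg', 'nosuchkey', dflt='no')  # 'no' (default)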
<SYSTEM_TASK:>
This is a specialized function which is meant only to keep the
<END_TASK>
<USER_TASK:>
Description:
def getCfgFilesInDirForTask(aDir, aTask, recurse=False):
""" This is a specialized function which is meant only to keep the
same code from needlessly being much repeated throughout this
application. This must be kept as fast and as light as possible.
This checks a given directory for .cfg files matching a given
task. If recurse is True, it will check subdirectories.
If aTask is None, it returns all files and ignores aTask.
""" |
if recurse:
flist = irafutils.rglob(aDir, '*.cfg')
else:
flist = glob.glob(aDir+os.sep+'*.cfg')
if aTask:
retval = []
for f in flist:
try:
if aTask == getEmbeddedKeyVal(f, TASK_NAME_KEY, ''):
retval.append(f)
except Exception as e:
print('Warning: '+str(e))
return retval
else:
return flist |
<SYSTEM_TASK:>
See if the user has one of their own local .cfg files for this task,
<END_TASK>
<USER_TASK:>
Description:
def getUsrCfgFilesForPyPkg(pkgName):
""" See if the user has one of their own local .cfg files for this task,
such as might be created automatically during the save of a read-only
package, and return their names. """ |
# Get the python package and its .cfg file
thePkg, theFile = findCfgFileForPkg(pkgName, '.cfg')
# See if the user has any of their own local .cfg files for this task
tname = getEmbeddedKeyVal(theFile, TASK_NAME_KEY)
flist = getCfgFilesInDirForTask(getAppDir(), tname)
return flist |
<SYSTEM_TASK:>
See if we have write-privileges to this file. If we do, and we
<END_TASK>
<USER_TASK:>
Description:
def checkSetReadOnly(fname, raiseOnErr = False):
""" See if we have write-privileges to this file. If we do, and we
are not supposed to, then fix that case. """ |
if os.access(fname, os.W_OK):
# We can write to this but it is supposed to be read-only. Fix it.
# Take away usr-write, leave group and other alone, though it
# may be simpler to just force/set it to: r--r--r-- or r--------
irafutils.setWritePrivs(fname, False, ignoreErrors= not raiseOnErr) |
<SYSTEM_TASK:>
Return True if this string name denotes a hidden par or section
<END_TASK>
<USER_TASK:>
Description:
def isHiddenName(astr):
""" Return True if this string name denotes a hidden par or section """ |
    return (astr is not None and len(astr) > 2 and
            astr.startswith('_') and astr.endswith('_'))
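A few boundary cases, runnable against the function above:

print(isHiddenName('_rules_'))  # True: leading and trailing underscore, len > 2
print(isHiddenName('_x'))       # False: no trailing underscore
print(isHiddenName('__'))       # False: length must exceed 2
print(isHiddenName(None))       # False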
<SYSTEM_TASK:>
Set or reset the internal param list from the dict's contents.
<END_TASK>
<USER_TASK:>
Description:
def syncParamList(self, firstTime, preserve_order=True):
""" Set or reset the internal param list from the dict's contents. """ |
# See the note in setParam about this design.
# Get latest par values from dict. Make sure we do not
# change the id of the __paramList pointer here.
new_list = self._getParamsFromConfigDict(self, initialPass=firstTime)
# dumpCfgspcTo=sys.stdout)
# Have to add this odd last one for the sake of the GUI (still?)
if self._forUseWithEpar:
new_list.append(basicpar.IrafParS(['$nargs','s','h','N']))
if len(self.__paramList) > 0 and preserve_order:
# Here we have the most up-to-date data from the actual data
# model, the ConfigObj dict, and we need to use it to fill in
# our param list. BUT, we need to preserve the order our list
# has had up until now (by unique parameter name).
namesInOrder = [p.fullName() for p in self.__paramList]
assert len(namesInOrder) == len(new_list), \
'Mismatch in num pars, had: '+str(len(namesInOrder))+ \
', now we have: '+str(len(new_list))+', '+ \
str([p.fullName() for p in new_list])
self.__paramList[:] = [] # clear list, keep same pointer
# create a flat dict view of new_list, for ease of use in next step
            new_list_dict = {par.fullName(): par for par in new_list}
# populate
for fn in namesInOrder:
self.__paramList.append(new_list_dict[fn])
else:
# Here we just take the data in whatever order it came.
self.__paramList[:] = new_list |
<SYSTEM_TASK:>
Return a par list just like ours, but with all default values.
<END_TASK>
<USER_TASK:>
Description:
def getDefaultParList(self):
""" Return a par list just like ours, but with all default values. """ |
# The code below (create a new set-to-dflts obj) is correct, but it
# adds a tenth of a second to startup. Clicking "Defaults" in the
# GUI does not call this. But this can be used to set the order seen.
# But first check for rare case of no cfg file name
if self.filename is None:
# this is a .cfgspc-only kind of object so far
self.filename = self.getDefaultSaveFilename(stub=True)
return copy.deepcopy(self.__paramList)
tmpObj = ConfigObjPars(self.filename, associatedPkg=self.__assocPkg,
setAllToDefaults=True, strict=False)
return tmpObj.getParList() |
<SYSTEM_TASK:>
This may be overridden by a subclass.
<END_TASK>
<USER_TASK:>
Description:
def run(self, *args, **kw):
""" This may be overridden by a subclass. """ |
if self._runFunc is not None:
# remove the two args sent by EditParDialog which we do not use
if 'mode' in kw: kw.pop('mode')
if '_save' in kw: kw.pop('_save')
return self._runFunc(self, *args, **kw)
else:
raise taskpars.NoExecError('No way to run task "'+self.__taskName+\
'". You must either override the "run" method in your '+ \
'ConfigObjPars subclass, or you must supply a "run" '+ \
'function in your package.') |
<SYSTEM_TASK:>
Dump all the trigger logic to a string and return it.
<END_TASK>
<USER_TASK:>
Description:
def triggerLogicToStr(self):
""" Print all the trigger logic to a string and return it. """ |
try:
import json
except ImportError:
return "Cannot dump triggers/dependencies/executes (need json)"
retval = "TRIGGERS:\n"+json.dumps(self._allTriggers, indent=3)
retval += "\nDEPENDENCIES:\n"+json.dumps(self._allDepdcs, indent=3)
retval += "\nTO EXECUTE:\n"+json.dumps(self._allExecutes, indent=3)
retval += "\n"
return retval |
<SYSTEM_TASK:>
Given a config file, find its associated config-spec file, and
<END_TASK>
<USER_TASK:>
Description:
def _findAssociatedConfigSpecFile(self, cfgFileName):
""" Given a config file, find its associated config-spec file, and
return the full pathname of the file. """ |
# Handle simplest 2 cases first: co-located or local .cfgspc file
retval = "."+os.sep+self.__taskName+".cfgspc"
if os.path.isfile(retval): return retval
retval = os.path.dirname(cfgFileName)+os.sep+self.__taskName+".cfgspc"
if os.path.isfile(retval): return retval
# Also try the resource dir
retval = self.getDefaultSaveFilename()+'spc' # .cfgspc
if os.path.isfile(retval): return retval
# Now try and see if there is a matching .cfgspc file in/under an
# associated package, if one is defined.
if self.__assocPkg is not None:
x, theFile = findCfgFileForPkg(None, '.cfgspc',
pkgObj = self.__assocPkg,
taskName = self.__taskName)
return theFile
# Finally try to import the task name and see if there is a .cfgspc
# file in that directory
x, theFile = findCfgFileForPkg(self.__taskName, '.cfgspc',
taskName = self.__taskName)
if os.path.exists(theFile):
return theFile
# unfound
raise NoCfgFileError('Unfound config-spec file for task: "'+ \
self.__taskName+'"') |
<SYSTEM_TASK:>
Return a list, in order, of any parameters marked with "pos=N" in
<END_TASK>
<USER_TASK:>
Description:
def getPosArgs(self):
""" Return a list, in order, of any parameters marked with "pos=N" in
the .cfgspc file. """ |
if len(self._posArgs) < 1: return []
# The first item in the tuple is the index, so we now sort by it
self._posArgs.sort()
# Build a return list
retval = []
for idx, scope, name in self._posArgs:
theDict, val = findScopedPar(self, scope, name)
retval.append(val)
return retval |
<SYSTEM_TASK:>
Return numbers from inputs or raise VdtParamError.
<END_TASK>
<USER_TASK:>
Description:
def _is_num_param(names, values, to_float=False):
"""
Return numbers from inputs or raise VdtParamError.
Lets ``None`` pass through.
Pass in keyword argument ``to_float=True`` to
use float for the conversion rather than int.
>>> _is_num_param(('', ''), (0, 1.0))
[0, 1]
>>> _is_num_param(('', ''), (0, 1.0), to_float=True)
[0.0, 1.0]
>>> _is_num_param(('a'), ('a')) # doctest: +SKIP
Traceback (most recent call last):
VdtParamError: passed an incorrect value "a" for parameter "a".
""" |
    fun = float if to_float else int  # clearer than the old and/or idiom
out_params = []
for (name, val) in zip(names, values):
if val is None:
out_params.append(val)
elif isinstance(val, number_or_string_types):
try:
out_params.append(fun(val))
except ValueError:
raise VdtParamError(name, val)
else:
raise VdtParamError(name, val)
return out_params |
<SYSTEM_TASK:>
Check if the value represents a boolean.
<END_TASK>
<USER_TASK:>
Description:
def is_boolean(value):
"""
Check if the value represents a boolean.
>>> vtor = Validator()
>>> vtor.check('boolean', 0)
0
>>> vtor.check('boolean', False)
0
>>> vtor.check('boolean', '0')
0
>>> vtor.check('boolean', 'off')
0
>>> vtor.check('boolean', 'false')
0
>>> vtor.check('boolean', 'no')
0
>>> vtor.check('boolean', 'nO')
0
>>> vtor.check('boolean', 'NO')
0
>>> vtor.check('boolean', 1)
1
>>> vtor.check('boolean', True)
1
>>> vtor.check('boolean', '1')
1
>>> vtor.check('boolean', 'on')
1
>>> vtor.check('boolean', 'true')
1
>>> vtor.check('boolean', 'yes')
1
>>> vtor.check('boolean', 'Yes')
1
>>> vtor.check('boolean', 'YES')
1
>>> vtor.check('boolean', '') # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "" is of the wrong type.
>>> vtor.check('boolean', 'up') # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "up" is of the wrong type.
""" |
if isinstance(value, string_types):
try:
return bool_dict[value.lower()]
except KeyError:
raise VdtTypeError(value)
# we do an equality test rather than an identity test
    # this ensures Python 2.2 compatibility
# and allows 0 and 1 to represent True and False
if value == False:
return False
elif value == True:
return True
else:
raise VdtTypeError(value) |
<SYSTEM_TASK:>
Check that the supplied value is an Internet Protocol address, v.4,
<END_TASK>
<USER_TASK:>
Description:
def is_ip_addr(value):
"""
Check that the supplied value is an Internet Protocol address, v.4,
represented by a dotted-quad string, i.e. '1.2.3.4'.
>>> vtor = Validator()
>>> vtor.check('ip_addr', '1 ')
'1'
>>> vtor.check('ip_addr', ' 1.2')
'1.2'
>>> vtor.check('ip_addr', ' 1.2.3 ')
'1.2.3'
>>> vtor.check('ip_addr', '1.2.3.4')
'1.2.3.4'
>>> vtor.check('ip_addr', '0.0.0.0')
'0.0.0.0'
>>> vtor.check('ip_addr', '255.255.255.255')
'255.255.255.255'
>>> vtor.check('ip_addr', '255.255.255.256') # doctest: +SKIP
Traceback (most recent call last):
VdtValueError: the value "255.255.255.256" is unacceptable.
>>> vtor.check('ip_addr', '1.2.3.4.5') # doctest: +SKIP
Traceback (most recent call last):
VdtValueError: the value "1.2.3.4.5" is unacceptable.
>>> vtor.check('ip_addr', 0) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
""" |
if not isinstance(value, string_types):
raise VdtTypeError(value)
value = value.strip()
try:
dottedQuadToNum(value)
except ValueError:
raise VdtValueError(value)
return value |
<SYSTEM_TASK:>
Check that the value is a list of values.
<END_TASK>
<USER_TASK:>
Description:
def is_list(value, min=None, max=None):
"""
Check that the value is a list of values.
You can optionally specify the minimum and maximum number of members.
It does no check on list members.
>>> vtor = Validator()
>>> vtor.check('list', ())
[]
>>> vtor.check('list', [])
[]
>>> vtor.check('list', (1, 2))
[1, 2]
>>> vtor.check('list', [1, 2])
[1, 2]
>>> vtor.check('list(3)', (1, 2)) # doctest: +SKIP
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6)) # doctest: +SKIP
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4)) # doctest: +SKIP
[1, 2, 3, 4]
>>> vtor.check('list', 0) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('list', '12') # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
""" |
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
if isinstance(value, string_types):
raise VdtTypeError(value)
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return list(value) |
<SYSTEM_TASK:>
Check that the value is a list of integers.
<END_TASK>
<USER_TASK:>
Description:
def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor = Validator()
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1, 2))
[1, 2]
>>> vtor.check('int_list', [1, 2])
[1, 2]
>>> vtor.check('int_list', [1, 'a']) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
""" |
return [is_integer(mem) for mem in is_list(value, min, max)] |
<SYSTEM_TASK:>
Check that the value is a list of booleans.
<END_TASK>
<USER_TASK:>
Description:
def is_bool_list(value, min=None, max=None):
"""
Check that the value is a list of booleans.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a boolean.
>>> vtor = Validator()
>>> vtor.check('bool_list', ())
[]
>>> vtor.check('bool_list', [])
[]
>>> check_res = vtor.check('bool_list', (True, False))
>>> check_res == [True, False]
1
>>> check_res = vtor.check('bool_list', [True, False])
>>> check_res == [True, False]
1
>>> vtor.check('bool_list', [True, 'a']) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
""" |
return [is_boolean(mem) for mem in is_list(value, min, max)] |
<SYSTEM_TASK:>
Check that the value is a list of floats.
<END_TASK>
<USER_TASK:>
Description:
def is_float_list(value, min=None, max=None):
"""
Check that the value is a list of floats.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a float.
>>> vtor = Validator()
>>> vtor.check('float_list', ())
[]
>>> vtor.check('float_list', [])
[]
>>> vtor.check('float_list', (1, 2.0))
[1.0, 2.0]
>>> vtor.check('float_list', [1, 2.0])
[1.0, 2.0]
>>> vtor.check('float_list', [1, 'a']) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
""" |
return [is_float(mem) for mem in is_list(value, min, max)] |
<SYSTEM_TASK:>
Check that the value is a list of strings.
<END_TASK>
<USER_TASK:>
Description:
def is_string_list(value, min=None, max=None):
"""
Check that the value is a list of strings.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a string.
>>> vtor = Validator()
>>> vtor.check('string_list', ())
[]
>>> vtor.check('string_list', [])
[]
>>> vtor.check('string_list', ('a', 'b'))
['a', 'b']
>>> vtor.check('string_list', ['a', 1]) # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "1" is of the wrong type.
>>> vtor.check('string_list', 'hello') # doctest: +SKIP
Traceback (most recent call last):
VdtTypeError: the value "hello" is of the wrong type.
""" |
if isinstance(value, string_types):
raise VdtTypeError(value)
return [is_string(mem) for mem in is_list(value, min, max)] |