<SYSTEM_TASK:>
r"""
<END_TASK>
<USER_TASK:>
Description:
def acs2d(input, exec_path='', time_stamps=False, verbose=False, quiet=False,
exe_args=None):
r"""
Run the acs2d.e executable as from the shell.
Output is automatically named based on input suffix:
+--------------------+----------------+------------------------------+
| INPUT | OUTPUT | EXPECTED DATA |
+====================+================+==============================+
| ``*_raw.fits`` | ``*_flt.fits`` | SBC image. |
+--------------------+----------------+------------------------------+
| ``*_blv_tmp.fits`` | ``*_flt.fits`` | ACSCCD output. |
+--------------------+----------------+------------------------------+
| ``*_blc_tmp.fits`` | ``*_flc.fits`` | ACSCCD output with PCTECORR. |
+--------------------+----------------+------------------------------+
| ``*_crj_tmp.fits`` | ``*_crj.fits`` | ACSREJ output. |
+--------------------+----------------+------------------------------+
| ``*_crc_tmp.fits`` | ``*_crc.fits`` | ACSREJ output with PCTECORR. |
+--------------------+----------------+------------------------------+
Parameters
----------
input : str or list of str
Input filenames in one of these formats:
* a single filename ('j1234567q_blv_tmp.fits')
* a Python list of filenames
* a partial filename with wildcards ('\*blv_tmp.fits')
* filename of an ASN table ('j12345670_asn.fits')
* an at-file (``@input``)
exec_path : str, optional
The complete path to ACS2D executable.
If not given, run ACS2D given by 'acs2d.e'.
time_stamps : bool, optional
Set to True to turn on the printing of time stamps.
verbose : bool, optional
Set to True for verbose output.
quiet : bool, optional
Set to True for quiet output.
exe_args : list, optional
Arbitrary arguments passed to underlying executable call.
Note: Implementation uses subprocess.call and whitespace is not
permitted. E.g. use exe_args=['--nThreads', '1']
""" |
from stsci.tools import parseinput # Optional package dependency
if exec_path:
if not os.path.exists(exec_path):
raise OSError('Executable not found: ' + exec_path)
call_list = [exec_path]
else:
call_list = ['acs2d.e']
# Parse input to get list of filenames to process.
# acs2d.e only takes 'file1,file2,...'
infiles, dummy_out = parseinput.parseinput(input)
call_list.append(','.join(infiles))
if time_stamps:
call_list.append('-t')
if verbose:
call_list.append('-v')
if quiet:
call_list.append('-q')
if exe_args:
call_list.extend(exe_args)
subprocess.check_call(call_list) |
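A minimal usage sketch, assuming ``acstools`` is installed, the HSTCAL ``acs2d.e`` executable is on the ``PATH``, and the filename is illustrative::

    from acstools import acs2d

    # Process one ACSCCD output file; acs2d.e writes the *_flt.fits product
    # next to the input, named according to the suffix table above.
    acs2d.acs2d('j1234567q_blv_tmp.fits', time_stamps=True, verbose=True)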
<SYSTEM_TASK:>
Check the inputs to ensure they are valid.
<END_TASK>
<USER_TASK:>
Description:
def _check_inputs(self):
"""Check the inputs to ensure they are valid.
Returns
-------
status : bool
True if all inputs are valid, False if one is not.
""" |
valid_detector = True
valid_filter = True
valid_date = True
# Determine if the submitted detector is valid
if self.detector not in self._valid_detectors:
msg = ('{} is not a valid detector option.\n'
'Please choose one of the following:\n{}\n'
'{}'.format(self.detector,
'\n'.join(self._valid_detectors),
self._msg_div))
LOG.error(msg)
valid_detector = False
# Determine if the submitted filter is valid
if (self.filt is not None and valid_detector and
self.filt not in self.valid_filters[self.detector]):
msg = ('{} is not a valid filter for {}\n'
'Please choose one of the following:\n{}\n'
'{}'.format(self.filt, self.detector,
'\n'.join(self.valid_filters[self.detector]),
self._msg_div))
LOG.error(msg)
valid_filter = False
# Determine if the submitted date is valid
date_check = self._check_date()
if date_check is not None:
LOG.error('{}\n{}'.format(date_check, self._msg_div))
valid_date = False
if not valid_detector or not valid_filter or not valid_date:
return False
return True |
<SYSTEM_TASK:>
Convenience method for determining if the input date is valid.
<END_TASK>
<USER_TASK:>
Description:
def _check_date(self, fmt='%Y-%m-%d'):
"""Convenience method for determining if the input date is valid.
Parameters
----------
fmt : str
The format of the date string. The default is ``%Y-%m-%d``, which
corresponds to ``YYYY-MM-DD``.
Returns
-------
status : str or `None`
If the date is valid, returns `None`. If the date is invalid,
returns a message explaining the issue.
""" |
result = None
try:
dt_obj = dt.datetime.strptime(self.date, fmt)
except ValueError:
result = '{} does not match YYYY-MM-DD format'.format(self.date)
else:
if dt_obj < self._acs_installation_date:
result = ('The observation date cannot occur '
'before ACS was installed ({})'
.format(self._acs_installation_date.strftime(fmt)))
elif dt_obj > self._extrapolation_date:
result = ('The observation date cannot occur after the '
'maximum allowable date, {}. Extrapolations of the '
'instrument throughput after this date lead to '
'high uncertainties and are therefore invalid.'
.format(self._extrapolation_date.strftime(fmt)))
finally:
return result |
<SYSTEM_TASK:>
Submit a request to the ACS Zeropoint Calculator.
<END_TASK>
<USER_TASK:>
Description:
def _submit_request(self):
"""Submit a request to the ACS Zeropoint Calculator.
If an exception is raised during the request, an error message is
given. Otherwise, the response is saved in the corresponding
attribute.
""" |
try:
self._response = urlopen(self._url)
except URLError as e:
msg = ('{}\n{}\nThe query failed! Please check your inputs. '
'If the error persists, submit a ticket to the '
'ACS Help Desk at hsthelp.stsci.edu with the error message '
'displayed above.'.format(str(e), self._msg_div))
LOG.error(msg)
self._failed = True
else:
self._failed = False |
<SYSTEM_TASK:>
Parse and format the results returned by the ACS Zeropoint Calculator.
<END_TASK>
<USER_TASK:>
Description:
def _parse_and_format(self):
""" Parse and format the results returned by the ACS Zeropoint Calculator.
Using ``beautifulsoup4``, find all the ``<td> </td>`` tags present in
the response. Format the results into an astropy.table.QTable with
corresponding units and assign it to the zpt_table attribute.
""" |
soup = BeautifulSoup(self._response.read(), 'html.parser')
# Grab all elements in the table returned by the ZPT calc.
td = soup.find_all('td')
# Remove the units attached to PHOTFLAM and PHOTPLAM column names.
td = [val.text.split(' ')[0] for val in td]
# Turn the single list into a 2-D numpy array
data = np.reshape(td,
(int(len(td) / self._block_size), self._block_size))
# Create the QTable, note that sometimes self._response will be empty
# even though the return was successful; hence the try/except to catch
# any potential index errors. Provide the user with a message and
# set the zpt_table to None.
try:
tab = QTable(data[1:, :],
names=data[0],
dtype=[str, float, float, float, float, float])
except IndexError as e:
msg = ('{}\n{}\n There was an issue parsing the request. '
'Try resubmitting the query. If this issue persists, please '
'submit a ticket to the Help Desk at '
'https://stsci.service-now.com/hst'
.format(e, self._msg_div))
LOG.info(msg)
self._zpt_table = None
else:
# If and only if no exception was raised, attach the units to each
# column of the QTable. Note we skip the FILTER column because
# Quantity objects in astropy must be numerical (i.e. not str)
for col in tab.colnames:
if col.lower() == 'filter':
continue
tab[col].unit = self._data_units[col]
self._zpt_table = tab |
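To see the ``<td>`` parsing step in isolation, here is a small sketch run on a hand-written HTML snippet; the column names, values and block size are illustrative, not real calculator output::

    import numpy as np
    from bs4 import BeautifulSoup

    html = ('<table><tr><td>FILTER</td><td>VEGAmag</td></tr>'
            '<tr><td>F555W</td><td>25.713</td></tr></table>')
    soup = BeautifulSoup(html, 'html.parser')
    # Keep only the text before any unit annotation, as done above.
    td = [val.text.split(' ')[0] for val in soup.find_all('td')]
    block_size = 2  # number of columns per row
    data = np.reshape(td, (len(td) // block_size, block_size))
    print(data[0])   # header row:  ['FILTER' 'VEGAmag']
    print(data[1:])  # data rows:   [['F555W' '25.713']]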
<SYSTEM_TASK:>
Submit the request to the ACS Zeropoints Calculator.
<END_TASK>
<USER_TASK:>
Description:
def fetch(self):
"""Submit the request to the ACS Zeropoints Calculator.
This method will:
* submit the request
* parse the response
* format the results into a table with the correct units
Returns
-------
tab : `astropy.table.QTable` or `None`
If the request was successful, returns a table; otherwise, `None`.
""" |
LOG.info('Checking inputs...')
valid_inputs = self._check_inputs()
if valid_inputs:
LOG.info('Submitting request to {}'.format(self._url))
self._submit_request()
if self._failed:
return
LOG.info('Parsing the response and formatting the results...')
self._parse_and_format()
return self.zpt_table
LOG.error('Please fix the incorrect input(s)') |
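A usage sketch of the full query workflow. It assumes this class is the one exposed as ``acstools.acszpt.Query`` and that its constructor accepts the ``date``, ``detector`` and ``filt`` attributes used by the methods above; network access to the online calculator is required::

    from acstools import acszpt

    q = acszpt.Query(date='2017-01-01', detector='WFC', filt='F555W')
    zpt_table = q.fetch()  # QTable on success, None otherwise
    if zpt_table is not None:
        print(zpt_table)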
<SYSTEM_TASK:>
Returns a QuerySet of all model instances that can be edited by the
<END_TASK>
<USER_TASK:>
Description:
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site.
""" |
qs = self.model._default_manager.get_queryset()
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs |
<SYSTEM_TASK:>
Instantiates a class-based view to provide listing functionality for
<END_TASK>
<USER_TASK:>
Description:
def index_view(self, request):
"""
Instantiates a class-based view to provide listing functionality for
the assigned model. The view class used can be overridden by changing
the 'index_view_class' attribute.
""" |
kwargs = {'model_admin': self}
view_class = self.index_view_class
return view_class.as_view(**kwargs)(request) |
<SYSTEM_TASK:>
Instantiates a class-based view to provide 'creation' functionality for
<END_TASK>
<USER_TASK:>
Description:
def create_view(self, request):
"""
Instantiates a class-based view to provide 'creation' functionality for
the assigned model, or redirect to Wagtail's create view if the
assigned model extends 'Page'. The view class used can be overridden by
changing the 'create_view_class' attribute.
""" |
kwargs = {'model_admin': self}
view_class = self.create_view_class
return view_class.as_view(**kwargs)(request) |
<SYSTEM_TASK:>
Instantiates a class-based view to provide a view that allows a parent
<END_TASK>
<USER_TASK:>
Description:
def choose_parent_view(self, request):
"""
Instantiates a class-based view to provide a view that allows a parent
page to be chosen for a new object, where the assigned model extends
Wagtail's Page model, and there is more than one potential parent for
new instances. The view class used can be overridden by changing the
'choose_parent_view_class' attribute.
""" |
kwargs = {'model_admin': self}
view_class = self.choose_parent_view_class
return view_class.as_view(**kwargs)(request) |
<SYSTEM_TASK:>
Instantiates a class-based view to provide 'edit' functionality for the
<END_TASK>
<USER_TASK:>
Description:
def edit_view(self, request, object_id):
"""
Instantiates a class-based view to provide 'edit' functionality for the
assigned model, or redirect to Wagtail's edit view if the assigned
model extends 'Page'. The view class used can be overridden by changing
the 'edit_view_class' attribute.
""" |
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.edit_view_class
return view_class.as_view(**kwargs)(request) |
<SYSTEM_TASK:>
Instantiates a class-based view to provide 'delete confirmation'
<END_TASK>
<USER_TASK:>
Description:
def confirm_delete_view(self, request, object_id):
"""
Instantiates a class-based view to provide 'delete confirmation'
functionality for the assigned model, or redirect to Wagtail's delete
confirmation view if the assigned model extends 'Page'. The view class
used can be overridden by changing the 'confirm_delete_view_class'
attribute.
""" |
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.confirm_delete_view_class
return view_class.as_view(**kwargs)(request) |
<SYSTEM_TASK:>
Utility function that provides a list of templates to try for a given
<END_TASK>
<USER_TASK:>
Description:
def get_templates(self, action='index'):
"""
Utility function that provides a list of templates to try for a given
view, when the template isn't overridden by one of the template
attributes on the class.
""" |
app = self.opts.app_label
model_name = self.opts.model_name
return [
'wagtailmodeladmin/%s/%s/%s.html' % (app, model_name, action),
'wagtailmodeladmin/%s/%s.html' % (app, action),
'wagtailmodeladmin/%s.html' % (action,),
] |
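A worked example of the fallback order this produces, using illustrative app and model names::

    app, model_name, action = 'shop', 'product', 'index'
    templates = [
        'wagtailmodeladmin/%s/%s/%s.html' % (app, model_name, action),
        'wagtailmodeladmin/%s/%s.html' % (app, action),
        'wagtailmodeladmin/%s.html' % (action,),
    ]
    # ['wagtailmodeladmin/shop/product/index.html',
    #  'wagtailmodeladmin/shop/index.html',
    #  'wagtailmodeladmin/index.html']
    print(templates)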
<SYSTEM_TASK:>
Utilised by Wagtail's 'register_permissions' hook to allow permissions
<END_TASK>
<USER_TASK:>
Description:
def get_permissions_for_registration(self):
"""
Utilised by Wagtail's 'register_permissions' hook to allow permissions
for a model to be assigned to groups in settings. This is only required
if the model isn't a Page model, and isn't registered as a Snippet
""" |
from wagtail.wagtailsnippets.models import SNIPPET_MODELS
if not self.is_pagemodel and self.model not in SNIPPET_MODELS:
return self.permission_helper.get_all_model_permissions()
return Permission.objects.none() |
<SYSTEM_TASK:>
Utilised by Wagtail's 'register_menu_item' hook to create a menu
<END_TASK>
<USER_TASK:>
Description:
def get_menu_item(self):
"""
Utilised by Wagtail's 'register_menu_item' hook to create a menu
for this group with a SubMenu linking to listing pages for any
associated ModelAdmin instances
""" |
if self.modeladmin_instances:
submenu = SubMenu(self.get_submenu_items())
return GroupMenuItem(self, self.get_menu_order(), submenu) |
<SYSTEM_TASK:>
Utilised by Wagtail's 'register_permissions' hook to allow permissions
<END_TASK>
<USER_TASK:>
Description:
def get_permissions_for_registration(self):
"""
Utilised by Wagtail's 'register_permissions' hook to allow permissions
for all models grouped by this class to be assigned to Groups in
settings.
""" |
qs = Permission.objects.none()
for instance in self.modeladmin_instances:
qs = qs | instance.get_permissions_for_registration()
return qs |
<SYSTEM_TASK:>
Utilised by Wagtail's 'register_admin_urls' hook to register urls for
<END_TASK>
<USER_TASK:>
Description:
def get_admin_urls_for_registration(self):
"""
Utilised by Wagtail's 'register_admin_urls' hook to register urls used
by any associated ModelAdmin instances
""" |
urls = []
for instance in self.modeladmin_instances:
urls.extend(instance.get_admin_urls_for_registration())
return urls |
<SYSTEM_TASK:>
If there aren't any visible items in the submenu, don't bother to show
<END_TASK>
<USER_TASK:>
Description:
def is_shown(self, request):
"""
If there aren't any visible items in the submenu, don't bother to show
this menu item
""" |
for menuitem in self.menu._registered_menu_items:
if menuitem.is_shown(request):
return True
return False |
<SYSTEM_TASK:>
Returns a tuple containing a queryset to implement the search,
<END_TASK>
<USER_TASK:>
Description:
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
""" |
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
if self.search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in self.search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct |
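The prefix convention used by ``construct_search`` can be exercised on its own; a standalone sketch of the same mapping (no Django required)::

    def construct_search(field_name):
        # '^' -> istartswith, '=' -> iexact, '@' -> search, default -> icontains
        if field_name.startswith('^'):
            return "%s__istartswith" % field_name[1:]
        elif field_name.startswith('='):
            return "%s__iexact" % field_name[1:]
        elif field_name.startswith('@'):
            return "%s__search" % field_name[1:]
        return "%s__icontains" % field_name

    assert construct_search('^title') == 'title__istartswith'
    assert construct_search('=sku') == 'sku__iexact'
    assert construct_search('body') == 'body__icontains'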
<SYSTEM_TASK:>
Return a label to display for a field
<END_TASK>
<USER_TASK:>
Description:
def get_field_label(self, field_name, field=None):
""" Return a label to display for a field """ |
label = None
if field is not None:
label = getattr(field, 'verbose_name', None)
if label is None:
label = getattr(field, 'name', None)
if label is None:
label = field_name
return label.capitalize() |
<SYSTEM_TASK:>
Return a display value for a field
<END_TASK>
<USER_TASK:>
Description:
def get_field_display_value(self, field_name, field=None):
""" Return a display value for a field """ |
"""
Firstly, check for a 'get_fieldname_display' property/method on
the model, and return the value of that, if present.
"""
val_funct = getattr(self.instance, 'get_%s_display' % field_name, None)
if val_funct is not None:
if callable(val_funct):
return val_funct()
return val_funct
"""
Secondly, if we have a real field, we can try to display something
more useful for it.
"""
if field is not None:
try:
field_type = field.get_internal_type()
if (
field_type == 'ForeignKey' and
field.related_model == get_image_model()
):
# The field is an image
return self.get_image_field_display(field_name, field)
if (
field_type == 'ForeignKey' and
field.related_model == Document
):
# The field is a document
return self.get_document_field_display(field_name, field)
except AttributeError:
pass
"""
Resort to getting the value of 'field_name' from the instance.
"""
return getattr(self.instance, field_name,
self.model_admin.get_empty_value_display()) |
<SYSTEM_TASK:>
Render an image
<END_TASK>
<USER_TASK:>
Description:
def get_image_field_display(self, field_name, field):
""" Render an image """ |
image = getattr(self.instance, field_name)
if image:
fltr, _ = Filter.objects.get_or_create(spec='max-400x400')
rendition = image.get_rendition(fltr)
return rendition.img_tag
return self.model_admin.get_empty_value_display() |
<SYSTEM_TASK:>
Render a link to a document
<END_TASK>
<USER_TASK:>
Description:
def get_document_field_display(self, field_name, field):
""" Render a link to a document """ |
document = getattr(self.instance, field_name)
if document:
return mark_safe(
'<a href="%s">%s <span class="meta">(%s, %s)</span></a>' % (
document.url,
document.title,
document.file_extension.upper(),
filesizeformat(document.file.size),
)
)
return self.model_admin.get_empty_value_display() |
<SYSTEM_TASK:>
Return a dictionary containing `label` and `value` values to display
<END_TASK>
<USER_TASK:>
Description:
def get_dict_for_field(self, field_name):
"""
Return a dictionary containing `label` and `value` values to display
for a field.
""" |
try:
field = self.model._meta.get_field(field_name)
except FieldDoesNotExist:
field = None
return {
'label': self.get_field_label(field_name, field),
'value': self.get_field_display_value(field_name, field),
} |
<SYSTEM_TASK:>
Give array shape and desired indices, return indices that are
<END_TASK>
<USER_TASK:>
Description:
def _get_valid_indices(shape, ix0, ix1, iy0, iy1):
"""Give array shape and desired indices, return indices that are
correctly bounded by the shape.""" |
ymax, xmax = shape
if ix0 < 0:
ix0 = 0
if ix1 > xmax:
ix1 = xmax
if iy0 < 0:
iy0 = 0
if iy1 > ymax:
iy1 = ymax
if iy1 <= iy0 or ix1 <= ix0:
raise IndexError(
'array[{0}:{1},{2}:{3}] is invalid'.format(iy0, iy1, ix0, ix1))
return list(map(int, [ix0, ix1, iy0, iy1])) |
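A small standalone sketch of the same bounds-clipping logic, applied to an illustrative window that spills over the edges of a 100x200 array::

    def clip_indices(shape, ix0, ix1, iy0, iy1):
        ymax, xmax = shape
        ix0, iy0 = max(ix0, 0), max(iy0, 0)
        ix1, iy1 = min(ix1, xmax), min(iy1, ymax)
        if iy1 <= iy0 or ix1 <= ix0:
            raise IndexError('empty slice after clipping')
        return ix0, ix1, iy0, iy1

    # A window that runs past the array edges is clipped to valid bounds.
    print(clip_indices((100, 200), 150, 250, -10, 50))  # (150, 200, 0, 50)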
<SYSTEM_TASK:>
Transform a point from original image coordinates to rotated image
<END_TASK>
<USER_TASK:>
Description:
def _rotate_point(point, angle, ishape, rshape, reverse=False):
"""Transform a point from original image coordinates to rotated image
coordinates and back. It assumes the rotation point is the center of an
image.
This works on a simple rotation transformation::
newx = (startx) * np.cos(angle) - (starty) * np.sin(angle)
newy = (startx) * np.sin(angle) + (starty) * np.cos(angle)
It takes into account the differences in image size.
Parameters
----------
point : tuple
Point to be rotated, in the format of ``(x, y)`` measured from
origin.
angle : float
The angle in degrees to rotate the point by as measured
counter-clockwise from the X axis.
ishape : tuple
The shape of the original image, taken from ``image.shape``.
rshape : tuple
The shape of the rotated image, in the form of ``rotate.shape``.
reverse : bool, optional
Transform from rotated coordinates back to non-rotated image.
Returns
-------
rotated_point : tuple
Rotated point in the format of ``(x, y)`` as measured from origin.
""" |
# unpack the image and rotated images shapes
if reverse:
angle = (angle * -1)
temp = ishape
ishape = rshape
rshape = temp
# transform into center of image coordinates
yhalf, xhalf = ishape
yrhalf, xrhalf = rshape
yhalf = yhalf / 2
xhalf = xhalf / 2
yrhalf = yrhalf / 2
xrhalf = xrhalf / 2
startx = point[0] - xhalf
starty = point[1] - yhalf
# do the rotation
newx = startx * np.cos(angle) - starty * np.sin(angle)
newy = startx * np.sin(angle) + starty * np.cos(angle)
# add back the padding from changing the size of the image
newx = newx + xrhalf
newy = newy + yrhalf
return (newx, newy) |
<SYSTEM_TASK:>
Update the given image and DQ extension with the given
<END_TASK>
<USER_TASK:>
Description:
def update_dq(filename, ext, mask, dqval=16384, verbose=True):
"""Update the given image and DQ extension with the given
satellite trails mask and flag.
Parameters
----------
filename : str
FITS image filename to update.
ext : int, str, or tuple
DQ extension, as accepted by ``astropy.io.fits``, to update.
mask : ndarray
Boolean mask, with `True` marking the satellite trail(s).
This can be the result(s) from :func:`make_mask`.
dqval : int, optional
DQ value to use for the trail. Default value of 16384 is
tailored for ACS/WFC.
verbose : bool, optional
Print extra information to the terminal.
""" |
with fits.open(filename, mode='update') as pf:
dqarr = pf[ext].data
old_mask = (dqval & dqarr) != 0 # Existing flagged trails
new_mask = mask & ~old_mask # Only flag previously unflagged trails
npix_updated = np.count_nonzero(new_mask)
# Update DQ extension only if necessary
if npix_updated > 0:
pf[ext].data[new_mask] += dqval
pf['PRIMARY'].header.add_history('{0} satdet v{1}({2})'.format(
time.ctime(), __version__, __vdate__))
pf['PRIMARY'].header.add_history(
' Updated {0} px in EXT {1} with DQ={2}'.format(
npix_updated, ext, dqval))
if verbose:
fname = '{0}[{1}]'.format(filename, ext)
print('DQ flag value is {0}'.format(dqval))
print('Input... flagged NPIX={0}'.format(np.count_nonzero(mask)))
print('Existing flagged NPIX={0}'.format(np.count_nonzero(old_mask)))
print('Newly... flagged NPIX={0}'.format(npix_updated))
if npix_updated > 0:
print('{0} updated'.format(fname))
else:
print('No updates necessary for {0}'.format(fname)) |
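The core of the update is plain bitwise arithmetic; a minimal sketch on a small array (no FITS file involved, the DQ values are illustrative)::

    import numpy as np

    dqval = 16384
    dqarr = np.array([0, 16384, 8, 16392], dtype=np.int32)  # existing DQ values
    mask = np.array([True, True, True, False])              # new trail mask

    old_mask = (dqval & dqarr) != 0   # pixels already carrying the flag
    new_mask = mask & ~old_mask       # flag only previously unflagged pixels
    dqarr[new_mask] += dqval

    print(dqarr)  # [16384 16384 16392 16392]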
<SYSTEM_TASK:>
Decorator for subscribing a function to a specific event.
<END_TASK>
<USER_TASK:>
Description:
def on(self, event: str) -> Callable:
""" Decorator for subscribing a function to a specific event.
:param event: Name of the event to subscribe to.
:type event: str
:return: The outer function.
:rtype: Callable
""" |
def outer(func):
self.add_event(func, event)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return outer |
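A self-contained sketch of the subscribe/emit pattern this decorator enables; the ``MiniBus`` class and instance names are hypothetical stand-ins for the surrounding event class, which keeps its handlers in ``self._events``::

    from collections import defaultdict
    from functools import wraps

    class MiniBus:
        def __init__(self):
            self._events = defaultdict(set)

        def on(self, event):
            def outer(func):
                self._events[event].add(func)
                @wraps(func)
                def wrapper(*args, **kwargs):
                    return func(*args, **kwargs)
                return wrapper
            return outer

        def emit(self, event, *args, **kwargs):
            for func in self._events[event]:
                func(*args, **kwargs)

    bus = MiniBus()

    @bus.on('greet')
    def say_hello(name):
        print('hello,', name)

    bus.emit('greet', 'world')  # -> hello, world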
<SYSTEM_TASK:>
Adds a function to an event.
<END_TASK>
<USER_TASK:>
Description:
def add_event(self, func: Callable, event: str) -> None:
""" Adds a function to a event.
:param func: The function to call when event is emitted
:type func: Callable
:param event: Name of the event.
:type event: str
""" |
self._events[event].add(func) |
<SYSTEM_TASK:>
Emit an event and run the subscribed functions.
<END_TASK>
<USER_TASK:>
Description:
def emit(self, event: str, *args, **kwargs) -> None:
""" Emit an event and run the subscribed functions.
:param event: Name of the event.
:type event: str
.. note::
Passing ``threads=True`` as a keyword argument runs the subscribed
functions in separate threads. This can significantly speed up
execution depending on the code being executed.
""" |
threads = kwargs.pop('threads', None)
if threads:
events = [
Thread(target=f, args=args, kwargs=kwargs) for f in
self._event_funcs(event)
]
for event in events:
event.start()
else:
for func in self._event_funcs(event):
func(*args, **kwargs) |
<SYSTEM_TASK:>
Specifically only emits certain subscribed events.
<END_TASK>
<USER_TASK:>
Description:
def emit_only(self, event: str, func_names: Union[str, List[str]], *args,
**kwargs) -> None:
""" Specifically only emits certain subscribed events.
:param event: Name of the event.
:type event: str
:param func_names: Function(s) to emit.
:type func_names: Union[ str | List[str] ]
""" |
if isinstance(func_names, str):
func_names = [func_names]
for func in self._event_funcs(event):
if func.__name__ in func_names:
func(*args, **kwargs) |
<SYSTEM_TASK:>
Decorator that emits events after the function is completed.
<END_TASK>
<USER_TASK:>
Description:
def emit_after(self, event: str) -> Callable:
""" Decorator that emits events after the function is completed.
:param event: Name of the event.
:type event: str
:return: Callable
.. note::
This plainly just calls functions without passing params into the
subscribed callables. This is great if you want to do some kind
of post processing without the callable requiring information
before doing so.
""" |
def outer(func):
@wraps(func)
def wrapper(*args, **kwargs):
returned = func(*args, **kwargs)
self.emit(event)
return returned
return wrapper
return outer |
<SYSTEM_TASK:>
Removes a subscribed function from a specific event.
<END_TASK>
<USER_TASK:>
Description:
def remove_event(self, func_name: str, event: str) -> None:
""" Removes a subscribed function from a specific event.
:param func_name: The name of the function to be removed.
:type func_name: str
:param event: The name of the event.
:type event: str
:raises EventDoesntExist: if ``func_name`` doesn't exist in the event.
""" |
event_funcs_copy = self._events[event].copy()
for func in self._event_funcs(event):
if func.__name__ == func_name:
event_funcs_copy.remove(func)
if self._events[event] == event_funcs_copy:
err_msg = "function doesn't exist inside event {} ".format(event)
raise EventDoesntExist(err_msg)
else:
self._events[event] = event_funcs_copy |
<SYSTEM_TASK:>
Returns an Iterable of the functions subscribed to an event.
<END_TASK>
<USER_TASK:>
Description:
def _event_funcs(self, event: str) -> Iterable[Callable]:
""" Returns an Iterable of the functions subscribed to a event.
:param event: Name of the event.
:type event: str
:return: A iterable to do things with.
:rtype: Iterable
""" |
for func in self._events[event]:
yield func |
<SYSTEM_TASK:>
Returns string name of each function subscribed to an event.
<END_TASK>
<USER_TASK:>
Description:
def _event_func_names(self, event: str) -> List[str]:
""" Returns string name of each function subscribed to an event.
:param event: Name of the event.
:type event: str
:return: Names of functions subscribed to a specific event.
:rtype: list
""" |
return [func.__name__ for func in self._events[event]] |
<SYSTEM_TASK:>
Returns the total amount of subscribed events.
<END_TASK>
<USER_TASK:>
Description:
def _subscribed_event_count(self) -> int:
""" Returns the total amount of subscribed events.
:return: Integer amount events.
:rtype: int
""" |
event_counter = Counter() # type: Dict[Any, int]
for key, values in self._events.items():
event_counter[key] = len(values)
return sum(event_counter.values()) |
<SYSTEM_TASK:>
Clean each input image.
<END_TASK>
<USER_TASK:>
Description:
def perform_correction(image, output, stat="pmode1", maxiter=15, sigrej=2.0,
lower=None, upper=None, binwidth=0.3,
mask=None, dqbits=None,
rpt_clean=0, atol=0.01, clobber=False, verbose=True):
"""
Clean each input image.
Parameters
----------
image : str
Input image name.
output : str
Output image name.
mask : `numpy.ndarray`
Mask array.
maxiter, sigrej, clobber
See :func:`clean`.
dqbits : int, str, or None
Data quality bits to be considered as "good" (or "bad").
See :func:`clean` for more details.
rpt_clean : int
An integer indicating how many *additional* times stripe cleaning
should be performed on the input image. Default = 0.
atol : float, None
The threshold for maximum absolute value of bias stripe correction
below which repeated cleanings can stop. When `atol` is `None`
cleaning will be repeated `rpt_clean` number of times.
Default = 0.01 [e].
verbose : bool
Print informational messages. Default = True.
""" |
# construct the frame to be cleaned, including the
# associated data structures needed for cleaning
frame = StripeArray(image)
# combine user mask with image's DQ array:
mask = _mergeUserMaskAndDQ(frame.dq, mask, dqbits)
# Do the stripe cleaning
Success, NUpdRows, NMaxIter, Bkgrnd, STDDEVCorr, MaxCorr, Nrpt = clean_streak(
frame, stat=stat, maxiter=maxiter, sigrej=sigrej,
lower=lower, upper=upper, binwidth=binwidth, mask=mask,
rpt_clean=rpt_clean, atol=atol, verbose=verbose
)
if Success:
if verbose:
LOG.info('perform_correction - ===== Overall statistics for '
'de-stripe corrections: =====')
if (STDDEVCorr > 1.5*0.9):
LOG.warning('perform_correction - STDDEV of applied de-stripe '
'corrections ({:.3g}) exceeds\nknown bias striping '
'STDDEV of 0.9e (see ISR ACS 2011-05) more than '
'1.5 times.'.format(STDDEVCorr))
elif verbose:
LOG.info('perform_correction - STDDEV of applied de-stripe '
'corrections {:.3g}.'.format(STDDEVCorr))
if verbose:
LOG.info('perform_correction - Estimated background: '
'{:.5g}.'.format(Bkgrnd))
LOG.info('perform_correction - Maximum applied correction: '
'{:.3g}.'.format(MaxCorr))
LOG.info('perform_correction - Effective number of clipping '
'iterations: {}.'.format(NMaxIter))
LOG.info('perform_correction - Effective number of additional '
'(repeated) cleanings: {}.'.format(Nrpt))
LOG.info('perform_correction - Total number of corrected rows: '
'{}.'.format(NUpdRows))
frame.write_corrected(output, clobber=clobber)
frame.close() |
<SYSTEM_TASK:>
Write out the destriped data.
<END_TASK>
<USER_TASK:>
Description:
def write_corrected(self, output, clobber=False):
"""Write out the destriped data.""" |
# un-apply the flatfield if necessary
if self.flatcorr != 'COMPLETE':
self.science = self.science / self.invflat
self.err = self.err / self.invflat
# un-apply the post-flash if necessary
if self.flshcorr != 'COMPLETE':
self.science = self.science + self.flash
# un-apply the dark if necessary
if self.darkcorr != 'COMPLETE':
self.science = self.science + self.dark
# reverse the amp merge
if (self.ampstring == 'ABCD'):
tmp_1, tmp_2 = np.split(self.science, 2, axis=1)
self.hdulist['sci', 1].data = tmp_1.copy()
self.hdulist['sci', 2].data = tmp_2[::-1, :].copy()
tmp_1, tmp_2 = np.split(self.err, 2, axis=1)
self.hdulist['err', 1].data = tmp_1.copy()
self.hdulist['err', 2].data = tmp_2[::-1, :].copy()
else:
self.hdulist['sci', 1].data = self.science.copy()
self.hdulist['err', 1].data = self.err.copy()
# Write the output
self.hdulist.writeto(output, overwrite=clobber) |
<SYSTEM_TASK:>
Run the calacs.e executable as from the shell.
<END_TASK>
<USER_TASK:>
Description:
def calacs(input_file, exec_path=None, time_stamps=False, temp_files=False,
verbose=False, debug=False, quiet=False, single_core=False,
exe_args=None):
"""
Run the calacs.e executable as from the shell.
By default this will run the calacs given by 'calacs.e'.
Parameters
----------
input_file : str
Name of input file.
exec_path : str, optional
The complete path to a calacs executable.
time_stamps : bool, optional
Set to True to turn on the printing of time stamps.
temp_files : bool, optional
Set to True to have CALACS save temporary files.
verbose : bool, optional
Set to True for verbose output.
debug : bool, optional
Set to True to turn on debugging output.
quiet : bool, optional
Set to True for quiet output.
single_core : bool, optional
CTE correction in CALACS will by default try to use all available
CPUs on your computer. Set this to True to force the use of just
one CPU.
exe_args : list, optional
Arbitrary arguments passed to underlying executable call.
Note: Implementation uses subprocess.call and whitespace is not
permitted. E.g. use exe_args=['--nThreads', '1']
""" |
if exec_path:
if not os.path.exists(exec_path):
raise OSError('Executable not found: ' + exec_path)
call_list = [exec_path]
else:
call_list = ['calacs.e']
if time_stamps:
call_list.append('-t')
if temp_files:
call_list.append('-s')
if verbose:
call_list.append('-v')
if debug:
call_list.append('-d')
if quiet:
call_list.append('-q')
if single_core:
call_list.append('-1')
if not os.path.exists(input_file):
raise IOError('Input file not found: ' + input_file)
call_list.append(input_file)
if exe_args:
call_list.extend(exe_args)
subprocess.check_call(call_list) |
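A minimal usage sketch, assuming ``acstools`` is installed, ``calacs.e`` is on the ``PATH``, and the association filename is illustrative::

    from acstools import calacs

    # Run the full CALACS pipeline on an association table, restricting
    # the CTE correction step to a single CPU.
    calacs.calacs('j12345670_asn.fits', time_stamps=True, single_core=True)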
<SYSTEM_TASK:>
Return a boolean to indicate whether the supplied user has any
<END_TASK>
<USER_TASK:>
Description:
def has_any_permissions(self, user):
"""
Return a boolean to indicate whether the supplied user has any
permissions at all on the associated model
""" |
for perm in self.get_all_model_permissions():
if self.has_specific_permission(user, perm.codename):
return True
return False |
<SYSTEM_TASK:>
r"""
<END_TASK>
<USER_TASK:>
Description:
def acsrej(input, output, exec_path='', time_stamps=False, verbose=False,
shadcorr=False, crrejtab='', crmask=False, scalense=None,
initgues='', skysub='', crsigmas='', crradius=None, crthresh=None,
badinpdq=None, newbias=False, readnoise_only=False, exe_args=None):
r"""
Run the acsrej.e executable as from the shell.
Parameters
----------
input : str or list of str
Input filenames in one of these formats:
* a Python list of filenames
* a partial filename with wildcards ('\*flt.fits')
* filename of an ASN table ('j12345670_asn.fits')
* an at-file (``@input``)
output : str
Output filename.
exec_path : str, optional
The complete path to ACSREJ executable.
If not given, run ACSREJ given by 'acsrej.e'.
time_stamps : bool, optional
Set to True to turn on the printing of time stamps.
verbose : bool, optional
Set to True for verbose output.
shadcorr : bool, optional
Perform shutter shading correction.
If this is False but SHADCORR is set to PERFORM in
the header of the first image, the correction will
be applied anyway.
Only use this with CCD image, not SBC MAMA.
crrejtab : str, optional
CRREJTAB to use. If not given, will use CRREJTAB
given in the primary header of the first input image.
crmask : bool, optional
Flag CR-rejected pixels in input files.
If False, will use CRMASK value in CRREJTAB.
scalense : float, optional
Multiplicative scale factor (in percent) applied to noise.
Acceptable values are 0 to 100, inclusive.
If None, will use SCALENSE from CRREJTAB.
initgues : {'med', 'min'}, optional
Scheme for computing initial-guess image.
If not given, will use INITGUES from CRREJTAB.
skysub : {'none', 'mode'}, optional
Scheme for computing sky levels to be subtracted.
If not given, will use SKYSUB from CRREJTAB.
crsigmas : str, optional
Cosmic ray rejection thresholds given in the format of 'sig1,sig2,...'.
Number of sigmas given will be the number of rejection
iterations done. At least 1 and at most 20 sigmas accepted.
If not given, will use CRSIGMAS from CRREJTAB.
crradius : float, optional
Radius (in pixels) to propagate the cosmic ray.
If None, will use CRRADIUS from CRREJTAB.
crthresh : float, optional
Cosmic ray rejection propagation threshold.
If None, will use CRTHRESH from CRREJTAB.
badinpdq : int, optional
Data quality flag used for cosmic ray rejection.
If None, will use BADINPDQ from CRREJTAB.
newbias : bool, optional
This option has been deprecated. Use ``readnoise_only``.
readnoise_only : bool, optional
ERR is just read noise, not Poisson noise.
This is used for BIAS images.
exe_args : list, optional
Arbitrary arguments passed to underlying executable call.
Note: Implementation uses subprocess.call and whitespace is not
permitted. E.g. use exe_args=['--nThreads', '1']
""" |
from stsci.tools import parseinput # Optional package dependency
if exec_path:
if not os.path.exists(exec_path):
raise OSError('Executable not found: ' + exec_path)
call_list = [exec_path]
else:
call_list = ['acsrej.e']
# Parse input to get list of filenames to process.
# acsrej.e only takes 'file1,file2,...'
infiles, dummy_out = parseinput.parseinput(input)
call_list.append(','.join(infiles))
call_list.append(output)
if time_stamps:
call_list.append('-t')
if verbose:
call_list.append('-v')
if shadcorr:
call_list.append('-shadcorr')
if crrejtab:
call_list += ['-table', crrejtab]
if crmask:
call_list.append('-crmask')
if scalense is not None:
if scalense < 0 or scalense > 100:
raise ValueError('SCALENSE must be 0 to 100')
call_list += ['-scale', str(scalense)]
if initgues:
if initgues not in ('med', 'min'):
raise ValueError('INITGUES must be "med" or "min"')
call_list += ['-init', initgues]
if skysub:
if skysub not in ('none', 'mode'):
raise ValueError('SKYSUB must be "none" or "mode"')
call_list += ['-sky', skysub]
if crsigmas:
call_list += ['-sigmas', crsigmas]
if crradius is not None:
call_list += ['-radius', str(crradius)]
if crthresh is not None:
call_list += ['-thresh', str(crthresh)]
if badinpdq is not None:
call_list += ['-pdq', str(badinpdq)]
# Backward-compatibility for readnoise_only.
# TODO: Remove this option entirely in a future release.
if newbias:
warnings.warn('newbias is deprecated, use readnoise_only',
ACSREJDeprecationWarning)
readnoise_only = newbias
if readnoise_only:
call_list.append('-readnoise_only')
if exe_args:
call_list.extend(exe_args)
subprocess.check_call(call_list) |
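A minimal usage sketch, assuming ``acstools`` is installed, ``acsrej.e`` is on the ``PATH``, and the filenames and rejection thresholds are illustrative::

    from acstools import acsrej

    # Combine several FLT frames with cosmic-ray rejection.
    acsrej.acsrej('*_flt.fits', 'combined_crj.fits', verbose=True,
                  crsigmas='6,5,4', crmask=True)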
<SYSTEM_TASK:>
Call all parent after fork callables, release the lock and print
<END_TASK>
<USER_TASK:>
Description:
def parent_after_fork_release():
"""
Call all parent after fork callables, release the lock and print
all prepare and parent callback exceptions.
""" |
prepare_exceptions = list(_prepare_call_exceptions)
del _prepare_call_exceptions[:]
exceptions = _call_atfork_list(_parent_call_list)
_fork_lock.release()
_print_exception_list(prepare_exceptions, 'before fork')
_print_exception_list(exceptions, 'after fork from parent') |
<SYSTEM_TASK:>
Given a list of sys.exc_info tuples, print them all using the traceback
<END_TASK>
<USER_TASK:>
Description:
def _print_exception_list(exceptions, message, output_file=None):
"""
Given a list of sys.exc_info tuples, print them all using the traceback
module preceded by a message and separated by a blank line.
""" |
output_file = output_file or sys.stderr
message = 'Exception %s:\n' % message
for exc_type, exc_value, exc_traceback in exceptions:
output_file.write(message)
traceback.print_exception(exc_type, exc_value, exc_traceback,
file=output_file)
output_file.write('\n') |
<SYSTEM_TASK:>
Wraps an object with a Maybe instance.
<END_TASK>
<USER_TASK:>
Description:
def maybe(value):
"""Wraps an object with a Maybe instance.
>>> maybe("I'm a value")
Something("I'm a value")
>>> maybe(None);
Nothing
Testing for value:
>>> maybe("I'm a value").is_some()
True
>>> maybe("I'm a value").is_none()
False
>>> maybe(None).is_some()
False
>>> maybe(None).is_none()
True
Simplifying IF statements:
>>> maybe("I'm a value").get()
"I'm a value"
>>> maybe("I'm a value").or_else(lambda: "No value")
"I'm a value"
>>> maybe(None).get()
Traceback (most recent call last):
...
NothingValueError: No such element
>>> maybe(None).or_else(lambda: "value")
'value'
>>> maybe(None).or_else("value")
'value'
Wrap around values from object's attributes:
class Person(object):
def __init__(self, name):
self.name = name
eran = maybe(Person('eran'))
>>> eran.name
Something('eran')
>>> eran.phone_number
Nothing
>>> eran.phone_number.or_else('no phone number')
'no phone number'
>>> maybe(4) + 8
Something(12)
>>> maybe(4) - 2
Something(2)
>>> maybe(4) * 2
Something(8)
And methods:
>>> maybe('VALUE').lower().get()
'value'
>>> maybe(None).invalid().method().or_else('unknown')
'unknown'
Enabled easily using NestedDictionaries without having to worry
if a value is missing.
For example lets assume we want to load some value from the
following dictionary:
nested_dict = maybe({
'store': {
'name': 'MyStore',
'departments': {
'sales': { 'head_count': '10' }
}
}
})
>>> nested_dict['store']['name'].get()
'MyStore'
>>> nested_dict['store']['address']
Nothing
>>> nested_dict['store']['address']['street'].or_else('No Address Specified')
'No Address Specified'
>>> nested_dict['store']['address']['street'].or_none() is None
True
>>> nested_dict['store']['address']['street'].or_empty_list()
[]
>>> nested_dict['store']['departments']['sales']['head_count'].or_else('0')
'10'
>>> nested_dict['store']['departments']['marketing']['head_count'].or_else('0')
'0'
""" |
if isinstance(value, Maybe):
return value
if value is not None:
return Something(value)
return Nothing() |
<SYSTEM_TASK:>
Collects the result of a sign or auth order using the
<END_TASK>
<USER_TASK:>
Description:
def collect(self, order_ref):
"""Collects the result of a sign or auth order using the
``orderRef`` as reference.
RP should keep on calling collect every two seconds as long as status
indicates pending. RP must abort if status indicates failed. The user
identity is returned when complete.
Example collect results returned while authentication or signing is
still pending:
.. code-block:: json
{
"orderRef":"131daac9-16c6-4618-beb0-365768f37288",
"status":"pending",
"hintCode":"userSign"
}
Example collect result when authentication or signing has failed:
.. code-block:: json
{
"orderRef":"131daac9-16c6-4618-beb0-365768f37288",
"status":"failed",
"hintCode":"userCancel"
}
Example collect result when authentication or signing is successful
and completed:
.. code-block:: json
{
"orderRef":"131daac9-16c6-4618-beb0-365768f37288",
"status":"complete",
"completionData": {
"user": {
"personalNumber":"190000000000",
"name":"Karl Karlsson",
"givenName":"Karl",
"surname":"Karlsson"
},
"device": {
"ipAddress":"192.168.0.1"
},
"cert": {
"notBefore":"1502983274000",
"notAfter":"1563549674000"
},
"signature":"<base64-encoded data>",
"ocspResponse":"<base64-encoded data>"
}
}
See `BankID Relying Party Guidelines Version: 3.0 <https://www.bankid.com/assets/bankid/rp/bankid-relying-party-guidelines-v3.0.pdf>`_
for more details about how to inform end user of the current status,
whether it is pending, failed or completed.
:param order_ref: The ``orderRef`` UUID returned from auth or sign.
:type order_ref: str
:return: The CollectResponse parsed to a dictionary.
:rtype: dict
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
""" |
response = self.client.post(
self._collect_endpoint, json={"orderRef": order_ref}
)
if response.status_code == 200:
return response.json()
else:
raise get_json_error_class(response) |
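A sketch of the two-second polling loop recommended above; ``client`` is assumed to be an already-configured BankID client exposing this ``collect`` method, and the helper name is hypothetical::

    import time

    def wait_for_completion(client, order_ref, timeout=180):
        """Poll collect() every two seconds until the order completes or fails."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            result = client.collect(order_ref)
            if result['status'] == 'complete':
                return result['completionData']
            if result['status'] == 'failed':
                raise RuntimeError(result.get('hintCode', 'unknown error'))
            time.sleep(2)
        raise TimeoutError('BankID order did not complete in time')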
<SYSTEM_TASK:>
Cancels an ongoing sign or auth order.
<END_TASK>
<USER_TASK:>
Description:
def cancel(self, order_ref):
"""Cancels an ongoing sign or auth order.
This is typically used if the user cancels the order
in your service or app.
:param order_ref: The UUID string specifying which order to cancel.
:type order_ref: str
:return: Boolean regarding success of cancellation.
:rtype: bool
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
""" |
response = self.client.post(self._cancel_endpoint, json={"orderRef": order_ref})
if response.status_code == 200:
return response.json() == {}
else:
raise get_json_error_class(response) |
<SYSTEM_TASK:>
Put data in output queue, rebuild the prompt and entered data
<END_TASK>
<USER_TASK:>
Description:
def writemessage(self, text):
"""Put data in output queue, rebuild the prompt and entered data""" |
# Need to grab the input queue lock to ensure the entered data doesn't change
# before we're done rebuilding it.
# Note that writemessage will eventually call writecooked
self.IQUEUELOCK.acquire()
TelnetHandlerBase.writemessage(self, text)
self.IQUEUELOCK.release() |
<SYSTEM_TASK:>
Put data directly into the output queue
<END_TASK>
<USER_TASK:>
Description:
def writecooked(self, text):
"""Put data directly into the output queue""" |
# Ensure this is the only thread writing
self.OQUEUELOCK.acquire()
TelnetHandlerBase.writecooked(self, text)
self.OQUEUELOCK.release() |
<SYSTEM_TASK:>
Splits a PKCS12 certificate into Base64-encoded DER certificate and key.
<END_TASK>
<USER_TASK:>
Description:
def split_certificate(certificate_path, destination_folder, password=None):
"""Splits a PKCS12 certificate into Base64-encoded DER certificate and key.
This method splits a potentially password-protected
`PKCS12 <https://en.wikipedia.org/wiki/PKCS_12>`_ certificate
(format ``.p12`` or ``.pfx``) into one certificate and one key part, both in
`pem <https://en.wikipedia.org/wiki/X.509#Certificate_filename_extensions>`_
format.
:returns: Tuple of certificate and key string data.
:rtype: tuple
""" |
try:
# Attempt Linux and Darwin call first.
p = subprocess.Popen(
["openssl", "version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
sout, serr = p.communicate()
openssl_executable_version = sout.decode().lower()
if not (
openssl_executable_version.startswith("openssl")
or openssl_executable_version.startswith("libressl")
):
raise BankIDError(
"OpenSSL executable could not be found. "
"Splitting cannot be performed."
)
openssl_executable = "openssl"
except Exception:
# Attempt to call on standard Git for Windows path.
p = subprocess.Popen(
["C:\\Program Files\\Git\\mingw64\\bin\\openssl.exe", "version"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
sout, serr = p.communicate()
if not sout.decode().lower().startswith("openssl"):
raise BankIDError(
"OpenSSL executable could not be found. "
"Splitting cannot be performed."
)
openssl_executable = "C:\\Program Files\\Git\\mingw64\\bin\\openssl.exe"
if not os.path.exists(os.path.abspath(os.path.expanduser(destination_folder))):
os.makedirs(os.path.abspath(os.path.expanduser(destination_folder)))
# Paths to output files.
out_cert_path = os.path.join(
os.path.abspath(os.path.expanduser(destination_folder)), "certificate.pem"
)
out_key_path = os.path.join(
os.path.abspath(os.path.expanduser(destination_folder)), "key.pem"
)
# Use openssl for converting to pem format.
pipeline_1 = [
openssl_executable,
"pkcs12",
"-in",
"{0}".format(certificate_path),
"-passin" if password is not None else "",
"pass:{0}".format(password) if password is not None else "",
"-out",
"{0}".format(out_cert_path),
"-clcerts",
"-nokeys",
]
p = subprocess.Popen(
list(filter(None, pipeline_1)), stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
p.communicate()
pipeline_2 = [
openssl_executable,
"pkcs12",
"-in",
"{0}".format(certificate_path),
"-passin" if password is not None else "",
"pass:{0}".format(password) if password is not None else "",
"-out",
"{0}".format(out_key_path),
"-nocerts",
"-nodes",
]
p = subprocess.Popen(
list(filter(None, pipeline_2)), stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
p.communicate()
# Return path tuples.
return out_cert_path, out_key_path |
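A brief usage sketch; the certificate filename, destination folder and password are illustrative, and ``openssl`` must be reachable as described above::

    cert_path, key_path = split_certificate(
        'mycert.p12', destination_folder='~/bankid_certs', password='secret')
    # cert_path ends with certificate.pem, key_path with key.pem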
<SYSTEM_TASK:>
Deal properly with inserted chars in a line.
<END_TASK>
<USER_TASK:>
Description:
def _readline_insert(self, char, echo, insptr, line):
"""Deal properly with inserted chars in a line.""" |
if not self._readline_do_echo(echo):
return
# Write out the remainder of the line
self.write(char + ''.join(line[insptr:]))
# Cursor Left to the current insert point
char_count = len(line) - insptr
self.write(self.CODES['CSRLEFT'] * char_count) |
<SYSTEM_TASK:>
Send a packet with line ending.
<END_TASK>
<USER_TASK:>
Description:
def writeline(self, text):
"""Send a packet with line ending.""" |
log.debug('writing line %r' % text)
self.write(text+chr(10)) |
<SYSTEM_TASK:>
Write out an asynchronous message, then reconstruct the prompt and entered text.
<END_TASK>
<USER_TASK:>
Description:
def writemessage(self, text):
"""Write out an asynchronous message, then reconstruct the prompt and entered text.""" |
log.debug('writing message %r', text)
self.write(chr(10)+text+chr(10))
self.write(self._current_prompt+''.join(self._current_line)) |
<SYSTEM_TASK:>
Send a packet to the socket. This function cooks output.
<END_TASK>
<USER_TASK:>
Description:
def write(self, text):
"""Send a packet to the socket. This function cooks output.""" |
text = str(text) # eliminate any unicode or other snigglets
text = text.replace(IAC, IAC+IAC)
text = text.replace(chr(10), chr(13)+chr(10))
self.writecooked(text) |
<SYSTEM_TASK:>
Get one character from the raw queue. Optionally blocking.
<END_TASK>
<USER_TASK:>
Description:
def _inputcooker_getc(self, block=True):
"""Get one character from the raw queue. Optionally blocking.
Raise EOFError on end of stream. SHOULD ONLY BE CALLED FROM THE
INPUT COOKER.""" |
if self.rawq:
ret = self.rawq[0]
self.rawq = self.rawq[1:]
return ret
if not block:
if not self.inputcooker_socket_ready():
return ''
ret = self.sock.recv(20)
self.eof = not(ret)
self.rawq = self.rawq + ret
if self.eof:
raise EOFError
return self._inputcooker_getc(block) |
<SYSTEM_TASK:>
Put the cooked data in the correct queue
<END_TASK>
<USER_TASK:>
Description:
def _inputcooker_store(self, char):
"""Put the cooked data in the correct queue""" |
if self.sb:
self.sbdataq = self.sbdataq + char
else:
self.inputcooker_store_queue(char) |
<SYSTEM_TASK:>
Input Cooker - Transfer from raw queue to cooked queue.
<END_TASK>
<USER_TASK:>
Description:
def inputcooker(self):
"""Input Cooker - Transfer from raw queue to cooked queue.
Set self.eof when connection is closed. Don't block unless in
the midst of an IAC sequence.
""" |
try:
while True:
c = self._inputcooker_getc()
if not self.iacseq:
if c == IAC:
self.iacseq += c
continue
elif c == chr(13) and not(self.sb):
c2 = self._inputcooker_getc(block=False)
if c2 == theNULL or c2 == '':
c = chr(10)
elif c2 == chr(10):
c = c2
else:
self._inputcooker_ungetc(c2)
c = chr(10)
elif c in [x[0] for x in self.ESCSEQ.keys()]:
'Looks like the beginning of a key sequence'
codes = c
for keyseq in self.ESCSEQ.keys():
if len(keyseq) == 0:
continue
while codes == keyseq[:len(codes)] and len(codes) <= len(keyseq):
if codes == keyseq:
c = self.ESCSEQ[keyseq]
break
codes = codes + self._inputcooker_getc()
if codes == keyseq:
break
self._inputcooker_ungetc(codes[1:])
codes = codes[0]
self._inputcooker_store(c)
elif len(self.iacseq) == 1:
'IAC: IAC CMD [OPTION only for WILL/WONT/DO/DONT]'
if c in (DO, DONT, WILL, WONT):
self.iacseq += c
continue
self.iacseq = ''
if c == IAC:
self._inputcooker_store(c)
else:
if c == SB: # SB ... SE start.
self.sb = 1
self.sbdataq = ''
elif c == SE: # SB ... SE end.
self.sb = 0
# Callback is supposed to look into
# the sbdataq
self.options_handler(self.sock, c, NOOPT)
elif len(self.iacseq) == 2:
cmd = self.iacseq[1]
self.iacseq = ''
if cmd in (DO, DONT, WILL, WONT):
self.options_handler(self.sock, cmd, c)
except (EOFError, socket.error):
pass |
<SYSTEM_TASK:>
Collect the progress status of the order with the specified
<END_TASK>
<USER_TASK:>
Description:
def collect(self, order_ref):
"""Collect the progress status of the order with the specified
order reference.
:param order_ref: The UUID string specifying which order to
collect status from.
:type order_ref: str
:return: The CollectResponse parsed to a dictionary.
:rtype: dict
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
""" |
try:
out = self.client.service.Collect(order_ref)
except Error as e:
raise get_error_class(e, "Could not complete Collect call.")
return self._dictify(out) |
<SYSTEM_TASK:>
Transforms the replies to a regular Python dict with
<END_TASK>
<USER_TASK:>
Description:
def _dictify(self, doc):
"""Transforms the replies to a regular Python dict with
strings and datetimes.
Tested with BankID version 2.5 return data.
:param doc: The response as interpreted by :py:mod:`zeep`.
:returns: The response parsed to a dict.
:rtype: dict
""" |
return {
k: (self._dictify(doc[k]) if hasattr(doc[k], "_xsd_type") else doc[k])
for k in doc
} |
<SYSTEM_TASK:>
Load all image formats if needed.
<END_TASK>
<USER_TASK:>
Description:
def intercept_image_formats(self, options):
"""
Load all image formats if needed.
""" |
if 'entityTypes' in options:
for entity in options['entityTypes']:
if entity['type'] == ENTITY_TYPES.IMAGE and 'imageFormats' in entity:
if entity['imageFormats'] == '__all__':
entity['imageFormats'] = get_all_image_formats()
return options |
<SYSTEM_TASK:>
Copies a Delphi TDateTime timestamp from a string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies a Delphi TDateTime timestamp from a string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the time string is invalid or not supported.
""" |
date_time_values = self._CopyDateTimeFromString(time_string)
year = date_time_values.get('year', 0)
month = date_time_values.get('month', 0)
day_of_month = date_time_values.get('day_of_month', 0)
hours = date_time_values.get('hours', 0)
minutes = date_time_values.get('minutes', 0)
seconds = date_time_values.get('seconds', 0)
microseconds = date_time_values.get('microseconds', None)
if year > 9999:
raise ValueError('Unsupported year value: {0:d}.'.format(year))
timestamp = self._GetNumberOfSecondsFromElements(
year, month, day_of_month, hours, minutes, seconds)
timestamp = float(timestamp) / definitions.SECONDS_PER_DAY
timestamp += self._DELPHI_TO_POSIX_BASE
if microseconds is not None:
timestamp += float(microseconds) / definitions.MICROSECONDS_PER_DAY
self._normalized_timestamp = None
self._timestamp = timestamp
self.is_local_time = False |
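The conversion is days-since-1899-12-30 arithmetic. A standard-library cross-check of the same numbers, assuming the base constant added above equals the 25569-day offset between the Delphi epoch (1899-12-30) and the POSIX epoch, and ignoring time zones::

    import datetime as dt

    DELPHI_EPOCH = dt.datetime(1899, 12, 30)
    SECONDS_PER_DAY = 86400

    def to_delphi(time_string):
        parsed = dt.datetime.strptime(time_string, '%Y-%m-%d %H:%M:%S')
        return (parsed - DELPHI_EPOCH).total_seconds() / SECONDS_PER_DAY

    print(to_delphi('1970-01-01 00:00:00'))  # 25569.0 (the POSIX epoch)
    print(to_delphi('1970-01-02 12:00:00'))  # 25570.5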
<SYSTEM_TASK:>
Adjusts the date and time values for a time zone offset.
<END_TASK>
<USER_TASK:>
Description:
def _AdjustForTimeZoneOffset(
self, year, month, day_of_month, hours, minutes, time_zone_offset):
"""Adjusts the date and time values for a time zone offset.
Args:
year (int): year e.g. 1970.
month (int): month, where 1 represents January.
day_of_month (int): day of the month, where 1 represents the first day.
hours (int): hours.
minutes (int): minutes.
time_zone_offset (int): time zone offset in number of minutes from UTC.
Returns:
tuple[int, int, int, int, int, int]: time zone correct year, month,
day_of_month, hours and minutes values.
""" |
hours_from_utc, minutes_from_utc = divmod(time_zone_offset, 60)
minutes += minutes_from_utc
# Since divmod makes sure the sign of minutes_from_utc is positive
# we only need to check the upper bound here, because hours_from_utc
# remains signed it is corrected accordingly.
if minutes >= 60:
minutes -= 60
hours += 1
hours += hours_from_utc
if hours < 0:
hours += 24
day_of_month -= 1
elif hours >= 24:
hours -= 24
day_of_month += 1
days_per_month = self._GetDaysPerMonth(year, month)
if day_of_month < 1:
month -= 1
if month < 1:
month = 12
year -= 1
day_of_month += self._GetDaysPerMonth(year, month)
elif day_of_month > days_per_month:
month += 1
if month > 12:
month = 1
year += 1
day_of_month -= days_per_month
return year, month, day_of_month, hours, minutes |
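The divmod sign behaviour the comment relies on is easy to see concretely; a short sketch with an illustrative -150 minute offset applied to 01:00::

    # divmod keeps the minutes remainder non-negative, so only the upper
    # bound of `minutes` needs checking; the sign stays with the hours part.
    print(divmod(-150, 60))  # (-3, 30)
    print(divmod(150, 60))   # (2, 30)

    hours, minutes = 1, 0
    hours_from_utc, minutes_from_utc = divmod(-150, 60)
    minutes += minutes_from_utc
    if minutes >= 60:
        minutes -= 60
        hours += 1
    hours += hours_from_utc
    print(hours, minutes)  # -2 30 -> 22:30 on the previous day after the day borrow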
<SYSTEM_TASK:>
Copies a date from a string.
<END_TASK>
<USER_TASK:>
Description:
def _CopyDateFromString(self, date_string):
"""Copies a date from a string.
Args:
date_string (str): date value formatted as: YYYY-MM-DD
Returns:
tuple[int, int, int]: year, month, day of month.
Raises:
ValueError: if the date string is invalid or not supported.
""" |
date_string_length = len(date_string)
# The date string should at least contain 'YYYY-MM-DD'.
if date_string_length < 10:
raise ValueError('Date string too short.')
if date_string[4] != '-' or date_string[7] != '-':
raise ValueError('Invalid date string.')
try:
year = int(date_string[0:4], 10)
except ValueError:
raise ValueError('Unable to parse year.')
try:
month = int(date_string[5:7], 10)
except ValueError:
raise ValueError('Unable to parse month.')
try:
day_of_month = int(date_string[8:10], 10)
except ValueError:
raise ValueError('Unable to parse day of month.')
days_per_month = self._GetDaysPerMonth(year, month)
if day_of_month < 1 or day_of_month > days_per_month:
raise ValueError('Day of month value out of bounds.')
return year, month, day_of_month |
<SYSTEM_TASK:>
Copies a time from a string.
<END_TASK>
<USER_TASK:>
Description:
def _CopyTimeFromString(self, time_string):
"""Copies a time from a string.
Args:
time_string (str): time value formatted as:
hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The seconds fraction and
time zone offset are optional.
Returns:
tuple[int, int, int, int, int]: hours, minutes, seconds, microseconds,
time zone offset in minutes.
Raises:
ValueError: if the time string is invalid or not supported.
""" |
time_string_length = len(time_string)
# The time string should at least contain 'hh:mm:ss'.
if time_string_length < 8:
raise ValueError('Time string too short.')
if time_string[2] != ':' or time_string[5] != ':':
raise ValueError('Invalid time string.')
try:
hours = int(time_string[0:2], 10)
except ValueError:
raise ValueError('Unable to parse hours.')
if hours not in range(0, 24):
raise ValueError('Hours value: {0:d} out of bounds.'.format(hours))
try:
minutes = int(time_string[3:5], 10)
except ValueError:
raise ValueError('Unable to parse minutes.')
if minutes not in range(0, 60):
raise ValueError('Minutes value: {0:d} out of bounds.'.format(minutes))
try:
seconds = int(time_string[6:8], 10)
except ValueError:
raise ValueError('Unable to parse seconds.')
# TODO: support a leap second?
if seconds not in range(0, 60):
raise ValueError('Seconds value: {0:d} out of bounds.'.format(seconds))
microseconds = None
time_zone_offset = None
time_zone_string_index = 8
while time_zone_string_index < time_string_length:
if time_string[time_zone_string_index] in ('+', '-'):
break
time_zone_string_index += 1
# The calculations that follow rely on the time zone string index
# to point beyond the string in case no time zone offset was defined.
if time_zone_string_index == time_string_length - 1:
time_zone_string_index += 1
if time_string_length > 8 and time_string[8] == '.':
time_fraction_length = time_zone_string_index - 9
if time_fraction_length not in (3, 6):
raise ValueError('Invalid time string.')
try:
time_fraction = time_string[9:time_zone_string_index]
time_fraction = int(time_fraction, 10)
except ValueError:
raise ValueError('Unable to parse time fraction.')
if time_fraction_length == 3:
time_fraction *= 1000
microseconds = time_fraction
if time_zone_string_index < time_string_length:
if (time_string_length - time_zone_string_index != 6 or
time_string[time_zone_string_index + 3] != ':'):
raise ValueError('Invalid time string.')
try:
hours_from_utc = int(time_string[
time_zone_string_index + 1:time_zone_string_index + 3])
except ValueError:
raise ValueError('Unable to parse time zone hours offset.')
if hours_from_utc not in range(0, 15):
raise ValueError('Time zone hours offset value out of bounds.')
try:
minutes_from_utc = int(time_string[
time_zone_string_index + 4:time_zone_string_index + 6])
except ValueError:
raise ValueError('Unable to parse time zone minutes offset.')
if minutes_from_utc not in range(0, 60):
raise ValueError('Time zone minutes offset value out of bounds.')
# pylint: disable=invalid-unary-operand-type
time_zone_offset = (hours_from_utc * 60) + minutes_from_utc
# Note that when the sign of the time zone offset is negative
# the difference needs to be added. We do so by flipping the sign.
if time_string[time_zone_string_index] != '-':
time_zone_offset = -time_zone_offset
return hours, minutes, seconds, microseconds, time_zone_offset |
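A worked example of the fraction scaling and offset-sign convention above, written as straight-line code rather than a call to the method: for an input of '18:17:16.123-07:00' the 3-digit fraction expands to 123000 microseconds and the offset becomes +420 minutes, so that adding it converts the local time to UTC.

sign = '-'                               # sign character taken from the '-07:00' suffix
time_fraction = int('123', 10) * 1000    # 3-digit fraction -> milliseconds -> microseconds
time_zone_offset = (7 * 60) + 0          # hours and minutes parsed from the suffix
if sign != '-':
    time_zone_offset = -time_zone_offset  # flip so the offset is added to reach UTC
print(time_fraction, time_zone_offset)   # 123000 420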
<SYSTEM_TASK:>
Retrieves the day of the year for a specific day of a month in a year.
<END_TASK>
<USER_TASK:>
Description:
def _GetDayOfYear(self, year, month, day_of_month):
"""Retrieves the day of the year for a specific day of a month in a year.
Args:
year (int): year e.g. 1970.
month (int): month, where 1 represents January.
day_of_month (int): day of the month, where 1 represents the first day.
Returns:
int: day of year.
Raises:
ValueError: if the month or day of month value is out of bounds.
""" |
if month not in range(1, 13):
raise ValueError('Month value out of bounds.')
days_per_month = self._GetDaysPerMonth(year, month)
if day_of_month < 1 or day_of_month > days_per_month:
raise ValueError('Day of month value out of bounds.')
day_of_year = day_of_month
for past_month in range(1, month):
day_of_year += self._GetDaysPerMonth(year, past_month)
return day_of_year |
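A quick standard-library cross-check of the expected result (illustrative only): in the leap year 2000, March 1 is day 61 of the year.

from datetime import date

print(date(2000, 3, 1).timetuple().tm_yday)  # 61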
<SYSTEM_TASK:>
Retrieves the number of days in a month of a specific year.
<END_TASK>
<USER_TASK:>
Description:
def _GetDaysPerMonth(self, year, month):
"""Retrieves the number of days in a month of a specific year.
Args:
year (int): year e.g. 1970.
month (int): month, where 1 represents January.
Returns:
int: number of days in the month.
Raises:
ValueError: if the month value is out of bounds.
""" |
if month not in range(1, 13):
raise ValueError('Month value out of bounds.')
days_per_month = self._DAYS_PER_MONTH[month - 1]
if month == 2 and self._IsLeapYear(year):
days_per_month += 1
return days_per_month |
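The same lookup is available from the standard library and can serve as an illustrative sanity check: February has 29 days in 2000 but only 28 in 1900, since 1900 is not a leap year.

import calendar

print(calendar.monthrange(2000, 2)[1], calendar.monthrange(1900, 2)[1])  # 29 28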
<SYSTEM_TASK:>
Retrieves the number of days in a century.
<END_TASK>
<USER_TASK:>
Description:
def _GetNumberOfDaysInCentury(self, year):
"""Retrieves the number of days in a century.
Args:
year (int): year in the century e.g. 1970.
Returns:
int: number of (remaining) days in the century.
Raises:
ValueError: if the year value is out of bounds.
""" |
if year < 0:
raise ValueError('Year value out of bounds.')
year, _ = divmod(year, 100)
if self._IsLeapYear(year):
return 36525
return 36524 |
<SYSTEM_TASK:>
Retrieves the number of seconds from the date and time elements.
<END_TASK>
<USER_TASK:>
Description:
def _GetNumberOfSecondsFromElements(
self, year, month, day_of_month, hours, minutes, seconds):
"""Retrieves the number of seconds from the date and time elements.
Args:
year (int): year e.g. 1970.
month (int): month, where 1 represents January.
day_of_month (int): day of the month, where 1 represents the first day.
hours (int): hours.
minutes (int): minutes.
seconds (int): seconds.
Returns:
int: number of seconds since January 1, 1970 00:00:00 or None if year,
month or day of month are not set.
Raises:
ValueError: if the time elements are invalid.
""" |
if not year or not month or not day_of_month:
return None
# calendar.timegm does not sanity check the time elements.
if hours is None:
hours = 0
elif hours not in range(0, 24):
raise ValueError('Hours value: {0!s} out of bounds.'.format(hours))
if minutes is None:
minutes = 0
elif minutes not in range(0, 60):
raise ValueError('Minutes value: {0!s} out of bounds.'.format(minutes))
# TODO: support a leap second?
if seconds is None:
seconds = 0
elif seconds not in range(0, 60):
raise ValueError('Seconds value: {0!s} out of bounds.'.format(seconds))
# Note that calendar.timegm() does not raise when date is: 2013-02-29.
days_per_month = self._GetDaysPerMonth(year, month)
if day_of_month < 1 or day_of_month > days_per_month:
raise ValueError('Day of month value out of bounds.')
# calendar.timegm requires the time tuple to contain at least
# 6 integer values.
time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)
number_of_seconds = calendar.timegm(time_elements_tuple)
return int(number_of_seconds) |
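For reference, the conversion bottoms out in calendar.timegm, which interprets the tuple as UTC; a minimal illustration:

import calendar

# One minute past the POSIX epoch, interpreted as UTC.
print(calendar.timegm((1970, 1, 1, 0, 1, 0)))  # 60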
<SYSTEM_TASK:>
Copies the date time value to a stat timestamp tuple.
<END_TASK>
<USER_TASK:>
Description:
def CopyToStatTimeTuple(self):
"""Copies the date time value to a stat timestamp tuple.
Returns:
tuple[int, int]: a POSIX timestamp in seconds and the remainder in
100 nanosecond intervals, or (None, None) on error.
""" |
normalized_timestamp = self._GetNormalizedTimestamp()
if normalized_timestamp is None:
return None, None
if self._precision in (
definitions.PRECISION_1_NANOSECOND,
definitions.PRECISION_100_NANOSECONDS,
definitions.PRECISION_1_MICROSECOND,
definitions.PRECISION_1_MILLISECOND,
definitions.PRECISION_100_MILLISECONDS):
remainder = int((normalized_timestamp % 1) * self._100NS_PER_SECOND)
return int(normalized_timestamp), remainder
return int(normalized_timestamp), None |
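An illustrative split of a fractional POSIX timestamp into the stat-style pair of whole seconds plus a remainder in 100-nanosecond ticks (10,000,000 ticks per second); the value is arbitrary.

normalized_timestamp = 1281643591.25
remainder = int((normalized_timestamp % 1) * 10000000)
print(int(normalized_timestamp), remainder)  # 1281643591 2500000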
<SYSTEM_TASK:>
Copies the date time value to an ISO 8601 date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeStringISO8601(self):
"""Copies the date time value to an ISO 8601 date and time string.
Returns:
str: date and time value formatted as an ISO 8601 date and time string or
None if the timestamp cannot be copied to a date and time string.
""" |
date_time_string = self.CopyToDateTimeString()
if date_time_string:
date_time_string = date_time_string.replace(' ', 'T')
date_time_string = '{0:s}Z'.format(date_time_string)
return date_time_string |
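The string transformation applied above, shown on a literal value (illustrative only):

date_time_string = '2021-07-24 18:17:16.123456'
print('{0:s}Z'.format(date_time_string.replace(' ', 'T')))
# 2021-07-24T18:17:16.123456Z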
<SYSTEM_TASK:>
Retrieves a timestamp that is compatible with plaso.
<END_TASK>
<USER_TASK:>
Description:
def GetPlasoTimestamp(self):
"""Retrieves a timestamp that is compatible with plaso.
Returns:
int: a POSIX timestamp in microseconds or None if no timestamp is
available.
""" |
normalized_timestamp = self._GetNormalizedTimestamp()
if normalized_timestamp is None:
return None
normalized_timestamp *= definitions.MICROSECONDS_PER_SECOND
normalized_timestamp = normalized_timestamp.quantize(
1, rounding=decimal.ROUND_HALF_UP)
return int(normalized_timestamp) |
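The rounding behaviour used above, demonstrated on a bare decimal.Decimal value: half-up rounding at microsecond granularity (illustrative only).

import decimal

value = decimal.Decimal('1281643591.5468') * 1000000
print(int(value.quantize(1, rounding=decimal.ROUND_HALF_UP)))  # 1281643591546800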
<SYSTEM_TASK:>
Retrieves the time of day represented by the date and time values.
<END_TASK>
<USER_TASK:>
Description:
def GetTimeOfDay(self):
"""Retrieves the time of day represented by the date and time values.
Returns:
tuple[int, int, int]: hours, minutes, seconds or (None, None, None)
if the date and time values do not represent a time of day.
""" |
normalized_timestamp = self._GetNormalizedTimestamp()
if normalized_timestamp is None:
return None, None, None
_, hours, minutes, seconds = self._GetTimeValues(normalized_timestamp)
return hours, minutes, seconds |
<SYSTEM_TASK:>
Copies a fake timestamp from a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies a fake timestamp from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
""" |
date_time_values = self._CopyDateTimeFromString(time_string)
year = date_time_values.get('year', 0)
month = date_time_values.get('month', 0)
day_of_month = date_time_values.get('day_of_month', 0)
hours = date_time_values.get('hours', 0)
minutes = date_time_values.get('minutes', 0)
seconds = date_time_values.get('seconds', 0)
self._normalized_timestamp = None
self._number_of_seconds = self._GetNumberOfSecondsFromElements(
year, month, day_of_month, hours, minutes, seconds)
self._microseconds = date_time_values.get('microseconds', None)
self.is_local_time = False |
<SYSTEM_TASK:>
Copies the RFC2579 date-time to a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeString(self):
"""Copies the RFC2579 date-time to a date and time string.
Returns:
str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss.#" or
None if the number of seconds is missing.
""" |
if self._number_of_seconds is None:
return None
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:01d}'.format(
self.year, self.month, self.day_of_month, self.hours, self.minutes,
self.seconds, self.deciseconds) |
<SYSTEM_TASK:>
Retrieves the number of seconds from a FAT date time.
<END_TASK>
<USER_TASK:>
Description:
def _GetNumberOfSeconds(self, fat_date_time):
"""Retrieves the number of seconds from a FAT date time.
Args:
fat_date_time (int): FAT date time.
Returns:
int: number of seconds since January 1, 1980 00:00:00.
Raises:
ValueError: if the month, day of month, hours, minutes or seconds
value is out of bounds.
""" |
day_of_month = (fat_date_time & 0x1f)
month = ((fat_date_time >> 5) & 0x0f)
year = (fat_date_time >> 9) & 0x7f
days_per_month = self._GetDaysPerMonth(year, month)
if day_of_month < 1 or day_of_month > days_per_month:
raise ValueError('Day of month value out of bounds.')
number_of_days = self._GetDayOfYear(1980 + year, month, day_of_month)
number_of_days -= 1
for past_year in range(0, year):
number_of_days += self._GetNumberOfDaysInYear(past_year)
fat_date_time >>= 16
seconds = (fat_date_time & 0x1f) * 2
minutes = (fat_date_time >> 5) & 0x3f
hours = (fat_date_time >> 11) & 0x1f
if hours not in range(0, 24):
raise ValueError('Hours value out of bounds.')
if minutes not in range(0, 60):
raise ValueError('Minutes value out of bounds.')
if seconds not in range(0, 60):
raise ValueError('Seconds value out of bounds.')
number_of_seconds = (((hours * 60) + minutes) * 60) + seconds
number_of_seconds += number_of_days * definitions.SECONDS_PER_DAY
return number_of_seconds |
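For reference, a sketch of the packed FAT layout decoded above, applied to a hypothetical 32-bit value (date in the low word, time in the high word):

fat_date_time = 0x645C4D2F                    # hypothetical value; encodes 2018-09-15 12:34:56
day_of_month = fat_date_time & 0x1f
month = (fat_date_time >> 5) & 0x0f
year = 1980 + ((fat_date_time >> 9) & 0x7f)
seconds = ((fat_date_time >> 16) & 0x1f) * 2  # stored with 2-second granularity
minutes = (fat_date_time >> 21) & 0x3f
hours = (fat_date_time >> 27) & 0x1f
print(year, month, day_of_month, hours, minutes, seconds)  # 2018 9 15 12 34 56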
<SYSTEM_TASK:>
Copies the FILETIME timestamp to a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeString(self):
"""Copies the FILETIME timestamp to a date and time string.
Returns:
str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss.#######" or
None if the timestamp is missing or invalid.
""" |
if (self._timestamp is None or self._timestamp < 0 or
self._timestamp > self._UINT64_MAX):
return None
timestamp, remainder = divmod(self._timestamp, self._100NS_PER_SECOND)
number_of_days, hours, minutes, seconds = self._GetTimeValues(timestamp)
year, month, day_of_month = self._GetDateValuesWithEpoch(
number_of_days, self._EPOCH)
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:07d}'.format(
year, month, day_of_month, hours, minutes, seconds, remainder) |
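A well-known reference point for the conversion above (illustrative): 116444736000000000 FILETIME ticks of 100 nanoseconds since 1601-01-01 fall exactly on the POSIX epoch.

timestamp, remainder = divmod(116444736000000000, 10000000)
print(timestamp, remainder)  # 11644473600 0 -> 1970-01-01 00:00:00.0000000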
<SYSTEM_TASK:>
Creates a precision helper.
<END_TASK>
<USER_TASK:>
Description:
def CreatePrecisionHelper(cls, precision):
"""Creates a precision helper.
Args:
precision (str): precision of the date and time value, which should
be one of the PRECISION_VALUES in definitions.
Returns:
class: date time precision helper class.
Raises:
ValueError: if the precision value is unsupported.
""" |
precision_helper_class = cls._PRECISION_CLASSES.get(precision, None)
if not precision_helper_class:
raise ValueError('Unsupported precision: {0!s}'.format(precision))
return precision_helper_class |
<SYSTEM_TASK:>
Copies an APFS timestamp from a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies a APFS timestamp from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the date and time value is not supported.
""" |
super(APFSTime, self)._CopyFromDateTimeString(time_string)
if (self._timestamp is None or self._timestamp < self._INT64_MIN or
self._timestamp > self._INT64_MAX):
raise ValueError('Date time value not supported.') |
<SYSTEM_TASK:>
Copies the APFS timestamp to a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeString(self):
"""Copies the APFS timestamp to a date and time string.
Returns:
str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss.#########" or
None if the timestamp is missing or invalid.
""" |
if (self._timestamp is None or self._timestamp < self._INT64_MIN or
self._timestamp > self._INT64_MAX):
return None
return super(APFSTime, self)._CopyToDateTimeString() |
<SYSTEM_TASK:>
Copies the Cocoa timestamp to a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyToDateTimeString(self):
"""Copies the Cocoa timestamp to a date and time string.
Returns:
str: date and time value formatted as: YYYY-MM-DD hh:mm:ss.###### or
None if the timestamp cannot be copied to a date and time string.
""" |
if self._timestamp is None:
return None
number_of_days, hours, minutes, seconds = self._GetTimeValues(
int(self._timestamp))
year, month, day_of_month = self._GetDateValuesWithEpoch(
number_of_days, self._EPOCH)
microseconds = int(
(self._timestamp % 1) * definitions.MICROSECONDS_PER_SECOND)
return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:06d}'.format(
year, month, day_of_month, hours, minutes, seconds, microseconds) |
<SYSTEM_TASK:>
Send an SMSMessage instance using a connection given by the specified `backend`.
<END_TASK>
<USER_TASK:>
Description:
def send_sms_message(sms_message, backend=None, fail_silently=False):
"""
Send an SMSMessage instance using a connection given by the specified `backend`.
""" |
with get_sms_connection(backend=backend, fail_silently=fail_silently) as connection:
result = connection.send_messages([sms_message])
return result |
<SYSTEM_TASK:>
Receives a list of SMSMessage instances and returns a list of RQ `Job` instances.
<END_TASK>
<USER_TASK:>
Description:
def send_messages(self, sms_messages):
"""
Receives a list of SMSMessage instances and returns a list of RQ `Job` instances.
""" |
results = []
for message in sms_messages:
try:
assert message.connection is None
except AssertionError:
if not self.fail_silently:
raise
backend = self.backend
fail_silently = self.fail_silently
result = django_rq.enqueue(self._send, message, backend=backend, fail_silently=fail_silently)
results.append(result)
return results |
<SYSTEM_TASK:>
Copies a date and time from an ISO 8601 date and time string.
<END_TASK>
<USER_TASK:>
Description:
def _CopyDateTimeFromStringISO8601(self, time_string):
"""Copies a date and time from an ISO 8601 date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DDThh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional.
Returns:
dict[str, int]: date and time values, such as year, month, day of month,
hours, minutes, seconds and microseconds.
Raises:
ValueError: if the time string is invalid or not supported.
""" |
if not time_string:
raise ValueError('Invalid time string.')
time_string_length = len(time_string)
year, month, day_of_month = self._CopyDateFromString(time_string)
if time_string_length <= 10:
return {
'year': year,
'month': month,
'day_of_month': day_of_month}
# If a time of day is specified, the time string should at least
# contain 'YYYY-MM-DDThh'.
if time_string[10] != 'T':
raise ValueError(
'Invalid time string - missing date and time separator.')
hours, minutes, seconds, microseconds, time_zone_offset = (
self._CopyTimeFromStringISO8601(time_string[11:]))
if time_zone_offset:
year, month, day_of_month, hours, minutes = self._AdjustForTimeZoneOffset(
year, month, day_of_month, hours, minutes, time_zone_offset)
date_time_values = {
'year': year,
'month': month,
'day_of_month': day_of_month,
'hours': hours,
'minutes': minutes,
'seconds': seconds}
if microseconds is not None:
date_time_values['microseconds'] = microseconds
return date_time_values |
<SYSTEM_TASK:>
Copies time elements from a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies time elements from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
""" |
date_time_values = self._CopyDateTimeFromString(time_string)
self._CopyFromDateTimeValues(date_time_values) |
<SYSTEM_TASK:>
Copies time elements from an ISO 8601 date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromStringISO8601(self, time_string):
"""Copies time elements from an ISO 8601 date and time string.
Currently not supported:
* Duration notation: "P..."
* Week notation "2016-W33"
* Date with week number notation "2016-W33-3"
* Date without year notation "--08-17"
* Ordinal date notation "2016-230"
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DDThh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the time string is invalid or not supported.
""" |
date_time_values = self._CopyDateTimeFromStringISO8601(time_string)
self._CopyFromDateTimeValues(date_time_values) |
<SYSTEM_TASK:>
Copies a SYSTEMTIME structure from a date and time string.
<END_TASK>
<USER_TASK:>
Description:
def CopyFromDateTimeString(self, time_string):
"""Copies a SYSTEMTIME structure from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the date string is invalid or not supported.
""" |
date_time_values = self._CopyDateTimeFromString(time_string)
year = date_time_values.get('year', 0)
month = date_time_values.get('month', 0)
day_of_month = date_time_values.get('day_of_month', 0)
hours = date_time_values.get('hours', 0)
minutes = date_time_values.get('minutes', 0)
seconds = date_time_values.get('seconds', 0)
microseconds = date_time_values.get('microseconds', 0)
milliseconds, _ = divmod(
microseconds, definitions.MICROSECONDS_PER_MILLISECOND)
if year < 1601 or year > 30827:
raise ValueError('Unsupported year value: {0:d}.'.format(year))
self._normalized_timestamp = None
self._number_of_seconds = self._GetNumberOfSecondsFromElements(
year, month, day_of_month, hours, minutes, seconds)
self.year = year
self.month = month
self.day_of_month = day_of_month
# TODO: calculate day of week on demand.
self.day_of_week = None
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.milliseconds = milliseconds
self.is_local_time = False |
<SYSTEM_TASK:>
This is a copy of CharField.prepare_template, except that it adds a fake
<END_TASK>
<USER_TASK:>
Description:
def _prepare_template(self, obj, needs_request=False):
"""
This is a copy of CharField.prepare_template, except that it adds a fake
request to the context, which is mainly needed to render CMS placeholders
""" |
if self.instance_name is None and self.template_name is None:
raise SearchFieldError("This field requires either its instance_name variable to be populated or an explicit template_name in order to load the correct template.")
if self.template_name is not None:
template_names = self.template_name
if not isinstance(template_names, (list, tuple)):
template_names = [template_names]
else:
template_names = ['search/indexes/%s/%s_%s.txt' % (obj._meta.app_label, obj._meta.module_name, self.instance_name)]
t = loader.select_template(template_names)
ctx = {'object': obj}
if needs_request:
request = rf.get("/")
request.session = {}
ctx['request'] = request
return t.render(Context(ctx)) |
<SYSTEM_TASK:>
Gets the translated value of `field_name`. If `FALLBACK` evaluates to `True` and the field
<END_TASK>
<USER_TASK:>
Description:
def get_value(self, context, obj, field_name):
"""
Gets the translated value of `field_name`. If `FALLBACK` evaluates to `True` and the field
has no translation for the current language, it tries to find a fallback value, using
the languages defined in `settings.LANGUAGES`.
""" |
try:
language = get_language()
value = self.get_translated_value(obj, field_name, language)
if value:
return value
if self.FALLBACK:
for lang, lang_name in settings.LANGUAGES:
if lang == language:
# already tried this one...
continue
value = self.get_translated_value(obj, field_name, lang)
if value:
return value
untranslated = getattr(obj, field_name)
if self._is_truthy(untranslated):
return untranslated
else:
return self.EMPTY_VALUE
except Exception:
if settings.TEMPLATE_DEBUG:
raise
return self.EMPTY_VALUE |
<SYSTEM_TASK:>
A convenience function to ease debugging. It will print the node structure that's returned from CommonMark
<END_TASK>
<USER_TASK:>
Description:
def customWalker(node, space=''):
"""
A convenience function to ease debugging. It will print the node structure that's returned from CommonMark
The usage would be something like:
>>> content = Parser().parse('Some big text block\n===================\n\nwith content\n')
>>> customWalker(content)
document
heading
text Some big text block
paragraph
text with content
Spaces are used to convey nesting
""" |
txt = ''
try:
txt = node.literal
except AttributeError:
pass
if txt is None or txt == '':
print('{}{}'.format(space, node.t))
else:
print('{}{}\t{}'.format(space, node.t, txt))
cur = node.first_child
if cur:
while cur is not None:
customWalker(cur, space + ' ')
cur = cur.nxt |
<SYSTEM_TASK:>
Process a paragraph, which includes all content under it
<END_TASK>
<USER_TASK:>
Description:
def paragraph(node):
"""
Process a paragraph, which includes all content under it
""" |
text = ''
if node.string_content is not None:
text = node.string_content
o = nodes.paragraph('', ' '.join(text))
o.line = node.sourcepos[0][0]
for n in MarkDown(node):
o.append(n)
return o |
<SYSTEM_TASK:>
A hyperlink. Note that alt text doesn't work, since there's no apparent way to do that in docutils
<END_TASK>
<USER_TASK:>
Description:
def reference(node):
"""
A hyperlink. Note that alt text doesn't work, since there's no apparent way to do that in docutils
""" |
o = nodes.reference()
o['refuri'] = node.destination
if node.title:
o['name'] = node.title
for n in MarkDown(node):
o += n
return o |
<SYSTEM_TASK:>
A title node. It has no children
<END_TASK>
<USER_TASK:>
Description:
def title(node):
"""
A title node. It has no children
""" |
return nodes.title(node.first_child.literal, node.first_child.literal) |
<SYSTEM_TASK:>
An image element
<END_TASK>
<USER_TASK:>
Description:
def image(node):
"""
An image element
The first child is the alt text. reStructuredText can't handle titles
""" |
o = nodes.image(uri=node.destination)
if node.first_child is not None:
o['alt'] = node.first_child.literal
return o |
<SYSTEM_TASK:>
Returns a list of nodes, containing CommonMark nodes converted to docutils nodes
<END_TASK>
<USER_TASK:>
Description:
def MarkDown(node):
"""
Returns a list of nodes, containing CommonMark nodes converted to docutils nodes
""" |
cur = node.first_child
# Go into each child, in turn
output = []
while cur is not None:
t = cur.t
if t == 'paragraph':
output.append(paragraph(cur))
elif t == 'text':
output.append(text(cur))
elif t == 'softbreak':
output.append(softbreak(cur))
elif t == 'linebreak':
output.append(hardbreak(cur))
elif t == 'link':
output.append(reference(cur))
elif t == 'heading':
output.append(title(cur))
elif t == 'emph':
output.append(emphasis(cur))
elif t == 'strong':
output.append(strong(cur))
elif t == 'code':
output.append(literal(cur))
elif t == 'code_block':
output.append(literal_block(cur))
elif t == 'html_inline' or t == 'html_block':
output.append(raw(cur))
elif t == 'block_quote':
output.append(block_quote(cur))
elif t == 'thematic_break':
output.append(transition(cur))
elif t == 'image':
output.append(image(cur))
elif t == 'list':
output.append(listNode(cur))
elif t == 'item':
output.append(listItem(cur))
elif t == 'MDsection':
output.append(section(cur))
else:
print('Received unhandled type: {}. Full print of node:'.format(t))
cur.pretty()
cur = cur.nxt
return output |
<SYSTEM_TASK:>
Correct the nxt and parent for each child
<END_TASK>
<USER_TASK:>
Description:
def finalizeSection(section):
"""
Correct the nxt and parent for each child
""" |
cur = section.first_child
last = section.last_child
if last is not None:
last.nxt = None
while cur is not None:
cur.parent = section
cur = cur.nxt |
<SYSTEM_TASK:>
Sections aren't handled by CommonMark at the moment.
<END_TASK>
<USER_TASK:>
Description:
def nestSections(block, level=1):
"""
Sections aren't handled by CommonMark at the moment.
This function adds sections to a block of nodes.
'title' nodes with an assigned level below 'level' will be put in a child section.
If there are no child nodes with titles of level 'level' then nothing is done
""" |
cur = block.first_child
if cur is not None:
children = []
# Do we need to do anything?
nest = False
while cur is not None:
if cur.t == 'heading' and cur.level == level:
nest = True
break
cur = cur.nxt
if not nest:
return
section = Node('MDsection', 0)
section.parent = block
cur = block.first_child
while cur is not None:
if cur.t == 'heading' and cur.level == level:
# Found a split point, flush the last section if needed
if section.first_child is not None:
finalizeSection(section)
children.append(section)
section = Node('MDsection', 0)
nxt = cur.nxt
# Avoid adding sections without titles at the start
if section.first_child is None:
if cur.t == 'heading' and cur.level == level:
section.append_child(cur)
else:
children.append(cur)
else:
section.append_child(cur)
cur = nxt
# Flush the final section if it has any content
if section.first_child is not None:
finalizeSection(section)
children.append(section)
block.first_child = None
block.last_child = None
nextLevel = level + 1
for child in children:
# Handle nesting
if child.t == 'MDsection':
nestSections(child, level=nextLevel)
# Append
if block.first_child is None:
block.first_child = child
else:
block.last_child.nxt = child
child.parent = block
child.nxt = None
child.prev = block.last_child
block.last_child = child |
<SYSTEM_TASK:>
Parses a block of text, returning a list of docutils nodes
<END_TASK>
<USER_TASK:>
Description:
def parseMarkDownBlock(text):
"""
Parses a block of text, returning a list of docutils nodes
>>> result = parseMarkDownBlock("Some\n====\n\nblock of text\n\nHeader\n======\n\nblah\n")
>>> isinstance(result, list)
True
""" |
block = Parser().parse(text)
# CommonMark can't nest sections, so do it manually
nestSections(block)
return MarkDown(block) |
<SYSTEM_TASK:>
Given a list of reStructuredText or MarkDown sections, return a docutils node list
<END_TASK>
<USER_TASK:>
Description:
def renderList(l, markDownHelp, settings=None):
"""
Given a list of reStructuredText or MarkDown sections, return a docutils node list
""" |
if len(l) == 0:
return []
if markDownHelp:
from sphinxarg.markdown import parseMarkDownBlock
return parseMarkDownBlock('\n\n'.join(l) + '\n')
else:
all_children = []
for element in l:
if isinstance(element, str):
if settings is None:
settings = OptionParser(components=(Parser,)).get_default_values()
document = new_document(None, settings)
Parser().parse(element + '\n', document)
all_children += document.children
elif isinstance(element, nodes.definition):
all_children += element
return all_children |
<SYSTEM_TASK:>
Process all 'action groups', which also include 'Options' and 'Required
<END_TASK>
<USER_TASK:>
Description:
def print_action_groups(data, nested_content, markDownHelp=False, settings=None):
"""
Process all 'action groups', which also include 'Options' and 'Required
arguments'. A list of nodes is returned.
""" |
definitions = map_nested_definitions(nested_content)
nodes_list = []
if 'action_groups' in data:
for action_group in data['action_groups']:
# Every action group comprises a section holding a title, the description, and the option group (members)
section = nodes.section(ids=[action_group['title']])
section += nodes.title(action_group['title'], action_group['title'])
desc = []
if action_group['description']:
desc.append(action_group['description'])
# Replace/append/prepend content to the description according to nested content
subContent = []
if action_group['title'] in definitions:
classifier, s, subContent = definitions[action_group['title']]
if classifier == '@replace':
desc = [s]
elif classifier == '@after':
desc.append(s)
elif classifier == '@before':
desc.insert(0, s)
elif classifier == '@skip':
continue
if len(subContent) > 0:
for k, v in map_nested_definitions(subContent).items():
definitions[k] = v
# Render appropriately
for element in renderList(desc, markDownHelp):
section += element
localDefinitions = definitions
if len(subContent) > 0:
localDefinitions = {k: v for k, v in definitions.items()}
for k, v in map_nested_definitions(subContent).items():
localDefinitions[k] = v
items = []
# Iterate over action group members
for entry in action_group['options']:
"""
Members will include:
default The default value. This may be ==SUPPRESS==
name A list of option names (e.g., ['-h', '--help'])
help The help message string
There may also be a 'choices' member.
"""
# Build the help text
arg = []
if 'choices' in entry:
arg.append('Possible choices: {}\n'.format(", ".join([str(c) for c in entry['choices']])))
if 'help' in entry:
arg.append(entry['help'])
if entry['default'] is not None and entry['default'] not in ['"==SUPPRESS=="', '==SUPPRESS==']:
if entry['default'] == '':
arg.append('Default: ""')
else:
arg.append('Default: {}'.format(entry['default']))
# Handle nested content, the term used in the dict has the comma removed for simplicity
desc = arg
term = ' '.join(entry['name'])
if term in localDefinitions:
classifier, s, subContent = localDefinitions[term]
if classifier == '@replace':
desc = [s]
elif classifier == '@after':
desc.append(s)
elif classifier == '@before':
desc.insert(0, s)
term = ', '.join(entry['name'])
n = nodes.option_list_item('',
nodes.option_group('', nodes.option_string(text=term)),
nodes.description('', *renderList(desc, markDownHelp, settings)))
items.append(n)
section += nodes.option_list('', *items)
nodes_list.append(section)
return nodes_list |
<SYSTEM_TASK:>
If action groups are repeated, then links in the table of contents will
<END_TASK>
<USER_TASK:>
Description:
def ensureUniqueIDs(items):
"""
If action groups are repeated, then links in the table of contents will
just go to the first of the repeats. This may not be desirable, particularly
in the case of subcommands where the option groups have different members.
This function updates the title IDs by adding _repeatX, where X is a number
so that the links are then unique.
""" |
s = set()
for item in items:
for n in item.traverse(descend=True, siblings=True, ascend=False):
if isinstance(n, nodes.section):
ids = n['ids']
for idx, id in enumerate(ids):
if id not in s:
s.add(id)
else:
i = 1
while "{}_repeat{}".format(id, i) in s:
i += 1
ids[idx] = "{}_repeat{}".format(id, i)
s.add(ids[idx])
n['ids'] = ids |
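The naming scheme described above, sketched against a plain set of already-seen ids and detached from the docutils node traversal (illustrative only):

seen = {'options', 'options_repeat1'}
candidate = 'options'
i = 1
while '{}_repeat{}'.format(candidate, i) in seen:
    i += 1
unique_id = '{}_repeat{}'.format(candidate, i)
seen.add(unique_id)
print(unique_id)  # options_repeat2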
<SYSTEM_TASK:>
Select an arbitrary item, by position or by reference.
<END_TASK>
<USER_TASK:>
Description:
def select(self, item):
"""Select an arbitrary item, by possition or by reference.""" |
self._on_unselect[self._selected]()
self.selected().unfocus()
if isinstance(item, int):
self._selected = item % len(self)
else:
self._selected = self.items.index(item)
self.selected().focus()
self._on_select[self._selected]() |