text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Read FITS spectrum.
<END_TASK>
<USER_TASK:>
Description:
def read_fits_spec(filename, ext=1, wave_col='WAVELENGTH', flux_col='FLUX',
                   wave_unit=u.AA, flux_unit=units.FLAM):
    """Read a spectrum from a FITS table.

    Wavelength and flux units are taken from the ``TUNIT1`` and ``TUNIT2``
    keywords of the table (not primary) header when present; otherwise
    the ``wave_unit`` and ``flux_unit`` arguments are used.

    Parameters
    ----------
    filename : str or file pointer
        Spectrum file name or pointer.
    ext : int
        FITS extension with table data. Default is 1.
    wave_col, flux_col : str
        Wavelength and flux column names (case-insensitive).
    wave_unit, flux_unit : str or `~astropy.units.core.Unit`
        Fallback wavelength and flux units, defaulting to Angstrom and
        FLAM. Only used when the ``TUNIT1``/``TUNIT2`` keywords are absent.

    Returns
    -------
    header : dict
        Primary header only. Extension header is discarded.
    wavelengths, fluxes : `~astropy.units.quantity.Quantity`
        Wavelength and flux of the spectrum.
    """
    fs = fits.open(filename)
    header = dict(fs[str('PRIMARY')].header)
    table = fs[ext]
    wave_dat = table.data.field(wave_col).copy()
    flux_dat = table.data.field(flux_col).copy()
    hdr_wave_unit = table.header.get('TUNIT1')
    hdr_flux_unit = table.header.get('TUNIT2')
    # Units recorded in the table header take precedence over the keyword
    # defaults; if they do not validate, keep the default and warn.
    if hdr_wave_unit is not None:
        try:
            wave_unit = units.validate_unit(hdr_wave_unit)
        except (exceptions.SynphotError, ValueError) as e:  # pragma: no cover
            warnings.warn(
                '{0} from FITS header is not valid wavelength unit, using '
                '{1}: {2}'.format(hdr_wave_unit, wave_unit, e),
                AstropyUserWarning)
    if hdr_flux_unit is not None:
        try:
            flux_unit = units.validate_unit(hdr_flux_unit)
        except (exceptions.SynphotError, ValueError) as e:  # pragma: no cover
            warnings.warn(
                '{0} from FITS header is not valid flux unit, using '
                '{1}: {2}'.format(hdr_flux_unit, flux_unit, e),
                AstropyUserWarning)
    wavelengths = wave_dat * units.validate_unit(wave_unit)
    fluxes = flux_dat * units.validate_unit(flux_unit)
    # Only close a handle we opened ourselves; a caller-supplied file
    # pointer remains the caller's responsibility.
    if isinstance(filename, str):
        fs.close()
    return header, wavelengths, fluxes
<SYSTEM_TASK:>
Flux equivalencies between PHOTLAM and VEGAMAG.
<END_TASK>
<USER_TASK:>
Description:
def spectral_density_vega(wav, vegaflux):
    """Flux equivalencies between PHOTLAM and VEGAMAG.

    Parameters
    ----------
    wav : `~astropy.units.quantity.Quantity`
        Quantity associated with values being converted
        (e.g., wavelength or frequency).
    vegaflux : `~astropy.units.quantity.Quantity`
        Flux of Vega at ``wav``.

    Returns
    -------
    eqv : list
        List of equivalencies suitable for ``Quantity.to()``.
    """ |
    # Vega's flux expressed in PHOTLAM; the magnitude zero point.
    vega_photlam = vegaflux.to(
        PHOTLAM, equivalencies=u.spectral_density(wav)).value
    def converter(x):
        """Set nan/inf to -99 mag."""
        val = -2.5 * np.log10(x / vega_photlam)
        # Start with every output pinned at -99; only finite magnitudes
        # overwrite that sentinel below.
        result = np.zeros(val.shape, dtype=np.float64) - 99
        mask = np.isfinite(val)
        if result.ndim > 0:
            result[mask] = val[mask]
        elif mask:
            # 0-d (scalar) case: fancy indexing is unavailable, so
            # replace the whole result when the value is finite.
            result = np.asarray(val)
        return result
    def iconverter(x):
        # Inverse: VEGAMAG -> PHOTLAM via the standard magnitude relation.
        return vega_photlam * 10**(-0.4 * x)
    return [(PHOTLAM, VEGAMAG, converter, iconverter)] |
<SYSTEM_TASK:>
Validate unit.
<END_TASK>
<USER_TASK:>
Description:
def validate_unit(input_unit):
    """Validate unit and return it as an astropy unit object.

    To stay compatible with existing SYNPHOT data files:

    * 'angstroms' and 'inversemicrons' are accepted although
      unrecognized by astropy units
    * 'transmission', 'extinction', and 'emissivity' are
      converted to astropy dimensionless unit

    Parameters
    ----------
    input_unit : str or `~astropy.units.core.Unit`
        Unit to validate.

    Returns
    -------
    output_unit : `~astropy.units.core.Unit`
        Validated unit.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid unit.
    """
    # Unit objects pass through untouched.
    if isinstance(input_unit, (u.UnitBase, u.LogUnit)):
        return input_unit
    if not isinstance(input_unit, str):
        raise exceptions.SynphotError(
            '{0} must be a recognized string or '
            'astropy.units.core.Unit'.format(input_unit))
    lowered = input_unit.lower()
    # Backward-compatibility aliases plus workarounds for mag unit
    # limitations, keyed case-insensitively.
    aliases = {
        'angstroms': u.AA,
        'inversemicrons': u.micron ** -1,
        'transmission': THROUGHPUT,
        'extinction': THROUGHPUT,
        'emissivity': THROUGHPUT,
        'jy': u.Jy,
        'stmag': u.STmag,
        'mag(st)': u.STmag,
        'abmag': u.ABmag,
        'mag(ab)': u.ABmag,
    }
    if lowered in aliases:
        return aliases[lowered]
    try:  # astropy.units is case-sensitive
        return u.Unit(input_unit)
    except ValueError:  # synphot is case-insensitive
        return u.Unit(lowered)
<SYSTEM_TASK:>
Import data from json response.
<END_TASK>
<USER_TASK:>
Description:
def data_import(self, json_response):
    """Import products from a JSON API response.

    Each entry in the response's ``data`` list is dispatched to the
    loader matching its ``category``; unknown categories are logged and
    skipped.
    """
    if 'data' not in json_response:
        raise PyVLXException('no element data found: {0}'.format(
            json.dumps(json_response)))
    for entry in json_response['data']:
        if 'category' not in entry:
            raise PyVLXException('no element category: {0}'.format(
                json.dumps(entry)))
        category = entry['category']
        if category == 'Window opener':
            self.load_window_opener(entry)
        elif category in ('Roller shutter', 'Dual Shutter'):
            self.load_roller_shutter(entry)
        elif category == 'Blind':
            self.load_blind(entry)
        else:
            self.pyvlx.logger.warning(
                'WARNING: Could not parse product: %s', category)
<SYSTEM_TASK:>
Load window opener from JSON.
<END_TASK>
<USER_TASK:>
Description:
def load_window_opener(self, item):
    """Build a Window from its JSON config and register it."""
    self.add(Window.from_config(self.pyvlx, item))
<SYSTEM_TASK:>
Return tuple containing columns and rows of controlling terminal, trying harder
<END_TASK>
<USER_TASK:>
Description:
def get_terminal_size(fallback=(80, 24)):
    """
    Return tuple containing columns and rows of controlling terminal, trying harder
    than shutil.get_terminal_size to find a tty before returning fallback.

    Theoretically, stdout, stderr, and stdin could all be different ttys that could
    cause us to get the wrong measurements (instead of using the fallback) but the much more
    common case is that IO is piped.

    Fix over the previous version: the original streams can be ``None``
    (e.g. under pythonw/GUI hosts) or closed, in which case ``fileno()``
    raises ``AttributeError``/``ValueError`` instead of ``OSError`` and the
    fallback was never reached; those cases now fall through correctly.
    """
    for stream in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
        if stream is None:
            # Stream was never attached (detached/GUI interpreter).
            continue
        try:
            # TIOCGWINSZ fills a struct winsize (rows, cols, xpixel,
            # ypixel — four unsigned shorts); we buffer only the first
            # two shorts since that is all we unpack.
            data = fcntl.ioctl(stream.fileno(), TIOCGWINSZ, b"\x00\x00\x00\x00")
        except (OSError, ValueError):
            # OSError: not a tty / ioctl unsupported.
            # ValueError: stream is closed, fileno() unavailable.
            continue
        lines, columns = struct.unpack("hh", data)
        break
    else:
        columns, lines = fallback
    return columns, lines
<SYSTEM_TASK:>
Run checks on self.files, printing json object
<END_TASK>
<USER_TASK:>
Description:
def run_json(self):
    """
    Run checks on self.files, printing a json object to stdout with the
    information relevant to the CS50 IDE plugin (score, comment check,
    and an HTML diff per file; an error message for files that fail).
    """
    checks = {}
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            checks[file] = {"error": e.msg}
            continue
        diff_html = "\n".join(self.html_diff(results.original, results.styled))
        checks[file] = {
            "score": results.score,
            "comments": results.comment_ratio >= results.COMMENT_MIN,
            "diff": "<pre>{}</pre>".format(diff_html),
        }
    json.dump(checks, sys.stdout, indent=4)
    print()
<SYSTEM_TASK:>
Run checks on self.files, printing raw percentage to stdout.
<END_TASK>
<USER_TASK:>
Description:
def run_score(self):
    """
    Run checks on self.files, printing the aggregate raw score
    (1 - diffs/lines, floored at 0) to stdout. Files that fail to check
    are reported to stderr and excluded from the score.
    """
    total_diffs = 0
    total_lines = 0
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            termcolor.cprint(e.msg, "yellow", file=sys.stderr)
            continue
        total_diffs += results.diffs
        total_lines += results.lines
    try:
        print(max(1 - total_diffs / total_lines, 0.0))
    except ZeroDivisionError:
        # No lines were successfully checked at all.
        print(0.0)
<SYSTEM_TASK:>
Run apropriate check based on `file`'s extension and return it,
<END_TASK>
<USER_TASK:>
Description:
def _check(self, file):
    """
    Run the appropriate check for `file` (chosen by extension, falling
    back to libmagic content sniffing) and return its result, otherwise
    raise an Error.
    """
    if not os.path.exists(file):
        raise Error("file \"{}\" not found".format(file))
    extension = os.path.splitext(file)[1]
    try:
        check = self.extension_map[extension[1:]]
    except KeyError:
        # Unknown extension: sniff the file content instead.
        magic_type = magic.from_file(file)
        for name, cls in self.magic_map.items():
            if name in magic_type:
                check = cls
                break
        else:
            raise Error("unknown file type \"{}\", skipping...".format(file))
    try:
        # Normalize trailing whitespace per line while reading.
        with open(file) as f:
            code = "\n".join(line.rstrip() for line in f)
    except UnicodeDecodeError:
        raise Error("file does not seem to contain text, skipping...")
    # Ensure we don't warn about adding trailing newline
    if code and not code.endswith('\n'):
        code += '\n'
    return check(code)
<SYSTEM_TASK:>
Returns a generator yielding the side-by-side diff of `old` and `new`).
<END_TASK>
<USER_TASK:>
Description:
def split_diff(old, new):
    """
    Returns a generator yielding the side-by-side diff of `old` and `new`),
    with trailing whitespace stripped from each row.
    """
    table = icdiff.ConsoleDiff(cols=COLUMNS).make_table(
        old.splitlines(), new.splitlines())
    return (row.rstrip() for row in table)
<SYSTEM_TASK:>
Returns a generator yielding a unified diff between `old` and `new`.
<END_TASK>
<USER_TASK:>
Description:
def unified(old, new):
    """
    Returns a generator yielding a unified diff between `old` and `new`:
    unchanged lines verbatim, removals in bold red, additions in bold
    green, and ndiff's '?' hint lines suppressed.
    """
    for entry in difflib.ndiff(old.splitlines(), new.splitlines()):
        marker = entry[0]
        if marker == "?":
            continue
        if marker == " ":
            yield entry
        else:
            color = "red" if marker == "-" else "green"
            yield termcolor.colored(entry, color, attrs=["bold"])
<SYSTEM_TASK:>
Return color-coded character-based diff between `old` and `new`.
<END_TASK>
<USER_TASK:>
Description:
def char_diff(self, old, new):
    """
    Return color-coded character-based diff between `old` and `new`.

    Delegates to ``_char_diff`` with a transition function that emits the
    ANSI escape sequence switching the background color at boundaries
    between unchanged/removed/added runs of characters.
    """ |
    def color_transition(old_type, new_type):
        # termcolor.colored("") yields just the color-start escape
        # followed by RESET; slicing RESET off the end leaves the bare
        # "switch background" sequence for the new block type.
        new_color = termcolor.colored("", None, "on_red" if new_type ==
                                      "-" else "on_green" if new_type == "+" else None)
        # Prepend RESET so the previous block's coloring is cleared first.
        return "{}{}".format(termcolor.RESET, new_color[:-len(termcolor.RESET)])
    return self._char_diff(old, new, color_transition)
<SYSTEM_TASK:>
Returns a char-based diff between `old` and `new` where each character
<END_TASK>
<USER_TASK:>
Description:
def _char_diff(self, old, new, transition, fmt=lambda c: c):
    """
    Returns a char-based diff between `old` and `new` where each character
    is formatted by `fmt` and transitions between blocks are determined by `transition`.

    Yields one string per output line. `transition(old_type, new_type)` is
    called whenever the diff type changes ('-', '+', ' ', or None at the
    ends); added/removed newlines and tabs are rendered visibly and
    recorded in ``self._warn_chars``.
    """ |
    differ = difflib.ndiff(old, new)
    # Type of difference.
    dtype = None
    # Buffer for current line.
    line = []
    while True:
        # Get next diff or None if we're at the end.
        d = next(differ, (None,))
        if d[0] != dtype:
            line += transition(dtype, d[0])
            dtype = d[0]
        if dtype is None:
            break
        if d[2] == "\n":
            if dtype != " ":
                self._warn_chars.add((dtype, "\\n"))
                # Show added/removed newlines.
                line += [fmt(r"\n"), transition(dtype, " ")]
            # Don't yield a line if we are removing a newline
            if dtype != "-":
                yield "".join(line)
                line.clear()
                # Re-open the current block type on the fresh line.
                line.append(transition(" ", dtype))
        elif dtype != " " and d[2] == "\t":
            # Show added/removed tabs.
            line.append(fmt("\\t"))
            self._warn_chars.add((dtype, "\\t"))
        else:
            line.append(fmt(d[2]))
    # Flush buffer before quitting.
    last = "".join(line)
    # Only print last line if it contains non-ANSI characters.
    if re.sub(r"\x1b[^m]*m", "", last):
        yield last
<SYSTEM_TASK:>
Create and return frame from raw bytes.
<END_TASK>
<USER_TASK:>
Description:
def frame_from_raw(raw):
    """Create and return frame from raw bytes.

    Returns None (after logging a warning) when the command embedded in
    the raw bytes has no frame implementation.
    """
    command, payload = extract_from_frame(raw)
    frame = create_frame(command)
    if frame is None:
        hex_dump = ":".join("{:02x}".format(octet) for octet in raw)
        PYVLXLOG.warning("Command %s not implemented, raw: %s", command, hex_dump)
        return None
    frame.validate_payload_len(payload)
    frame.from_payload(payload)
    return frame
<SYSTEM_TASK:>
Create and return empty Frame from Command.
<END_TASK>
<USER_TASK:>
Description:
def create_frame(command):
    """Create and return empty Frame from Command.

    Returns None for commands without a frame implementation.
    """
    # Table-driven dispatch instead of a long if-chain: map each known
    # command to its frame class and instantiate on lookup.
    frame_types = {
        Command.GW_ERROR_NTF: FrameErrorNotification,
        Command.GW_COMMAND_SEND_REQ: FrameCommandSendRequest,
        Command.GW_COMMAND_SEND_CFM: FrameCommandSendConfirmation,
        Command.GW_COMMAND_RUN_STATUS_NTF: FrameCommandRunStatusNotification,
        Command.GW_COMMAND_REMAINING_TIME_NTF: FrameCommandRemainingTimeNotification,
        Command.GW_SESSION_FINISHED_NTF: FrameSessionFinishedNotification,
        Command.GW_PASSWORD_ENTER_REQ: FramePasswordEnterRequest,
        Command.GW_PASSWORD_ENTER_CFM: FramePasswordEnterConfirmation,
        Command.GW_CS_DISCOVER_NODES_REQ: FrameDiscoverNodesRequest,
        Command.GW_CS_DISCOVER_NODES_CFM: FrameDiscoverNodesConfirmation,
        Command.GW_CS_DISCOVER_NODES_NTF: FrameDiscoverNodesNotification,
        Command.GW_GET_SCENE_LIST_REQ: FrameGetSceneListRequest,
        Command.GW_GET_SCENE_LIST_CFM: FrameGetSceneListConfirmation,
        Command.GW_GET_SCENE_LIST_NTF: FrameGetSceneListNotification,
        Command.GW_GET_NODE_INFORMATION_REQ: FrameGetNodeInformationRequest,
        Command.GW_GET_NODE_INFORMATION_CFM: FrameGetNodeInformationConfirmation,
        Command.GW_GET_NODE_INFORMATION_NTF: FrameGetNodeInformationNotification,
        Command.GW_GET_ALL_NODES_INFORMATION_REQ: FrameGetAllNodesInformationRequest,
        Command.GW_GET_ALL_NODES_INFORMATION_CFM: FrameGetAllNodesInformationConfirmation,
        Command.GW_GET_ALL_NODES_INFORMATION_NTF: FrameGetAllNodesInformationNotification,
        Command.GW_GET_ALL_NODES_INFORMATION_FINISHED_NTF: FrameGetAllNodesInformationFinishedNotification,
        Command.GW_ACTIVATE_SCENE_REQ: FrameActivateSceneRequest,
        Command.GW_ACTIVATE_SCENE_CFM: FrameActivateSceneConfirmation,
        Command.GW_GET_VERSION_REQ: FrameGetVersionRequest,
        Command.GW_GET_VERSION_CFM: FrameGetVersionConfirmation,
        Command.GW_GET_PROTOCOL_VERSION_REQ: FrameGetProtocolVersionRequest,
        Command.GW_GET_PROTOCOL_VERSION_CFM: FrameGetProtocolVersionConfirmation,
        Command.GW_SET_NODE_NAME_REQ: FrameSetNodeNameRequest,
        Command.GW_SET_NODE_NAME_CFM: FrameSetNodeNameConfirmation,
        Command.GW_NODE_INFORMATION_CHANGED_NTF: FrameNodeInformationChangedNotification,
        Command.GW_GET_STATE_REQ: FrameGetStateRequest,
        Command.GW_GET_STATE_CFM: FrameGetStateConfirmation,
        Command.GW_SET_UTC_REQ: FrameSetUTCRequest,
        Command.GW_SET_UTC_CFM: FrameSetUTCConfirmation,
        Command.GW_ACTIVATION_LOG_UPDATED_NTF: FrameActivationLogUpdatedNotification,
        Command.GW_HOUSE_STATUS_MONITOR_ENABLE_REQ: FrameHouseStatusMonitorEnableRequest,
        Command.GW_HOUSE_STATUS_MONITOR_ENABLE_CFM: FrameHouseStatusMonitorEnableConfirmation,
        Command.GW_HOUSE_STATUS_MONITOR_DISABLE_REQ: FrameHouseStatusMonitorDisableRequest,
        Command.GW_HOUSE_STATUS_MONITOR_DISABLE_CFM: FrameHouseStatusMonitorDisableConfirmation,
        Command.GW_NODE_STATE_POSITION_CHANGED_NTF: FrameNodeStatePositionChangedNotification,
    }
    frame_type = frame_types.get(command)
    if frame_type is None:
        return None
    return frame_type()
<SYSTEM_TASK:>
Handle data received.
<END_TASK>
<USER_TASK:>
Description:
def data_received(self, data):
    """Handle data received.

    Feed the bytes to the tokenizer and dispatch every complete frame
    to the registered callback.
    """
    self.tokenizer.feed(data)
    while self.tokenizer.has_tokens():
        token = self.tokenizer.get_next_token()
        frame = frame_from_raw(token)
        if frame is None:
            # Unparseable/unimplemented frame: skip it.
            continue
        self.frame_received_cb(frame)
<SYSTEM_TASK:>
Connect to gateway via SSL.
<END_TASK>
<USER_TASK:>
Description:
async def connect(self):
    """Connect to gateway via SSL.

    Creates the TCP transport with the configured host/port and SSL
    context, then marks the connection as established.
    """
    protocol = TCPTransport(self.frame_received_cb, self.connection_closed_cb)
    self.transport, _ = await self.loop.create_connection(
        lambda: protocol,
        host=self.config.host,
        port=self.config.port,
        ssl=self.create_ssl_context())
    self.connected = True
<SYSTEM_TASK:>
Received message.
<END_TASK>
<USER_TASK:>
Description:
def frame_received_cb(self, frame):
    """Received message.

    Log the frame and schedule every registered (async) callback with it.
    """
    PYVLXLOG.debug("REC: %s", frame)
    for callback in self.frame_received_cbs:
        # pylint: disable=not-callable
        self.loop.create_task(callback(frame))
<SYSTEM_TASK:>
Update nodes via frame, usually received by house monitor.
<END_TASK>
<USER_TASK:>
Description:
async def process_frame(self, frame):
    """Update nodes via frame, usually received by house monitor.

    Both notification types carry a ``node_id`` and a
    ``current_position`` and were previously handled by two duplicated
    branches; they are folded into one here (DRY fix, same behavior).
    Frames for unknown nodes or non-opening devices are ignored.
    """
    if isinstance(frame, (FrameNodeStatePositionChangedNotification,
                          FrameGetAllNodesInformationNotification)):
        if frame.node_id not in self.pyvlx.nodes:
            return
        node = self.pyvlx.nodes[frame.node_id]
        if isinstance(node, OpeningDevice):
            node.position = Position(frame.current_position)
            await node.after_update()
<SYSTEM_TASK:>
Add scene, replace existing scene if scene with scene_id is present.
<END_TASK>
<USER_TASK:>
Description:
def add(self, scene):
    """Add scene, replacing any existing scene with the same scene_id."""
    if not isinstance(scene, Scene):
        raise TypeError()
    for index, existing in enumerate(self.__scenes):
        if existing.scene_id == scene.scene_id:
            # Same id already registered: replace in place.
            self.__scenes[index] = scene
            return
    self.__scenes.append(scene)
<SYSTEM_TASK:>
Return a bitmath instance representing the best human-readable
<END_TASK>
<USER_TASK:>
Description:
def best_prefix(bytes, system=NIST):
    """Return a bitmath instance representing the best human-readable
    representation of the number of bytes given by ``bytes``. In addition
    to a numeric type, the ``bytes`` parameter may also be a bitmath type.

    Optionally select a preferred unit system by specifying the ``system``
    keyword. Choices for ``system`` are ``bitmath.NIST`` (default) and
    ``bitmath.SI``.

    Basically a shortcut for ``Byte(n).best_prefix()``.
    """
    # NOTE: `bytes` shadows the builtin, but the parameter name is part
    # of the public signature (keyword callers), so it is kept.
    value = bytes.bytes if isinstance(bytes, Bitmath) else bytes
    return Byte(value).best_prefix(system=system)
<SYSTEM_TASK:>
Create bitmath instances of the capacity of a system block device
<END_TASK>
<USER_TASK:>
Description:
def query_device_capacity(device_fd):
    """Create a bitmath instance of the capacity of a system block device.

    Issues one or more ioctl requests against the opened device, combines
    the results as required per platform, and returns the capacity in
    bytes as a :class:`bitmath.Byte` instance.

    References for the ioctl request codes:

    * http://stackoverflow.com/a/12925285/263969
    * http://stackoverflow.com/a/9764508/263969

    :param file device_fd: A ``file`` object of the device to query the
       capacity of (as in ``get_device_capacity(open("/dev/sda"))``).
    :return: a bitmath :class:`bitmath.Byte` instance equivalent to the
       capacity of the target device in bytes.
    """
    if os_name() != 'posix':
        raise NotImplementedError("'bitmath.query_device_capacity' is not supported on this platform: %s" % os_name())
    mode = os.stat(device_fd.name).st_mode
    if not stat.S_ISBLK(mode):
        raise ValueError("The file descriptor provided is not of a device type")
    # Keyed by the value of platform.system(). Each entry lists
    # (PARAM_NAME, struct format char, ioctl request code) triples plus a
    # "func" that folds the collected results into a byte count.
    ioctl_map = {
        "Linux": {
            "request_params": [
                # BLKGETSIZE64 (<linux/fs.h>) answers with the capacity
                # in bytes as a u64; 'L' is the native unsigned-long
                # format, 8 bytes on the supported platforms.
                ("BLKGETSIZE64", "L", 0x80081272)
            ],
            # The Linux request already returns bytes, so just pass the
            # single result through.
            "func": lambda results: results["BLKGETSIZE64"]
        },
        "Darwin": {
            "request_params": [
                # Per <sys/disk.h>: media block count (uint64_t, 'L') ...
                ("DKIOCGETBLOCKCOUNT", "L", 0x40086419),
                # ... and media block size (uint32_t, 'I').
                ("DKIOCGETBLOCKSIZE", "I", 0x40046418)
            ],
            # OS X has no direct byte-capacity request; capacity is
            # block count multiplied by block size.
            "func": lambda results: results["DKIOCGETBLOCKCOUNT"] * results["DKIOCGETBLOCKSIZE"]
        }
    }
    platform_params = ioctl_map[platform.system()]
    results = {}
    for param_name, fmt, request_code in platform_params['request_params']:
        # Allocate a buffer of the native size of `fmt` for the kernel
        # to fill in, then decode it back into a Python int.
        # NOTE(review): a str buffer is Python-2 era; on Python 3 ioctl
        # expects a bytes-like buffer — confirm against supported versions.
        buffer = ' ' * struct.calcsize(fmt)
        buffer = fcntl.ioctl(device_fd.fileno(), request_code, buffer)
        results[param_name] = struct.unpack(fmt, buffer)[0]
    return Byte(platform_params['func'](results))
<SYSTEM_TASK:>
Parse a string with units and try to make a bitmath object out of
<END_TASK>
<USER_TASK:>
Description:
def parse_string(s):
    """Parse a string with units and try to make a bitmath object out of
    it.

    String inputs may include whitespace characters between the value and
    the unit.
    """
    # Strings only please
    if not isinstance(s, (str, unicode)):
        raise ValueError("parse_string only accepts string inputs but a %s was given" %
                         type(s))
    # The value is everything before the first alphabetic character, the
    # unit is everything from it onward.
    try:
        index = [c.isalpha() for c in s].index(True)
    except ValueError:
        # No alphabetic characters at all: no unit to find.
        raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s)
    val, unit = s[:index], s[index:]
    # Resolve the unit name to a class defined in this module.
    if unit == "b":
        unit_class = Bit
    elif unit == "B":
        unit_class = Byte
    elif hasattr(sys.modules[__name__], unit) and isinstance(getattr(sys.modules[__name__], unit), type):
        unit_class = globals()[unit]
    else:
        raise ValueError("The unit %s is not a valid bitmath unit" % unit)
    number = float(val)  # a bad value raises float()'s own ValueError
    try:
        return unit_class(number)
    except:  # pragma: no cover
        # Best-effort catch-all kept from the original implementation.
        raise ValueError("Can't parse string %s into a bitmath object" % s)
<SYSTEM_TASK:>
Attempt to parse a string with ambiguous units and try to make a
<END_TASK>
<USER_TASK:>
Description:
def parse_string_unsafe(s, system=SI):
    """Attempt to parse a string with ambiguous units and try to make a
    bitmath object out of it.

    This may produce inaccurate results if parsing shell output. For
    example `ls` may say a 2730 Byte file is '2.7K'. 2730 Bytes == 2.73 kB
    ~= 2.666 KiB. See the documentation for all of the important details.

    Note the following caveats:

    * All inputs are assumed to be byte-based (as opposed to bit based)
    * Numerical inputs (those without any units) are assumed to be a
      number of bytes
    * Inputs with single letter units (k, M, G, etc) are assumed to be SI
      units (base-10). Set the `system` parameter to `bitmath.NIST` to
      change this behavior.
    * Inputs with an `i` character following the leading letter (Ki, Mi,
      Gi) are assumed to be NIST units (base 2)
    * Capitalization does not matter
    """
    if not isinstance(s, (str, unicode)) and \
            not isinstance(s, numbers.Number):
        raise ValueError("parse_string_unsafe only accepts string/number inputs but a %s was given" %
                         type(s))
    # Easy cases first: a raw number, or a number wrapped in a string,
    # is simply a count of bytes.
    if isinstance(s, numbers.Number):
        return Byte(s)
    try:
        return Byte(float(s))
    except ValueError:
        # Not a plain number; fall through to unit parsing.
        pass
    # Split off the unit at the first alphabetic character.
    try:
        index = [c.isalpha() for c in s].index(True)
    except ValueError:  # pragma: no cover
        raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s)
    val, unit = s[:index], s[index:]
    # Normalize the byte marker: strip any trailing 'b'/'B' characters
    # and append exactly one 'B' (all inputs are assumed byte-based).
    unit = unit.rstrip('Bb') + 'B'
    # After normalization a valid unit is either 2 chars (SI, e.g. 'kB')
    # or 3 chars (NIST, e.g. 'KiB'); anything else is rejected.
    unit_class = None
    if len(unit) == 2:
        if system == NIST:
            # NIST interpretation requested: synthesize the 3-letter
            # form by capitalizing and inserting an 'i'.
            unit = capitalize_first(unit)
            unit = unit[0] + 'i' + unit[1:]
            unit_class = globals()[unit]
        else:
            # Default SI parsing. Edge case: SI 'thousand' is a
            # lower-case k; every other prefix is capitalized.
            if unit.startswith('K'):
                unit = unit.replace('K', 'k')
            elif not unit.startswith('k'):
                unit = capitalize_first(unit)
            if unit[0] in SI_PREFIXES:
                unit_class = globals()[unit]
    elif len(unit) == 3:
        unit = capitalize_first(unit)
        if unit[:2] in NIST_PREFIXES:
            unit_class = globals()[unit]
    else:
        raise ValueError("The unit %s is not a valid bitmath unit" % unit)
    if unit_class is None:
        # Recognized shape but unknown prefix.
        raise ValueError("The unit %s is not a valid bitmath unit" % unit)
    return unit_class(float(val))
<SYSTEM_TASK:>
Context manager for printing bitmath instances.
<END_TASK>
<USER_TASK:>
Description:
def format(fmt_str=None, plural=False, bestprefix=False):
    """Context manager for printing bitmath instances.

    ``fmt_str`` - a formatting mini-language compat formatting string. See
    the @properties (above) for a list of available items.

    ``plural`` - True enables printing instances with 's's if they're
    plural. False (default) prints them as singular (no trailing 's').

    ``bestprefix`` - True enables printing instances in their best
    human-readable representation. False, the default, prints instances
    using their current prefix unit. (NOTE: accepted but not consumed by
    this body — preserved for interface compatibility.)

    Fix over the previous version: restoration of the module-level format
    settings now happens in a ``finally`` block, so they are no longer
    leaked when the managed block raises.
    """
    if 'bitmath' not in globals():
        import bitmath
    if plural:
        orig_fmt_plural = bitmath.format_plural
        bitmath.format_plural = True
    if fmt_str:
        orig_fmt_str = bitmath.format_string
        bitmath.format_string = fmt_str
    try:
        yield
    finally:
        # Always restore the saved settings, even on exception.
        if plural:
            bitmath.format_plural = orig_fmt_plural
        if fmt_str:
            bitmath.format_string = orig_fmt_str
<SYSTEM_TASK:>
A command line interface to basic bitmath operations.
<END_TASK>
<USER_TASK:>
Description:
def cli_script_main(cli_args):
    """
    A command line interface to basic bitmath operations.

    Parses ``cli_args`` (a list of argv-style strings), converts each
    given size from the ``--from`` unit to the ``--to`` unit (or to the
    best human-readable prefix when ``--to`` is omitted), and returns the
    list of resulting bitmath instances.

    Fix: corrected the user-facing help-text typo "Defaultes" -> "Defaults".
    """
    choices = ALL_UNIT_TYPES
    parser = argparse.ArgumentParser(
        description='Converts from one type of size to another.')
    parser.add_argument('--from-stdin', default=False, action='store_true',
                        help='Reads number from stdin rather than the cli')
    parser.add_argument(
        '-f', '--from', choices=choices, nargs=1,
        type=str, dest='fromunit', default=['Byte'],
        help='Input type you are converting from. Defaults to Byte.')
    parser.add_argument(
        '-t', '--to', choices=choices, required=False, nargs=1, type=str,
        help=('Input type you are converting to. '
              'Attempts to detect best result if omitted.'), dest='tounit')
    parser.add_argument(
        'size', nargs='*', type=float,
        help='The number to convert.')
    args = parser.parse_args(cli_args)
    # Not sure how to cover this with tests, or if the functionality
    # will remain in this form long enough for it to make writing a
    # test worth the effort.
    if args.from_stdin:  # pragma: no cover
        args.size = [float(sys.stdin.readline()[:-1])]
    results = []
    for size in args.size:
        # Resolve the source unit class from the bitmath module by name.
        instance = getattr(__import__(
            'bitmath', fromlist=['True']), args.fromunit[0])(size)
        # If we have a unit provided then use it
        if args.tounit:
            result = getattr(instance, args.tounit[0])
        # Otherwise use the best_prefix call
        else:
            result = instance.best_prefix()
        results.append(result)
    return results
<SYSTEM_TASK:>
Setup basic parameters for this class.
<END_TASK>
<USER_TASK:>
Description:
def _do_setup(self):
"""Setup basic parameters for this class.
`base` is the numeric base which when raised to `power` is equivalent
to 1 unit of the corresponding prefix. I.e., base=2, power=10
represents 2^10, which is the NIST Binary Prefix for 1 Kibibyte.
Likewise, for the SI prefix classes `base` will be 10, and the `power`
for the Kilobyte is 3.
""" |
(self._base, self._power, self._name_singular, self._name_plural) = self._setup()
self._unit_value = self._base ** self._power |
<SYSTEM_TASK:>
Normalize the input value into the fundamental unit for this prefix
<END_TASK>
<USER_TASK:>
Description:
def _norm(self, value):
"""Normalize the input value into the fundamental unit for this prefix
type.
:param number value: The input value to be normalized
:raises ValueError: if the input value is not a type of real number
""" |
if isinstance(value, self.valid_types):
self._byte_value = value * self._unit_value
self._bit_value = self._byte_value * 8.0
else:
raise ValueError("Initialization value '%s' is of an invalid type: %s. "
"Must be one of %s" % (
value,
type(value),
", ".join(str(x) for x in self.valid_types))) |
<SYSTEM_TASK:>
The system of units used to measure an instance
<END_TASK>
<USER_TASK:>
Description:
def system(self):
    """The system of units used to measure an instance"""
    if self._base == 2:
        return "NIST"
    if self._base == 10:
        return "SI"
    # Only binary (NIST) and decimal (SI) bases are defined; anything
    # else indicates a malformed subclass.
    raise ValueError("Instances mathematical base is an unsupported value: %s" % (
        str(self._base)))
<SYSTEM_TASK:>
Factory function to return instances of `item` converted into a new
<END_TASK>
<USER_TASK:>
Description:
def from_other(cls, item):
    """Create an instance of ``cls`` from any other bitmath instance.

    Because this is a class method, it may be called from any bitmath
    class object without explicitly instantiating the class first.

    *Implicit Parameter:*

    * ``cls`` A bitmath class, implicitly set to the class of the
      instance object it is called on

    *User Supplied Parameter:*

    * ``item`` A :class:`bitmath.Bitmath` subclass instance

    *Example:*

    >>> import bitmath
    >>> kib = bitmath.KiB.from_other(bitmath.MiB(1))
    >>> print kib
    KiB(1024.0)
    """
    if not isinstance(item, Bitmath):
        raise ValueError("The provided items must be a valid bitmath class: %s" %
                         str(item.__class__))
    # The bit count is the lossless common denominator between units.
    return cls(bits=item.bits)
<SYSTEM_TASK:>
Return a representation of this instance formatted with user
<END_TASK>
<USER_TASK:>
Description:
def format(self, fmt):
    """Render this instance with a user-supplied format string.

    ``fmt`` may reference any of the exposed instance properties,
    e.g. ``"{value:.2f} {unit}"``.
    """
    # Expose every public property as a substitution field.
    attrs = ('base', 'bin', 'binary', 'bits', 'bytes', 'power', 'system',
             'unit', 'unit_plural', 'unit_singular', 'value')
    return fmt.format(**{name: getattr(self, name) for name in attrs})
<SYSTEM_TASK:>
Normalize the input value into the fundamental unit for this prefix
<END_TASK>
<USER_TASK:>
Description:
def _norm(self, value):
"""Normalize the input value into the fundamental unit for this prefix
type""" |
self._bit_value = value * self._unit_value
self._byte_value = self._bit_value / 8.0 |
<SYSTEM_TASK:>
Extract payload and command from frame.
<END_TASK>
<USER_TASK:>
Description:
def extract_from_frame(data):
    """Split a raw frame into its command and payload.

    Validates the declared length and the trailing CRC before decoding
    the two-byte command word. Raises :class:`PyVLXException` on any
    malformed frame.
    """
    if len(data) <= 4:
        raise PyVLXException("could_not_extract_from_frame_too_short", data=data)
    # First two bytes carry the big-endian length (command + payload).
    declared_length = data[0] * 256 + data[1] - 1
    if len(data) != declared_length + 3:
        raise PyVLXException("could_not_extract_from_frame_invalid_length", data=data, current_length=len(data), expected_length=declared_length + 3)
    crc = calc_crc(data[:-1])
    if crc != data[-1]:
        raise PyVLXException("could_not_extract_from_frame_invalid_crc", data=data, expected_crc=crc, current_crc=data[-1])
    try:
        command = Command(data[2] * 256 + data[3])
    except ValueError:
        raise PyVLXException("could_not_extract_from_frame_command", data=data)
    # Payload sits between the command word and the CRC byte.
    payload = data[4:-1]
    return command, payload
<SYSTEM_TASK:>
Merge metadata from left and right onto results.
<END_TASK>
<USER_TASK:>
Description:
def _merge_meta(left, right, result, clean=True):
    """Merge metadata from ``left`` and ``right`` onto ``result``.

    Used during class initialization, and by operators to merge
    metadata after creating a new instance but before returning it.
    ``result.meta`` is modified in-place.

    Parameters
    ----------
    left, right : number, `BaseSpectrum`, or `~astropy.modeling.models`
        Inputs of an operation.

    result : `BaseSpectrum`
        Output spectrum object.

    clean : bool
        Remove ``'header'`` and ``'expr'`` entries from inputs.
    """
    # _get_meta returns copies, so the clean-up below cannot affect
    # the operands themselves.
    left_meta = BaseSpectrum._get_meta(left)
    right_meta = BaseSpectrum._get_meta(right)

    # Drop noisy entries before merging:
    #   header = FITS header metadata
    #   expr = ASTROLIB PYSYNPHOT expression
    if clean:
        for d in (left_meta, right_meta):
            d.pop('header', None)
            d.pop('expr', None)

    merged = metadata.merge(left_meta, right_meta, metadata_conflicts='silent')
    result.meta = metadata.merge(result.meta, merged,
                                 metadata_conflicts='silent')
<SYSTEM_TASK:>
Process individual model parameter representing wavelength.
<END_TASK>
<USER_TASK:>
Description:
def _process_wave_param(self, pval):
    """Convert a model parameter holding wavelength into internal units."""
    # Spectral equivalencies allow wavelength/frequency/wavenumber input.
    target_unit = self._internal_wave_unit
    return self._process_generic_param(
        pval, target_unit, equivalencies=u.spectral())
<SYSTEM_TASK:>
Optimal wavelengths for sampling the spectrum or bandpass.
<END_TASK>
<USER_TASK:>
Description:
def waveset(self):
    """Optimal wavelengths for sampling the spectrum or bandpass."""
    w = get_waveset(self.model)
    if w is None:
        # Some analytic models have no natural sampling grid.
        return None
    utils.validate_wavelengths(w)
    return w * self._internal_wave_unit
<SYSTEM_TASK:>
Validate wavelengths for sampling.
<END_TASK>
<USER_TASK:>
Description:
def _validate_wavelengths(self, wave):
    """Return validated wavelengths for sampling.

    Falls back to ``self.waveset`` when ``wave`` is `None`; raises
    `~synphot.exceptions.SynphotError` if neither is available.
    """
    if wave is not None:
        w = self._process_wave_param(wave)
        utils.validate_wavelengths(w)
        return w * self._internal_wave_unit
    if self.waveset is None:
        raise exceptions.SynphotError(
            'self.waveset is undefined; '
            'Provide wavelengths for sampling.')
    return self.waveset
<SYSTEM_TASK:>
Perform integration.
<END_TASK>
<USER_TASK:>
Description:
def integrate(self, wavelengths=None, **kwargs):
    """Perform integration.

    This uses any analytical integral that the
    underlying model has (i.e., ``self.model.integral``).
    If unavailable, it uses the default fall-back integrator
    set in the ``default_integrator`` configuration item.

    If wavelengths are provided, flux or throughput is first resampled.
    This is useful when user wants to integrate at specific end points
    or use custom spacing; In that case, user can pass in desired
    sampling array generated with :func:`numpy.linspace`,
    :func:`numpy.logspace`, etc.
    If not provided, then `waveset` is used.

    Parameters
    ----------
    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for integration.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, `waveset` is used.

    kwargs : dict
        Optional keywords to ``__call__`` for sampling.

    Returns
    -------
    result : `~astropy.units.quantity.Quantity`
        Integrated result.

    Raises
    ------
    NotImplementedError
        Invalid default integrator.

    synphot.exceptions.SynphotError
        `waveset` is needed but undefined or cannot integrate
        natively in the given ``flux_unit``.
    """
    # Cannot integrate per Hz units natively across wavelength
    # without converting them to per Angstrom unit first, so
    # less misleading to just disallow that option for now.
    if 'flux_unit' in kwargs:
        self._validate_flux_unit(kwargs['flux_unit'], wav_only=True)
    x = self._validate_wavelengths(wavelengths)
    # TODO: When astropy.modeling.models supports this, need to
    # make sure that this actually works, and gives correct unit.
    # https://github.com/astropy/astropy/issues/5033
    # https://github.com/astropy/astropy/pull/5108
    try:
        m = self.model.integral
    except (AttributeError, NotImplementedError):
        if conf.default_integrator == 'trapezoid':
            # Numerical fall-back: sample the spectrum and apply the
            # trapezoid rule. abs() guards against negative area when
            # x happens to be in descending order.
            # NOTE(review): np.trapz is renamed np.trapezoid in
            # NumPy 2.0 — confirm supported NumPy versions.
            y = self(x, **kwargs)
            result = abs(np.trapz(y.value, x=x.value))
            result_unit = y.unit
        else:  # pragma: no cover
            raise NotImplementedError(
                'Analytic integral not available and default integrator '
                '{0} is not supported'.format(conf.default_integrator))
    else:  # pragma: no cover
        # Analytic path: evaluate the antiderivative at the end points.
        start = x[0].value
        stop = x[-1].value
        result = (m(stop) - m(start))
        result_unit = self._internal_flux_unit
    # Ensure final unit takes account of integration across wavelength
    if result_unit != units.THROUGHPUT:
        if result_unit == units.PHOTLAM:
            result_unit = u.photon / (u.cm**2 * u.s)
        elif result_unit == units.FLAM:
            result_unit = u.erg / (u.cm**2 * u.s)
        else:  # pragma: no cover
            raise NotImplementedError(
                'Integration of {0} is not supported'.format(result_unit))
    else:
        # Ideally flux can use this too but unfortunately this
        # operation results in confusing output unit for flux.
        result_unit *= self._internal_wave_unit
    return result * result_unit
<SYSTEM_TASK:>
Force the underlying model to extrapolate.
<END_TASK>
<USER_TASK:>
Description:
def force_extrapolation(self):
    """Force the underlying model to extrapolate.

    Useful when a source spectrum was created with non-default
    extrapolation behavior and the underlying empirical model should
    instead extrapolate based on its nearest point.

    .. note::

        This is only applicable to `~synphot.models.Empirical1D` model
        and should still work even if the source spectrum has been
        redshifted.

    Returns
    -------
    is_forced : bool
        `True` if the model is successfully forced to be extrapolated,
        else `False`.
    """
    # _model is used here so that the check also works on a
    # redshifted spectrum.
    is_forced = isinstance(self._model, Empirical1D)
    if is_forced:
        # A NaN fill value signals Empirical1D to extrapolate.
        self._model.fill_value = np.nan
    return is_forced
<SYSTEM_TASK:>
Taper the spectrum or bandpass.
<END_TASK>
<USER_TASK:>
Description:
def taper(self, wavelengths=None):
    """Taper the spectrum or bandpass.

    The wavelengths to use for the first and last points are
    calculated by using the same ratio as for the 2 interior points.

    Parameters
    ----------
    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for tapering.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, `waveset` is used.

    Returns
    -------
    sp : `BaseSpectrum`
        Tapered empirical spectrum or bandpass.
        ``self`` is returned if already tapered (e.g., box model).
    """
    x = self._validate_wavelengths(wavelengths)
    # Calculate new end points for tapering.
    # w1/x[0] == x[0]/x[1] and w2/x[-1] == x[-1]/x[-2], i.e. the new
    # points extend the grid by the same ratio as the end spacings.
    w1 = x[0] ** 2 / x[1]
    w2 = x[-1] ** 2 / x[-2]
    # Special handling for empirical data.
    # This is to be compatible with ASTROLIB PYSYNPHOT behavior.
    if isinstance(self._model, Empirical1D):
        y1 = self._model.lookup_table[0]
        y2 = self._model.lookup_table[-1]
    # Other models can just evaluate at new end points
    else:
        y1 = self(w1)
        y2 = self(w2)
    # Nothing to do; both ends are already zero-valued.
    if y1 == 0 and y2 == 0:
        return self  # Do we need a deepcopy here?
    y = self(x)
    # Pad a zero-valued sample beyond whichever end is non-zero.
    if y1 != 0:
        x = np.insert(x, 0, w1)
        y = np.insert(y, 0, 0.0)
    if y2 != 0:
        x = np.insert(x, x.size, w2)
        y = np.insert(y, y.size, 0.0)
    return self.__class__(Empirical1D, points=x, lookup_table=y)
<SYSTEM_TASK:>
Get sampled spectrum or bandpass in user units.
<END_TASK>
<USER_TASK:>
Description:
def _get_arrays(self, wavelengths, **kwargs):
    """Sample the spectrum or bandpass, echoing the caller's wavelength unit."""
    x = self._validate_wavelengths(wavelengths)
    y = self(x, **kwargs)
    # Return wavelengths in the same unit the caller supplied.
    if isinstance(wavelengths, u.Quantity):
        return x.to(wavelengths.unit, u.spectral()), y
    return x, y
<SYSTEM_TASK:>
Plot worker.
<END_TASK>
<USER_TASK:>
Description:
def _do_plot(x, y, title='', xlog=False, ylog=False,
             left=None, right=None, bottom=None, top=None,
             save_as=''):  # pragma: no cover
    """Render ``x`` vs ``y`` with matplotlib and optionally save it.

    Parameters
    ----------
    x, y : `~astropy.units.quantity.Quantity`
        Wavelength and flux/throughput to plot.

    kwargs
        See :func:`plot`.
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        # Plotting is optional; degrade gracefully without matplotlib.
        log.error('No matplotlib installation found; plotting disabled '
                  'as a result.')
        return

    fig, ax = plt.subplots()
    ax.plot(x, y)

    # Custom wavelength limits (None leaves a limit untouched).
    if left is not None or right is not None:
        ax.set_xlim(left=left, right=right)

    # Custom flux/throughput limits.
    if bottom is not None or top is not None:
        ax.set_ylim(bottom=bottom, top=top)

    # Label the x-axis according to the physical type of its unit.
    xu = x.unit
    if xu.physical_type == 'frequency':
        ax.set_xlabel('Frequency ({0})'.format(xu))
    else:
        ax.set_xlabel('Wavelength ({0})'.format(xu))

    yu = y.unit
    if yu is u.dimensionless_unscaled:
        ax.set_ylabel('Unitless')
    else:
        ax.set_ylabel('Flux ({0})'.format(yu))

    if title:
        ax.set_title(title)
    if xlog:
        ax.set_xscale('log')
    if ylog:
        ax.set_yscale('log')

    plt.draw()

    if save_as:
        plt.savefig(save_as)
        log.info('Plot saved as {0}'.format(save_as))
<SYSTEM_TASK:>
Process individual model parameter representing flux.
<END_TASK>
<USER_TASK:>
Description:
def _process_flux_param(self, pval, wave):
    """Convert a model parameter holding flux into internal flux units."""
    if not isinstance(pval, u.Quantity):
        # Unitless input is assumed to already be in the internal unit.
        return pval
    self._validate_flux_unit(pval.unit)
    return units.convert_flux(self._redshift_model(wave), pval,
                              self._internal_flux_unit).value
<SYSTEM_TASK:>
Model of the spectrum with given redshift.
<END_TASK>
<USER_TASK:>
Description:
def model(self):
    """Model of the spectrum with given redshift."""
    if self.z == 0:
        return self._model

    # wavelength: shift the input grid before evaluating the model.
    if self._internal_wave_unit.physical_type == 'length':
        rs = self._redshift_model.inverse
    # frequency or wavenumber
    # NOTE: This will never execute as long as internal wavelength
    # unit remains Angstrom.
    else:  # pragma: no cover
        rs = self._redshift_model

    if self.z_type == 'wavelength_only':
        return rs | self._model
    # conserve_flux: also rescale the evaluated flux.
    return rs | self._model | self._redshift_flux_model
<SYSTEM_TASK:>
Write the spectrum to a FITS file.
<END_TASK>
<USER_TASK:>
Description:
def to_fits(self, filename, wavelengths=None, flux_unit=None, area=None,
            vegaspec=None, **kwargs):
    """Write the sampled spectrum to ``filename`` as a FITS table.

    Parameters
    ----------
    filename : str
        Output filename.

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` is used.

    flux_unit : str or `~astropy.units.core.Unit` or `None`
        Flux is converted to this unit before written out.
        If not given, internal unit is used.

    area, vegaspec
        See :func:`~synphot.units.convert_flux`.

    kwargs : dict
        Keywords accepted by :func:`~synphot.specio.write_fits_spec`.
    """
    w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area,
                            vegaspec=vegaspec)

    # Standard keywords for the table extension header.
    bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'}
    if 'expr' in self.meta:
        bkeys['expr'] = (self.meta['expr'], 'synphot expression')

    # Merge into any caller-provided extension header.
    kwargs.setdefault('ext_header', {})
    kwargs['ext_header'].update(bkeys)

    specio.write_fits_spec(filename, w, y, **kwargs)
<SYSTEM_TASK:>
Create a spectrum from file.
<END_TASK>
<USER_TASK:>
Description:
def from_file(cls, filename, keep_neg=False, **kwargs):
    """Construct an empirical source spectrum from a file.

    Files ending in 'fits' or 'fit' are read as FITS; anything else
    is read as ASCII.

    Parameters
    ----------
    filename : str
        Spectrum filename.

    keep_neg : bool
        See `~synphot.models.Empirical1D`.

    kwargs : dict
        Keywords acceptable by
        :func:`~synphot.specio.read_fits_spec` (if FITS) or
        :func:`~synphot.specio.read_ascii_spec` (if ASCII).

    Returns
    -------
    sp : `SourceSpectrum`
        Empirical spectrum.
    """
    header, wave, flux = specio.read_spec(filename, **kwargs)
    meta = {'header': header}
    return cls(Empirical1D, points=wave, lookup_table=flux,
               keep_neg=keep_neg, meta=meta)
<SYSTEM_TASK:>
Creates a bandpass from file.
<END_TASK>
<USER_TASK:>
Description:
def from_file(cls, filename, **kwargs):
    """Construct an empirical bandpass from a file.

    Files ending in 'fits' or 'fit' are read as FITS; anything else
    is read as ASCII.

    Parameters
    ----------
    filename : str
        Bandpass filename.

    kwargs : dict
        Keywords acceptable by
        :func:`~synphot.specio.read_fits_spec` (if FITS) or
        :func:`~synphot.specio.read_ascii_spec` (if ASCII).

    Returns
    -------
    bp : `SpectralElement`
        Empirical bandpass.
    """
    kwargs.setdefault('flux_unit', cls._internal_flux_unit)
    # Bandpass FITS tables conventionally name their data column
    # THROUGHPUT rather than FLUX.
    if filename.endswith(('fits', 'fit')):
        kwargs.setdefault('flux_col', 'THROUGHPUT')
    header, wave, thru = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, points=wave, lookup_table=thru,
               keep_neg=True, meta={'header': header})
<SYSTEM_TASK:>
Calculated binned wavelength centers, edges, and flux.
<END_TASK>
<USER_TASK:>
Description:
def _init_bins(self, binset):
    """Calculate binned wavelength centers, edges, and flux.

    By contrast, the native waveset and flux should be considered
    samples of a continuous function.

    Thus, it makes sense to interpolate ``self.waveset`` and
    ``self(self.waveset)``, but not `binset` and `binflux`.

    Parameters
    ----------
    binset : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Centers of the wavelength bins. If `None`, the bandpass
        waveset is used, falling back to the source spectrum waveset.

    Raises
    ------
    synphot.exceptions.UndefinedBinset
        No binset given and neither component has a waveset.
    """
    if binset is None:
        if self.bandpass.waveset is not None:
            self._binset = self.bandpass.waveset
        elif self.spectrum.waveset is not None:
            self._binset = self.spectrum.waveset
            log.info('Bandpass waveset is undefined; '
                     'Using source spectrum waveset instead.')
        else:
            raise exceptions.UndefinedBinset(
                'Both source spectrum and bandpass have undefined '
                'waveset; Provide binset manually.')
    else:
        self._binset = self._validate_wavelengths(binset)
    # binset must be in ascending order for calcbinflux()
    # to work properly.
    if self._binset[0] > self._binset[-1]:
        self._binset = self._binset[::-1]
    self._bin_edges = binning.calculate_bin_edges(self._binset)
    # Merge bin edges and centers in with the natural waveset
    spwave = utils.merge_wavelengths(
        self._bin_edges.value, self._binset.value)
    if self.waveset is not None:
        spwave = utils.merge_wavelengths(spwave, self.waveset.value)
    # Throw out invalid wavelengths after merging.
    spwave = spwave[spwave > 0]
    # Compute indices associated to each endpoint.
    indices = np.searchsorted(spwave, self._bin_edges.value)
    i_beg = indices[:-1]
    i_end = indices[1:]
    # Prepare integration variables.
    # avflux is the trapezoid average flux between adjacent samples;
    # deltaw is the corresponding wavelength interval.
    flux = self(spwave)
    avflux = (flux.value[1:] + flux.value[:-1]) * 0.5
    deltaw = spwave[1:] - spwave[:-1]
    # Sum over each bin.
    binflux, intwave = binning.calcbinflux(
        self._binset.size, i_beg, i_end, avflux, deltaw)
    self._binflux = binflux * flux.unit
<SYSTEM_TASK:>
Sample binned observation without interpolation.
<END_TASK>
<USER_TASK:>
Description:
def sample_binned(self, wavelengths=None, flux_unit=None, **kwargs):
    """Sample binned observation without interpolation.

    To sample unbinned data, use ``__call__``.

    Parameters
    ----------
    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, `binset` is used.

    flux_unit : str or `~astropy.units.core.Unit` or `None`
        Flux is converted to this unit.
        If not given, internal unit is used.

    kwargs : dict
        Keywords acceptable by :func:`~synphot.units.convert_flux`.

    Returns
    -------
    flux : `~astropy.units.quantity.Quantity`
        Binned flux in given unit.

    Raises
    ------
    synphot.exceptions.InterpolationNotAllowed
        Interpolation of binned data is not allowed.
    """
    x = self._validate_binned_wavelengths(wavelengths)
    idx = np.searchsorted(self.binset, x)
    # Every requested wavelength must land exactly on a bin center;
    # binned data cannot be interpolated.
    if not np.allclose(self.binset[idx].value, x.value):
        raise exceptions.InterpolationNotAllowed(
            'Some or all wavelength values are not in binset.')
    y = self.binflux[idx]
    if flux_unit is None:
        return y
    return units.convert_flux(x, y, flux_unit, **kwargs)
<SYSTEM_TASK:>
Get binned observation in user units.
<END_TASK>
<USER_TASK:>
Description:
def _get_binned_arrays(self, wavelengths, flux_unit, area=None,
                       vegaspec=None):
    """Get binned observation in user units."""
    x = self._validate_binned_wavelengths(wavelengths)
    y = self.sample_binned(wavelengths=x, flux_unit=flux_unit, area=area,
                           vegaspec=vegaspec)
    # Echo the caller's wavelength unit when one was supplied.
    w = x
    if isinstance(wavelengths, u.Quantity):
        w = x.to(wavelengths.unit, u.spectral())
    return w, y
<SYSTEM_TASK:>
Calculate the wavelength range covered by the given number
<END_TASK>
<USER_TASK:>
Description:
def binned_waverange(self, cenwave, npix, **kwargs):
    """Calculate the wavelength range covered by the given number
    of pixels centered on the given central wavelengths of
    `binset`.

    Parameters
    ----------
    cenwave : float or `~astropy.units.quantity.Quantity`
        Desired central wavelength.
        If not a Quantity, assumed to be in Angstrom.

    npix : int
        Desired number of pixels, centered on ``cenwave``.

    kwargs : dict
        Keywords accepted by :func:`synphot.binning.wave_range`.

    Returns
    -------
    waverange : `~astropy.units.quantity.Quantity`
        Lower and upper limits of the wavelength range,
        in the unit of ``cenwave``.
    """
    # The whole calculation is carried out in the unit of cenwave.
    if not isinstance(cenwave, u.Quantity):
        cenwave = cenwave * self._internal_wave_unit
    bin_wave = units.validate_quantity(
        self.binset, cenwave.unit, equivalencies=u.spectral())
    limits = binning.wave_range(bin_wave.value, cenwave.value, npix, **kwargs)
    return limits * cenwave.unit
<SYSTEM_TASK:>
Calculate the number of pixels within the given wavelength
<END_TASK>
<USER_TASK:>
Description:
def binned_pixelrange(self, waverange, **kwargs):
    """Calculate the number of pixels within the given wavelength
    range and `binset`.

    Parameters
    ----------
    waverange : tuple of float or `~astropy.units.quantity.Quantity`
        Lower and upper limits of the desired wavelength range.
        If not a Quantity, assumed to be in Angstrom.

    kwargs : dict
        Keywords accepted by :func:`synphot.binning.pixel_range`.

    Returns
    -------
    npix : int
        Number of pixels.
    """
    # Normalize the limits to the internal wavelength unit first.
    limits = units.validate_quantity(
        waverange, self._internal_wave_unit, equivalencies=u.spectral())
    return binning.pixel_range(self.binset.value, limits.value, **kwargs)
<SYSTEM_TASK:>
Plot the observation.
<END_TASK>
<USER_TASK:>
Description:
def plot(self, binned=True, wavelengths=None, flux_unit=None, area=None,
         vegaspec=None, **kwargs):  # pragma: no cover
    """Plot the observation.

    .. note:: Uses ``matplotlib``.

    Parameters
    ----------
    binned : bool
        Plot data in native wavelengths if `False`.
        Else, plot binned data (default).

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` or `binset` is used, depending
        on ``binned``.

    flux_unit : str or `~astropy.units.core.Unit` or `None`
        Flux is converted to this unit for plotting.
        If not given, internal unit is used.

    area, vegaspec
        See :func:`~synphot.units.convert_flux`.

    kwargs : dict
        See :func:`synphot.spectrum.BaseSpectrum.plot`.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid inputs.
    """
    # Pick the sampler that matches the requested representation.
    if binned:
        w, y = self._get_binned_arrays(wavelengths, flux_unit, area=area,
                                       vegaspec=vegaspec)
    else:
        w, y = self._get_arrays(wavelengths, flux_unit=flux_unit,
                                area=area, vegaspec=vegaspec)
    self._do_plot(w, y, **kwargs)
<SYSTEM_TASK:>
Reduce the observation to an empirical source spectrum.
<END_TASK>
<USER_TASK:>
Description:
def as_spectrum(self, binned=True, wavelengths=None):
    """Reduce the observation to an empirical source spectrum.

    An observation is a complex object with some restrictions on its
    capabilities. At times it is useful to work with it as a simple
    object that is easier to manipulate and takes up less memory.
    This is also handy for writing an observation out to a FITS file
    as a sampled spectrum.

    Parameters
    ----------
    binned : bool
        Write out data in native wavelengths if `False`.
        Else, write binned data (default).

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` or `binset` is used, depending
        on ``binned``.

    Returns
    -------
    sp : `~synphot.spectrum.SourceSpectrum`
        Empirical source spectrum.
    """
    if binned:
        w, y = self._get_binned_arrays(
            wavelengths, self._internal_flux_unit)
    else:
        w, y = self._get_arrays(
            wavelengths, flux_unit=self._internal_flux_unit)
    # Record how the samples were produced for provenance.
    header = {'observation': str(self), 'binned': binned}
    return SourceSpectrum(Empirical1D, points=w, lookup_table=y,
                          meta={'header': header})
<SYSTEM_TASK:>
Start. Sending and waiting for answer.
<END_TASK>
<USER_TASK:>
Description:
async def do_api_call(self):
    """Send the request frame and wait for its answer (or timeout).

    Registers a temporary frame-received callback, sends the frame,
    then blocks until ``response_received_or_timeout`` is set either
    by the response handler or by the timeout. The callback is always
    unregistered afterwards, even if sending or waiting raises.
    """
    self.pyvlx.connection.register_frame_received_cb(
        self.response_rec_callback)
    try:
        await self.send_frame()
        await self.start_timeout()
        await self.response_received_or_timeout.wait()
        await self.stop_timeout()
    finally:
        # Always remove the callback so a failed call does not leave a
        # stale handler registered on the connection.
        self.pyvlx.connection.unregister_frame_received_cb(
            self.response_rec_callback)
<SYSTEM_TASK:>
Load devices and scenes, run first scene.
<END_TASK>
<USER_TASK:>
Description:
async def main():
    """Load devices and scenes, run first scene."""
    # Connection settings (host, password, ...) come from the YAML file.
    pyvlx = PyVLX('pyvlx.yaml')
    # Alternative:
    # pyvlx = PyVLX(host="192.168.2.127", password="velux123", timeout=60)
    await pyvlx.load_devices()
    # Devices are addressable both by index and by name.
    print(pyvlx.devices[1])
    print(pyvlx.devices['Fenster 4'])
    await pyvlx.load_scenes()
    # Scenes are addressable both by index and by name, too.
    print(pyvlx.scenes[0])
    print(pyvlx.scenes['Bath Closed'])
    # opening/ closing windows by running scenes, yay!
    await pyvlx.scenes[1].run()
    await pyvlx.disconnect()
<SYSTEM_TASK:>
Set switch to desired state.
<END_TASK>
<USER_TASK:>
Description:
async def set_state(self, parameter):
    """Drive the switch to the desired state and record it.

    Raises :class:`PyVLXException` if the gateway rejects the command.
    """
    command = CommandSend(
        pyvlx=self.pyvlx, node_id=self.node_id, parameter=parameter)
    await command.do_api_call()
    if not command.success:
        raise PyVLXException("Unable to send command")
    # Only remember the new state after the gateway confirmed it.
    self.parameter = parameter
    await self.after_update()
<SYSTEM_TASK:>
Madau 1995 extinction for a galaxy at given redshift.
<END_TASK>
<USER_TASK:>
Description:
def etau_madau(wave, z, **kwargs):
    """Madau 1995 extinction for a galaxy at given redshift.

    This is the Lyman-alpha prescription from the photo-z code BPZ.

    The Lyman-alpha forest approximately has an effective
    "throughput" which is a function of redshift and
    rest-frame wavelength.

    One would multiply the SEDs by this factor before
    passing it through an instrument filter.

    This approximation is from Footnote 3 of
    :ref:`Madau et al. (1995) <synphot-ref-madau1995>`.
    This is claimed accurate to 5%.
    The scatter in this factor (due to different lines of sight)
    is huge, as shown in Madau's Fig. 3 (top panel);
    The figure's bottom panel shows a redshifted version of the
    "exact" prescription.

    Parameters
    ----------
    wave : array-like or `~astropy.units.quantity.Quantity`
        Redshifted wavelength values.
        Non-redshifted wavelength is ``wave / (1 + z)``.

    z : number
        Redshift.

    kwargs : dict
        Equivalencies for unit conversion, see
        :func:`~synphot.units.validate_quantity`.

    Returns
    -------
    extcurve : `ExtinctionCurve`
        Extinction curve to apply to the redshifted spectrum.
    """
    if not isinstance(z, numbers.Real):
        raise exceptions.SynphotError(
            'Redshift must be a real scalar number.')
    if np.isscalar(wave) or len(wave) <= 1:
        raise exceptions.SynphotError('Wavelength has too few data points')
    wave = units.validate_quantity(wave, u.AA, **kwargs).value
    # Lyman limit (Angstrom).
    ll = 912.0
    # Line-blanketing coefficients and rest wavelengths of the first
    # four Lyman-series lines (alpha through delta) from Madau (1995).
    c = np.array([3.6e-3, 1.7e-3, 1.2e-3, 9.3e-4])
    # NOTE: np.float was removed in NumPy 1.24; the builtin float maps
    # to the same float64 dtype.
    el = np.array([1216, 1026, 973, 950], dtype=float)
    tau = np.zeros_like(wave, dtype=float)
    xe = 1.0 + z
    # Lyman series
    for i in range(len(el)):
        tau = np.where(wave <= el[i] * xe,
                       tau + c[i] * (wave / el[i]) ** 3.46,
                       tau)
    # Photoelectric absorption
    xc = wave / ll
    xc3 = xc ** 3
    tau = np.where(wave <= ll * xe,
                   (tau + 0.25 * xc3 * (xe ** 0.46 - xc ** 0.46) +
                    9.4 * xc ** 1.5 * (xe ** 0.18 - xc ** 0.18) -
                    0.7 * xc3 * (xc ** (-1.32) - xe ** (-1.32)) -
                    0.023 * (xe ** 1.68 - xc ** 1.68)),
                   tau)
    # Huge optical depths would overflow exp(); treat as zero throughput.
    thru = np.where(tau > 700., 0., np.exp(-tau))
    meta = {'descrip': 'Madau 1995 extinction for z={0}'.format(z)}
    return ExtinctionCurve(ExtinctionModel1D, points=wave, lookup_table=thru,
                           meta=meta)
<SYSTEM_TASK:>
Generate extinction curve.
<END_TASK>
<USER_TASK:>
Description:
def extinction_curve(self, ebv, wavelengths=None):
    """Generate extinction curve.

    .. math::

        A(V) = R(V) \\; \\times \\; E(B-V)

        THRU = 10^{-0.4 \\; A(V)}

    Parameters
    ----------
    ebv : float or `~astropy.units.quantity.Quantity`
        :math:`E(B-V)` value in magnitude.

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` is used.

    Returns
    -------
    extcurve : `ExtinctionCurve`
        Empirical extinction curve.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid input.
    """
    # Accept a magnitude Quantity or any plain real number.
    if isinstance(ebv, u.Quantity) and ebv.unit.decompose() == u.mag:
        ebv = ebv.value
    elif not isinstance(ebv, numbers.Real):
        raise exceptions.SynphotError('E(B-V)={0} is invalid.'.format(ebv))

    wave_val = self._validate_wavelengths(wavelengths).value
    # THRU = 10 ** (-0.4 * R(V) * E(B-V))
    thru = 10 ** (-0.4 * self(wave_val).value * ebv)

    header = {
        'E(B-V)': ebv,
        'ReddeningLaw': self.meta.get('expr', 'unknown')}
    return ExtinctionCurve(ExtinctionModel1D, points=wave_val,
                           lookup_table=thru, meta={'header': header})
<SYSTEM_TASK:>
Write the reddening law to a FITS file.
<END_TASK>
<USER_TASK:>
Description:
def to_fits(self, filename, wavelengths=None, **kwargs):
    """Write the reddening law to a FITS file.

    :math:`R(V)` column is automatically named 'Av/E(B-V)'.

    Parameters
    ----------
    filename : str
        Output filename.

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` is used.

    kwargs : dict
        Keywords accepted by :func:`~synphot.specio.write_fits_spec`.
    """
    w, y = self._get_arrays(wavelengths)

    kwargs['flux_col'] = 'Av/E(B-V)'
    kwargs['flux_unit'] = self._internal_flux_unit

    # Unlike spectra, R(V) curves keep their zero values by default;
    # the caller may still opt in to trimming/padding.
    kwargs.setdefault('pad_zero_ends', False)
    kwargs.setdefault('trim_zero', False)

    # Standard keywords for the table extension header.
    bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'}
    if 'expr' in self.meta:
        bkeys['expr'] = (self.meta['expr'], 'synphot expression')
    kwargs.setdefault('ext_header', {})
    kwargs['ext_header'].update(bkeys)

    specio.write_fits_spec(filename, w, y, **kwargs)
<SYSTEM_TASK:>
Create a reddening law from file.
<END_TASK>
<USER_TASK:>
Description:
def from_file(cls, filename, **kwargs):
    """Construct an empirical reddening law from a file.

    Files ending in 'fits' or 'fit' are read as FITS; anything else
    is read as ASCII.

    Parameters
    ----------
    filename : str
        Reddening law filename.

    kwargs : dict
        Keywords acceptable by
        :func:`~synphot.specio.read_fits_spec` (if FITS) or
        :func:`~synphot.specio.read_ascii_spec` (if ASCII).

    Returns
    -------
    redlaw : `ReddeningLaw`
        Empirical reddening law.
    """
    kwargs.setdefault('flux_unit', cls._internal_flux_unit)
    # Reddening-law FITS tables conventionally store R(V) in a column
    # named 'Av/E(B-V)'.
    if filename.endswith(('fits', 'fit')):
        kwargs.setdefault('flux_col', 'Av/E(B-V)')
    header, wave, rvs = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, points=wave, lookup_table=rvs,
               meta={'header': header})
<SYSTEM_TASK:>
Build raw bytes from command and payload.
<END_TASK>
<USER_TASK:>
Description:
def build_frame(command, payload):
    """Serialize command and payload into raw frame bytes.

    Layout: 0x00, length byte, big-endian command word, payload, CRC.
    The length counts the command word, the payload, and the CRC byte.
    """
    frame = struct.pack(">BBH", 0, 2 + len(payload) + 1, command.value)
    frame += payload
    # CRC covers everything that precedes it.
    return frame + struct.pack("B", calc_crc(frame))
<SYSTEM_TASK:>
Run scene.
<END_TASK>
<USER_TASK:>
Description:
async def run(self, wait_for_completion=True):
    """Run scene.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.
    """
    request = ActivateScene(
        pyvlx=self.pyvlx,
        wait_for_completion=wait_for_completion,
        scene_id=self.scene_id)
    await request.do_api_call()
    if not request.success:
        raise PyVLXException("Unable to activate scene")
<SYSTEM_TASK:>
Parse alias array from raw bytes.
<END_TASK>
<USER_TASK:>
Description:
def parse_raw(self, raw):
    """Parse alias array from raw bytes.

    Expects exactly 21 bytes: a count byte followed by up to five
    4-byte (address, value) alias pairs.
    """
    if not isinstance(raw, bytes):
        raise PyVLXException("AliasArray::invalid_type_if_raw", type_raw=type(raw))
    if len(raw) != 21:
        raise PyVLXException("AliasArray::invalid_size", size=len(raw))
    count = raw[0]
    if count > 5:
        raise PyVLXException("AliasArray::invalid_nbr_of_alias", nbr_of_alias=count)
    for i in range(count):
        offset = i * 4 + 1
        # Each entry is two bytes of alias address + two bytes of value.
        self.alias_array_.append(
            (raw[offset:offset + 2], raw[offset + 2:offset + 4]))
<SYSTEM_TASK:>
Get the next slip packet from raw data.
<END_TASK>
<USER_TASK:>
Description:
def get_next_slip(raw):
    """Pop the first SLIP packet off ``raw``.

    Returns the decoded packet plus the remaining data stream;
    the packet is `None` when ``raw`` does not start with a SLIP frame.
    """
    if not is_slip(raw):
        return None, raw
    # Position of the terminating SLIP_END relative to the frame body.
    end = raw[1:].index(SLIP_END)
    return decode(raw[1:end + 1]), raw[end + 2:]
<SYSTEM_TASK:>
Python implementation of ``calcbinflux``.
<END_TASK>
<USER_TASK:>
Description:
def _slow_calcbinflux(len_binwave, i_beg, i_end, avflux, deltaw):
"""Python implementation of ``calcbinflux``.
This is only used if ``synphot.synphot_utils`` C-extension
import fails.
See docstrings.py
""" |
binflux = np.empty(shape=(len_binwave, ), dtype=np.float64)
intwave = np.empty(shape=(len_binwave, ), dtype=np.float64)
# Note that, like all Python striding, the range over which
# we integrate is [first:last).
for i in range(len(i_beg)):
first = i_beg[i]
last = i_end[i]
cur_dw = deltaw[first:last]
intwave[i] = cur_dw.sum()
binflux[i] = np.sum(avflux[first:last] * cur_dw) / intwave[i]
return binflux, intwave |
<SYSTEM_TASK:>
Calculate the number of pixels within the given wavelength range
<END_TASK>
<USER_TASK:>
Description:
def pixel_range(bins, waverange, mode='round'):
    """Calculate the number of pixels within the given wavelength range
    and the given bins.

    Parameters
    ----------
    bins : array-like
        Wavelengths at bin centers, each centered on a pixel.
        Must be 1D array.
        NOTE(review): must support ``.mean()`` and ``.searchsorted()``,
        i.e. a `numpy.ndarray` — confirm against callers.

    waverange : tuple of float
        Lower and upper limits of the desired wavelength range,
        in the same unit as ``bins``.

    mode : {'round', 'min', 'max', 'none'}
        Determines how the pixels at the edges of the wavelength range
        are handled. All the options, except 'none', will return
        an integer number of pixels:

        * 'round' - Wavelength range edges that fall in the middle
          of a pixel are counted if more than half of the pixel is
          within the given wavelength range. Edges that fall in
          the center of a pixel are rounded to the nearest pixel
          edge. This is the default.
        * 'min' - Only pixels wholly within the given wavelength
          range are counted.
        * 'max' - Pixels that are within the given wavelength range
          by any margin are counted.
        * 'none' - The exact number of encompassed pixels,
          including fractional pixels, is returned.

    Returns
    -------
    npix : number
        Number of pixels.

    Raises
    ------
    synphot.exceptions.OverlapError
        Given wavelength range exceeds the bounds of given bins.

    synphot.exceptions.SynphotError
        Invalid mode.

    """
    mode = mode.lower()
    if mode not in ('round', 'min', 'max', 'none'):
        raise exceptions.SynphotError(
            'mode={0} is invalid, must be "round", "min", "max", '
            'or "none".'.format(mode))

    # Normalize the range so that wave1 <= wave2, regardless of input order.
    if waverange[0] < waverange[-1]:
        wave1 = waverange[0]
        wave2 = waverange[-1]
    else:
        wave1 = waverange[-1]
        wave2 = waverange[0]

    # Bin values must be in ascending order.
    if bins[0] > bins[-1]:
        bins = bins[::-1]

    # Wavelength range must be within bins.
    # Outer edges extrapolate half a bin width beyond the first/last centers.
    minwave = bins[0] - (bins[0:2].mean() - bins[0])
    maxwave = bins[-1] + (bins[-1] - bins[-2:].mean())
    if wave1 < minwave or wave2 > maxwave:
        raise exceptions.OverlapError(
            'Wavelength range ({0}, {1}) is out of bounds of bins '
            '(min={2}, max={3}).'.format(wave1, wave2, minwave, maxwave))

    # Degenerate range encloses zero pixels.
    if wave1 == wave2:
        return 0

    # 'round' uses side='right' so that an edge exactly on a bin center
    # is rounded consistently to the nearest pixel edge; the other modes
    # use side='left' and then adjust by the fractional position below.
    if mode == 'round':
        ind1 = bins.searchsorted(wave1, side='right')
        ind2 = bins.searchsorted(wave2, side='right')
    else:
        ind1 = bins.searchsorted(wave1, side='left')
        ind2 = bins.searchsorted(wave2, side='left')

    if mode == 'round':
        npix = ind2 - ind1

    elif mode == 'min':
        # for ind1, figure out if pixel ind1 is wholly included or not.
        # do this by figuring out where wave1 is between ind1 and ind1-1.
        frac = (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])

        if frac < 0.5:
            # ind1 is only partially included
            ind1 += 1

        # similar but reversed procedure for ind2
        frac = (wave2 - bins[ind2 - 1]) / (bins[ind2] - bins[ind2 - 1])

        if frac < 0.5:
            # ind2 is only partially included
            ind2 -= 1

        npix = ind2 - ind1

    elif mode == 'max':
        # for ind1, figure out if pixel ind1-1 is partially included or not.
        # do this by figuring out where wave1 is between ind1 and ind1-1.
        frac = (wave1 - bins[ind1 - 1]) / (bins[ind1] - bins[ind1 - 1])

        if frac < 0.5:
            # ind1 is partially included
            ind1 -= 1

        # similar but reversed procedure for ind2
        frac = (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])

        if frac < 0.5:
            # ind2 is partially included
            ind2 += 1

        npix = ind2 - ind1

    else:  # mode == 'none'
        # calculate fractional indices
        frac1 = ind1 - (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])
        frac2 = ind2 - (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])
        npix = frac2 - frac1

    return npix
<SYSTEM_TASK:>
Read scene from configuration.
<END_TASK>
<USER_TASK:>
Description:
def from_config(cls, pyvlx, item):
    """Read scene from configuration."""
    scene_name = item['name']
    scene_id = item['id']
    return cls(pyvlx, scene_id, scene_name)
<SYSTEM_TASK:>
Creates a thermal spectral element from file.
<END_TASK>
<USER_TASK:>
Description:
def from_file(cls, filename, temperature_key='DEFT',
              beamfill_key='BEAMFILL', **kwargs):
    """Creates a thermal spectral element from file.

    .. note::

        Only FITS format is supported.

    Parameters
    ----------
    filename : str
        Thermal spectral element filename.

    temperature_key, beamfill_key : str
        Keywords in FITS *table extension* that store temperature
        (in Kelvin) and beam filling factor values.
        Beam filling factor is set to 1 if its keyword is missing.

    kwargs : dict
        Keywords acceptable by :func:`~synphot.specio.read_fits_spec`.

    Returns
    -------
    th : `ThermalSpectralElement`
        Empirical thermal spectral element.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid inputs.

    """
    if not (filename.endswith('fits') or filename.endswith('fit')):
        raise exceptions.SynphotError('Only FITS format is supported.')

    # Extra info from table header
    ext = kwargs.get('ext', 1)
    tab_hdr = fits.getheader(filename, ext=ext)

    temperature = tab_hdr.get(temperature_key)
    if temperature is None:
        raise exceptions.SynphotError(
            'Missing {0} keyword.'.format(temperature_key))

    # Bug fix: honor the caller-supplied ``beamfill_key`` instead of the
    # previously hard-coded 'BEAMFILL' string (the parameter was ignored).
    beam_fill_factor = tab_hdr.get(beamfill_key, 1)

    if 'flux_unit' not in kwargs:
        kwargs['flux_unit'] = cls._internal_flux_unit

    if 'flux_col' not in kwargs:
        kwargs['flux_col'] = 'EMISSIVITY'

    header, wavelengths, em = specio.read_spec(filename, **kwargs)

    return cls(
        Empirical1D, temperature, beam_fill_factor=beam_fill_factor,
        points=wavelengths, lookup_table=em, meta={'header': header})
<SYSTEM_TASK:>
Test if value can be rendered out of int.
<END_TASK>
<USER_TASK:>
Description:
def is_valid_int(value):
    """Test if value can be rendered out of int."""
    # The 0..MAX range includes the ON and OFF values.
    return (
        0 <= value <= Parameter.MAX
        or value == Parameter.UNKNOWN_VALUE
        or value == Parameter.CURRENT_POSITION
    )
<SYSTEM_TASK:>
Test if raw packets are valid for initialization of Position.
<END_TASK>
<USER_TASK:>
Description:
def from_raw(raw):
    """Test if raw packets are valid for initialization of Position."""
    if not isinstance(raw, bytes):
        raise PyVLXException("Position::raw_must_be_bytes")
    if len(raw) != 2:
        raise PyVLXException("Position::raw_must_be_two_bytes")
    # Sentinel values are exempt from the MAX bound check.
    is_sentinel = (raw == Position.from_int(Position.CURRENT_POSITION) or
                   raw == Position.from_int(Position.UNKNOWN_VALUE))
    if not is_sentinel and Position.to_int(raw) > Position.MAX:
        raise PyVLXException("position::raw_exceed_limit", raw=raw)
    return raw
<SYSTEM_TASK:>
Return product as human readable string.
<END_TASK>
<USER_TASK:>
Description:
def product(self):
    """Return product as human readable string."""
    # Only the KLF 200 gateway (group 14, type 3) is known by name.
    if (self.product_group, self.product_type) == (14, 3):
        return "KLF 200"
    return "Unknown Product: {}:{}".format(self.product_group, self.product_type)
<SYSTEM_TASK:>
Change name of node.
<END_TASK>
<USER_TASK:>
Description:
async def rename(self, name):
    """Change name of node."""
    api_call = SetNodeName(pyvlx=self.pyvlx, node_id=self.node_id, name=name)
    await api_call.do_api_call()
    if not api_call.success:
        raise PyVLXException("Unable to rename node")
    # Update the local copy only after the gateway accepted the change.
    self.name = name
<SYSTEM_TASK:>
Set window to desired position.
<END_TASK>
<USER_TASK:>
Description:
async def set_position(self, position, wait_for_completion=True):
    """Set window to desired position.

    Parameters:
        * position: Position object containing the target position.
        * wait_for_completion: If set, function will return
          after device has reached target position.

    """
    api_call = CommandSend(
        pyvlx=self.pyvlx,
        node_id=self.node_id,
        parameter=position,
        wait_for_completion=wait_for_completion)
    await api_call.do_api_call()
    if not api_call.success:
        raise PyVLXException("Unable to send command")
    await self.after_update()
<SYSTEM_TASK:>
Open window.
<END_TASK>
<USER_TASK:>
Description:
async def open(self, wait_for_completion=True):
    """Open window.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.

    """
    fully_open = Position(position_percent=0)
    await self.set_position(fully_open, wait_for_completion=wait_for_completion)
<SYSTEM_TASK:>
Close window.
<END_TASK>
<USER_TASK:>
Description:
async def close(self, wait_for_completion=True):
    """Close window.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.

    """
    fully_closed = Position(position_percent=100)
    await self.set_position(fully_closed, wait_for_completion=wait_for_completion)
<SYSTEM_TASK:>
Stop window.
<END_TASK>
<USER_TASK:>
Description:
async def stop(self, wait_for_completion=True):
    """Stop window.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.

    """
    # Sending the current position halts any in-flight movement.
    await self.set_position(CurrentPosition(),
                            wait_for_completion=wait_for_completion)
<SYSTEM_TASK:>
Return sampleset of a model or `None` if undefined.
<END_TASK>
<USER_TASK:>
Description:
def _get_sampleset(model):
    """Return sampleset of a model or `None` if undefined.

    Model could be a real model or evaluated sampleset."""
    if not isinstance(model, Model):
        return model  # Already a sampleset
    if hasattr(model, 'sampleset'):
        return model.sampleset()
    return None
<SYSTEM_TASK:>
One of the models is either ``RedshiftScaleFactor`` or ``Scale``.
<END_TASK>
<USER_TASK:>
Description:
def _shift_wavelengths(model1, model2):
    """One of the models is either ``RedshiftScaleFactor`` or ``Scale``.

    Possible combos::

        RedshiftScaleFactor | Model
        Scale | Model
        Model | Scale

    """
    if isinstance(model1, _models.RedshiftScaleFactor):
        # De-redshift the second model's sampleset, when it has one.
        sampleset = _get_sampleset(model2)
        if sampleset is None:
            return sampleset
        return model1.inverse(sampleset)
    if isinstance(model1, _models.Scale):
        return _get_sampleset(model2)
    return _get_sampleset(model1)
<SYSTEM_TASK:>
Get optimal wavelengths for sampling a given model.
<END_TASK>
<USER_TASK:>
Description:
def get_waveset(model):
    """Get optimal wavelengths for sampling a given model.

    Parameters
    ----------
    model : `~astropy.modeling.Model`
        Model.

    Returns
    -------
    waveset : array-like or `None`
        Optimal wavelengths. `None` if undefined.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid model.

    """
    if not isinstance(model, Model):
        raise SynphotError('{0} is not a model.'.format(model))
    if isinstance(model, _CompoundModel):
        # Compound models combine their submodels' samplesets via the
        # registered waveset operators.
        return model._tree.evaluate(WAVESET_OPERATORS, getter=None)
    return _get_sampleset(model)
<SYSTEM_TASK:>
Peak wavelength in Angstrom when the curve is expressed as
<END_TASK>
<USER_TASK:>
Description:
def lambda_max(self):
    """Peak wavelength in Angstrom when the curve is expressed as
    power density."""
    # Wien displacement law: lambda_peak = b / T, converted from m to AA.
    peak = (const.b_wien.value / self.temperature) * u.m
    return peak.to(u.AA).value
<SYSTEM_TASK:>
Calculate sampleset for each model.
<END_TASK>
<USER_TASK:>
Description:
def _calc_sampleset(w1, w2, step, minimal):
"""Calculate sampleset for each model.""" |
if minimal:
arr = [w1 - step, w1, w2, w2 + step]
else:
arr = np.arange(w1 - step, w2 + step + step, step)
return arr |
<SYSTEM_TASK:>
Return flux in PHOTLAM. Assume input wavelength is in Angstrom.
<END_TASK>
<USER_TASK:>
Description:
def evaluate(self, x, *args):
    """Return flux in PHOTLAM. Assume input wavelength is in Angstrom."""
    scaled = x / self.x_0
    # Power law in the source flux unit, then converted to PHOTLAM.
    raw_flux = (self.amplitude * scaled ** (-self.alpha)) * self._flux_unit
    return units.convert_flux(x, raw_flux, units.PHOTLAM).value
<SYSTEM_TASK:>
From the given request, add a snippet to the page.
<END_TASK>
<USER_TASK:>
Description:
def get_payment_request(self, cart, request):
    """
    From the given request, add a snippet to the page.
    """
    try:
        self.charge(cart, request)
        thank_you_url = OrderModel.objects.get_latest_url()
    except (KeyError, stripe.error.StripeError) as err:
        # Surface charge failures as a form validation error.
        raise ValidationError(err)
    return 'window.location.href="{}";'.format(thank_you_url)
<SYSTEM_TASK:>
Refund the payment using Stripe's refunding API.
<END_TASK>
<USER_TASK:>
Description:
def refund_payment(self):
    """
    Refund the payment using Stripe's refunding API.
    """
    Money = MoneyMaker(self.currency)
    # Only payments made through Stripe are refunded here; Stripe charge
    # ids start with 'ch_'. Other PSPs are delegated to super() below.
    filter_kwargs = {
        'transaction_id__startswith': 'ch_',
        'payment_method': StripePayment.namespace,
    }
    for payment in self.orderpayment_set.filter(**filter_kwargs):
        refund = stripe.Refund.create(charge=payment.transaction_id)
        if refund['status'] == 'succeeded':
            # Stripe reports the amount in currency subunits; convert back
            # to major units before recording the (negative) payment.
            amount = Money(refund['amount']) / Money.subunits
            OrderPayment.objects.create(order=self, amount=-amount, transaction_id=refund['id'],
                                        payment_method=StripePayment.namespace)

    del self.amount_paid  # to invalidate the cache
    if self.amount_paid:
        # proceed with other payment service providers
        super(OrderWorkflowMixin, self).refund_payment()
<SYSTEM_TASK:>
Creating this service is handled asynchronously so this method will
<END_TASK>
<USER_TASK:>
Description:
def _create_in_progress(self):
"""
Creating this service is handled asynchronously so this method will
simply check if the create is in progress. If it is not in progress,
we could probably infer it either failed or succeeded.
""" |
instance = self.service.service.get_instance(self.service.name)
if (instance['last_operation']['state'] == 'in progress' and
instance['last_operation']['type'] == 'create'):
return True
return False |
<SYSTEM_TASK:>
Create an instance of the Predix Cache Service with they typical
<END_TASK>
<USER_TASK:>
Description:
def create(self, max_wait=180, **kwargs):
    """
    Create an instance of the Predix Cache Service with the typical
    starting settings.

    :param max_wait: service is created asynchronously, so will only wait
        this number of seconds before giving up.
    """
    # Will need to wait for the service to be provisioned before can add
    # service keys and get env details.
    # BUG FIX: ``async`` became a reserved keyword in Python 3.7, so it
    # cannot be written as a literal keyword argument any more; pass the
    # identically-named keyword through a dict unpacking instead.
    self.service.create(**{'async': True}, create_keys=False)

    # Poll once a second until the asynchronous create finishes or we
    # exhaust the allotted wait time.
    while self._create_in_progress() and max_wait > 0:
        time.sleep(1)
        max_wait -= 1

    # Now get the service env (via service keys)
    cfg = self.service._get_service_config()
    self.service.settings.save(cfg)

    # Export connection details for immediate use.
    host = predix.config.get_env_key(self.use_class, 'host')
    os.environ[host] = self.service.settings.data['host']

    password = predix.config.get_env_key(self.use_class, 'password')
    os.environ[password] = self.service.settings.data['password']

    port = predix.config.get_env_key(self.use_class, 'port')
    os.environ[port] = str(self.service.settings.data['port'])
<SYSTEM_TASK:>
Will return the uri for an existing instance.
<END_TASK>
<USER_TASK:>
Description:
def _get_uri(self):
"""
Will return the uri for an existing instance.
""" |
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['uri'] |
<SYSTEM_TASK:>
Will return the zone id for an existing instance.
<END_TASK>
<USER_TASK:>
Description:
def _get_zone_id(self):
"""
Will return the zone id for an existing instance.
""" |
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['zone']['http-header-value'] |
<SYSTEM_TASK:>
Create an instance of the Access Control Service with the typical
<END_TASK>
<USER_TASK:>
Description:
def create(self):
    """
    Create an instance of the Access Control Service with the typical
    starting settings.
    """
    self.service.create()

    # Set environment variables for immediate use
    uri = self._get_uri()
    zone_id = self._get_zone_id()
    predix.config.set_env_value(self.use_class, 'uri', uri)
    predix.config.set_env_value(self.use_class, 'zone_id', zone_id)
<SYSTEM_TASK:>
Grant the given client id all the scopes and authorities
<END_TASK>
<USER_TASK:>
Description:
def grant_client(self, client_id):
    """
    Grant the given client id all the scopes and authorities
    needed to work with the access control service.
    """
    zone = self.service.settings.data['zone']['oauth-scope']

    scopes = ['openid', zone,
              'acs.policies.read', 'acs.attributes.read',
              'acs.policies.write', 'acs.attributes.write']
    authorities = ['uaa.resource', zone,
                   'acs.policies.read', 'acs.policies.write',
                   'acs.attributes.read', 'acs.attributes.write']

    uaac = self.service.uaa.uaac
    uaac.update_client_grants(client_id, scope=scopes,
                              authorities=authorities)
    return uaac.get_client(client_id)
<SYSTEM_TASK:>
Generic POST with headers
<END_TASK>
<USER_TASK:>
Description:
def post(self, path, data):
    """
    Generic POST with headers
    """
    uri = self.config.get_target() + path
    headers = self._post_headers()

    logging.debug("URI=POST " + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + str(data))

    response = self.session.post(uri, headers=headers,
                                 data=json.dumps(data))

    # 200/201/202 are all success for create-style endpoints.
    if response.status_code in (200, 201, 202):
        return response.json()
    if response.status_code == 401:
        raise predix.admin.cf.config.CloudFoundryLoginError('token invalid')

    logging.debug("STATUS=" + str(response.status_code))
    logging.debug("CONTENT=" + str(response.content))
    response.raise_for_status()
<SYSTEM_TASK:>
Returns a flat list of the names for the organizations
<END_TASK>
<USER_TASK:>
Description:
def get_orgs(self):
    """
    Returns a flat list of the names for the organizations
    user belongs.
    """
    return [res['entity']['name'] for res in self._get_orgs()['resources']]
<SYSTEM_TASK:>
Returns a flat list of the names for the apps in
<END_TASK>
<USER_TASK:>
Description:
def get_apps(self):
    """
    Returns a flat list of the names for the apps in
    the organization.
    """
    return [res['entity']['name'] for res in self._get_apps()['resources']]
<SYSTEM_TASK:>
Calls CF's associate user with org. Valid roles include `user`, `auditor`,
<END_TASK>
<USER_TASK:>
Description:
def add_user(self, user_name, role='user'):
    """
    Calls CF's associate user with org. Valid roles include `user`, `auditor`,
    `manager`,`billing_manager`
    """
    return self.api.put(path=self._get_role_uri(role=role),
                        data={'username': user_name})
<SYSTEM_TASK:>
Calls CF's remove user with org
<END_TASK>
<USER_TASK:>
Description:
def remove_user(self, user_name, role):
    """
    Calls CF's remove user with org
    """
    return self.api.delete(path=self._get_role_uri(role=role),
                           data={'username': user_name})
<SYSTEM_TASK:>
Create a new web socket connection with proper headers.
<END_TASK>
<USER_TASK:>
Description:
def _init_publisher_ws(self):
    """
    Create a new web socket connection with proper headers.
    """
    logging.debug("Initializing new web socket connection.")
    ws_url = 'wss://%s/v1/stream/messages/' % self.eventhub_client.host
    ws_headers = self._generate_publish_headers()

    logging.debug("URL=" + str(ws_url))
    logging.debug("HEADERS=" + str(ws_headers))

    websocket.enableTrace(False)
    self._ws = websocket.WebSocketApp(ws_url,
                                      header=ws_headers,
                                      on_message=self._on_ws_message,
                                      on_open=self._on_ws_open,
                                      on_close=self._on_ws_close)

    # Run the socket in a daemon thread so it never blocks interpreter exit;
    # ping every 30s to keep the connection alive.
    self._ws_thread = threading.Thread(target=self._ws.run_forever,
                                       kwargs={'ping_interval': 30})
    self._ws_thread.daemon = True
    self._ws_thread.start()

    # Give the connection a moment to come up before callers publish.
    time.sleep(1)
<SYSTEM_TASK:>
Create an instance of the Parking Planning Service with the
<END_TASK>
<USER_TASK:>
Description:
def create(self):
    """
    Create an instance of the Parking Planning Service with the
    typical starting settings.
    """
    self.service.create()

    # Export connection details for immediate use, namespaced by module.
    env_prefix = self.__module__
    os.environ[env_prefix + '.uri'] = self.service.settings.data['url']
    os.environ[env_prefix + '.zone_id'] = self.get_predix_zone_id()
<SYSTEM_TASK:>
Read an existing manifest.
<END_TASK>
<USER_TASK:>
Description:
def read_manifest(self, encrypted=None):
    """
    Read an existing manifest.

    :param encrypted: when True, force decryption of environment values;
        otherwise decryption happens only when the manifest marks itself
        as encrypted via the PREDIXPY_ENCRYPTED env entry.
    """
    # The context manager closes the file; the previous explicit
    # input_file.close() was redundant and has been removed.
    with open(self.manifest_path, 'r') as input_file:
        self.manifest = yaml.safe_load(input_file)

    if 'env' not in self.manifest:
        self.manifest['env'] = {}
    if 'services' not in self.manifest:
        self.manifest['services'] = []

    # If manifest is encrypted, use manifest key to
    # decrypt each value before storing in memory.
    if 'PREDIXPY_ENCRYPTED' in self.manifest['env']:
        self.encrypted = True
    if encrypted or self.encrypted:
        key = predix.config.get_crypt_key(self.manifest_key)
        f = Fernet(key)
        for var in self.manifest['env'].keys():
            value = f.decrypt(bytes(self.manifest['env'][var], 'utf-8'))
            self.manifest['env'][var] = value.decode('utf-8')

    self.app_name = self.manifest['applications'][0]['name']
<SYSTEM_TASK:>
Create a new manifest and write it to
<END_TASK>
<USER_TASK:>
Description:
def create_manifest(self):
    """
    Create a new manifest and write it to
    disk.
    """
    self.manifest = {
        'applications': [{'name': self.app_name}],
        'services': [],
        'env': {'PREDIXPY_VERSION': str(predix.version)},
    }
    self.write_manifest()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.