<SYSTEM_TASK:>
Determines the number of bytes available for reading from an
<END_TASK>
<USER_TASK:>
Description:
def bytes_available(device):
"""
Determines the number of bytes available for reading from an
AlarmDecoder device
:param device: the AlarmDecoder device
:type device: :py:class:`~alarmdecoder.devices.Device`
:returns: int
""" |
bytes_avail = 0
if isinstance(device, alarmdecoder.devices.SerialDevice):
if hasattr(device._device, "in_waiting"):
bytes_avail = device._device.in_waiting
else:
bytes_avail = device._device.inWaiting()
elif isinstance(device, alarmdecoder.devices.SocketDevice):
bytes_avail = 4096
return bytes_avail |
<SYSTEM_TASK:>
Hacky workaround for old installs of the library on systems without python-future that were
<END_TASK>
<USER_TASK:>
Description:
def bytes_hack(buf):
"""
Hacky workaround for old installs of the library on systems without python-future that were
keeping the 2to3 update from working after auto-update.
""" |
ub = None
if sys.version_info > (3,):
ub = buf
else:
ub = bytes(buf)
return ub |
<SYSTEM_TASK:>
Reads a firmware file into a deque for processing.
<END_TASK>
<USER_TASK:>
Description:
def read_firmware_file(file_path):
"""
Reads a firmware file into a deque for processing.
:param file_path: Path to the firmware file
:type file_path: string
:returns: deque
""" |
data_queue = deque()
with open(file_path) as firmware_handle:
for line in firmware_handle:
line = line.rstrip()
if line != '' and line[0] == ':':
data_queue.append(line + "\r")
return data_queue |
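A minimal usage sketch with a throwaway file; the path and the Intel HEX records below are hypothetical and only illustrate that non-':' lines are skipped and that each kept record gets a trailing carriage return:

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.hex', delete=False) as handle:
    handle.write(":020000040000FA\nnot a record, skipped\n:00000001FF\n")

records = read_firmware_file(handle.name)
# deque([':020000040000FA\r', ':00000001FF\r'])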
<SYSTEM_TASK:>
Reads data from the specified device.
<END_TASK>
<USER_TASK:>
Description:
def read(device):
"""
Reads data from the specified device.
:param device: the AlarmDecoder device
:type device: :py:class:`~alarmdecoder.devices.Device`
:returns: string
""" |
response = None
bytes_avail = bytes_available(device)
if isinstance(device, alarmdecoder.devices.SerialDevice):
response = device._device.read(bytes_avail)
elif isinstance(device, alarmdecoder.devices.SocketDevice):
response = device._device.recv(bytes_avail)
return response |
<SYSTEM_TASK:>
Uploads firmware to an `AlarmDecoder`_ device.
<END_TASK>
<USER_TASK:>
Description:
def upload(device, file_path, progress_callback=None, debug=False):
"""
Uploads firmware to an `AlarmDecoder`_ device.
:param device: the AlarmDecoder device
:type device: :py:class:`~alarmdecoder.devices.Device`
:param file_path: firmware file path
:type file_path: string
:param progress_callback: callback function used to report progress
:type progress_callback: function
:raises: :py:class:`~alarmdecoder.util.NoDeviceError`, :py:class:`~alarmdecoder.util.TimeoutError`
""" |
def progress_stage(stage, **kwargs):
"""Callback to update progress for the specified stage."""
if progress_callback is not None:
progress_callback(stage, **kwargs)
return stage
if device is None:
raise NoDeviceError('No device specified for firmware upload.')
fds = [device._device.fileno()]
# Read firmware file into memory
try:
write_queue = read_firmware_file(file_path)
except IOError as err:
stage = progress_stage(Firmware.STAGE_ERROR, error=str(err))
return
data_read = ''
got_response = False
running = True
stage = progress_stage(Firmware.STAGE_START)
if device.is_reader_alive():
# Close the reader thread and wait for it to die, otherwise
# it interferes with our reading.
device.stop_reader()
while device._read_thread.is_alive():
stage = progress_stage(Firmware.STAGE_WAITING)
time.sleep(0.5)
time.sleep(3)
try:
while running:
rr, wr, _ = select.select(fds, fds, [], 0.5)
if len(rr) != 0:
response = Firmware.read(device)
for c in response:
# HACK: Python 3 / PySerial hack.
if isinstance(c, int):
c = chr(c)
if c == '\xff' or c == '\r': # HACK: odd case for our mystery \xff byte.
# Boot started, start looking for the !boot message
if data_read.startswith("!sn"):
stage = progress_stage(Firmware.STAGE_BOOT)
# Entered bootloader upload mode, start uploading
elif data_read.startswith("!load"):
got_response = True
stage = progress_stage(Firmware.STAGE_UPLOADING)
# Checksum error
elif data_read == '!ce':
running = False
raise UploadChecksumError("Checksum error in {0}".format(file_path))
# Bad data
elif data_read == '!no':
running = False
raise UploadError("Incorrect data sent to bootloader.")
# Firmware upload complete
elif data_read == '!ok':
running = False
stage = progress_stage(Firmware.STAGE_DONE)
# All other responses are valid during upload.
else:
got_response = True
if stage == Firmware.STAGE_UPLOADING:
progress_stage(stage)
data_read = ''
elif c == '\n':
pass
else:
data_read += c
if len(wr) != 0:
# Reboot device
if stage in [Firmware.STAGE_START, Firmware.STAGE_WAITING]:
device.write('=')
stage = progress_stage(Firmware.STAGE_WAITING_ON_LOADER)
# Enter bootloader
elif stage == Firmware.STAGE_BOOT:
device.write('=')
stage = progress_stage(Firmware.STAGE_LOAD)
# Upload firmware
elif stage == Firmware.STAGE_UPLOADING:
if len(write_queue) > 0 and got_response == True:
got_response = False
device.write(write_queue.popleft())
except UploadError as err:
stage = progress_stage(Firmware.STAGE_ERROR, error=str(err))
else:
stage = progress_stage(Firmware.STAGE_DONE) |
<SYSTEM_TASK:>
Creates an AlarmDecoder from the specified USB device arguments.
<END_TASK>
<USER_TASK:>
Description:
def create_device(device_args):
"""
Creates an AlarmDecoder from the specified USB device arguments.
:param device_args: Tuple containing information on the USB device to open.
:type device_args: Tuple (vid, pid, serialnumber, interface_count, description)
""" |
device = AlarmDecoder(USBDevice.find(device_args))
device.on_message += handle_message
device.open()
return device |
<SYSTEM_TASK:>
Return the encoded version of a string.
<END_TASK>
<USER_TASK:>
Description:
def encode(cls, string, errors='strict'):
"""Return the encoded version of a string.
:param string:
The input string to encode.
:type string:
`basestring`
:param errors:
The error handling scheme. Only 'strict' is supported.
:type errors:
`basestring`
:return:
Tuple of encoded string and number of input bytes consumed.
:rtype:
`tuple` (`unicode`, `int`)
""" |
if errors != 'strict':
raise UnicodeError('Unsupported error handling {0}'.format(errors))
unicode_string = cls._ensure_unicode_string(string)
encoded = unicode_string.translate(cls._encoding_table)
return encoded, len(string) |
<SYSTEM_TASK:>
Return the decoded version of a string.
<END_TASK>
<USER_TASK:>
Description:
def decode(cls, string, errors='strict'):
"""Return the decoded version of a string.
:param string:
The input string to decode.
:type string:
`basestring`
:param errors:
The error handling scheme. Only 'strict' is supported.
:type errors:
`basestring`
:return:
Tuple of decoded string and number of input bytes consumed.
:rtype:
`tuple` (`unicode`, `int`)
""" |
if errors != 'strict':
raise UnicodeError('Unsupported error handling {0}'.format(errors))
unicode_string = cls._ensure_unicode_string(string)
decoded = unicode_string.translate(cls._decoding_table)
return decoded, len(string) |
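Both encode() and decode() above delegate the per-character mapping to str.translate. A minimal Python 3 sketch of that mechanism with a toy table (the real _encoding_table/_decoding_table are not shown here):

toy_encode = str.maketrans({'a': 'α', 'b': 'β'})
toy_decode = str.maketrans({'α': 'a', 'β': 'b'})

encoded = 'abc'.translate(toy_encode)    # 'αβc' -- unmapped characters pass through
decoded = encoded.translate(toy_decode)  # 'abc'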
<SYSTEM_TASK:>
Handles message events from the AlarmDecoder.
<END_TASK>
<USER_TASK:>
Description:
def handle_lrr_message(sender, message):
"""
Handles message events from the AlarmDecoder.
""" |
print(sender, message.partition, message.event_type, message.event_data) |
<SYSTEM_TASK:>
Example application that sends an email when an alarm event is
<END_TASK>
<USER_TASK:>
Description:
def main():
"""
Example application that sends an email when an alarm event is
detected.
""" |
try:
# Retrieve the first USB device
device = AlarmDecoder(SerialDevice(interface=SERIAL_DEVICE))
# Set up an event handler and open the device
device.on_alarm += handle_alarm
with device.open(baudrate=BAUDRATE):
while True:
time.sleep(1)
except Exception as ex:
print('Exception:', ex) |
<SYSTEM_TASK:>
Handles alarm events from the AlarmDecoder.
<END_TASK>
<USER_TASK:>
Description:
def handle_alarm(sender, **kwargs):
"""
Handles alarm events from the AlarmDecoder.
""" |
zone = kwargs.pop('zone', None)
text = "Alarm: Zone {0}".format(zone)
# Build the email message
msg = MIMEText(text)
msg['Subject'] = SUBJECT
msg['From'] = FROM_ADDRESS
msg['To'] = TO_ADDRESS
s = smtplib.SMTP(SMTP_SERVER)
# Authenticate if needed
if SMTP_USERNAME is not None:
s.login(SMTP_USERNAME, SMTP_PASSWORD)
# Send the email
s.sendmail(FROM_ADDRESS, TO_ADDRESS, msg.as_string())
s.quit()
print('sent alarm email:', text) |
<SYSTEM_TASK:>
Example application that opens a device that has been exposed to the network
<END_TASK>
<USER_TASK:>
Description:
def main():
"""
Example application that opens a device that has been exposed to the network
with ser2sock and SSL encryption and authentication.
""" |
try:
# Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.
ssl_device = SocketDevice(interface=('localhost', 10000))
# Enable SSL and set the certificates to be used.
#
# The key/cert attributes can either be a filesystem path or an X509/PKey
# object from pyopenssl.
ssl_device.ssl = True
ssl_device.ssl_ca = SSL_CA # CA certificate
ssl_device.ssl_key = SSL_KEY # Client private key
ssl_device.ssl_certificate = SSL_CERT # Client certificate
device = AlarmDecoder(ssl_device)
# Set up an event handler and open the device
device.on_message += handle_message
with device.open():
while True:
time.sleep(1)
except Exception as ex:
print('Exception:', ex) |
<SYSTEM_TASK:>
Encode a string using 'rotunicode' codec.
<END_TASK>
<USER_TASK:>
Description:
def ruencode(string, extension=False):
"""Encode a string using 'rotunicode' codec.
:param string:
The input string to encode.
:type string:
`basestring`
:param extension:
True if the entire input string should be encoded.
False to split the input string using :func:`os.path.splitext` and
encode only the file name portion keeping the extension as is.
:type extension:
`bool`
:return:
Encoded string.
:rtype:
`unicode`
""" |
if extension:
file_name = string
file_ext = ''
else:
file_name, file_ext = splitext(string)
encoded_value, _ = _ROT_UNICODE.encode(file_name)
return encoded_value + file_ext |
<SYSTEM_TASK:>
Parse a string for possible escape sequences.
<END_TASK>
<USER_TASK:>
Description:
def parse_escape_sequences(string):
"""Parse a string for possible escape sequences.
Sample usage:
>>> parse_escape_sequences('foo\\nbar')
'foo\nbar'
>>> parse_escape_sequences('foo\\\\u0256')
'foo\\u0256'
:param string:
Any string.
:type string:
`basestring`
:raises:
:class:`ValueError` if a backslash character is found, but it doesn't
form a proper escape sequence with the character(s) that follow.
:return:
The parsed string. Will parse the standard escape sequences, and also
basic \\uxxxx escape sequences.
\\Uxxxxxxxx escape sequences are not currently supported.
:rtype:
`unicode`
""" |
string = safe_unicode(string)
characters = []
i = 0
string_len = len(string)
while i < string_len:
character = string[i]
if character == '\\':
# Figure out the size of the escape sequence. Most escape sequences
# are two characters (e.g. '\\' and 'n'), with the sole exception
# being \uxxxx escape sequences, which are six characters.
if string[(i + 1):(i + 2)] == 'u':
offset = 6
else:
offset = 2
try:
# `json.decoder.scanstring()` mostly does what we want, but it
# also does some stuff that we don't want, like parsing quote
# characters. This will mess us up. The iteration and scanning
# within this loop is meant to isolate the escape sequences, so
# that we'll always be calling it with something like
# >>> scanstring('"\n"', 1)
# or
# >>> scanstring('"\u0256"', 1)
# The 1 refers to the location of the first character after the
# open quote character.
json_string = '"' + string[i:(i + offset)] + '"'
character = scanstring(json_string, 1)[0]
characters.append(character)
i += offset
except ValueError:
# If an exception was raised, raise a new `ValueError`. The
# reason we don't re-raise the original exception is because,
# in Python 3, it is a custom JSON `ValueError` subclass. We
# don't want to raise a JSON error from a function that has
# nothing to do with JSON, so we create a new `ValueError`. The
# error message is also nonsensical to the caller, in all
# cases.
raise_from(ValueError(string), None)
else:
characters.append(character)
i += 1
return ''.join(characters) |
<SYSTEM_TASK:>
Returns all FTDI devices matching our vendor and product IDs.
<END_TASK>
<USER_TASK:>
Description:
def find_all(cls, vid=None, pid=None):
"""
Returns all FTDI devices matching our vendor and product IDs.
:returns: list of devices
:raises: :py:class:`~alarmdecoder.util.CommError`
""" |
if not have_pyftdi:
raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
cls.__devices = []
query = cls.PRODUCT_IDS
if vid and pid:
query = [(vid, pid)]
try:
cls.__devices = Ftdi.find_all(query, nocache=True)
except (usb.core.USBError, FtdiError) as err:
raise CommError('Error enumerating AD2USB devices: {0}'.format(str(err)), err)
return cls.__devices |
<SYSTEM_TASK:>
Starts the device detection thread.
<END_TASK>
<USER_TASK:>
Description:
def start_detection(cls, on_attached=None, on_detached=None):
"""
Starts the device detection thread.
:param on_attached: function to be called when a device is attached **Callback definition:** *def callback(thread, device)*
:type on_attached: function
:param on_detached: function to be called when a device is detached **Callback definition:** *def callback(thread, device)*
:type on_detached: function
""" |
if not have_pyftdi:
raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
cls.__detect_thread = USBDevice.DetectThread(on_attached, on_detached)
try:
cls.find_all()
except CommError:
pass
cls.__detect_thread.start() |
<SYSTEM_TASK:>
Sets the interface used to connect to the device.
<END_TASK>
<USER_TASK:>
Description:
def interface(self, value):
"""
Sets the interface used to connect to the device.
:param value: may specify either the serial number or the device index
:type value: string or int
""" |
self._interface = value
if isinstance(value, int):
self._device_number = value
else:
self._serial_number = value |
<SYSTEM_TASK:>
Retrieves the FTDI device serial number.
<END_TASK>
<USER_TASK:>
Description:
def _get_serial_number(self):
"""
Retrieves the FTDI device serial number.
:returns: string containing the device serial number
""" |
return usb.util.get_string(self._device.usb_dev, 64, self._device.usb_dev.iSerialNumber) |
<SYSTEM_TASK:>
Parse a color value.
<END_TASK>
<USER_TASK:>
Description:
def parse_color(color):
"""Parse a color value.
I've decided not to expect a leading '#' because it's a comment character
in some shells.
>>> parse_color('4bf') == (0x44, 0xbb, 0xff, 0xff)
True
>>> parse_color('ccce') == (0xcc, 0xcc, 0xcc, 0xee)
True
>>> parse_color('d8b4a2') == (0xd8, 0xb4, 0xa2, 0xff)
True
>>> parse_color('12345678') == (0x12, 0x34, 0x56, 0x78)
True
Raises ValueError on errors.
""" |
if len(color) not in (3, 4, 6, 8):
raise ValueError('bad color %s' % repr(color))
if len(color) in (3, 4):
r = int(color[0], 16) * 0x11
g = int(color[1], 16) * 0x11
b = int(color[2], 16) * 0x11
elif len(color) in (6, 8):
r = int(color[0:2], 16)
g = int(color[2:4], 16)
b = int(color[4:6], 16)
if len(color) == 4:
a = int(color[3], 16) * 0x11
elif len(color) == 8:
a = int(color[6:8], 16)
else:
a = 0xff
return (r, g, b, a) |
<SYSTEM_TASK:>
Validate and convert an option value of type 'color'.
<END_TASK>
<USER_TASK:>
Description:
def check_color(option, opt, value):
"""Validate and convert an option value of type 'color'.
``option`` is an optparse.Option instance.
``opt`` is a string with the user-supplied option name (e.g. '--bgcolor').
``value`` is the user-supplied value.
""" |
try:
return parse_color(value)
except ValueError:
raise optparse.OptionValueError("option %s: invalid color value: %r"
% (opt, value)) |
<SYSTEM_TASK:>
Pick a tiling orientation for two images.
<END_TASK>
<USER_TASK:>
Description:
def pick_orientation(img1, img2, spacing, desired_aspect=1.618):
"""Pick a tiling orientation for two images.
Returns either 'lr' for left-and-right, or 'tb' for top-and-bottom.
Picks the one that makes the combined image have a better aspect
ratio, where 'better' is defined as 'closer to 1:1.618'.
""" |
w1, h1 = img1.size
w2, h2 = img2.size
size_a = (w1 + spacing + w2, max(h1, h2, 1))
size_b = (max(w1, w2, 1), h1 + spacing + h2)
aspect_a = size_a[0] / size_a[1]
aspect_b = size_b[0] / size_b[1]
goodness_a = min(desired_aspect, aspect_a) / max(desired_aspect, aspect_a)
goodness_b = min(desired_aspect, aspect_b) / max(desired_aspect, aspect_b)
return 'lr' if goodness_a >= goodness_b else 'tb' |
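A quick runnable check, assuming Pillow is available (the module already uses PIL's Image): with a 300x200 and a 300x250 image and 10px spacing, side-by-side gives roughly 610x250 (aspect ~2.44) while stacking gives 300x460 (aspect ~0.65); under the goodness metric the side-by-side layout is closer to 1:1.618, so 'lr' is chosen:

from PIL import Image

img_a = Image.new('RGB', (300, 200))
img_b = Image.new('RGB', (300, 250))
print(pick_orientation(img_a, img_b, spacing=10))  # 'lr'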
<SYSTEM_TASK:>
Combine two images into one by tiling them.
<END_TASK>
<USER_TASK:>
Description:
def tile_images(img1, img2, mask1, mask2, opts):
"""Combine two images into one by tiling them.
``mask1`` and ``mask2`` provide optional masks for alpha-blending;
pass None to avoid.
Fills unused areas with ``opts.bgcolor``.
Puts a ``opts.spacing``-wide bar with a thin line of ``opts.sepcolor``
color between them.
``opts.orientation`` can be 'lr' for left-and-right, 'tb' for
top-and-bottom, or 'auto' for automatic.
""" |
w1, h1 = img1.size
w2, h2 = img2.size
if opts.orientation == 'auto':
opts.orientation = pick_orientation(img1, img2, opts.spacing)
B, S = opts.border, opts.spacing
if opts.orientation == 'lr':
w, h = (B + w1 + S + w2 + B, B + max(h1, h2) + B)
pos1 = (B, (h - h1) // 2)
pos2 = (B + w1 + S, (h - h2) // 2)
separator_line = [(B + w1 + S//2, 0), (B + w1 + S//2, h)]
else:
w, h = (B + max(w1, w2) + B, B + h1 + S + h2 + B)
pos1 = ((w - w1) // 2, B)
pos2 = ((w - w2) // 2, B + h1 + S)
separator_line = [(0, B + h1 + S//2), (w, B + h1 + S//2)]
img = Image.new('RGBA', (w, h), opts.bgcolor)
img.paste(img1, pos1, mask1)
img.paste(img2, pos2, mask2)
ImageDraw.Draw(img).line(separator_line, fill=opts.sepcolor)
return img |
<SYSTEM_TASK:>
Launch an external program to view an image.
<END_TASK>
<USER_TASK:>
Description:
def spawn_viewer(viewer, img, filename, grace):
"""Launch an external program to view an image.
``img`` is an Image object.
``viewer`` is a command name. Arguments are not allowed; exactly one
argument will be passed: the name of the image file.
``filename`` is the suggested filename for a temporary file.
``grace`` is the number of seconds to wait after spawning the viewer
before removing the temporary file. Useful if your viewer forks
into background before it opens the file.
""" |
tempdir = tempfile.mkdtemp(prefix='imgdiff-')
try:
imgfile = os.path.join(tempdir, filename)
img.save(imgfile)
started = time.time()
subprocess.call([viewer, imgfile])
elapsed = time.time() - started
if elapsed < grace:
# Program exited too quickly. I think it forked and so may not
# have had enough time to even start looking for the temp file
# we just created. Wait a bit before removing the temp file.
time.sleep(grace - elapsed)
finally:
shutil.rmtree(tempdir) |
<SYSTEM_TASK:>
Adjust a difference map into an opacity mask for a given lowest opacity.
<END_TASK>
<USER_TASK:>
Description:
def tweak_diff(diff, opacity):
"""Adjust a difference map into an opacity mask for a given lowest opacity.
Performs a linear map from [0; 255] to [opacity; 255].
The result is that similar areas will have a given opacity, while
dissimilar areas will be opaque.
""" |
mask = diff.point(lambda i: opacity + i * (255 - opacity) // 255)
return mask |
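A quick worked example of the linear map for a base opacity of 64, using the same expression as the point() lambda above:

opacity = 64
for i in (0, 128, 255):
    print(i, '->', opacity + i * (255 - opacity) // 255)
# 0 -> 64, 128 -> 159, 255 -> 255: identical areas get the base opacity,
# completely different areas stay fully opaque.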
<SYSTEM_TASK:>
Compare two images with given alignments.
<END_TASK>
<USER_TASK:>
Description:
def diff(img1, img2, x1y1, x2y2):
"""Compare two images with given alignments.
Returns a difference map.
``x1y1``: a tuple ``(x1, y1)`` to specify the top-left corner of the
aligned area with respect to ``img1``.
``x2y2``: a tuple ``(x2, y2)`` to specify the top-left corner of
the aligned area with respect to ``img2``.
Either ``x1`` or ``x2`` must be 0, depending on whether ``img1`` is
narrower or wider than ``img2``. Both must be 0 if the two images
have the same width.
Either ``y1`` or ``y2`` must be 0, depending on whether ``img1`` is
shorter or taller than ``img2``. Both must be 0 if the two images
have the same height.
Suppose ``img1`` is bigger than ``img2``::
+----------------------------------+
| img1 ^ |
| | y1 |
| v |
| +------------------------+ |
| | img2 | |
|<---->| | |
| x1 | | |
| +------------------------+ |
+----------------------------------+
In this case ``x2`` and ``y2`` are zero, ``0 <= x1 <= (w1 - w2)``, and
``0 <= y1 <= (h1 - h2)``, where ``(w1, h1) == img1.size`` and
``(w2, h2) == img2.size``.
If ``img2`` is smaller than ``img1``, just swap the labels in the
description above.
Suppose ``img1`` is wider but shorter than ``img2``::
+------------------------+
| img2 ^ |
| | y2 |
| v |
+------|------------------------|--+
| img1 | | |
| | | |
|<---->| | |
| x1 | | |
| | | |
+------|------------------------|--+
+------------------------+
In this case ``x2`` and ``y1`` are zero, ``0 <= x1 <= (w1 - w2)``, and
``0 <= y2 <= (h2 - h1)``, where ``(w1, h1) == img1.size`` and
``(w2, h2) == img2.size``.
If ``img1`` is narrower but taller than ``img2``, just swap the labels
in the description above.
""" |
x1, y1 = x1y1
x2, y2 = x2y2
w1, h1 = img1.size
w2, h2 = img2.size
w, h = min(w1, w2), min(h1, h2)
diff = ImageChops.difference(img1.crop((x1, y1, x1+w, y1+h)),
img2.crop((x2, y2, x2+w, y2+h)))
diff = diff.convert('L')
return diff |
<SYSTEM_TASK:>
Estimate the "badness" value of a difference map.
<END_TASK>
<USER_TASK:>
Description:
def diff_badness(diff):
"""Estimate the "badness" value of a difference map.
Returns 0 if the pictures are identical
Returns a large number if the pictures are completely different
(e.g. a black field and a white field). More specifically, returns
``255 * width * height`` where ``(width, height) == diff.size``.
Returns something in between for other situations.
""" |
# identical pictures = black image = return 0
# completely different pictures = white image = return lots
return sum(i * n for i, n in enumerate(diff.histogram())) |
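In other words, the badness is the sum of all pixel values in the difference map. A tiny runnable check, assuming Pillow is available:

from PIL import Image

tiny = Image.new('L', (2, 2))
tiny.putdata([0, 0, 10, 255])
print(diff_badness(tiny))  # 0*2 + 10*1 + 255*1 = 265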
<SYSTEM_TASK:>
Find the best alignment of two images that minimizes the differences.
<END_TASK>
<USER_TASK:>
Description:
def best_diff(img1, img2, opts):
"""Find the best alignment of two images that minimizes the differences.
Returns (diff, alignments) where ``diff`` is a difference map, and
``alignments`` is a tuple ((x1, y1), (x2, y2)).
See ``diff()`` for the description of the alignment numbers.
""" |
w1, h1 = img1.size
w2, h2 = img2.size
w, h = min(w1, w2), min(h1, h2)
best = None
best_value = 255 * w * h + 1
xr = abs(w1 - w2) + 1
yr = abs(h1 - h2) + 1
p = Progress(xr * yr, timeout=opts.timeout)
for x in range(xr):
if w1 > w2:
x1, x2 = x, 0
else:
x1, x2 = 0, x
for y in range(yr):
if h1 > h2:
y1, y2 = y, 0
else:
y1, y2 = 0, y
p.next()
this = diff(img1, img2, (x1, y1), (x2, y2))
this_value = diff_badness(this)
if this_value < best_value:
best = this
best_value = this_value
best_pos = (x1, y1), (x2, y2)
return best, best_pos |
<SYSTEM_TASK:>
Try to align the two images to minimize pixel differences.
<END_TASK>
<USER_TASK:>
Description:
def simple_highlight(img1, img2, opts):
"""Try to align the two images to minimize pixel differences.
Produces two masks for img1 and img2.
The algorithm works by comparing every possible alignment of the images,
finding the alignment that minimizes the differences, and then smoothing
it a bit to reduce spurious matches in areas that are perceptibly
different (e.g. text).
""" |
try:
diff, ((x1, y1), (x2, y2)) = best_diff(img1, img2, opts)
except KeyboardInterrupt:
return None, None
diff = diff.filter(ImageFilter.MaxFilter(9))
diff = tweak_diff(diff, opts.opacity)
# If the images have different sizes, the areas outside the alignment
# zone are considered to be dissimilar -- filling them with 0xff.
# Perhaps it would be better to compare those bits with bars of solid
# color, filled with opts.bgcolor?
mask1 = Image.new('L', img1.size, 0xff)
mask2 = Image.new('L', img2.size, 0xff)
mask1.paste(diff, (x1, y1))
mask2.paste(diff, (x2, y2))
return mask1, mask2 |
<SYSTEM_TASK:>
Try to find similar areas between two images.
<END_TASK>
<USER_TASK:>
Description:
def slow_highlight(img1, img2, opts):
"""Try to find similar areas between two images.
Produces two masks for img1 and img2.
The algorithm works by comparing every possible alignment of the images,
smoothing it a bit to reduce spurious matches in areas that are
perceptibly different (e.g. text), and then taking the point-wise minimum
of all those difference maps.
This way if you insert a few pixel rows/columns into an image, similar
areas should match even if different areas need to be aligned with
different shifts.
As you can imagine, this brute-force approach can be pretty slow, if
there are many possible alignments. The closer the images are in size,
the faster this will work.
It would work better if it could compare alignments that go beyond the
outer boundaries of the images, in case some pixels got shifted closer
to an edge.
""" |
w1, h1 = img1.size
w2, h2 = img2.size
W, H = max(w1, w2), max(h1, h2)
pimg1 = Image.new('RGB', (W, H), opts.bgcolor)
pimg2 = Image.new('RGB', (W, H), opts.bgcolor)
pimg1.paste(img1, (0, 0))
pimg2.paste(img2, (0, 0))
diff = Image.new('L', (W, H), 255)
# It is not a good idea to keep one diff image; it should track the
# relative positions of the two images. I think that's what explains
# the fuzz I see near the edges of the different areas.
xr = abs(w1 - w2) + 1
yr = abs(h1 - h2) + 1
try:
p = Progress(xr * yr, timeout=opts.timeout)
for x in range(xr):
for y in range(yr):
p.next()
this = ImageChops.difference(pimg1, pimg2).convert('L')
this = this.filter(ImageFilter.MaxFilter(7))
diff = ImageChops.darker(diff, this)
if h1 > h2:
pimg2 = ImageChops.offset(pimg2, 0, 1)
else:
pimg1 = ImageChops.offset(pimg1, 0, 1)
if h1 > h2:
pimg2 = ImageChops.offset(pimg2, 0, -yr)
else:
pimg1 = ImageChops.offset(pimg1, 0, -yr)
if w1 > w2:
pimg2 = ImageChops.offset(pimg2, 1, 0)
else:
pimg1 = ImageChops.offset(pimg1, 1, 0)
except KeyboardInterrupt:
return None, None
diff = diff.filter(ImageFilter.MaxFilter(5))
diff1 = diff.crop((0, 0, w1, h1))
diff2 = diff.crop((0, 0, w2, h2))
mask1 = tweak_diff(diff1, opts.opacity)
mask2 = tweak_diff(diff2, opts.opacity)
return mask1, mask2 |
<SYSTEM_TASK:>
Generate the SOAP action call.
<END_TASK>
<USER_TASK:>
Description:
def SOAPAction(self, Action, responseElement, params = "", recursive = False):
"""Generate the SOAP action call.
:type Action: str
:type responseElement: str
:type params: str
:type recursive: bool
:param Action: The action to perform on the device
:param responseElement: The XML element that is returned upon success
:param params: Any additional parameters required for performing the request (e.g. RadioID, moduleID, etc.)
:param recursive: True if the first attempt failed and we are now re-authenticating before retrying
:return: Text enclosed in responseElement brackets
""" |
# Authenticate client
if self.authenticated is None:
self.authenticated = self.auth()
auth = self.authenticated
#If not legacy protocol, ensure auth() is called for every call
if not self.use_legacy_protocol:
self.authenticated = None
if auth is None:
return None
payload = self.requestBody(Action, params)
# Timestamp in microseconds
time_stamp = str(round(time.time()/1e6))
action_url = '"http://purenetworks.com/HNAP1/{}"'.format(Action)
AUTHKey = hmac.new(auth[0].encode(), (time_stamp+action_url).encode()).hexdigest().upper() + " " + time_stamp
headers = {'Content-Type' : '"text/xml; charset=utf-8"',
'SOAPAction': '"http://purenetworks.com/HNAP1/{}"'.format(Action),
'HNAP_AUTH' : '{}'.format(AUTHKey),
'Cookie' : 'uid={}'.format(auth[1])}
try:
response = urlopen(Request(self.url, payload.encode(), headers))
except (HTTPError, URLError):
# Try to re-authenticate once
self.authenticated = None
# Recursive call to retry action
if not recursive:
return_value = self.SOAPAction(Action, responseElement, params, True)
if recursive or return_value is None:
_LOGGER.warning("Failed to open url to {}".format(self.ip))
self._error_report = True
return None
else:
return return_value
xmlData = response.read().decode()
root = ET.fromstring(xmlData)
# Get value from device
try:
value = root.find('.//{http://purenetworks.com/HNAP1/}%s' % (responseElement)).text
except AttributeError:
_LOGGER.warning("Unable to find %s in response." % responseElement)
return None
if value is None and self._error_report is False:
_LOGGER.warning("Could not find %s in response." % responseElement)
self._error_report = True
return None
self._error_report = False
return value |
<SYSTEM_TASK:>
Fetches statistics from my_cgi.cgi
<END_TASK>
<USER_TASK:>
Description:
def fetchMyCgi(self):
"""Fetches statistics from my_cgi.cgi""" |
try:
response = urlopen(Request('http://{}/my_cgi.cgi'.format(self.ip), b'request=create_chklst'))
except (HTTPError, URLError):
_LOGGER.warning("Failed to open url to {}".format(self.ip))
self._error_report = True
return None
lines = response.readlines()
return {line.decode().split(':')[0].strip(): line.decode().split(':')[1].strip() for line in lines} |
<SYSTEM_TASK:>
Get the current power consumption in Watt.
<END_TASK>
<USER_TASK:>
Description:
def current_consumption(self):
"""Get the current power consumption in Watt.""" |
res = 'N/A'
if self.use_legacy_protocol:
# Use /my_cgi.cgi to retrieve current consumption
try:
res = self.fetchMyCgi()['Meter Watt']
except:
return 'N/A'
else:
try:
res = self.SOAPAction('GetCurrentPowerConsumption', 'CurrentConsumption', self.moduleParameters("2"))
except:
return 'N/A'
if res is None:
return 'N/A'
try:
res = float(res)
except ValueError:
_LOGGER.error("Failed to retrieve current power consumption from SmartPlug")
return res |
<SYSTEM_TASK:>
Get the total power consumption in the device lifetime.
<END_TASK>
<USER_TASK:>
Description:
def total_consumption(self):
"""Get the total power consumpuntion in the device lifetime.""" |
if self.use_legacy_protocol:
# TotalConsumption currently fails on the legacy protocol and
# creates a mess in the logs. Just return 'N/A' for now.
return 'N/A'
res = 'N/A'
try:
res = self.SOAPAction("GetPMWarningThreshold", "TotalConsumption", self.moduleParameters("2"))
except:
return 'N/A'
if res is None:
return 'N/A'
try:
float(res)
except ValueError:
_LOGGER.error("Failed to retrieve total power consumption from SmartPlug")
return res |
<SYSTEM_TASK:>
Authenticate using the SOAP interface.
<END_TASK>
<USER_TASK:>
Description:
def auth(self):
"""Authenticate using the SOAP interface.
Authentication is a two-step process. First an initial payload
is sent to the device, requesting additional login information in the form
of a publickey, a challenge string and a cookie.
These values are then hashed with an MD5 algorithm, producing a privatekey
used for the header and a hashed password for the XML payload.
If everything is accepted the XML returned will contain a LoginResult tag with the
string 'success'.
See https://github.com/bikerp/dsp-w215-hnap/wiki/Authentication-process for more information.
""" |
payload = self.initial_auth_payload()
# Build initial header
headers = {'Content-Type' : '"text/xml; charset=utf-8"',
'SOAPAction': '"http://purenetworks.com/HNAP1/Login"'}
# Request privatekey, cookie and challenge
try:
response = urlopen(Request(self.url, payload, headers))
except URLError:
if self._error_report is False:
_LOGGER.warning('Unable to open a connection to dlink switch {}'.format(self.ip))
self._error_report = True
return None
xmlData = response.read().decode()
root = ET.fromstring(xmlData)
# Find responses
ChallengeResponse = root.find('.//{http://purenetworks.com/HNAP1/}Challenge')
CookieResponse = root.find('.//{http://purenetworks.com/HNAP1/}Cookie')
PublickeyResponse = root.find('.//{http://purenetworks.com/HNAP1/}PublicKey')
if (ChallengeResponse == None or CookieResponse == None or PublickeyResponse == None) and self._error_report is False:
_LOGGER.warning("Failed to receive initial authentication from smartplug.")
self._error_report = True
return None
if self._error_report is True:
return None
Challenge = ChallengeResponse.text
Cookie = CookieResponse.text
Publickey = PublickeyResponse.text
# Generate hash responses
PrivateKey = hmac.new((Publickey+self.password).encode(), (Challenge).encode()).hexdigest().upper()
login_pwd = hmac.new(PrivateKey.encode(), Challenge.encode()).hexdigest().upper()
response_payload = self.auth_payload(login_pwd)
# Build response to initial request
headers = {'Content-Type' : '"text/xml; charset=utf-8"',
'SOAPAction': '"http://purenetworks.com/HNAP1/Login"',
'HNAP_AUTH' : '"{}"'.format(PrivateKey),
'Cookie' : 'uid={}'.format(Cookie)}
response = urlopen(Request(self.url, response_payload, headers))
xmlData = response.read().decode()
root = ET.fromstring(xmlData)
# Find responses
login_status = root.find('.//{http://purenetworks.com/HNAP1/}LoginResult').text.lower()
if login_status != "success" and self._error_report is False:
_LOGGER.error("Failed to authenticate with SmartPlug {}".format(self.ip))
self._error_report = True
return None
self._error_report = False # Reset error logging
return (PrivateKey, Cookie) |
<SYSTEM_TASK:>
Return a position in a file which is known to be read & handled.
<END_TASK>
<USER_TASK:>
Description:
def get_known_read_position(fp, buffered=True):
"""
Return a position in a file which is known to be read & handled.
It assumes a buffered file and streaming processing.
""" |
buffer_size = io.DEFAULT_BUFFER_SIZE if buffered else 0
return max(fp.tell() - buffer_size, 0) |
<SYSTEM_TASK:>
Skip to the next possibly decompressable part of a gzip file.
<END_TASK>
<USER_TASK:>
Description:
def recover(gzfile, last_good_position):
# type: (gzip.GzipFile, int) -> gzip.GzipFile
"""
Skip to the next possibly decompressable part of a gzip file.
Return a new GzipFile object if such part is found or None
if it is not found.
""" |
pos = get_recover_position(gzfile, last_good_position=last_good_position)
if pos == -1:
return None
fp = gzfile.fileobj
fp.seek(pos)
# gzfile.close()
return gzip.GzipFile(fileobj=fp, mode='r') |
<SYSTEM_TASK:>
Open file with either open or gzip.open, depending on file extension.
<END_TASK>
<USER_TASK:>
Description:
def maybe_gzip_open(path, *args, **kwargs):
"""
Open file with either open or gzip.open, depending on file extension.
This function doesn't handle json lines format, just opens a file
in a way it is decoded transparently if needed.
""" |
path = path_to_str(path)
if path.endswith('.gz') or path.endswith('.gzip'):
_open = gzip.open
else:
_open = open
return _open(path, *args, **kwargs) |
<SYSTEM_TASK:>
Calculates the signature for the given request data.
<END_TASK>
<USER_TASK:>
Description:
def calculate_signature(key, data, timestamp=None):
"""
Calculates the signature for the given request data.
""" |
# Create a timestamp if one was not given
if timestamp is None:
timestamp = int(time.time())
# Construct the message from the timestamp and the data in the request
message = str(timestamp) + ''.join("%s%s" % (k,v) for k,v in sorted(data.items()))
# Calculate the signature (HMAC SHA256) according to RFC 2104
signature = hmac.HMAC(str(key), message, hashlib.sha256).hexdigest()
return signature |
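A hedged client-side sketch showing how a caller could produce a request that this signature scheme accepts. The helper name is hypothetical, and the snippet is Python 3 flavored (HMAC keys and messages passed as bytes), whereas calculate_signature above passes str objects, which only works on Python 2:

import hashlib
import hmac
import time

def sign_request(secret_key, data, timestamp=None):
    # Same message construction as calculate_signature: the timestamp followed by
    # the sorted key/value pairs concatenated together.
    if timestamp is None:
        timestamp = int(time.time())
    message = str(timestamp) + ''.join("%s%s" % (k, v) for k, v in sorted(data.items()))
    sig = hmac.new(secret_key.encode(), message.encode(), hashlib.sha256).hexdigest()
    # The signed payload carries 't' and 'sig' alongside the data, which is the
    # shape validate_signature() (further below) looks for.
    return dict(data, t=str(timestamp), sig=sig)

signed = sign_request('my-secret', {'user': 'alice', 'action': 'ping'})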
<SYSTEM_TASK:>
Indicate to the client that it needs to authenticate via a 401.
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self):
"""
Indicate to the client that it needs to authenticate via a 401.
""" |
if request.headers.get('Authorization') or request.args.get('access_token'):
realm = 'Bearer realm="%s", error="invalid_token"' % __package__
else:
realm = 'Bearer realm="%s"' % __package__
resp = Response(None, 401, {'WWW-Authenticate': realm})
abort(401, description='Please provide proper credentials', response=resp) |
<SYSTEM_TASK:>
This function is called when a token is sent through the access_token
<END_TASK>
<USER_TASK:>
Description:
def check_token(self, token, allowed_roles, resource, method):
"""
This function is called when a token is sent through the access_token
parameter or the Authorization header, as specified in the OAuth 2 specification.
The provided token is validated with the JWT_SECRET defined in the Eve configuration.
The token issuer (iss claim) must be the one specified by JWT_ISSUER and the audience
(aud claim) must be one of the value(s) defined by the either the "audiences" resource
parameter or the global JWT_AUDIENCES configuration.
If JWT_ROLES_CLAIM is defined and a claim by that name is present in the token, roles
are checked using this claim.
If a JWT_SCOPE_CLAIM is defined and a claim by that name is present in the token, the
claim value is checked, and if "viewer" is present, only GET and HEAD methods will be
allowed. The scope name is then added to the list of roles with the scope: prefix.
If the validation succeeds, the claims are stored and accessible through the
get_authen_claims() method.
""" |
resource_conf = config.DOMAIN[resource]
audiences = resource_conf.get('audiences', config.JWT_AUDIENCES)
return self._perform_verification(token, audiences, allowed_roles) |
<SYSTEM_TASK:>
Decorator for functions that will be protected with token authentication.
<END_TASK>
<USER_TASK:>
Description:
def requires_token(self, audiences=None, allowed_roles=None):
"""
Decorator for functions that will be protected with token authentication.
Token must be provided either through the access_token parameter or the Authorization
header.
See check_token() method for further details.
""" |
def requires_token_wrapper(f):
@wraps(f)
def decorated(*args, **kwargs):
try:
token = request.args['access_token']
except KeyError:
token = request.headers.get('Authorization', '').partition(' ')[2]
if not self._perform_verification(token, audiences, allowed_roles):
abort(401)
return f(*args, **kwargs)
return decorated
return requires_token_wrapper |
<SYSTEM_TASK:>
Given the name of a library, load it.
<END_TASK>
<USER_TASK:>
Description:
def load_library(self,libname):
"""Given the name of a library, load it.""" |
paths = self.getpaths(libname)
for path in paths:
if os.path.exists(path):
return self.load(path)
raise ImportError("%s not found." % libname) |
<SYSTEM_TASK:>
Return a list of paths where the library might be found.
<END_TASK>
<USER_TASK:>
Description:
def getpaths(self,libname):
"""Return a list of paths where the library might be found.""" |
if os.path.isabs(libname):
yield libname
else:
# FIXME / TODO return '.' and os.path.dirname(__file__)
for path in self.getplatformpaths(libname):
yield path
path = ctypes.util.find_library(libname)
if path: yield path |
<SYSTEM_TASK:>
Serializes a python object as JSON
<END_TASK>
<USER_TASK:>
Description:
def to_json(content, indent=None):
"""
Serializes a python object as JSON
This method uses the DjangoJSONEncoder to ensure that python objects
such as Decimal objects are properly serialized. It can also serialize
Django QuerySet objects.
""" |
if isinstance(content, QuerySet):
json_serializer = serializers.get_serializer('json')()
serialized_content = json_serializer.serialize(content, ensure_ascii=False, indent=indent)
else:
try:
serialized_content = json.dumps(content, cls=DecimalEncoder, ensure_ascii=False, indent=indent)
except TypeError:
# Fix for Django 1.5
serialized_content = json.dumps(content, ensure_ascii=False, indent=indent)
return serialized_content |
<SYSTEM_TASK:>
Serializes a python object as HTML
<END_TASK>
<USER_TASK:>
Description:
def to_html(data):
"""
Serializes a python object as HTML
This method uses the to_json method to turn the given data object into
formatted JSON that is displayed in an HTML page. If pygments is installed,
syntax highlighting will also be applied to the JSON.
""" |
base_html_template = Template('''
<html>
<head>
{% if style %}
<style type="text/css">
{{ style }}
</style>
{% endif %}
</head>
<body>
{% if style %}
{{ body|safe }}
{% else %}
<pre><code>{{ body }}</code></pre>
{% endif %}
</body>
</html>
''')
code = to_json(data, indent=4)
if PYGMENTS_INSTALLED:
c = Context({
'body': highlight(code, JSONLexer(), HtmlFormatter()),
'style': HtmlFormatter().get_style_defs('.highlight')
})
html = base_html_template.render(c)
else:
c = Context({'body': code})
html = base_html_template.render(c)
return html |
<SYSTEM_TASK:>
Serializes a python object as plain text
<END_TASK>
<USER_TASK:>
Description:
def to_text(data):
"""
Serializes a python object as plain text
If the data can be serialized as JSON, this method will use the to_json
method to format the data, otherwise the data is returned as is.
""" |
try:
serialized_content = to_json(data, indent=4)
except Exception:
serialized_content = data
return serialized_content |
<SYSTEM_TASK:>
Requires that the user be authenticated either by a signature or by
<END_TASK>
<USER_TASK:>
Description:
def auth_required(secret_key_func):
"""
Requires that the user be authenticated either by a signature or by
being actively logged in.
""" |
def actual_decorator(obj):
def test_func(request, *args, **kwargs):
secret_key = secret_key_func(request, *args, **kwargs)
return validate_signature(request, secret_key) or request.user.is_authenticated()
decorator = request_passes_test(test_func)
return wrap_object(obj, decorator)
return actual_decorator |
<SYSTEM_TASK:>
Requires that the user be logged in to gain access to the resource
<END_TASK>
<USER_TASK:>
Description:
def login_required(obj):
"""
Requires that the user be logged in to gain access to the resource
at the specified URI.
""" |
decorator = request_passes_test(lambda r, *args, **kwargs: r.user.is_authenticated())
return wrap_object(obj, decorator) |
<SYSTEM_TASK:>
Requires that the user be logged in AND be set as a superuser
<END_TASK>
<USER_TASK:>
Description:
def admin_required(obj):
"""
Requires that the user be logged in AND be set as a superuser
""" |
decorator = request_passes_test(lambda r, *args, **kwargs: r.user.is_superuser)
return wrap_object(obj, decorator) |
<SYSTEM_TASK:>
Requires that the request contain a valid signature to gain access
<END_TASK>
<USER_TASK:>
Description:
def signature_required(secret_key_func):
"""
Requires that the request contain a valid signature to gain access
to a specified resource.
""" |
def actual_decorator(obj):
def test_func(request, *args, **kwargs):
secret_key = secret_key_func(request, *args, **kwargs)
return validate_signature(request, secret_key)
decorator = request_passes_test(test_func)
return wrap_object(obj, decorator)
return actual_decorator |
<SYSTEM_TASK:>
Validates the signature associated with the given request.
<END_TASK>
<USER_TASK:>
Description:
def validate_signature(request, secret_key):
"""
Validates the signature associated with the given request.
""" |
# Extract the request parameters according to the HTTP method
data = request.GET.copy()
if request.method != 'GET':
message_body = getattr(request, request.method, {})
data.update(message_body)
# Make sure the request contains a signature
if data.get('sig', False):
sig = data['sig']
del data['sig']
else:
return False
# Make sure the request contains a timestamp
if data.get('t', False):
timestamp = int(data.get('t', False))
del data['t']
else:
return False
# Make sure the signature has not expired
local_time = datetime.utcnow()
remote_time = datetime.utcfromtimestamp(timestamp)
# Use the absolute time difference so the check still works if the client
# clock is slightly ahead of the server's; the acceptable window is +/- 5 minutes.
if local_time > remote_time:
delta = local_time - remote_time
else:
delta = remote_time - local_time
if delta.seconds > 5 * 60: # If the signature is older than 5 minutes, it's invalid
return False
# Make sure the signature is valid
return sig == calculate_signature(secret_key, data, timestamp) |
<SYSTEM_TASK:>
Checks to see if a session currently exists in DynamoDB.
<END_TASK>
<USER_TASK:>
Description:
def exists(self, session_key):
"""
Checks to see if a session currently exists in DynamoDB.
:rtype: bool
:returns: ``True`` if a session with the given key exists in the DB,
``False`` if not.
""" |
response = self.table.get_item(
Key={'session_key': session_key},
ConsistentRead=ALWAYS_CONSISTENT)
if 'Item' in response:
return True
else:
return False |
<SYSTEM_TASK:>
Saves the current session data to the database.
<END_TASK>
<USER_TASK:>
Description:
def save(self, must_create=False):
"""
Saves the current session data to the database.
:keyword bool must_create: If ``True``, a ``CreateError`` exception
will be raised if the saving operation doesn't create a *new* entry
(as opposed to possibly updating an existing entry).
:raises: ``CreateError`` if ``must_create`` is ``True`` and a session
with the current session key already exists.
""" |
# If the save method is called with must_create equal to True, I'm
# setting self._session_key equal to None and when
# self.get_or_create_session_key is called the new
# session_key will be created.
if must_create:
self._session_key = None
self._get_or_create_session_key()
update_kwargs = {
'Key': {'session_key': self.session_key},
}
attribute_names = {'#data': 'data'}
attribute_values = {
':data': self.encode(self._get_session(no_load=must_create))
}
set_updates = ['#data = :data']
if must_create:
# Set condition to ensure a session with the same key doesn't exist
update_kwargs['ConditionExpression'] = \
DynamoConditionAttr('session_key').not_exists()
attribute_values[':created'] = int(time.time())
set_updates.append('created = :created')
update_kwargs['UpdateExpression'] = 'SET ' + ','.join(set_updates)
update_kwargs['ExpressionAttributeValues'] = attribute_values
update_kwargs['ExpressionAttributeNames'] = attribute_names
try:
self.table.update_item(**update_kwargs)
except ClientError as e:
error_code = e.response['Error']['Code']
if error_code == 'ConditionalCheckFailedException':
raise CreateError
raise |
<SYSTEM_TASK:>
Deletes the current session, or the one specified in ``session_key``.
<END_TASK>
<USER_TASK:>
Description:
def delete(self, session_key=None):
"""
Deletes the current session, or the one specified in ``session_key``.
:keyword str session_key: Optionally, override the session key
to delete.
""" |
if session_key is None:
if self.session_key is None:
return
session_key = self.session_key
self.table.delete_item(Key={'session_key': session_key}) |
<SYSTEM_TASK:>
Removes the current session data from the database and regenerates the
<END_TASK>
<USER_TASK:>
Description:
def flush(self):
"""
Removes the current session data from the database and regenerates the
key.
""" |
self.clear()
self.delete(self.session_key)
self.create() |
<SYSTEM_TASK:>
Decorates the given object with the decorator function.
<END_TASK>
<USER_TASK:>
Description:
def wrap_object(obj, decorator):
"""
Decorates the given object with the decorator function.
If obj is a method, the method is decorated with the decorator function
and returned. If obj is a class (i.e., a class based view), the methods
in the class corresponding to HTTP methods will be decorated and the
resultant class object will be returned.
""" |
actual_decorator = method_decorator(decorator)
if inspect.isfunction(obj):
wrapped_obj = actual_decorator(obj)
update_wrapper(wrapped_obj, obj, assigned=available_attrs(obj))
elif inspect.isclass(obj):
for method_name in obj.http_method_names:
if hasattr(obj, method_name):
method = getattr(obj, method_name)
wrapped_method = actual_decorator(method)
update_wrapper(wrapped_method, method, assigned=available_attrs(method))
setattr(obj, method_name, wrapped_method)
wrapped_obj = obj
else:
raise TypeError("received an object of type '{0}' expected 'function' or 'classobj'.".format(type(obj)))
return wrapped_obj |
<SYSTEM_TASK:>
Return all available versions for the given package name.
<END_TASK>
<USER_TASK:>
Description:
def get_package_versions(self, name):
"""
Return all available versions for the given package name.
name : str
Name of the package
""" |
package_data = self._packages.get(name)
versions = []
if package_data:
versions = sort_versions(list(package_data.get('versions', [])))
return versions |
<SYSTEM_TASK:>
Return absolute path for configuration file with specified filename.
<END_TASK>
<USER_TASK:>
Description:
def get_conf_path(filename=None):
"""Return absolute path for configuration file with specified filename.""" |
conf_dir = osp.join(get_home_dir(), '.condamanager')
if not osp.isdir(conf_dir):
os.mkdir(conf_dir)
if filename is None:
return conf_dir
else:
return osp.join(conf_dir, filename) |
<SYSTEM_TASK:>
Sort a list of version number strings.
<END_TASK>
<USER_TASK:>
Description:
def sort_versions(versions=(), reverse=False, sep=u'.'):
"""Sort a list of version number strings.
This function ensures that sorting based on version numbers is
performed correctly even when versions include alpha, dev, rc1, etc.
""" |
if not versions:
return []
digits = u'0123456789'
def toint(x):
try:
n = int(x)
except:
n = x
return n
versions = list(versions)
new_versions, alpha, sizes = [], set(), set()
for item in versions:
it = item.split(sep)
temp = []
for i in it:
x = toint(i)
if not isinstance(x, int):
x = u(x)
middle = x.lstrip(digits).rstrip(digits)
tail = toint(x.lstrip(digits).replace(middle, u''))
head = toint(x.rstrip(digits).replace(middle, u''))
middle = toint(middle)
res = [head, middle, tail]
while u'' in res:
res.remove(u'')
for r in res:
if is_unicode(r):
alpha.add(r)
else:
res = [x]
temp += res
sizes.add(len(temp))
new_versions.append(temp)
# Replace letters found with negative numbers so alpha parts sort before numeric ones
replace_dic = {}
alpha = sorted(alpha, reverse=True)
if len(alpha):
replace_dic = dict(zip(alpha, list(range(-1, -(len(alpha)+1), -1))))
# Complete with zeros based on longest item and replace alphas with number
nmax = max(sizes)
for i in range(len(new_versions)):
item = []
for z in new_versions[i]:
if z in replace_dic:
item.append(replace_dic[z])
else:
item.append(z)
nzeros = nmax - len(item)
item += [0]*nzeros
item += [versions[i]]
new_versions[i] = item
new_versions = sorted(new_versions, reverse=reverse)
return [n[-1] for n in new_versions] |
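A minimal usage sketch with hypothetical version strings, assuming the module's u()/is_unicode() helpers are available alongside sort_versions. Tracing the algorithm above, alphabetic fragments such as 'dev' and 'rc' are mapped to negative numbers, so pre-releases should sort before the corresponding final release:

print(sort_versions(['1.0.0', '1.0.0rc1', '1.0.0dev', '1.0.1', '0.9']))
# expected: ['0.9', '1.0.0dev', '1.0.0rc1', '1.0.0', '1.0.1']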
<SYSTEM_TASK:>
write a file and create all needed directories
<END_TASK>
<USER_TASK:>
Description:
def write_file(fname_parts, content):
""" write a file and create all needed directories """ |
fname_parts = [str(part) for part in fname_parts]
# try to create the directory
if len(fname_parts) > 1:
try:
os.makedirs(os.path.join(*fname_parts[:-1]))
except OSError:
pass
# write file
fhandle = open(os.path.join(*fname_parts), "w")
fhandle.write(content)
fhandle.close() |
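A minimal usage sketch (hypothetical path parts): all but the last element are treated as directories and created on demand, and the last element becomes the file name:

write_file(['build', 'reports', 'summary.txt'], 'hello\n')
# creates ./build/reports/ if missing, then writes ./build/reports/summary.txt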
<SYSTEM_TASK:>
Removes the filter function associated with name, if it exists.
<END_TASK>
<USER_TASK:>
Description:
def remove_filter_function(self, name):
"""Removes the filter function associated with name, if it exists.
name : hashable object
""" |
if name in self._filter_functions.keys():
del self._filter_functions[name]
self.invalidateFilter() |
<SYSTEM_TASK:>
Return data_files in a platform dependent manner
<END_TASK>
<USER_TASK:>
Description:
def get_data_files():
"""Return data_files in a platform dependent manner""" |
if sys.platform.startswith('linux'):
if PY3:
data_files = [('share/applications',
['scripts/condamanager3.desktop']),
('share/pixmaps',
['img_src/condamanager3.png'])]
else:
data_files = [('share/applications',
['scripts/condamanager.desktop']),
('share/pixmaps',
['img_src/condamanager.png'])]
elif os.name == 'nt':
data_files = [('scripts', ['img_src/conda-manager.ico'])]
else:
data_files = []
return data_files |
<SYSTEM_TASK:>
Function to encode a text.
<END_TASK>
<USER_TASK:>
Description:
def encode(text, orig_coding):
"""
Function to encode a text.
@param text text to encode (string)
@param orig_coding type of the original coding (string)
@return encoded text and encoding
""" |
if orig_coding == 'utf-8-bom':
return BOM_UTF8 + text.encode("utf-8"), 'utf-8-bom'
# Try declared coding spec
coding = get_coding(text)
if coding:
try:
return text.encode(coding), coding
except (UnicodeError, LookupError):
raise RuntimeError("Incorrect encoding (%s)" % coding)
if orig_coding and (orig_coding.endswith('-default') or
orig_coding.endswith('-guessed')):
coding = orig_coding.replace("-default", "")
coding = coding.replace("-guessed", "")
try:
return text.encode(coding), coding
except (UnicodeError, LookupError):
pass
# Try saving as ASCII
try:
return text.encode('ascii'), 'ascii'
except UnicodeError:
pass
# Save as UTF-8 without BOM
return text.encode('utf-8'), 'utf-8' |
<SYSTEM_TASK:>
Return pycrypto's AES mode, raise exception if not supported
<END_TASK>
<USER_TASK:>
Description:
def get_aes_mode(mode):
"""Return pycrypto's AES mode, raise exception if not supported""" |
aes_mode_attr = "MODE_{}".format(mode.upper())
try:
aes_mode = getattr(AES, aes_mode_attr)
except AttributeError:
raise Exception(
"Pycrypto/pycryptodome does not seem to support {}. ".format(aes_mode_attr) +
"If you use pycrypto, you need a version >= 2.7a1 (or a special branch)."
)
return aes_mode |
<SYSTEM_TASK:>
Split the proxy conda configuration to be used by the proxy factory.
<END_TASK>
<USER_TASK:>
Description:
def process_proxy_servers(proxy_settings):
"""Split the proxy conda configuration to be used by the proxy factory.""" |
proxy_settings_dic = {}
for key in proxy_settings:
proxy = proxy_settings[key]
proxy_config = [m.groupdict() for m in PROXY_RE.finditer(proxy)]
if proxy_config:
proxy_config = proxy_config[0]
host_port = proxy_config.pop('host_port')
if ':' in host_port:
host, port = host_port.split(':')
else:
host, port = host_port, None
proxy_config['host'] = host
proxy_config['port'] = int(port) if port else None
proxy_settings_dic[key] = proxy_config
proxy_config['full'] = proxy_settings[key]
return proxy_settings_dic |
<SYSTEM_TASK:>
Return the proxy servers available.
<END_TASK>
<USER_TASK:>
Description:
def proxy_servers(self):
"""
Return the proxy servers available.
Environment variables are searched first and then updated with values from
the condarc config file.
""" |
proxy_servers = {}
if self._load_rc_func is None:
return proxy_servers
else:
HTTP_PROXY = os.environ.get('HTTP_PROXY')
HTTPS_PROXY = os.environ.get('HTTPS_PROXY')
if HTTP_PROXY:
proxy_servers['http'] = HTTP_PROXY
if HTTPS_PROXY:
proxy_servers['https'] = HTTPS_PROXY
proxy_servers_conf = self._load_rc_func().get('proxy_servers', {})
proxy_servers.update(proxy_servers_conf)
return proxy_servers |
<SYSTEM_TASK:>
Create a Network proxy for the given proxy settings.
<END_TASK>
<USER_TASK:>
Description:
def _create_proxy(proxy_setting):
"""Create a Network proxy for the given proxy settings.""" |
proxy = QNetworkProxy()
proxy_scheme = proxy_setting['scheme']
proxy_host = proxy_setting['host']
proxy_port = proxy_setting['port']
proxy_username = proxy_setting['username']
proxy_password = proxy_setting['password']
proxy_scheme_host = '{0}://{1}'.format(proxy_scheme, proxy_host)
proxy.setType(QNetworkProxy.HttpProxy)
if proxy_scheme_host:
# proxy.setHostName(proxy_scheme_host) # does not work with scheme
proxy.setHostName(proxy_host)
if proxy_port:
proxy.setPort(proxy_port)
if proxy_username:
proxy.setUser(proxy_username)
if proxy_password:
proxy.setPassword(proxy_password)
return proxy |
<SYSTEM_TASK:>
Callback for download once the request has finished.
<END_TASK>
<USER_TASK:>
Description:
def _request_finished(self, reply):
"""Callback for download once the request has finished.""" |
url = to_text_string(reply.url().toEncoded(), encoding='utf-8')
if url in self._paths:
path = self._paths[url]
if url in self._workers:
worker = self._workers[url]
if url in self._head_requests:
error = reply.error()
# print(url, error)
if error:
logger.error(str(('Head Reply Error:', error)))
worker.sig_download_finished.emit(url, path)
worker.sig_finished.emit(worker, path, error)
return
self._head_requests.pop(url)
start_download = not bool(error)
header_pairs = reply.rawHeaderPairs()
headers = {}
for hp in header_pairs:
headers[to_text_string(hp[0]).lower()] = to_text_string(hp[1])
total_size = int(headers.get('content-length', 0))
# Check if file exists
if os.path.isfile(path):
file_size = os.path.getsize(path)
# Check if existing file matches size of requested file
start_download = file_size != total_size
if start_download:
# File sizes don't match, hence download the file
qurl = QUrl(url)
request = QNetworkRequest(qurl)
self._get_requests[url] = request
reply = self._manager.get(request)
error = reply.error()
if error:
logger.error(str(('Reply Error:', error)))
reply.downloadProgress.connect(
lambda r, t, w=worker: self._progress(r, t, w))
else:
# File sizes match; don't download the file again
worker.finished = True
worker.sig_download_finished.emit(url, path)
worker.sig_finished.emit(worker, path, None)
elif url in self._get_requests:
data = reply.readAll()
self._save(url, path, data) |
<SYSTEM_TASK:>
Return download progress.
<END_TASK>
<USER_TASK:>
Description:
def _progress(bytes_received, bytes_total, worker):
"""Return download progress.""" |
worker.sig_download_progress.emit(
worker.url, worker.path, bytes_received, bytes_total) |
<SYSTEM_TASK:>
Download url and save data to path.
<END_TASK>
<USER_TASK:>
Description:
def download(self, url, path):
"""Download url and save data to path.""" |
# original_url = url
# print(url)
qurl = QUrl(url)
url = to_text_string(qurl.toEncoded(), encoding='utf-8')
logger.debug(str((url, path)))
if url in self._workers:
if not self._workers[url].finished:
    return self._workers[url]
worker = DownloadWorker(url, path)
# Check download folder exists
folder = os.path.dirname(os.path.abspath(path))
if not os.path.isdir(folder):
os.makedirs(folder)
request = QNetworkRequest(qurl)
self._head_requests[url] = request
self._paths[url] = path
self._workers[url] = worker
self._manager.head(request)
self._timer.start()
return worker |
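
A hedged usage sketch: `manager` is assumed to be an instance of the Qt download-manager class these methods belong to, a Qt event loop is assumed to be running, and the URL and target path are examples. The lambda signatures match the `emit` calls shown in `_request_finished` and `_progress`.

worker = manager.download('https://example.com/files/archive.tar.bz2',
                          '/tmp/archive.tar.bz2')
worker.sig_download_progress.connect(
    lambda url, path, received, total: print(received, '/', total))
worker.sig_download_finished.connect(
    lambda url, path: print('saved', path))
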
<SYSTEM_TASK:>
Start the next threaded worker in the queue.
<END_TASK>
<USER_TASK:>
Description:
def _start(self):
"""Start the next threaded worker in the queue.""" |
if len(self._queue) == 1:
thread = self._queue.popleft()
thread.start()
self._timer.start() |
<SYSTEM_TASK:>
Create a new worker instance.
<END_TASK>
<USER_TASK:>
Description:
def _create_worker(self, method, *args, **kwargs):
"""Create a new worker instance.""" |
thread = QThread()
worker = RequestsDownloadWorker(method, args, kwargs)
worker.moveToThread(thread)
worker.sig_finished.connect(self._start)
self._sig_download_finished.connect(worker.sig_download_finished)
self._sig_download_progress.connect(worker.sig_download_progress)
worker.sig_finished.connect(thread.quit)
thread.started.connect(worker.start)
self._queue.append(thread)
self._threads.append(thread)
self._workers.append(worker)
self._start()
return worker |
<SYSTEM_TASK:>
Download file given by url and save it to path.
<END_TASK>
<USER_TASK:>
Description:
def download(self, url, path=None, force=False):
"""Download file given by url and save it to path.""" |
logger.debug(str((url, path, force)))
method = self._download
return self._create_worker(method, url, path=path, force=force) |
<SYSTEM_TASK:>
Terminate all workers and threads.
<END_TASK>
<USER_TASK:>
Description:
def terminate(self):
"""Terminate all workers and threads.""" |
for t in self._threads:
t.quit()
self._threads = []
self._workers = [] |
<SYSTEM_TASK:>
Check if anaconda api url is valid.
<END_TASK>
<USER_TASK:>
Description:
def is_valid_api_url(self, url, non_blocking=True):
"""Check if anaconda api url is valid.""" |
logger.debug(str((url)))
if non_blocking:
method = self._is_valid_api_url
return self._create_worker(method, url)
else:
return self._is_valid_api_url(url=url) |
<SYSTEM_TASK:>
Check if a conda channel is valid.
<END_TASK>
<USER_TASK:>
Description:
def is_valid_channel(self,
channel,
conda_url='https://conda.anaconda.org',
non_blocking=True):
"""Check if a conda channel is valid.""" |
logger.debug(str((channel, conda_url)))
if non_blocking:
method = self._is_valid_channel
return self._create_worker(method, channel, conda_url)
else:
return self._is_valid_channel(channel, conda_url=conda_url) |
<SYSTEM_TASK:>
Return the number of bytes n in a more human-readable form.
<END_TASK>
<USER_TASK:>
Description:
def human_bytes(n):
"""
Return the number of bytes n in a more human-readable form.
""" |
if n < 1024:
return '%d B' % n
k = n/1024
if k < 1024:
return '%d KB' % round(k)
m = k/1024
if m < 1024:
return '%.1f MB' % m
g = m/1024
return '%.2f GB' % g |
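
A quick check of the thresholds above (sample values only):

print(human_bytes(512))           # '512 B'
print(human_bytes(2048))          # '2 KB'
print(human_bytes(5 * 1024**2))   # '5.0 MB'
print(human_bytes(3 * 1024**3))   # '3.00 GB'
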
<SYSTEM_TASK:>
Remove references of inactive workers periodically.
<END_TASK>
<USER_TASK:>
Description:
def _clean(self):
"""Remove references of inactive workers periodically.""" |
if self._workers:
self._workers = [w for w in self._workers if not w.is_finished()]
else:
self._current_worker = None
self._timer.stop() |
<SYSTEM_TASK:>
Call conda with the list of extra arguments, and return the worker.
<END_TASK>
<USER_TASK:>
Description:
def _call_conda(self, extra_args, abspath=True, parse=False,
callback=None):
"""
Call conda with the list of extra arguments, and return the worker.
The result can be forced by calling worker.communicate(), which returns
the tuple (stdout, stderr).
""" |
if abspath:
if sys.platform == 'win32':
python = join(self.ROOT_PREFIX, 'python.exe')
conda = join(self.ROOT_PREFIX, 'Scripts',
'conda-script.py')
else:
python = join(self.ROOT_PREFIX, 'bin/python')
conda = join(self.ROOT_PREFIX, 'bin/conda')
cmd_list = [python, conda]
else:
# Just use whatever conda is on the path
cmd_list = ['conda']
cmd_list.extend(extra_args)
process_worker = ProcessWorker(cmd_list, parse=parse,
callback=callback)
process_worker.sig_finished.connect(self._start)
self._queue.append(process_worker)
self._start()
return process_worker |
<SYSTEM_TASK:>
Setup install commands for conda.
<END_TASK>
<USER_TASK:>
Description:
def _setup_install_commands_from_kwargs(kwargs, keys=tuple()):
"""Setup install commands for conda.""" |
cmd_list = []
if kwargs.get('override_channels', False) and 'channel' not in kwargs:
raise TypeError('conda search: override_channels requires channel')
if 'env' in kwargs:
cmd_list.extend(['--name', kwargs.pop('env')])
if 'prefix' in kwargs:
cmd_list.extend(['--prefix', kwargs.pop('prefix')])
if 'channel' in kwargs:
channel = kwargs.pop('channel')
if isinstance(channel, str):
cmd_list.extend(['--channel', channel])
else:
cmd_list.append('--channel')
cmd_list.extend(channel)
for key in keys:
if key in kwargs and kwargs[key]:
cmd_list.append('--' + key.replace('_', '-'))
return cmd_list |
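
A worked example of the flag expansion above; the prefix, channels, and keys are illustrative. Note that a list-valued `channel` produces a single `--channel` flag followed by all channel names.

kwargs = {'prefix': '/opt/envs/test',            # made-up prefix
          'channel': ['conda-forge', 'bioconda'],
          'dry_run': True, 'force': False}
cmd = _setup_install_commands_from_kwargs(kwargs, keys=('dry_run', 'force'))
print(cmd)
# ['--prefix', '/opt/envs/test', '--channel', 'conda-forge', 'bioconda', '--dry-run']
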
<SYSTEM_TASK:>
Return a list of absolute paths to the prefixes of existing environments.
<END_TASK>
<USER_TASK:>
Description:
def get_envs(self, log=True):
"""Return environment list of absolute path to their prefixes.""" |
if log:
logger.debug('')
# return self._call_and_parse(['info', '--json'],
# callback=lambda o, e: o['envs'])
envs = os.listdir(os.sep.join([self.ROOT_PREFIX, 'envs']))
envs = [os.sep.join([self.ROOT_PREFIX, 'envs', i]) for i in envs]
valid_envs = [e for e in envs if os.path.isdir(e) and
self.environment_exists(prefix=e)]
return valid_envs |
<SYSTEM_TASK:>
Return full prefix path of environment defined by `name`.
<END_TASK>
<USER_TASK:>
Description:
def get_prefix_envname(self, name, log=False):
"""Return full prefix path of environment defined by `name`.""" |
prefix = None
if name == 'root':
prefix = self.ROOT_PREFIX
# envs, error = self.get_envs().communicate()
envs = self.get_envs()
for p in envs:
if basename(p) == name:
prefix = p
return prefix |
<SYSTEM_TASK:>
Return set of canonical names of linked packages in `prefix`.
<END_TASK>
<USER_TASK:>
Description:
def linked(prefix):
"""Return set of canonical names of linked packages in `prefix`.""" |
logger.debug(str(prefix))
if not isdir(prefix):
return set()
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
# We might have nothing in linked (and no conda-meta directory)
return set()
return set(fn[:-5] for fn in os.listdir(meta_dir)
if fn.endswith('.json')) |
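
For reference, entries in `<prefix>/conda-meta` are JSON files named after the package's canonical name, so stripping the `.json` suffix (the `fn[:-5]` above) recovers it; the file name below is illustrative.

fn = 'numpy-1.11.3-py35_0.json'
print(fn[:-5])   # 'numpy-1.11.3-py35_0'  ->  name-version-build
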
<SYSTEM_TASK:>
Return a dictionary with configuration information.
<END_TASK>
<USER_TASK:>
Description:
def info(self, abspath=True):
"""
Return a dictionary with configuration information.
No guarantee is made about which keys exist. Therefore this function
should only be used for testing and debugging.
""" |
logger.debug(str(''))
return self._call_and_parse(['info', '--json'], abspath=abspath) |
<SYSTEM_TASK:>
Create new environment using conda-env via a yaml specification file.
<END_TASK>
<USER_TASK:>
Description:
def create_from_yaml(self, name, yamlfile):
"""
Create new environment using conda-env via a yaml specification file.
Unlike other methods, this calls conda-env; it requires a named
environment and uses the channels defined in rc files.
Parameters
----------
name : string
Environment name
yamlfile : string
Path to a yaml file with the package spec (as created by `conda env export`)
""" |
logger.debug(str((name, yamlfile)))
cmd_list = ['env', 'create', '-n', name, '-f', yamlfile, '--json']
return self._call_and_parse(cmd_list) |
<SYSTEM_TASK:>
Create an environment with a specified set of packages.
<END_TASK>
<USER_TASK:>
Description:
def create(self, name=None, prefix=None, pkgs=None, channels=None):
"""Create an environment with a specified set of packages.""" |
logger.debug(str((prefix, pkgs, channels)))
# TODO: Fix temporary hack
if (not pkgs or (not isinstance(pkgs, (list, tuple)) and
not is_text_string(pkgs))):
raise TypeError('must specify a list of one or more packages to '
'install into new environment')
cmd_list = ['create', '--yes', '--json', '--mkdir']
if name:
ref = name
search = [os.path.join(d, name) for d in
self.info().communicate()[0]['envs_dirs']]
cmd_list.extend(['--name', name])
elif prefix:
ref = prefix
search = [prefix]
cmd_list.extend(['--prefix', prefix])
else:
raise TypeError('must specify either an environment name or a '
'path for new environment')
if any(os.path.exists(prefix) for prefix in search):
raise CondaEnvExistsError('Conda environment {0} already '
'exists'.format(ref))
# TODO: Fix temporary hack
if isinstance(pkgs, (list, tuple)):
cmd_list.extend(pkgs)
elif is_text_string(pkgs):
cmd_list.extend(['--file', pkgs])
# TODO: Check if correct
if channels:
cmd_list.extend(['--override-channels'])
for channel in channels:
cmd_list.extend(['--channel'])
cmd_list.extend([channel])
return self._call_and_parse(cmd_list) |
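
A hedged usage sketch: `api` is assumed to be an instance of the conda interface these methods belong to, and the environment name, packages, and channel are invented. Per the `_call_conda` docstring, the returned worker can be forced synchronously with `communicate()`.

worker = api.create(name='scratch', pkgs=['python=3.9', 'numpy'],
                    channels=['conda-forge'])
stdout, stderr = worker.communicate()   # blocks until conda finishes
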
<SYSTEM_TASK:>
Adapt a channel to include the token of the logged-in user.
<END_TASK>
<USER_TASK:>
Description:
def parse_token_channel(self, channel, token):
"""
Adapt a channel to include the token of the logged-in user.
Ignore default channels.
""" |
if (token and channel not in self.DEFAULT_CHANNELS and
channel != 'defaults'):
url_parts = channel.split('/')
start = url_parts[:-1]
middle = 't/{0}'.format(token)
end = url_parts[-1]
token_channel = '{0}/{1}/{2}'.format('/'.join(start), middle, end)
return token_channel
else:
return channel |
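
The rewrite splices the token in front of the last URL segment; below is a standalone trace of the same split/join logic, with a made-up token.

channel = 'https://conda.anaconda.org/my-org'   # non-default channel
token = 'ab-123456'                             # made-up token
parts = channel.split('/')
print('{0}/t/{1}/{2}'.format('/'.join(parts[:-1]), token, parts[-1]))
# https://conda.anaconda.org/t/ab-123456/my-org
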
<SYSTEM_TASK:>
Install a set of packages into an environment by name or path.
<END_TASK>
<USER_TASK:>
Description:
def install(self, name=None, prefix=None, pkgs=None, dep=True,
channels=None, token=None):
"""
Install a set of packages into an environment by name or path.
If token is specified, the channels different from the defaults will
get the token appended.
""" |
logger.debug(str((prefix, pkgs, channels)))
# TODO: Fix temporary hack
if not pkgs or not isinstance(pkgs, (list, tuple, str)):
raise TypeError('must specify a list of one or more packages to '
'install into existing environment')
cmd_list = ['install', '--yes', '--json', '--force-pscheck']
if name:
cmd_list.extend(['--name', name])
elif prefix:
cmd_list.extend(['--prefix', prefix])
else:
# Just install into the current environment, whatever that is
pass
# TODO: Check if correct
if channels:
cmd_list.extend(['--override-channels'])
for channel in channels:
cmd_list.extend(['--channel'])
channel = self.parse_token_channel(channel, token)
cmd_list.extend([channel])
# TODO: Fix temporary hack
if isinstance(pkgs, (list, tuple)):
cmd_list.extend(pkgs)
elif isinstance(pkgs, str):
cmd_list.extend(['--file', pkgs])
if not dep:
cmd_list.extend(['--no-deps'])
return self._call_and_parse(cmd_list) |
<SYSTEM_TASK:>
Remove an environment entirely.
<END_TASK>
<USER_TASK:>
Description:
def remove_environment(self, name=None, path=None, **kwargs):
"""
Remove an environment entirely.
See ``remove``.
""" |
return self.remove(name=name, path=path, all=True, **kwargs) |
<SYSTEM_TASK:>
Clone the environment `clone` into `name` or `prefix`.
<END_TASK>
<USER_TASK:>
Description:
def clone_environment(self, clone, name=None, prefix=None, **kwargs):
"""Clone the environment `clone` into `name` or `prefix`.""" |
cmd_list = ['create', '--json']
if (name and prefix) or not (name or prefix):
raise TypeError("conda clone_environment: exactly one of `name` "
"or `path` required")
if name:
cmd_list.extend(['--name', name])
if prefix:
cmd_list.extend(['--prefix', prefix])
cmd_list.extend(['--clone', clone])
cmd_list.extend(
self._setup_install_commands_from_kwargs(
kwargs,
('dry_run', 'unknown', 'use_index_cache', 'use_local',
'no_pin', 'force', 'all', 'channel', 'override_channels',
'no_default_packages')))
return self._call_and_parse(cmd_list, abspath=kwargs.get('abspath',
True)) |
<SYSTEM_TASK:>
Setup config commands for conda.
<END_TASK>
<USER_TASK:>
Description:
def _setup_config_from_kwargs(kwargs):
"""Setup config commands for conda.""" |
cmd_list = ['--json', '--force']
if 'file' in kwargs:
cmd_list.extend(['--file', kwargs['file']])
if 'system' in kwargs:
cmd_list.append('--system')
return cmd_list |
<SYSTEM_TASK:>
Add a value to a key.
<END_TASK>
<USER_TASK:>
Description:
def config_add(self, key, value, **kwargs):
"""
Add a value to a key.
Returns a list of warnings Conda may have emitted.
""" |
cmd_list = ['config', '--add', key, value]
cmd_list.extend(self._setup_config_from_kwargs(kwargs))
return self._call_and_parse(
cmd_list,
abspath=kwargs.get('abspath', True),
callback=lambda o, e: o.get('warnings', [])) |
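
A hedged usage sketch (`api` is an assumed instance): the call below ends up running roughly `conda config --add channels conda-forge --json --force`, and the parse callback above reduces conda's JSON output to its `warnings` list.

worker = api.config_add('channels', 'conda-forge')
warnings, error = worker.communicate()   # assumption: communicate() yields the parsed result
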
<SYSTEM_TASK:>
Get dependency list for packages to be installed in an env.
<END_TASK>
<USER_TASK:>
Description:
def dependencies(self, name=None, prefix=None, pkgs=None, channels=None,
dep=True):
"""Get dependenciy list for packages to be installed in an env.""" |
if not pkgs or not isinstance(pkgs, (list, tuple)):
raise TypeError('must specify a list of one or more packages to '
'install into existing environment')
cmd_list = ['install', '--dry-run', '--json', '--force-pscheck']
if not dep:
cmd_list.extend(['--no-deps'])
if name:
cmd_list.extend(['--name', name])
elif prefix:
cmd_list.extend(['--prefix', prefix])
else:
pass
cmd_list.extend(pkgs)
# TODO: Check if correct
if channels:
cmd_list.extend(['--override-channels'])
for channel in channels:
cmd_list.extend(['--channel'])
cmd_list.extend([channel])
return self._call_and_parse(cmd_list) |
<SYSTEM_TASK:>
Check if an environment exists by 'name' or by 'prefix'.
<END_TASK>
<USER_TASK:>
Description:
def environment_exists(self, name=None, prefix=None, abspath=True,
log=True):
"""Check if an environment exists by 'name' or by 'prefix'.
If the query is by 'name', only the default conda environments directory
is searched.
""" |
if log:
logger.debug(str((name, prefix)))
if name and prefix:
raise TypeError("Exactly one of 'name' or 'prefix' is required.")
if name:
prefix = self.get_prefix_envname(name, log=log)
if prefix is None:
prefix = self.ROOT_PREFIX
return os.path.isdir(os.path.join(prefix, 'conda-meta')) |
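
A hedged usage sketch (`api` is an assumed instance; the paths are made up). A query by `name` only looks under the default environments directory, while a `prefix` query checks for a `conda-meta` folder at that exact path.

api.environment_exists(name='py39')                     # searches ROOT_PREFIX/envs
api.environment_exists(prefix='/opt/conda/envs/py39')   # checks <prefix>/conda-meta
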
<SYSTEM_TASK:>
Clean any conda lock in the system.
<END_TASK>
<USER_TASK:>
Description:
def clear_lock(self, abspath=True):
"""Clean any conda lock in the system.""" |
cmd_list = ['clean', '--lock', '--json']
return self._call_and_parse(cmd_list, abspath=abspath) |
<SYSTEM_TASK:>
Get installed package version in a given env.
<END_TASK>
<USER_TASK:>
Description:
def package_version(self, prefix=None, name=None, pkg=None, build=False):
"""Get installed package version in a given env.""" |
package_versions = {}
if name and prefix:
raise TypeError("Exactly one of 'name' or 'prefix' is required.")
if name:
prefix = self.get_prefix_envname(name)
if self.environment_exists(prefix=prefix):
for package in self.linked(prefix):
if pkg in package:
n, v, b = self.split_canonical_name(package)
if build:
package_versions[n] = '{0}={1}'.format(v, b)
else:
package_versions[n] = v
return package_versions.get(pkg) |
<SYSTEM_TASK:>
Load the conda configuration file.
<END_TASK>
<USER_TASK:>
Description:
def load_rc(self, path=None, system=False):
"""
Load the conda configuration file.
If both user and system configuration exists, user will be used.
""" |
if os.path.isfile(self.user_rc_path) and not system:
path = self.user_rc_path
elif os.path.isfile(self.sys_rc_path):
path = self.sys_rc_path
if not path or not os.path.isfile(path):
return {}
with open(path) as f:
return yaml.safe_load(f) or {} |
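
For illustration, a condarc file like the one below parses into a plain dictionary; here the same yaml call used above (the safe variant) is applied to a literal string instead of a file handle.

import yaml

condarc_text = """
channels:
  - conda-forge
  - defaults
proxy_servers:
  http: http://proxy.example.com:8080
"""
print(yaml.safe_load(condarc_text))
# {'channels': ['conda-forge', 'defaults'],
#  'proxy_servers': {'http': 'http://proxy.example.com:8080'}}
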
<SYSTEM_TASK:>
Return all the channel URLs defined in .condarc.
<END_TASK>
<USER_TASK:>
Description:
def get_condarc_channels(self,
normalize=False,
conda_url='https://conda.anaconda.org',
channels=None):
"""Return all the channel urls defined in .condarc.
If no condarc file is found, use the default channels.
The `default_channel_alias` key is ignored and only the anaconda client
`url` key is used.
""" |
# https://docs.continuum.io/anaconda-repository/configuration
# They can only exist on a system condarc
default_channels = self.load_rc(system=True).get('default_channels',
self.DEFAULT_CHANNELS)
normalized_channels = []
if channels is None:
condarc = self.load_rc()
channels = condarc.get('channels')
if channels is None:
channels = ['defaults']
if normalize:
template = '{0}/{1}' if conda_url[-1] != '/' else '{0}{1}'
for channel in channels:
if channel == 'defaults':
normalized_channels += default_channels
elif channel.startswith('http'):
normalized_channels.append(channel)
else:
# Append to the conda_url that comes from anaconda client
# default_channel_alias key is deliberately ignored
normalized_channels.append(template.format(conda_url,
channel))
channels = normalized_channels
return channels |
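
A standalone trace of the normalization step above: `defaults` expands to whatever `default_channels`/`DEFAULT_CHANNELS` holds, while a bare channel name is appended to the anaconda client URL (values below are illustrative).

conda_url = 'https://conda.anaconda.org'
template = '{0}/{1}' if conda_url[-1] != '/' else '{0}{1}'
print(template.format(conda_url, 'conda-forge'))
# https://conda.anaconda.org/conda-forge
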
<SYSTEM_TASK:>
Get pip location based on environment `name` or `prefix`.
<END_TASK>
<USER_TASK:>
Description:
def _pip_cmd(self, name=None, prefix=None):
"""Get pip location based on environment `name` or `prefix`.""" |
if (name and prefix) or not (name or prefix):
raise TypeError("conda pip: exactly one of 'name' ""or 'prefix' "
"required.")
if name and self.environment_exists(name=name):
prefix = self.get_prefix_envname(name)
if sys.platform == 'win32':
python = join(prefix, 'python.exe') # FIXME:
pip = join(prefix, 'pip.exe') # FIXME:
else:
python = join(prefix, 'bin/python')
pip = join(prefix, 'bin/pip')
cmd_list = [python, pip]
return cmd_list |
<SYSTEM_TASK:>
Get list of pip installed packages.
<END_TASK>
<USER_TASK:>
Description:
def pip_list(self, name=None, prefix=None, abspath=True):
"""Get list of pip installed packages.""" |
if (name and prefix) or not (name or prefix):
raise TypeError("conda pip: exactly one of 'name' ""or 'prefix' "
"required.")
if name:
prefix = self.get_prefix_envname(name)
pip_command = os.sep.join([prefix, 'bin', 'python'])
cmd_list = [pip_command, PIP_LIST_SCRIPT]
process_worker = ProcessWorker(cmd_list, pip=True, parse=True,
callback=self._pip_list,
extra_kwargs={'prefix': prefix})
process_worker.sig_finished.connect(self._start)
self._queue.append(process_worker)
self._start()
return process_worker |