Columns: text_prompt (string, 100 to 17.7k chars), code_prompt (string, 7 to 9.86k chars)
<SYSTEM_TASK:> create filesystem-safe places for url-keyed data to be stored <END_TASK> <USER_TASK:> Description: def chunk(url): """ create filesystem-safe places for url-keyed data to be stored """
    chunks = lambda l, n: [l[x: x+n] for x in xrange(0, len(l), n)]
    url_64 = base64.urlsafe_b64encode(url)
    return chunks(url_64, 255)
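A minimal usage sketch (Python 2, to match the snippet above; the URL is illustrative): the base64-encoded URL is split into segments of at most 255 characters, each safe to use as a path component.

    import os

    url = 'https://example.com/some/very/long/path?query=1'
    segments = chunk(url)                   # e.g. ['aHR0cHM6Ly9leGFtcGxlLmNvbS9zb21l...']
    storage_path = os.path.join(*segments)  # nest the segments as directory/file names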
<SYSTEM_TASK:> Given an input file containing nothing but styles, print out an <END_TASK> <USER_TASK:> Description: def main(filename): """ Given an input file containing nothing but styles, print out an unrolled list of declarations in cascade order. """
input = open(filename, 'r').read() declarations = cascadenik.stylesheet_declarations(input, is_merc=True) for dec in declarations: print dec.selector, print '{', print dec.property.name+':', if cascadenik.style.properties[dec.property.name] in (cascadenik.style.color, cascadenik.style.boolean, cascadenik.style.numbers): print str(dec.value.value)+';', elif cascadenik.style.properties[dec.property.name] is cascadenik.style.uri: print 'url("'+str(dec.value.value)+'");', elif cascadenik.style.properties[dec.property.name] is str: print '"'+str(dec.value.value)+'";', elif cascadenik.style.properties[dec.property.name] in (int, float) or type(cascadenik.style.properties[dec.property.name]) is tuple: print str(dec.value.value)+';', print '}' return 0
<SYSTEM_TASK:> Connect to socket. This should be run in a new thread. <END_TASK> <USER_TASK:> Description: def _connect(self): """Connect to socket. This should be run in a new thread."""
while self.protocol: _LOGGER.info('Trying to connect to %s', self.server_address) try: sock = socket.create_connection( self.server_address, self.reconnect_timeout) except socket.timeout: _LOGGER.error( 'Connecting to socket timed out for %s', self.server_address) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) time.sleep(self.reconnect_timeout) except OSError: _LOGGER.error( 'Failed to connect to socket at %s', self.server_address) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) time.sleep(self.reconnect_timeout) else: self.tcp_check_timer = time.time() self.tcp_disconnect_timer = time.time() transport = TCPTransport( sock, lambda: self.protocol, self._check_connection) poll_thread = threading.Thread(target=self._poll_queue) self._stop_event.clear() poll_thread.start() transport.start() transport.connect() return
<SYSTEM_TASK:> Connect to the socket. <END_TASK> <USER_TASK:> Description: def _connect(self): """Connect to the socket."""
try: while True: _LOGGER.info('Trying to connect to %s', self.server_address) try: yield from asyncio.wait_for( self.loop.create_connection( lambda: self.protocol, *self.server_address), self.reconnect_timeout, loop=self.loop) self.tcp_check_timer = time.time() self.tcp_disconnect_timer = time.time() self._check_connection() return except asyncio.TimeoutError: _LOGGER.error( 'Connecting to socket timed out for %s', self.server_address) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) yield from asyncio.sleep( self.reconnect_timeout, loop=self.loop) except OSError: _LOGGER.error( 'Failed to connect to socket at %s', self.server_address) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) yield from asyncio.sleep( self.reconnect_timeout, loop=self.loop) except asyncio.CancelledError: _LOGGER.debug( 'Connect attempt to %s cancelled', self.server_address)
<SYSTEM_TASK:> Return decorator to register item with a specific name. <END_TASK> <USER_TASK:> Description: def register(self, name): """Return decorator to register item with a specific name."""
    def decorator(func):
        """Register decorated function."""
        self[name] = func
        return func
    return decorator
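For illustration, a minimal sketch of how such a registry might be used; the Registry class below is hypothetical and only its register() method mirrors the code above.

    class Registry(dict):
        def register(self, name):
            def decorator(func):
                """Register decorated function."""
                self[name] = func
                return func
            return decorator

    handlers = Registry()

    @handlers.register('presentation')
    def handle_presentation(msg):
        return msg

    assert handlers['presentation'] is handle_presentation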
<SYSTEM_TASK:> Parse a mysensors command string. <END_TASK> <USER_TASK:> Description: def _parse_message_to_mqtt(self, data): """Parse a mysensors command string. Return a MQTT topic, payload and qos-level as a tuple. """
    msg = Message(data, self)
    payload = str(msg.payload)
    msg.payload = ''
    # prefix/node/child/type/ack/subtype : payload
    return ('{}/{}'.format(self._out_prefix, msg.encode('/'))[:-2],
            payload, msg.ack)
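For illustration only (the prefix and values are made up), a MySensors command string maps to an MQTT tuple roughly like this:

    # serial-style command:  '1;2;1;0;0;20.5\n'   (node;child;type;ack;subtype;payload)
    # resulting MQTT tuple:  ('mygateway1-out/1/2/1/0/0', '20.5', 0)
    #                          topic                       payload  qos (taken from ack)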
<SYSTEM_TASK:> Receive a MQTT message. <END_TASK> <USER_TASK:> Description: def recv(self, topic, payload, qos): """Receive a MQTT message. Call this method when a message is received from the MQTT broker. """
    data = self._parse_mqtt_to_message(topic, payload, qos)
    if data is None:
        return
    _LOGGER.debug('Receiving %s', data)
    self.add_job(self.logic, data)
<SYSTEM_TASK:> Publish a command string to the gateway via MQTT. <END_TASK> <USER_TASK:> Description: def send(self, message): """Publish a command string to the gateway via MQTT."""
    if not message:
        return
    topic, payload, qos = self._parse_message_to_mqtt(message)
    try:
        _LOGGER.debug('Publishing %s', message.strip())
        self._pub_callback(topic, payload, qos, self._retain)
    except Exception as exception:  # pylint: disable=broad-except
        _LOGGER.exception('Publish to %s failed: %s', topic, exception)
<SYSTEM_TASK:> Cast to the correct value on every assignment <END_TASK> <USER_TASK:> Description: def contribute_to_class(self, cls, name, virtual_only=False): """ Cast to the correct value on every assignment. """
    super(RegexField, self).contribute_to_class(cls, name, virtual_only)
    setattr(cls, name, CastOnAssignDescriptor(self))
<SYSTEM_TASK:> Make sure value is a string so it can run through django validators <END_TASK> <USER_TASK:> Description: def run_validators(self, value): """ Make sure value is a string so it can run through django validators """
    value = self.to_python(value)
    value = self.value_to_string(value)
    return super(RegexField, self).run_validators(value)
<SYSTEM_TASK:> Validate that value has hex format. <END_TASK> <USER_TASK:> Description: def validate_hex(value): """Validate that value has hex format."""
    try:
        binascii.unhexlify(value)
    except Exception:
        raise vol.Invalid(
            '{} is not of hex format'.format(value))
    return value
<SYSTEM_TASK:> Copy a message, optionally replace attributes with kwargs. <END_TASK> <USER_TASK:> Description: def copy(self, **kwargs): """Copy a message, optionally replace attributes with kwargs."""
    msg = Message(self.encode(), self.gateway)
    for key, val in kwargs.items():
        setattr(msg, key, val)
    return msg
<SYSTEM_TASK:> Modify and return message, replace attributes with kwargs. <END_TASK> <USER_TASK:> Description: def modify(self, **kwargs): """Modify and return message, replace attributes with kwargs."""
    for key, val in kwargs.items():
        setattr(self, key, val)
    return self
<SYSTEM_TASK:> Decode a message from command string. <END_TASK> <USER_TASK:> Description: def decode(self, data, delimiter=';'): """Decode a message from command string."""
    try:
        list_data = data.rstrip().split(delimiter)
        self.payload = list_data.pop()
        (self.node_id, self.child_id, self.type, self.ack,
         self.sub_type) = [int(f) for f in list_data]
    except ValueError:
        _LOGGER.warning('Error decoding message from gateway, '
                        'bad data received: %s', data.rstrip())
        raise
<SYSTEM_TASK:> Encode a command string from message. <END_TASK> <USER_TASK:> Description: def encode(self, delimiter=';'): """Encode a command string from message."""
    try:
        return delimiter.join([str(f) for f in [
            self.node_id, self.child_id, int(self.type), self.ack,
            int(self.sub_type), self.payload,
        ]]) + '\n'
    except ValueError:
        _LOGGER.error('Error encoding message to gateway')
<SYSTEM_TASK:> Load sensors from pickle file. <END_TASK> <USER_TASK:> Description: def _load_pickle(self, filename): """Load sensors from pickle file."""
    with open(filename, 'rb') as file_handle:
        self._sensors.update(pickle.load(file_handle))
<SYSTEM_TASK:> Load sensors safely from file. <END_TASK> <USER_TASK:> Description: def safe_load_sensors(self): """Load sensors safely from file."""
try: loaded = self._load_sensors() except (EOFError, ValueError): _LOGGER.error('Bad file contents: %s', self.persistence_file) loaded = False if not loaded: _LOGGER.warning('Trying backup file: %s', self.persistence_bak) try: if not self._load_sensors(self.persistence_bak): _LOGGER.warning('Failed to load sensors from file: %s', self.persistence_file) except (EOFError, ValueError): _LOGGER.error('Bad file contents: %s', self.persistence_file) _LOGGER.warning('Removing file: %s', self.persistence_file) os.remove(self.persistence_file)
<SYSTEM_TASK:> Perform action on specific file types. <END_TASK> <USER_TASK:> Description: def _perform_file_action(self, filename, action): """Perform action on specific file types. Dynamic dispatch function for performing actions on specific file types. """
    ext = os.path.splitext(filename)[1]
    try:
        func = getattr(self, '_{}_{}'.format(action, ext[1:]))
    except AttributeError:
        raise Exception('Unsupported file type {}'.format(ext[1:]))
    func(filename)
<SYSTEM_TASK:> Return the const module for the protocol_version. <END_TASK> <USER_TASK:> Description: def get_const(protocol_version): """Return the const module for the protocol_version."""
    path = next((
        CONST_VERSIONS[const_version]
        for const_version in sorted(CONST_VERSIONS, reverse=True)
        if parse_ver(protocol_version) >= parse_ver(const_version)
    ), 'mysensors.const_14')
    if path in LOADED_CONST:
        return LOADED_CONST[path]
    const = import_module(path)
    LOADED_CONST[path] = const  # Cache the module
    return const
<SYSTEM_TASK:> Unpack hex string into integers. <END_TASK> <USER_TASK:> Description: def fw_hex_to_int(hex_str, words): """Unpack hex string into integers. Use little-endian and unsigned int format. Specify number of words to unpack with argument words. """
    return struct.unpack('<{}H'.format(words), binascii.unhexlify(hex_str))
<SYSTEM_TASK:> Pack integers into hex string. <END_TASK> <USER_TASK:> Description: def fw_int_to_hex(*args): """Pack integers into hex string. Use little-endian and unsigned int format. """
    return binascii.hexlify(
        struct.pack('<{}H'.format(len(args)), *args)).decode('utf-8')
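A small round-trip sketch using the two helpers above (the values are arbitrary):

    hex_str = fw_int_to_hex(1, 2, 64)   # firmware type 1, version 2, 64 blocks
    print(hex_str)                      # '010002004000' -- three little-endian 16-bit words
    print(fw_hex_to_int(hex_str, 3))    # (1, 2, 64)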
<SYSTEM_TASK:> Check that firmware is valid and return dict with binary data. <END_TASK> <USER_TASK:> Description: def prepare_fw(bin_string): """Check that firmware is valid and return dict with binary data."""
    pads = len(bin_string) % 128  # 128 bytes per page for atmega328
    for _ in range(128 - pads):  # pad up to even 128 bytes
        bin_string += b'\xff'
    fware = {
        'blocks': int(len(bin_string) / FIRMWARE_BLOCK_SIZE),
        'crc': compute_crc(bin_string),
        'data': bin_string,
    }
    return fware
<SYSTEM_TASK:> Get firmware type, version and a dict holding binary data. <END_TASK> <USER_TASK:> Description: def _get_fw(self, msg, updates, req_fw_type=None, req_fw_ver=None): """Get firmware type, version and a dict holding binary data."""
fw_type = None fw_ver = None if not isinstance(updates, tuple): updates = (updates, ) for store in updates: fw_id = store.pop(msg.node_id, None) if fw_id is not None: fw_type, fw_ver = fw_id updates[-1][msg.node_id] = fw_id break if fw_type is None or fw_ver is None: _LOGGER.debug( 'Node %s is not set for firmware update', msg.node_id) return None, None, None if req_fw_type is not None and req_fw_ver is not None: fw_type, fw_ver = req_fw_type, req_fw_ver fware = self.firmware.get((fw_type, fw_ver)) if fware is None: _LOGGER.debug( 'No firmware of type %s and version %s found', fw_type, fw_ver) return None, None, None return fw_type, fw_ver, fware
<SYSTEM_TASK:> Respond to a firmware request. <END_TASK> <USER_TASK:> Description: def respond_fw(self, msg): """Respond to a firmware request."""
req_fw_type, req_fw_ver, req_blk = fw_hex_to_int(msg.payload, 3) _LOGGER.debug( 'Received firmware request with firmware type %s, ' 'firmware version %s, block index %s', req_fw_type, req_fw_ver, req_blk) fw_type, fw_ver, fware = self._get_fw( msg, (self.unstarted, self.started), req_fw_type, req_fw_ver) if fware is None: return None blk_data = fware['data'][ req_blk * FIRMWARE_BLOCK_SIZE: req_blk * FIRMWARE_BLOCK_SIZE + FIRMWARE_BLOCK_SIZE] msg = msg.copy(sub_type=self._const.Stream.ST_FIRMWARE_RESPONSE) msg.payload = fw_int_to_hex(fw_type, fw_ver, req_blk) # format blk_data into payload format msg.payload = msg.payload + binascii.hexlify(blk_data).decode('utf-8') return msg
<SYSTEM_TASK:> Respond to a firmware config request. <END_TASK> <USER_TASK:> Description: def respond_fw_config(self, msg): """Respond to a firmware config request."""
(req_fw_type, req_fw_ver, req_blocks, req_crc, bloader_ver) = fw_hex_to_int(msg.payload, 5) _LOGGER.debug( 'Received firmware config request with firmware type %s, ' 'firmware version %s, %s blocks, CRC %s, bootloader %s', req_fw_type, req_fw_ver, req_blocks, req_crc, bloader_ver) fw_type, fw_ver, fware = self._get_fw( msg, (self.requested, self.unstarted)) if fware is None: return None if fw_type != req_fw_type: _LOGGER.warning( 'Firmware type %s of update is not identical to existing ' 'firmware type %s for node %s', fw_type, req_fw_type, msg.node_id) _LOGGER.info( 'Updating node %s to firmware type %s version %s from type %s ' 'version %s', msg.node_id, fw_type, fw_ver, req_fw_type, req_fw_ver) msg = msg.copy(sub_type=self._const.Stream.ST_FIRMWARE_CONFIG_RESPONSE) msg.payload = fw_int_to_hex( fw_type, fw_ver, fware['blocks'], fware['crc']) return msg
<SYSTEM_TASK:> Start firmware update process for one or more node_id. <END_TASK> <USER_TASK:> Description: def make_update(self, nids, fw_type, fw_ver, fw_bin=None): """Start firmware update process for one or more node_id."""
try: fw_type, fw_ver = int(fw_type), int(fw_ver) except ValueError: _LOGGER.error( 'Firmware type %s or version %s not valid, ' 'please enter integers', fw_type, fw_ver) return if fw_bin is not None: fware = prepare_fw(fw_bin) self.firmware[fw_type, fw_ver] = fware if (fw_type, fw_ver) not in self.firmware: _LOGGER.error( 'No firmware of type %s and version %s found, ' 'please enter path to firmware in call', fw_type, fw_ver) return if not isinstance(nids, list): nids = [nids] for node_id in nids: if node_id not in self._sensors: continue for store in self.unstarted, self.started: store.pop(node_id, None) self.requested[node_id] = fw_type, fw_ver self._sensors[node_id].reboot = True
<SYSTEM_TASK:> Process a message before going back to smartsleep. <END_TASK> <USER_TASK:> Description: def handle_smartsleep(msg): """Process a message before going back to smartsleep."""
while msg.gateway.sensors[msg.node_id].queue: msg.gateway.add_job( str, msg.gateway.sensors[msg.node_id].queue.popleft()) for child in msg.gateway.sensors[msg.node_id].children.values(): new_child = msg.gateway.sensors[msg.node_id].new_state.get( child.id, ChildSensor(child.id, child.type, child.description)) msg.gateway.sensors[msg.node_id].new_state[child.id] = new_child for value_type, value in child.values.items(): new_value = new_child.values.get(value_type) if new_value is not None and new_value != value: msg.gateway.add_job( msg.gateway.sensors[msg.node_id].set_child_value, child.id, value_type, new_value)
<SYSTEM_TASK:> Process a req message. <END_TASK> <USER_TASK:> Description: def handle_req(msg): """Process a req message. This will return the value if it exists. If no value exists, nothing is returned. """
    if not msg.gateway.is_sensor(msg.node_id, msg.child_id):
        return None
    value = msg.gateway.sensors[msg.node_id].children[
        msg.child_id].values.get(msg.sub_type)
    if value is not None:
        return msg.copy(
            type=msg.gateway.const.MessageType.set, payload=value)
    return None
<SYSTEM_TASK:> Process an internal time request message. <END_TASK> <USER_TASK:> Description: def handle_time(msg): """Process an internal time request message."""
    return msg.copy(ack=0, payload=calendar.timegm(time.localtime()))
<SYSTEM_TASK:> Process an internal battery level message. <END_TASK> <USER_TASK:> Description: def handle_battery_level(msg): """Process an internal battery level message."""
    if not msg.gateway.is_sensor(msg.node_id):
        return None
    msg.gateway.sensors[msg.node_id].battery_level = msg.payload
    msg.gateway.alert(msg)
    return None
<SYSTEM_TASK:> Process an internal sketch name message. <END_TASK> <USER_TASK:> Description: def handle_sketch_name(msg): """Process an internal sketch name message."""
    if not msg.gateway.is_sensor(msg.node_id):
        return None
    msg.gateway.sensors[msg.node_id].sketch_name = msg.payload
    msg.gateway.alert(msg)
    return None
<SYSTEM_TASK:> Publish an MQTT message. <END_TASK> <USER_TASK:> Description: def publish(self, topic, payload, qos, retain): """Publish an MQTT message."""
    self._mqttc.publish(topic, payload, qos, retain)
<SYSTEM_TASK:> Connect to the serial port. This should be run in a new thread. <END_TASK> <USER_TASK:> Description: def _connect(self): """Connect to the serial port. This should be run in a new thread."""
while self.protocol: _LOGGER.info('Trying to connect to %s', self.port) try: ser = serial.serial_for_url( self.port, self.baud, timeout=self.timeout) except serial.SerialException: _LOGGER.error('Unable to connect to %s', self.port) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) time.sleep(self.reconnect_timeout) else: transport = serial.threaded.ReaderThread( ser, lambda: self.protocol) transport.daemon = False poll_thread = threading.Thread(target=self._poll_queue) self._stop_event.clear() poll_thread.start() transport.start() transport.connect() return
<SYSTEM_TASK:> Connect to the serial port. <END_TASK> <USER_TASK:> Description: def _connect(self): """Connect to the serial port."""
try: while True: _LOGGER.info('Trying to connect to %s', self.port) try: yield from serial_asyncio.create_serial_connection( self.loop, lambda: self.protocol, self.port, self.baud) return except serial.SerialException: _LOGGER.error('Unable to connect to %s', self.port) _LOGGER.info( 'Waiting %s secs before trying to connect again', self.reconnect_timeout) yield from asyncio.sleep( self.reconnect_timeout, loop=self.loop) except asyncio.CancelledError: _LOGGER.debug('Connect attempt to %s cancelled', self.port)
<SYSTEM_TASK:> Validate that value is a valid version string. <END_TASK> <USER_TASK:> Description: def is_version(value): """Validate that value is a valid version string."""
    try:
        value = str(value)
        if not parse_ver('1.4') <= parse_ver(value):
            raise ValueError()
        return value
    except (AttributeError, TypeError, ValueError):
        raise vol.Invalid(
            '{} is not a valid version specifier'.format(value))
<SYSTEM_TASK:> Validate that value is a valid battery level integer. <END_TASK> <USER_TASK:> Description: def is_battery_level(value): """Validate that value is a valid battery level integer."""
    try:
        value = percent_int(value)
        return value
    except vol.Invalid:
        _LOGGER.warning(
            '%s is not a valid battery level, falling back to battery level 0',
            value)
        return 0
<SYSTEM_TASK:> Validate that value is a valid heartbeat integer. <END_TASK> <USER_TASK:> Description: def is_heartbeat(value): """Validate that value is a valid heartbeat integer."""
    try:
        value = vol.Coerce(int)(value)
        return value
    except vol.Invalid:
        _LOGGER.warning(
            '%s is not a valid heartbeat value, falling back to heartbeat 0',
            value)
        return 0
<SYSTEM_TASK:> Generate a salt for the specified method. <END_TASK> <USER_TASK:> Description: def mksalt(method=None, rounds=None): """Generate a salt for the specified method. If not specified, the strongest available method will be used. """
    if method is None:
        method = methods[0]
    salt = ['${0}$'.format(method.ident) if method.ident else '']
    if rounds:
        salt.append('rounds={0:d}$'.format(rounds))
    salt.append(''.join(_sr.choice(_BASE64_CHARACTERS)
                        for char in range(method.salt_chars)))
    return ''.join(salt)
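Example output shape, assuming the strongest available method is SHA-512 with ident '6' and a 16-character salt (the random characters shown are made up):

    # mksalt()               -> '$6$bLzQYzQ1fEwyBMHB'
    # mksalt(rounds=10000)   -> '$6$rounds=10000$0yL2c5HrsI4Jcdfa'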
<SYSTEM_TASK:> Get the desired password from the user through a double prompt. <END_TASK> <USER_TASK:> Description: def double_prompt_for_plaintext_password(): """Get the desired password from the user through a double prompt."""
    password = 1
    password_repeat = 2
    while password != password_repeat:
        password = getpass.getpass('Enter password: ')
        password_repeat = getpass.getpass('Repeat password: ')
        if password != password_repeat:
            sys.stderr.write('Passwords do not match, try again.\n')
    return password
<SYSTEM_TASK:> Parse the data and respond to it appropriately. <END_TASK> <USER_TASK:> Description: def logic(self, data): """Parse the data and respond to it appropriately. Response is returned to the caller and has to be sent data as a mysensors command string. """
    try:
        msg = Message(data, self)
        msg.validate(self.protocol_version)
    except (ValueError, vol.Invalid) as exc:
        _LOGGER.warning('Not a valid message: %s', exc)
        return None
    message_type = self.const.MessageType(msg.type)
    handler = message_type.get_handler(self.handlers)
    ret = handler(msg)
    ret = self._route_message(ret)
    ret = ret.encode() if ret else None
    return ret
<SYSTEM_TASK:> Tell anyone who wants to know that a sensor was updated. <END_TASK> <USER_TASK:> Description: def alert(self, msg): """Tell anyone who wants to know that a sensor was updated."""
    if self.event_callback is not None:
        try:
            self.event_callback(msg)
        except Exception as exception:  # pylint: disable=broad-except
            _LOGGER.exception(exception)
    if self.persistence:
        self.persistence.need_save = True
<SYSTEM_TASK:> Add a sensor to the gateway. <END_TASK> <USER_TASK:> Description: def add_sensor(self, sensorid=None): """Add a sensor to the gateway."""
    if sensorid is None:
        sensorid = self._get_next_id()
    if sensorid is not None and sensorid not in self.sensors:
        self.sensors[sensorid] = Sensor(sensorid)
    return sensorid if sensorid in self.sensors else None
<SYSTEM_TASK:> Return True if a sensor and its child exist. <END_TASK> <USER_TASK:> Description: def is_sensor(self, sensorid, child_id=None): """Return True if a sensor and its child exist."""
ret = sensorid in self.sensors if not ret: _LOGGER.warning('Node %s is unknown', sensorid) if ret and child_id is not None: ret = child_id in self.sensors[sensorid].children if not ret: _LOGGER.warning('Child %s is unknown', child_id) if not ret and parse_ver(self.protocol_version) >= parse_ver('2.0'): _LOGGER.info('Requesting new presentation for node %s', sensorid) msg = Message(gateway=self).modify( node_id=sensorid, child_id=SYSTEM_CHILD_ID, type=self.const.MessageType.internal, sub_type=self.const.Internal.I_PRESENTATION) if self._route_message(msg): self.add_job(msg.encode) return ret
<SYSTEM_TASK:> Run a job, either passed in or from the queue. <END_TASK> <USER_TASK:> Description: def run_job(self, job=None): """Run a job, either passed in or from the queue. A job is a tuple of function and optional args. Keyword arguments can be passed via use of functools.partial. The job should return a string that should be sent by the gateway protocol. The function will be called with the arguments and the result will be returned. """
    if job is None:
        if not self.queue:
            return None
        job = self.queue.popleft()
    start = timer()
    func, args = job
    reply = func(*args)
    end = timer()
    if end - start > 0.1:
        _LOGGER.debug(
            'Handle queue with call %s(%s) took %.3f seconds',
            func, args, end - start)
    return reply
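A minimal sketch of queuing a job whose function needs keyword arguments, bound with functools.partial as the docstring suggests (the gateway, sensor and argument names are illustrative):

    from functools import partial

    # keyword arguments are bound in the partial; positional args travel in the job tuple
    gateway.add_job(
        partial(sensor.set_child_value, children=sensor.new_state),
        child_id, value_type, value)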
<SYSTEM_TASK:> Add a command to set a sensor value, to the queue. <END_TASK> <USER_TASK:> Description: def set_child_value( self, sensor_id, child_id, value_type, value, **kwargs): """Add a command to set a sensor value, to the queue. A queued command will be sent to the sensor when the gateway thread has sent all previously queued commands. If the sensor attribute new_state returns True, the command will be buffered in a queue on the sensor, and only the internal sensor state will be updated. When a smartsleep message is received, the internal state will be pushed to the sensor, via _handle_smartsleep method. """
    if not self.is_sensor(sensor_id, child_id):
        return
    if self.sensors[sensor_id].new_state:
        self.sensors[sensor_id].set_child_value(
            child_id, value_type, value,
            children=self.sensors[sensor_id].new_state)
    else:
        self.add_job(partial(
            self.sensors[sensor_id].set_child_value,
            child_id, value_type, value, **kwargs))
<SYSTEM_TASK:> Poll the queue for work. <END_TASK> <USER_TASK:> Description: def _poll_queue(self): """Poll the queue for work."""
    while not self._stop_event.is_set():
        reply = self.run_job()
        self.send(reply)
        if self.queue:
            continue
        time.sleep(0.02)
<SYSTEM_TASK:> Stop the background thread. <END_TASK> <USER_TASK:> Description: def stop(self): """Stop the background thread."""
    self._stop_event.set()
    if not self.persistence:
        return
    if self._cancel_save is not None:
        self._cancel_save()
        self._cancel_save = None
    self.persistence.save_sensors()
<SYSTEM_TASK:> Disconnect from the transport. <END_TASK> <USER_TASK:> Description: def _disconnect(self): """Disconnect from the transport."""
    if not self.protocol or not self.protocol.transport:
        self.protocol = None  # Make sure protocol is None
        return
    _LOGGER.info('Disconnecting from gateway')
    self.protocol.transport.close()
    self.protocol = None
<SYSTEM_TASK:> Write a message to the gateway. <END_TASK> <USER_TASK:> Description: def send(self, message): """Write a message to the gateway."""
    if not message or not self.protocol or not self.protocol.transport:
        return
    if not self.can_log:
        _LOGGER.debug('Sending %s', message.strip())
    try:
        self.protocol.transport.write(message.encode())
    except OSError as exc:
        _LOGGER.error(
            'Failed writing to transport %s: %s',
            self.protocol.transport, exc)
        self.protocol.transport.close()
        self.protocol.conn_lost_callback()
<SYSTEM_TASK:> Add a job that should return a reply to be sent. <END_TASK> <USER_TASK:> Description: def add_job(self, func, *args): """Add a job that should return a reply to be sent. A job is a tuple of function and optional args. Keyword arguments can be passed via use of functools.partial. The job should return a string that should be sent by the gateway protocol. The async version of this method will send the reply directly. """
    job = func, args
    reply = self.run_job(job)
    self.send(reply)
<SYSTEM_TASK:> Handle created connection. <END_TASK> <USER_TASK:> Description: def connection_made(self, transport): """Handle created connection."""
    super().connection_made(transport)
    if hasattr(self.transport, 'serial'):
        _LOGGER.info('Connected to %s', self.transport.serial)
    else:
        _LOGGER.info('Connected to %s', self.transport)
<SYSTEM_TASK:> Handle incoming string data one line at a time. <END_TASK> <USER_TASK:> Description: def handle_line(self, line): """Handle incoming string data one line at a time."""
    if not self.gateway.can_log:
        _LOGGER.debug('Receiving %s', line)
    self.gateway.add_job(self.gateway.logic, line)
<SYSTEM_TASK:> Create and add a child sensor. <END_TASK> <USER_TASK:> Description: def add_child_sensor(self, child_id, child_type, description=''): """Create and add a child sensor."""
    if child_id in self.children:
        _LOGGER.warning(
            'child_id %s already exists in children of node %s, '
            'cannot add child', child_id, self.sensor_id)
        return None
    self.children[child_id] = ChildSensor(
        child_id, child_type, description)
    return child_id
<SYSTEM_TASK:> Return the child schema for the correct const version. <END_TASK> <USER_TASK:> Description: def get_schema(self, protocol_version): """Return the child schema for the correct const version."""
    const = get_const(protocol_version)
    custom_schema = vol.Schema({
        typ.value: const.VALID_SETREQ[typ]
        for typ in const.VALID_TYPES[const.Presentation.S_CUSTOM]})
    return custom_schema.extend({
        typ.value: const.VALID_SETREQ[typ]
        for typ in const.VALID_TYPES[self.type]})
<SYSTEM_TASK:> Validate child value types and values against protocol_version. <END_TASK> <USER_TASK:> Description: def validate(self, protocol_version, values=None): """Validate child value types and values against protocol_version."""
    if values is None:
        values = self.values
    return self.get_schema(protocol_version)(values)
<SYSTEM_TASK:> Get where package was shipped from. <END_TASK> <USER_TASK:> Description: def _get_shipped_from(row): """Get where package was shipped from."""
    try:
        spans = row.find('div', {'id': 'coltextR2'}).find_all('span')
        if len(spans) < 2:
            return None
        return spans[1].string
    except AttributeError:
        return None
<SYSTEM_TASK:> RC4 encryption and decryption method. <END_TASK> <USER_TASK:> Description: def rc4(data, key): """RC4 encryption and decryption method."""
    S, j, out = list(range(256)), 0, []
    for i in range(256):
        j = (j + S[i] + ord(key[i % len(key)])) % 256
        S[i], S[j] = S[j], S[i]
    i = j = 0
    for ch in data:
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        out.append(chr(ord(ch) ^ S[(S[i] + S[j]) % 256]))
    return "".join(out)
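RC4 is symmetric, so running the function twice with the same key restores the plaintext:

    ciphertext = rc4('attack at dawn', 'secret-key')
    assert rc4(ciphertext, 'secret-key') == 'attack at dawn'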
<SYSTEM_TASK:> Create an email confirmation for `content_object` and send a confirmation mail. <END_TASK> <USER_TASK:> Description: def verify_email_for_object(self, email, content_object, email_field_name='email'): """ Create an email confirmation for `content_object` and send a confirmation mail. The email will be directly saved to `content_object.email_field_name` when `is_primary` and `skip_verify` both are true. """
confirmation_key = generate_random_token() try: confirmation = EmailConfirmation() confirmation.content_object = content_object confirmation.email_field_name = email_field_name confirmation.email = email confirmation.confirmation_key = confirmation_key confirmation.save() except IntegrityError: confirmation = EmailConfirmation.objects.get_for_object(content_object, email_field_name) confirmation.email = email confirmation.confirmation_key = confirmation_key confirmation.save(update_fields=['email', 'confirmation_key']) confirmation.send() return confirmation
<SYSTEM_TASK:> delete all confirmations for the same content_object and the same field <END_TASK> <USER_TASK:> Description: def clean(self): """ delete all confirmations for the same content_object and the same field """
    EmailConfirmation.objects.filter(content_type=self.content_type,
                                     object_id=self.object_id,
                                     email_field_name=self.email_field_name).delete()
<SYSTEM_TASK:> Returns a mapping of items to their new values. The mapping includes only items whose value or raw string value <END_TASK> <USER_TASK:> Description: def values(self): """ Returns a mapping of items to their new values. The mapping includes only items whose value or raw string value has changed in the context. """
    report = {}
    for k, k_changes in self._changes.items():
        if len(k_changes) == 1:
            report[k] = k_changes[0].new_value
        elif k_changes[0].old_value != k_changes[-1].new_value:
            report[k] = k_changes[-1].new_value
    return report
<SYSTEM_TASK:> Returns a mapping of items to their effective change objects which include the old values <END_TASK> <USER_TASK:> Description: def changes(self): """ Returns a mapping of items to their effective change objects which include the old values and the new. The mapping includes only items whose value or raw string value has changed in the context. """
report = {} for k, k_changes in self._changes.items(): if len(k_changes) == 1: report[k] = k_changes[0] else: first = k_changes[0] last = k_changes[-1] if first.old_value != last.new_value or first.old_raw_str_value != last.new_raw_str_value: report[k] = _Change( first.old_value, last.new_value, first.old_raw_str_value, last.new_raw_str_value, ) return report
<SYSTEM_TASK:> Load configuration values from the specified source. <END_TASK> <USER_TASK:> Description: def load(self, source, as_defaults=False): """ Load configuration values from the specified source. Args: source: as_defaults (bool): if ``True``, contents of ``source`` will be treated as schema of configuration items. """
    if isinstance(source, six.string_types):
        source = os.path.expanduser(source)
        with open(source, encoding='utf-8') as f:
            self._rw.load_config_from_file(self._config, f, as_defaults=as_defaults)
    elif isinstance(source, (list, tuple)):
        for s in source:
            with open(s, encoding='utf-8') as f:
                self._rw.load_config_from_file(self._config, f, as_defaults=as_defaults)
    else:
        self._rw.load_config_from_file(self._config, source, as_defaults=as_defaults)
<SYSTEM_TASK:> Load configuration values from the specified source string. <END_TASK> <USER_TASK:> Description: def loads(self, config_str, as_defaults=False): """ Load configuration values from the specified source string. Args: config_str: as_defaults (bool): if ``True``, contents of ``source`` will be treated as schema of configuration items. """
    self._rw.load_config_from_string(self._config, config_str, as_defaults=as_defaults)
<SYSTEM_TASK:> Write configuration values to the specified destination. <END_TASK> <USER_TASK:> Description: def dump(self, destination, with_defaults=False): """ Write configuration values to the specified destination. Args: destination: with_defaults (bool): if ``True``, values of items with no custom values will be included in the output if they have a default value set. """
    if isinstance(destination, six.string_types):
        with open(destination, 'w', encoding='utf-8') as f:
            self._rw.dump_config_to_file(self._config, f, with_defaults=with_defaults)
    else:
        self._rw.dump_config_to_file(self._config, destination, with_defaults=with_defaults)
<SYSTEM_TASK:> Generate a string representing all the configuration values. <END_TASK> <USER_TASK:> Description: def dumps(self, with_defaults=False): """ Generate a string representing all the configuration values. Args: with_defaults (bool): if ``True``, values of items with no custom values will be included in the output if they have a default value set. """
    return self._rw.dump_config_to_string(self._config, with_defaults=with_defaults)
<SYSTEM_TASK:> This method is used only when there is a custom key_setter set. <END_TASK> <USER_TASK:> Description: def _default_key_setter(self, name, subject): """ This method is used only when there is a custom key_setter set. Do not override this method. """
if is_config_item(subject): self.add_item(name, subject) elif is_config_section(subject): self.add_section(name, subject) else: raise TypeError( 'Section items can only be replaced with items, ' 'got {type}. To set item value use ...{name}.value = <new_value>'.format( type=type(subject), name=name, ) )
<SYSTEM_TASK:> The recommended way of retrieving a section by key when extending configmanager's behaviour. <END_TASK> <USER_TASK:> Description: def get_section(self, *key): """ The recommended way of retrieving a section by key when extending configmanager's behaviour. """
    section = self._get_item_or_section(key)
    if not section.is_section:
        raise RuntimeError('{} is an item, not a section'.format(key))
    return section
<SYSTEM_TASK:> Add a config item to this section. <END_TASK> <USER_TASK:> Description: def add_item(self, alias, item): """ Add a config item to this section. """
if not isinstance(alias, six.string_types): raise TypeError('Item name must be a string, got a {!r}'.format(type(alias))) item = copy.deepcopy(item) if item.name is not_set: item.name = alias if self.settings.str_path_separator in item.name: raise ValueError( 'Item name must not contain str_path_separator which is configured for this Config -- {!r} -- ' 'but {!r} does.'.format(self.settings.str_path_separator, item) ) self._tree[item.name] = item if item.name != alias: if self.settings.str_path_separator in alias: raise ValueError( 'Item alias must not contain str_path_separator which is configured for this Config -- {!r} --' 'but {!r} used for {!r} does.'.format(self.settings.str_path_separator, alias, item) ) self._tree[alias] = item item._section = self self.dispatch_event(self.hooks.item_added_to_section, alias=alias, section=self, subject=item)
<SYSTEM_TASK:> Add a sub-section to this section. <END_TASK> <USER_TASK:> Description: def add_section(self, alias, section): """ Add a sub-section to this section. """
if not isinstance(alias, six.string_types): raise TypeError('Section name must be a string, got a {!r}'.format(type(alias))) self._tree[alias] = section if self.settings.str_path_separator in alias: raise ValueError( 'Section alias must not contain str_path_separator which is configured for this Config -- {!r} -- ' 'but {!r} does.'.format(self.settings.str_path_separator, alias) ) section._section = self section._section_alias = alias self.dispatch_event(self.hooks.section_added_to_section, alias=alias, section=self, subject=section)
<SYSTEM_TASK:> Basic recursive iterator whose only purpose is to yield all items <END_TASK> <USER_TASK:> Description: def _get_recursive_iterator(self, recursive=False): """ Basic recursive iterator whose only purpose is to yield all items and sections in order, with their full paths as keys. Main challenge is to de-duplicate items and sections which have aliases. Do not add any new features to this iterator, instead build others that extend this one. """
names_yielded = set() for obj_alias, obj in self._tree.items(): if obj.is_section: if obj.alias in names_yielded: continue names_yielded.add(obj.alias) yield (obj.alias,), obj if not recursive: continue for sub_item_path, sub_item in obj._get_recursive_iterator(recursive=recursive): yield (obj_alias,) + sub_item_path, sub_item else: # _tree contains duplicates so that we can have multiple aliases point # to the same item. We have to de-duplicate here. if obj.name in names_yielded: continue names_yielded.add(obj.name) yield (obj.name,), obj
<SYSTEM_TASK:> Recursively resets values of all items contained in this section <END_TASK> <USER_TASK:> Description: def reset(self): """ Recursively resets values of all items contained in this section and its subsections to their default values. """
    for _, item in self.iter_items(recursive=True):
        item.reset()
<SYSTEM_TASK:> ``True`` if values of all config items in this section and its subsections <END_TASK> <USER_TASK:> Description: def is_default(self): """ ``True`` if values of all config items in this section and its subsections have their values equal to defaults or have no value set. """
    for _, item in self.iter_items(recursive=True):
        if not item.is_default:
            return False
    return True
<SYSTEM_TASK:> Export values of all items contained in this section to a dictionary. <END_TASK> <USER_TASK:> Description: def dump_values(self, with_defaults=True, dict_cls=dict, flat=False): """ Export values of all items contained in this section to a dictionary. Items with no values set (and no defaults set if ``with_defaults=True``) will be excluded. Returns: dict: A dictionary of key-value pairs, where for sections values are dictionaries of their contents. """
values = dict_cls() if flat: for str_path, item in self.iter_items(recursive=True, key='str_path'): if item.has_value: if with_defaults or not item.is_default: values[str_path] = item.value else: for item_name, item in self._tree.items(): if is_config_section(item): section_values = item.dump_values(with_defaults=with_defaults, dict_cls=dict_cls) if section_values: values[item_name] = section_values else: if item.has_value: if with_defaults or not item.is_default: values[item.name] = item.value return values
<SYSTEM_TASK:> Import config values from a dictionary. <END_TASK> <USER_TASK:> Description: def load_values(self, dictionary, as_defaults=False, flat=False): """ Import config values from a dictionary. When ``as_defaults`` is set to ``True``, the values imported will be set as defaults. This can be used to declare the sections and items of configuration. Values of sections and items in ``dictionary`` can be dictionaries as well as instances of :class:`.Item` and :class:`.Config`. Args: dictionary: as_defaults: if ``True``, the imported values will be set as defaults. """
if flat: # Deflatten the dictionary and then pass on to the normal case. separator = self.settings.str_path_separator flat_dictionary = dictionary dictionary = collections.OrderedDict() for k, v in flat_dictionary.items(): k_parts = k.split(separator) c = dictionary for i, kp in enumerate(k_parts): if i >= len(k_parts) - 1: c[kp] = v else: if kp not in c: c[kp] = collections.OrderedDict() c = c[kp] for name, value in dictionary.items(): if name not in self: if as_defaults: if isinstance(value, dict): self[name] = self.create_section() self[name].load_values(value, as_defaults=as_defaults) else: self[name] = self.create_item(name, default=value) else: # Skip unknown names if not interpreting dictionary as defaults pass continue resolution = self._get_item_or_section(name, handle_not_found=False) if is_config_item(resolution): if as_defaults: resolution.default = value else: resolution.value = value else: resolution.load_values(value, as_defaults=as_defaults)
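A minimal sketch of the flat form, assuming the default str_path_separator of '.' (the section and item names are illustrative):

    config = Config()
    config.load_values(
        {'uploads.enabled': True, 'uploads.threads': 1},
        as_defaults=True, flat=True)
    # equivalent to the nested form:
    # config.load_values({'uploads': {'enabled': True, 'threads': 1}}, as_defaults=True)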
<SYSTEM_TASK:> Internal factory method used to create an instance of configuration section. <END_TASK> <USER_TASK:> Description: def create_section(self, *args, **kwargs): """ Internal factory method used to create an instance of configuration section. Should only be used when extending or modifying configmanager's functionality. Under normal circumstances you should let configmanager create sections and items when parsing configuration schemas. Do not override this method. To customise section creation, write your own section factory and pass it to Config through section_factory= keyword argument. """
    kwargs.setdefault('section', self)
    return self.settings.section_factory(*args, **kwargs)
<SYSTEM_TASK:> A decorator to register a dynamic item attribute provider. <END_TASK> <USER_TASK:> Description: def item_attribute(self, f=None, name=None): """ A decorator to register a dynamic item attribute provider. By default, uses function name for attribute name. Override that with ``name=``. """
    def decorator(func):
        attr_name = name or func.__name__
        if attr_name.startswith('_'):
            raise RuntimeError('Invalid dynamic item attribute name -- should not start with an underscore')
        self.__item_attributes[attr_name] = func
        return func
    if f is None:
        return decorator
    else:
        return decorator(f)
<SYSTEM_TASK:> Method called by item when an attribute is not found. <END_TASK> <USER_TASK:> Description: def get_item_attribute(self, item, name): """ Method called by item when an attribute is not found. """
    if name in self.__item_attributes:
        return self.__item_attributes[name](item)
    elif self.section:
        return self.section.get_item_attribute(item, name)
    else:
        raise AttributeError(name)
<SYSTEM_TASK:> Dispatch section event. <END_TASK> <USER_TASK:> Description: def dispatch_event(self, event_, **kwargs): """ Dispatch section event. Notes: You MUST NOT call event.trigger() directly because it will circumvent the section settings as well as ignore the section tree. If hooks are disabled somewhere up in the tree, and enabled down below, events will still be dispatched down below because that's where they originate. """
if self.settings.hooks_enabled: result = self.hooks.dispatch_event(event_, **kwargs) if result is not None: return result # Must also dispatch the event in parent section if self.section: return self.section.dispatch_event(event_, **kwargs) elif self.section: # Settings only apply to one section, so must still # dispatch the event in parent sections recursively. self.section.dispatch_event(event_, **kwargs)
<SYSTEM_TASK:> Load user configuration based on settings. <END_TASK> <USER_TASK:> Description: def load(self): """ Load user configuration based on settings. """
    # Must reverse because we want the sources assigned to higher-up Config instances
    # to override sources assigned to lower Config instances.
    for section in reversed(list(self.iter_sections(recursive=True, key=None))):
        if section.is_config:
            section.load()
    for source in self.settings.load_sources:
        adapter = getattr(self, _get_persistence_adapter_for(source))
        if adapter.store_exists(source):
            adapter.load(source)
<SYSTEM_TASK:> Registers a click.option which falls back to a configmanager Item <END_TASK> <USER_TASK:> Description: def option(self, *args, **kwargs): """ Registers a click.option which falls back to a configmanager Item if user hasn't provided a value in the command line. Item must be the last of ``args``. Examples:: config = Config({'greeting': 'Hello'}) @click.command() @config.click.option('--greeting', config.greeting) def say_hello(greeting): click.echo(greeting) """
    args, kwargs = _config_parameter(args, kwargs)
    return self._click.option(*args, **kwargs)
<SYSTEM_TASK:> Registers a click.argument which falls back to a configmanager Item <END_TASK> <USER_TASK:> Description: def argument(self, *args, **kwargs): """ Registers a click.argument which falls back to a configmanager Item if user hasn't provided a value in the command line. Item must be the last of ``args``. """
    if kwargs.get('required', True):
        raise TypeError(
            'In click framework, arguments are mandatory, unless marked required=False. '
            'Attempt to use configmanager as a fallback provider suggests that this is an optional option, '
            'not a mandatory argument.'
        )
    args, kwargs = _config_parameter(args, kwargs)
    return self._click.argument(*args, **kwargs)
<SYSTEM_TASK:> Helper to get value of a named attribute irrespective of whether it is passed <END_TASK> <USER_TASK:> Description: def _get_kwarg(self, name, kwargs): """ Helper to get value of a named attribute irrespective of whether it is passed with or without "@" prefix. """
    at_name = '@{}'.format(name)
    if name in kwargs:
        if at_name in kwargs:
            raise ValueError('Both {!r} and {!r} specified in kwargs'.format(name, at_name))
        return kwargs[name]
    if at_name in kwargs:
        return kwargs[at_name]
    return not_set
<SYSTEM_TASK:> Internal helper to get item value from an environment variable <END_TASK> <USER_TASK:> Description: def _get_envvar_value(self): """ Internal helper to get item value from an environment variable if item is controlled by one, and if the variable is set. Returns not_set otherwise. """
    envvar_name = None
    if self.envvar is True:
        envvar_name = self.envvar_name
        if envvar_name is None:
            envvar_name = '_'.join(self.get_path()).upper()
    elif self.envvar:
        envvar_name = self.envvar
    if envvar_name and envvar_name in os.environ:
        return self.type.deserialize(os.environ[envvar_name])
    else:
        return not_set
<SYSTEM_TASK:> Resets the value of config item to its default value. <END_TASK> <USER_TASK:> Description: def reset(self): """ Resets the value of config item to its default value. """
old_value = self._value old_raw_str_value = self.raw_str_value self._value = not_set self.raw_str_value = not_set new_value = self._value if old_value is not_set: # Nothing to report return if self.section: self.section.dispatch_event( self.section.hooks.item_value_changed, item=self, old_value=old_value, new_value=new_value, old_raw_str_value=old_raw_str_value, new_raw_str_value=self.raw_str_value, )
<SYSTEM_TASK:> ``True`` if the item's value is its default value or if no value and no default value are set. <END_TASK> <USER_TASK:> Description: def is_default(self): """ ``True`` if the item's value is its default value or if no value and no default value are set. If the item is backed by an environment variable, this will be ``True`` only if the environment variable is set and is different to the default value of the item. """
    envvar_value = self._get_envvar_value()
    if envvar_value is not not_set:
        return envvar_value == self.default
    else:
        return self._value is not_set or self._value == self.default
<SYSTEM_TASK:> ``True`` if item has a default value or custom value set. <END_TASK> <USER_TASK:> Description: def has_value(self): """ ``True`` if item has a default value or custom value set. """
    if self._get_envvar_value() is not not_set:
        return True
    else:
        return self.default is not not_set or self._value is not not_set
<SYSTEM_TASK:> Return list of available domains for use in email address. <END_TASK> <USER_TASK:> Description: def available_domains(self): """ Return list of available domains for use in email address. """
    if not hasattr(self, '_available_domains'):
        url = 'http://{0}/request/domains/format/json/'.format(
            self.api_domain)
        req = requests.get(url)
        domains = req.json()
        setattr(self, '_available_domains', domains)
    return self._available_domains
<SYSTEM_TASK:> Generate string for email address login with defined length and <END_TASK> <USER_TASK:> Description: def generate_login(self, min_length=6, max_length=10, digits=True): """ Generate string for email address login with defined length and alphabet. :param min_length: (optional) min login length. Default value is ``6``. :param max_length: (optional) max login length. Default value is ``10``. :param digits: (optional) use digits in login generation. Default value is ``True``. """
    chars = string.ascii_lowercase
    if digits:
        chars += string.digits
    length = random.randint(min_length, max_length)
    return ''.join(random.choice(chars) for x in range(length))
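Illustrative outputs only, since the login is random:

    # generate_login()                   -> 'x7k2mqp9'   (6-10 lowercase letters and digits)
    # generate_login(8, 8, digits=False) -> 'qwertzui'   (always 8 lowercase letters)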
<SYSTEM_TASK:> Return full email address from login and domain from params in class <END_TASK> <USER_TASK:> Description: def get_email_address(self): """ Return full email address from login and domain from params in class initialization or generate new. """
    if self.login is None:
        self.login = self.generate_login()
    available_domains = self.available_domains
    if self.domain is None:
        self.domain = random.choice(available_domains)
    elif self.domain not in available_domains:
        raise ValueError('Domain not found in available domains!')
    return u'{0}{1}'.format(self.login, self.domain)
<SYSTEM_TASK:> Return list of emails in given email address <END_TASK> <USER_TASK:> Description: def get_mailbox(self, email=None, email_hash=None): """ Return list of emails in given email address or dict with `error` key if mail box is empty. :param email: (optional) email address. :param email_hash: (optional) md5 hash from email address. """
    if email is None:
        email = self.get_email_address()
    if email_hash is None:
        email_hash = self.get_hash(email)
    url = 'http://{0}/request/mail/id/{1}/format/json/'.format(
        self.api_domain, email_hash)
    req = requests.get(url)
    return req.json()
<SYSTEM_TASK:> Creates a new primary zone. <END_TASK> <USER_TASK:> Description: def create_primary_zone(self, account_name, zone_name): """Creates a new primary zone. Arguments: account_name -- The name of the account that will contain this zone. zone_name -- The name of the zone. It must be unique. """
    zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"}
    primary_zone_info = {"forceImport": True, "createType": "NEW"}
    zone_data = {"properties": zone_properties, "primaryCreateInfo": primary_zone_info}
    return self.rest_api_connection.post("/v1/zones", json.dumps(zone_data))
<SYSTEM_TASK:> Creates a new primary zone by uploading a bind file <END_TASK> <USER_TASK:> Description: def create_primary_zone_by_upload(self, account_name, zone_name, bind_file): """Creates a new primary zone by uploading a bind file Arguments: account_name -- The name of the account that will contain this zone. zone_name -- The name of the zone. It must be unique. bind_file -- The file to upload. """
    zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"}
    primary_zone_info = {"forceImport": True, "createType": "UPLOAD"}
    zone_data = {"properties": zone_properties, "primaryCreateInfo": primary_zone_info}
    files = {'zone': ('', json.dumps(zone_data), 'application/json'),
             'file': ('file', open(bind_file, 'rb'), 'application/octet-stream')}
    return self.rest_api_connection.post_multi_part("/v1/zones", files)
<SYSTEM_TASK:> Creates a new primary zone by zone transferring off a master. <END_TASK> <USER_TASK:> Description: def create_primary_zone_by_axfr(self, account_name, zone_name, master, tsig_key=None, key_value=None): """Creates a new primary zone by zone transferring off a master. Arguments: account_name -- The name of the account that will contain this zone. zone_name -- The name of the zone. It must be unique. master -- Primary name server IP address. Keyword Arguments: tsig_key -- For TSIG-enabled zones: The transaction signature key. NOTE: Requires key_value. key_value -- TSIG key secret. """
zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"} if tsig_key is not None and key_value is not None: name_server_info = {"ip": master, "tsigKey": tsig_key, "tsigKeyValue": key_value} else: name_server_info = {"ip": master} primary_zone_info = {"forceImport": True, "createType": "TRANSFER", "nameServer": name_server_info} zone_data = {"properties": zone_properties, "primaryCreateInfo": primary_zone_info} return self.rest_api_connection.post("/v1/zones", json.dumps(zone_data))
<SYSTEM_TASK:> Creates a new secondary zone. <END_TASK> <USER_TASK:> Description: def create_secondary_zone(self, account_name, zone_name, master, tsig_key=None, key_value=None): """Creates a new secondary zone. Arguments: account_name -- The name of the account. zone_name -- The name of the zone. master -- Primary name server IP address. Keyword Arguments: tsig_key -- For TSIG-enabled zones: The transaction signature key. NOTE: Requires key_value. key_value -- TSIG key secret. """
zone_properties = {"name": zone_name, "accountName": account_name, "type": "SECONDARY"} if tsig_key is not None and key_value is not None: name_server_info = {"ip": master, "tsigKey": tsig_key, "tsigKeyValue": key_value} else: name_server_info = {"ip": master} name_server_ip_1 = {"nameServerIp1": name_server_info} name_server_ip_list = {"nameServerIpList": name_server_ip_1} secondary_zone_info = {"primaryNameServers": name_server_ip_list} zone_data = {"properties": zone_properties, "secondaryCreateInfo": secondary_zone_info} return self.rest_api_connection.post("/v1/zones", json.dumps(zone_data))
<SYSTEM_TASK:> Returns a list of zones for the specified account. <END_TASK> <USER_TASK:> Description: def get_zones_of_account(self, account_name, q=None, **kwargs): """Returns a list of zones for the specified account. Arguments: account_name -- The name of the account. Keyword Arguments: q -- The search parameters, in a dict. Valid keys are: name - substring match of the zone name zone_type - one of: PRIMARY SECONDARY ALIAS sort -- The sort column used to order the list. Valid values for the sort field are: NAME ACCOUNT_NAME RECORD_COUNT ZONE_TYPE reverse -- Whether the list is ascending(False) or descending(True) offset -- The position in the list for the first returned element(0 based) limit -- The maximum number of rows to be returned. """
uri = "/v1/accounts/" + account_name + "/zones" params = build_params(q, kwargs) return self.rest_api_connection.get(uri, params)
<SYSTEM_TASK:> Returns a list of zones across all of the user's accounts. <END_TASK> <USER_TASK:> Description: def get_zones(self, q=None, **kwargs): """Returns a list of zones across all of the user's accounts. Keyword Arguments: q -- The search parameters, in a dict. Valid keys are: name - substring match of the zone name zone_type - one of: PRIMARY SECONDARY ALIAS sort -- The sort column used to order the list. Valid values for the sort field are: NAME ACCOUNT_NAME RECORD_COUNT ZONE_TYPE reverse -- Whether the list is ascending(False) or descending(True) offset -- The position in the list for the first returned element(0 based) limit -- The maximum number of rows to be returned. """
uri = "/v1/zones" params = build_params(q, kwargs) return self.rest_api_connection.get(uri, params)
<SYSTEM_TASK:> Edit the axfr name servers of a secondary zone. <END_TASK> <USER_TASK:> Description: def edit_secondary_name_server(self, zone_name, primary=None, backup=None, second_backup=None): """Edit the axfr name servers of a secondary zone. Arguments: zone_name -- The name of the secondary zone being edited. primary -- The primary name server value. Keyword Arguments: backup -- The backup name server if any. second_backup -- The second backup name server. """
name_server_info = {} if primary is not None: name_server_info['nameServerIp1'] = {'ip':primary} if backup is not None: name_server_info['nameServerIp2'] = {'ip':backup} if second_backup is not None: name_server_info['nameServerIp3'] = {'ip':second_backup} name_server_ip_list = {"nameServerIpList": name_server_info} secondary_zone_info = {"primaryNameServers": name_server_ip_list} zone_data = {"secondaryCreateInfo": secondary_zone_info} return self.rest_api_connection.patch("/v1/zones/" + zone_name, json.dumps(zone_data))
<SYSTEM_TASK:> Returns the list of RRSets in the specified zone. <END_TASK> <USER_TASK:> Description: def get_rrsets(self, zone_name, q=None, **kwargs): """Returns the list of RRSets in the specified zone. Arguments: zone_name -- The name of the zone. Keyword Arguments: q -- The search parameters, in a dict. Valid keys are: ttl - must match the TTL for the rrset owner - substring match of the owner name value - substring match of the first BIND field value sort -- The sort column used to order the list. Valid values for the sort field are: OWNER TTL TYPE reverse -- Whether the list is ascending(False) or descending(True) offset -- The position in the list for the first returned element(0 based) limit -- The maximum number of rows to be returned. """
uri = "/v1/zones/" + zone_name + "/rrsets" params = build_params(q, kwargs) return self.rest_api_connection.get(uri, params)
<SYSTEM_TASK:> Creates a new RRSet in the specified zone. <END_TASK> <USER_TASK:> Description: def create_rrset(self, zone_name, rtype, owner_name, ttl, rdata): """Creates a new RRSet in the specified zone. Arguments: zone_name -- The zone that will contain the new RRSet. The trailing dot is optional. rtype -- The type of the RRSet. This can be numeric (1) or if a well-known name is defined for the type (A), you can use it instead. owner_name -- The owner name for the RRSet. If no trailing dot is supplied, the owner_name is assumed to be relative (foo). If a trailing dot is supplied, the owner name is assumed to be absolute (foo.zonename.com.) ttl -- The TTL value for the RRSet. rdata -- The BIND data for the RRSet as a string. If there is a single resource record in the RRSet, you can pass in the single string. If there are multiple resource records in this RRSet, pass in a list of strings. """
    if type(rdata) is not list:
        rdata = [rdata]
    rrset = {"ttl": ttl, "rdata": rdata}
    return self.rest_api_connection.post("/v1/zones/" + zone_name + "/rrsets/" + rtype + "/" + owner_name,
                                         json.dumps(rrset))
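A usage sketch, assuming client is an instance of this REST client; the zone and addresses are placeholders:

    client.create_rrset('example.com.', 'A', 'www', 300, ['192.0.2.10', '192.0.2.11'])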
<SYSTEM_TASK:> Updates an existing RRSet in the specified zone. <END_TASK> <USER_TASK:> Description: def edit_rrset(self, zone_name, rtype, owner_name, ttl, rdata, profile=None): """Updates an existing RRSet in the specified zone. Arguments: zone_name -- The zone that contains the RRSet. The trailing dot is optional. rtype -- The type of the RRSet. This can be numeric (1) or if a well-known name is defined for the type (A), you can use it instead. owner_name -- The owner name for the RRSet. If no trailing dot is supplied, the owner_name is assumed to be relative (foo). If a trailing dot is supplied, the owner name is assumed to be absolute (foo.zonename.com.) ttl -- The updated TTL value for the RRSet. rdata -- The updated BIND data for the RRSet as a string. If there is a single resource record in the RRSet, you can pass in the single string. If there are multiple resource records in this RRSet, pass in a list of strings. profile -- The profile info if this is updating a resource pool """
    if type(rdata) is not list:
        rdata = [rdata]
    rrset = {"ttl": ttl, "rdata": rdata}
    if profile:
        rrset["profile"] = profile
    uri = "/v1/zones/" + zone_name + "/rrsets/" + rtype + "/" + owner_name
    return self.rest_api_connection.put(uri, json.dumps(rrset))