Dataset columns (field, type, observed range):

  repository_name              string   length 5 to 67
  func_path_in_repository      string   length 4 to 234
  func_name                    string   length 0 to 314
  whole_func_string            string   length 52 to 3.87M
  language                     string   6 classes
  func_code_string             string   length 39 to 1.84M
  func_code_tokens             list     length 15 to 672k
  func_documentation_string    string   length 1 to 47.2k
  func_documentation_tokens    list     length 1 to 3.92k
  split_name                   string   1 class
  func_code_url                string   length 85 to 339
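Each record below follows this schema: one function per row, with the full source (whole_func_string), the code without its docstring (func_code_string), token lists, the docstring, the split, and a GitHub URL. As a minimal sketch of consuming such an export (assuming a JSON Lines file with exactly these field names; the file name is hypothetical):

import json

EXPECTED_FIELDS = {
    'repository_name', 'func_path_in_repository', 'func_name',
    'whole_func_string', 'language', 'func_code_string',
    'func_code_tokens', 'func_documentation_string',
    'func_documentation_tokens', 'split_name', 'func_code_url',
}

def iter_records(path):
    # Yield one dict per line, checking that every expected column is present.
    with open(path, 'r', encoding='utf-8') as handle:
        for line in handle:
            record = json.loads(line)
            missing = EXPECTED_FIELDS - set(record)
            if missing:
                raise ValueError('record is missing fields: {}'.format(sorted(missing)))
            yield record

# Hypothetical usage: count the Python records in a local export.
# python_rows = sum(1 for r in iter_records('train.jsonl') if r['language'] == 'python')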
napalm-automation/napalm-logs
napalm_logs/serializer/__init__.py
get_serializer
def get_serializer(name): ''' Return the serialize function. ''' try: log.debug('Using %s as serializer', name) return SERIALIZER_LOOKUP[name] except KeyError: msg = 'Serializer {} is not available'.format(name) log.error(msg, exc_info=True) raise InvalidSerializerException(msg)
python
def get_serializer(name): try: log.debug('Using %s as serializer', name) return SERIALIZER_LOOKUP[name] except KeyError: msg = 'Serializer {} is not available'.format(name) log.error(msg, exc_info=True) raise InvalidSerializerException(msg)
[ "def", "get_serializer", "(", "name", ")", ":", "try", ":", "log", ".", "debug", "(", "'Using %s as serializer'", ",", "name", ")", "return", "SERIALIZER_LOOKUP", "[", "name", "]", "except", "KeyError", ":", "msg", "=", "'Serializer {} is not available'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "InvalidSerializerException", "(", "msg", ")" ]
Return the serialize function.
[ "Return", "the", "serialize", "function", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/serializer/__init__.py#L34-L44
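The get_serializer record above shows the lookup-table pattern napalm-logs uses for pluggable components: a dict maps a name to a callable and an unknown name becomes a domain-specific exception. A self-contained sketch of the same pattern (the lookup table, logger, and exception class below are stand-ins, not the library's own objects):

import json
import logging

log = logging.getLogger(__name__)

class InvalidSerializerError(Exception):
    # Stand-in for the library's InvalidSerializerException.
    pass

# Hypothetical lookup table: serializer name -> serialize callable.
SERIALIZER_LOOKUP = {
    'json': json.dumps,
    'str': str,
}

def get_serializer(name):
    # Return the serialize function registered under `name`.
    try:
        log.debug('Using %s as serializer', name)
        return SERIALIZER_LOOKUP[name]
    except KeyError:
        msg = 'Serializer {} is not available'.format(name)
        log.error(msg, exc_info=True)
        raise InvalidSerializerError(msg)

# get_serializer('json')({'event': 'UP'}) returns '{"event": "UP"}'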
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc._handshake
def _handshake(self, conn, addr): ''' Ensures that the client receives the AES key. ''' # waiting for the magic request message msg = conn.recv(len(MAGIC_REQ)) log.debug('Received message %s from %s', msg, addr) if msg != MAGIC_REQ: log.warning('%s is not a valid REQ message from %s', msg, addr) return log.debug('Sending the private key') conn.send(self.__key) # wait for explicit ACK log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.debug('Sending the signature key') conn.send(self.__sgn) # wait for explicit ACK log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.info('%s is now authenticated', addr) self.keep_alive(conn)
python
def _handshake(self, conn, addr): msg = conn.recv(len(MAGIC_REQ)) log.debug('Received message %s from %s', msg, addr) if msg != MAGIC_REQ: log.warning('%s is not a valid REQ message from %s', msg, addr) return log.debug('Sending the private key') conn.send(self.__key) log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.debug('Sending the signature key') conn.send(self.__sgn) log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.info('%s is now authenticated', addr) self.keep_alive(conn)
[ "def", "_handshake", "(", "self", ",", "conn", ",", "addr", ")", ":", "# waiting for the magic request message", "msg", "=", "conn", ".", "recv", "(", "len", "(", "MAGIC_REQ", ")", ")", "log", ".", "debug", "(", "'Received message %s from %s'", ",", "msg", ",", "addr", ")", "if", "msg", "!=", "MAGIC_REQ", ":", "log", ".", "warning", "(", "'%s is not a valid REQ message from %s'", ",", "msg", ",", "addr", ")", "return", "log", ".", "debug", "(", "'Sending the private key'", ")", "conn", ".", "send", "(", "self", ".", "__key", ")", "# wait for explicit ACK", "log", ".", "debug", "(", "'Waiting for the client to confirm'", ")", "msg", "=", "conn", ".", "recv", "(", "len", "(", "MAGIC_ACK", ")", ")", "if", "msg", "!=", "MAGIC_ACK", ":", "return", "log", ".", "debug", "(", "'Sending the signature key'", ")", "conn", ".", "send", "(", "self", ".", "__sgn", ")", "# wait for explicit ACK", "log", ".", "debug", "(", "'Waiting for the client to confirm'", ")", "msg", "=", "conn", ".", "recv", "(", "len", "(", "MAGIC_ACK", ")", ")", "if", "msg", "!=", "MAGIC_ACK", ":", "return", "log", ".", "info", "(", "'%s is now authenticated'", ",", "addr", ")", "self", ".", "keep_alive", "(", "conn", ")" ]
Ensures that the client receives the AES key.
[ "Ensures", "that", "the", "client", "receives", "the", "AES", "key", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L79-L104
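The handshake above is driven from the other end by a client that sends a magic request, reads the AES key, acknowledges, then reads the signing key and acknowledges again. A rough client-side sketch of that exchange (the constant values and key lengths are assumptions and must match whatever the server uses; conn is an already established TLS-wrapped socket):

MAGIC_REQ = b'INIT'   # assumed value, not the library's actual constant
MAGIC_ACK = b'ACK'    # assumed value, not the library's actual constant

def client_handshake(conn, key_len, sgn_len):
    # Request the keys, acknowledging each one as the server expects.
    conn.send(MAGIC_REQ)
    aes_key = conn.recv(key_len)        # AES key sent by the auth process
    conn.send(MAGIC_ACK)
    signing_key = conn.recv(sgn_len)    # signature key sent next
    conn.send(MAGIC_ACK)
    return aes_key, signing_key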
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc.keep_alive
def keep_alive(self, conn): ''' Maintains auth sessions ''' while self.__up: msg = conn.recv(len(AUTH_KEEP_ALIVE)) if msg != AUTH_KEEP_ALIVE: log.error('Received something other than %s', AUTH_KEEP_ALIVE) conn.close() return try: conn.send(AUTH_KEEP_ALIVE_ACK) except (IOError, socket.error) as err: log.error('Unable to send auth keep alive: %s', err) conn.close() return
python
def keep_alive(self, conn): while self.__up: msg = conn.recv(len(AUTH_KEEP_ALIVE)) if msg != AUTH_KEEP_ALIVE: log.error('Received something other than %s', AUTH_KEEP_ALIVE) conn.close() return try: conn.send(AUTH_KEEP_ALIVE_ACK) except (IOError, socket.error) as err: log.error('Unable to send auth keep alive: %s', err) conn.close() return
[ "def", "keep_alive", "(", "self", ",", "conn", ")", ":", "while", "self", ".", "__up", ":", "msg", "=", "conn", ".", "recv", "(", "len", "(", "AUTH_KEEP_ALIVE", ")", ")", "if", "msg", "!=", "AUTH_KEEP_ALIVE", ":", "log", ".", "error", "(", "'Received something other than %s'", ",", "AUTH_KEEP_ALIVE", ")", "conn", ".", "close", "(", ")", "return", "try", ":", "conn", ".", "send", "(", "AUTH_KEEP_ALIVE_ACK", ")", "except", "(", "IOError", ",", "socket", ".", "error", ")", "as", "err", ":", "log", ".", "error", "(", "'Unable to send auth keep alive: %s'", ",", "err", ")", "conn", ".", "close", "(", ")", "return" ]
Maintains auth sessions
[ "Maintains", "auth", "sessions" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L106-L121
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc.verify_cert
def verify_cert(self): ''' Checks that the provided cert and key are valid and usable ''' log.debug('Verifying the %s certificate, keyfile: %s', self.certificate, self.keyfile) try: ssl.create_default_context().load_cert_chain(self.certificate, keyfile=self.keyfile) except ssl.SSLError: error_string = 'SSL certificate and key do not match' log.error(error_string) raise SSLMismatchException(error_string) except IOError: log.error('Unable to open either certificate or key file') raise log.debug('Certificate looks good.')
python
def verify_cert(self): log.debug('Verifying the %s certificate, keyfile: %s', self.certificate, self.keyfile) try: ssl.create_default_context().load_cert_chain(self.certificate, keyfile=self.keyfile) except ssl.SSLError: error_string = 'SSL certificate and key do not match' log.error(error_string) raise SSLMismatchException(error_string) except IOError: log.error('Unable to open either certificate or key file') raise log.debug('Certificate looks good.')
[ "def", "verify_cert", "(", "self", ")", ":", "log", ".", "debug", "(", "'Verifying the %s certificate, keyfile: %s'", ",", "self", ".", "certificate", ",", "self", ".", "keyfile", ")", "try", ":", "ssl", ".", "create_default_context", "(", ")", ".", "load_cert_chain", "(", "self", ".", "certificate", ",", "keyfile", "=", "self", ".", "keyfile", ")", "except", "ssl", ".", "SSLError", ":", "error_string", "=", "'SSL certificate and key do not match'", "log", ".", "error", "(", "error_string", ")", "raise", "SSLMismatchException", "(", "error_string", ")", "except", "IOError", ":", "log", ".", "error", "(", "'Unable to open either certificate or key file'", ")", "raise", "log", ".", "debug", "(", "'Certificate looks good.'", ")" ]
Checks that the provided cert and key are valid and usable
[ "Checks", "that", "the", "provided", "cert", "and", "key", "are", "valid", "and", "usable" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L123-L138
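verify_cert leans entirely on the standard library: loading a certificate and key into an SSL context raises ssl.SSLError when they do not match, and an OSError when a file cannot be read. The same check in isolation (the file paths in the usage note are placeholders):

import ssl

def cert_and_key_match(certfile, keyfile):
    # True when the certificate and private key load together cleanly.
    try:
        ssl.create_default_context().load_cert_chain(certfile, keyfile=keyfile)
    except ssl.SSLError:
        return False    # certificate and key do not match
    return True         # unreadable files still raise OSError to the caller

# cert_and_key_match('/etc/napalm/logs.crt', '/etc/napalm/logs.key')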
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc._create_skt
def _create_skt(self): ''' Create the authentication socket. ''' log.debug('Creating the auth socket') if ':' in self.auth_address: self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: self.socket.bind((self.auth_address, self.auth_port)) except socket.error as msg: error_string = 'Unable to bind (auth) to port {} on {}: {}'.format(self.auth_port, self.auth_address, msg) log.error(error_string, exc_info=True) raise BindException(error_string)
python
def _create_skt(self): log.debug('Creating the auth socket') if ':' in self.auth_address: self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: self.socket.bind((self.auth_address, self.auth_port)) except socket.error as msg: error_string = 'Unable to bind (auth) to port {} on {}: {}'.format(self.auth_port, self.auth_address, msg) log.error(error_string, exc_info=True) raise BindException(error_string)
[ "def", "_create_skt", "(", "self", ")", ":", "log", ".", "debug", "(", "'Creating the auth socket'", ")", "if", "':'", "in", "self", ".", "auth_address", ":", "self", ".", "socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET6", ",", "socket", ".", "SOCK_STREAM", ")", "else", ":", "self", ".", "socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "self", ".", "socket", ".", "bind", "(", "(", "self", ".", "auth_address", ",", "self", ".", "auth_port", ")", ")", "except", "socket", ".", "error", "as", "msg", ":", "error_string", "=", "'Unable to bind (auth) to port {} on {}: {}'", ".", "format", "(", "self", ".", "auth_port", ",", "self", ".", "auth_address", ",", "msg", ")", "log", ".", "error", "(", "error_string", ",", "exc_info", "=", "True", ")", "raise", "BindException", "(", "error_string", ")" ]
Create the authentication socket.
[ "Create", "the", "authentication", "socket", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L140-L154
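_create_skt picks the address family from the address format: a ':' in the address is treated as IPv6, anything else as IPv4, and bind failures are wrapped into a BindException. The family selection on its own (address and port values in the usage note are illustrative):

import socket

def bind_tcp_socket(address, port):
    # Choose IPv6 when the address contains ':', IPv4 otherwise, then bind.
    family = socket.AF_INET6 if ':' in address else socket.AF_INET
    skt = socket.socket(family, socket.SOCK_STREAM)
    skt.bind((address, port))
    return skt

# bind_tcp_socket('::1', 49018) uses AF_INET6; bind_tcp_socket('0.0.0.0', 49018) uses AF_INET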
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc.start
def start(self): ''' Listen to auth requests and send the AES key. Each client connection starts a new thread. ''' # Start suicide polling thread log.debug('Starting the auth process') self.verify_cert() self._create_skt() log.debug('The auth process can receive at most %d parallel connections', AUTH_MAX_CONN) self.socket.listen(AUTH_MAX_CONN) thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: (clientsocket, address) = self.socket.accept() wrapped_auth_skt = ssl.wrap_socket(clientsocket, server_side=True, certfile=self.certificate, keyfile=self.keyfile) except ssl.SSLError: log.exception('SSL error', exc_info=True) continue except socket.error as error: if self.__up is False: return else: msg = 'Received auth socket error: {}'.format(error) log.error(msg, exc_info=True) raise NapalmLogsExit(msg) log.info('%s connected', address) log.debug('Starting the handshake') client_thread = threading.Thread(target=self._handshake, args=(wrapped_auth_skt, address)) client_thread.start()
python
def start(self): log.debug('Starting the auth process') self.verify_cert() self._create_skt() log.debug('The auth process can receive at most %d parallel connections', AUTH_MAX_CONN) self.socket.listen(AUTH_MAX_CONN) thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: (clientsocket, address) = self.socket.accept() wrapped_auth_skt = ssl.wrap_socket(clientsocket, server_side=True, certfile=self.certificate, keyfile=self.keyfile) except ssl.SSLError: log.exception('SSL error', exc_info=True) continue except socket.error as error: if self.__up is False: return else: msg = 'Received auth socket error: {}'.format(error) log.error(msg, exc_info=True) raise NapalmLogsExit(msg) log.info('%s connected', address) log.debug('Starting the handshake') client_thread = threading.Thread(target=self._handshake, args=(wrapped_auth_skt, address)) client_thread.start()
[ "def", "start", "(", "self", ")", ":", "# Start suicide polling thread", "log", ".", "debug", "(", "'Starting the auth process'", ")", "self", ".", "verify_cert", "(", ")", "self", ".", "_create_skt", "(", ")", "log", ".", "debug", "(", "'The auth process can receive at most %d parallel connections'", ",", "AUTH_MAX_CONN", ")", "self", ".", "socket", ".", "listen", "(", "AUTH_MAX_CONN", ")", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "try", ":", "(", "clientsocket", ",", "address", ")", "=", "self", ".", "socket", ".", "accept", "(", ")", "wrapped_auth_skt", "=", "ssl", ".", "wrap_socket", "(", "clientsocket", ",", "server_side", "=", "True", ",", "certfile", "=", "self", ".", "certificate", ",", "keyfile", "=", "self", ".", "keyfile", ")", "except", "ssl", ".", "SSLError", ":", "log", ".", "exception", "(", "'SSL error'", ",", "exc_info", "=", "True", ")", "continue", "except", "socket", ".", "error", "as", "error", ":", "if", "self", ".", "__up", "is", "False", ":", "return", "else", ":", "msg", "=", "'Received auth socket error: {}'", ".", "format", "(", "error", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "NapalmLogsExit", "(", "msg", ")", "log", ".", "info", "(", "'%s connected'", ",", "address", ")", "log", ".", "debug", "(", "'Starting the handshake'", ")", "client_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_handshake", ",", "args", "=", "(", "wrapped_auth_skt", ",", "address", ")", ")", "client_thread", ".", "start", "(", ")" ]
Listen to auth requests and send the AES key. Each client connection starts a new thread.
[ "Listen", "to", "auth", "requests", "and", "send", "the", "AES", "key", ".", "Each", "client", "connection", "starts", "a", "new", "thread", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L156-L192
napalm-automation/napalm-logs
napalm_logs/auth.py
NapalmLogsAuthProc.stop
def stop(self): ''' Stop the auth proc. ''' log.info('Stopping auth process') self.__up = False self.socket.close()
python
def stop(self): log.info('Stopping auth process') self.__up = False self.socket.close()
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping auth process'", ")", "self", ".", "__up", "=", "False", "self", ".", "socket", ".", "close", "(", ")" ]
Stop the auth proc.
[ "Stop", "the", "auth", "proc", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L194-L200
napalm-automation/napalm-logs
napalm_logs/buffer/__init__.py
get_interface
def get_interface(name): ''' Return the serialize function. ''' try: log.debug('Using %s as buffer interface', name) return BUFFER_LOOKUP[name] except KeyError: msg = 'Buffer interface {} is not available'.format(name) log.error(msg, exc_info=True) raise InvalidBufferException(msg)
python
def get_interface(name): try: log.debug('Using %s as buffer interface', name) return BUFFER_LOOKUP[name] except KeyError: msg = 'Buffer interface {} is not available'.format(name) log.error(msg, exc_info=True) raise InvalidBufferException(msg)
[ "def", "get_interface", "(", "name", ")", ":", "try", ":", "log", ".", "debug", "(", "'Using %s as buffer interface'", ",", "name", ")", "return", "BUFFER_LOOKUP", "[", "name", "]", "except", "KeyError", ":", "msg", "=", "'Buffer interface {} is not available'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "InvalidBufferException", "(", "msg", ")" ]
Return the serialize function.
[ "Return", "the", "serialize", "function", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/buffer/__init__.py#L28-L38
napalm-automation/napalm-logs
napalm_logs/listener/kafka.py
KafkaListener.start
def start(self): ''' Startup the kafka consumer. ''' log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s', self.bootstrap_servers, self.group_id) try: self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers, group_id=self.group_id) except kafka.errors.NoBrokersAvailable as err: log.error(err, exc_info=True) raise ListenerException(err) log.debug('Subscribing to the %s topic', self.topic) self.consumer.subscribe(topics=[self.topic])
python
def start(self): log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s', self.bootstrap_servers, self.group_id) try: self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers, group_id=self.group_id) except kafka.errors.NoBrokersAvailable as err: log.error(err, exc_info=True) raise ListenerException(err) log.debug('Subscribing to the %s topic', self.topic) self.consumer.subscribe(topics=[self.topic])
[ "def", "start", "(", "self", ")", ":", "log", ".", "debug", "(", "'Creating the consumer using the bootstrap servers: %s and the group ID: %s'", ",", "self", ".", "bootstrap_servers", ",", "self", ".", "group_id", ")", "try", ":", "self", ".", "consumer", "=", "kafka", ".", "KafkaConsumer", "(", "bootstrap_servers", "=", "self", ".", "bootstrap_servers", ",", "group_id", "=", "self", ".", "group_id", ")", "except", "kafka", ".", "errors", ".", "NoBrokersAvailable", "as", "err", ":", "log", ".", "error", "(", "err", ",", "exc_info", "=", "True", ")", "raise", "ListenerException", "(", "err", ")", "log", ".", "debug", "(", "'Subscribing to the %s topic'", ",", "self", ".", "topic", ")", "self", ".", "consumer", ".", "subscribe", "(", "topics", "=", "[", "self", ".", "topic", "]", ")" ]
Startup the kafka consumer.
[ "Startup", "the", "kafka", "consumer", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/kafka.py#L41-L55
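The Kafka listener is a thin wrapper around kafka-python: a KafkaConsumer is created with the bootstrap servers and consumer group, then subscribed to one topic. A minimal sketch of that setup (broker address, group id, and topic are placeholders):

import kafka

def make_consumer(bootstrap_servers='localhost:9092', group_id='napalm-logs', topic='syslog'):
    # kafka.errors.NoBrokersAvailable is raised here if no broker answers.
    consumer = kafka.KafkaConsumer(bootstrap_servers=bootstrap_servers, group_id=group_id)
    consumer.subscribe(topics=[topic])
    return consumer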
napalm-automation/napalm-logs
napalm_logs/listener/kafka.py
KafkaListener.receive
def receive(self): ''' Return the message received and the address. ''' try: msg = next(self.consumer) except ValueError as error: log.error('Received kafka error: %s', error, exc_info=True) raise ListenerException(error) log_source = msg.key try: decoded = json.loads(msg.value.decode('utf-8')) except ValueError: log.error('Not in json format: %s', msg.value.decode('utf-8')) return '', '' log_message = decoded.get('message') log.debug('[%s] Received %s from %s', log_message, log_source, time.time()) return log_message, log_source
python
def receive(self): try: msg = next(self.consumer) except ValueError as error: log.error('Received kafka error: %s', error, exc_info=True) raise ListenerException(error) log_source = msg.key try: decoded = json.loads(msg.value.decode('utf-8')) except ValueError: log.error('Not in json format: %s', msg.value.decode('utf-8')) return '', '' log_message = decoded.get('message') log.debug('[%s] Received %s from %s', log_message, log_source, time.time()) return log_message, log_source
[ "def", "receive", "(", "self", ")", ":", "try", ":", "msg", "=", "next", "(", "self", ".", "consumer", ")", "except", "ValueError", "as", "error", ":", "log", ".", "error", "(", "'Received kafka error: %s'", ",", "error", ",", "exc_info", "=", "True", ")", "raise", "ListenerException", "(", "error", ")", "log_source", "=", "msg", ".", "key", "try", ":", "decoded", "=", "json", ".", "loads", "(", "msg", ".", "value", ".", "decode", "(", "'utf-8'", ")", ")", "except", "ValueError", ":", "log", ".", "error", "(", "'Not in json format: %s'", ",", "msg", ".", "value", ".", "decode", "(", "'utf-8'", ")", ")", "return", "''", ",", "''", "log_message", "=", "decoded", ".", "get", "(", "'message'", ")", "log", ".", "debug", "(", "'[%s] Received %s from %s'", ",", "log_message", ",", "log_source", ",", "time", ".", "time", "(", ")", ")", "return", "log_message", ",", "log_source" ]
Return the message received and the address.
[ "Return", "the", "message", "received", "and", "the", "address", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/kafka.py#L57-L74
napalm-automation/napalm-logs
napalm_logs/listener/kafka.py
KafkaListener.stop
def stop(self): ''' Shutdown kafka consumer. ''' log.info('Stopping the kafka listener class') self.consumer.unsubscribe() self.consumer.close()
python
def stop(self): log.info('Stopping the kafka listener class') self.consumer.unsubscribe() self.consumer.close()
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping te kafka listener class'", ")", "self", ".", "consumer", ".", "unsubscribe", "(", ")", "self", ".", "consumer", ".", "close", "(", ")" ]
Shutdown kafka consumer.
[ "Shutdown", "kafka", "consumer", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/kafka.py#L76-L82
napalm-automation/napalm-logs
napalm_logs/transport/__init__.py
get_transport
def get_transport(name): ''' Return the transport class. ''' try: log.debug('Using %s as transport', name) return TRANSPORT_LOOKUP[name] except KeyError: msg = 'Transport {} is not available. Are the dependencies installed?'.format(name) log.error(msg, exc_info=True) raise InvalidTransportException(msg)
python
def get_transport(name): try: log.debug('Using %s as transport', name) return TRANSPORT_LOOKUP[name] except KeyError: msg = 'Transport {} is not available. Are the dependencies installed?'.format(name) log.error(msg, exc_info=True) raise InvalidTransportException(msg)
[ "def", "get_transport", "(", "name", ")", ":", "try", ":", "log", ".", "debug", "(", "'Using %s as transport'", ",", "name", ")", "return", "TRANSPORT_LOOKUP", "[", "name", "]", "except", "KeyError", ":", "msg", "=", "'Transport {} is not available. Are the dependencies installed?'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "InvalidTransportException", "(", "msg", ")" ]
Return the transport class.
[ "Return", "the", "transport", "class", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/transport/__init__.py#L50-L60
napalm-automation/napalm-logs
napalm_logs/listener/zeromq.py
ZMQListener.start
def start(self): ''' Startup the zmq consumer. ''' zmq_uri = '{protocol}://{address}:{port}'.format( protocol=self.protocol, address=self.address, port=self.port ) if self.port else\ '{protocol}://{address}'.format( # noqa protocol=self.protocol, address=self.address ) log.debug('ZMQ URI: %s', zmq_uri) self.ctx = zmq.Context() if hasattr(zmq, self.type): skt_type = getattr(zmq, self.type) else: skt_type = zmq.PULL self.sub = self.ctx.socket(skt_type) self.sub.connect(zmq_uri) if self.hwm is not None: try: self.sub.setsockopt(zmq.HWM, self.hwm) except AttributeError: self.sub.setsockopt(zmq.RCVHWM, self.hwm) if self.recvtimeout is not None: log.debug('Setting RCVTIMEO to %d', self.recvtimeout) self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout) if self.keepalive is not None: log.debug('Setting TCP_KEEPALIVE to %d', self.keepalive) self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive) if self.keepalive_idle is not None: log.debug('Setting TCP_KEEPALIVE_IDLE to %d', self.keepalive_idle) self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle) if self.keepalive_interval is not None: log.debug('Setting TCP_KEEPALIVE_INTVL to %d', self.keepalive_interval) self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval)
python
def start(self): zmq_uri = '{protocol}://{address}:{port}'.format( protocol=self.protocol, address=self.address, port=self.port ) if self.port else\ '{protocol}://{address}'.format( protocol=self.protocol, address=self.address ) log.debug('ZMQ URI: %s', zmq_uri) self.ctx = zmq.Context() if hasattr(zmq, self.type): skt_type = getattr(zmq, self.type) else: skt_type = zmq.PULL self.sub = self.ctx.socket(skt_type) self.sub.connect(zmq_uri) if self.hwm is not None: try: self.sub.setsockopt(zmq.HWM, self.hwm) except AttributeError: self.sub.setsockopt(zmq.RCVHWM, self.hwm) if self.recvtimeout is not None: log.debug('Setting RCVTIMEO to %d', self.recvtimeout) self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout) if self.keepalive is not None: log.debug('Setting TCP_KEEPALIVE to %d', self.keepalive) self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive) if self.keepalive_idle is not None: log.debug('Setting TCP_KEEPALIVE_IDLE to %d', self.keepalive_idle) self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle) if self.keepalive_interval is not None: log.debug('Setting TCP_KEEPALIVE_INTVL to %d', self.keepalive_interval) self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval)
[ "def", "start", "(", "self", ")", ":", "zmq_uri", "=", "'{protocol}://{address}:{port}'", ".", "format", "(", "protocol", "=", "self", ".", "protocol", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", "if", "self", ".", "port", "else", "'{protocol}://{address}'", ".", "format", "(", "# noqa", "protocol", "=", "self", ".", "protocol", ",", "address", "=", "self", ".", "address", ")", "log", ".", "debug", "(", "'ZMQ URI: %s'", ",", "zmq_uri", ")", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "if", "hasattr", "(", "zmq", ",", "self", ".", "type", ")", ":", "skt_type", "=", "getattr", "(", "zmq", ",", "self", ".", "type", ")", "else", ":", "skt_type", "=", "zmq", ".", "PULL", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "skt_type", ")", "self", ".", "sub", ".", "connect", "(", "zmq_uri", ")", "if", "self", ".", "hwm", "is", "not", "None", ":", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "hwm", ")", "except", "AttributeError", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "self", ".", "hwm", ")", "if", "self", ".", "recvtimeout", "is", "not", "None", ":", "log", ".", "debug", "(", "'Setting RCVTIMEO to %d'", ",", "self", ".", "recvtimeout", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVTIMEO", ",", "self", ".", "recvtimeout", ")", "if", "self", ".", "keepalive", "is", "not", "None", ":", "log", ".", "debug", "(", "'Setting TCP_KEEPALIVE to %d'", ",", "self", ".", "keepalive", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE", ",", "self", ".", "keepalive", ")", "if", "self", ".", "keepalive_idle", "is", "not", "None", ":", "log", ".", "debug", "(", "'Setting TCP_KEEPALIVE_IDLE to %d'", ",", "self", ".", "keepalive_idle", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE_IDLE", ",", "self", ".", "keepalive_idle", ")", "if", "self", ".", "keepalive_interval", "is", "not", "None", ":", "log", ".", "debug", "(", "'Setting TCP_KEEPALIVE_INTVL to %d'", ",", "self", ".", "keepalive_interval", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "TCP_KEEPALIVE_INTVL", ",", "self", ".", "keepalive_interval", ")" ]
Startup the zmq consumer.
[ "Startup", "the", "zmq", "consumer", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L45-L82
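The ZeroMQ listener connects a PULL socket by default (or whatever socket type the configuration names) and then applies receive timeout and TCP keepalive options when they are set. A trimmed-down pyzmq sketch covering the common case (URI and timeout are placeholders):

import zmq

def make_pull_socket(uri='tcp://127.0.0.1:5556', recv_timeout_ms=1000):
    # Connect a PULL socket and bound how long recv() may block.
    ctx = zmq.Context()
    skt = ctx.socket(zmq.PULL)
    skt.connect(uri)
    skt.setsockopt(zmq.RCVTIMEO, recv_timeout_ms)   # recv() raises zmq.Again on timeout
    return ctx, skt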
napalm-automation/napalm-logs
napalm_logs/listener/zeromq.py
ZMQListener.receive
def receive(self): ''' Return the message received. ..note:: In ZMQ we are unable to get the address where we got the message from. ''' try: msg = self.sub.recv() except zmq.Again as error: log.error('Unable to receive messages: %s', error, exc_info=True) raise ListenerException(error) log.debug('[%s] Received %s', time.time(), msg) return msg, ''
python
def receive(self): try: msg = self.sub.recv() except zmq.Again as error: log.error('Unable to receive messages: %s', error, exc_info=True) raise ListenerException(error) log.debug('[%s] Received %s', time.time(), msg) return msg, ''
[ "def", "receive", "(", "self", ")", ":", "try", ":", "msg", "=", "self", ".", "sub", ".", "recv", "(", ")", "except", "zmq", ".", "Again", "as", "error", ":", "log", ".", "error", "(", "'Unable to receive messages: %s'", ",", "error", ",", "exc_info", "=", "True", ")", "raise", "ListenerException", "(", "error", ")", "log", ".", "debug", "(", "'[%s] Received %s'", ",", "time", ".", "time", "(", ")", ",", "msg", ")", "return", "msg", ",", "''" ]
Return the message received. ..note:: In ZMQ we are unable to get the address where we got the message from.
[ "Return", "the", "message", "received", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L84-L97
napalm-automation/napalm-logs
napalm_logs/listener/zeromq.py
ZMQListener.stop
def stop(self): ''' Shutdown zmq listener. ''' log.info('Stopping the zmq listener class') self.sub.close() self.ctx.term()
python
def stop(self): log.info('Stopping the zmq listener class') self.sub.close() self.ctx.term()
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping the zmq listener class'", ")", "self", ".", "sub", ".", "close", "(", ")", "self", ".", "ctx", ".", "term", "(", ")" ]
Shutdown zmq listener.
[ "Shutdown", "zmq", "listener", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L99-L105
napalm-automation/napalm-logs
napalm_logs/listener/udp.py
UDPListener.start
def start(self): ''' Create the UDP listener socket. ''' if ':' in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if hasattr(socket, 'SO_REUSEPORT'): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: log.error('SO_REUSEPORT not supported') try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg) log.error(error_string, exc_info=True) raise BindException(error_string)
python
def start(self): if ':' in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if hasattr(socket, 'SO_REUSEPORT'): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: log.error('SO_REUSEPORT not supported') try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg) log.error(error_string, exc_info=True) raise BindException(error_string)
[ "def", "start", "(", "self", ")", ":", "if", "':'", "in", "self", ".", "address", ":", "self", ".", "skt", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET6", ",", "socket", ".", "SOCK_DGRAM", ")", "else", ":", "self", ".", "skt", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "if", "self", ".", "reuse_port", ":", "self", ".", "skt", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "if", "hasattr", "(", "socket", ",", "'SO_REUSEPORT'", ")", ":", "self", ".", "skt", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEPORT", ",", "1", ")", "else", ":", "log", ".", "error", "(", "'SO_REUSEPORT not supported'", ")", "try", ":", "self", ".", "skt", ".", "bind", "(", "(", "self", ".", "address", ",", "int", "(", "self", ".", "port", ")", ")", ")", "except", "socket", ".", "error", "as", "msg", ":", "error_string", "=", "'Unable to bind to port {} on {}: {}'", ".", "format", "(", "self", ".", "port", ",", "self", ".", "address", ",", "msg", ")", "log", ".", "error", "(", "error_string", ",", "exc_info", "=", "True", ")", "raise", "BindException", "(", "error_string", ")" ]
Create the UDP listener socket.
[ "Create", "the", "UDP", "listener", "socket", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/udp.py#L43-L62
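The UDP listener uses the same IPv4/IPv6 selection as the auth socket and optionally sets SO_REUSEADDR and, where available, SO_REUSEPORT before binding. A compact sketch (address and port are placeholders, picked unprivileged here):

import socket

def open_udp_listener(address='0.0.0.0', port=5514, reuse_port=False):
    # IPv6 when the address contains ':', IPv4 otherwise.
    family = socket.AF_INET6 if ':' in address else socket.AF_INET
    skt = socket.socket(family, socket.SOCK_DGRAM)
    if reuse_port:
        skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if hasattr(socket, 'SO_REUSEPORT'):
            skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    skt.bind((address, int(port)))
    return skt

# msg, addr = open_udp_listener().recvfrom(1024)   # addr[0] is the sender's IP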
napalm-automation/napalm-logs
napalm_logs/listener/udp.py
UDPListener.receive
def receive(self): ''' Return the message received and the address. ''' try: msg, addr = self.skt.recvfrom(self.buffer_size) except socket.error as error: log.error('Received listener socket error: %s', error, exc_info=True) raise ListenerException(error) log.debug('[%s] Received %s from %s', msg, addr, time.time()) return msg, addr[0]
python
def receive(self): try: msg, addr = self.skt.recvfrom(self.buffer_size) except socket.error as error: log.error('Received listener socket error: %s', error, exc_info=True) raise ListenerException(error) log.debug('[%s] Received %s from %s', msg, addr, time.time()) return msg, addr[0]
[ "def", "receive", "(", "self", ")", ":", "try", ":", "msg", ",", "addr", "=", "self", ".", "skt", ".", "recvfrom", "(", "self", ".", "buffer_size", ")", "except", "socket", ".", "error", "as", "error", ":", "log", ".", "error", "(", "'Received listener socket error: %s'", ",", "error", ",", "exc_info", "=", "True", ")", "raise", "ListenerException", "(", "error", ")", "log", ".", "debug", "(", "'[%s] Received %s from %s'", ",", "msg", ",", "addr", ",", "time", ".", "time", "(", ")", ")", "return", "msg", ",", "addr", "[", "0", "]" ]
Return the message received and the address.
[ "Return", "the", "message", "received", "and", "the", "address", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/udp.py#L64-L74
napalm-automation/napalm-logs
napalm_logs/proc.py
NapalmLogsProc._suicide_when_without_parent
def _suicide_when_without_parent(self, parent_pid): ''' Kill this process when the parent died. ''' while True: time.sleep(5) try: # Check pid alive os.kill(parent_pid, 0) except OSError: # Forcibly exit # Regular sys.exit raises an exception self.stop() log.warning('The parent is not alive, exiting.') os._exit(999)
python
def _suicide_when_without_parent(self, parent_pid): while True: time.sleep(5) try: os.kill(parent_pid, 0) except OSError: self.stop() log.warning('The parent is not alive, exiting.') os._exit(999)
[ "def", "_suicide_when_without_parent", "(", "self", ",", "parent_pid", ")", ":", "while", "True", ":", "time", ".", "sleep", "(", "5", ")", "try", ":", "# Check pid alive", "os", ".", "kill", "(", "parent_pid", ",", "0", ")", "except", "OSError", ":", "# Forcibly exit", "# Regular sys.exit raises an exception", "self", ".", "stop", "(", ")", "log", ".", "warning", "(", "'The parent is not alive, exiting.'", ")", "os", ".", "_exit", "(", "999", ")" ]
Kill this process when the parent died.
[ "Kill", "this", "process", "when", "the", "parent", "died", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/proc.py#L20-L34
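The watchdog above polls the parent with os.kill(pid, 0): signal 0 delivers nothing but raises OSError once the process no longer exists. That liveness check in isolation:

import os

def parent_is_alive(parent_pid):
    # Signal 0 only performs existence/permission checks.
    try:
        os.kill(parent_pid, 0)
    except OSError:
        return False
    return True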
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._setup_buffer
def _setup_buffer(self): ''' Setup the buffer subsystem. ''' if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict): return buffer_name = list(self._buffer_cfg.keys())[0] buffer_class = napalm_logs.buffer.get_interface(buffer_name) log.debug('Setting up buffer interface "%s"', buffer_name) if 'expire_time' not in self._buffer_cfg[buffer_name]: self._buffer_cfg[buffer_name]['expire_time'] = CONFIG.BUFFER_EXPIRE_TIME self._buffer = buffer_class(**self._buffer_cfg[buffer_name])
python
def _setup_buffer(self): if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict): return buffer_name = list(self._buffer_cfg.keys())[0] buffer_class = napalm_logs.buffer.get_interface(buffer_name) log.debug('Setting up buffer interface "%s"', buffer_name) if 'expire_time' not in self._buffer_cfg[buffer_name]: self._buffer_cfg[buffer_name]['expire_time'] = CONFIG.BUFFER_EXPIRE_TIME self._buffer = buffer_class(**self._buffer_cfg[buffer_name])
[ "def", "_setup_buffer", "(", "self", ")", ":", "if", "not", "self", ".", "_buffer_cfg", "or", "not", "isinstance", "(", "self", ".", "_buffer_cfg", ",", "dict", ")", ":", "return", "buffer_name", "=", "list", "(", "self", ".", "_buffer_cfg", ".", "keys", "(", ")", ")", "[", "0", "]", "buffer_class", "=", "napalm_logs", ".", "buffer", ".", "get_interface", "(", "buffer_name", ")", "log", ".", "debug", "(", "'Setting up buffer interface \"%s\"'", ",", "buffer_name", ")", "if", "'expire_time'", "not", "in", "self", ".", "_buffer_cfg", "[", "buffer_name", "]", ":", "self", ".", "_buffer_cfg", "[", "buffer_name", "]", "[", "'expire_time'", "]", "=", "CONFIG", ".", "BUFFER_EXPIRE_TIME", "self", ".", "_buffer", "=", "buffer_class", "(", "*", "*", "self", ".", "_buffer_cfg", "[", "buffer_name", "]", ")" ]
Setup the buffer subsystem.
[ "Setup", "the", "buffer", "subsystem", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L134-L145
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._setup_metrics
def _setup_metrics(self): """ Start metric exposition """ path = os.environ.get("prometheus_multiproc_dir") if not os.path.exists(self.metrics_dir): try: log.info("Creating metrics directory") os.makedirs(self.metrics_dir) except OSError: log.error("Failed to create metrics directory!") raise ConfigurationException("Failed to create metrics directory!") path = self.metrics_dir elif path != self.metrics_dir: path = self.metrics_dir os.environ['prometheus_multiproc_dir'] = path log.info("Cleaning metrics collection directory") log.debug("Metrics directory set to: {}".format(path)) files = os.listdir(path) for f in files: if f.endswith(".db"): os.remove(os.path.join(path, f)) log.debug("Starting metrics exposition") if self.metrics_enabled: registry = CollectorRegistry() multiprocess.MultiProcessCollector(registry) start_http_server( port=self.metrics_port, addr=self.metrics_address, registry=registry )
python
def _setup_metrics(self): path = os.environ.get("prometheus_multiproc_dir") if not os.path.exists(self.metrics_dir): try: log.info("Creating metrics directory") os.makedirs(self.metrics_dir) except OSError: log.error("Failed to create metrics directory!") raise ConfigurationException("Failed to create metrics directory!") path = self.metrics_dir elif path != self.metrics_dir: path = self.metrics_dir os.environ['prometheus_multiproc_dir'] = path log.info("Cleaning metrics collection directory") log.debug("Metrics directory set to: {}".format(path)) files = os.listdir(path) for f in files: if f.endswith(".db"): os.remove(os.path.join(path, f)) log.debug("Starting metrics exposition") if self.metrics_enabled: registry = CollectorRegistry() multiprocess.MultiProcessCollector(registry) start_http_server( port=self.metrics_port, addr=self.metrics_address, registry=registry )
[ "def", "_setup_metrics", "(", "self", ")", ":", "path", "=", "os", ".", "environ", ".", "get", "(", "\"prometheus_multiproc_dir\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "metrics_dir", ")", ":", "try", ":", "log", ".", "info", "(", "\"Creating metrics directory\"", ")", "os", ".", "makedirs", "(", "self", ".", "metrics_dir", ")", "except", "OSError", ":", "log", ".", "error", "(", "\"Failed to create metrics directory!\"", ")", "raise", "ConfigurationException", "(", "\"Failed to create metrics directory!\"", ")", "path", "=", "self", ".", "metrics_dir", "elif", "path", "!=", "self", ".", "metrics_dir", ":", "path", "=", "self", ".", "metrics_dir", "os", ".", "environ", "[", "'prometheus_multiproc_dir'", "]", "=", "path", "log", ".", "info", "(", "\"Cleaning metrics collection directory\"", ")", "log", ".", "debug", "(", "\"Metrics directory set to: {}\"", ".", "format", "(", "path", ")", ")", "files", "=", "os", ".", "listdir", "(", "path", ")", "for", "f", "in", "files", ":", "if", "f", ".", "endswith", "(", "\".db\"", ")", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", ")", "log", ".", "debug", "(", "\"Starting metrics exposition\"", ")", "if", "self", ".", "metrics_enabled", ":", "registry", "=", "CollectorRegistry", "(", ")", "multiprocess", ".", "MultiProcessCollector", "(", "registry", ")", "start_http_server", "(", "port", "=", "self", ".", "metrics_port", ",", "addr", "=", "self", ".", "metrics_address", ",", "registry", "=", "registry", ")" ]
Start metric exposition
[ "Start", "metric", "exposition" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L147-L177
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._setup_log
def _setup_log(self): ''' Setup the log object. ''' logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower()) logging.basicConfig(format=self.log_format, level=logging_level)
python
def _setup_log(self): logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower()) logging.basicConfig(format=self.log_format, level=logging_level)
[ "def", "_setup_log", "(", "self", ")", ":", "logging_level", "=", "CONFIG", ".", "LOGGING_LEVEL", ".", "get", "(", "self", ".", "log_level", ".", "lower", "(", ")", ")", "logging", ".", "basicConfig", "(", "format", "=", "self", ".", "log_format", ",", "level", "=", "logging_level", ")" ]
Setup the log object.
[ "Setup", "the", "log", "object", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L179-L185
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._post_preparation
def _post_preparation(self): ''' The steps for post-preparation (when the logs, and everything is already setup). ''' self.opts['hwm'] = CONFIG.ZMQ_INTERNAL_HWM if self.hwm is None else self.hwm self.opts['_server_send_unknown'] = False for pub in self.publisher: pub_name = list(pub.keys())[0] pub_opts = list(pub.values())[0] error_whitelist = pub_opts.get('error_whitelist', []) error_blacklist = pub_opts.get('error_blacklist', []) if 'UNKNOWN' not in error_blacklist: # by default we should not send unknown messages error_blacklist.append('UNKNOWN') if 'RAW' not in error_blacklist: # same with RAW error_blacklist.append('RAW') # This implementation is a bit sub-optimal, but more readable like # that. It is executed only at the init, so just once. if 'only_unknown' in pub_opts and pub[pub_name]['only_unknown']: pub[pub_name]['send_unknown'] = True error_whitelist = ['UNKNOWN'] error_blacklist = [] if 'only_raw' in pub_opts and pub[pub_name]['only_raw']: pub[pub_name]['send_raw'] = True error_whitelist = ['RAW'] error_blacklist = [] if 'send_unknown' in pub_opts and 'UNKNOWN' in error_blacklist: error_blacklist.remove('UNKNOWN') if 'send_raw' in pub_opts and 'RAW' in error_blacklist: error_blacklist.remove('RAW') self.opts['_server_send_unknown'] |= 'UNKNOWN' in error_whitelist or 'UNKNOWN' not in error_blacklist pub[pub_name]['error_whitelist'] = error_whitelist pub[pub_name]['error_blacklist'] = error_blacklist
python
def _post_preparation(self): self.opts['hwm'] = CONFIG.ZMQ_INTERNAL_HWM if self.hwm is None else self.hwm self.opts['_server_send_unknown'] = False for pub in self.publisher: pub_name = list(pub.keys())[0] pub_opts = list(pub.values())[0] error_whitelist = pub_opts.get('error_whitelist', []) error_blacklist = pub_opts.get('error_blacklist', []) if 'UNKNOWN' not in error_blacklist: error_blacklist.append('UNKNOWN') if 'RAW' not in error_blacklist: error_blacklist.append('RAW') if 'only_unknown' in pub_opts and pub[pub_name]['only_unknown']: pub[pub_name]['send_unknown'] = True error_whitelist = ['UNKNOWN'] error_blacklist = [] if 'only_raw' in pub_opts and pub[pub_name]['only_raw']: pub[pub_name]['send_raw'] = True error_whitelist = ['RAW'] error_blacklist = [] if 'send_unknown' in pub_opts and 'UNKNOWN' in error_blacklist: error_blacklist.remove('UNKNOWN') if 'send_raw' in pub_opts and 'RAW' in error_blacklist: error_blacklist.remove('RAW') self.opts['_server_send_unknown'] |= 'UNKNOWN' in error_whitelist or 'UNKNOWN' not in error_blacklist pub[pub_name]['error_whitelist'] = error_whitelist pub[pub_name]['error_blacklist'] = error_blacklist
[ "def", "_post_preparation", "(", "self", ")", ":", "self", ".", "opts", "[", "'hwm'", "]", "=", "CONFIG", ".", "ZMQ_INTERNAL_HWM", "if", "self", ".", "hwm", "is", "None", "else", "self", ".", "hwm", "self", ".", "opts", "[", "'_server_send_unknown'", "]", "=", "False", "for", "pub", "in", "self", ".", "publisher", ":", "pub_name", "=", "list", "(", "pub", ".", "keys", "(", ")", ")", "[", "0", "]", "pub_opts", "=", "list", "(", "pub", ".", "values", "(", ")", ")", "[", "0", "]", "error_whitelist", "=", "pub_opts", ".", "get", "(", "'error_whitelist'", ",", "[", "]", ")", "error_blacklist", "=", "pub_opts", ".", "get", "(", "'error_blacklist'", ",", "[", "]", ")", "if", "'UNKNOWN'", "not", "in", "error_blacklist", ":", "# by default we should not send unknown messages", "error_blacklist", ".", "append", "(", "'UNKNOWN'", ")", "if", "'RAW'", "not", "in", "error_blacklist", ":", "# same with RAW", "error_blacklist", ".", "append", "(", "'RAW'", ")", "# This implementation is a bit sub-optimal, but more readable like", "# that. It is executed only at the init, so just once.", "if", "'only_unknown'", "in", "pub_opts", "and", "pub", "[", "pub_name", "]", "[", "'only_unknown'", "]", ":", "pub", "[", "pub_name", "]", "[", "'send_unknown'", "]", "=", "True", "error_whitelist", "=", "[", "'UNKNOWN'", "]", "error_blacklist", "=", "[", "]", "if", "'only_raw'", "in", "pub_opts", "and", "pub", "[", "pub_name", "]", "[", "'only_raw'", "]", ":", "pub", "[", "pub_name", "]", "[", "'send_raw'", "]", "=", "True", "error_whitelist", "=", "[", "'RAW'", "]", "error_blacklist", "=", "[", "]", "if", "'send_unknown'", "in", "pub_opts", "and", "'UNKNOWN'", "in", "error_blacklist", ":", "error_blacklist", ".", "remove", "(", "'UNKNOWN'", ")", "if", "'send_raw'", "in", "pub_opts", "and", "'RAW'", "in", "error_blacklist", ":", "error_blacklist", ".", "remove", "(", "'RAW'", ")", "self", ".", "opts", "[", "'_server_send_unknown'", "]", "|=", "'UNKNOWN'", "in", "error_whitelist", "or", "'UNKNOWN'", "not", "in", "error_blacklist", "pub", "[", "pub_name", "]", "[", "'error_whitelist'", "]", "=", "error_whitelist", "pub", "[", "pub_name", "]", "[", "'error_blacklist'", "]", "=", "error_blacklist" ]
The steps for post-preparation (when the logs, and everything is already setup).
[ "The", "steps", "for", "post", "-", "preparation", "(", "when", "the", "logs", "and", "everything", "is", "already", "setup", ")", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L187-L221
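The publisher handling above reduces to a small set of defaulting rules: UNKNOWN and RAW are blacklisted unless explicitly requested, and only_unknown/only_raw collapse the whitelist to a single entry. A simplified sketch of just that defaulting logic (it operates on a plain options dict and tests truthiness rather than key presence, so it is an approximation, not the library's exact behaviour):

def default_error_filters(pub_opts):
    # Return (whitelist, blacklist) after applying the defaults described above.
    whitelist = list(pub_opts.get('error_whitelist', []))
    blacklist = list(pub_opts.get('error_blacklist', []))
    for silent in ('UNKNOWN', 'RAW'):
        if silent not in blacklist:
            blacklist.append(silent)
    if pub_opts.get('only_unknown'):
        return ['UNKNOWN'], []
    if pub_opts.get('only_raw'):
        return ['RAW'], []
    if pub_opts.get('send_unknown') and 'UNKNOWN' in blacklist:
        blacklist.remove('UNKNOWN')
    if pub_opts.get('send_raw') and 'RAW' in blacklist:
        blacklist.remove('RAW')
    return whitelist, blacklist

# default_error_filters({'send_raw': True}) returns ([], ['UNKNOWN'])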
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._whitelist_blacklist
def _whitelist_blacklist(self, os_name): ''' Determines if the OS should be ignored, depending on the whitelist-blacklist logic configured by the user. ''' return napalm_logs.ext.check_whitelist_blacklist(os_name, whitelist=self.device_whitelist, blacklist=self.device_blacklist)
python
def _whitelist_blacklist(self, os_name): return napalm_logs.ext.check_whitelist_blacklist(os_name, whitelist=self.device_whitelist, blacklist=self.device_blacklist)
[ "def", "_whitelist_blacklist", "(", "self", ",", "os_name", ")", ":", "return", "napalm_logs", ".", "ext", ".", "check_whitelist_blacklist", "(", "os_name", ",", "whitelist", "=", "self", ".", "device_whitelist", ",", "blacklist", "=", "self", ".", "device_blacklist", ")" ]
Determines if the OS should be ignored, depending on the whitelist-blacklist logic configured by the user.
[ "Determines", "if", "the", "OS", "should", "be", "ignored", "depending", "on", "the", "whitelist", "-", "blacklist", "logic", "configured", "by", "the", "user", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L223-L231
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._extract_yaml_docstring
def _extract_yaml_docstring(stream): ''' Extract the comments at the top of the YAML file, from the stream handler. Return the extracted comment as string. ''' comment_lines = [] lines = stream.read().splitlines() for line in lines: line_strip = line.strip() if not line_strip: continue if line_strip.startswith('#'): comment_lines.append( line_strip.replace('#', '', 1).strip() ) else: break return ' '.join(comment_lines)
python
def _extract_yaml_docstring(stream): comment_lines = [] lines = stream.read().splitlines() for line in lines: line_strip = line.strip() if not line_strip: continue if line_strip.startswith('#'): comment_lines.append( line_strip.replace('#', '', 1).strip() ) else: break return ' '.join(comment_lines)
[ "def", "_extract_yaml_docstring", "(", "stream", ")", ":", "comment_lines", "=", "[", "]", "lines", "=", "stream", ".", "read", "(", ")", ".", "splitlines", "(", ")", "for", "line", "in", "lines", ":", "line_strip", "=", "line", ".", "strip", "(", ")", "if", "not", "line_strip", ":", "continue", "if", "line_strip", ".", "startswith", "(", "'#'", ")", ":", "comment_lines", ".", "append", "(", "line_strip", ".", "replace", "(", "'#'", ",", "''", ",", "1", ")", ".", "strip", "(", ")", ")", "else", ":", "break", "return", "' '", ".", "join", "(", "comment_lines", ")" ]
Extract the comments at the top of the YAML file, from the stream handler. Return the extracted comment as string.
[ "Extract", "the", "comments", "at", "the", "top", "of", "the", "YAML", "file", "from", "the", "stream", "handler", ".", "Return", "the", "extracted", "comment", "as", "string", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L234-L252
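_extract_yaml_docstring collects the leading '#' comment lines of a YAML profile and joins them into one string, stopping at the first non-comment line. A self-contained example of the same behaviour against an in-memory stream:

import io

def extract_yaml_docstring(stream):
    # Join the leading comment lines; stop at the first real content line.
    comment_lines = []
    for line in stream.read().splitlines():
        stripped = line.strip()
        if not stripped:
            continue
        if stripped.startswith('#'):
            comment_lines.append(stripped.replace('#', '', 1).strip())
        else:
            break
    return ' '.join(comment_lines)

profile = io.StringIO('# BGP neighbor down\n# message for this profile\nmessages: []\n')
# extract_yaml_docstring(profile) returns 'BGP neighbor down message for this profile'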
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._load_config
def _load_config(self, path): ''' Read the configuration under a specific path and return the object. ''' config = {} log.debug('Reading configuration from %s', path) if not os.path.isdir(path): msg = ( 'Unable to read from {path}: ' 'the directory does not exist!' ).format(path=path) log.error(msg) raise IOError(msg) # The directory tree should look like the following: # . # ├── __init__.py # ├── eos # │   └── init.yml # ├── iosxr # │   └── __init__.py # ├── junos # │   └── init.yml # │   └── bgp_read_message.py # │   └── BGP_PREFIX_THRESH_EXCEEDED.py # └── nxos # └── init.yml os_subdirs = [sdpath[0] for sdpath in os.walk(path)][1:] if not os_subdirs: log.error('%s does not contain any OS subdirectories', path) for os_dir in os_subdirs: os_name = os.path.split(os_dir)[1] # the network OS name if os_name.startswith('__'): log.debug('Ignoring %s', os_name) continue if not self._whitelist_blacklist(os_name): log.debug('Not building config for %s (whitelist-blacklist logic)', os_name) # Ignore devices that are not in the whitelist (if defined), # or those operating systems that are on the blacklist. # This way we can prevent starting unwanted sub-processes. continue log.debug('Building config for %s:', os_name) log.debug('='*40) if os_name not in config: config[os_name] = {} files = os.listdir(os_dir) # Read all files under the OS dir for file_ in files: log.debug('Inspecting %s', file_) file_name, file_extension = os.path.splitext(file_) file_extension = file_extension.replace('.', '') filepath = os.path.join(os_dir, file_) comment = '' if file_extension in ('yml', 'yaml'): try: log.debug('Loading %s as YAML', file_) with open(filepath, 'r') as fstream: cfg = yaml.load(fstream) # Reposition at the top and read the comments. if file_name not in CONFIG.OS_INIT_FILENAMES: # If the file name is not a profile init. fstream.seek(0) comment = self._extract_yaml_docstring(fstream) if 'messages' in cfg: for message in cfg['messages']: message['__doc__'] = comment napalm_logs.utils.dictupdate(config[os_name], cfg) except yaml.YAMLError as yamlexc: log.error('Invalid YAML file: %s', filepath, exc_info=True) if file_name in CONFIG.OS_INIT_FILENAMES: # Raise exception and break only when the init file is borked # otherwise, it will try loading best efforts. raise IOError(yamlexc) elif file_extension == 'py': log.debug('Lazy loading Python module %s', file_) mod_fp, mod_file, mod_data = imp.find_module(file_name, [os_dir]) mod = imp.load_module(file_name, mod_fp, mod_file, mod_data) if file_name in CONFIG.OS_INIT_FILENAMES: # Init file defined as Python module log.debug('%s seems to be a Python profiler', filepath) # Init files require to define the `extract` function. # Sample init file: # def extract(message): # return {'tag': 'A_TAG', 'host': 'hostname'} if hasattr(mod, CONFIG.INIT_RUN_FUN) and\ hasattr(getattr(mod, CONFIG.INIT_RUN_FUN), '__call__'): # if extract is defined and is callable if 'prefixes' not in config[os_name]: config[os_name]['prefixes'] = [] config[os_name]['prefixes'].append({ 'values': {'tag': ''}, 'line': '', '__python_fun__': getattr(mod, CONFIG.INIT_RUN_FUN), '__python_mod__': filepath # Will be used for debugging }) log.info('Adding the prefix function defined under %s to %s', filepath, os_name) elif file_name != '__init__': # If __init__.py does not have the extractor function, no problem. log.warning('%s does not have the "%s" function defined. Ignoring.', filepath, CONFIG.INIT_RUN_FUN) else: # Other python files require the `emit` function. 
if hasattr(mod, '__tag__'): mod_tag = getattr(mod, '__tag__') else: log.info('%s does not have __tag__, defaulting the tag to %s', filepath, file_name) mod_tag = file_name if hasattr(mod, '__error__'): mod_err = getattr(mod, '__error__') else: log.info('%s does not have __error__, defaulting the error to %s', filepath, file_name) mod_err = file_name if hasattr(mod, '__match_on__'): err_match = getattr(mod, '__match_on__') else: err_match = 'tag' model = CONFIG.OPEN_CONFIG_NO_MODEL if hasattr(mod, '__yang_model__'): model = getattr(mod, '__yang_model__') log.debug('Mathing on %s', err_match) if hasattr(mod, CONFIG.CONFIG_RUN_FUN) and\ hasattr(getattr(mod, CONFIG.CONFIG_RUN_FUN), '__call__'): log.debug('Adding %s with tag:%s, error:%s, matching on:%s', file_, mod_tag, mod_err, err_match) # the structure below must correspond to the VALID_CONFIG structure enforcement if 'messages' not in config[os_name]: config[os_name]['messages'] = [] config[os_name]['messages'].append({ 'tag': mod_tag, 'error': mod_err, 'match_on': err_match, '__doc__': mod.__doc__, '__python_fun__': getattr(mod, CONFIG.CONFIG_RUN_FUN), '__python_mod__': filepath, # Will be used for debugging 'line': '', 'model': model, 'values': {}, 'mapping': {'variables': {}, 'static': {}} }) else: log.warning('%s does not have the "%s" function defined. Ignoring.', filepath, CONFIG.CONFIG_RUN_FUN) else: log.info('Ignoring %s (extension not allowed)', filepath) log.debug('-'*40) if not config: msg = 'Could not find proper configuration files under {path}'.format(path=path) log.error(msg) raise IOError(msg) log.debug('Complete config:') log.debug(config) log.debug('ConfigParserg size in bytes: %d', sys.getsizeof(config)) return config
python
def _load_config(self, path): config = {} log.debug('Reading configuration from %s', path) if not os.path.isdir(path): msg = ( 'Unable to read from {path}: ' 'the directory does not exist!' ).format(path=path) log.error(msg) raise IOError(msg) os_subdirs = [sdpath[0] for sdpath in os.walk(path)][1:] if not os_subdirs: log.error('%s does not contain any OS subdirectories', path) for os_dir in os_subdirs: os_name = os.path.split(os_dir)[1] if os_name.startswith('__'): log.debug('Ignoring %s', os_name) continue if not self._whitelist_blacklist(os_name): log.debug('Not building config for %s (whitelist-blacklist logic)', os_name) continue log.debug('Building config for %s:', os_name) log.debug('='*40) if os_name not in config: config[os_name] = {} files = os.listdir(os_dir) for file_ in files: log.debug('Inspecting %s', file_) file_name, file_extension = os.path.splitext(file_) file_extension = file_extension.replace('.', '') filepath = os.path.join(os_dir, file_) comment = '' if file_extension in ('yml', 'yaml'): try: log.debug('Loading %s as YAML', file_) with open(filepath, 'r') as fstream: cfg = yaml.load(fstream) if file_name not in CONFIG.OS_INIT_FILENAMES: fstream.seek(0) comment = self._extract_yaml_docstring(fstream) if 'messages' in cfg: for message in cfg['messages']: message['__doc__'] = comment napalm_logs.utils.dictupdate(config[os_name], cfg) except yaml.YAMLError as yamlexc: log.error('Invalid YAML file: %s', filepath, exc_info=True) if file_name in CONFIG.OS_INIT_FILENAMES: raise IOError(yamlexc) elif file_extension == 'py': log.debug('Lazy loading Python module %s', file_) mod_fp, mod_file, mod_data = imp.find_module(file_name, [os_dir]) mod = imp.load_module(file_name, mod_fp, mod_file, mod_data) if file_name in CONFIG.OS_INIT_FILENAMES: log.debug('%s seems to be a Python profiler', filepath) if hasattr(mod, CONFIG.INIT_RUN_FUN) and\ hasattr(getattr(mod, CONFIG.INIT_RUN_FUN), '__call__'): if 'prefixes' not in config[os_name]: config[os_name]['prefixes'] = [] config[os_name]['prefixes'].append({ 'values': {'tag': ''}, 'line': '', '__python_fun__': getattr(mod, CONFIG.INIT_RUN_FUN), '__python_mod__': filepath }) log.info('Adding the prefix function defined under %s to %s', filepath, os_name) elif file_name != '__init__': log.warning('%s does not have the "%s" function defined. 
Ignoring.', filepath, CONFIG.INIT_RUN_FUN) else: if hasattr(mod, '__tag__'): mod_tag = getattr(mod, '__tag__') else: log.info('%s does not have __tag__, defaulting the tag to %s', filepath, file_name) mod_tag = file_name if hasattr(mod, '__error__'): mod_err = getattr(mod, '__error__') else: log.info('%s does not have __error__, defaulting the error to %s', filepath, file_name) mod_err = file_name if hasattr(mod, '__match_on__'): err_match = getattr(mod, '__match_on__') else: err_match = 'tag' model = CONFIG.OPEN_CONFIG_NO_MODEL if hasattr(mod, '__yang_model__'): model = getattr(mod, '__yang_model__') log.debug('Mathing on %s', err_match) if hasattr(mod, CONFIG.CONFIG_RUN_FUN) and\ hasattr(getattr(mod, CONFIG.CONFIG_RUN_FUN), '__call__'): log.debug('Adding %s with tag:%s, error:%s, matching on:%s', file_, mod_tag, mod_err, err_match) if 'messages' not in config[os_name]: config[os_name]['messages'] = [] config[os_name]['messages'].append({ 'tag': mod_tag, 'error': mod_err, 'match_on': err_match, '__doc__': mod.__doc__, '__python_fun__': getattr(mod, CONFIG.CONFIG_RUN_FUN), '__python_mod__': filepath, 'line': '', 'model': model, 'values': {}, 'mapping': {'variables': {}, 'static': {}} }) else: log.warning('%s does not have the "%s" function defined. Ignoring.', filepath, CONFIG.CONFIG_RUN_FUN) else: log.info('Ignoring %s (extension not allowed)', filepath) log.debug('-'*40) if not config: msg = 'Could not find proper configuration files under {path}'.format(path=path) log.error(msg) raise IOError(msg) log.debug('Complete config:') log.debug(config) log.debug('ConfigParserg size in bytes: %d', sys.getsizeof(config)) return config
[ "def", "_load_config", "(", "self", ",", "path", ")", ":", "config", "=", "{", "}", "log", ".", "debug", "(", "'Reading configuration from %s'", ",", "path", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "msg", "=", "(", "'Unable to read from {path}: '", "'the directory does not exist!'", ")", ".", "format", "(", "path", "=", "path", ")", "log", ".", "error", "(", "msg", ")", "raise", "IOError", "(", "msg", ")", "# The directory tree should look like the following:", "# .", "# ├── __init__.py", "# ├── eos", "# │   └── init.yml", "# ├── iosxr", "# │   └── __init__.py", "# ├── junos", "# │   └── init.yml", "# │   └── bgp_read_message.py", "# │   └── BGP_PREFIX_THRESH_EXCEEDED.py", "# └── nxos", "# └── init.yml", "os_subdirs", "=", "[", "sdpath", "[", "0", "]", "for", "sdpath", "in", "os", ".", "walk", "(", "path", ")", "]", "[", "1", ":", "]", "if", "not", "os_subdirs", ":", "log", ".", "error", "(", "'%s does not contain any OS subdirectories'", ",", "path", ")", "for", "os_dir", "in", "os_subdirs", ":", "os_name", "=", "os", ".", "path", ".", "split", "(", "os_dir", ")", "[", "1", "]", "# the network OS name", "if", "os_name", ".", "startswith", "(", "'__'", ")", ":", "log", ".", "debug", "(", "'Ignoring %s'", ",", "os_name", ")", "continue", "if", "not", "self", ".", "_whitelist_blacklist", "(", "os_name", ")", ":", "log", ".", "debug", "(", "'Not building config for %s (whitelist-blacklist logic)'", ",", "os_name", ")", "# Ignore devices that are not in the whitelist (if defined),", "# or those operating systems that are on the blacklist.", "# This way we can prevent starting unwanted sub-processes.", "continue", "log", ".", "debug", "(", "'Building config for %s:'", ",", "os_name", ")", "log", ".", "debug", "(", "'='", "*", "40", ")", "if", "os_name", "not", "in", "config", ":", "config", "[", "os_name", "]", "=", "{", "}", "files", "=", "os", ".", "listdir", "(", "os_dir", ")", "# Read all files under the OS dir", "for", "file_", "in", "files", ":", "log", ".", "debug", "(", "'Inspecting %s'", ",", "file_", ")", "file_name", ",", "file_extension", "=", "os", ".", "path", ".", "splitext", "(", "file_", ")", "file_extension", "=", "file_extension", ".", "replace", "(", "'.'", ",", "''", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "os_dir", ",", "file_", ")", "comment", "=", "''", "if", "file_extension", "in", "(", "'yml'", ",", "'yaml'", ")", ":", "try", ":", "log", ".", "debug", "(", "'Loading %s as YAML'", ",", "file_", ")", "with", "open", "(", "filepath", ",", "'r'", ")", "as", "fstream", ":", "cfg", "=", "yaml", ".", "load", "(", "fstream", ")", "# Reposition at the top and read the comments.", "if", "file_name", "not", "in", "CONFIG", ".", "OS_INIT_FILENAMES", ":", "# If the file name is not a profile init.", "fstream", ".", "seek", "(", "0", ")", "comment", "=", "self", ".", "_extract_yaml_docstring", "(", "fstream", ")", "if", "'messages'", "in", "cfg", ":", "for", "message", "in", "cfg", "[", "'messages'", "]", ":", "message", "[", "'__doc__'", "]", "=", "comment", "napalm_logs", ".", "utils", ".", "dictupdate", "(", "config", "[", "os_name", "]", ",", "cfg", ")", "except", "yaml", ".", "YAMLError", "as", "yamlexc", ":", "log", ".", "error", "(", "'Invalid YAML file: %s'", ",", "filepath", ",", "exc_info", "=", "True", ")", "if", "file_name", "in", "CONFIG", ".", "OS_INIT_FILENAMES", ":", "# Raise exception and break only when the init file is borked", "# otherwise, it will try loading best efforts.", "raise", "IOError", "(", 
"yamlexc", ")", "elif", "file_extension", "==", "'py'", ":", "log", ".", "debug", "(", "'Lazy loading Python module %s'", ",", "file_", ")", "mod_fp", ",", "mod_file", ",", "mod_data", "=", "imp", ".", "find_module", "(", "file_name", ",", "[", "os_dir", "]", ")", "mod", "=", "imp", ".", "load_module", "(", "file_name", ",", "mod_fp", ",", "mod_file", ",", "mod_data", ")", "if", "file_name", "in", "CONFIG", ".", "OS_INIT_FILENAMES", ":", "# Init file defined as Python module", "log", ".", "debug", "(", "'%s seems to be a Python profiler'", ",", "filepath", ")", "# Init files require to define the `extract` function.", "# Sample init file:", "# def extract(message):", "# return {'tag': 'A_TAG', 'host': 'hostname'}", "if", "hasattr", "(", "mod", ",", "CONFIG", ".", "INIT_RUN_FUN", ")", "and", "hasattr", "(", "getattr", "(", "mod", ",", "CONFIG", ".", "INIT_RUN_FUN", ")", ",", "'__call__'", ")", ":", "# if extract is defined and is callable", "if", "'prefixes'", "not", "in", "config", "[", "os_name", "]", ":", "config", "[", "os_name", "]", "[", "'prefixes'", "]", "=", "[", "]", "config", "[", "os_name", "]", "[", "'prefixes'", "]", ".", "append", "(", "{", "'values'", ":", "{", "'tag'", ":", "''", "}", ",", "'line'", ":", "''", ",", "'__python_fun__'", ":", "getattr", "(", "mod", ",", "CONFIG", ".", "INIT_RUN_FUN", ")", ",", "'__python_mod__'", ":", "filepath", "# Will be used for debugging", "}", ")", "log", ".", "info", "(", "'Adding the prefix function defined under %s to %s'", ",", "filepath", ",", "os_name", ")", "elif", "file_name", "!=", "'__init__'", ":", "# If __init__.py does not have the extractor function, no problem.", "log", ".", "warning", "(", "'%s does not have the \"%s\" function defined. Ignoring.'", ",", "filepath", ",", "CONFIG", ".", "INIT_RUN_FUN", ")", "else", ":", "# Other python files require the `emit` function.", "if", "hasattr", "(", "mod", ",", "'__tag__'", ")", ":", "mod_tag", "=", "getattr", "(", "mod", ",", "'__tag__'", ")", "else", ":", "log", ".", "info", "(", "'%s does not have __tag__, defaulting the tag to %s'", ",", "filepath", ",", "file_name", ")", "mod_tag", "=", "file_name", "if", "hasattr", "(", "mod", ",", "'__error__'", ")", ":", "mod_err", "=", "getattr", "(", "mod", ",", "'__error__'", ")", "else", ":", "log", ".", "info", "(", "'%s does not have __error__, defaulting the error to %s'", ",", "filepath", ",", "file_name", ")", "mod_err", "=", "file_name", "if", "hasattr", "(", "mod", ",", "'__match_on__'", ")", ":", "err_match", "=", "getattr", "(", "mod", ",", "'__match_on__'", ")", "else", ":", "err_match", "=", "'tag'", "model", "=", "CONFIG", ".", "OPEN_CONFIG_NO_MODEL", "if", "hasattr", "(", "mod", ",", "'__yang_model__'", ")", ":", "model", "=", "getattr", "(", "mod", ",", "'__yang_model__'", ")", "log", ".", "debug", "(", "'Mathing on %s'", ",", "err_match", ")", "if", "hasattr", "(", "mod", ",", "CONFIG", ".", "CONFIG_RUN_FUN", ")", "and", "hasattr", "(", "getattr", "(", "mod", ",", "CONFIG", ".", "CONFIG_RUN_FUN", ")", ",", "'__call__'", ")", ":", "log", ".", "debug", "(", "'Adding %s with tag:%s, error:%s, matching on:%s'", ",", "file_", ",", "mod_tag", ",", "mod_err", ",", "err_match", ")", "# the structure below must correspond to the VALID_CONFIG structure enforcement", "if", "'messages'", "not", "in", "config", "[", "os_name", "]", ":", "config", "[", "os_name", "]", "[", "'messages'", "]", "=", "[", "]", "config", "[", "os_name", "]", "[", "'messages'", "]", ".", "append", "(", "{", "'tag'", ":", "mod_tag", ",", "'error'", ":", 
"mod_err", ",", "'match_on'", ":", "err_match", ",", "'__doc__'", ":", "mod", ".", "__doc__", ",", "'__python_fun__'", ":", "getattr", "(", "mod", ",", "CONFIG", ".", "CONFIG_RUN_FUN", ")", ",", "'__python_mod__'", ":", "filepath", ",", "# Will be used for debugging", "'line'", ":", "''", ",", "'model'", ":", "model", ",", "'values'", ":", "{", "}", ",", "'mapping'", ":", "{", "'variables'", ":", "{", "}", ",", "'static'", ":", "{", "}", "}", "}", ")", "else", ":", "log", ".", "warning", "(", "'%s does not have the \"%s\" function defined. Ignoring.'", ",", "filepath", ",", "CONFIG", ".", "CONFIG_RUN_FUN", ")", "else", ":", "log", ".", "info", "(", "'Ignoring %s (extension not allowed)'", ",", "filepath", ")", "log", ".", "debug", "(", "'-'", "*", "40", ")", "if", "not", "config", ":", "msg", "=", "'Could not find proper configuration files under {path}'", ".", "format", "(", "path", "=", "path", ")", "log", ".", "error", "(", "msg", ")", "raise", "IOError", "(", "msg", ")", "log", ".", "debug", "(", "'Complete config:'", ")", "log", ".", "debug", "(", "config", ")", "log", ".", "debug", "(", "'ConfigParserg size in bytes: %d'", ",", "sys", ".", "getsizeof", "(", "config", ")", ")", "return", "config" ]
Read the configuration under a specific path and return the object.
[ "Read", "the", "configuration", "under", "a", "specific", "path", "and", "return", "the", "object", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L254-L407
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._verify_config_dict
def _verify_config_dict(self, valid, config, dev_os, key_path=None): ''' Verify if the config dict is valid. ''' if not key_path: key_path = [] for key, value in valid.items(): self._verify_config_key(key, value, valid, config, dev_os, key_path)
python
def _verify_config_dict(self, valid, config, dev_os, key_path=None): if not key_path: key_path = [] for key, value in valid.items(): self._verify_config_key(key, value, valid, config, dev_os, key_path)
[ "def", "_verify_config_dict", "(", "self", ",", "valid", ",", "config", ",", "dev_os", ",", "key_path", "=", "None", ")", ":", "if", "not", "key_path", ":", "key_path", "=", "[", "]", "for", "key", ",", "value", "in", "valid", ".", "items", "(", ")", ":", "self", ".", "_verify_config_key", "(", "key", ",", "value", ",", "valid", ",", "config", ",", "dev_os", ",", "key_path", ")" ]
Verify if the config dict is valid.
[ "Verify", "if", "the", "config", "dict", "is", "valid", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L461-L468
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._verify_config
def _verify_config(self): ''' Verify that the config is correct ''' if not self.config_dict: self._raise_config_exception('No config found') # Check for device conifg, if there isn't anything then just log, do not raise an exception for dev_os, dev_config in self.config_dict.items(): if not dev_config: log.warning('No config found for %s', dev_os) continue # Compare the valid opts with the conifg self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os) log.debug('Read the config without error')
python
def _verify_config(self): if not self.config_dict: self._raise_config_exception('No config found') for dev_os, dev_config in self.config_dict.items(): if not dev_config: log.warning('No config found for %s', dev_os) continue self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os) log.debug('Read the config without error')
[ "def", "_verify_config", "(", "self", ")", ":", "if", "not", "self", ".", "config_dict", ":", "self", ".", "_raise_config_exception", "(", "'No config found'", ")", "# Check for device conifg, if there isn't anything then just log, do not raise an exception", "for", "dev_os", ",", "dev_config", "in", "self", ".", "config_dict", ".", "items", "(", ")", ":", "if", "not", "dev_config", ":", "log", ".", "warning", "(", "'No config found for %s'", ",", "dev_os", ")", "continue", "# Compare the valid opts with the conifg", "self", ".", "_verify_config_dict", "(", "CONFIG", ".", "VALID_CONFIG", ",", "dev_config", ",", "dev_os", ")", "log", ".", "debug", "(", "'Read the config without error'", ")" ]
Verify that the config is correct
[ "Verify", "that", "the", "config", "is", "correct" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L470-L483
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._build_config
def _build_config(self): ''' Build the config of the napalm syslog parser. ''' if not self.config_dict: if not self.config_path: # No custom config path requested # Read the native config files self.config_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), 'config' ) log.info('Reading the configuration from %s', self.config_path) self.config_dict = self._load_config(self.config_path) if not self.extension_config_dict and\ self.extension_config_path and\ os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path): # same path? # When extension config is not sent as dict # But `extension_config_path` is specified log.info('Reading extension configuration from %s', self.extension_config_path) self.extension_config_dict = self._load_config(self.extension_config_path) if self.extension_config_dict: napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict)
python
def _build_config(self): if not self.config_dict: if not self.config_path: self.config_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), 'config' ) log.info('Reading the configuration from %s', self.config_path) self.config_dict = self._load_config(self.config_path) if not self.extension_config_dict and\ self.extension_config_path and\ os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path): log.info('Reading extension configuration from %s', self.extension_config_path) self.extension_config_dict = self._load_config(self.extension_config_path) if self.extension_config_dict: napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict)
[ "def", "_build_config", "(", "self", ")", ":", "if", "not", "self", ".", "config_dict", ":", "if", "not", "self", ".", "config_path", ":", "# No custom config path requested", "# Read the native config files", "self", ".", "config_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ",", "'config'", ")", "log", ".", "info", "(", "'Reading the configuration from %s'", ",", "self", ".", "config_path", ")", "self", ".", "config_dict", "=", "self", ".", "_load_config", "(", "self", ".", "config_path", ")", "if", "not", "self", ".", "extension_config_dict", "and", "self", ".", "extension_config_path", "and", "os", ".", "path", ".", "normpath", "(", "self", ".", "extension_config_path", ")", "!=", "os", ".", "path", ".", "normpath", "(", "self", ".", "config_path", ")", ":", "# same path?", "# When extension config is not sent as dict", "# But `extension_config_path` is specified", "log", ".", "info", "(", "'Reading extension configuration from %s'", ",", "self", ".", "extension_config_path", ")", "self", ".", "extension_config_dict", "=", "self", ".", "_load_config", "(", "self", ".", "extension_config_path", ")", "if", "self", ".", "extension_config_dict", ":", "napalm_logs", ".", "utils", ".", "dictupdate", "(", "self", ".", "config_dict", ",", "self", ".", "extension_config_dict", ")" ]
Build the config of the napalm syslog parser.
[ "Build", "the", "config", "of", "the", "napalm", "syslog", "parser", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L485-L507
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._start_auth_proc
def _start_auth_proc(self): ''' Start the authenticator process. ''' log.debug('Computing the signing key hex') verify_key = self.__signing_key.verify_key sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder) log.debug('Starting the authenticator subprocess') auth = NapalmLogsAuthProc(self.certificate, self.keyfile, self.__priv_key, sgn_verify_hex, self.auth_address, self.auth_port) proc = Process(target=auth.start) proc.start() proc.description = 'Auth process' log.debug('Started auth process as %s with PID %s', proc._name, proc.pid) return proc
python
def _start_auth_proc(self): log.debug('Computing the signing key hex') verify_key = self.__signing_key.verify_key sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder) log.debug('Starting the authenticator subprocess') auth = NapalmLogsAuthProc(self.certificate, self.keyfile, self.__priv_key, sgn_verify_hex, self.auth_address, self.auth_port) proc = Process(target=auth.start) proc.start() proc.description = 'Auth process' log.debug('Started auth process as %s with PID %s', proc._name, proc.pid) return proc
[ "def", "_start_auth_proc", "(", "self", ")", ":", "log", ".", "debug", "(", "'Computing the signing key hex'", ")", "verify_key", "=", "self", ".", "__signing_key", ".", "verify_key", "sgn_verify_hex", "=", "verify_key", ".", "encode", "(", "encoder", "=", "nacl", ".", "encoding", ".", "HexEncoder", ")", "log", ".", "debug", "(", "'Starting the authenticator subprocess'", ")", "auth", "=", "NapalmLogsAuthProc", "(", "self", ".", "certificate", ",", "self", ".", "keyfile", ",", "self", ".", "__priv_key", ",", "sgn_verify_hex", ",", "self", ".", "auth_address", ",", "self", ".", "auth_port", ")", "proc", "=", "Process", "(", "target", "=", "auth", ".", "start", ")", "proc", ".", "start", "(", ")", "proc", ".", "description", "=", "'Auth process'", "log", ".", "debug", "(", "'Started auth process as %s with PID %s'", ",", "proc", ".", "_name", ",", "proc", ".", "pid", ")", "return", "proc" ]
Start the authenticator process.
[ "Start", "the", "authenticator", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L509-L527
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._start_lst_proc
def _start_lst_proc(self, listener_type, listener_opts): ''' Start the listener process. ''' log.debug('Starting the listener process for %s', listener_type) listener = NapalmLogsListenerProc(self.opts, self.address, self.port, listener_type, listener_opts=listener_opts) proc = Process(target=listener.start) proc.start() proc.description = 'Listener process' log.debug('Started listener process as %s with PID %s', proc._name, proc.pid) return proc
python
def _start_lst_proc(self, listener_type, listener_opts): log.debug('Starting the listener process for %s', listener_type) listener = NapalmLogsListenerProc(self.opts, self.address, self.port, listener_type, listener_opts=listener_opts) proc = Process(target=listener.start) proc.start() proc.description = 'Listener process' log.debug('Started listener process as %s with PID %s', proc._name, proc.pid) return proc
[ "def", "_start_lst_proc", "(", "self", ",", "listener_type", ",", "listener_opts", ")", ":", "log", ".", "debug", "(", "'Starting the listener process for %s'", ",", "listener_type", ")", "listener", "=", "NapalmLogsListenerProc", "(", "self", ".", "opts", ",", "self", ".", "address", ",", "self", ".", "port", ",", "listener_type", ",", "listener_opts", "=", "listener_opts", ")", "proc", "=", "Process", "(", "target", "=", "listener", ".", "start", ")", "proc", ".", "start", "(", ")", "proc", ".", "description", "=", "'Listener process'", "log", ".", "debug", "(", "'Started listener process as %s with PID %s'", ",", "proc", ".", "_name", ",", "proc", ".", "pid", ")", "return", "proc" ]
Start the listener process.
[ "Start", "the", "listener", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L529-L545
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._start_srv_proc
def _start_srv_proc(self, started_os_proc): ''' Start the server process. ''' log.debug('Starting the server process') server = NapalmLogsServerProc(self.opts, self.config_dict, started_os_proc, buffer=self._buffer) proc = Process(target=server.start) proc.start() proc.description = 'Server process' log.debug('Started server process as %s with PID %s', proc._name, proc.pid) return proc
python
def _start_srv_proc(self, started_os_proc): log.debug('Starting the server process') server = NapalmLogsServerProc(self.opts, self.config_dict, started_os_proc, buffer=self._buffer) proc = Process(target=server.start) proc.start() proc.description = 'Server process' log.debug('Started server process as %s with PID %s', proc._name, proc.pid) return proc
[ "def", "_start_srv_proc", "(", "self", ",", "started_os_proc", ")", ":", "log", ".", "debug", "(", "'Starting the server process'", ")", "server", "=", "NapalmLogsServerProc", "(", "self", ".", "opts", ",", "self", ".", "config_dict", ",", "started_os_proc", ",", "buffer", "=", "self", ".", "_buffer", ")", "proc", "=", "Process", "(", "target", "=", "server", ".", "start", ")", "proc", ".", "start", "(", ")", "proc", ".", "description", "=", "'Server process'", "log", ".", "debug", "(", "'Started server process as %s with PID %s'", ",", "proc", ".", "_name", ",", "proc", ".", "pid", ")", "return", "proc" ]
Start the server process.
[ "Start", "the", "server", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L547-L561
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._start_pub_proc
def _start_pub_proc(self, publisher_type, publisher_opts, pub_id): ''' Start the publisher process. ''' log.debug('Starting the publisher process for %s', publisher_type) publisher = NapalmLogsPublisherProc(self.opts, self.publish_address, self.publish_port, publisher_type, self.serializer, self.__priv_key, self.__signing_key, publisher_opts, disable_security=self.disable_security, pub_id=pub_id) proc = Process(target=publisher.start) proc.start() proc.description = 'Publisher process' log.debug('Started publisher process as %s with PID %s', proc._name, proc.pid) return proc
python
def _start_pub_proc(self, publisher_type, publisher_opts, pub_id): log.debug('Starting the publisher process for %s', publisher_type) publisher = NapalmLogsPublisherProc(self.opts, self.publish_address, self.publish_port, publisher_type, self.serializer, self.__priv_key, self.__signing_key, publisher_opts, disable_security=self.disable_security, pub_id=pub_id) proc = Process(target=publisher.start) proc.start() proc.description = 'Publisher process' log.debug('Started publisher process as %s with PID %s', proc._name, proc.pid) return proc
[ "def", "_start_pub_proc", "(", "self", ",", "publisher_type", ",", "publisher_opts", ",", "pub_id", ")", ":", "log", ".", "debug", "(", "'Starting the publisher process for %s'", ",", "publisher_type", ")", "publisher", "=", "NapalmLogsPublisherProc", "(", "self", ".", "opts", ",", "self", ".", "publish_address", ",", "self", ".", "publish_port", ",", "publisher_type", ",", "self", ".", "serializer", ",", "self", ".", "__priv_key", ",", "self", ".", "__signing_key", ",", "publisher_opts", ",", "disable_security", "=", "self", ".", "disable_security", ",", "pub_id", "=", "pub_id", ")", "proc", "=", "Process", "(", "target", "=", "publisher", ".", "start", ")", "proc", ".", "start", "(", ")", "proc", ".", "description", "=", "'Publisher process'", "log", ".", "debug", "(", "'Started publisher process as %s with PID %s'", ",", "proc", ".", "_name", ",", "proc", ".", "pid", ")", "return", "proc" ]
Start the publisher process.
[ "Start", "the", "publisher", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L573-L595
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._start_dev_proc
def _start_dev_proc(self, device_os, device_config): ''' Start the device worker process. ''' log.info('Starting the child process for %s', device_os) dos = NapalmLogsDeviceProc(device_os, self.opts, device_config) os_proc = Process(target=dos.start) os_proc.start() os_proc.description = '%s device process' % device_os log.debug('Started process %s for %s, having PID %s', os_proc._name, device_os, os_proc.pid) return os_proc
python
def _start_dev_proc(self, device_os, device_config): log.info('Starting the child process for %s', device_os) dos = NapalmLogsDeviceProc(device_os, self.opts, device_config) os_proc = Process(target=dos.start) os_proc.start() os_proc.description = '%s device process' % device_os log.debug('Started process %s for %s, having PID %s', os_proc._name, device_os, os_proc.pid) return os_proc
[ "def", "_start_dev_proc", "(", "self", ",", "device_os", ",", "device_config", ")", ":", "log", ".", "info", "(", "'Starting the child process for %s'", ",", "device_os", ")", "dos", "=", "NapalmLogsDeviceProc", "(", "device_os", ",", "self", ".", "opts", ",", "device_config", ")", "os_proc", "=", "Process", "(", "target", "=", "dos", ".", "start", ")", "os_proc", ".", "start", "(", ")", "os_proc", ".", "description", "=", "'%s device process'", "%", "device_os", "log", ".", "debug", "(", "'Started process %s for %s, having PID %s'", ",", "os_proc", ".", "_name", ",", "device_os", ",", "os_proc", ".", "pid", ")", "return", "os_proc" ]
Start the device worker process.
[ "Start", "the", "device", "worker", "process", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L597-L611
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs.start_engine
def start_engine(self): ''' Start the child processes (one per device OS) ''' if self.disable_security is True: log.warning('***Not starting the authenticator process due to disable_security being set to True***') else: log.debug('Generating the private key') self.__priv_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE) log.debug('Generating the signing key') self.__signing_key = nacl.signing.SigningKey.generate() # start the keepalive thread for the auth sub-process self._processes.append(self._start_auth_proc()) log.debug('Starting the internal proxy') proc = self._start_pub_px_proc() self._processes.append(proc) # publisher process start pub_id = 0 for pub in self.publisher: publisher_type, publisher_opts = list(pub.items())[0] proc = self._start_pub_proc(publisher_type, publisher_opts, pub_id) self._processes.append(proc) pub_id += 1 # device process start log.info('Starting child processes for each device type') started_os_proc = [] for device_os, device_config in self.config_dict.items(): if not self._whitelist_blacklist(device_os): log.debug('Not starting process for %s (whitelist-blacklist logic)', device_os) # Ignore devices that are not in the whitelist (if defined), # or those operating systems that are on the blacklist. # This way we can prevent starting unwanted sub-processes. continue log.debug('Will start %d worker process(es) for %s', self.device_worker_processes, device_os) for proc_index in range(self.device_worker_processes): self._processes.append(self._start_dev_proc(device_os, device_config)) started_os_proc.append(device_os) # start the server process self._processes.append(self._start_srv_proc(started_os_proc)) # start listener process for lst in self.listener: listener_type, listener_opts = list(lst.items())[0] proc = self._start_lst_proc(listener_type, listener_opts) self._processes.append(proc) thread = threading.Thread(target=self._check_children) thread.start()
python
def start_engine(self): if self.disable_security is True: log.warning('***Not starting the authenticator process due to disable_security being set to True***') else: log.debug('Generating the private key') self.__priv_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE) log.debug('Generating the signing key') self.__signing_key = nacl.signing.SigningKey.generate() self._processes.append(self._start_auth_proc()) log.debug('Starting the internal proxy') proc = self._start_pub_px_proc() self._processes.append(proc) pub_id = 0 for pub in self.publisher: publisher_type, publisher_opts = list(pub.items())[0] proc = self._start_pub_proc(publisher_type, publisher_opts, pub_id) self._processes.append(proc) pub_id += 1 log.info('Starting child processes for each device type') started_os_proc = [] for device_os, device_config in self.config_dict.items(): if not self._whitelist_blacklist(device_os): log.debug('Not starting process for %s (whitelist-blacklist logic)', device_os) continue log.debug('Will start %d worker process(es) for %s', self.device_worker_processes, device_os) for proc_index in range(self.device_worker_processes): self._processes.append(self._start_dev_proc(device_os, device_config)) started_os_proc.append(device_os) self._processes.append(self._start_srv_proc(started_os_proc)) for lst in self.listener: listener_type, listener_opts = list(lst.items())[0] proc = self._start_lst_proc(listener_type, listener_opts) self._processes.append(proc) thread = threading.Thread(target=self._check_children) thread.start()
[ "def", "start_engine", "(", "self", ")", ":", "if", "self", ".", "disable_security", "is", "True", ":", "log", ".", "warning", "(", "'***Not starting the authenticator process due to disable_security being set to True***'", ")", "else", ":", "log", ".", "debug", "(", "'Generating the private key'", ")", "self", ".", "__priv_key", "=", "nacl", ".", "utils", ".", "random", "(", "nacl", ".", "secret", ".", "SecretBox", ".", "KEY_SIZE", ")", "log", ".", "debug", "(", "'Generating the signing key'", ")", "self", ".", "__signing_key", "=", "nacl", ".", "signing", ".", "SigningKey", ".", "generate", "(", ")", "# start the keepalive thread for the auth sub-process", "self", ".", "_processes", ".", "append", "(", "self", ".", "_start_auth_proc", "(", ")", ")", "log", ".", "debug", "(", "'Starting the internal proxy'", ")", "proc", "=", "self", ".", "_start_pub_px_proc", "(", ")", "self", ".", "_processes", ".", "append", "(", "proc", ")", "# publisher process start", "pub_id", "=", "0", "for", "pub", "in", "self", ".", "publisher", ":", "publisher_type", ",", "publisher_opts", "=", "list", "(", "pub", ".", "items", "(", ")", ")", "[", "0", "]", "proc", "=", "self", ".", "_start_pub_proc", "(", "publisher_type", ",", "publisher_opts", ",", "pub_id", ")", "self", ".", "_processes", ".", "append", "(", "proc", ")", "pub_id", "+=", "1", "# device process start", "log", ".", "info", "(", "'Starting child processes for each device type'", ")", "started_os_proc", "=", "[", "]", "for", "device_os", ",", "device_config", "in", "self", ".", "config_dict", ".", "items", "(", ")", ":", "if", "not", "self", ".", "_whitelist_blacklist", "(", "device_os", ")", ":", "log", ".", "debug", "(", "'Not starting process for %s (whitelist-blacklist logic)'", ",", "device_os", ")", "# Ignore devices that are not in the whitelist (if defined),", "# or those operating systems that are on the blacklist.", "# This way we can prevent starting unwanted sub-processes.", "continue", "log", ".", "debug", "(", "'Will start %d worker process(es) for %s'", ",", "self", ".", "device_worker_processes", ",", "device_os", ")", "for", "proc_index", "in", "range", "(", "self", ".", "device_worker_processes", ")", ":", "self", ".", "_processes", ".", "append", "(", "self", ".", "_start_dev_proc", "(", "device_os", ",", "device_config", ")", ")", "started_os_proc", ".", "append", "(", "device_os", ")", "# start the server process", "self", ".", "_processes", ".", "append", "(", "self", ".", "_start_srv_proc", "(", "started_os_proc", ")", ")", "# start listener process", "for", "lst", "in", "self", ".", "listener", ":", "listener_type", ",", "listener_opts", "=", "list", "(", "lst", ".", "items", "(", ")", ")", "[", "0", "]", "proc", "=", "self", ".", "_start_lst_proc", "(", "listener_type", ",", "listener_opts", ")", "self", ".", "_processes", ".", "append", "(", "proc", ")", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_check_children", ")", "thread", ".", "start", "(", ")" ]
Start the child processes (one per device OS)
[ "Start", "the", "child", "processes", "(", "one", "per", "device", "OS", ")" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L613-L662
napalm-automation/napalm-logs
napalm_logs/base.py
NapalmLogs._check_children
def _check_children(self): ''' Check all of the child processes are still running ''' while self.up: time.sleep(1) for process in self._processes: if process.is_alive() is True: continue log.debug('%s is dead. Stopping the napalm-logs engine.', process.description) self.stop_engine()
python
def _check_children(self): while self.up: time.sleep(1) for process in self._processes: if process.is_alive() is True: continue log.debug('%s is dead. Stopping the napalm-logs engine.', process.description) self.stop_engine()
[ "def", "_check_children", "(", "self", ")", ":", "while", "self", ".", "up", ":", "time", ".", "sleep", "(", "1", ")", "for", "process", "in", "self", ".", "_processes", ":", "if", "process", ".", "is_alive", "(", ")", "is", "True", ":", "continue", "log", ".", "debug", "(", "'%s is dead. Stopping the napalm-logs engine.'", ",", "process", ".", "description", ")", "self", ".", "stop_engine", "(", ")" ]
Check all of the child processes are still running
[ "Check", "all", "of", "the", "child", "processes", "are", "still", "running" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L664-L674
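Taken together, the engine records above (start_engine, the _start_*_proc helpers, _check_children) follow one spawn-and-watch pattern: every component runs in its own multiprocessing.Process, and a watchdog thread shuts the engine down as soon as any child dies. A minimal, self-contained sketch of that pattern, with made-up names (worker, Engine) rather than the real napalm-logs classes:

import time
import threading
from multiprocessing import Process


def worker():
    # Placeholder child body; the real engine runs listeners, the server,
    # device workers and publishers here.
    while True:
        time.sleep(1)


class Engine:
    def __init__(self, worker_count=2):
        self.up = True
        self._processes = []
        self.worker_count = worker_count

    def start(self):
        for index in range(self.worker_count):
            proc = Process(target=worker)
            proc.start()
            # Plain attribute used for logging, same trick as proc.description above.
            proc.description = 'worker #%d' % index
            self._processes.append(proc)
        # Watchdog mirroring _check_children: stop everything when a child dies.
        threading.Thread(target=self._check_children).start()

    def _check_children(self):
        while self.up:
            time.sleep(1)
            for proc in self._processes:
                if not proc.is_alive():
                    self.stop()

    def stop(self):
        self.up = False
        for proc in self._processes:
            proc.terminate()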
napalm-automation/napalm-logs
napalm_logs/pub_proxy.py
NapalmLogsPublisherProxy._setup_ipc
def _setup_ipc(self): ''' Setup the IPC PUB and SUB sockets for the proxy. ''' log.debug('Setting up the internal IPC proxy') self.ctx = zmq.Context() # Frontend self.sub = self.ctx.socket(zmq.SUB) self.sub.bind(PUB_PX_IPC_URL) self.sub.setsockopt(zmq.SUBSCRIBE, b'') log.debug('Setting HWM for the proxy frontend: %d', self.hwm) try: self.sub.setsockopt(zmq.HWM, self.hwm) # zmq 2 except AttributeError: # zmq 3 self.sub.setsockopt(zmq.SNDHWM, self.hwm) # Backend self.pub = self.ctx.socket(zmq.PUB) self.pub.bind(PUB_IPC_URL) log.debug('Setting HWM for the proxy backend: %d', self.hwm) try: self.pub.setsockopt(zmq.HWM, self.hwm) # zmq 2 except AttributeError: # zmq 3 self.pub.setsockopt(zmq.SNDHWM, self.hwm)
python
def _setup_ipc(self): log.debug('Setting up the internal IPC proxy') self.ctx = zmq.Context() self.sub = self.ctx.socket(zmq.SUB) self.sub.bind(PUB_PX_IPC_URL) self.sub.setsockopt(zmq.SUBSCRIBE, b'') log.debug('Setting HWM for the proxy frontend: %d', self.hwm) try: self.sub.setsockopt(zmq.HWM, self.hwm) except AttributeError: self.sub.setsockopt(zmq.SNDHWM, self.hwm) self.pub = self.ctx.socket(zmq.PUB) self.pub.bind(PUB_IPC_URL) log.debug('Setting HWM for the proxy backend: %d', self.hwm) try: self.pub.setsockopt(zmq.HWM, self.hwm) except AttributeError: self.pub.setsockopt(zmq.SNDHWM, self.hwm)
[ "def", "_setup_ipc", "(", "self", ")", ":", "log", ".", "debug", "(", "'Setting up the internal IPC proxy'", ")", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "# Frontend", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "SUB", ")", "self", ".", "sub", ".", "bind", "(", "PUB_PX_IPC_URL", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "b''", ")", "log", ".", "debug", "(", "'Setting HWM for the proxy frontend: %d'", ",", "self", ".", "hwm", ")", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "hwm", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "hwm", ")", "# Backend", "self", ".", "pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PUB", ")", "self", ".", "pub", ".", "bind", "(", "PUB_IPC_URL", ")", "log", ".", "debug", "(", "'Setting HWM for the proxy backend: %d'", ",", "self", ".", "hwm", ")", "try", ":", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "hwm", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "hwm", ")" ]
Setup the IPC PUB and SUB sockets for the proxy.
[ "Setup", "the", "IPC", "PUB", "and", "SUB", "sockets", "for", "the", "proxy", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/pub_proxy.py#L38-L64
napalm-automation/napalm-logs
napalm_logs/pub_proxy.py
NapalmLogsPublisherProxy.start
def start(self): ''' Listen to messages and publish them. ''' self._setup_ipc() # Start suicide polling thread thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) try: zmq.proxy(self.sub, self.pub) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error)
python
def start(self): self._setup_ipc() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) try: zmq.proxy(self.sub, self.pub) except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error)
[ "def", "start", "(", "self", ")", ":", "self", ".", "_setup_ipc", "(", ")", "# Start suicide polling thread", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "try", ":", "zmq", ".", "proxy", "(", "self", ".", "sub", ",", "self", ".", "pub", ")", "except", "zmq", ".", "ZMQError", "as", "error", ":", "if", "self", ".", "__up", "is", "False", ":", "log", ".", "info", "(", "'Exiting on process shutdown'", ")", "return", "else", ":", "log", ".", "error", "(", "error", ",", "exc_info", "=", "True", ")", "raise", "NapalmLogsExit", "(", "error", ")" ]
Listen to messages and publish them.
[ "Listen", "to", "messages", "and", "publish", "them", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/pub_proxy.py#L66-L83
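The two pub_proxy records above reduce to a standard ZeroMQ forwarder: a SUB socket bound as the frontend, a PUB socket bound as the backend, and zmq.proxy() shuttling messages between them, with the high-water-mark option name depending on the pyzmq version. A rough standalone sketch; the ipc:// URLs are placeholders, not the real PUB_PX_IPC_URL / PUB_IPC_URL values:

import zmq


def set_hwm(socket, hwm):
    # Old pyzmq (zmq 2.x) exposes a single HWM option; newer releases split it
    # into SNDHWM / RCVHWM, hence the AttributeError fallback used above.
    try:
        socket.setsockopt(zmq.HWM, hwm)
    except AttributeError:
        socket.setsockopt(zmq.SNDHWM, hwm)
        socket.setsockopt(zmq.RCVHWM, hwm)


def run_proxy(frontend_url='ipc:///tmp/example_frontend.ipc',
              backend_url='ipc:///tmp/example_backend.ipc',
              hwm=1000):
    ctx = zmq.Context()
    sub = ctx.socket(zmq.SUB)    # frontend: receives from the device processes
    sub.bind(frontend_url)
    sub.setsockopt(zmq.SUBSCRIBE, b'')
    set_hwm(sub, hwm)
    pub = ctx.socket(zmq.PUB)    # backend: fans out to the publisher processes
    pub.bind(backend_url)
    set_hwm(pub, hwm)
    zmq.proxy(sub, pub)          # blocks until the context is terminated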
napalm-automation/napalm-logs
napalm_logs/publisher.py
NapalmLogsPublisherProc._setup_ipc
def _setup_ipc(self): ''' Subscribe to the pub IPC and publish the messages on the right transport. ''' self.ctx = zmq.Context() log.debug('Setting up the %s publisher subscriber #%d', self._transport_type, self.pub_id) self.sub = self.ctx.socket(zmq.SUB) self.sub.connect(PUB_IPC_URL) self.sub.setsockopt(zmq.SUBSCRIBE, b'') try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
python
def _setup_ipc(self): self.ctx = zmq.Context() log.debug('Setting up the %s publisher subscriber #%d', self._transport_type, self.pub_id) self.sub = self.ctx.socket(zmq.SUB) self.sub.connect(PUB_IPC_URL) self.sub.setsockopt(zmq.SUBSCRIBE, b'') try: self.sub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
[ "def", "_setup_ipc", "(", "self", ")", ":", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "log", ".", "debug", "(", "'Setting up the %s publisher subscriber #%d'", ",", "self", ".", "_transport_type", ",", "self", ".", "pub_id", ")", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "SUB", ")", "self", ".", "sub", ".", "connect", "(", "PUB_IPC_URL", ")", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "b''", ")", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")" ]
Subscribe to the pub IPC and publish the messages on the right transport.
[ "Subscribe", "to", "the", "pub", "IPC", "and", "publish", "the", "messages", "on", "the", "right", "transport", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L71-L87
napalm-automation/napalm-logs
napalm_logs/publisher.py
NapalmLogsPublisherProc._setup_transport
def _setup_transport(self): ''' Setup the transport. ''' if 'RAW' in self.error_whitelist: log.info('%s %d will publish partially parsed messages', self._transport_type, self.pub_id) if 'UNKNOWN' in self.error_whitelist: log.info('%s %d will publish unknown messages', self._transport_type, self.pub_id) transport_class = get_transport(self._transport_type) log.debug('Serializing the object for %s using %s', self._transport_type, self.serializer) self.serializer_fun = get_serializer(self.serializer) self.transport = transport_class(self.address, self.port, **self.publisher_opts) self.__transport_encrypt = True if hasattr(self.transport, 'NO_ENCRYPT') and\ getattr(self.transport, 'NO_ENCRYPT') is True: self.__transport_encrypt = False
python
def _setup_transport(self): if 'RAW' in self.error_whitelist: log.info('%s %d will publish partially parsed messages', self._transport_type, self.pub_id) if 'UNKNOWN' in self.error_whitelist: log.info('%s %d will publish unknown messages', self._transport_type, self.pub_id) transport_class = get_transport(self._transport_type) log.debug('Serializing the object for %s using %s', self._transport_type, self.serializer) self.serializer_fun = get_serializer(self.serializer) self.transport = transport_class(self.address, self.port, **self.publisher_opts) self.__transport_encrypt = True if hasattr(self.transport, 'NO_ENCRYPT') and\ getattr(self.transport, 'NO_ENCRYPT') is True: self.__transport_encrypt = False
[ "def", "_setup_transport", "(", "self", ")", ":", "if", "'RAW'", "in", "self", ".", "error_whitelist", ":", "log", ".", "info", "(", "'%s %d will publish partially parsed messages'", ",", "self", ".", "_transport_type", ",", "self", ".", "pub_id", ")", "if", "'UNKNOWN'", "in", "self", ".", "error_whitelist", ":", "log", ".", "info", "(", "'%s %d will publish unknown messages'", ",", "self", ".", "_transport_type", ",", "self", ".", "pub_id", ")", "transport_class", "=", "get_transport", "(", "self", ".", "_transport_type", ")", "log", ".", "debug", "(", "'Serializing the object for %s using %s'", ",", "self", ".", "_transport_type", ",", "self", ".", "serializer", ")", "self", ".", "serializer_fun", "=", "get_serializer", "(", "self", ".", "serializer", ")", "self", ".", "transport", "=", "transport_class", "(", "self", ".", "address", ",", "self", ".", "port", ",", "*", "*", "self", ".", "publisher_opts", ")", "self", ".", "__transport_encrypt", "=", "True", "if", "hasattr", "(", "self", ".", "transport", ",", "'NO_ENCRYPT'", ")", "and", "getattr", "(", "self", ".", "transport", ",", "'NO_ENCRYPT'", ")", "is", "True", ":", "self", ".", "__transport_encrypt", "=", "False" ]
Setup the transport.
[ "Setup", "the", "transport", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L89-L108
napalm-automation/napalm-logs
napalm_logs/publisher.py
NapalmLogsPublisherProc._prepare
def _prepare(self, serialized_obj): ''' Prepare the object to be sent over the untrusted channel. ''' # generating a nonce nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) # encrypting using the nonce encrypted = self.__safe.encrypt(serialized_obj, nonce) # sign the message signed = self.__signing_key.sign(encrypted) return signed
python
def _prepare(self, serialized_obj): nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) encrypted = self.__safe.encrypt(serialized_obj, nonce) signed = self.__signing_key.sign(encrypted) return signed
[ "def", "_prepare", "(", "self", ",", "serialized_obj", ")", ":", "# generating a nonce", "nonce", "=", "nacl", ".", "utils", ".", "random", "(", "nacl", ".", "secret", ".", "SecretBox", ".", "NONCE_SIZE", ")", "# encrypting using the nonce", "encrypted", "=", "self", ".", "__safe", ".", "encrypt", "(", "serialized_obj", ",", "nonce", ")", "# sign the message", "signed", "=", "self", ".", "__signing_key", ".", "sign", "(", "encrypted", ")", "return", "signed" ]
Prepare the object to be sent over the untrusted channel.
[ "Prepare", "the", "object", "to", "be", "sent", "over", "the", "untrusted", "channel", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L110-L120
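_prepare above encrypts the serialized object with a NaCl SecretBox and then signs the ciphertext, so a consumer has to undo the two steps in the opposite order: verify the signature first, then decrypt. A hedged sketch of that verify-then-decrypt step with PyNaCl; priv_key and verify_key_hex are assumed to have been obtained from the authenticator process and are not defined here:

import nacl.secret
import nacl.signing
import nacl.encoding


def open_message(signed_payload, priv_key, verify_key_hex):
    # verify_key_hex: hex-encoded verify key handed out during authentication.
    # priv_key: shared SecretBox key, also handed out during authentication.
    verify_key = nacl.signing.VerifyKey(verify_key_hex,
                                        encoder=nacl.encoding.HexEncoder)
    encrypted = verify_key.verify(signed_payload)  # raises BadSignatureError if tampered
    box = nacl.secret.SecretBox(priv_key)
    return box.decrypt(encrypted)                  # the serialized napalm-logs object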
napalm-automation/napalm-logs
napalm_logs/publisher.py
NapalmLogsPublisherProc.start
def start(self): ''' Listen to messages and publish them. ''' # metrics napalm_logs_publisher_received_messages = Counter( 'napalm_logs_publisher_received_messages', "Count of messages received by the publisher", ['publisher_type', 'address', 'port'] ) napalm_logs_publisher_whitelist_blacklist_check_fail = Counter( 'napalm_logs_publisher_whitelist_blacklist_check_fail', "Count of messages which fail the whitelist/blacklist check", ['publisher_type', 'address', 'port'] ) napalm_logs_publisher_messages_published = Counter( 'napalm_logs_publisher_messages_published', "Count of published messages", ['publisher_type', 'address', 'port'] ) self._setup_ipc() # Start suicide polling thread thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.transport.start() self.__up = True while self.__up: try: bin_obj = self.sub.recv() except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) obj = umsgpack.unpackb(bin_obj) if self._strip_message_details: obj.pop('message_details', None) bin_obj = self.serializer_fun(obj) napalm_logs_publisher_received_messages.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc() if not napalm_logs.ext.check_whitelist_blacklist(obj['error'], whitelist=self.error_whitelist, blacklist=self.error_blacklist): # Apply the whitelist / blacklist logic # If it doesn't match, jump over. log.debug('This error type is %s. Skipping for %s #%d', obj['error'], self._transport_type, self.pub_id) napalm_logs_publisher_whitelist_blacklist_check_fail.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc() continue serialized_obj = self._serialize(obj, bin_obj) log.debug('Publishing the OC object') if not self.disable_security and self.__transport_encrypt: # Encrypt only when needed. serialized_obj = self._prepare(serialized_obj) self.transport.publish(serialized_obj) napalm_logs_publisher_messages_published.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc()
python
def start(self): napalm_logs_publisher_received_messages = Counter( 'napalm_logs_publisher_received_messages', "Count of messages received by the publisher", ['publisher_type', 'address', 'port'] ) napalm_logs_publisher_whitelist_blacklist_check_fail = Counter( 'napalm_logs_publisher_whitelist_blacklist_check_fail', "Count of messages which fail the whitelist/blacklist check", ['publisher_type', 'address', 'port'] ) napalm_logs_publisher_messages_published = Counter( 'napalm_logs_publisher_messages_published', "Count of published messages", ['publisher_type', 'address', 'port'] ) self._setup_ipc() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.transport.start() self.__up = True while self.__up: try: bin_obj = self.sub.recv() except zmq.ZMQError as error: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) obj = umsgpack.unpackb(bin_obj) if self._strip_message_details: obj.pop('message_details', None) bin_obj = self.serializer_fun(obj) napalm_logs_publisher_received_messages.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc() if not napalm_logs.ext.check_whitelist_blacklist(obj['error'], whitelist=self.error_whitelist, blacklist=self.error_blacklist): log.debug('This error type is %s. Skipping for %s #%d', obj['error'], self._transport_type, self.pub_id) napalm_logs_publisher_whitelist_blacklist_check_fail.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc() continue serialized_obj = self._serialize(obj, bin_obj) log.debug('Publishing the OC object') if not self.disable_security and self.__transport_encrypt: serialized_obj = self._prepare(serialized_obj) self.transport.publish(serialized_obj) napalm_logs_publisher_messages_published.labels( publisher_type=self._transport_type, address=self.address, port=self.port ).inc()
[ "def", "start", "(", "self", ")", ":", "# metrics", "napalm_logs_publisher_received_messages", "=", "Counter", "(", "'napalm_logs_publisher_received_messages'", ",", "\"Count of messages received by the publisher\"", ",", "[", "'publisher_type'", ",", "'address'", ",", "'port'", "]", ")", "napalm_logs_publisher_whitelist_blacklist_check_fail", "=", "Counter", "(", "'napalm_logs_publisher_whitelist_blacklist_check_fail'", ",", "\"Count of messages which fail the whitelist/blacklist check\"", ",", "[", "'publisher_type'", ",", "'address'", ",", "'port'", "]", ")", "napalm_logs_publisher_messages_published", "=", "Counter", "(", "'napalm_logs_publisher_messages_published'", ",", "\"Count of published messages\"", ",", "[", "'publisher_type'", ",", "'address'", ",", "'port'", "]", ")", "self", ".", "_setup_ipc", "(", ")", "# Start suicide polling thread", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "self", ".", "transport", ".", "start", "(", ")", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "try", ":", "bin_obj", "=", "self", ".", "sub", ".", "recv", "(", ")", "except", "zmq", ".", "ZMQError", "as", "error", ":", "if", "self", ".", "__up", "is", "False", ":", "log", ".", "info", "(", "'Exiting on process shutdown'", ")", "return", "else", ":", "log", ".", "error", "(", "error", ",", "exc_info", "=", "True", ")", "raise", "NapalmLogsExit", "(", "error", ")", "obj", "=", "umsgpack", ".", "unpackb", "(", "bin_obj", ")", "if", "self", ".", "_strip_message_details", ":", "obj", ".", "pop", "(", "'message_details'", ",", "None", ")", "bin_obj", "=", "self", ".", "serializer_fun", "(", "obj", ")", "napalm_logs_publisher_received_messages", ".", "labels", "(", "publisher_type", "=", "self", ".", "_transport_type", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", ".", "inc", "(", ")", "if", "not", "napalm_logs", ".", "ext", ".", "check_whitelist_blacklist", "(", "obj", "[", "'error'", "]", ",", "whitelist", "=", "self", ".", "error_whitelist", ",", "blacklist", "=", "self", ".", "error_blacklist", ")", ":", "# Apply the whitelist / blacklist logic", "# If it doesn't match, jump over.", "log", ".", "debug", "(", "'This error type is %s. Skipping for %s #%d'", ",", "obj", "[", "'error'", "]", ",", "self", ".", "_transport_type", ",", "self", ".", "pub_id", ")", "napalm_logs_publisher_whitelist_blacklist_check_fail", ".", "labels", "(", "publisher_type", "=", "self", ".", "_transport_type", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", ".", "inc", "(", ")", "continue", "serialized_obj", "=", "self", ".", "_serialize", "(", "obj", ",", "bin_obj", ")", "log", ".", "debug", "(", "'Publishing the OC object'", ")", "if", "not", "self", ".", "disable_security", "and", "self", ".", "__transport_encrypt", ":", "# Encrypt only when needed.", "serialized_obj", "=", "self", ".", "_prepare", "(", "serialized_obj", ")", "self", ".", "transport", ".", "publish", "(", "serialized_obj", ")", "napalm_logs_publisher_messages_published", ".", "labels", "(", "publisher_type", "=", "self", ".", "_transport_type", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", ".", "inc", "(", ")" ]
Listen to messages and publish them.
[ "Listen", "to", "messages", "and", "publish", "them", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L132-L203
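The publisher start record above instruments its hot path with Prometheus counters labelled by publisher type, address and port. The calls follow the stock prometheus_client pattern, roughly as below; the metric name, HTTP port and label values here are invented for the example:

from prometheus_client import Counter, start_http_server

example_counter = Counter(
    'example_messages_processed',
    'Count of messages processed by this worker',
    ['publisher_type', 'address', 'port'],
)

start_http_server(9215)  # expose /metrics; the port is an arbitrary example
example_counter.labels(publisher_type='zmq', address='0.0.0.0', port=49017).inc()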
napalm-automation/napalm-logs
napalm_logs/listener/__init__.py
get_listener
def get_listener(name): ''' Return the listener class. ''' try: log.debug('Using %s as listener', name) return LISTENER_LOOKUP[name] except KeyError: msg = 'Listener {} is not available. Are the dependencies installed?'.format(name) log.error(msg, exc_info=True) raise InvalidListenerException(msg)
python
def get_listener(name): try: log.debug('Using %s as listener', name) return LISTENER_LOOKUP[name] except KeyError: msg = 'Listener {} is not available. Are the dependencies installed?'.format(name) log.error(msg, exc_info=True) raise InvalidListenerException(msg)
[ "def", "get_listener", "(", "name", ")", ":", "try", ":", "log", ".", "debug", "(", "'Using %s as listener'", ",", "name", ")", "return", "LISTENER_LOOKUP", "[", "name", "]", "except", "KeyError", ":", "msg", "=", "'Listener {} is not available. Are the dependencies installed?'", ".", "format", "(", "name", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "InvalidListenerException", "(", "msg", ")" ]
Return the listener class.
[ "Return", "the", "listener", "class", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/__init__.py#L41-L51
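get_listener above, like get_serializer and get_transport elsewhere in the package, is the same dispatch idiom: a module-level lookup dict from name to implementation, plus a package-specific exception when the name is unknown. A small generic sketch of the idiom; the registry name and its contents are made up for illustration:

import logging

log = logging.getLogger(__name__)


class InvalidBackendException(Exception):
    pass


# Hypothetical registry; napalm-logs builds the real ones (LISTENER_LOOKUP,
# SERIALIZER_LOOKUP, ...) from its pluggable sub-packages.
BACKEND_LOOKUP = {
    'dummy': object,
}


def get_backend(name):
    try:
        log.debug('Using %s as backend', name)
        return BACKEND_LOOKUP[name]
    except KeyError:
        msg = 'Backend {} is not available'.format(name)
        log.error(msg, exc_info=True)
        raise InvalidBackendException(msg)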
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
setval
def setval(key, val, dict_=None, delim=defaults.DEFAULT_DELIM): ''' Set a value under the dictionary hierarchy identified under the key. The target 'foo/bar/baz' returns the dictionary hierarchy {'foo': {'bar': {'baz': {}}}}. .. note:: Currently this doesn't work with integers, i.e. cannot build lists dynamically. TODO ''' if not dict_: dict_ = {} prev_hier = dict_ dict_hier = key.split(delim) for each in dict_hier[:-1]: if isinstance(each, six.string_type): if each not in prev_hier: prev_hier[each] = {} prev_hier = prev_hier[each] else: prev_hier[each] = [{}] prev_hier = prev_hier[each] prev_hier[dict_hier[-1]] = val return dict_
python
def setval(key, val, dict_=None, delim=defaults.DEFAULT_DELIM): if not dict_: dict_ = {} prev_hier = dict_ dict_hier = key.split(delim) for each in dict_hier[:-1]: if isinstance(each, six.string_type): if each not in prev_hier: prev_hier[each] = {} prev_hier = prev_hier[each] else: prev_hier[each] = [{}] prev_hier = prev_hier[each] prev_hier[dict_hier[-1]] = val return dict_
[ "def", "setval", "(", "key", ",", "val", ",", "dict_", "=", "None", ",", "delim", "=", "defaults", ".", "DEFAULT_DELIM", ")", ":", "if", "not", "dict_", ":", "dict_", "=", "{", "}", "prev_hier", "=", "dict_", "dict_hier", "=", "key", ".", "split", "(", "delim", ")", "for", "each", "in", "dict_hier", "[", ":", "-", "1", "]", ":", "if", "isinstance", "(", "each", ",", "six", ".", "string_type", ")", ":", "if", "each", "not", "in", "prev_hier", ":", "prev_hier", "[", "each", "]", "=", "{", "}", "prev_hier", "=", "prev_hier", "[", "each", "]", "else", ":", "prev_hier", "[", "each", "]", "=", "[", "{", "}", "]", "prev_hier", "=", "prev_hier", "[", "each", "]", "prev_hier", "[", "dict_hier", "[", "-", "1", "]", "]", "=", "val", "return", "dict_" ]
Set a value under the dictionary hierarchy identified under the key. The target 'foo/bar/baz' returns the dictionary hierarchy {'foo': {'bar': {'baz': {}}}}. .. note:: Currently this doesn't work with integers, i.e. cannot build lists dynamically. TODO
[ "Set", "a", "value", "under", "the", "dictionary", "hierarchy", "identified", "under", "the", "key", ".", "The", "target", "foo", "/", "bar", "/", "baz", "returns", "the", "dictionary", "hierarchy", "{", "foo", ":", "{", "bar", ":", "{", "baz", ":", "{}", "}}}", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L256-L281
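The setval documentation above describes building a nested dictionary from a delimited key path. Below is a minimal, self-contained sketch of that behaviour, assuming a plain '/' delimiter; the real napalm_logs.utils.setval takes a configurable delimiter and has extra handling for non-string path components, so this is an illustration of the idea, not the library function.

def setval_sketch(key, val, dict_=None, delim='/'):
    # Create each intermediate dictionary, then assign the value under the last path component.
    if dict_ is None:
        dict_ = {}
    node = dict_
    parts = key.split(delim)
    for part in parts[:-1]:
        node = node.setdefault(part, {})
    node[parts[-1]] = val
    return dict_

print(setval_sketch('foo/bar/baz', 1))   # {'foo': {'bar': {'baz': 1}}}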
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
traverse
def traverse(data, key, delim=defaults.DEFAULT_DELIM): ''' Traverse a dict or list using a slash delimiter target string. The target 'foo/bar/0' will return data['foo']['bar'][0] if this value exists, otherwise will return empty dict. Return None when not found. This can be used to verify if a certain key exists under dictionary hierarchy. ''' for each in key.split(delim): if isinstance(data, list): if isinstance(each, six.string_type): embed_match = False # Index was not numeric, lets look at any embedded dicts for embedded in (x for x in data if isinstance(x, dict)): try: data = embedded[each] embed_match = True break except KeyError: pass if not embed_match: # No embedded dicts matched return None else: try: data = data[int(each)] except IndexError: return None else: try: data = data[each] except (KeyError, TypeError): return None return data
python
def traverse(data, key, delim=defaults.DEFAULT_DELIM): for each in key.split(delim): if isinstance(data, list): if isinstance(each, six.string_type): embed_match = False for embedded in (x for x in data if isinstance(x, dict)): try: data = embedded[each] embed_match = True break except KeyError: pass if not embed_match: return None else: try: data = data[int(each)] except IndexError: return None else: try: data = data[each] except (KeyError, TypeError): return None return data
[ "def", "traverse", "(", "data", ",", "key", ",", "delim", "=", "defaults", ".", "DEFAULT_DELIM", ")", ":", "for", "each", "in", "key", ".", "split", "(", "delim", ")", ":", "if", "isinstance", "(", "data", ",", "list", ")", ":", "if", "isinstance", "(", "each", ",", "six", ".", "string_type", ")", ":", "embed_match", "=", "False", "# Index was not numeric, lets look at any embedded dicts", "for", "embedded", "in", "(", "x", "for", "x", "in", "data", "if", "isinstance", "(", "x", ",", "dict", ")", ")", ":", "try", ":", "data", "=", "embedded", "[", "each", "]", "embed_match", "=", "True", "break", "except", "KeyError", ":", "pass", "if", "not", "embed_match", ":", "# No embedded dicts matched", "return", "None", "else", ":", "try", ":", "data", "=", "data", "[", "int", "(", "each", ")", "]", "except", "IndexError", ":", "return", "None", "else", ":", "try", ":", "data", "=", "data", "[", "each", "]", "except", "(", "KeyError", ",", "TypeError", ")", ":", "return", "None", "return", "data" ]
Traverse a dict or list using a slash delimiter target string. The target 'foo/bar/0' will return data['foo']['bar'][0] if this value exists, otherwise will return empty dict. Return None when not found. This can be used to verify if a certain key exists under dictionary hierarchy.
[ "Traverse", "a", "dict", "or", "list", "using", "a", "slash", "delimiter", "target", "string", ".", "The", "target", "foo", "/", "bar", "/", "0", "will", "return", "data", "[", "foo", "]", "[", "bar", "]", "[", "0", "]", "if", "this", "value", "exists", "otherwise", "will", "return", "empty", "dict", ".", "Return", "None", "when", "not", "found", ".", "This", "can", "be", "used", "to", "verify", "if", "a", "certain", "key", "exists", "under", "dictionary", "hierarchy", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L284-L318
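As a companion to the traverse record above, here is a hedged, self-contained sketch of the documented lookup: walk a nested dict/list along a delimited path, treat numeric components as list indices, and return None when the path does not exist. The real helper additionally searches embedded dictionaries when a string component is applied to a list, which this sketch omits.

def traverse_sketch(data, key, delim='/'):
    # Resolve one path component at a time; any missing key or index yields None.
    for part in key.split(delim):
        try:
            data = data[int(part)] if isinstance(data, list) else data[part]
        except (KeyError, IndexError, TypeError, ValueError):
            return None
    return data

doc = {'foo': {'bar': [{'baz': 42}]}}
print(traverse_sketch(doc, 'foo/bar/0/baz'))   # 42
print(traverse_sketch(doc, 'foo/missing'))     # None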
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
ClientAuth._start_keep_alive
def _start_keep_alive(self): ''' Start the keep alive thread as a daemon ''' keep_alive_thread = threading.Thread(target=self.keep_alive) keep_alive_thread.daemon = True keep_alive_thread.start()
python
def _start_keep_alive(self): keep_alive_thread = threading.Thread(target=self.keep_alive) keep_alive_thread.daemon = True keep_alive_thread.start()
[ "def", "_start_keep_alive", "(", "self", ")", ":", "keep_alive_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "keep_alive", ")", "keep_alive_thread", ".", "daemon", "=", "True", "keep_alive_thread", ".", "start", "(", ")" ]
Start the keep alive thread as a daemon
[ "Start", "the", "keep", "alive", "thread", "as", "a", "daemon" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L61-L67
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
ClientAuth.keep_alive
def keep_alive(self): ''' Send a keep alive request periodically to make sure that the server is still alive. If not then try to reconnect. ''' self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL) while self.__up: try: log.debug('Sending keep-alive message to the server') self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE) except socket.error: log.error('Unable to send keep-alive message to the server.') log.error('Re-init the SSL socket.') self.reconnect() log.debug('Trying to re-send the keep-alive message to the server.') self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE) msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK)) log.debug('Received %s from the keep-alive server', msg) if msg != defaults.AUTH_KEEP_ALIVE_ACK: log.error('Received %s instead of %s form the auth keep-alive server', msg, defaults.AUTH_KEEP_ALIVE_ACK) log.error('Re-init the SSL socket.') self.reconnect() time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL)
python
def keep_alive(self): self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL) while self.__up: try: log.debug('Sending keep-alive message to the server') self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE) except socket.error: log.error('Unable to send keep-alive message to the server.') log.error('Re-init the SSL socket.') self.reconnect() log.debug('Trying to re-send the keep-alive message to the server.') self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE) msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK)) log.debug('Received %s from the keep-alive server', msg) if msg != defaults.AUTH_KEEP_ALIVE_ACK: log.error('Received %s instead of %s form the auth keep-alive server', msg, defaults.AUTH_KEEP_ALIVE_ACK) log.error('Re-init the SSL socket.') self.reconnect() time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL)
[ "def", "keep_alive", "(", "self", ")", ":", "self", ".", "ssl_skt", ".", "settimeout", "(", "defaults", ".", "AUTH_KEEP_ALIVE_INTERVAL", ")", "while", "self", ".", "__up", ":", "try", ":", "log", ".", "debug", "(", "'Sending keep-alive message to the server'", ")", "self", ".", "ssl_skt", ".", "send", "(", "defaults", ".", "AUTH_KEEP_ALIVE", ")", "except", "socket", ".", "error", ":", "log", ".", "error", "(", "'Unable to send keep-alive message to the server.'", ")", "log", ".", "error", "(", "'Re-init the SSL socket.'", ")", "self", ".", "reconnect", "(", ")", "log", ".", "debug", "(", "'Trying to re-send the keep-alive message to the server.'", ")", "self", ".", "ssl_skt", ".", "send", "(", "defaults", ".", "AUTH_KEEP_ALIVE", ")", "msg", "=", "self", ".", "ssl_skt", ".", "recv", "(", "len", "(", "defaults", ".", "AUTH_KEEP_ALIVE_ACK", ")", ")", "log", ".", "debug", "(", "'Received %s from the keep-alive server'", ",", "msg", ")", "if", "msg", "!=", "defaults", ".", "AUTH_KEEP_ALIVE_ACK", ":", "log", ".", "error", "(", "'Received %s instead of %s form the auth keep-alive server'", ",", "msg", ",", "defaults", ".", "AUTH_KEEP_ALIVE_ACK", ")", "log", ".", "error", "(", "'Re-init the SSL socket.'", ")", "self", ".", "reconnect", "(", ")", "time", ".", "sleep", "(", "defaults", ".", "AUTH_KEEP_ALIVE_INTERVAL", ")" ]
Send a keep alive request periodically to make sure that the server is still alive. If not then try to reconnect.
[ "Send", "a", "keep", "alive", "request", "periodically", "to", "make", "sure", "that", "the", "server", "is", "still", "alive", ".", "If", "not", "then", "try", "to", "reconnect", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L69-L92
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
ClientAuth.reconnect
def reconnect(self): ''' Try to reconnect and re-authenticate with the server. ''' log.debug('Closing the SSH socket.') try: self.ssl_skt.close() except socket.error: log.error('The socket seems to be closed already.') log.debug('Re-opening the SSL socket.') self.authenticate()
python
def reconnect(self): log.debug('Closing the SSH socket.') try: self.ssl_skt.close() except socket.error: log.error('The socket seems to be closed already.') log.debug('Re-opening the SSL socket.') self.authenticate()
[ "def", "reconnect", "(", "self", ")", ":", "log", ".", "debug", "(", "'Closing the SSH socket.'", ")", "try", ":", "self", ".", "ssl_skt", ".", "close", "(", ")", "except", "socket", ".", "error", ":", "log", ".", "error", "(", "'The socket seems to be closed already.'", ")", "log", ".", "debug", "(", "'Re-opening the SSL socket.'", ")", "self", ".", "authenticate", "(", ")" ]
Try to reconnect and re-authenticate with the server.
[ "Try", "to", "reconnect", "and", "re", "-", "authenticate", "with", "the", "server", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L94-L104
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
ClientAuth.authenticate
def authenticate(self): ''' Authenticate the client and return the private and signature keys. Establish a connection through a secured socket, then do the handshake using the napalm-logs auth algorithm. ''' log.debug('Authenticate to %s:%d, using the certificate %s', self.address, self.port, self.certificate) if ':' in self.address: skt_ver = socket.AF_INET6 else: skt_ver = socket.AF_INET skt = socket.socket(skt_ver, socket.SOCK_STREAM) self.ssl_skt = ssl.wrap_socket(skt, ca_certs=self.certificate, cert_reqs=ssl.CERT_REQUIRED) try: self.ssl_skt.connect((self.address, self.port)) self.auth_try_id = 0 except socket.error as err: log.error('Unable to open the SSL socket.') self.auth_try_id += 1 if not self.max_try or self.auth_try_id < self.max_try: log.error('Trying to authenticate again in %d seconds', self.timeout) time.sleep(self.timeout) self.authenticate() log.critical('Giving up, unable to authenticate to %s:%d using the certificate %s', self.address, self.port, self.certificate) raise ClientConnectException(err) # Explicit INIT self.ssl_skt.write(defaults.MAGIC_REQ) # Receive the private key private_key = self.ssl_skt.recv(defaults.BUFFER_SIZE) # Send back explicit ACK self.ssl_skt.write(defaults.MAGIC_ACK) # Read the hex of the verification key verify_key_hex = self.ssl_skt.recv(defaults.BUFFER_SIZE) # Send back explicit ACK self.ssl_skt.write(defaults.MAGIC_ACK) self.priv_key = nacl.secret.SecretBox(private_key) self.verify_key = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)
python
def authenticate(self): log.debug('Authenticate to %s:%d, using the certificate %s', self.address, self.port, self.certificate) if ':' in self.address: skt_ver = socket.AF_INET6 else: skt_ver = socket.AF_INET skt = socket.socket(skt_ver, socket.SOCK_STREAM) self.ssl_skt = ssl.wrap_socket(skt, ca_certs=self.certificate, cert_reqs=ssl.CERT_REQUIRED) try: self.ssl_skt.connect((self.address, self.port)) self.auth_try_id = 0 except socket.error as err: log.error('Unable to open the SSL socket.') self.auth_try_id += 1 if not self.max_try or self.auth_try_id < self.max_try: log.error('Trying to authenticate again in %d seconds', self.timeout) time.sleep(self.timeout) self.authenticate() log.critical('Giving up, unable to authenticate to %s:%d using the certificate %s', self.address, self.port, self.certificate) raise ClientConnectException(err) self.ssl_skt.write(defaults.MAGIC_REQ) private_key = self.ssl_skt.recv(defaults.BUFFER_SIZE) self.ssl_skt.write(defaults.MAGIC_ACK) verify_key_hex = self.ssl_skt.recv(defaults.BUFFER_SIZE) self.ssl_skt.write(defaults.MAGIC_ACK) self.priv_key = nacl.secret.SecretBox(private_key) self.verify_key = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)
[ "def", "authenticate", "(", "self", ")", ":", "log", ".", "debug", "(", "'Authenticate to %s:%d, using the certificate %s'", ",", "self", ".", "address", ",", "self", ".", "port", ",", "self", ".", "certificate", ")", "if", "':'", "in", "self", ".", "address", ":", "skt_ver", "=", "socket", ".", "AF_INET6", "else", ":", "skt_ver", "=", "socket", ".", "AF_INET", "skt", "=", "socket", ".", "socket", "(", "skt_ver", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "ssl_skt", "=", "ssl", ".", "wrap_socket", "(", "skt", ",", "ca_certs", "=", "self", ".", "certificate", ",", "cert_reqs", "=", "ssl", ".", "CERT_REQUIRED", ")", "try", ":", "self", ".", "ssl_skt", ".", "connect", "(", "(", "self", ".", "address", ",", "self", ".", "port", ")", ")", "self", ".", "auth_try_id", "=", "0", "except", "socket", ".", "error", "as", "err", ":", "log", ".", "error", "(", "'Unable to open the SSL socket.'", ")", "self", ".", "auth_try_id", "+=", "1", "if", "not", "self", ".", "max_try", "or", "self", ".", "auth_try_id", "<", "self", ".", "max_try", ":", "log", ".", "error", "(", "'Trying to authenticate again in %d seconds'", ",", "self", ".", "timeout", ")", "time", ".", "sleep", "(", "self", ".", "timeout", ")", "self", ".", "authenticate", "(", ")", "log", ".", "critical", "(", "'Giving up, unable to authenticate to %s:%d using the certificate %s'", ",", "self", ".", "address", ",", "self", ".", "port", ",", "self", ".", "certificate", ")", "raise", "ClientConnectException", "(", "err", ")", "# Explicit INIT", "self", ".", "ssl_skt", ".", "write", "(", "defaults", ".", "MAGIC_REQ", ")", "# Receive the private key", "private_key", "=", "self", ".", "ssl_skt", ".", "recv", "(", "defaults", ".", "BUFFER_SIZE", ")", "# Send back explicit ACK", "self", ".", "ssl_skt", ".", "write", "(", "defaults", ".", "MAGIC_ACK", ")", "# Read the hex of the verification key", "verify_key_hex", "=", "self", ".", "ssl_skt", ".", "recv", "(", "defaults", ".", "BUFFER_SIZE", ")", "# Send back explicit ACK", "self", ".", "ssl_skt", ".", "write", "(", "defaults", ".", "MAGIC_ACK", ")", "self", ".", "priv_key", "=", "nacl", ".", "secret", ".", "SecretBox", "(", "private_key", ")", "self", ".", "verify_key", "=", "nacl", ".", "signing", ".", "VerifyKey", "(", "verify_key_hex", ",", "encoder", "=", "nacl", ".", "encoding", ".", "HexEncoder", ")" ]
Authenticate the client and return the private and signature keys. Establish a connection through a secured socket, then do the handshake using the napalm-logs auth algorithm.
[ "Authenticate", "the", "client", "and", "return", "the", "private", "and", "signature", "keys", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L106-L150
napalm-automation/napalm-logs
napalm_logs/utils/__init__.py
ClientAuth.decrypt
def decrypt(self, binary): ''' Decrypt and unpack the original OpenConfig object, serialized using MessagePack. Raise BadSignatureException when the signature was forged or corrupted. ''' try: encrypted = self.verify_key.verify(binary) except BadSignatureError: log.error('Signature was forged or corrupt', exc_info=True) raise BadSignatureException('Signature was forged or corrupt') try: packed = self.priv_key.decrypt(encrypted) except CryptoError: log.error('Unable to decrypt', exc_info=True) raise CryptoException('Unable to decrypt') return umsgpack.unpackb(packed)
python
def decrypt(self, binary): try: encrypted = self.verify_key.verify(binary) except BadSignatureError: log.error('Signature was forged or corrupt', exc_info=True) raise BadSignatureException('Signature was forged or corrupt') try: packed = self.priv_key.decrypt(encrypted) except CryptoError: log.error('Unable to decrypt', exc_info=True) raise CryptoException('Unable to decrypt') return umsgpack.unpackb(packed)
[ "def", "decrypt", "(", "self", ",", "binary", ")", ":", "try", ":", "encrypted", "=", "self", ".", "verify_key", ".", "verify", "(", "binary", ")", "except", "BadSignatureError", ":", "log", ".", "error", "(", "'Signature was forged or corrupt'", ",", "exc_info", "=", "True", ")", "raise", "BadSignatureException", "(", "'Signature was forged or corrupt'", ")", "try", ":", "packed", "=", "self", ".", "priv_key", ".", "decrypt", "(", "encrypted", ")", "except", "CryptoError", ":", "log", ".", "error", "(", "'Unable to decrypt'", ",", "exc_info", "=", "True", ")", "raise", "CryptoException", "(", "'Unable to decrypt'", ")", "return", "umsgpack", ".", "unpackb", "(", "packed", ")" ]
Decrypt and unpack the original OpenConfig object, serialized using MessagePack. Raise BadSignatureException when the signature was forged or corrupted.
[ "Decrypt", "and", "unpack", "the", "original", "OpenConfig", "object", "serialized", "using", "MessagePack", ".", "Raise", "BadSignatureException", "when", "the", "signature", "was", "forged", "or", "corrupted", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L152-L169
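ClientAuth.decrypt above reverses the server-side pipeline: the object is MessagePack-packed, encrypted with a NaCl SecretBox, and the resulting ciphertext is signed; the client verifies the signature, decrypts, then unpacks. The following round-trip sketch reproduces that ordering with both keys generated locally purely for illustration (in napalm-logs they are handed to the client during the authentication handshake) and assumes pynacl and u-msgpack-python are installed.

import nacl.secret
import nacl.signing
import nacl.utils
import umsgpack

secret_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)   # stand-in for the key exchanged during auth
signing_key = nacl.signing.SigningKey.generate()

# "Server" side: pack, encrypt, then sign the encrypted blob.
obj = {'error': 'INTERFACE_DOWN', 'host': 'edge01'}
encrypted = nacl.secret.SecretBox(secret_key).encrypt(umsgpack.packb(obj))
signed = signing_key.sign(encrypted)

# "Client" side: verify first (BadSignatureError if forged), then decrypt and unpack.
verified = signing_key.verify_key.verify(signed)
print(umsgpack.unpackb(nacl.secret.SecretBox(secret_key).decrypt(verified)))
# {'error': 'INTERFACE_DOWN', 'host': 'edge01'}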
napalm-automation/napalm-logs
napalm_logs/listener/tcp.py
TCPListener._client_connection
def _client_connection(self, conn, addr): ''' Handle the connecition with one client. ''' log.debug('Established connection with %s:%d', addr[0], addr[1]) conn.settimeout(self.socket_timeout) try: while self.__up: msg = conn.recv(self.buffer_size) if not msg: # log.debug('Received empty message from %s', addr) # disabled ^ as it was too noisy continue log.debug('[%s] Received %s from %s. Adding in the queue', time.time(), msg, addr) self.buffer.put((msg, '{}:{}'.format(addr[0], addr[1]))) except socket.timeout: if not self.__up: return log.debug('Connection %s:%d timed out', addr[1], addr[0]) raise ListenerException('Connection %s:%d timed out' % addr) finally: log.debug('Closing connection with %s', addr) conn.close()
python
def _client_connection(self, conn, addr): log.debug('Established connection with %s:%d', addr[0], addr[1]) conn.settimeout(self.socket_timeout) try: while self.__up: msg = conn.recv(self.buffer_size) if not msg: continue log.debug('[%s] Received %s from %s. Adding in the queue', time.time(), msg, addr) self.buffer.put((msg, '{}:{}'.format(addr[0], addr[1]))) except socket.timeout: if not self.__up: return log.debug('Connection %s:%d timed out', addr[1], addr[0]) raise ListenerException('Connection %s:%d timed out' % addr) finally: log.debug('Closing connection with %s', addr) conn.close()
[ "def", "_client_connection", "(", "self", ",", "conn", ",", "addr", ")", ":", "log", ".", "debug", "(", "'Established connection with %s:%d'", ",", "addr", "[", "0", "]", ",", "addr", "[", "1", "]", ")", "conn", ".", "settimeout", "(", "self", ".", "socket_timeout", ")", "try", ":", "while", "self", ".", "__up", ":", "msg", "=", "conn", ".", "recv", "(", "self", ".", "buffer_size", ")", "if", "not", "msg", ":", "# log.debug('Received empty message from %s', addr)", "# disabled ^ as it was too noisy", "continue", "log", ".", "debug", "(", "'[%s] Received %s from %s. Adding in the queue'", ",", "time", ".", "time", "(", ")", ",", "msg", ",", "addr", ")", "self", ".", "buffer", ".", "put", "(", "(", "msg", ",", "'{}:{}'", ".", "format", "(", "addr", "[", "0", "]", ",", "addr", "[", "1", "]", ")", ")", ")", "except", "socket", ".", "timeout", ":", "if", "not", "self", ".", "__up", ":", "return", "log", ".", "debug", "(", "'Connection %s:%d timed out'", ",", "addr", "[", "1", "]", ",", "addr", "[", "0", "]", ")", "raise", "ListenerException", "(", "'Connection %s:%d timed out'", "%", "addr", ")", "finally", ":", "log", ".", "debug", "(", "'Closing connection with %s'", ",", "addr", ")", "conn", ".", "close", "(", ")" ]
Handle the connection with one client.
[ "Handle", "the", "connection", "with", "one", "client", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L53-L75
napalm-automation/napalm-logs
napalm_logs/listener/tcp.py
TCPListener._serve_clients
def _serve_clients(self): ''' Accept cients and serve, one separate thread per client. ''' self.__up = True while self.__up: log.debug('Waiting for a client to connect') try: conn, addr = self.skt.accept() log.debug('Received connection from %s:%d', addr[0], addr[1]) except socket.error as error: if not self.__up: return msg = 'Received listener socket error: {}'.format(error) log.error(msg, exc_info=True) raise ListenerException(msg) client_thread = threading.Thread(target=self._client_connection, args=(conn, addr,)) client_thread.start()
python
def _serve_clients(self): self.__up = True while self.__up: log.debug('Waiting for a client to connect') try: conn, addr = self.skt.accept() log.debug('Received connection from %s:%d', addr[0], addr[1]) except socket.error as error: if not self.__up: return msg = 'Received listener socket error: {}'.format(error) log.error(msg, exc_info=True) raise ListenerException(msg) client_thread = threading.Thread(target=self._client_connection, args=(conn, addr,)) client_thread.start()
[ "def", "_serve_clients", "(", "self", ")", ":", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "log", ".", "debug", "(", "'Waiting for a client to connect'", ")", "try", ":", "conn", ",", "addr", "=", "self", ".", "skt", ".", "accept", "(", ")", "log", ".", "debug", "(", "'Received connection from %s:%d'", ",", "addr", "[", "0", "]", ",", "addr", "[", "1", "]", ")", "except", "socket", ".", "error", "as", "error", ":", "if", "not", "self", ".", "__up", ":", "return", "msg", "=", "'Received listener socket error: {}'", ".", "format", "(", "error", ")", "log", ".", "error", "(", "msg", ",", "exc_info", "=", "True", ")", "raise", "ListenerException", "(", "msg", ")", "client_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_client_connection", ",", "args", "=", "(", "conn", ",", "addr", ",", ")", ")", "client_thread", ".", "start", "(", ")" ]
Accept clients and serve, one separate thread per client.
[ "Accept", "clients", "and", "serve", "one", "separate", "thread", "per", "client", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L77-L94
napalm-automation/napalm-logs
napalm_logs/listener/tcp.py
TCPListener.start
def start(self): ''' Start listening for messages. ''' log.debug('Creating the TCP server') if ':' in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if hasattr(socket, 'SO_REUSEPORT'): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: log.error('SO_REUSEPORT not supported') try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg) log.error(error_string, exc_info=True) raise BindException(error_string) log.debug('Accepting max %d parallel connections', self.max_clients) self.skt.listen(self.max_clients) self.thread_serve = threading.Thread(target=self._serve_clients) self.thread_serve.start()
python
def start(self): log.debug('Creating the TCP server') if ':' in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if hasattr(socket, 'SO_REUSEPORT'): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: log.error('SO_REUSEPORT not supported') try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg) log.error(error_string, exc_info=True) raise BindException(error_string) log.debug('Accepting max %d parallel connections', self.max_clients) self.skt.listen(self.max_clients) self.thread_serve = threading.Thread(target=self._serve_clients) self.thread_serve.start()
[ "def", "start", "(", "self", ")", ":", "log", ".", "debug", "(", "'Creating the TCP server'", ")", "if", "':'", "in", "self", ".", "address", ":", "self", ".", "skt", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET6", ",", "socket", ".", "SOCK_STREAM", ")", "else", ":", "self", ".", "skt", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "if", "self", ".", "reuse_port", ":", "self", ".", "skt", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "if", "hasattr", "(", "socket", ",", "'SO_REUSEPORT'", ")", ":", "self", ".", "skt", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEPORT", ",", "1", ")", "else", ":", "log", ".", "error", "(", "'SO_REUSEPORT not supported'", ")", "try", ":", "self", ".", "skt", ".", "bind", "(", "(", "self", ".", "address", ",", "int", "(", "self", ".", "port", ")", ")", ")", "except", "socket", ".", "error", "as", "msg", ":", "error_string", "=", "'Unable to bind to port {} on {}: {}'", ".", "format", "(", "self", ".", "port", ",", "self", ".", "address", ",", "msg", ")", "log", ".", "error", "(", "error_string", ",", "exc_info", "=", "True", ")", "raise", "BindException", "(", "error_string", ")", "log", ".", "debug", "(", "'Accepting max %d parallel connections'", ",", "self", ".", "max_clients", ")", "self", ".", "skt", ".", "listen", "(", "self", ".", "max_clients", ")", "self", ".", "thread_serve", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_serve_clients", ")", "self", ".", "thread_serve", ".", "start", "(", ")" ]
Start listening for messages.
[ "Start", "listening", "for", "messages", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L96-L120
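TCPListener.start above guards the SO_REUSEPORT option with hasattr because the constant is not defined on every platform. A minimal stand-alone sketch of the same bind sequence follows; the loopback address and OS-assigned port are used here only for the demo.

import socket

address, port = '127.0.0.1', 0   # port 0 lets the OS pick a free port
skt = socket.socket(socket.AF_INET6 if ':' in address else socket.AF_INET, socket.SOCK_STREAM)
skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):                       # missing on some platforms
    skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
skt.bind((address, port))
skt.listen(5)
print('listening on %s:%d' % skt.getsockname()[:2])
skt.close()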
napalm-automation/napalm-logs
napalm_logs/listener/tcp.py
TCPListener.receive
def receive(self): ''' Return one message dequeued from the listen buffer. ''' while self.buffer.empty() and self.__up: # This sequence is skipped when the buffer is not empty. sleep_ms = random.randint(0, 1000) # log.debug('The message queue is empty, waiting %d miliseconds', sleep_ms) # disabled ^ as it was too noisy time.sleep(sleep_ms / 1000.0) if not self.buffer.empty(): return self.buffer.get(block=False) return '', ''
python
def receive(self): while self.buffer.empty() and self.__up: sleep_ms = random.randint(0, 1000) time.sleep(sleep_ms / 1000.0) if not self.buffer.empty(): return self.buffer.get(block=False) return '', ''
[ "def", "receive", "(", "self", ")", ":", "while", "self", ".", "buffer", ".", "empty", "(", ")", "and", "self", ".", "__up", ":", "# This sequence is skipped when the buffer is not empty.", "sleep_ms", "=", "random", ".", "randint", "(", "0", ",", "1000", ")", "# log.debug('The message queue is empty, waiting %d miliseconds', sleep_ms)", "# disabled ^ as it was too noisy", "time", ".", "sleep", "(", "sleep_ms", "/", "1000.0", ")", "if", "not", "self", ".", "buffer", ".", "empty", "(", ")", ":", "return", "self", ".", "buffer", ".", "get", "(", "block", "=", "False", ")", "return", "''", ",", "''" ]
Return one message dequeued from the listen buffer.
[ "Return", "one", "message", "dequeued", "from", "the", "listen", "buffer", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L122-L134
napalm-automation/napalm-logs
napalm_logs/listener/tcp.py
TCPListener.stop
def stop(self): ''' Closing the socket. ''' log.info('Stopping the TCP listener') self.__up = False try: self.skt.shutdown(socket.SHUT_RDWR) except socket.error: log.error('The following error may not be critical:', exc_info=True) self.skt.close()
python
def stop(self): log.info('Stopping the TCP listener') self.__up = False try: self.skt.shutdown(socket.SHUT_RDWR) except socket.error: log.error('The following error may not be critical:', exc_info=True) self.skt.close()
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping the TCP listener'", ")", "self", ".", "__up", "=", "False", "try", ":", "self", ".", "skt", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", "socket", ".", "error", ":", "log", ".", "error", "(", "'The following error may not be critical:'", ",", "exc_info", "=", "True", ")", "self", ".", "skt", ".", "close", "(", ")" ]
Closing the socket.
[ "Closing", "the", "socket", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L136-L146
napalm-automation/napalm-logs
napalm_logs/listener_proc.py
NapalmLogsListenerProc._setup_listener
def _setup_listener(self): ''' Setup the transport. ''' listener_class = get_listener(self._listener_type) self.address = self.listener_opts.pop('address', self.address) self.port = self.listener_opts.pop('port', self.port) self.listener = listener_class(self.address, self.port, **self.listener_opts)
python
def _setup_listener(self): listener_class = get_listener(self._listener_type) self.address = self.listener_opts.pop('address', self.address) self.port = self.listener_opts.pop('port', self.port) self.listener = listener_class(self.address, self.port, **self.listener_opts)
[ "def", "_setup_listener", "(", "self", ")", ":", "listener_class", "=", "get_listener", "(", "self", ".", "_listener_type", ")", "self", ".", "address", "=", "self", ".", "listener_opts", ".", "pop", "(", "'address'", ",", "self", ".", "address", ")", "self", ".", "port", "=", "self", ".", "listener_opts", ".", "pop", "(", "'port'", ",", "self", ".", "port", ")", "self", ".", "listener", "=", "listener_class", "(", "self", ".", "address", ",", "self", ".", "port", ",", "*", "*", "self", ".", "listener_opts", ")" ]
Setup the listener.
[ "Setup", "the", "listener", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener_proc.py#L51-L60
napalm-automation/napalm-logs
napalm_logs/listener_proc.py
NapalmLogsListenerProc._setup_ipc
def _setup_ipc(self): ''' Setup the listener ICP pusher. ''' log.debug('Setting up the listener IPC pusher') self.ctx = zmq.Context() self.pub = self.ctx.socket(zmq.PUSH) self.pub.connect(LST_IPC_URL) log.debug('Setting HWM for the listener: %d', self.opts['hwm']) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) # zmq 2 except AttributeError: # zmq 3 self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
python
def _setup_ipc(self): log.debug('Setting up the listener IPC pusher') self.ctx = zmq.Context() self.pub = self.ctx.socket(zmq.PUSH) self.pub.connect(LST_IPC_URL) log.debug('Setting HWM for the listener: %d', self.opts['hwm']) try: self.pub.setsockopt(zmq.HWM, self.opts['hwm']) except AttributeError: self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
[ "def", "_setup_ipc", "(", "self", ")", ":", "log", ".", "debug", "(", "'Setting up the listener IPC pusher'", ")", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "self", ".", "pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PUSH", ")", "self", ".", "pub", ".", "connect", "(", "LST_IPC_URL", ")", "log", ".", "debug", "(", "'Setting HWM for the listener: %d'", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "try", ":", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")" ]
Setup the listener IPC pusher.
[ "Setup", "the", "listener", "IPC", "pusher", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener_proc.py#L62-L76
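_setup_ipc above sets the high-water mark inside a try/except because the socket option was renamed between ZeroMQ major versions (HWM in zmq 2, SNDHWM/RCVHWM from zmq 3 onwards). A stand-alone sketch of that compatibility shim, using a placeholder TCP endpoint instead of the napalm-logs IPC URL:

import zmq

ctx = zmq.Context()
push = ctx.socket(zmq.PUSH)
hwm = 1000
try:
    push.setsockopt(zmq.HWM, hwm)       # zmq 2.x constant
except AttributeError:
    push.setsockopt(zmq.SNDHWM, hwm)    # zmq 3.x+ constant for the send side
push.connect('tcp://127.0.0.1:5555')    # placeholder endpoint
push.close()
ctx.term()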
napalm-automation/napalm-logs
napalm_logs/listener_proc.py
NapalmLogsListenerProc.start
def start(self): ''' Listen to messages and publish them. ''' # counter metrics for messages c_logs_ingested = Counter( 'napalm_logs_listener_logs_ingested', 'Count of ingested log messages', ['listener_type', 'address', 'port'], ) c_messages_published = Counter( 'napalm_logs_listener_messages_published', 'Count of published messages', ['listener_type', 'address', 'port'], ) self._setup_ipc() log.debug('Using the %s listener', self._listener_type) self._setup_listener() self.listener.start() # Start suicide polling thread thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: log_message, log_source = self.listener.receive() except ListenerException as lerr: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(lerr, exc_info=True) raise NapalmLogsExit(lerr) log.debug('Received %s from %s. Queueing to the server.', log_message, log_source) if not log_message: log.info('Empty message received from %s. Not queueing to the server.', log_source) continue c_logs_ingested.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc() self.pub.send(umsgpack.packb((log_message, log_source))) c_messages_published.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc()
python
def start(self): c_logs_ingested = Counter( 'napalm_logs_listener_logs_ingested', 'Count of ingested log messages', ['listener_type', 'address', 'port'], ) c_messages_published = Counter( 'napalm_logs_listener_messages_published', 'Count of published messages', ['listener_type', 'address', 'port'], ) self._setup_ipc() log.debug('Using the %s listener', self._listener_type) self._setup_listener() self.listener.start() thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),)) thread.start() signal.signal(signal.SIGTERM, self._exit_gracefully) self.__up = True while self.__up: try: log_message, log_source = self.listener.receive() except ListenerException as lerr: if self.__up is False: log.info('Exiting on process shutdown') return else: log.error(lerr, exc_info=True) raise NapalmLogsExit(lerr) log.debug('Received %s from %s. Queueing to the server.', log_message, log_source) if not log_message: log.info('Empty message received from %s. Not queueing to the server.', log_source) continue c_logs_ingested.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc() self.pub.send(umsgpack.packb((log_message, log_source))) c_messages_published.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc()
[ "def", "start", "(", "self", ")", ":", "# counter metrics for messages", "c_logs_ingested", "=", "Counter", "(", "'napalm_logs_listener_logs_ingested'", ",", "'Count of ingested log messages'", ",", "[", "'listener_type'", ",", "'address'", ",", "'port'", "]", ",", ")", "c_messages_published", "=", "Counter", "(", "'napalm_logs_listener_messages_published'", ",", "'Count of published messages'", ",", "[", "'listener_type'", ",", "'address'", ",", "'port'", "]", ",", ")", "self", ".", "_setup_ipc", "(", ")", "log", ".", "debug", "(", "'Using the %s listener'", ",", "self", ".", "_listener_type", ")", "self", ".", "_setup_listener", "(", ")", "self", ".", "listener", ".", "start", "(", ")", "# Start suicide polling thread", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_suicide_when_without_parent", ",", "args", "=", "(", "os", ".", "getppid", "(", ")", ",", ")", ")", "thread", ".", "start", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "_exit_gracefully", ")", "self", ".", "__up", "=", "True", "while", "self", ".", "__up", ":", "try", ":", "log_message", ",", "log_source", "=", "self", ".", "listener", ".", "receive", "(", ")", "except", "ListenerException", "as", "lerr", ":", "if", "self", ".", "__up", "is", "False", ":", "log", ".", "info", "(", "'Exiting on process shutdown'", ")", "return", "else", ":", "log", ".", "error", "(", "lerr", ",", "exc_info", "=", "True", ")", "raise", "NapalmLogsExit", "(", "lerr", ")", "log", ".", "debug", "(", "'Received %s from %s. Queueing to the server.'", ",", "log_message", ",", "log_source", ")", "if", "not", "log_message", ":", "log", ".", "info", "(", "'Empty message received from %s. Not queueing to the server.'", ",", "log_source", ")", "continue", "c_logs_ingested", ".", "labels", "(", "listener_type", "=", "self", ".", "_listener_type", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", ".", "inc", "(", ")", "self", ".", "pub", ".", "send", "(", "umsgpack", ".", "packb", "(", "(", "log_message", ",", "log_source", ")", ")", ")", "c_messages_published", ".", "labels", "(", "listener_type", "=", "self", ".", "_listener_type", ",", "address", "=", "self", ".", "address", ",", "port", "=", "self", ".", "port", ")", ".", "inc", "(", ")" ]
Listen to messages and publish them.
[ "Listen", "to", "messages", "and", "publish", "them", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener_proc.py#L78-L118
napalm-automation/napalm-logs
napalm_logs/config/nxos/USER_LOGIN.py
emit
def emit(msg_dict): ''' Extracts the details from the syslog message and returns an object having the following structure: .. code-block:: python { u'users': { u'user': { u'luke': { u'action': { u'login': True }, u'uid': 0 } } } } ''' log.debug('Evaluating the message dict:') log.debug(msg_dict) ret = {} extracted = napalm_logs.utils.extract(_RGX, msg_dict['message'], _RGX_PARTS) if not extracted: return ret uid_key_path = 'users//user//{0[user]}//uid'.format(extracted) uid_value = int(extracted['uid']) log.debug('Setting %d under key path %s', uid_value, uid_key_path) ret.update(napalm_logs.utils.setval(uid_key_path, uid_value, dict_=ret)) login_key_path = 'users//user//{0[user]}//action//login'.format(extracted) ret.update(napalm_logs.utils.setval(login_key_path, True, dict_=ret)) return ret
python
def emit(msg_dict): log.debug('Evaluating the message dict:') log.debug(msg_dict) ret = {} extracted = napalm_logs.utils.extract(_RGX, msg_dict['message'], _RGX_PARTS) if not extracted: return ret uid_key_path = 'users//user//{0[user]}//uid'.format(extracted) uid_value = int(extracted['uid']) log.debug('Setting %d under key path %s', uid_value, uid_key_path) ret.update(napalm_logs.utils.setval(uid_key_path, uid_value, dict_=ret)) login_key_path = 'users//user//{0[user]}//action//login'.format(extracted) ret.update(napalm_logs.utils.setval(login_key_path, True, dict_=ret)) return ret
[ "def", "emit", "(", "msg_dict", ")", ":", "log", ".", "debug", "(", "'Evaluating the message dict:'", ")", "log", ".", "debug", "(", "msg_dict", ")", "ret", "=", "{", "}", "extracted", "=", "napalm_logs", ".", "utils", ".", "extract", "(", "_RGX", ",", "msg_dict", "[", "'message'", "]", ",", "_RGX_PARTS", ")", "if", "not", "extracted", ":", "return", "ret", "uid_key_path", "=", "'users//user//{0[user]}//uid'", ".", "format", "(", "extracted", ")", "uid_value", "=", "int", "(", "extracted", "[", "'uid'", "]", ")", "log", ".", "debug", "(", "'Setting %d under key path %s'", ",", "uid_value", ",", "uid_key_path", ")", "ret", ".", "update", "(", "napalm_logs", ".", "utils", ".", "setval", "(", "uid_key_path", ",", "uid_value", ",", "dict_", "=", "ret", ")", ")", "login_key_path", "=", "'users//user//{0[user]}//action//login'", ".", "format", "(", "extracted", ")", "ret", ".", "update", "(", "napalm_logs", ".", "utils", ".", "setval", "(", "login_key_path", ",", "True", ",", "dict_", "=", "ret", ")", ")", "return", "ret" ]
Extracts the details from the syslog message and returns an object having the following structure: .. code-block:: python { u'users': { u'user': { u'luke': { u'action': { u'login': True }, u'uid': 0 } } } }
[ "Extracts", "the", "details", "from", "the", "syslog", "message", "and", "returns", "an", "object", "having", "the", "following", "structure", ":" ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/config/nxos/USER_LOGIN.py#L56-L88
napalm-automation/napalm-logs
napalm_logs/ext/__init__.py
expr_match
def expr_match(line, expr): ''' Evaluate a line of text against an expression. First try a full-string match, next try globbing, and then try to match assuming expr is a regular expression. Originally designed to match minion IDs for whitelists/blacklists. ''' if line == expr: return True if fnmatch.fnmatch(line, expr): return True try: if re.match(r'\A{0}\Z'.format(expr), line): return True except re.error: pass return False
python
def expr_match(line, expr): if line == expr: return True if fnmatch.fnmatch(line, expr): return True try: if re.match(r'\A{0}\Z'.format(expr), line): return True except re.error: pass return False
[ "def", "expr_match", "(", "line", ",", "expr", ")", ":", "if", "line", "==", "expr", ":", "return", "True", "if", "fnmatch", ".", "fnmatch", "(", "line", ",", "expr", ")", ":", "return", "True", "try", ":", "if", "re", ".", "match", "(", "r'\\A{0}\\Z'", ".", "format", "(", "expr", ")", ",", "line", ")", ":", "return", "True", "except", "re", ".", "error", ":", "pass", "return", "False" ]
Evaluate a line of text against an expression. First try a full-string match, next try globbing, and then try to match assuming expr is a regular expression. Originally designed to match minion IDs for whitelists/blacklists.
[ "Evaluate", "a", "line", "of", "text", "against", "an", "expression", ".", "First", "try", "a", "full", "-", "string", "match", "next", "try", "globbing", "and", "then", "try", "to", "match", "assuming", "expr", "is", "a", "regular", "expression", ".", "Originally", "designed", "to", "match", "minion", "IDs", "for", "whitelists", "/", "blacklists", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/ext/__init__.py#L14-L30
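expr_match above tries three increasingly general comparisons: exact equality, shell-style globbing, and finally the expression treated as an anchored regular expression. The same three tiers can be reproduced with the standard library, which is what the helper wraps (the error name below is just an example string):

import fnmatch
import re

line = 'BGP_PREFIX_THRESH_EXCEEDED'
print(line == 'BGP_PREFIX_THRESH_EXCEEDED')                    # tier 1: exact match -> True
print(fnmatch.fnmatch(line, 'BGP_*'))                          # tier 2: glob match -> True
print(bool(re.match(r'\A{0}\Z'.format(r'BGP_\w+'), line)))     # tier 3: anchored regex -> True
print(bool(re.match(r'\A{0}\Z'.format(r'OSPF_\w+'), line)))    # no tier matches -> False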
napalm-automation/napalm-logs
napalm_logs/ext/__init__.py
check_whitelist_blacklist
def check_whitelist_blacklist(value, whitelist=None, blacklist=None): ''' Check a whitelist and/or blacklist to see if the value matches it. value The item to check the whitelist and/or blacklist against. whitelist The list of items that are white-listed. If ``value`` is found in the whitelist, then the function returns ``True``. Otherwise, it returns ``False``. blacklist The list of items that are black-listed. If ``value`` is found in the blacklist, then the function returns ``False``. Otherwise, it returns ``True``. If both a whitelist and a blacklist are provided, value membership in the blacklist will be examined first. If the value is not found in the blacklist, then the whitelist is checked. If the value isn't found in the whitelist, the function returns ``False``. ''' if blacklist is not None: if not hasattr(blacklist, '__iter__'): blacklist = [blacklist] try: for expr in blacklist: if expr_match(value, expr): return False except TypeError: log.error('Non-iterable blacklist {0}'.format(blacklist)) if whitelist: if not hasattr(whitelist, '__iter__'): whitelist = [whitelist] try: for expr in whitelist: if expr_match(value, expr): return True except TypeError: log.error('Non-iterable whitelist {0}'.format(whitelist)) else: return True return False
python
def check_whitelist_blacklist(value, whitelist=None, blacklist=None): if blacklist is not None: if not hasattr(blacklist, '__iter__'): blacklist = [blacklist] try: for expr in blacklist: if expr_match(value, expr): return False except TypeError: log.error('Non-iterable blacklist {0}'.format(blacklist)) if whitelist: if not hasattr(whitelist, '__iter__'): whitelist = [whitelist] try: for expr in whitelist: if expr_match(value, expr): return True except TypeError: log.error('Non-iterable whitelist {0}'.format(whitelist)) else: return True return False
[ "def", "check_whitelist_blacklist", "(", "value", ",", "whitelist", "=", "None", ",", "blacklist", "=", "None", ")", ":", "if", "blacklist", "is", "not", "None", ":", "if", "not", "hasattr", "(", "blacklist", ",", "'__iter__'", ")", ":", "blacklist", "=", "[", "blacklist", "]", "try", ":", "for", "expr", "in", "blacklist", ":", "if", "expr_match", "(", "value", ",", "expr", ")", ":", "return", "False", "except", "TypeError", ":", "log", ".", "error", "(", "'Non-iterable blacklist {0}'", ".", "format", "(", "blacklist", ")", ")", "if", "whitelist", ":", "if", "not", "hasattr", "(", "whitelist", ",", "'__iter__'", ")", ":", "whitelist", "=", "[", "whitelist", "]", "try", ":", "for", "expr", "in", "whitelist", ":", "if", "expr_match", "(", "value", ",", "expr", ")", ":", "return", "True", "except", "TypeError", ":", "log", ".", "error", "(", "'Non-iterable whitelist {0}'", ".", "format", "(", "whitelist", ")", ")", "else", ":", "return", "True", "return", "False" ]
Check a whitelist and/or blacklist to see if the value matches it. value The item to check the whitelist and/or blacklist against. whitelist The list of items that are white-listed. If ``value`` is found in the whitelist, then the function returns ``True``. Otherwise, it returns ``False``. blacklist The list of items that are black-listed. If ``value`` is found in the blacklist, then the function returns ``False``. Otherwise, it returns ``True``. If both a whitelist and a blacklist are provided, value membership in the blacklist will be examined first. If the value is not found in the blacklist, then the whitelist is checked. If the value isn't found in the whitelist, the function returns ``False``.
[ "Check", "a", "whitelist", "and", "/", "or", "blacklist", "to", "see", "if", "the", "value", "matches", "it", "." ]
train
https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/ext/__init__.py#L33-L77
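The docstring above spells out the precedence: the blacklist is consulted first, then the whitelist, and with neither everything passes. A usage sketch follows; the error names are illustrative, and running it assumes napalm-logs is installed, as in the publisher code earlier in this dump.

from napalm_logs.ext import check_whitelist_blacklist

print(check_whitelist_blacklist('INTERFACE_DOWN', whitelist=['*'], blacklist=['INTERFACE_*']))  # False: blacklist wins
print(check_whitelist_blacklist('BGP_NEIGHBOR_STATE_CHANGED', whitelist=['BGP_*', 'OSPF_*']))   # True: whitelisted
print(check_whitelist_blacklist('USER_LOGIN', whitelist=['BGP_*', 'OSPF_*']))                   # False: not whitelisted
print(check_whitelist_blacklist('USER_LOGIN'))                                                  # True: no filters at all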
poppy-project/pypot
pypot/server/snap.py
set_snap_server_variables
def set_snap_server_variables(host, port, snap_extension='.xml', path=None): """ Change dynamically port and host variable in xml Snap! project file""" localdir = os.getcwd() if path is None: os.chdir(os.path.dirname(os.path.realpath(__file__))) else: os.chdir(path) xml_files = [f for f in os.listdir('.') if f.endswith(snap_extension)] for filename in xml_files: with open(filename, 'r') as xf: xml = xf.read() # Change host variable xml = re.sub(r'''<variable name="host"><l>[\s\S]*?<\/l><\/variable>''', '''<variable name="host"><l>{}</l></variable>'''.format(host), xml) # Change host argument of "set $robot host" xml = re.sub(r'''<custom-block s="set \$robot host to \%s"><l>[\s\S]*?<\/l>''', '''<custom-block s="set $robot host to %s"><l>{}</l>'''.format(host), xml) # Change port variable xml = re.sub(r'''<variable name="port"><l>[\s\S]*?<\/l><\/variable>''', '''<variable name="port"><l>{}</l></variable>'''.format(port), xml) with open(filename, 'w') as xf: xf.write(xml) os.chdir(localdir)
python
def set_snap_server_variables(host, port, snap_extension='.xml', path=None): localdir = os.getcwd() if path is None: os.chdir(os.path.dirname(os.path.realpath(__file__))) else: os.chdir(path) xml_files = [f for f in os.listdir('.') if f.endswith(snap_extension)] for filename in xml_files: with open(filename, 'r') as xf: xml = xf.read() xml = re.sub(r'''<variable name="host"><l>[\s\S]*?<\/l><\/variable>''', '''<variable name="host"><l>{}</l></variable>'''.format(host), xml) xml = re.sub(r'''<custom-block s="set \$robot host to \%s"><l>[\s\S]*?<\/l>''', '''<custom-block s="set $robot host to %s"><l>{}</l>'''.format(host), xml) xml = re.sub(r'''<variable name="port"><l>[\s\S]*?<\/l><\/variable>''', '''<variable name="port"><l>{}</l></variable>'''.format(port), xml) with open(filename, 'w') as xf: xf.write(xml) os.chdir(localdir)
[ "def", "set_snap_server_variables", "(", "host", ",", "port", ",", "snap_extension", "=", "'.xml'", ",", "path", "=", "None", ")", ":", "localdir", "=", "os", ".", "getcwd", "(", ")", "if", "path", "is", "None", ":", "os", ".", "chdir", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ")", "else", ":", "os", ".", "chdir", "(", "path", ")", "xml_files", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "'.'", ")", "if", "f", ".", "endswith", "(", "snap_extension", ")", "]", "for", "filename", "in", "xml_files", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "xf", ":", "xml", "=", "xf", ".", "read", "(", ")", "# Change host variable", "xml", "=", "re", ".", "sub", "(", "r'''<variable name=\"host\"><l>[\\s\\S]*?<\\/l><\\/variable>'''", ",", "'''<variable name=\"host\"><l>{}</l></variable>'''", ".", "format", "(", "host", ")", ",", "xml", ")", "# Change host argument of \"set $robot host\"", "xml", "=", "re", ".", "sub", "(", "r'''<custom-block s=\"set \\$robot host to \\%s\"><l>[\\s\\S]*?<\\/l>'''", ",", "'''<custom-block s=\"set $robot host to %s\"><l>{}</l>'''", ".", "format", "(", "host", ")", ",", "xml", ")", "# Change port variable", "xml", "=", "re", ".", "sub", "(", "r'''<variable name=\"port\"><l>[\\s\\S]*?<\\/l><\\/variable>'''", ",", "'''<variable name=\"port\"><l>{}</l></variable>'''", ".", "format", "(", "port", ")", ",", "xml", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "xf", ":", "xf", ".", "write", "(", "xml", ")", "os", ".", "chdir", "(", "localdir", ")" ]
Change dynamically port and host variable in xml Snap! project file
[ "Change", "dynamically", "port", "and", "host", "variable", "in", "xml", "Snap!", "project", "file" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/server/snap.py#L82-L106
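set_snap_server_variables above rewrites the host and port variables of Snap! project files with regular-expression substitutions. Below is a reduced, self-contained illustration of the same substitution on an in-memory snippet; the XML fragment is fabricated for the demo.

import re

xml = '<variable name="host"><l>127.0.0.1</l></variable><variable name="port"><l>6969</l></variable>'
host, port = '192.168.0.42', 8080
xml = re.sub(r'<variable name="host"><l>[\s\S]*?</l></variable>',
             '<variable name="host"><l>{}</l></variable>'.format(host), xml)
xml = re.sub(r'<variable name="port"><l>[\s\S]*?</l></variable>',
             '<variable name="port"><l>{}</l></variable>'.format(port), xml)
print(xml)
# <variable name="host"><l>192.168.0.42</l></variable><variable name="port"><l>8080</l></variable>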
poppy-project/pypot
pypot/server/snap.py
SnapRobotServer.run
def run(self, quiet=None, server=''): """ Start the tornado server, run forever. 'quiet' and 'server' arguments are no longer used, they are keep only for backward compatibility """ try: loop = IOLoop() http_server = HTTPServer(WSGIContainer(self.app)) http_server.listen(self.port) loop.start() except socket.error as serr: # Re raise the socket error if not "[Errno 98] Address already in use" if serr.errno != errno.EADDRINUSE: raise serr else: logger.warning("""The webserver port {} is already used. The SnapRobotServer is maybe already run or another software use this port.""".format(self.port))
python
def run(self, quiet=None, server=''): try: loop = IOLoop() http_server = HTTPServer(WSGIContainer(self.app)) http_server.listen(self.port) loop.start() except socket.error as serr: if serr.errno != errno.EADDRINUSE: raise serr else: logger.warning("""The webserver port {} is already used. The SnapRobotServer is maybe already run or another software use this port.""".format(self.port))
[ "def", "run", "(", "self", ",", "quiet", "=", "None", ",", "server", "=", "''", ")", ":", "try", ":", "loop", "=", "IOLoop", "(", ")", "http_server", "=", "HTTPServer", "(", "WSGIContainer", "(", "self", ".", "app", ")", ")", "http_server", ".", "listen", "(", "self", ".", "port", ")", "loop", ".", "start", "(", ")", "except", "socket", ".", "error", "as", "serr", ":", "# Re raise the socket error if not \"[Errno 98] Address already in use\"", "if", "serr", ".", "errno", "!=", "errno", ".", "EADDRINUSE", ":", "raise", "serr", "else", ":", "logger", ".", "warning", "(", "\"\"\"The webserver port {} is already used.\nThe SnapRobotServer is maybe already run or another software use this port.\"\"\"", ".", "format", "(", "self", ".", "port", ")", ")" ]
Start the tornado server, run forever. 'quiet' and 'server' arguments are no longer used, they are kept only for backward compatibility
[ "Start", "the", "tornado", "server", "run", "forever", ".", "quiet", "and", "server", "arguments", "are", "no", "longer", "used", "they", "are", "kept", "only", "for", "backward", "compatibility" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/server/snap.py#L406-L423
poppy-project/pypot
pypot/utils/stoppablethread.py
make_update_loop
def make_update_loop(thread, update_func): """ Makes a run loop which calls an update function at a predefined frequency. """ while not thread.should_stop(): if thread.should_pause(): thread.wait_to_resume() start = time.time() if hasattr(thread, '_updated'): thread._updated.clear() update_func() if hasattr(thread, '_updated'): thread._updated.set() end = time.time() dt = thread.period - (end - start) if dt > 0: time.sleep(dt)
python
def make_update_loop(thread, update_func): while not thread.should_stop(): if thread.should_pause(): thread.wait_to_resume() start = time.time() if hasattr(thread, '_updated'): thread._updated.clear() update_func() if hasattr(thread, '_updated'): thread._updated.set() end = time.time() dt = thread.period - (end - start) if dt > 0: time.sleep(dt)
[ "def", "make_update_loop", "(", "thread", ",", "update_func", ")", ":", "while", "not", "thread", ".", "should_stop", "(", ")", ":", "if", "thread", ".", "should_pause", "(", ")", ":", "thread", ".", "wait_to_resume", "(", ")", "start", "=", "time", ".", "time", "(", ")", "if", "hasattr", "(", "thread", ",", "'_updated'", ")", ":", "thread", ".", "_updated", ".", "clear", "(", ")", "update_func", "(", ")", "if", "hasattr", "(", "thread", ",", "'_updated'", ")", ":", "thread", ".", "_updated", ".", "set", "(", ")", "end", "=", "time", ".", "time", "(", ")", "dt", "=", "thread", ".", "period", "-", "(", "end", "-", "start", ")", "if", "dt", ">", "0", ":", "time", ".", "sleep", "(", "dt", ")" ]
Makes a run loop which calls an update function at a predefined frequency.
[ "Makes", "a", "run", "loop", "which", "calls", "an", "update", "function", "at", "a", "predefined", "frequency", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L166-L183
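make_update_loop above holds a fixed update frequency by sleeping for whatever remains of the period once the update function has returned. A stand-alone sketch of that timing pattern, running three iterations at roughly 20 Hz:

import time

period = 1.0 / 20                     # 50 ms per iteration, i.e. 20 Hz
for i in range(3):
    start = time.time()
    # ... update_func() would be called here in the real loop ...
    remaining = period - (time.time() - start)
    if remaining > 0:
        time.sleep(remaining)
    print('iteration {} took ~{:.0f} ms'.format(i, (time.time() - start) * 1000))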
poppy-project/pypot
pypot/utils/stoppablethread.py
StoppableThread.start
def start(self): """ Start the run method as a new thread. It will first stop the thread if it is already running. """ if self.running: self.stop() self._thread = threading.Thread(target=self._wrapped_target) self._thread.daemon = True self._thread.start()
python
def start(self): if self.running: self.stop() self._thread = threading.Thread(target=self._wrapped_target) self._thread.daemon = True self._thread.start()
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "running", ":", "self", ".", "stop", "(", ")", "self", ".", "_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_wrapped_target", ")", "self", ".", "_thread", ".", "daemon", "=", "True", "self", ".", "_thread", ".", "start", "(", ")" ]
Start the run method as a new thread. It will first stop the thread if it is already running.
[ "Start", "the", "run", "method", "as", "a", "new", "thread", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L33-L44
poppy-project/pypot
pypot/utils/stoppablethread.py
StoppableThread.stop
def stop(self, wait=True): """ Stop the thread. More precisely, sends the stopping signal to the thread. It is then up to the run method to correctly responds. """ if self.started: self._running.clear() self._resume.set() # We cannot wait for ourself if wait and (threading.current_thread() != self._thread): while self._thread.is_alive(): self._running.clear() self._resume.set() self._thread.join(timeout=1.0) self._started.clear() self._resume.clear()
python
def stop(self, wait=True): if self.started: self._running.clear() self._resume.set() if wait and (threading.current_thread() != self._thread): while self._thread.is_alive(): self._running.clear() self._resume.set() self._thread.join(timeout=1.0) self._started.clear() self._resume.clear()
[ "def", "stop", "(", "self", ",", "wait", "=", "True", ")", ":", "if", "self", ".", "started", ":", "self", ".", "_running", ".", "clear", "(", ")", "self", ".", "_resume", ".", "set", "(", ")", "# We cannot wait for ourself", "if", "wait", "and", "(", "threading", ".", "current_thread", "(", ")", "!=", "self", ".", "_thread", ")", ":", "while", "self", ".", "_thread", ".", "is_alive", "(", ")", ":", "self", ".", "_running", ".", "clear", "(", ")", "self", ".", "_resume", ".", "set", "(", ")", "self", ".", "_thread", ".", "join", "(", "timeout", "=", "1.0", ")", "self", ".", "_started", ".", "clear", "(", ")", "self", ".", "_resume", ".", "clear", "(", ")" ]
Stop the thread. More precisely, sends the stopping signal to the thread. It is then up to the run method to correctly responds.
[ "Stop", "the", "thread", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L46-L64
poppy-project/pypot
pypot/utils/stoppablethread.py
StoppableThread.wait_to_start
def wait_to_start(self, allow_failure=False): """ Wait for the thread to actually starts. """ self._started.wait() if self._crashed and not allow_failure: self._thread.join() raise RuntimeError('Setup failed, see {} Traceback' 'for details.'.format(self._thread.name))
python
def wait_to_start(self, allow_failure=False): self._started.wait() if self._crashed and not allow_failure: self._thread.join() raise RuntimeError('Setup failed, see {} Traceback' 'for details.'.format(self._thread.name))
[ "def", "wait_to_start", "(", "self", ",", "allow_failure", "=", "False", ")", ":", "self", ".", "_started", ".", "wait", "(", ")", "if", "self", ".", "_crashed", "and", "not", "allow_failure", ":", "self", ".", "_thread", ".", "join", "(", ")", "raise", "RuntimeError", "(", "'Setup failed, see {} Traceback'", "'for details.'", ".", "format", "(", "self", ".", "_thread", ".", "name", ")", ")" ]
Wait for the thread to actually start.
[ "Wait", "for", "the", "thread", "to", "actually", "start", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L82-L89
poppy-project/pypot
pypot/vrep/__init__.py
from_vrep
def from_vrep(config, vrep_host='127.0.0.1', vrep_port=19997, scene=None, tracked_objects=[], tracked_collisions=[], id=None, shared_vrep_io=None): """ Create a robot from a V-REP instance. :param config: robot configuration (either the path to the json or directly the dictionary) :type config: str or dict :param str vrep_host: host of the V-REP server :param int vrep_port: port of the V-REP server :param str scene: path to the V-REP scene to load and start :param list tracked_objects: list of V-REP dummy object to track :param list tracked_collisions: list of V-REP collision to track :param int id: robot id in simulator (useful when using a scene with multiple robots) :param vrep_io: use an already connected VrepIO (useful when using a scene with multiple robots) :type vrep_io: :class:`~pypot.vrep.io.VrepIO` This function tries to connect to a V-REP instance and expects to find motors with names corresponding as the ones found in the config. .. note:: The :class:`~pypot.robot.robot.Robot` returned will also provide a convenience reset_simulation method which resets the simulation and the robot position to its intial stance. .. note:: Using the same configuration, you should be able to switch from a real to a simulated robot just by switching from :func:`~pypot.robot.config.from_config` to :func:`~pypot.vrep.from_vrep`. For instance:: import json with open('my_config.json') as f: config = json.load(f) from pypot.robot import from_config from pypot.vrep import from_vrep real_robot = from_config(config) simulated_robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt') """ if shared_vrep_io is None: vrep_io = VrepIO(vrep_host, vrep_port) else: vrep_io = shared_vrep_io vreptime = vrep_time(vrep_io) pypot_time.time = vreptime.get_time pypot_time.sleep = vreptime.sleep if isinstance(config, basestring): with open(config) as f: config = json.load(f, object_pairs_hook=OrderedDict) motors = [motor_from_confignode(config, name) for name in config['motors'].keys()] vc = VrepController(vrep_io, scene, motors, id=id) vc._init_vrep_streaming() sensor_controllers = [] if tracked_objects: sensors = [ObjectTracker(name) for name in tracked_objects] vot = VrepObjectTracker(vrep_io, sensors) sensor_controllers.append(vot) if tracked_collisions: sensors = [VrepCollisionDetector(name) for name in tracked_collisions] vct = VrepCollisionTracker(vrep_io, sensors) sensor_controllers.append(vct) robot = Robot(motor_controllers=[vc], sensor_controllers=sensor_controllers) for m in robot.motors: m.goto_behavior = 'minjerk' init_pos = {m: m.goal_position for m in robot.motors} make_alias(config, robot) def start_simu(): vrep_io.start_simulation() for m, p in init_pos.iteritems(): m.goal_position = p vc.start() if tracked_objects: vot.start() if tracked_collisions: vct.start() while vrep_io.get_simulation_current_time() < 1.: sys_time.sleep(0.1) def stop_simu(): if tracked_objects: vot.stop() if tracked_collisions: vct.stop() vc.stop() vrep_io.stop_simulation() def reset_simu(): stop_simu() sys_time.sleep(0.5) start_simu() robot.start_simulation = start_simu robot.stop_simulation = stop_simu robot.reset_simulation = reset_simu def current_simulation_time(robot): return robot._controllers[0].io.get_simulation_current_time() Robot.current_simulation_time = property(lambda robot: current_simulation_time(robot)) def get_object_position(robot, object, relative_to_object=None): return vrep_io.get_object_position(object, relative_to_object) Robot.get_object_position = partial(get_object_position, robot) def 
get_object_orientation(robot, object, relative_to_object=None): return vrep_io.get_object_orientation(object, relative_to_object) Robot.get_object_orientation = partial(get_object_orientation, robot) return robot
python
def from_vrep(config, vrep_host='127.0.0.1', vrep_port=19997, scene=None, tracked_objects=[], tracked_collisions=[], id=None, shared_vrep_io=None): if shared_vrep_io is None: vrep_io = VrepIO(vrep_host, vrep_port) else: vrep_io = shared_vrep_io vreptime = vrep_time(vrep_io) pypot_time.time = vreptime.get_time pypot_time.sleep = vreptime.sleep if isinstance(config, basestring): with open(config) as f: config = json.load(f, object_pairs_hook=OrderedDict) motors = [motor_from_confignode(config, name) for name in config['motors'].keys()] vc = VrepController(vrep_io, scene, motors, id=id) vc._init_vrep_streaming() sensor_controllers = [] if tracked_objects: sensors = [ObjectTracker(name) for name in tracked_objects] vot = VrepObjectTracker(vrep_io, sensors) sensor_controllers.append(vot) if tracked_collisions: sensors = [VrepCollisionDetector(name) for name in tracked_collisions] vct = VrepCollisionTracker(vrep_io, sensors) sensor_controllers.append(vct) robot = Robot(motor_controllers=[vc], sensor_controllers=sensor_controllers) for m in robot.motors: m.goto_behavior = 'minjerk' init_pos = {m: m.goal_position for m in robot.motors} make_alias(config, robot) def start_simu(): vrep_io.start_simulation() for m, p in init_pos.iteritems(): m.goal_position = p vc.start() if tracked_objects: vot.start() if tracked_collisions: vct.start() while vrep_io.get_simulation_current_time() < 1.: sys_time.sleep(0.1) def stop_simu(): if tracked_objects: vot.stop() if tracked_collisions: vct.stop() vc.stop() vrep_io.stop_simulation() def reset_simu(): stop_simu() sys_time.sleep(0.5) start_simu() robot.start_simulation = start_simu robot.stop_simulation = stop_simu robot.reset_simulation = reset_simu def current_simulation_time(robot): return robot._controllers[0].io.get_simulation_current_time() Robot.current_simulation_time = property(lambda robot: current_simulation_time(robot)) def get_object_position(robot, object, relative_to_object=None): return vrep_io.get_object_position(object, relative_to_object) Robot.get_object_position = partial(get_object_position, robot) def get_object_orientation(robot, object, relative_to_object=None): return vrep_io.get_object_orientation(object, relative_to_object) Robot.get_object_orientation = partial(get_object_orientation, robot) return robot
[ "def", "from_vrep", "(", "config", ",", "vrep_host", "=", "'127.0.0.1'", ",", "vrep_port", "=", "19997", ",", "scene", "=", "None", ",", "tracked_objects", "=", "[", "]", ",", "tracked_collisions", "=", "[", "]", ",", "id", "=", "None", ",", "shared_vrep_io", "=", "None", ")", ":", "if", "shared_vrep_io", "is", "None", ":", "vrep_io", "=", "VrepIO", "(", "vrep_host", ",", "vrep_port", ")", "else", ":", "vrep_io", "=", "shared_vrep_io", "vreptime", "=", "vrep_time", "(", "vrep_io", ")", "pypot_time", ".", "time", "=", "vreptime", ".", "get_time", "pypot_time", ".", "sleep", "=", "vreptime", ".", "sleep", "if", "isinstance", "(", "config", ",", "basestring", ")", ":", "with", "open", "(", "config", ")", "as", "f", ":", "config", "=", "json", ".", "load", "(", "f", ",", "object_pairs_hook", "=", "OrderedDict", ")", "motors", "=", "[", "motor_from_confignode", "(", "config", ",", "name", ")", "for", "name", "in", "config", "[", "'motors'", "]", ".", "keys", "(", ")", "]", "vc", "=", "VrepController", "(", "vrep_io", ",", "scene", ",", "motors", ",", "id", "=", "id", ")", "vc", ".", "_init_vrep_streaming", "(", ")", "sensor_controllers", "=", "[", "]", "if", "tracked_objects", ":", "sensors", "=", "[", "ObjectTracker", "(", "name", ")", "for", "name", "in", "tracked_objects", "]", "vot", "=", "VrepObjectTracker", "(", "vrep_io", ",", "sensors", ")", "sensor_controllers", ".", "append", "(", "vot", ")", "if", "tracked_collisions", ":", "sensors", "=", "[", "VrepCollisionDetector", "(", "name", ")", "for", "name", "in", "tracked_collisions", "]", "vct", "=", "VrepCollisionTracker", "(", "vrep_io", ",", "sensors", ")", "sensor_controllers", ".", "append", "(", "vct", ")", "robot", "=", "Robot", "(", "motor_controllers", "=", "[", "vc", "]", ",", "sensor_controllers", "=", "sensor_controllers", ")", "for", "m", "in", "robot", ".", "motors", ":", "m", ".", "goto_behavior", "=", "'minjerk'", "init_pos", "=", "{", "m", ":", "m", ".", "goal_position", "for", "m", "in", "robot", ".", "motors", "}", "make_alias", "(", "config", ",", "robot", ")", "def", "start_simu", "(", ")", ":", "vrep_io", ".", "start_simulation", "(", ")", "for", "m", ",", "p", "in", "init_pos", ".", "iteritems", "(", ")", ":", "m", ".", "goal_position", "=", "p", "vc", ".", "start", "(", ")", "if", "tracked_objects", ":", "vot", ".", "start", "(", ")", "if", "tracked_collisions", ":", "vct", ".", "start", "(", ")", "while", "vrep_io", ".", "get_simulation_current_time", "(", ")", "<", "1.", ":", "sys_time", ".", "sleep", "(", "0.1", ")", "def", "stop_simu", "(", ")", ":", "if", "tracked_objects", ":", "vot", ".", "stop", "(", ")", "if", "tracked_collisions", ":", "vct", ".", "stop", "(", ")", "vc", ".", "stop", "(", ")", "vrep_io", ".", "stop_simulation", "(", ")", "def", "reset_simu", "(", ")", ":", "stop_simu", "(", ")", "sys_time", ".", "sleep", "(", "0.5", ")", "start_simu", "(", ")", "robot", ".", "start_simulation", "=", "start_simu", "robot", ".", "stop_simulation", "=", "stop_simu", "robot", ".", "reset_simulation", "=", "reset_simu", "def", "current_simulation_time", "(", "robot", ")", ":", "return", "robot", ".", "_controllers", "[", "0", "]", ".", "io", ".", "get_simulation_current_time", "(", ")", "Robot", ".", "current_simulation_time", "=", "property", "(", "lambda", "robot", ":", "current_simulation_time", "(", "robot", ")", ")", "def", "get_object_position", "(", "robot", ",", "object", ",", "relative_to_object", "=", "None", ")", ":", "return", "vrep_io", ".", "get_object_position", "(", "object", ",", 
"relative_to_object", ")", "Robot", ".", "get_object_position", "=", "partial", "(", "get_object_position", ",", "robot", ")", "def", "get_object_orientation", "(", "robot", ",", "object", ",", "relative_to_object", "=", "None", ")", ":", "return", "vrep_io", ".", "get_object_orientation", "(", "object", ",", "relative_to_object", ")", "Robot", ".", "get_object_orientation", "=", "partial", "(", "get_object_orientation", ",", "robot", ")", "return", "robot" ]
Create a robot from a V-REP instance. :param config: robot configuration (either the path to the json or directly the dictionary) :type config: str or dict :param str vrep_host: host of the V-REP server :param int vrep_port: port of the V-REP server :param str scene: path to the V-REP scene to load and start :param list tracked_objects: list of V-REP dummy objects to track :param list tracked_collisions: list of V-REP collisions to track :param int id: robot id in simulator (useful when using a scene with multiple robots) :param vrep_io: use an already connected VrepIO (useful when using a scene with multiple robots) :type vrep_io: :class:`~pypot.vrep.io.VrepIO` This function tries to connect to a V-REP instance and expects to find motors with names corresponding to the ones found in the config. .. note:: The :class:`~pypot.robot.robot.Robot` returned will also provide a convenience reset_simulation method which resets the simulation and the robot position to its initial stance. .. note:: Using the same configuration, you should be able to switch from a real to a simulated robot just by switching from :func:`~pypot.robot.config.from_config` to :func:`~pypot.vrep.from_vrep`. For instance:: import json with open('my_config.json') as f: config = json.load(f) from pypot.robot import from_config from pypot.vrep import from_vrep real_robot = from_config(config) simulated_robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt')
[ "Create", "a", "robot", "from", "a", "V", "-", "REP", "instance", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/__init__.py#L52-L176
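As a follow-up to the docstring example in the from_vrep record above, a sketch that also uses the object and collision tracking arguments. The scene file, dummy name and collision name are placeholders that must exist in your own V-REP scene, and 'my_config.json' is assumed to describe the simulated robot:

    import json

    from pypot.vrep import from_vrep

    with open('my_config.json') as f:                         # hypothetical config file
        config = json.load(f)

    robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt',
                      tracked_objects=['head_dummy'],         # placeholder dummy defined in the scene
                      tracked_collisions=['l_arm_collision'])  # placeholder collision defined in the scene

    robot.start_simulation()                                  # start the simulation (stop/reset are also attached)
    print(robot.get_object_position('head_dummy'))            # position of a tracked dummy
    robot.reset_simulation()                                  # stop, wait, restart at the initial stance
    robot.stop_simulation()
    robot.close()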
poppy-project/pypot
pypot/dynamixel/io/io.py
DxlIO.get_control_mode
def get_control_mode(self, ids): """ Gets the mode ('joint' or 'wheel') for the specified motors. """ to_get_ids = [id for id in ids if id not in self._known_mode] limits = self.get_angle_limit(to_get_ids, convert=False) modes = ['wheel' if limit == (0, 0) else 'joint' for limit in limits] self._known_mode.update(zip(to_get_ids, modes)) return tuple(self._known_mode[id] for id in ids)
python
def get_control_mode(self, ids): to_get_ids = [id for id in ids if id not in self._known_mode] limits = self.get_angle_limit(to_get_ids, convert=False) modes = ['wheel' if limit == (0, 0) else 'joint' for limit in limits] self._known_mode.update(zip(to_get_ids, modes)) return tuple(self._known_mode[id] for id in ids)
[ "def", "get_control_mode", "(", "self", ",", "ids", ")", ":", "to_get_ids", "=", "[", "id", "for", "id", "in", "ids", "if", "id", "not", "in", "self", ".", "_known_mode", "]", "limits", "=", "self", ".", "get_angle_limit", "(", "to_get_ids", ",", "convert", "=", "False", ")", "modes", "=", "[", "'wheel'", "if", "limit", "==", "(", "0", ",", "0", ")", "else", "'joint'", "for", "limit", "in", "limits", "]", "self", ".", "_known_mode", ".", "update", "(", "zip", "(", "to_get_ids", ",", "modes", ")", ")", "return", "tuple", "(", "self", ".", "_known_mode", "[", "id", "]", "for", "id", "in", "ids", ")" ]
Gets the mode ('joint' or 'wheel') for the specified motors.
[ "Gets", "the", "mode", "(", "joint", "or", "wheel", ")", "for", "the", "specified", "motors", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L20-L28
poppy-project/pypot
pypot/dynamixel/io/io.py
DxlIO.set_wheel_mode
def set_wheel_mode(self, ids): """ Sets the specified motors to wheel mode. """ self.set_control_mode(dict(zip(ids, itertools.repeat('wheel'))))
python
def set_wheel_mode(self, ids): self.set_control_mode(dict(zip(ids, itertools.repeat('wheel'))))
[ "def", "set_wheel_mode", "(", "self", ",", "ids", ")", ":", "self", ".", "set_control_mode", "(", "dict", "(", "zip", "(", "ids", ",", "itertools", ".", "repeat", "(", "'wheel'", ")", ")", ")", ")" ]
Sets the specified motors to wheel mode.
[ "Sets", "the", "specified", "motors", "to", "wheel", "mode", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L30-L32
poppy-project/pypot
pypot/dynamixel/io/io.py
DxlIO.set_joint_mode
def set_joint_mode(self, ids): """ Sets the specified motors to joint mode. """ self.set_control_mode(dict(zip(ids, itertools.repeat('joint'))))
python
def set_joint_mode(self, ids): self.set_control_mode(dict(zip(ids, itertools.repeat('joint'))))
[ "def", "set_joint_mode", "(", "self", ",", "ids", ")", ":", "self", ".", "set_control_mode", "(", "dict", "(", "zip", "(", "ids", ",", "itertools", ".", "repeat", "(", "'joint'", ")", ")", ")", ")" ]
Sets the specified motors to joint mode.
[ "Sets", "the", "specified", "motors", "to", "joint", "mode", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L34-L36
poppy-project/pypot
pypot/dynamixel/io/io.py
DxlIO.set_angle_limit
def set_angle_limit(self, limit_for_id, **kwargs): """ Sets the angle limit to the specified motors. """ convert = kwargs['convert'] if 'convert' in kwargs else self._convert if 'wheel' in self.get_control_mode(limit_for_id.keys()): raise ValueError('can not change the angle limit of a motor in wheel mode') if (0, 0) in limit_for_id.values(): raise ValueError('can not set limit to (0, 0)') self._set_angle_limit(limit_for_id, convert=convert)
python
def set_angle_limit(self, limit_for_id, **kwargs): convert = kwargs['convert'] if 'convert' in kwargs else self._convert if 'wheel' in self.get_control_mode(limit_for_id.keys()): raise ValueError('can not change the angle limit of a motor in wheel mode') if (0, 0) in limit_for_id.values(): raise ValueError('can not set limit to (0, 0)') self._set_angle_limit(limit_for_id, convert=convert)
[ "def", "set_angle_limit", "(", "self", ",", "limit_for_id", ",", "*", "*", "kwargs", ")", ":", "convert", "=", "kwargs", "[", "'convert'", "]", "if", "'convert'", "in", "kwargs", "else", "self", ".", "_convert", "if", "'wheel'", "in", "self", ".", "get_control_mode", "(", "limit_for_id", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "'can not change the angle limit of a motor in wheel mode'", ")", "if", "(", "0", ",", "0", ")", "in", "limit_for_id", ".", "values", "(", ")", ":", "raise", "ValueError", "(", "'can not set limit to (0, 0)'", ")", "self", ".", "_set_angle_limit", "(", "limit_for_id", ",", "convert", "=", "convert", ")" ]
Sets the angle limit to the specified motors.
[ "Sets", "the", "angle", "limit", "to", "the", "specified", "motors", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L55-L65
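A sketch tying the four DxlIO records above together: scan a bus, read the control mode, force joint mode and set an angle limit. The serial port and the id range are assumptions, and the angle limits are given in converted units (degrees), which is assumed to be the default conversion here:

    from pypot.dynamixel import DxlIO

    with DxlIO('/dev/ttyUSB0') as dxl:                  # port is an assumption
        ids = dxl.scan(list(range(1, 10)))              # look for motors with ids 1..9
        print(dxl.get_control_mode(ids))                # e.g. ('joint', 'wheel', ...)

        dxl.set_joint_mode(ids)                         # wheel-mode motors cannot receive angle limits
        dxl.set_angle_limit({i: (-150.0, 150.0) for i in ids})
        print(dxl.get_angle_limit(ids))

set_angle_limit refuses both wheel-mode motors and the (0, 0) limit, since (0, 0) is precisely how wheel mode is encoded (see get_control_mode).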
poppy-project/pypot
pypot/robot/robot.py
Robot.close
def close(self): """ Cleans the robot by stopping synchronization and all controllers.""" self.stop_sync() [c.io.close() for c in self._controllers if c.io is not None]
python
def close(self): self.stop_sync() [c.io.close() for c in self._controllers if c.io is not None]
[ "def", "close", "(", "self", ")", ":", "self", ".", "stop_sync", "(", ")", "[", "c", ".", "io", ".", "close", "(", ")", "for", "c", "in", "self", ".", "_controllers", "if", "c", ".", "io", "is", "not", "None", "]" ]
Cleans the robot by stopping synchronization and all controllers.
[ "Cleans", "the", "robot", "by", "stopping", "synchronization", "and", "all", "controllers", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L51-L54
poppy-project/pypot
pypot/robot/robot.py
Robot.start_sync
def start_sync(self): """ Starts all the synchonization loop (sensor/effector controllers). """ if self._syncing: return [c.start() for c in self._controllers] [c.wait_to_start() for c in self._controllers] self._primitive_manager.start() self._primitive_manager._running.wait() self._syncing = True logger.info('Starting robot synchronization.')
python
def start_sync(self): if self._syncing: return [c.start() for c in self._controllers] [c.wait_to_start() for c in self._controllers] self._primitive_manager.start() self._primitive_manager._running.wait() self._syncing = True logger.info('Starting robot synchronization.')
[ "def", "start_sync", "(", "self", ")", ":", "if", "self", ".", "_syncing", ":", "return", "[", "c", ".", "start", "(", ")", "for", "c", "in", "self", ".", "_controllers", "]", "[", "c", ".", "wait_to_start", "(", ")", "for", "c", "in", "self", ".", "_controllers", "]", "self", ".", "_primitive_manager", ".", "start", "(", ")", "self", ".", "_primitive_manager", ".", "_running", ".", "wait", "(", ")", "self", ".", "_syncing", "=", "True", "logger", ".", "info", "(", "'Starting robot synchronization.'", ")" ]
Starts all the synchronization loops (sensor/effector controllers).
[ "Starts", "all", "the", "synchronization", "loops", "(", "sensor", "/", "effector", "controllers", ")", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L59-L71
poppy-project/pypot
pypot/robot/robot.py
Robot.stop_sync
def stop_sync(self): """ Stops all the synchonization loop (sensor/effector controllers). """ if not self._syncing: return if self._primitive_manager.running: self._primitive_manager.stop() [c.stop() for c in self._controllers] [s.close() for s in self.sensors if hasattr(s, 'close')] self._syncing = False logger.info('Stopping robot synchronization.')
python
def stop_sync(self): if not self._syncing: return if self._primitive_manager.running: self._primitive_manager.stop() [c.stop() for c in self._controllers] [s.close() for s in self.sensors if hasattr(s, 'close')] self._syncing = False logger.info('Stopping robot synchronization.')
[ "def", "stop_sync", "(", "self", ")", ":", "if", "not", "self", ".", "_syncing", ":", "return", "if", "self", ".", "_primitive_manager", ".", "running", ":", "self", ".", "_primitive_manager", ".", "stop", "(", ")", "[", "c", ".", "stop", "(", ")", "for", "c", "in", "self", ".", "_controllers", "]", "[", "s", ".", "close", "(", ")", "for", "s", "in", "self", ".", "sensors", "if", "hasattr", "(", "s", ",", "'close'", ")", "]", "self", ".", "_syncing", "=", "False", "logger", ".", "info", "(", "'Stopping robot synchronization.'", ")" ]
Stops all the synchronization loops (sensor/effector controllers).
[ "Stops", "all", "the", "synchronization", "loops", "(", "sensor", "/", "effector", "controllers", ")", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L73-L86
poppy-project/pypot
pypot/robot/robot.py
Robot.goto_position
def goto_position(self, position_for_motors, duration, control=None, wait=False): """ Moves a subset of the motors to a position within a specific duration. :param dict position_for_motors: which motors you want to move {motor_name: pos, motor_name: pos,...} :param float duration: duration of the move :param str control: control type ('dummy', 'minjerk') :param bool wait: whether or not to wait for the end of the move .. note::In case of dynamixel motors, the speed is automatically adjusted so the goal position is reached after the chosen duration. """ for i, (motor_name, position) in enumerate(position_for_motors.iteritems()): w = False if i < len(position_for_motors) - 1 else wait m = getattr(self, motor_name) m.goto_position(position, duration, control, wait=w)
python
def goto_position(self, position_for_motors, duration, control=None, wait=False): for i, (motor_name, position) in enumerate(position_for_motors.iteritems()): w = False if i < len(position_for_motors) - 1 else wait m = getattr(self, motor_name) m.goto_position(position, duration, control, wait=w)
[ "def", "goto_position", "(", "self", ",", "position_for_motors", ",", "duration", ",", "control", "=", "None", ",", "wait", "=", "False", ")", ":", "for", "i", ",", "(", "motor_name", ",", "position", ")", "in", "enumerate", "(", "position_for_motors", ".", "iteritems", "(", ")", ")", ":", "w", "=", "False", "if", "i", "<", "len", "(", "position_for_motors", ")", "-", "1", "else", "wait", "m", "=", "getattr", "(", "self", ",", "motor_name", ")", "m", ".", "goto_position", "(", "position", ",", "duration", ",", "control", ",", "wait", "=", "w", ")" ]
Moves a subset of the motors to a position within a specific duration. :param dict position_for_motors: which motors you want to move {motor_name: pos, motor_name: pos,...} :param float duration: duration of the move :param str control: control type ('dummy', 'minjerk') :param bool wait: whether or not to wait for the end of the move .. note:: In case of dynamixel motors, the speed is automatically adjusted so the goal position is reached after the chosen duration.
[ "Moves", "a", "subset", "of", "the", "motors", "to", "a", "position", "within", "a", "specific", "duration", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L126-L141
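A short usage sketch for goto_position; the configuration file and the motor names ('base_pan', 'head_tilt') are placeholders for whatever your own config defines:

    import json

    from pypot.robot import from_config

    with open('my_config.json') as f:                   # hypothetical config file
        config = json.load(f)

    robot = from_config(config)

    # Move two motors over 2 seconds and block until the move ends. wait is only
    # honoured for the last motor of the dict, which is enough to synchronise on
    # the whole move since every motor shares the same duration.
    robot.goto_position({'base_pan': 0.0, 'head_tilt': 20.0}, duration=2.0, wait=True)
    robot.close()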
poppy-project/pypot
pypot/robot/robot.py
Robot.power_up
def power_up(self): """ Changes all settings to guarantee the motors will be used at their maximum power. """ for m in self.motors: m.compliant = False m.moving_speed = 0 m.torque_limit = 100.0
python
def power_up(self): for m in self.motors: m.compliant = False m.moving_speed = 0 m.torque_limit = 100.0
[ "def", "power_up", "(", "self", ")", ":", "for", "m", "in", "self", ".", "motors", ":", "m", ".", "compliant", "=", "False", "m", ".", "moving_speed", "=", "0", "m", ".", "torque_limit", "=", "100.0" ]
Changes all settings to guarantee the motors will be used at their maximum power.
[ "Changes", "all", "settings", "to", "guarantee", "the", "motors", "will", "be", "used", "at", "their", "maximum", "power", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L143-L148
poppy-project/pypot
pypot/robot/robot.py
Robot.to_config
def to_config(self): """ Generates the config for the current robot. .. note:: The generated config should be used as a basis and must probably be modified. """ from ..dynamixel.controller import DxlController dxl_controllers = [c for c in self._controllers if isinstance(c, DxlController)] config = {} config['controllers'] = {} for i, c in enumerate(dxl_controllers): name = 'dxl_controller_{}'.format(i) config['controllers'][name] = { 'port': c.io.port, 'sync_read': c.io._sync_read, 'attached_motors': [m.name for m in c.motors], } config['motors'] = {} for m in self.motors: config['motors'][m.name] = { 'id': m.id, 'type': m.model, 'offset': m.offset, 'orientation': 'direct' if m.direct else 'indirect', 'angle_limit': m.angle_limit, } if m.angle_limit == (0, 0): config['motors']['wheel_mode'] = True config['motorgroups'] = {} return config
python
def to_config(self): from ..dynamixel.controller import DxlController dxl_controllers = [c for c in self._controllers if isinstance(c, DxlController)] config = {} config['controllers'] = {} for i, c in enumerate(dxl_controllers): name = 'dxl_controller_{}'.format(i) config['controllers'][name] = { 'port': c.io.port, 'sync_read': c.io._sync_read, 'attached_motors': [m.name for m in c.motors], } config['motors'] = {} for m in self.motors: config['motors'][m.name] = { 'id': m.id, 'type': m.model, 'offset': m.offset, 'orientation': 'direct' if m.direct else 'indirect', 'angle_limit': m.angle_limit, } if m.angle_limit == (0, 0): config['motors']['wheel_mode'] = True config['motorgroups'] = {} return config
[ "def", "to_config", "(", "self", ")", ":", "from", ".", ".", "dynamixel", ".", "controller", "import", "DxlController", "dxl_controllers", "=", "[", "c", "for", "c", "in", "self", ".", "_controllers", "if", "isinstance", "(", "c", ",", "DxlController", ")", "]", "config", "=", "{", "}", "config", "[", "'controllers'", "]", "=", "{", "}", "for", "i", ",", "c", "in", "enumerate", "(", "dxl_controllers", ")", ":", "name", "=", "'dxl_controller_{}'", ".", "format", "(", "i", ")", "config", "[", "'controllers'", "]", "[", "name", "]", "=", "{", "'port'", ":", "c", ".", "io", ".", "port", ",", "'sync_read'", ":", "c", ".", "io", ".", "_sync_read", ",", "'attached_motors'", ":", "[", "m", ".", "name", "for", "m", "in", "c", ".", "motors", "]", ",", "}", "config", "[", "'motors'", "]", "=", "{", "}", "for", "m", "in", "self", ".", "motors", ":", "config", "[", "'motors'", "]", "[", "m", ".", "name", "]", "=", "{", "'id'", ":", "m", ".", "id", ",", "'type'", ":", "m", ".", "model", ",", "'offset'", ":", "m", ".", "offset", ",", "'orientation'", ":", "'direct'", "if", "m", ".", "direct", "else", "'indirect'", ",", "'angle_limit'", ":", "m", ".", "angle_limit", ",", "}", "if", "m", ".", "angle_limit", "==", "(", "0", ",", "0", ")", ":", "config", "[", "'motors'", "]", "[", "'wheel_mode'", "]", "=", "True", "config", "[", "'motorgroups'", "]", "=", "{", "}", "return", "config" ]
Generates the config for the current robot. .. note:: The generated config should be used as a basis and will most likely need to be modified.
[ "Generates", "the", "config", "for", "the", "current", "robot", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L150-L187
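Since to_config returns a plain dictionary, persisting it is a one-line json.dump; as the docstring says, treat the result as a starting point rather than a finished config. A sketch, assuming motors are reachable so that autodetect_robot (see the record further down) can build the robot:

    import json

    from pypot.dynamixel import autodetect_robot

    robot = autodetect_robot()                          # requires motors on a connected bus
    config = robot.to_config()                          # {'controllers': ..., 'motors': ..., 'motorgroups': ...}

    with open('generated_config.json', 'w') as f:
        json.dump(config, f, indent=2)

    robot.close()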
poppy-project/pypot
pypot/sensor/imagefeature/blob.py
BlobDetector.detect_blob
def detect_blob(self, img, filters): """ "filters" must be something similar to: filters = { 'R': (150, 255), # (min, max) 'S': (150, 255), } """ acc_mask = ones(img.shape[:2], dtype=uint8) * 255 rgb = img.copy() hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) for c, (min, max) in filters.items(): img = rgb if c in 'RGB' else hsv mask = img[:, :, self.channels[c]] mask[mask < min] = 0 mask[mask > max] = 0 acc_mask &= mask kernel = ones((5, 5), uint8) acc_mask = cv2.dilate(cv2.erode(acc_mask, kernel), kernel) circles = cv2.HoughCircles(acc_mask, cv2.HOUGH_GRADIENT, 3, img.shape[0] / 5.) return circles.reshape(-1, 3) if circles is not None else []
python
def detect_blob(self, img, filters): acc_mask = ones(img.shape[:2], dtype=uint8) * 255 rgb = img.copy() hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) for c, (min, max) in filters.items(): img = rgb if c in 'RGB' else hsv mask = img[:, :, self.channels[c]] mask[mask < min] = 0 mask[mask > max] = 0 acc_mask &= mask kernel = ones((5, 5), uint8) acc_mask = cv2.dilate(cv2.erode(acc_mask, kernel), kernel) circles = cv2.HoughCircles(acc_mask, cv2.HOUGH_GRADIENT, 3, img.shape[0] / 5.) return circles.reshape(-1, 3) if circles is not None else []
[ "def", "detect_blob", "(", "self", ",", "img", ",", "filters", ")", ":", "acc_mask", "=", "ones", "(", "img", ".", "shape", "[", ":", "2", "]", ",", "dtype", "=", "uint8", ")", "*", "255", "rgb", "=", "img", ".", "copy", "(", ")", "hsv", "=", "cv2", ".", "cvtColor", "(", "img", ",", "cv2", ".", "COLOR_BGR2HSV", ")", "for", "c", ",", "(", "min", ",", "max", ")", "in", "filters", ".", "items", "(", ")", ":", "img", "=", "rgb", "if", "c", "in", "'RGB'", "else", "hsv", "mask", "=", "img", "[", ":", ",", ":", ",", "self", ".", "channels", "[", "c", "]", "]", "mask", "[", "mask", "<", "min", "]", "=", "0", "mask", "[", "mask", ">", "max", "]", "=", "0", "acc_mask", "&=", "mask", "kernel", "=", "ones", "(", "(", "5", ",", "5", ")", ",", "uint8", ")", "acc_mask", "=", "cv2", ".", "dilate", "(", "cv2", ".", "erode", "(", "acc_mask", ",", "kernel", ")", ",", "kernel", ")", "circles", "=", "cv2", ".", "HoughCircles", "(", "acc_mask", ",", "cv2", ".", "HOUGH_GRADIENT", ",", "3", ",", "img", ".", "shape", "[", "0", "]", "/", "5.", ")", "return", "circles", ".", "reshape", "(", "-", "1", ",", "3", ")", "if", "circles", "is", "not", "None", "else", "[", "]" ]
"filters" must be something similar to: filters = { 'R': (150, 255), # (min, max) 'S': (150, 255), }
[ "filters", "must", "be", "something", "similar", "to", ":", "filters", "=", "{", "R", ":", "(", "150", "255", ")", "#", "(", "min", "max", ")", "S", ":", "(", "150", "255", ")", "}" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/sensor/imagefeature/blob.py#L40-L67
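The filters argument maps channel names to (min, max) ranges that are AND-ed together before the Hough circle detection. The standalone sketch below reproduces that masking logic with an explicit channel table so it can run without the detector class; the BGR/HSV channel indices and the test image path are assumptions:

    import cv2
    import numpy as np

    channels = {'B': 0, 'G': 1, 'R': 2, 'H': 0, 'S': 1, 'V': 2}   # assumed layout: BGR image, HSV conversion
    filters = {'R': (150, 255), 'S': (150, 255)}                  # keep pixels that are red and saturated

    img = cv2.imread('ball.jpg')                                  # hypothetical test image (BGR)
    hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

    acc_mask = np.ones(img.shape[:2], dtype=np.uint8) * 255
    for c, (lo, hi) in filters.items():
        src = img if c in 'BGR' else hsv
        chan = src[:, :, channels[c]].copy()
        chan[(chan < lo) | (chan > hi)] = 0                       # zero everything outside the range
        acc_mask &= chan                                          # intersect with the previous channels

    kernel = np.ones((5, 5), np.uint8)
    acc_mask = cv2.dilate(cv2.erode(acc_mask, kernel), kernel)    # erode/dilate removes small speckles
    circles = cv2.HoughCircles(acc_mask, cv2.HOUGH_GRADIENT, 3, img.shape[0] / 5.)
    print([] if circles is None else circles.reshape(-1, 3))      # one (x, y, radius) row per blob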
poppy-project/pypot
pypot/vrep/controller.py
VrepController.setup
def setup(self): """ Setups the controller by reading/setting position for all motors. """ self._init_vrep_streaming() # Init lifo for temperature spoofing for m in self.motors: m.__dict__['_load_fifo'] = deque(200 * [1], maxlen=200) self.update()
python
def setup(self): self._init_vrep_streaming() for m in self.motors: m.__dict__['_load_fifo'] = deque(200 * [1], maxlen=200) self.update()
[ "def", "setup", "(", "self", ")", ":", "self", ".", "_init_vrep_streaming", "(", ")", "# Init lifo for temperature spoofing", "for", "m", "in", "self", ".", "motors", ":", "m", ".", "__dict__", "[", "'_load_fifo'", "]", "=", "deque", "(", "200", "*", "[", "1", "]", ",", "maxlen", "=", "200", ")", "self", ".", "update", "(", ")" ]
Sets up the controller by reading/setting position for all motors.
[ "Sets", "up", "the", "controller", "by", "reading", "/", "setting", "position", "for", "all", "motors", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L31-L39
poppy-project/pypot
pypot/vrep/controller.py
VrepController.update
def update(self): """ Synchronization update loop. At each update all motor position are read from vrep and set to the motors. The motors target position are also send to v-rep. """ # Read all the angle limits h, _, l, _ = self.io.call_remote_api('simxGetObjectGroupData', remote_api.sim_object_joint_type, 16, streaming=True) limits4handle = {hh: (ll, lr) for hh, ll, lr in zip(h, l[::2], l[1::2])} for m in self.motors: tmax = torque_max[m.model] # Read values from V-REP and set them to the Motor p = round( rad2deg(self.io.get_motor_position(motor_name=self._motor_name(m))), 1) m.__dict__['present_position'] = p l = 100. * self.io.get_motor_force(motor_name=self._motor_name(m)) / tmax m.__dict__['present_load'] = l m.__dict__['_load_fifo'].append(abs(l)) m.__dict__['present_temperature'] = 25 + \ round(2.5 * sum(m.__dict__['_load_fifo']) / len(m.__dict__['_load_fifo']), 1) ll, lr = limits4handle[self.io._object_handles[self._motor_name(m)]] m.__dict__['lower_limit'] = rad2deg(ll) m.__dict__['upper_limit'] = rad2deg(ll) + rad2deg(lr) # Send new values from Motor to V-REP p = deg2rad(round(m.__dict__['goal_position'], 1)) self.io.set_motor_position(motor_name=self._motor_name(m), position=p) t = m.__dict__['torque_limit'] * tmax / 100. if m.__dict__['compliant']: t = 0. self.io.set_motor_force(motor_name=self._motor_name(m), force=t)
python
def update(self): h, _, l, _ = self.io.call_remote_api('simxGetObjectGroupData', remote_api.sim_object_joint_type, 16, streaming=True) limits4handle = {hh: (ll, lr) for hh, ll, lr in zip(h, l[::2], l[1::2])} for m in self.motors: tmax = torque_max[m.model] p = round( rad2deg(self.io.get_motor_position(motor_name=self._motor_name(m))), 1) m.__dict__['present_position'] = p l = 100. * self.io.get_motor_force(motor_name=self._motor_name(m)) / tmax m.__dict__['present_load'] = l m.__dict__['_load_fifo'].append(abs(l)) m.__dict__['present_temperature'] = 25 + \ round(2.5 * sum(m.__dict__['_load_fifo']) / len(m.__dict__['_load_fifo']), 1) ll, lr = limits4handle[self.io._object_handles[self._motor_name(m)]] m.__dict__['lower_limit'] = rad2deg(ll) m.__dict__['upper_limit'] = rad2deg(ll) + rad2deg(lr) p = deg2rad(round(m.__dict__['goal_position'], 1)) self.io.set_motor_position(motor_name=self._motor_name(m), position=p) t = m.__dict__['torque_limit'] * tmax / 100. if m.__dict__['compliant']: t = 0. self.io.set_motor_force(motor_name=self._motor_name(m), force=t)
[ "def", "update", "(", "self", ")", ":", "# Read all the angle limits", "h", ",", "_", ",", "l", ",", "_", "=", "self", ".", "io", ".", "call_remote_api", "(", "'simxGetObjectGroupData'", ",", "remote_api", ".", "sim_object_joint_type", ",", "16", ",", "streaming", "=", "True", ")", "limits4handle", "=", "{", "hh", ":", "(", "ll", ",", "lr", ")", "for", "hh", ",", "ll", ",", "lr", "in", "zip", "(", "h", ",", "l", "[", ":", ":", "2", "]", ",", "l", "[", "1", ":", ":", "2", "]", ")", "}", "for", "m", "in", "self", ".", "motors", ":", "tmax", "=", "torque_max", "[", "m", ".", "model", "]", "# Read values from V-REP and set them to the Motor", "p", "=", "round", "(", "rad2deg", "(", "self", ".", "io", ".", "get_motor_position", "(", "motor_name", "=", "self", ".", "_motor_name", "(", "m", ")", ")", ")", ",", "1", ")", "m", ".", "__dict__", "[", "'present_position'", "]", "=", "p", "l", "=", "100.", "*", "self", ".", "io", ".", "get_motor_force", "(", "motor_name", "=", "self", ".", "_motor_name", "(", "m", ")", ")", "/", "tmax", "m", ".", "__dict__", "[", "'present_load'", "]", "=", "l", "m", ".", "__dict__", "[", "'_load_fifo'", "]", ".", "append", "(", "abs", "(", "l", ")", ")", "m", ".", "__dict__", "[", "'present_temperature'", "]", "=", "25", "+", "round", "(", "2.5", "*", "sum", "(", "m", ".", "__dict__", "[", "'_load_fifo'", "]", ")", "/", "len", "(", "m", ".", "__dict__", "[", "'_load_fifo'", "]", ")", ",", "1", ")", "ll", ",", "lr", "=", "limits4handle", "[", "self", ".", "io", ".", "_object_handles", "[", "self", ".", "_motor_name", "(", "m", ")", "]", "]", "m", ".", "__dict__", "[", "'lower_limit'", "]", "=", "rad2deg", "(", "ll", ")", "m", ".", "__dict__", "[", "'upper_limit'", "]", "=", "rad2deg", "(", "ll", ")", "+", "rad2deg", "(", "lr", ")", "# Send new values from Motor to V-REP", "p", "=", "deg2rad", "(", "round", "(", "m", ".", "__dict__", "[", "'goal_position'", "]", ",", "1", ")", ")", "self", ".", "io", ".", "set_motor_position", "(", "motor_name", "=", "self", ".", "_motor_name", "(", "m", ")", ",", "position", "=", "p", ")", "t", "=", "m", ".", "__dict__", "[", "'torque_limit'", "]", "*", "tmax", "/", "100.", "if", "m", ".", "__dict__", "[", "'compliant'", "]", ":", "t", "=", "0.", "self", ".", "io", ".", "set_motor_force", "(", "motor_name", "=", "self", ".", "_motor_name", "(", "m", ")", ",", "force", "=", "t", ")" ]
Synchronization update loop. At each update all motor positions are read from V-REP and set on the motors. The motors' target positions are also sent to V-REP.
[ "Synchronization", "update", "loop", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L41-L82
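The update loop above fakes the motor temperature from a sliding window of absolute load values: 25 plus 2.5 times the mean of the last 200 samples. A tiny standalone check of that arithmetic:

    from collections import deque

    load_fifo = deque(200 * [1], maxlen=200)        # same seed values as the controller
    for _ in range(50):                             # burst of 50 samples at 80 % load
        load_fifo.append(80.0)

    temperature = 25 + round(2.5 * sum(load_fifo) / len(load_fifo), 1)
    print(temperature)                              # 2.5 * (150*1 + 50*80) / 200 = 51.9, so this prints 76.9

With the window saturated at 100 % load the value tops out at 25 + 250 = 275, so it is a load indicator rather than a physical temperature model.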
poppy-project/pypot
pypot/vrep/controller.py
VrepObjectTracker.update
def update(self): """ Updates the position and orientation of the tracked objects. """ for s in self.sensors: s.position = self.io.get_object_position(object_name=s.name) s.orientation = self.io.get_object_orientation(object_name=s.name)
python
def update(self): for s in self.sensors: s.position = self.io.get_object_position(object_name=s.name) s.orientation = self.io.get_object_orientation(object_name=s.name)
[ "def", "update", "(", "self", ")", ":", "for", "s", "in", "self", ".", "sensors", ":", "s", ".", "position", "=", "self", ".", "io", ".", "get_object_position", "(", "object_name", "=", "s", ".", "name", ")", "s", ".", "orientation", "=", "self", ".", "io", ".", "get_object_orientation", "(", "object_name", "=", "s", ".", "name", ")" ]
Updates the position and orientation of the tracked objects.
[ "Updates", "the", "position", "and", "orientation", "of", "the", "tracked", "objects", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L149-L153
poppy-project/pypot
pypot/vrep/controller.py
VrepCollisionTracker.update
def update(self): """ Update the state of the collision detectors. """ for s in self.sensors: s.colliding = self.io.get_collision_state(collision_name=s.name)
python
def update(self): for s in self.sensors: s.colliding = self.io.get_collision_state(collision_name=s.name)
[ "def", "update", "(", "self", ")", ":", "for", "s", "in", "self", ".", "sensors", ":", "s", ".", "colliding", "=", "self", ".", "io", ".", "get_collision_state", "(", "collision_name", "=", "s", ".", "name", ")" ]
Update the state of the collision detectors.
[ "Update", "the", "state", "of", "the", "collision", "detectors", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L180-L184
poppy-project/pypot
pypot/kinematics.py
Link.get_transformation_matrix
def get_transformation_matrix(self, theta): """ Computes the homogeneous transformation matrix for this link. """ ct = numpy.cos(theta + self.theta) st = numpy.sin(theta + self.theta) ca = numpy.cos(self.alpha) sa = numpy.sin(self.alpha) return numpy.matrix(((ct, -st * ca, st * sa, self.a * ct), (st, ct * ca, -ct * sa, self.a * st), (0, sa, ca, self.d), (0, 0, 0, 1)))
python
def get_transformation_matrix(self, theta): ct = numpy.cos(theta + self.theta) st = numpy.sin(theta + self.theta) ca = numpy.cos(self.alpha) sa = numpy.sin(self.alpha) return numpy.matrix(((ct, -st * ca, st * sa, self.a * ct), (st, ct * ca, -ct * sa, self.a * st), (0, sa, ca, self.d), (0, 0, 0, 1)))
[ "def", "get_transformation_matrix", "(", "self", ",", "theta", ")", ":", "ct", "=", "numpy", ".", "cos", "(", "theta", "+", "self", ".", "theta", ")", "st", "=", "numpy", ".", "sin", "(", "theta", "+", "self", ".", "theta", ")", "ca", "=", "numpy", ".", "cos", "(", "self", ".", "alpha", ")", "sa", "=", "numpy", ".", "sin", "(", "self", ".", "alpha", ")", "return", "numpy", ".", "matrix", "(", "(", "(", "ct", ",", "-", "st", "*", "ca", ",", "st", "*", "sa", ",", "self", ".", "a", "*", "ct", ")", ",", "(", "st", ",", "ct", "*", "ca", ",", "-", "ct", "*", "sa", ",", "self", ".", "a", "*", "st", ")", ",", "(", "0", ",", "sa", ",", "ca", ",", "self", ".", "d", ")", ",", "(", "0", ",", "0", ",", "0", ",", "1", ")", ")", ")" ]
Computes the homogeneous transformation matrix for this link.
[ "Computes", "the", "homogeneous", "transformation", "matrix", "for", "this", "link", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/kinematics.py#L27-L37
poppy-project/pypot
pypot/kinematics.py
Chain.forward_kinematics
def forward_kinematics(self, q): """ Computes the homogeneous transformation matrix of the end effector of the chain. :param vector q: vector of the joint angles (theta 1, theta 2, ..., theta n) """ q = numpy.array(q).flatten() if len(q) != len(self.links): raise ValueError('q must contain as element as the number of links') tr = self.base.copy() l = [] for link, theta in zip(self.links, q): tr = tr * link.get_transformation_matrix(theta) l.append(tr) tr = tr * self.tool l.append(tr) return tr, numpy.asarray(l)
python
def forward_kinematics(self, q): q = numpy.array(q).flatten() if len(q) != len(self.links): raise ValueError('q must contain as element as the number of links') tr = self.base.copy() l = [] for link, theta in zip(self.links, q): tr = tr * link.get_transformation_matrix(theta) l.append(tr) tr = tr * self.tool l.append(tr) return tr, numpy.asarray(l)
[ "def", "forward_kinematics", "(", "self", ",", "q", ")", ":", "q", "=", "numpy", ".", "array", "(", "q", ")", ".", "flatten", "(", ")", "if", "len", "(", "q", ")", "!=", "len", "(", "self", ".", "links", ")", ":", "raise", "ValueError", "(", "'q must contain as element as the number of links'", ")", "tr", "=", "self", ".", "base", ".", "copy", "(", ")", "l", "=", "[", "]", "for", "link", ",", "theta", "in", "zip", "(", "self", ".", "links", ",", "q", ")", ":", "tr", "=", "tr", "*", "link", ".", "get_transformation_matrix", "(", "theta", ")", "l", ".", "append", "(", "tr", ")", "tr", "=", "tr", "*", "self", ".", "tool", "l", ".", "append", "(", "tr", ")", "return", "tr", ",", "numpy", ".", "asarray", "(", "l", ")" ]
Computes the homogeneous transformation matrix of the end effector of the chain. :param vector q: vector of the joint angles (theta 1, theta 2, ..., theta n)
[ "Computes", "the", "homogeneous", "transformation", "matrix", "of", "the", "end", "effector", "of", "the", "chain", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/kinematics.py#L51-L73
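A worked standalone example of the composition the two kinematics records implement: each link contributes one Denavit-Hartenberg matrix and the end-effector pose is their product (base and tool taken as the identity here). The 2-link planar arm parameters are purely illustrative:

    import numpy as np

    def dh_matrix(theta, d, a, alpha):
        """One link's homogeneous transform, same layout as Link.get_transformation_matrix."""
        ct, st = np.cos(theta), np.sin(theta)
        ca, sa = np.cos(alpha), np.sin(alpha)
        return np.array([[ct, -st * ca,  st * sa, a * ct],
                         [st,  ct * ca, -ct * sa, a * st],
                         [0.,       sa,       ca,      d],
                         [0.,       0.,       0.,     1.]])

    # Planar 2-link arm: link lengths 0.10 m and 0.08 m, both joints around the same axis.
    links = [dict(d=0., a=0.10, alpha=0.), dict(d=0., a=0.08, alpha=0.)]
    q = np.deg2rad([30., 45.])

    tr = np.eye(4)                                   # base frame = identity
    for link, theta in zip(links, q):
        tr = tr.dot(dh_matrix(theta, **link))        # accumulate link transforms

    print(tr[:3, 3])  # end effector at (0.10*cos30 + 0.08*cos75, 0.10*sin30 + 0.08*sin75, 0)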
poppy-project/pypot
pypot/kinematics.py
Chain.inverse_kinematics
def inverse_kinematics(self, end_effector_transformation, q=None, max_iter=1000, tolerance=0.05, mask=numpy.ones(6), use_pinv=False): """ Computes the joint angles corresponding to the end effector transformation. :param end_effector_transformation: the end effector homogeneous transformation matrix :param vector q: initial estimate of the joint angles :param int max_iter: maximum number of iteration :param float tolerance: tolerance before convergence :param mask: specify the cartesian DOF that will be ignore (in the case of a chain with less than 6 joints). :rtype: vector of the joint angles (theta 1, theta 2, ..., theta n) """ if q is None: q = numpy.zeros((len(self.links), 1)) q = numpy.matrix(q.reshape(-1, 1)) best_e = numpy.ones(6) * numpy.inf best_q = None alpha = 1.0 for _ in range(max_iter): e = numpy.multiply(transform_difference(self.forward_kinematics(q)[0], end_effector_transformation), mask) d = numpy.linalg.norm(e) if d < numpy.linalg.norm(best_e): best_e = e.copy() best_q = q.copy() alpha *= 2.0 ** (1.0 / 8.0) else: q = best_q.copy() e = best_e.copy() alpha *= 0.5 if use_pinv: dq = numpy.linalg.pinv(self._jacob0(q)) * e.reshape((-1, 1)) else: dq = self._jacob0(q).T * e.reshape((-1, 1)) q += alpha * dq # d = numpy.linalg.norm(dq) if d < tolerance: return q else: raise ValueError('could not converge d={}'.format(numpy.linalg.norm(best_e)))
python
def inverse_kinematics(self, end_effector_transformation, q=None, max_iter=1000, tolerance=0.05, mask=numpy.ones(6), use_pinv=False): if q is None: q = numpy.zeros((len(self.links), 1)) q = numpy.matrix(q.reshape(-1, 1)) best_e = numpy.ones(6) * numpy.inf best_q = None alpha = 1.0 for _ in range(max_iter): e = numpy.multiply(transform_difference(self.forward_kinematics(q)[0], end_effector_transformation), mask) d = numpy.linalg.norm(e) if d < numpy.linalg.norm(best_e): best_e = e.copy() best_q = q.copy() alpha *= 2.0 ** (1.0 / 8.0) else: q = best_q.copy() e = best_e.copy() alpha *= 0.5 if use_pinv: dq = numpy.linalg.pinv(self._jacob0(q)) * e.reshape((-1, 1)) else: dq = self._jacob0(q).T * e.reshape((-1, 1)) q += alpha * dq if d < tolerance: return q else: raise ValueError('could not converge d={}'.format(numpy.linalg.norm(best_e)))
[ "def", "inverse_kinematics", "(", "self", ",", "end_effector_transformation", ",", "q", "=", "None", ",", "max_iter", "=", "1000", ",", "tolerance", "=", "0.05", ",", "mask", "=", "numpy", ".", "ones", "(", "6", ")", ",", "use_pinv", "=", "False", ")", ":", "if", "q", "is", "None", ":", "q", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "self", ".", "links", ")", ",", "1", ")", ")", "q", "=", "numpy", ".", "matrix", "(", "q", ".", "reshape", "(", "-", "1", ",", "1", ")", ")", "best_e", "=", "numpy", ".", "ones", "(", "6", ")", "*", "numpy", ".", "inf", "best_q", "=", "None", "alpha", "=", "1.0", "for", "_", "in", "range", "(", "max_iter", ")", ":", "e", "=", "numpy", ".", "multiply", "(", "transform_difference", "(", "self", ".", "forward_kinematics", "(", "q", ")", "[", "0", "]", ",", "end_effector_transformation", ")", ",", "mask", ")", "d", "=", "numpy", ".", "linalg", ".", "norm", "(", "e", ")", "if", "d", "<", "numpy", ".", "linalg", ".", "norm", "(", "best_e", ")", ":", "best_e", "=", "e", ".", "copy", "(", ")", "best_q", "=", "q", ".", "copy", "(", ")", "alpha", "*=", "2.0", "**", "(", "1.0", "/", "8.0", ")", "else", ":", "q", "=", "best_q", ".", "copy", "(", ")", "e", "=", "best_e", ".", "copy", "(", ")", "alpha", "*=", "0.5", "if", "use_pinv", ":", "dq", "=", "numpy", ".", "linalg", ".", "pinv", "(", "self", ".", "_jacob0", "(", "q", ")", ")", "*", "e", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", "else", ":", "dq", "=", "self", ".", "_jacob0", "(", "q", ")", ".", "T", "*", "e", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", "q", "+=", "alpha", "*", "dq", "# d = numpy.linalg.norm(dq)", "if", "d", "<", "tolerance", ":", "return", "q", "else", ":", "raise", "ValueError", "(", "'could not converge d={}'", ".", "format", "(", "numpy", ".", "linalg", ".", "norm", "(", "best_e", ")", ")", ")" ]
Computes the joint angles corresponding to the end effector transformation. :param end_effector_transformation: the end effector homogeneous transformation matrix :param vector q: initial estimate of the joint angles :param int max_iter: maximum number of iterations :param float tolerance: tolerance before convergence :param mask: specify the cartesian DOF that will be ignored (in the case of a chain with less than 6 joints). :rtype: vector of the joint angles (theta 1, theta 2, ..., theta n)
[ "Computes", "the", "joint", "angles", "corresponding", "to", "the", "end", "effector", "transformation", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/kinematics.py#L75-L122
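The solver above walks the joint angles down the pose error using either the Jacobian transpose or its pseudo-inverse. A self-contained sketch of the pseudo-inverse variant (the use_pinv=True branch) on the same illustrative 2-link planar arm, with an analytic Jacobian instead of the numerical one used by Chain:

    import numpy as np

    A1, A2 = 0.10, 0.08                              # link lengths of the toy arm

    def fk(q):
        """Position-only forward kinematics of the planar arm."""
        return np.array([A1 * np.cos(q[0]) + A2 * np.cos(q[0] + q[1]),
                         A1 * np.sin(q[0]) + A2 * np.sin(q[0] + q[1])])

    def jacobian(q):
        s1, c1 = np.sin(q[0]), np.cos(q[0])
        s12, c12 = np.sin(q[0] + q[1]), np.cos(q[0] + q[1])
        return np.array([[-A1 * s1 - A2 * s12, -A2 * s12],
                         [ A1 * c1 + A2 * c12,  A2 * c12]])

    def inverse_kinematics(target, q, max_iter=200, tolerance=1e-5):
        for _ in range(max_iter):
            e = target - fk(q)                               # cartesian error
            if np.linalg.norm(e) < tolerance:
                return q
            q = q + np.linalg.pinv(jacobian(q)).dot(e)       # pseudo-inverse step
        raise ValueError('could not converge')

    q = inverse_kinematics(np.array([0.12, 0.05]), q=np.array([0.3, 0.5]))
    print(np.rad2deg(q), fk(q))                              # fk(q) should be ~ (0.12, 0.05)

The adaptive step size and the 6-component error mask of the real solver are omitted here for brevity.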
poppy-project/pypot
pypot/primitive/utils.py
Sinus.update
def update(self): """ Compute the sin(t) where t is the elapsed time since the primitive has been started. """ pos = self._amp * numpy.sin(self._freq * 2.0 * numpy.pi * self.elapsed_time + self._phase * numpy.pi / 180.0) + self._offset for m in self.motor_list: m.goal_position = pos
python
def update(self): pos = self._amp * numpy.sin(self._freq * 2.0 * numpy.pi * self.elapsed_time + self._phase * numpy.pi / 180.0) + self._offset for m in self.motor_list: m.goal_position = pos
[ "def", "update", "(", "self", ")", ":", "pos", "=", "self", ".", "_amp", "*", "numpy", ".", "sin", "(", "self", ".", "_freq", "*", "2.0", "*", "numpy", ".", "pi", "*", "self", ".", "elapsed_time", "+", "self", ".", "_phase", "*", "numpy", ".", "pi", "/", "180.0", ")", "+", "self", ".", "_offset", "for", "m", "in", "self", ".", "motor_list", ":", "m", ".", "goal_position", "=", "pos" ]
Compute the sin(t) where t is the elapsed time since the primitive has been started.
[ "Compute", "the", "sin", "(", "t", ")", "where", "t", "is", "the", "elapsed", "time", "since", "the", "primitive", "has", "been", "started", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/primitive/utils.py#L26-L32
poppy-project/pypot
pypot/dynamixel/__init__.py
_get_available_ports
def _get_available_ports(): """ Tries to find the available serial ports on your system. """ if platform.system() == 'Darwin': return glob.glob('/dev/tty.usb*') elif platform.system() == 'Linux': return glob.glob('/dev/ttyACM*') + glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') elif sys.platform.lower() == 'cygwin': return glob.glob('/dev/com*') elif platform.system() == 'Windows': import _winreg import itertools ports = [] path = 'HARDWARE\\DEVICEMAP\\SERIALCOMM' key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, path) for i in itertools.count(): try: ports.append(str(_winreg.EnumValue(key, i)[1])) except WindowsError: return ports else: raise EnvironmentError('{} is an unsupported platform, cannot find serial ports !'.format(platform.system())) return []
python
def _get_available_ports(): if platform.system() == 'Darwin': return glob.glob('/dev/tty.usb*') elif platform.system() == 'Linux': return glob.glob('/dev/ttyACM*') + glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') elif sys.platform.lower() == 'cygwin': return glob.glob('/dev/com*') elif platform.system() == 'Windows': import _winreg import itertools ports = [] path = 'HARDWARE\\DEVICEMAP\\SERIALCOMM' key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, path) for i in itertools.count(): try: ports.append(str(_winreg.EnumValue(key, i)[1])) except WindowsError: return ports else: raise EnvironmentError('{} is an unsupported platform, cannot find serial ports !'.format(platform.system())) return []
[ "def", "_get_available_ports", "(", ")", ":", "if", "platform", ".", "system", "(", ")", "==", "'Darwin'", ":", "return", "glob", ".", "glob", "(", "'/dev/tty.usb*'", ")", "elif", "platform", ".", "system", "(", ")", "==", "'Linux'", ":", "return", "glob", ".", "glob", "(", "'/dev/ttyACM*'", ")", "+", "glob", ".", "glob", "(", "'/dev/ttyUSB*'", ")", "+", "glob", ".", "glob", "(", "'/dev/ttyAMA*'", ")", "elif", "sys", ".", "platform", ".", "lower", "(", ")", "==", "'cygwin'", ":", "return", "glob", ".", "glob", "(", "'/dev/com*'", ")", "elif", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "import", "_winreg", "import", "itertools", "ports", "=", "[", "]", "path", "=", "'HARDWARE\\\\DEVICEMAP\\\\SERIALCOMM'", "key", "=", "_winreg", ".", "OpenKey", "(", "_winreg", ".", "HKEY_LOCAL_MACHINE", ",", "path", ")", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "try", ":", "ports", ".", "append", "(", "str", "(", "_winreg", ".", "EnumValue", "(", "key", ",", "i", ")", "[", "1", "]", ")", ")", "except", "WindowsError", ":", "return", "ports", "else", ":", "raise", "EnvironmentError", "(", "'{} is an unsupported platform, cannot find serial ports !'", ".", "format", "(", "platform", ".", "system", "(", ")", ")", ")", "return", "[", "]" ]
Tries to find the available serial ports on your system.
[ "Tries", "to", "find", "the", "available", "serial", "ports", "on", "your", "system", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/__init__.py#L20-L46
poppy-project/pypot
pypot/dynamixel/__init__.py
get_port_vendor_info
def get_port_vendor_info(port=None): """ Return vendor informations of a usb2serial device. It may depends on the Operating System. :param string port: port of the usb2serial device :Example: Result with a USB2Dynamixel on Linux: In [1]: import pypot.dynamixel In [2]: pypot.dynamixel.get_port_vendor_info('/dev/ttyUSB0') Out[2]: 'USB VID:PID=0403:6001 SNR=A7005LKE' """ port_info_dict = dict((x[0], x[2]) for x in serial.tools.list_ports.comports()) return port_info_dict[port] if port is not None else port_info_dict
python
def get_port_vendor_info(port=None): port_info_dict = dict((x[0], x[2]) for x in serial.tools.list_ports.comports()) return port_info_dict[port] if port is not None else port_info_dict
[ "def", "get_port_vendor_info", "(", "port", "=", "None", ")", ":", "port_info_dict", "=", "dict", "(", "(", "x", "[", "0", "]", ",", "x", "[", "2", "]", ")", "for", "x", "in", "serial", ".", "tools", ".", "list_ports", ".", "comports", "(", ")", ")", "return", "port_info_dict", "[", "port", "]", "if", "port", "is", "not", "None", "else", "port_info_dict" ]
Return vendor information of a usb2serial device. It may depend on the Operating System. :param string port: port of the usb2serial device :Example: Result with a USB2Dynamixel on Linux: In [1]: import pypot.dynamixel In [2]: pypot.dynamixel.get_port_vendor_info('/dev/ttyUSB0') Out[2]: 'USB VID:PID=0403:6001 SNR=A7005LKE'
[ "Return", "vendor", "information", "of", "a", "usb2serial", "device", ".", "It", "may", "depend", "on", "the", "Operating", "System", ".", ":", "param", "string", "port", ":", "port", "of", "the", "usb2serial", "device" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/__init__.py#L58-L71
poppy-project/pypot
pypot/dynamixel/__init__.py
find_port
def find_port(ids, strict=True): """ Find the port with the specified attached motor ids. :param list ids: list of motor ids to find :param bool strict: specify if all ids should be find (when set to False, only half motor must be found) .. warning:: If two (or more) ports are attached to the same list of motor ids the first match will be returned. """ ids_founds = [] for port in get_available_ports(): for DxlIOCls in (DxlIO, Dxl320IO): try: with DxlIOCls(port) as dxl: _ids_founds = dxl.scan(ids) ids_founds += _ids_founds if strict and len(_ids_founds) == len(ids): return port if not strict and len(_ids_founds) >= len(ids) / 2: logger.warning('Missing ids: {}'.format(ids, list(set(ids) - set(_ids_founds)))) return port if len(ids_founds) > 0: logger.warning('Port:{} ids found:{}'.format(port, _ids_founds)) except DxlError: logger.warning('DxlError on port {}'.format(port)) continue raise IndexError('No suitable port found for ids {}. These ids are missing {} !'.format( ids, list(set(ids) - set(ids_founds))))
python
def find_port(ids, strict=True): ids_founds = [] for port in get_available_ports(): for DxlIOCls in (DxlIO, Dxl320IO): try: with DxlIOCls(port) as dxl: _ids_founds = dxl.scan(ids) ids_founds += _ids_founds if strict and len(_ids_founds) == len(ids): return port if not strict and len(_ids_founds) >= len(ids) / 2: logger.warning('Missing ids: {}'.format(ids, list(set(ids) - set(_ids_founds)))) return port if len(ids_founds) > 0: logger.warning('Port:{} ids found:{}'.format(port, _ids_founds)) except DxlError: logger.warning('DxlError on port {}'.format(port)) continue raise IndexError('No suitable port found for ids {}. These ids are missing {} !'.format( ids, list(set(ids) - set(ids_founds))))
[ "def", "find_port", "(", "ids", ",", "strict", "=", "True", ")", ":", "ids_founds", "=", "[", "]", "for", "port", "in", "get_available_ports", "(", ")", ":", "for", "DxlIOCls", "in", "(", "DxlIO", ",", "Dxl320IO", ")", ":", "try", ":", "with", "DxlIOCls", "(", "port", ")", "as", "dxl", ":", "_ids_founds", "=", "dxl", ".", "scan", "(", "ids", ")", "ids_founds", "+=", "_ids_founds", "if", "strict", "and", "len", "(", "_ids_founds", ")", "==", "len", "(", "ids", ")", ":", "return", "port", "if", "not", "strict", "and", "len", "(", "_ids_founds", ")", ">=", "len", "(", "ids", ")", "/", "2", ":", "logger", ".", "warning", "(", "'Missing ids: {}'", ".", "format", "(", "ids", ",", "list", "(", "set", "(", "ids", ")", "-", "set", "(", "_ids_founds", ")", ")", ")", ")", "return", "port", "if", "len", "(", "ids_founds", ")", ">", "0", ":", "logger", ".", "warning", "(", "'Port:{} ids found:{}'", ".", "format", "(", "port", ",", "_ids_founds", ")", ")", "except", "DxlError", ":", "logger", ".", "warning", "(", "'DxlError on port {}'", ".", "format", "(", "port", ")", ")", "continue", "raise", "IndexError", "(", "'No suitable port found for ids {}. These ids are missing {} !'", ".", "format", "(", "ids", ",", "list", "(", "set", "(", "ids", ")", "-", "set", "(", "ids_founds", ")", ")", ")", ")" ]
Find the port with the specified attached motor ids. :param list ids: list of motor ids to find :param bool strict: specify if all ids should be found (when set to False, only half of the motors must be found) .. warning:: If two (or more) ports are attached to the same list of motor ids the first match will be returned.
[ "Find", "the", "port", "with", "the", "specified", "attached", "motor", "ids", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/__init__.py#L74-L106
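A minimal usage sketch for find_port, assuming pypot is installed and that motors with ids 1-4 are wired to one of the local serial ports; the ids themselves are only illustrative:

import pypot.dynamixel

# Look for the serial port on which all of these motor ids answer.
ids = [1, 2, 3, 4]  # illustrative ids; use the ids actually configured on the bus

try:
    port = pypot.dynamixel.find_port(ids, strict=True)
    print('All motors found on {}'.format(port))
except IndexError:
    # With strict=False a port is accepted as soon as at least half of the ids answer.
    port = pypot.dynamixel.find_port(ids, strict=False)
    print('Partial match on {}'.format(port))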
poppy-project/pypot
pypot/dynamixel/__init__.py
autodetect_robot
def autodetect_robot(): """ Creates a :class:`~pypot.robot.robot.Robot` by detecting dynamixel motors on all available ports. """ motor_controllers = [] for port in get_available_ports(): for DxlIOCls in (DxlIO, Dxl320IO): dxl_io = DxlIOCls(port) ids = dxl_io.scan() if not ids: dxl_io.close() continue models = dxl_io.get_model(ids) motorcls = { 'MX': DxlMXMotor, 'RX': DxlAXRXMotor, 'AX': DxlAXRXMotor, 'XL': DxlXL320Motor, 'SR': DxlSRMotor, } motors = [motorcls[model[:2]](id, model=model) for id, model in zip(ids, models)] c = BaseDxlController(dxl_io, motors) motor_controllers.append(c) break return Robot(motor_controllers)
python
def autodetect_robot(): motor_controllers = [] for port in get_available_ports(): for DxlIOCls in (DxlIO, Dxl320IO): dxl_io = DxlIOCls(port) ids = dxl_io.scan() if not ids: dxl_io.close() continue models = dxl_io.get_model(ids) motorcls = { 'MX': DxlMXMotor, 'RX': DxlAXRXMotor, 'AX': DxlAXRXMotor, 'XL': DxlXL320Motor, 'SR': DxlSRMotor, } motors = [motorcls[model[:2]](id, model=model) for id, model in zip(ids, models)] c = BaseDxlController(dxl_io, motors) motor_controllers.append(c) break return Robot(motor_controllers)
[ "def", "autodetect_robot", "(", ")", ":", "motor_controllers", "=", "[", "]", "for", "port", "in", "get_available_ports", "(", ")", ":", "for", "DxlIOCls", "in", "(", "DxlIO", ",", "Dxl320IO", ")", ":", "dxl_io", "=", "DxlIOCls", "(", "port", ")", "ids", "=", "dxl_io", ".", "scan", "(", ")", "if", "not", "ids", ":", "dxl_io", ".", "close", "(", ")", "continue", "models", "=", "dxl_io", ".", "get_model", "(", "ids", ")", "motorcls", "=", "{", "'MX'", ":", "DxlMXMotor", ",", "'RX'", ":", "DxlAXRXMotor", ",", "'AX'", ":", "DxlAXRXMotor", ",", "'XL'", ":", "DxlXL320Motor", ",", "'SR'", ":", "DxlSRMotor", ",", "}", "motors", "=", "[", "motorcls", "[", "model", "[", ":", "2", "]", "]", "(", "id", ",", "model", "=", "model", ")", "for", "id", ",", "model", "in", "zip", "(", "ids", ",", "models", ")", "]", "c", "=", "BaseDxlController", "(", "dxl_io", ",", "motors", ")", "motor_controllers", ".", "append", "(", "c", ")", "break", "return", "Robot", "(", "motor_controllers", ")" ]
Creates a :class:`~pypot.robot.robot.Robot` by detecting dynamixel motors on all available ports.
[ "Creates", "a", ":", "class", ":", "~pypot", ".", "robot", ".", "robot", ".", "Robot", "by", "detecting", "dynamixel", "motors", "on", "all", "available", "ports", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/__init__.py#L109-L139
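A short sketch of how autodetect_robot might be called; it assumes at least one supported Dynamixel bus is plugged in and that the standard DxlMotor attributes (id, model) are available on the detected motors:

import pypot.dynamixel

# Scan every available port, build one controller per bus and wrap them in a Robot.
robot = pypot.dynamixel.autodetect_robot()

for m in robot.motors:
    print('Found motor id={} model={}'.format(m.id, m.model))

robot.close()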
poppy-project/pypot
pypot/dynamixel/controller.py
DxlController.get_register
def get_register(self, motors, disable_sync_read=False): """ Gets the value from the specified register and sets it to the :class:`~pypot.dynamixel.motor.DxlMotor`. """ if not motors: return False ids = [m.id for m in motors] getter = getattr(self.io, 'get_{}'.format(self.regname)) values = (sum([list(getter([id])) for id in ids], []) if disable_sync_read else getter(ids)) if not values: return False for m, val in zip(motors, values): m.__dict__[self.varname] = val for m in motors: m._read_synced[self.varname].done() return True
python
def get_register(self, motors, disable_sync_read=False): if not motors: return False ids = [m.id for m in motors] getter = getattr(self.io, 'get_{}'.format(self.regname)) values = (sum([list(getter([id])) for id in ids], []) if disable_sync_read else getter(ids)) if not values: return False for m, val in zip(motors, values): m.__dict__[self.varname] = val for m in motors: m._read_synced[self.varname].done() return True
[ "def", "get_register", "(", "self", ",", "motors", ",", "disable_sync_read", "=", "False", ")", ":", "if", "not", "motors", ":", "return", "False", "ids", "=", "[", "m", ".", "id", "for", "m", "in", "motors", "]", "getter", "=", "getattr", "(", "self", ".", "io", ",", "'get_{}'", ".", "format", "(", "self", ".", "regname", ")", ")", "values", "=", "(", "sum", "(", "[", "list", "(", "getter", "(", "[", "id", "]", ")", ")", "for", "id", "in", "ids", "]", ",", "[", "]", ")", "if", "disable_sync_read", "else", "getter", "(", "ids", ")", ")", "if", "not", "values", ":", "return", "False", "for", "m", ",", "val", "in", "zip", "(", "motors", ",", "values", ")", ":", "m", ".", "__dict__", "[", "self", ".", "varname", "]", "=", "val", "for", "m", "in", "motors", ":", "m", ".", "_read_synced", "[", "self", ".", "varname", "]", ".", "done", "(", ")", "return", "True" ]
Gets the value from the specified register and sets it to the :class:`~pypot.dynamixel.motor.DxlMotor`.
[ "Gets", "the", "value", "from", "the", "specified", "register", "and", "sets", "it", "to", "the", ":", "class", ":", "~pypot", ".", "dynamixel", ".", "motor", ".", "DxlMotor", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/controller.py#L62-L83
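The read path is pure name-based dispatch: regname selects a get_<regname> method on the IO object and the result is stored under varname in each motor's __dict__. A self-contained sketch of that pattern, using made-up MockIO/MockMotor stand-ins rather than the real pypot classes:

class MockIO(object):
    # Stands in for DxlIO: exposes get_<register> methods that take a list of ids.
    def get_present_position(self, ids):
        return [10.0 * i for i in ids]

class MockMotor(object):
    def __init__(self, id):
        self.id = id

io = MockIO()
motors = [MockMotor(1), MockMotor(2)]
regname, varname = 'present_position', 'present_position'

# Same dispatch as DxlController.get_register: resolve the getter by name...
getter = getattr(io, 'get_{}'.format(regname))
values = getter([m.id for m in motors])

# ...then push each value into the motor's __dict__ under varname.
for m, val in zip(motors, values):
    m.__dict__[varname] = val

print([m.present_position for m in motors])  # [10.0, 20.0]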
poppy-project/pypot
pypot/dynamixel/controller.py
DxlController.set_register
def set_register(self, motors): """ Gets the value from :class:`~pypot.dynamixel.motor.DxlMotor` and sets it to the specified register. """ if not motors: return ids = [m.id for m in motors] values = (m.__dict__[self.varname] for m in motors) getattr(self.io, 'set_{}'.format(self.regname))(dict(zip(ids, values))) for m in motors: m._write_synced[self.varname].done()
python
def set_register(self, motors): if not motors: return ids = [m.id for m in motors] values = (m.__dict__[self.varname] for m in motors) getattr(self.io, 'set_{}'.format(self.regname))(dict(zip(ids, values))) for m in motors: m._write_synced[self.varname].done()
[ "def", "set_register", "(", "self", ",", "motors", ")", ":", "if", "not", "motors", ":", "return", "ids", "=", "[", "m", ".", "id", "for", "m", "in", "motors", "]", "values", "=", "(", "m", ".", "__dict__", "[", "self", ".", "varname", "]", "for", "m", "in", "motors", ")", "getattr", "(", "self", ".", "io", ",", "'set_{}'", ".", "format", "(", "self", ".", "regname", ")", ")", "(", "dict", "(", "zip", "(", "ids", ",", "values", ")", ")", ")", "for", "m", "in", "motors", ":", "m", ".", "_write_synced", "[", "self", ".", "varname", "]", ".", "done", "(", ")" ]
Gets the value from :class:`~pypot.dynamixel.motor.DxlMotor` and sets it to the specified register.
[ "Gets", "the", "value", "from", ":", "class", ":", "~pypot", ".", "dynamixel", ".", "motor", ".", "DxlMotor", "and", "sets", "it", "to", "the", "specified", "register", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/controller.py#L85-L95
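The write path mirrors the read path: each motor's cached value is read back from __dict__ and handed to a set_<regname> method as a single id-to-value dict, i.e. one bus call for the whole group. A minimal sketch with the same kind of hypothetical mock objects as above:

class MockIO(object):
    def set_goal_position(self, id_to_value):
        print('writing {}'.format(id_to_value))

class MockMotor(object):
    def __init__(self, id, goal_position):
        self.id = id
        self.goal_position = goal_position

io = MockIO()
motors = [MockMotor(1, 45.0), MockMotor(2, -45.0)]
regname, varname = 'goal_position', 'goal_position'

# Same dispatch as DxlController.set_register: one grouped write per register.
ids = [m.id for m in motors]
values = (m.__dict__[varname] for m in motors)
getattr(io, 'set_{}'.format(regname))(dict(zip(ids, values)))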
poppy-project/pypot
pypot/sensor/depth/sonar.py
Sonar._filter
def _filter(self, data): """ Apply a filter to reduce noisy data. Return the median value of a heap of data. """ filtered_data = [] for queue, data in zip(self._raw_data_queues, data): queue.append(data) filtered_data.append(numpy.median(queue)) return filtered_data
python
def _filter(self, data): filtered_data = [] for queue, data in zip(self._raw_data_queues, data): queue.append(data) filtered_data.append(numpy.median(queue)) return filtered_data
[ "def", "_filter", "(", "self", ",", "data", ")", ":", "filtered_data", "=", "[", "]", "for", "queue", ",", "data", "in", "zip", "(", "self", ".", "_raw_data_queues", ",", "data", ")", ":", "queue", ".", "append", "(", "data", ")", "filtered_data", ".", "append", "(", "numpy", ".", "median", "(", "queue", ")", ")", "return", "filtered_data" ]
Apply a filter to reduce noisy data. Return the median value of a heap of data.
[ "Apply", "a", "filter", "to", "reduce", "noisy", "data", "." ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/sensor/depth/sonar.py#L87-L98
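The filter is a per-sensor sliding-window median: every raw reading is appended to a bounded queue and the median of that queue is returned. A standalone sketch of the same idea, assuming a window of 5 samples (the actual queue length is configured elsewhere in the Sonar class):

from collections import deque
import numpy

# One bounded queue per sensor; maxlen gives the sliding window.
raw_data_queues = [deque(maxlen=5) for _ in range(2)]

def median_filter(data):
    filtered = []
    for queue, value in zip(raw_data_queues, data):
        queue.append(value)
        filtered.append(numpy.median(queue))
    return filtered

# A single spike on the first sensor barely moves the reported value.
for sample in ([30, 100], [31, 100], [250, 100], [32, 100], [30, 100]):
    print(median_filter(sample))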
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxGetJointPosition
def simxGetJointPosition(clientID, jointHandle, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' position = ct.c_float() return c_GetJointPosition(clientID, jointHandle, ct.byref(position), operationMode), position.value
python
def simxGetJointPosition(clientID, jointHandle, operationMode): position = ct.c_float() return c_GetJointPosition(clientID, jointHandle, ct.byref(position), operationMode), position.value
[ "def", "simxGetJointPosition", "(", "clientID", ",", "jointHandle", ",", "operationMode", ")", ":", "position", "=", "ct", ".", "c_float", "(", ")", "return", "c_GetJointPosition", "(", "clientID", ",", "jointHandle", ",", "ct", ".", "byref", "(", "position", ")", ",", "operationMode", ")", ",", "position", ".", "value" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L162-L167
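A hedged usage sketch for the joint getters. It assumes a V-REP/CoppeliaSim instance with the remote API server listening on 127.0.0.1:19997, a scene object named 'my_joint', and a loadable remoteApi shared library; all of these names and parameters are placeholders:

import time
from pypot.vrep.remoteApiBindings import vrep

client_id = vrep.simxStart('127.0.0.1', 19997, True, True, 5000, 5)

# 'my_joint' is a placeholder for a joint object defined in the loaded scene.
err, joint = vrep.simxGetObjectHandle(client_id, 'my_joint', vrep.simx_opmode_oneshot_wait)

# Usual remote-API pattern: start streaming, then read from the client-side buffer.
vrep.simxGetJointPosition(client_id, joint, vrep.simx_opmode_streaming)
time.sleep(0.1)
err, position = vrep.simxGetJointPosition(client_id, joint, vrep.simx_opmode_buffer)
print(position)

vrep.simxFinish(client_id)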
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxSetJointPosition
def simxSetJointPosition(clientID, jointHandle, position, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' return c_SetJointPosition(clientID, jointHandle, position, operationMode)
python
def simxSetJointPosition(clientID, jointHandle, position, operationMode): return c_SetJointPosition(clientID, jointHandle, position, operationMode)
[ "def", "simxSetJointPosition", "(", "clientID", ",", "jointHandle", ",", "position", ",", "operationMode", ")", ":", "return", "c_SetJointPosition", "(", "clientID", ",", "jointHandle", ",", "position", ",", "operationMode", ")" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L169-L174
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxGetJointMatrix
def simxGetJointMatrix(clientID, jointHandle, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' matrix = (ct.c_float*12)() ret = c_GetJointMatrix(clientID, jointHandle, matrix, operationMode) arr = [] for i in range(12): arr.append(matrix[i]) return ret, arr
python
def simxGetJointMatrix(clientID, jointHandle, operationMode): matrix = (ct.c_float*12)() ret = c_GetJointMatrix(clientID, jointHandle, matrix, operationMode) arr = [] for i in range(12): arr.append(matrix[i]) return ret, arr
[ "def", "simxGetJointMatrix", "(", "clientID", ",", "jointHandle", ",", "operationMode", ")", ":", "matrix", "=", "(", "ct", ".", "c_float", "*", "12", ")", "(", ")", "ret", "=", "c_GetJointMatrix", "(", "clientID", ",", "jointHandle", ",", "matrix", ",", "operationMode", ")", "arr", "=", "[", "]", "for", "i", "in", "range", "(", "12", ")", ":", "arr", ".", "append", "(", "matrix", "[", "i", "]", ")", "return", "ret", ",", "arr" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L176-L185
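simxGetJointMatrix hands back the joint's transform as a flat list of 12 floats; per the V-REP convention these are the top three rows (row-major) of the 4x4 homogeneous matrix. A short follow-up sketch, reusing the client_id and joint handle obtained as in the earlier simxGetJointPosition example:

import numpy

err, flat = vrep.simxGetJointMatrix(client_id, joint, vrep.simx_opmode_oneshot_wait)

# 12 values = rotation (3x3) plus translation (3x1), last row (0, 0, 0, 1) omitted.
matrix = numpy.array(flat).reshape(3, 4)
rotation, translation = matrix[:, :3], matrix[:, 3]
print(rotation)
print(translation)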
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxSetSphericalJointMatrix
def simxSetSphericalJointMatrix(clientID, jointHandle, matrix, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' matrix = (ct.c_float*12)(*matrix) return c_SetSphericalJointMatrix(clientID, jointHandle, matrix, operationMode)
python
def simxSetSphericalJointMatrix(clientID, jointHandle, matrix, operationMode): matrix = (ct.c_float*12)(*matrix) return c_SetSphericalJointMatrix(clientID, jointHandle, matrix, operationMode)
[ "def", "simxSetSphericalJointMatrix", "(", "clientID", ",", "jointHandle", ",", "matrix", ",", "operationMode", ")", ":", "matrix", "=", "(", "ct", ".", "c_float", "*", "12", ")", "(", "*", "matrix", ")", "return", "c_SetSphericalJointMatrix", "(", "clientID", ",", "jointHandle", ",", "matrix", ",", "operationMode", ")" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L187-L192
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxSetJointTargetVelocity
def simxSetJointTargetVelocity(clientID, jointHandle, targetVelocity, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' return c_SetJointTargetVelocity(clientID, jointHandle, targetVelocity, operationMode)
python
def simxSetJointTargetVelocity(clientID, jointHandle, targetVelocity, operationMode): return c_SetJointTargetVelocity(clientID, jointHandle, targetVelocity, operationMode)
[ "def", "simxSetJointTargetVelocity", "(", "clientID", ",", "jointHandle", ",", "targetVelocity", ",", "operationMode", ")", ":", "return", "c_SetJointTargetVelocity", "(", "clientID", ",", "jointHandle", ",", "targetVelocity", ",", "operationMode", ")" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L194-L199
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxSetJointTargetPosition
def simxSetJointTargetPosition(clientID, jointHandle, targetPosition, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' return c_SetJointTargetPosition(clientID, jointHandle, targetPosition, operationMode)
python
def simxSetJointTargetPosition(clientID, jointHandle, targetPosition, operationMode): return c_SetJointTargetPosition(clientID, jointHandle, targetPosition, operationMode)
[ "def", "simxSetJointTargetPosition", "(", "clientID", ",", "jointHandle", ",", "targetPosition", ",", "operationMode", ")", ":", "return", "c_SetJointTargetPosition", "(", "clientID", ",", "jointHandle", ",", "targetPosition", ",", "operationMode", ")" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L201-L206
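A companion sketch for the setters, with the same placeholder connection and joint name as in the getter example; for a revolute joint in position-control mode the target is expected in radians:

import math
from pypot.vrep.remoteApiBindings import vrep

client_id = vrep.simxStart('127.0.0.1', 19997, True, True, 5000, 5)
err, joint = vrep.simxGetObjectHandle(client_id, 'my_joint', vrep.simx_opmode_oneshot_wait)

# Fire-and-forget write: oneshot mode queues the command without waiting for a reply.
vrep.simxSetJointTargetPosition(client_id, joint, math.radians(45), vrep.simx_opmode_oneshot)

vrep.simxFinish(client_id)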
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
simxJointGetForce
def simxJointGetForce(clientID, jointHandle, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' force = ct.c_float() return c_GetJointForce(clientID, jointHandle, ct.byref(force), operationMode), force.value
python
def simxJointGetForce(clientID, jointHandle, operationMode): force = ct.c_float() return c_GetJointForce(clientID, jointHandle, ct.byref(force), operationMode), force.value
[ "def", "simxJointGetForce", "(", "clientID", ",", "jointHandle", ",", "operationMode", ")", ":", "force", "=", "ct", ".", "c_float", "(", ")", "return", "c_GetJointForce", "(", "clientID", ",", "jointHandle", ",", "ct", ".", "byref", "(", "force", ")", ",", "operationMode", ")", ",", "force", ".", "value" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
train
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L208-L213