Dataset schema (one record per function):

  id                int32      values 0 - 252k
  repo              string     lengths 7 - 55
  path              string     lengths 4 - 127
  func_name         string     lengths 1 - 88
  original_string   string     lengths 75 - 19.8k
  language          string     1 class
  code              string     lengths 51 - 19.8k
  code_tokens       sequence
  docstring         string     lengths 3 - 17.3k
  docstring_tokens  sequence
  sha               string     lengths 40 - 40
  url               string     lengths 87 - 242

id: 2600
repo: jborean93/smbprotocol
path: smbprotocol/open.py
func_name: SMB2QueryDirectoryRequest.unpack_response
language: python
code:

def unpack_response(file_information_class, buffer):
    """
    Pass in the buffer value from the response object to unpack it and
    return a list of query response structures for the request.

    :param buffer: The raw bytes value of the SMB2QueryDirectoryResponse
        buffer field.
    :return: List of query_info.* structures based on the
        FileInformationClass used in the initial query request.
    """
    structs = smbprotocol.query_info

    resp_structure = {
        FileInformationClass.FILE_DIRECTORY_INFORMATION: structs.FileDirectoryInformation,
        FileInformationClass.FILE_NAMES_INFORMATION: structs.FileNamesInformation,
        FileInformationClass.FILE_BOTH_DIRECTORY_INFORMATION: structs.FileBothDirectoryInformation,
        FileInformationClass.FILE_ID_BOTH_DIRECTORY_INFORMATION: structs.FileIdBothDirectoryInformation,
        FileInformationClass.FILE_FULL_DIRECTORY_INFORMATION: structs.FileFullDirectoryInformation,
        FileInformationClass.FILE_ID_FULL_DIRECTORY_INFORMATION: structs.FileIdFullDirectoryInformation,
    }[file_information_class]

    query_results = []
    current_offset = 0
    is_next = True
    while is_next:
        result = resp_structure()
        result.unpack(buffer[current_offset:])
        query_results.append(result)
        current_offset += result['next_entry_offset'].get_value()
        is_next = result['next_entry_offset'].get_value() != 0
    return query_results
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/open.py#L804-L840

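The docstring above describes consuming a response buffer entry by entry; a minimal usage sketch follows (not part of the dataset row). The import path for FileInformationClass and the 'file_name' field are assumptions for illustration.

# Hypothetical caller: unpack a FILE_DIRECTORY_INFORMATION response buffer.
from smbprotocol.open import SMB2QueryDirectoryRequest, FileInformationClass  # assumed path

raw = response['buffer'].get_value()   # bytes from an SMB2QueryDirectoryResponse
entries = SMB2QueryDirectoryRequest.unpack_response(
    FileInformationClass.FILE_DIRECTORY_INFORMATION, raw)
for entry in entries:
    print(entry['file_name'].get_value())   # field name assumed for illustration
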
id: 2601
repo: jborean93/smbprotocol
path: smbprotocol/open.py
func_name: Open.read
language: python
code:

def read(self, offset, length, min_length=0, unbuffered=False, wait=True,
         send=True):
    """
    Reads from an opened file or pipe.

    Supports out-of-band sending: call this function with send=False to
    return a tuple of (SMB2ReadRequest, receive_func) instead of sending
    the request and waiting for the response. The receive_func can be used
    to get the response from the server by passing in the Request that was
    used to send it out of band.

    :param offset: The offset to start the read of the file.
    :param length: The number of bytes to read from the offset.
    :param min_length: The minimum number of bytes to be read for a
        successful operation.
    :param unbuffered: Whether to request that the server does not cache
        the read data at intermediate layers; only valid for SMB 3.0.2 or
        newer.
    :param wait: If send=True, whether to wait for a response if
        STATUS_PENDING was received from the server, or fail.
    :param send: Whether to send the request in the same call or return
        the message to the caller and the unpack function.
    :return: A byte string of the bytes read.
    """
    if length > self.connection.max_read_size:
        raise SMBException("The requested read length %d is greater than "
                           "the maximum negotiated read size %d"
                           % (length, self.connection.max_read_size))

    read = SMB2ReadRequest()
    read['length'] = length
    read['offset'] = offset
    read['minimum_count'] = min_length
    read['file_id'] = self.file_id
    read['padding'] = b"\x50"

    if unbuffered:
        if self.connection.dialect < Dialects.SMB_3_0_2:
            raise SMBUnsupportedFeature(self.connection.dialect,
                                        Dialects.SMB_3_0_2,
                                        "SMB2_READFLAG_READ_UNBUFFERED",
                                        True)
        read['flags'].set_flag(ReadFlags.SMB2_READFLAG_READ_UNBUFFERED)

    if not send:
        return read, self._read_response

    log.info("Session: %s, Tree Connect ID: %s - sending SMB2 Read "
             "Request for file %s" % (self.tree_connect.session.username,
                                      self.tree_connect.share_name,
                                      self.file_name))
    log.debug(str(read))
    request = self.connection.send(read,
                                   self.tree_connect.session.session_id,
                                   self.tree_connect.tree_connect_id)
    return self._read_response(request, wait)
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/open.py#L1047-L1101

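The out-of-band flow the docstring describes could look like the sketch below; file_open stands for an already-created Open instance, an assumption here.

# Build the read request without sending it, then send and receive later.
read_msg, receive_func = file_open.read(0, 4096, send=False)
request = file_open.connection.send(read_msg,
                                    file_open.tree_connect.session.session_id,
                                    file_open.tree_connect.tree_connect_id)
data = receive_func(request, wait=True)   # the bytes read from the file
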
id: 2602
repo: jborean93/smbprotocol
path: smbprotocol/open.py
func_name: Open.write
language: python
code:

def write(self, data, offset=0, write_through=False, unbuffered=False,
          wait=True, send=True):
    """
    Writes data to an opened file.

    Supports out-of-band sending: call this function with send=False to
    return a tuple of (SMB2WriteRequest, receive_func) instead of sending
    the request and waiting for the response. The receive_func can be used
    to get the response from the server by passing in the Request that was
    used to send it out of band.

    :param data: The bytes data to write.
    :param offset: The offset in the file to write the bytes at.
    :param write_through: Whether written data is persisted to the
        underlying storage; not valid for SMB 2.0.2.
    :param unbuffered: Whether to request that the server does not cache
        the write data at intermediate layers; only valid for SMB 3.0.2 or
        newer.
    :param wait: If send=True, whether to wait for a response if
        STATUS_PENDING was received from the server, or fail.
    :param send: Whether to send the request in the same call or return
        the message to the caller and the unpack function.
    :return: The number of bytes written.
    """
    data_len = len(data)
    if data_len > self.connection.max_write_size:
        raise SMBException("The requested write length %d is greater than "
                           "the maximum negotiated write size %d"
                           % (data_len, self.connection.max_write_size))

    write = SMB2WriteRequest()
    write['length'] = len(data)
    write['offset'] = offset
    write['file_id'] = self.file_id
    write['buffer'] = data

    if write_through:
        if self.connection.dialect < Dialects.SMB_2_1_0:
            raise SMBUnsupportedFeature(self.connection.dialect,
                                        Dialects.SMB_2_1_0,
                                        "SMB2_WRITEFLAG_WRITE_THROUGH",
                                        True)
        write['flags'].set_flag(WriteFlags.SMB2_WRITEFLAG_WRITE_THROUGH)

    if unbuffered:
        if self.connection.dialect < Dialects.SMB_3_0_2:
            raise SMBUnsupportedFeature(self.connection.dialect,
                                        Dialects.SMB_3_0_2,
                                        "SMB2_WRITEFLAG_WRITE_UNBUFFERED",
                                        True)
        write['flags'].set_flag(WriteFlags.SMB2_WRITEFLAG_WRITE_UNBUFFERED)

    if not send:
        return write, self._write_response

    log.info("Session: %s, Tree Connect: %s - sending SMB2 Write Request "
             "for file %s" % (self.tree_connect.session.username,
                              self.tree_connect.share_name,
                              self.file_name))
    log.debug(str(write))
    request = self.connection.send(write,
                                   self.tree_connect.session.session_id,
                                   self.tree_connect.tree_connect_id)
    return self._write_response(request, wait)
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/open.py#L1114-L1176

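An in-band usage sketch under the same assumption of an existing Open instance; per the guard in the code, write_through requires at least the SMB 2.1.0 dialect.

# Ask the server to persist the data to storage before responding.
count = file_open.write(b"hello world", offset=0, write_through=True)
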
id: 2603
repo: jborean93/smbprotocol
path: smbprotocol/open.py
func_name: Open.flush
language: python
code:

def flush(self, send=True):
    """
    A command sent by the client to request that a server flush all cached
    file information for the opened file.

    Supports out-of-band sending: call this function with send=False to
    return a tuple of (SMB2FlushRequest, receive_func) instead of sending
    the request and waiting for the response. The receive_func can be used
    to get the response from the server by passing in the Request that was
    used to send it out of band.

    :param send: Whether to send the request in the same call or return
        the message to the caller and the unpack function.
    :return: The SMB2FlushResponse received from the server.
    """
    flush = SMB2FlushRequest()
    flush['file_id'] = self.file_id

    if not send:
        return flush, self._flush_response

    log.info("Session: %s, Tree Connect: %s - sending SMB2 Flush Request "
             "for file %s" % (self.tree_connect.session.username,
                              self.tree_connect.share_name,
                              self.file_name))
    log.debug(str(flush))
    request = self.connection.send(flush,
                                   self.tree_connect.session.session_id,
                                   self.tree_connect.tree_connect_id)
    return self._flush_response(request)
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/open.py#L1189-L1218

id: 2604
repo: jborean93/smbprotocol
path: smbprotocol/open.py
func_name: Open.close
language: python
code:

def close(self, get_attributes=False, send=True):
    """
    Closes an opened file.

    Supports out-of-band sending: call this function with send=False to
    return a tuple of (SMB2CloseRequest, receive_func) instead of sending
    the request and waiting for the response. The receive_func can be used
    to get the response from the server by passing in the Request that was
    used to send it out of band.

    :param get_attributes: (Bool) whether to get the latest attributes on
        the close and set them on the Open object.
    :param send: Whether to send the request in the same call or return
        the message to the caller and the unpack function.
    :return: SMB2CloseResponse message received from the server.
    """
    # it is already closed and this isn't for an out of band request
    if not self._connected and send:
        return

    close = SMB2CloseRequest()
    close['file_id'] = self.file_id
    if get_attributes:
        close['flags'] = CloseFlags.SMB2_CLOSE_FLAG_POSTQUERY_ATTRIB

    if not send:
        return close, self._close_response

    log.info("Session: %s, Tree Connect: %s - sending SMB2 Close Request "
             "for file %s" % (self.tree_connect.session.username,
                              self.tree_connect.share_name,
                              self.file_name))
    log.debug(str(close))
    request = self.connection.send(close,
                                   self.tree_connect.session.session_id,
                                   self.tree_connect.tree_connect_id)
    return self._close_response(request)
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/open.py#L1294-L1331

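A sketch of the get_attributes flag, again assuming an existing Open instance.

# Request the file's final attributes as part of the close.
response = file_open.close(get_attributes=True)
# Calling close() again returns None: the self._connected guard short-circuits.
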
id: 2605
repo: jborean93/smbprotocol
path: smbprotocol/security_descriptor.py
func_name: SIDPacket.from_string
language: python
code:

def from_string(self, sid_string):
    """
    Used to set the structure parameters based on the input string.

    :param sid_string: String of the SID in S-x-x-x-x form.
    """
    if not sid_string.startswith("S-"):
        raise ValueError("A SID string must start with S-")

    sid_entries = sid_string.split("-")
    if len(sid_entries) < 3:
        raise ValueError("A SID string must start with S and contain a "
                         "revision and identifier authority, e.g. S-1-0")

    revision = int(sid_entries[1])
    id_authority = int(sid_entries[2])
    sub_authorities = [int(i) for i in sid_entries[3:]]

    self['revision'].set_value(revision)
    self['identifier_authority'].set_value(id_authority)
    self['sub_authorities'] = sub_authorities
sha: d8eb00fbc824f97d0f4946e3f768c5e6c723499a
url: https://github.com/jborean93/smbprotocol/blob/d8eb00fbc824f97d0f4946e3f768c5e6c723499a/smbprotocol/security_descriptor.py#L160-L180

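A small worked example for from_string; S-1-1-0 is the well-known "Everyone" SID.

sid = SIDPacket()
sid.from_string("S-1-1-0")
# Resulting fields: revision=1, identifier_authority=1, sub_authorities=[0]
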
id: 2606
repo: Thibauth/python-pushover
path: pushover.py
func_name: Pushover.sounds
language: python
code:

def sounds(self):
    """Return a dictionary of sounds recognized by Pushover and that can
    be used in a notification message.
    """
    if not Pushover._SOUNDS:
        request = Request("get", SOUND_URL, {"token": self.token})
        Pushover._SOUNDS = request.answer["sounds"]
    return Pushover._SOUNDS
sha: 420bde9a2bd7981b5ea8f0c1cb8875d5f676f368
url: https://github.com/Thibauth/python-pushover/blob/420bde9a2bd7981b5ea8f0c1cb8875d5f676f368/pushover.py#L163-L170

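Record 2607 below tests "value not in self.sounds" without calling it, which suggests sounds is exposed as a property in the source; a usage sketch under that assumption, with a placeholder token.

po = Pushover("<application-token>")
print(po.sounds)   # fetched from SOUND_URL once, then served from Pushover._SOUNDS
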
id: 2607
repo: Thibauth/python-pushover
path: pushover.py
func_name: Pushover.message
language: python
code:

def message(self, user, message, **kwargs):
    """Send `message` to the user specified by `user`. It is possible
    to specify additional properties of the message by passing keyword
    arguments. The list of valid keywords is ``title, priority, sound,
    callback, timestamp, url, url_title, device, retry, expire and
    html`` which are described in the Pushover API documentation.

    For convenience, you can simply set ``timestamp=True`` to set the
    timestamp to the current timestamp.

    An image can be attached to a message by passing a file-like object
    to the `attachment` keyword argument.

    This method returns a :class:`MessageRequest` object.
    """
    payload = {"message": message, "user": user, "token": self.token}
    for key, value in kwargs.iteritems():
        if key not in Pushover.message_keywords:
            raise ValueError("{0}: invalid message parameter".format(key))
        elif key == "timestamp" and value is True:
            payload[key] = int(time.time())
        elif key == "sound" and value not in self.sounds:
            raise ValueError("{0}: invalid sound".format(value))
        else:
            payload[key] = value

    return MessageRequest(payload)
sha: 420bde9a2bd7981b5ea8f0c1cb8875d5f676f368
url: https://github.com/Thibauth/python-pushover/blob/420bde9a2bd7981b5ea8f0c1cb8875d5f676f368/pushover.py#L187-L214

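A usage sketch with placeholder credentials; note the loop uses dict.iteritems, so the source targets Python 2 as written.

po = Pushover("<application-token>")
req = po.message("<user-key>", "Backup finished - no errors.",
                 title="Backup status", timestamp=True)
# timestamp=True is replaced by int(time.time()) inside the method.
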
id: 2608
repo: Thibauth/python-pushover
path: pushover.py
func_name: Pushover.glance
language: python
code:

def glance(self, user, **kwargs):
    """Send a glance to the user. The default property is ``text``, as
    this is used on most glances; however a valid glance does not require
    text and can be constructed using any combination of valid keyword
    properties. The list of valid keywords is ``title, text, subtext,
    count, percent and device`` which are described in the Pushover
    Glance API documentation.

    This method returns a :class:`GlanceRequest` object.
    """
    payload = {"user": user, "token": self.token}
    for key, value in kwargs.iteritems():
        if key not in Pushover.glance_keywords:
            raise ValueError("{0}: invalid glance parameter".format(key))
        else:
            payload[key] = value

    return Request("post", GLANCE_URL, payload)
sha: 420bde9a2bd7981b5ea8f0c1cb8875d5f676f368
url: https://github.com/Thibauth/python-pushover/blob/420bde9a2bd7981b5ea8f0c1cb8875d5f676f368/pushover.py#L216-L234

id: 2609
repo: Wramberg/adaptfilt
path: adaptfilt/misc.py
func_name: mswe
language: python
code:

def mswe(w, v):
    """
    Calculate mean squared weight error between estimated and true filter
    coefficients, with respect to iterations.

    Parameters
    ----------
    v : array-like
        True coefficients used to generate desired signal, must be a
        one-dimensional array.
    w : array-like
        Estimated coefficients from adaptive filtering algorithm. Must be
        an N x M matrix where N is the number of iterations, and M is the
        number of filter coefficients.

    Returns
    -------
    mswe : numpy.array
        One-dimensional array containing the mean-squared weight error for
        every iteration.

    Raises
    ------
    TypeError
        If inputs have wrong dimensions.

    Note
    ----
    To use this function with the adaptive filter functions set the
    optional parameter returnCoeffs to True. This will return a
    coefficient matrix w corresponding with the input-parameter w.
    """
    # Ensure inputs are numpy arrays
    w = np.array(w)
    v = np.array(v)
    # Check dimensions
    if len(w.shape) != 2:
        raise TypeError('Estimated coefficients must be in NxM matrix')
    if len(v.shape) != 1:
        raise TypeError('Real coefficients must be in 1d array')
    # Ensure equal length between estimated and real coeffs
    N, M = w.shape
    L = v.size
    if M < L:
        v = v[:-(L-M)]
    elif M > L:
        v = np.concatenate((v, np.zeros(M-L)))
    # Calculate and return MSWE
    mswe = np.mean((w - v)**2, axis=1)
    return mswe
sha: 9bb943bb5e4162e10a8aaabfc68339b8fc06c11a
url: https://github.com/Wramberg/adaptfilt/blob/9bb943bb5e4162e10a8aaabfc68339b8fc06c11a/adaptfilt/misc.py#L7-L57

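A self-contained example of the expected shapes: w stacks one row of estimated coefficients per iteration, v holds the true filter.

import numpy as np

v = np.array([0.5, -0.2, 0.1, 0.05])      # true coefficients, M = 4
w = np.array([[0.0, 0.0, 0.0, 0.0],       # iteration 0: all-zero start
              [0.4, -0.1, 0.0, 0.0],      # iteration 1: partially adapted
              [0.5, -0.2, 0.1, 0.05]])    # iteration 2: converged
print(mswe(w, v))                         # three values decreasing to 0.0
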
id: 2610
repo: mlavin/django-selectable
path: selectable/forms/fields.py
func_name: BaseAutoCompleteField.has_changed
language: python
code:

def has_changed(self, initial, data):
    "Detects if the data was changed. This is added in 1.6."
    if initial is None and data is None:
        return False
    if data and not hasattr(data, '__iter__'):
        data = self.widget.decompress(data)
    initial = self.to_python(initial)
    data = self.to_python(data)
    if hasattr(self, '_coerce'):
        data = self._coerce(data)
    if isinstance(data, Model) and isinstance(initial, Model):
        return model_vars(data) != model_vars(initial)
    else:
        return data != initial
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/forms/fields.py#L29-L42

id: 2611
repo: mlavin/django-selectable
path: selectable/decorators.py
func_name: results_decorator
language: python
code:

def results_decorator(func):
    """
    Helper for constructing simple decorators around Lookup.results.

    func is a function which takes a request as the first parameter. If
    func returns an HttpResponse it is returned; otherwise the original
    Lookup.results is returned.
    """
    # Wrap function to maintain the original doc string, etc
    @wraps(func)
    def decorator(lookup_cls):
        # Construct a class decorator from the original function
        original = lookup_cls.results

        def inner(self, request):
            # Wrap lookup_cls.results by first calling func and checking the result
            result = func(request)
            if isinstance(result, HttpResponse):
                return result
            return original(self, request)

        # Replace original lookup_cls.results with wrapped version
        lookup_cls.results = inner
        return lookup_cls
    # Return the constructed decorator
    return decorator
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/decorators.py#L16-L39

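A sketch of building a new lookup decorator on top of results_decorator; the AJAX-only policy is hypothetical (django-selectable may ship its own variants).

from django.http import HttpResponseBadRequest

@results_decorator
def ajax_required(request):
    # Reject lookups that were not issued via XMLHttpRequest.
    if request.META.get('HTTP_X_REQUESTED_WITH') != 'XMLHttpRequest':
        return HttpResponseBadRequest()

# results_decorator turns the function into a class decorator, so
# @ajax_required can now be applied directly to a lookup class.
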
id: 2612
repo: mlavin/django-selectable
path: selectable/decorators.py
func_name: login_required
language: python
code:

def login_required(request):
    "Lookup decorator to require the user to be authenticated."
    user = getattr(request, 'user', None)
    if user is None or not user.is_authenticated:
        return HttpResponse(status=401)
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/decorators.py#L50-L54

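The record shows only the bare function; presumably the source wraps it with results_decorator (record 2611), which would let it decorate a lookup class directly. A sketch with a hypothetical lookup:

from selectable.base import LookupBase
from selectable.decorators import login_required

@login_required
class PrivateLookup(LookupBase):           # hypothetical lookup class
    def get_query(self, request, term):
        return []                          # a real lookup returns matches here
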
id: 2613
repo: mlavin/django-selectable
path: selectable/decorators.py
func_name: staff_member_required
language: python
code:

def staff_member_required(request):
    "Lookup decorator to require the user is a staff member."
    user = getattr(request, 'user', None)
    if user is None or not user.is_authenticated:
        return HttpResponse(status=401)  # Unauthorized
    elif not user.is_staff:
        return HttpResponseForbidden()
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/decorators.py#L58-L64

id: 2614
repo: mlavin/django-selectable
path: selectable/base.py
func_name: LookupBase.format_item
language: python
code:

def format_item(self, item):
    "Construct result dictionary for the match item."
    result = {
        'id': self.get_item_id(item),
        'value': self.get_item_value(item),
        'label': self.get_item_label(item),
    }
    for key in settings.SELECTABLE_ESCAPED_KEYS:
        if key in result:
            result[key] = conditional_escape(result[key])
    return result
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/base.py#L67-L77

id: 2615
repo: mlavin/django-selectable
path: selectable/base.py
func_name: LookupBase.paginate_results
language: python
code:

def paginate_results(self, results, options):
    "Return a django.core.paginator.Page of results."
    limit = options.get('limit', settings.SELECTABLE_MAX_LIMIT)
    paginator = Paginator(results, limit)
    page = options.get('page', 1)
    try:
        results = paginator.page(page)
    except (EmptyPage, InvalidPage):
        results = paginator.page(paginator.num_pages)
    return results
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/base.py#L79-L88

id: 2616
repo: mlavin/django-selectable
path: selectable/base.py
func_name: LookupBase.results
language: python
code:

def results(self, request):
    "Match results to given term and return the serialized HttpResponse."
    results = {}
    form = self.form(request.GET)
    if form.is_valid():
        options = form.cleaned_data
        term = options.get('term', '')
        raw_data = self.get_query(request, term)
        results = self.format_results(raw_data, options)
    return self.response(results)
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/base.py#L90-L99

id: 2617
repo: mlavin/django-selectable
path: selectable/base.py
func_name: LookupBase.format_results
language: python
code:

def format_results(self, raw_data, options):
    '''
    Returns a python structure that later gets serialized.

    raw_data
        full list of objects matching the search term
    options
        a dictionary of the given options
    '''
    page_data = self.paginate_results(raw_data, options)
    results = {}
    meta = options.copy()
    meta['more'] = _('Show more results')
    if page_data and page_data.has_next():
        meta['next_page'] = page_data.next_page_number()
    if page_data and page_data.has_previous():
        meta['prev_page'] = page_data.previous_page_number()
    results['data'] = [self.format_item(item) for item in page_data.object_list]
    results['meta'] = meta
    return results
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/base.py#L101-L119

id: 2618
repo: mlavin/django-selectable
path: selectable/forms/base.py
func_name: import_lookup_class
language: python
code:

def import_lookup_class(lookup_class):
    """
    Import lookup_class as a dotted path and ensure it extends LookupBase.
    """
    from selectable.base import LookupBase
    if isinstance(lookup_class, string_types):
        mod_str, cls_str = lookup_class.rsplit('.', 1)
        mod = import_module(mod_str)
        lookup_class = getattr(mod, cls_str)
    if not issubclass(lookup_class, LookupBase):
        raise TypeError('lookup_class must extend from selectable.base.LookupBase')
    return lookup_class
sha: 3d7b8db0526dd924a774c599f0c665eff98fb375
url: https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/forms/base.py#L34-L45

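Both accepted input forms, sketched with a hypothetical FruitLookup defined in myapp.lookups:

cls_a = import_lookup_class('myapp.lookups.FruitLookup')   # dotted path string
cls_b = import_lookup_class(FruitLookup)                   # class object, passed through
assert cls_a is cls_b
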
id: 2619
repo: mlavin/django-selectable
path: selectable/forms/base.py
func_name: BaseLookupForm.clean_limit
language: python
code:

def clean_limit(self):
    "Ensure the given limit does not exceed SELECTABLE_MAX_LIMIT, if defined."
    limit = self.cleaned_data.get('limit', None)
    if (settings.SELECTABLE_MAX_LIMIT is not None and
            (not limit or limit > settings.SELECTABLE_MAX_LIMIT)):
        limit = settings.SELECTABLE_MAX_LIMIT
    return limit
def clean_limit(self): "Ensure given limit is less than default if defined" limit = self.cleaned_data.get('limit', None) if (settings.SELECTABLE_MAX_LIMIT is not None and (not limit or limit > settings.SELECTABLE_MAX_LIMIT)): limit = settings.SELECTABLE_MAX_LIMIT return limit
[ "def", "clean_limit", "(", "self", ")", ":", "limit", "=", "self", ".", "cleaned_data", ".", "get", "(", "'limit'", ",", "None", ")", "if", "(", "settings", ".", "SELECTABLE_MAX_LIMIT", "is", "not", "None", "and", "(", "not", "limit", "or", "limit", ">", "settings", ".", "SELECTABLE_MAX_LIMIT", ")", ")", ":", "limit", "=", "settings", ".", "SELECTABLE_MAX_LIMIT", "return", "limit" ]
Ensure the given limit does not exceed SELECTABLE_MAX_LIMIT if it is defined
[ "Ensure", "given", "limit", "is", "less", "than", "default", "if", "defined" ]
3d7b8db0526dd924a774c599f0c665eff98fb375
https://github.com/mlavin/django-selectable/blob/3d7b8db0526dd924a774c599f0c665eff98fb375/selectable/forms/base.py#L21-L27
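The clamp rule in clean_limit is easy to check in isolation. A minimal sketch of the same logic outside Django, with a hypothetical max_limit parameter standing in for settings.SELECTABLE_MAX_LIMIT:

def clamp_limit(limit, max_limit):
    # Mirrors BaseLookupForm.clean_limit: when a maximum is configured,
    # a missing limit or one above the maximum is replaced by the maximum.
    if max_limit is not None and (not limit or limit > max_limit):
        limit = max_limit
    return limit

assert clamp_limit(None, 25) == 25   # no limit given -> capped
assert clamp_limit(100, 25) == 25    # too large -> clamped
assert clamp_limit(10, 25) == 10     # within bounds -> unchanged
assert clamp_limit(100, None) == 100 # no maximum configured -> passthrough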
2,620
desbma/sacad
sacad/rate_watcher.py
AccessRateWatcher.waitAccessAsync
async def waitAccessAsync(self): """ Wait the needed time before sending a request to honor rate limit. """ async with self.lock: while True: last_access_ts = self.__getLastAccess() if last_access_ts is not None: now = time.time() last_access_ts = last_access_ts[0] time_since_last_access = now - last_access_ts if time_since_last_access < self.min_delay_between_accesses: time_to_wait = self.min_delay_between_accesses - time_since_last_access if self.jitter_range_ms is not None: time_to_wait += random.randint(*self.jitter_range_ms) / 1000 self.logger.debug("Sleeping for %.2fms because of rate limit for domain %s" % (time_to_wait * 1000, self.domain)) await asyncio.sleep(time_to_wait) access_time = time.time() self.__access(access_time) # now we should be good... except if another process did the same query at the same time # the database serves as an atomic lock, query again to be sure the last row is the one # we just inserted last_access_ts = self.__getLastAccess() if last_access_ts[0] == access_time: break
python
async def waitAccessAsync(self): async with self.lock: while True: last_access_ts = self.__getLastAccess() if last_access_ts is not None: now = time.time() last_access_ts = last_access_ts[0] time_since_last_access = now - last_access_ts if time_since_last_access < self.min_delay_between_accesses: time_to_wait = self.min_delay_between_accesses - time_since_last_access if self.jitter_range_ms is not None: time_to_wait += random.randint(*self.jitter_range_ms) / 1000 self.logger.debug("Sleeping for %.2fms because of rate limit for domain %s" % (time_to_wait * 1000, self.domain)) await asyncio.sleep(time_to_wait) access_time = time.time() self.__access(access_time) # now we should be good... except if another process did the same query at the same time # the database serves as an atomic lock, query again to be sure the last row is the one # we just inserted last_access_ts = self.__getLastAccess() if last_access_ts[0] == access_time: break
[ "async", "def", "waitAccessAsync", "(", "self", ")", ":", "async", "with", "self", ".", "lock", ":", "while", "True", ":", "last_access_ts", "=", "self", ".", "__getLastAccess", "(", ")", "if", "last_access_ts", "is", "not", "None", ":", "now", "=", "time", ".", "time", "(", ")", "last_access_ts", "=", "last_access_ts", "[", "0", "]", "time_since_last_access", "=", "now", "-", "last_access_ts", "if", "time_since_last_access", "<", "self", ".", "min_delay_between_accesses", ":", "time_to_wait", "=", "self", ".", "min_delay_between_accesses", "-", "time_since_last_access", "if", "self", ".", "jitter_range_ms", "is", "not", "None", ":", "time_to_wait", "+=", "random", ".", "randint", "(", "*", "self", ".", "jitter_range_ms", ")", "/", "1000", "self", ".", "logger", ".", "debug", "(", "\"Sleeping for %.2fms because of rate limit for domain %s\"", "%", "(", "time_to_wait", "*", "1000", ",", "self", ".", "domain", ")", ")", "await", "asyncio", ".", "sleep", "(", "time_to_wait", ")", "access_time", "=", "time", ".", "time", "(", ")", "self", ".", "__access", "(", "access_time", ")", "# now we should be good... except if another process did the same query at the same time", "# the database serves as an atomic lock, query again to be sure the last row is the one", "# we just inserted", "last_access_ts", "=", "self", ".", "__getLastAccess", "(", ")", "if", "last_access_ts", "[", "0", "]", "==", "access_time", ":", "break" ]
Wait the needed time before sending a request to honor the rate limit.
[ "Wait", "the", "needed", "time", "before", "sending", "a", "request", "to", "honor", "rate", "limit", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/rate_watcher.py#L28-L53
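A rough usage sketch, assuming the constructor arguments that isReachable (below) passes to AccessRateWatcher; the database path and delay values here are illustrative only:

import asyncio
import logging

from sacad import rate_watcher

async def polite_requests(urls):
    for url in urls:
        # One watcher per URL; the shared SQLite file serializes accesses
        # across processes, while the internal lock serializes coroutines.
        watcher = rate_watcher.AccessRateWatcher("/tmp/rate_watcher.sqlite",
                                                 url,
                                                 1.0,  # min delay in seconds
                                                 jitter_range_ms=(0, 300),
                                                 logger=logging.getLogger("demo"))
        await watcher.waitAccessAsync()
        # ...now safe to send the actual request for this domain...

asyncio.get_event_loop().run_until_complete(
    polite_requests(["https://example.com/a", "https://example.com/b"]))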
2,621
desbma/sacad
sacad/rate_watcher.py
AccessRateWatcher.__access
def __access(self, ts): """ Record an API access. """ with self.connection: self.connection.execute("INSERT OR REPLACE INTO access_timestamp (timestamp, domain) VALUES (?, ?)", (ts, self.domain))
python
def __access(self, ts): with self.connection: self.connection.execute("INSERT OR REPLACE INTO access_timestamp (timestamp, domain) VALUES (?, ?)", (ts, self.domain))
[ "def", "__access", "(", "self", ",", "ts", ")", ":", "with", "self", ".", "connection", ":", "self", ".", "connection", ".", "execute", "(", "\"INSERT OR REPLACE INTO access_timestamp (timestamp, domain) VALUES (?, ?)\"", ",", "(", "ts", ",", "self", ".", "domain", ")", ")" ]
Record an API access.
[ "Record", "an", "API", "access", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/rate_watcher.py#L62-L66
2,622
desbma/sacad
sacad/http_helpers.py
aiohttp_socket_timeout
def aiohttp_socket_timeout(socket_timeout_s): """ Return a aiohttp.ClientTimeout object with only socket timeouts set. """ return aiohttp.ClientTimeout(total=None, connect=None, sock_connect=socket_timeout_s, sock_read=socket_timeout_s)
python
def aiohttp_socket_timeout(socket_timeout_s): return aiohttp.ClientTimeout(total=None, connect=None, sock_connect=socket_timeout_s, sock_read=socket_timeout_s)
[ "def", "aiohttp_socket_timeout", "(", "socket_timeout_s", ")", ":", "return", "aiohttp", ".", "ClientTimeout", "(", "total", "=", "None", ",", "connect", "=", "None", ",", "sock_connect", "=", "socket_timeout_s", ",", "sock_read", "=", "socket_timeout_s", ")" ]
Return an aiohttp.ClientTimeout object with only socket timeouts set.
[ "Return", "a", "aiohttp", ".", "ClientTimeout", "object", "with", "only", "socket", "timeouts", "set", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/http_helpers.py#L15-L20
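Socket-level timeouts bound each connect and read without capping the total transfer time, which suits large downloads from slow mirrors. A sketch of passing such a timeout object to aiohttp:

import aiohttp

# Per-socket limits only: total and connect stay unbounded, so a long
# download succeeds as long as every individual read finishes in time.
timeout = aiohttp.ClientTimeout(total=None, connect=None,
                                sock_connect=10.0, sock_read=10.0)

async def fetch(url):
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as response:
            return await response.read()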
2,623
desbma/sacad
sacad/http_helpers.py
Http.isReachable
async def isReachable(self, url, *, headers=None, verify=True, response_headers=None, cache=None): """ Send a HEAD request with short timeout or get data from cache, return True if ressource has 2xx status code, False instead. """ if (cache is not None) and (url in cache): # try from cache first self.logger.debug("Got headers for URL '%s' from cache" % (url)) resp_ok, response_headers = pickle.loads(cache[url]) return resp_ok domain_rate_watcher = rate_watcher.AccessRateWatcher(self.watcher_db_filepath, url, self.min_delay_between_accesses, jitter_range_ms=self.jitter_range_ms, logger=self.logger) resp_ok = True try: for attempt, time_to_sleep in enumerate(redo.retrier(max_attempts=HTTP_MAX_ATTEMPTS, sleeptime=0.5, max_sleeptime=HTTP_MAX_RETRY_SLEEP_SHORT_S, sleepscale=1.5), 1): await domain_rate_watcher.waitAccessAsync() try: async with self.session.head(url, headers=self._buildHeaders(headers), timeout=HTTP_SHORT_TIMEOUT, ssl=verify) as response: pass except (asyncio.TimeoutError, aiohttp.ClientError) as e: self.logger.warning("Probing '%s' failed (attempt %u/%u): %s %s" % (url, attempt, HTTP_MAX_ATTEMPTS, e.__class__.__qualname__, e)) if attempt == HTTP_MAX_ATTEMPTS: resp_ok = False else: self.logger.debug("Retrying in %.3fs" % (time_to_sleep)) await asyncio.sleep(time_to_sleep) else: response.raise_for_status() if response_headers is not None: response_headers.update(response.headers) break # http retry loop except aiohttp.ClientResponseError as e: self.logger.debug("Probing '%s' failed: %s %s" % (url, e.__class__.__qualname__, e)) resp_ok = False if cache is not None: # store in cache cache[url] = pickle.dumps((resp_ok, response_headers)) return resp_ok
python
async def isReachable(self, url, *, headers=None, verify=True, response_headers=None, cache=None): if (cache is not None) and (url in cache): # try from cache first self.logger.debug("Got headers for URL '%s' from cache" % (url)) resp_ok, response_headers = pickle.loads(cache[url]) return resp_ok domain_rate_watcher = rate_watcher.AccessRateWatcher(self.watcher_db_filepath, url, self.min_delay_between_accesses, jitter_range_ms=self.jitter_range_ms, logger=self.logger) resp_ok = True try: for attempt, time_to_sleep in enumerate(redo.retrier(max_attempts=HTTP_MAX_ATTEMPTS, sleeptime=0.5, max_sleeptime=HTTP_MAX_RETRY_SLEEP_SHORT_S, sleepscale=1.5), 1): await domain_rate_watcher.waitAccessAsync() try: async with self.session.head(url, headers=self._buildHeaders(headers), timeout=HTTP_SHORT_TIMEOUT, ssl=verify) as response: pass except (asyncio.TimeoutError, aiohttp.ClientError) as e: self.logger.warning("Probing '%s' failed (attempt %u/%u): %s %s" % (url, attempt, HTTP_MAX_ATTEMPTS, e.__class__.__qualname__, e)) if attempt == HTTP_MAX_ATTEMPTS: resp_ok = False else: self.logger.debug("Retrying in %.3fs" % (time_to_sleep)) await asyncio.sleep(time_to_sleep) else: response.raise_for_status() if response_headers is not None: response_headers.update(response.headers) break # http retry loop except aiohttp.ClientResponseError as e: self.logger.debug("Probing '%s' failed: %s %s" % (url, e.__class__.__qualname__, e)) resp_ok = False if cache is not None: # store in cache cache[url] = pickle.dumps((resp_ok, response_headers)) return resp_ok
[ "async", "def", "isReachable", "(", "self", ",", "url", ",", "*", ",", "headers", "=", "None", ",", "verify", "=", "True", ",", "response_headers", "=", "None", ",", "cache", "=", "None", ")", ":", "if", "(", "cache", "is", "not", "None", ")", "and", "(", "url", "in", "cache", ")", ":", "# try from cache first", "self", ".", "logger", ".", "debug", "(", "\"Got headers for URL '%s' from cache\"", "%", "(", "url", ")", ")", "resp_ok", ",", "response_headers", "=", "pickle", ".", "loads", "(", "cache", "[", "url", "]", ")", "return", "resp_ok", "domain_rate_watcher", "=", "rate_watcher", ".", "AccessRateWatcher", "(", "self", ".", "watcher_db_filepath", ",", "url", ",", "self", ".", "min_delay_between_accesses", ",", "jitter_range_ms", "=", "self", ".", "jitter_range_ms", ",", "logger", "=", "self", ".", "logger", ")", "resp_ok", "=", "True", "try", ":", "for", "attempt", ",", "time_to_sleep", "in", "enumerate", "(", "redo", ".", "retrier", "(", "max_attempts", "=", "HTTP_MAX_ATTEMPTS", ",", "sleeptime", "=", "0.5", ",", "max_sleeptime", "=", "HTTP_MAX_RETRY_SLEEP_SHORT_S", ",", "sleepscale", "=", "1.5", ")", ",", "1", ")", ":", "await", "domain_rate_watcher", ".", "waitAccessAsync", "(", ")", "try", ":", "async", "with", "self", ".", "session", ".", "head", "(", "url", ",", "headers", "=", "self", ".", "_buildHeaders", "(", "headers", ")", ",", "timeout", "=", "HTTP_SHORT_TIMEOUT", ",", "ssl", "=", "verify", ")", "as", "response", ":", "pass", "except", "(", "asyncio", ".", "TimeoutError", ",", "aiohttp", ".", "ClientError", ")", "as", "e", ":", "self", ".", "logger", ".", "warning", "(", "\"Probing '%s' failed (attempt %u/%u): %s %s\"", "%", "(", "url", ",", "attempt", ",", "HTTP_MAX_ATTEMPTS", ",", "e", ".", "__class__", ".", "__qualname__", ",", "e", ")", ")", "if", "attempt", "==", "HTTP_MAX_ATTEMPTS", ":", "resp_ok", "=", "False", "else", ":", "self", ".", "logger", ".", "debug", "(", "\"Retrying in %.3fs\"", "%", "(", "time_to_sleep", ")", ")", "await", "asyncio", ".", "sleep", "(", "time_to_sleep", ")", "else", ":", "response", ".", "raise_for_status", "(", ")", "if", "response_headers", "is", "not", "None", ":", "response_headers", ".", "update", "(", "response", ".", "headers", ")", "break", "# http retry loop", "except", "aiohttp", ".", "ClientResponseError", "as", "e", ":", "self", ".", "logger", ".", "debug", "(", "\"Probing '%s' failed: %s %s\"", "%", "(", "url", ",", "e", ".", "__class__", ".", "__qualname__", ",", "e", ")", ")", "resp_ok", "=", "False", "if", "cache", "is", "not", "None", ":", "# store in cache", "cache", "[", "url", "]", "=", "pickle", ".", "dumps", "(", "(", "resp_ok", ",", "response_headers", ")", ")", "return", "resp_ok" ]
Send a HEAD request with a short timeout, or get data from the cache; return True if the resource has a 2xx status code, False otherwise.
[ "Send", "a", "HEAD", "request", "with", "short", "timeout", "or", "get", "data", "from", "cache", "return", "True", "if", "ressource", "has", "2xx", "status", "code", "False", "instead", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/http_helpers.py#L130-L187
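Stripped of the HTTP and caching details, the retry loop in isReachable follows a reusable shape; in this sketch, probe is a hypothetical callable that raises OSError on transient failure:

import time

from sacad import redo

MAX_ATTEMPTS = 3

def probe_with_retries(probe):
    for attempt, time_to_sleep in enumerate(
            redo.retrier(max_attempts=MAX_ATTEMPTS, sleeptime=0.5,
                         max_sleeptime=5.0, sleepscale=1.5), 1):
        try:
            result = probe()
        except OSError as e:
            print("attempt %u/%u failed: %s" % (attempt, MAX_ATTEMPTS, e))
            if attempt == MAX_ATTEMPTS:
                return False  # all attempts exhausted
            time.sleep(time_to_sleep)
        else:
            return result  # success, exit the retry loop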
2,624
desbma/sacad
sacad/http_helpers.py
Http.fastStreamedQuery
async def fastStreamedQuery(self, url, *, headers=None, verify=True): """ Send a GET request with short timeout, do not retry, and return streamed response. """ response = await self.session.get(url, headers=self._buildHeaders(headers), timeout=HTTP_SHORT_TIMEOUT, ssl=verify) response.raise_for_status() return response
python
async def fastStreamedQuery(self, url, *, headers=None, verify=True): response = await self.session.get(url, headers=self._buildHeaders(headers), timeout=HTTP_SHORT_TIMEOUT, ssl=verify) response.raise_for_status() return response
[ "async", "def", "fastStreamedQuery", "(", "self", ",", "url", ",", "*", ",", "headers", "=", "None", ",", "verify", "=", "True", ")", ":", "response", "=", "await", "self", ".", "session", ".", "get", "(", "url", ",", "headers", "=", "self", ".", "_buildHeaders", "(", "headers", ")", ",", "timeout", "=", "HTTP_SHORT_TIMEOUT", ",", "ssl", "=", "verify", ")", "response", ".", "raise_for_status", "(", ")", "return", "response" ]
Send a GET request with a short timeout, do not retry, and return the streamed response.
[ "Send", "a", "GET", "request", "with", "short", "timeout", "do", "not", "retry", "and", "return", "streamed", "response", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/http_helpers.py#L189-L198
2,625
desbma/sacad
sacad/sources/lastfm.py
LastFmCoverSource.processQueryString
def processQueryString(self, s): """ See CoverSource.processQueryString. """ char_blacklist = set(string.punctuation) char_blacklist.remove("'") char_blacklist.remove("&") char_blacklist = frozenset(char_blacklist) return __class__.unpunctuate(s.lower(), char_blacklist=char_blacklist)
python
def processQueryString(self, s): char_blacklist = set(string.punctuation) char_blacklist.remove("'") char_blacklist.remove("&") char_blacklist = frozenset(char_blacklist) return __class__.unpunctuate(s.lower(), char_blacklist=char_blacklist)
[ "def", "processQueryString", "(", "self", ",", "s", ")", ":", "char_blacklist", "=", "set", "(", "string", ".", "punctuation", ")", "char_blacklist", ".", "remove", "(", "\"'\"", ")", "char_blacklist", ".", "remove", "(", "\"&\"", ")", "char_blacklist", "=", "frozenset", "(", "char_blacklist", ")", "return", "__class__", ".", "unpunctuate", "(", "s", ".", "lower", "(", ")", ",", "char_blacklist", "=", "char_blacklist", ")" ]
See CoverSource.processQueryString.
[ "See", "CoverSource", ".", "processQueryString", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/sources/lastfm.py#L47-L53
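unpunctuate is defined on CoverSource and not shown in this record; a plausible minimal stand-in consistent with this call site drops blacklisted characters and collapses whitespace (the implementation details are assumptions):

import string

def unpunctuate(s, *, char_blacklist=frozenset(string.punctuation)):
    # Hypothetical stand-in for CoverSource.unpunctuate: drop blacklisted
    # characters, then normalize runs of whitespace to single spaces.
    s = "".join(c for c in s if c not in char_blacklist)
    return " ".join(s.split())

# Keeping "'" and "&" preserves tokens Last.fm treats as significant:
blacklist = frozenset(set(string.punctuation) - {"'", "&"})
print(unpunctuate("AC/DC - Back In Black!".lower(), char_blacklist=blacklist))
# -> "acdc back in black"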
2,626
desbma/sacad
sacad/__init__.py
search_and_download
async def search_and_download(album, artist, format, size, out_filepath, *, size_tolerance_prct, amazon_tlds, no_lq_sources, async_loop): """ Search and download a cover, return True if success, False instead. """ # register sources source_args = (size, size_tolerance_prct) cover_sources = [sources.LastFmCoverSource(*source_args), sources.AmazonCdCoverSource(*source_args), sources.AmazonDigitalCoverSource(*source_args)] for tld in amazon_tlds: cover_sources.append(sources.AmazonCdCoverSource(*source_args, tld=tld)) if not no_lq_sources: cover_sources.append(sources.GoogleImagesWebScrapeCoverSource(*source_args)) # schedule search work search_futures = [] for cover_source in cover_sources: coroutine = cover_source.search(album, artist) future = asyncio.ensure_future(coroutine, loop=async_loop) search_futures.append(future) # wait for it await asyncio.wait(search_futures, loop=async_loop) # get results results = [] for future in search_futures: source_results = future.result() results.extend(source_results) # sort results results = await CoverSourceResult.preProcessForComparison(results, size, size_tolerance_prct) results.sort(reverse=True, key=functools.cmp_to_key(functools.partial(CoverSourceResult.compare, target_size=size, size_tolerance_prct=size_tolerance_prct))) if not results: logging.getLogger("Main").info("No results") # download for result in results: try: await result.get(format, size, size_tolerance_prct, out_filepath) except Exception as e: logging.getLogger("Main").warning("Download of %s failed: %s %s" % (result, e.__class__.__qualname__, e)) continue else: return True return False
python
async def search_and_download(album, artist, format, size, out_filepath, *, size_tolerance_prct, amazon_tlds, no_lq_sources, async_loop): # register sources source_args = (size, size_tolerance_prct) cover_sources = [sources.LastFmCoverSource(*source_args), sources.AmazonCdCoverSource(*source_args), sources.AmazonDigitalCoverSource(*source_args)] for tld in amazon_tlds: cover_sources.append(sources.AmazonCdCoverSource(*source_args, tld=tld)) if not no_lq_sources: cover_sources.append(sources.GoogleImagesWebScrapeCoverSource(*source_args)) # schedule search work search_futures = [] for cover_source in cover_sources: coroutine = cover_source.search(album, artist) future = asyncio.ensure_future(coroutine, loop=async_loop) search_futures.append(future) # wait for it await asyncio.wait(search_futures, loop=async_loop) # get results results = [] for future in search_futures: source_results = future.result() results.extend(source_results) # sort results results = await CoverSourceResult.preProcessForComparison(results, size, size_tolerance_prct) results.sort(reverse=True, key=functools.cmp_to_key(functools.partial(CoverSourceResult.compare, target_size=size, size_tolerance_prct=size_tolerance_prct))) if not results: logging.getLogger("Main").info("No results") # download for result in results: try: await result.get(format, size, size_tolerance_prct, out_filepath) except Exception as e: logging.getLogger("Main").warning("Download of %s failed: %s %s" % (result, e.__class__.__qualname__, e)) continue else: return True return False
[ "async", "def", "search_and_download", "(", "album", ",", "artist", ",", "format", ",", "size", ",", "out_filepath", ",", "*", ",", "size_tolerance_prct", ",", "amazon_tlds", ",", "no_lq_sources", ",", "async_loop", ")", ":", "# register sources", "source_args", "=", "(", "size", ",", "size_tolerance_prct", ")", "cover_sources", "=", "[", "sources", ".", "LastFmCoverSource", "(", "*", "source_args", ")", ",", "sources", ".", "AmazonCdCoverSource", "(", "*", "source_args", ")", ",", "sources", ".", "AmazonDigitalCoverSource", "(", "*", "source_args", ")", "]", "for", "tld", "in", "amazon_tlds", ":", "cover_sources", ".", "append", "(", "sources", ".", "AmazonCdCoverSource", "(", "*", "source_args", ",", "tld", "=", "tld", ")", ")", "if", "not", "no_lq_sources", ":", "cover_sources", ".", "append", "(", "sources", ".", "GoogleImagesWebScrapeCoverSource", "(", "*", "source_args", ")", ")", "# schedule search work", "search_futures", "=", "[", "]", "for", "cover_source", "in", "cover_sources", ":", "coroutine", "=", "cover_source", ".", "search", "(", "album", ",", "artist", ")", "future", "=", "asyncio", ".", "ensure_future", "(", "coroutine", ",", "loop", "=", "async_loop", ")", "search_futures", ".", "append", "(", "future", ")", "# wait for it", "await", "asyncio", ".", "wait", "(", "search_futures", ",", "loop", "=", "async_loop", ")", "# get results", "results", "=", "[", "]", "for", "future", "in", "search_futures", ":", "source_results", "=", "future", ".", "result", "(", ")", "results", ".", "extend", "(", "source_results", ")", "# sort results", "results", "=", "await", "CoverSourceResult", ".", "preProcessForComparison", "(", "results", ",", "size", ",", "size_tolerance_prct", ")", "results", ".", "sort", "(", "reverse", "=", "True", ",", "key", "=", "functools", ".", "cmp_to_key", "(", "functools", ".", "partial", "(", "CoverSourceResult", ".", "compare", ",", "target_size", "=", "size", ",", "size_tolerance_prct", "=", "size_tolerance_prct", ")", ")", ")", "if", "not", "results", ":", "logging", ".", "getLogger", "(", "\"Main\"", ")", ".", "info", "(", "\"No results\"", ")", "# download", "for", "result", "in", "results", ":", "try", ":", "await", "result", ".", "get", "(", "format", ",", "size", ",", "size_tolerance_prct", ",", "out_filepath", ")", "except", "Exception", "as", "e", ":", "logging", ".", "getLogger", "(", "\"Main\"", ")", ".", "warning", "(", "\"Download of %s failed: %s %s\"", "%", "(", "result", ",", "e", ".", "__class__", ".", "__qualname__", ",", "e", ")", ")", "continue", "else", ":", "return", "True", "return", "False" ]
Search for and download a cover; return True on success, False otherwise.
[ "Search", "and", "download", "a", "cover", "return", "True", "if", "success", "False", "instead", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/__init__.py#L20-L70
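A sketch of driving this coroutine from synchronous code, consistent with its explicit async_loop parameter; the album, artist, and size values are illustrative, and CoverImageFormat.JPEG is assumed from sacad.cover:

import asyncio

import sacad
from sacad.cover import CoverImageFormat

async_loop = asyncio.get_event_loop()
ok = async_loop.run_until_complete(
    sacad.search_and_download("Nevermind", "Nirvana",
                              CoverImageFormat.JPEG, 600, "cover.jpg",
                              size_tolerance_prct=25,
                              amazon_tlds=(),
                              no_lq_sources=False,
                              async_loop=async_loop))
print("cover written" if ok else "no cover found")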
2,627
desbma/sacad
sacad/sources/amazondigital.py
AmazonDigitalCoverSource.generateImgUrls
def generateImgUrls(self, product_id, dynapi_key, format_id, slice_count): """ Generate URLs for slice_count^2 subimages of a product. """ for x in range(slice_count): for y in range(slice_count): yield ("http://z2-ec2.images-amazon.com/R/1/a=" + product_id + "+c=" + dynapi_key + "+d=_SCR%28" + str(format_id) + "," + str(x) + "," + str(y) + "%29_=.jpg")
python
def generateImgUrls(self, product_id, dynapi_key, format_id, slice_count): for x in range(slice_count): for y in range(slice_count): yield ("http://z2-ec2.images-amazon.com/R/1/a=" + product_id + "+c=" + dynapi_key + "+d=_SCR%28" + str(format_id) + "," + str(x) + "," + str(y) + "%29_=.jpg")
[ "def", "generateImgUrls", "(", "self", ",", "product_id", ",", "dynapi_key", ",", "format_id", ",", "slice_count", ")", ":", "for", "x", "in", "range", "(", "slice_count", ")", ":", "for", "y", "in", "range", "(", "slice_count", ")", ":", "yield", "(", "\"http://z2-ec2.images-amazon.com/R/1/a=\"", "+", "product_id", "+", "\"+c=\"", "+", "dynapi_key", "+", "\"+d=_SCR%28\"", "+", "str", "(", "format_id", ")", "+", "\",\"", "+", "str", "(", "x", ")", "+", "\",\"", "+", "str", "(", "y", ")", "+", "\"%29_=.jpg\"", ")" ]
Generate URLs for slice_count^2 subimages of a product.
[ "Generate", "URLs", "for", "slice_count^2", "subimages", "of", "a", "product", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/sources/amazondigital.py#L134-L140
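The nested loops yield tiles column by column (x outer, y inner), so the caller must stitch slice_count**2 pieces back together in that order; a quick check of the ordering:

def tile_coords(slice_count):
    # Same nesting as generateImgUrls: x is the outer loop, y the inner.
    for x in range(slice_count):
        for y in range(slice_count):
            yield x, y

print(list(tile_coords(2)))  # -> [(0, 0), (0, 1), (1, 0), (1, 1)]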
2,628
desbma/sacad
sacad/redo.py
retrier
def retrier(*, max_attempts, sleeptime, max_sleeptime, sleepscale=1.5, jitter=0.2): """ Generator yielding time to wait for, after the attempt, if it failed. """ assert(max_attempts > 1) assert(sleeptime >= 0) assert(0 <= jitter <= sleeptime) assert(sleepscale >= 1) cur_sleeptime = min(max_sleeptime, sleeptime) for attempt in range(max_attempts): cur_jitter = random.randint(int(-jitter * 1000), int(jitter * 1000)) / 1000 yield max(0, cur_sleeptime + cur_jitter) cur_sleeptime = min(max_sleeptime, cur_sleeptime * sleepscale)
python
def retrier(*, max_attempts, sleeptime, max_sleeptime, sleepscale=1.5, jitter=0.2): assert(max_attempts > 1) assert(sleeptime >= 0) assert(0 <= jitter <= sleeptime) assert(sleepscale >= 1) cur_sleeptime = min(max_sleeptime, sleeptime) for attempt in range(max_attempts): cur_jitter = random.randint(int(-jitter * 1000), int(jitter * 1000)) / 1000 yield max(0, cur_sleeptime + cur_jitter) cur_sleeptime = min(max_sleeptime, cur_sleeptime * sleepscale)
[ "def", "retrier", "(", "*", ",", "max_attempts", ",", "sleeptime", ",", "max_sleeptime", ",", "sleepscale", "=", "1.5", ",", "jitter", "=", "0.2", ")", ":", "assert", "(", "max_attempts", ">", "1", ")", "assert", "(", "sleeptime", ">=", "0", ")", "assert", "(", "0", "<=", "jitter", "<=", "sleeptime", ")", "assert", "(", "sleepscale", ">=", "1", ")", "cur_sleeptime", "=", "min", "(", "max_sleeptime", ",", "sleeptime", ")", "for", "attempt", "in", "range", "(", "max_attempts", ")", ":", "cur_jitter", "=", "random", ".", "randint", "(", "int", "(", "-", "jitter", "*", "1000", ")", ",", "int", "(", "jitter", "*", "1000", ")", ")", "/", "1000", "yield", "max", "(", "0", ",", "cur_sleeptime", "+", "cur_jitter", ")", "cur_sleeptime", "=", "min", "(", "max_sleeptime", ",", "cur_sleeptime", "*", "sleepscale", ")" ]
Generator yielding, after each failed attempt, the time to wait before retrying.
[ "Generator", "yielding", "time", "to", "wait", "for", "after", "the", "attempt", "if", "it", "failed", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/redo.py#L6-L18
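The yielded delays grow geometrically from sleeptime by sleepscale, capped at max_sleeptime, with ±jitter seconds of noise; printing the schedule makes the shape obvious without sleeping:

from sacad.redo import retrier

for attempt, delay in enumerate(retrier(max_attempts=5, sleeptime=0.5,
                                        max_sleeptime=4.0), 1):
    print("after failed attempt %u, wait %.3fs" % (attempt, delay))
# Base delays before jitter: 0.5, 0.75, 1.125, 1.6875, 2.53125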
2,629
desbma/sacad
sacad/cover.py
CoverSourceResult.get
async def get(self, target_format, target_size, size_tolerance_prct, out_filepath): """ Download cover and process it. """ if self.source_quality.value <= CoverSourceQuality.LOW.value: logging.getLogger("Cover").warning("Cover is from a potentially unreliable source and may be unrelated to the search") images_data = [] for i, url in enumerate(self.urls): # download logging.getLogger("Cover").info("Downloading cover '%s' (part %u/%u)..." % (url, i + 1, len(self.urls))) headers = {} self.source.updateHttpHeaders(headers) async def pre_cache_callback(img_data): return await __class__.crunch(img_data, self.format) store_in_cache_callback, image_data = await self.source.http.query(url, headers=headers, verify=False, cache=__class__.image_cache, pre_cache_callback=pre_cache_callback) # store immediately in cache await store_in_cache_callback() # append for multi images images_data.append(image_data) need_format_change = (self.format != target_format) need_size_change = ((max(self.size) > target_size) and (abs(max(self.size) - target_size) > target_size * size_tolerance_prct / 100)) need_join = len(images_data) > 1 if need_join or need_format_change or need_size_change: # post process image_data = self.postProcess(images_data, target_format if need_format_change else None, target_size if need_size_change else None) # crunch image again image_data = await __class__.crunch(image_data, target_format) # write it with open(out_filepath, "wb") as file: file.write(image_data)
python
async def get(self, target_format, target_size, size_tolerance_prct, out_filepath): if self.source_quality.value <= CoverSourceQuality.LOW.value: logging.getLogger("Cover").warning("Cover is from a potentially unreliable source and may be unrelated to the search") images_data = [] for i, url in enumerate(self.urls): # download logging.getLogger("Cover").info("Downloading cover '%s' (part %u/%u)..." % (url, i + 1, len(self.urls))) headers = {} self.source.updateHttpHeaders(headers) async def pre_cache_callback(img_data): return await __class__.crunch(img_data, self.format) store_in_cache_callback, image_data = await self.source.http.query(url, headers=headers, verify=False, cache=__class__.image_cache, pre_cache_callback=pre_cache_callback) # store immediately in cache await store_in_cache_callback() # append for multi images images_data.append(image_data) need_format_change = (self.format != target_format) need_size_change = ((max(self.size) > target_size) and (abs(max(self.size) - target_size) > target_size * size_tolerance_prct / 100)) need_join = len(images_data) > 1 if need_join or need_format_change or need_size_change: # post process image_data = self.postProcess(images_data, target_format if need_format_change else None, target_size if need_size_change else None) # crunch image again image_data = await __class__.crunch(image_data, target_format) # write it with open(out_filepath, "wb") as file: file.write(image_data)
[ "async", "def", "get", "(", "self", ",", "target_format", ",", "target_size", ",", "size_tolerance_prct", ",", "out_filepath", ")", ":", "if", "self", ".", "source_quality", ".", "value", "<=", "CoverSourceQuality", ".", "LOW", ".", "value", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"Cover is from a potentially unreliable source and may be unrelated to the search\"", ")", "images_data", "=", "[", "]", "for", "i", ",", "url", "in", "enumerate", "(", "self", ".", "urls", ")", ":", "# download", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "info", "(", "\"Downloading cover '%s' (part %u/%u)...\"", "%", "(", "url", ",", "i", "+", "1", ",", "len", "(", "self", ".", "urls", ")", ")", ")", "headers", "=", "{", "}", "self", ".", "source", ".", "updateHttpHeaders", "(", "headers", ")", "async", "def", "pre_cache_callback", "(", "img_data", ")", ":", "return", "await", "__class__", ".", "crunch", "(", "img_data", ",", "self", ".", "format", ")", "store_in_cache_callback", ",", "image_data", "=", "await", "self", ".", "source", ".", "http", ".", "query", "(", "url", ",", "headers", "=", "headers", ",", "verify", "=", "False", ",", "cache", "=", "__class__", ".", "image_cache", ",", "pre_cache_callback", "=", "pre_cache_callback", ")", "# store immediately in cache", "await", "store_in_cache_callback", "(", ")", "# append for multi images", "images_data", ".", "append", "(", "image_data", ")", "need_format_change", "=", "(", "self", ".", "format", "!=", "target_format", ")", "need_size_change", "=", "(", "(", "max", "(", "self", ".", "size", ")", ">", "target_size", ")", "and", "(", "abs", "(", "max", "(", "self", ".", "size", ")", "-", "target_size", ")", ">", "target_size", "*", "size_tolerance_prct", "/", "100", ")", ")", "need_join", "=", "len", "(", "images_data", ")", ">", "1", "if", "need_join", "or", "need_format_change", "or", "need_size_change", ":", "# post process", "image_data", "=", "self", ".", "postProcess", "(", "images_data", ",", "target_format", "if", "need_format_change", "else", "None", ",", "target_size", "if", "need_size_change", "else", "None", ")", "# crunch image again", "image_data", "=", "await", "__class__", ".", "crunch", "(", "image_data", ",", "target_format", ")", "# write it", "with", "open", "(", "out_filepath", ",", "\"wb\"", ")", "as", "file", ":", "file", ".", "write", "(", "image_data", ")" ]
Download a cover and process it.
[ "Download", "cover", "and", "process", "it", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L114-L157
2,630
desbma/sacad
sacad/cover.py
CoverSourceResult.setFormatMetadata
def setFormatMetadata(self, format): """ Set format image metadata to what has been reliably identified. """ assert((self.needMetadataUpdate(CoverImageMetadata.FORMAT)) or (self.format is format)) self.format = format self.check_metadata &= ~CoverImageMetadata.FORMAT
python
def setFormatMetadata(self, format): assert((self.needMetadataUpdate(CoverImageMetadata.FORMAT)) or (self.format is format)) self.format = format self.check_metadata &= ~CoverImageMetadata.FORMAT
[ "def", "setFormatMetadata", "(", "self", ",", "format", ")", ":", "assert", "(", "(", "self", ".", "needMetadataUpdate", "(", "CoverImageMetadata", ".", "FORMAT", ")", ")", "or", "(", "self", ".", "format", "is", "format", ")", ")", "self", ".", "format", "=", "format", "self", ".", "check_metadata", "&=", "~", "CoverImageMetadata", ".", "FORMAT" ]
Set format image metadata to what has been reliably identified.
[ "Set", "format", "image", "metadata", "to", "what", "has", "been", "reliably", "identified", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L313-L318
2,631
desbma/sacad
sacad/cover.py
CoverSourceResult.setSizeMetadata
def setSizeMetadata(self, size): """ Set size image metadata to what has been reliably identified. """ assert((self.needMetadataUpdate(CoverImageMetadata.SIZE)) or (self.size == size)) self.size = size self.check_metadata &= ~CoverImageMetadata.SIZE
python
def setSizeMetadata(self, size): assert((self.needMetadataUpdate(CoverImageMetadata.SIZE)) or (self.size == size)) self.size = size self.check_metadata &= ~CoverImageMetadata.SIZE
[ "def", "setSizeMetadata", "(", "self", ",", "size", ")", ":", "assert", "(", "(", "self", ".", "needMetadataUpdate", "(", "CoverImageMetadata", ".", "SIZE", ")", ")", "or", "(", "self", ".", "size", "==", "size", ")", ")", "self", ".", "size", "=", "size", "self", ".", "check_metadata", "&=", "~", "CoverImageMetadata", ".", "SIZE" ]
Set size image metadata to what has been reliably identified.
[ "Set", "size", "image", "metadata", "to", "what", "has", "been", "reliably", "identified", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L320-L325
2,632
desbma/sacad
sacad/cover.py
CoverSourceResult.updateSignature
async def updateSignature(self): """ Calculate a cover's "signature" using its thumbnail url. """ assert(self.thumbnail_sig is None) if self.thumbnail_url is None: logging.getLogger("Cover").warning("No thumbnail available for %s" % (self)) return # download logging.getLogger("Cover").debug("Downloading cover thumbnail '%s'..." % (self.thumbnail_url)) headers = {} self.source.updateHttpHeaders(headers) async def pre_cache_callback(img_data): return await __class__.crunch(img_data, CoverImageFormat.JPEG, silent=True) try: store_in_cache_callback, image_data = await self.source.http.query(self.thumbnail_url, cache=__class__.image_cache, headers=headers, pre_cache_callback=pre_cache_callback) except Exception as e: logging.getLogger("Cover").warning("Download of '%s' failed: %s %s" % (self.thumbnail_url, e.__class__.__qualname__, e)) return # compute sig logging.getLogger("Cover").debug("Computing signature of %s..." % (self)) try: self.thumbnail_sig = __class__.computeImgSignature(image_data) except Exception as e: logging.getLogger("Cover").warning("Failed to compute signature of '%s': %s %s" % (self, e.__class__.__qualname__, e)) else: await store_in_cache_callback()
python
async def updateSignature(self): assert(self.thumbnail_sig is None) if self.thumbnail_url is None: logging.getLogger("Cover").warning("No thumbnail available for %s" % (self)) return # download logging.getLogger("Cover").debug("Downloading cover thumbnail '%s'..." % (self.thumbnail_url)) headers = {} self.source.updateHttpHeaders(headers) async def pre_cache_callback(img_data): return await __class__.crunch(img_data, CoverImageFormat.JPEG, silent=True) try: store_in_cache_callback, image_data = await self.source.http.query(self.thumbnail_url, cache=__class__.image_cache, headers=headers, pre_cache_callback=pre_cache_callback) except Exception as e: logging.getLogger("Cover").warning("Download of '%s' failed: %s %s" % (self.thumbnail_url, e.__class__.__qualname__, e)) return # compute sig logging.getLogger("Cover").debug("Computing signature of %s..." % (self)) try: self.thumbnail_sig = __class__.computeImgSignature(image_data) except Exception as e: logging.getLogger("Cover").warning("Failed to compute signature of '%s': %s %s" % (self, e.__class__.__qualname__, e)) else: await store_in_cache_callback()
[ "async", "def", "updateSignature", "(", "self", ")", ":", "assert", "(", "self", ".", "thumbnail_sig", "is", "None", ")", "if", "self", ".", "thumbnail_url", "is", "None", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"No thumbnail available for %s\"", "%", "(", "self", ")", ")", "return", "# download", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"Downloading cover thumbnail '%s'...\"", "%", "(", "self", ".", "thumbnail_url", ")", ")", "headers", "=", "{", "}", "self", ".", "source", ".", "updateHttpHeaders", "(", "headers", ")", "async", "def", "pre_cache_callback", "(", "img_data", ")", ":", "return", "await", "__class__", ".", "crunch", "(", "img_data", ",", "CoverImageFormat", ".", "JPEG", ",", "silent", "=", "True", ")", "try", ":", "store_in_cache_callback", ",", "image_data", "=", "await", "self", ".", "source", ".", "http", ".", "query", "(", "self", ".", "thumbnail_url", ",", "cache", "=", "__class__", ".", "image_cache", ",", "headers", "=", "headers", ",", "pre_cache_callback", "=", "pre_cache_callback", ")", "except", "Exception", "as", "e", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"Download of '%s' failed: %s %s\"", "%", "(", "self", ".", "thumbnail_url", ",", "e", ".", "__class__", ".", "__qualname__", ",", "e", ")", ")", "return", "# compute sig", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"Computing signature of %s...\"", "%", "(", "self", ")", ")", "try", ":", "self", ".", "thumbnail_sig", "=", "__class__", ".", "computeImgSignature", "(", "image_data", ")", "except", "Exception", "as", "e", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"Failed to compute signature of '%s': %s %s\"", "%", "(", "self", ",", "e", ".", "__class__", ".", "__qualname__", ",", "e", ")", ")", "else", ":", "await", "store_in_cache_callback", "(", ")" ]
Calculate a cover's "signature" using its thumbnail URL.
[ "Calculate", "a", "cover", "s", "signature", "using", "its", "thumbnail", "url", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L327-L363
2,633
desbma/sacad
sacad/cover.py
CoverSourceResult.crunch
async def crunch(image_data, format, silent=False): """ Crunch image data, and return the processed data, or orignal data if operation failed. """ if (((format is CoverImageFormat.PNG) and (not HAS_OPTIPNG)) or ((format is CoverImageFormat.JPEG) and (not HAS_JPEGOPTIM))): return image_data with mkstemp_ctx.mkstemp(suffix=".%s" % (format.name.lower())) as tmp_out_filepath: if not silent: logging.getLogger("Cover").info("Crunching %s image..." % (format.name.upper())) with open(tmp_out_filepath, "wb") as tmp_out_file: tmp_out_file.write(image_data) size_before = len(image_data) if format is CoverImageFormat.PNG: cmd = ["optipng", "-quiet", "-o1"] elif format is CoverImageFormat.JPEG: cmd = ["jpegoptim", "-q", "--strip-all"] cmd.append(tmp_out_filepath) p = await asyncio.create_subprocess_exec(*cmd, stdin=asyncio.subprocess.DEVNULL, stdout=asyncio.subprocess.DEVNULL, stderr=asyncio.subprocess.DEVNULL) await p.wait() if p.returncode != 0: if not silent: logging.getLogger("Cover").warning("Crunching image failed") return image_data with open(tmp_out_filepath, "rb") as tmp_out_file: crunched_image_data = tmp_out_file.read() size_after = len(crunched_image_data) pct_saved = 100 * (size_before - size_after) / size_before if not silent: logging.getLogger("Cover").debug("Crunching image saved %.2f%% filesize" % (pct_saved)) return crunched_image_data
python
async def crunch(image_data, format, silent=False): if (((format is CoverImageFormat.PNG) and (not HAS_OPTIPNG)) or ((format is CoverImageFormat.JPEG) and (not HAS_JPEGOPTIM))): return image_data with mkstemp_ctx.mkstemp(suffix=".%s" % (format.name.lower())) as tmp_out_filepath: if not silent: logging.getLogger("Cover").info("Crunching %s image..." % (format.name.upper())) with open(tmp_out_filepath, "wb") as tmp_out_file: tmp_out_file.write(image_data) size_before = len(image_data) if format is CoverImageFormat.PNG: cmd = ["optipng", "-quiet", "-o1"] elif format is CoverImageFormat.JPEG: cmd = ["jpegoptim", "-q", "--strip-all"] cmd.append(tmp_out_filepath) p = await asyncio.create_subprocess_exec(*cmd, stdin=asyncio.subprocess.DEVNULL, stdout=asyncio.subprocess.DEVNULL, stderr=asyncio.subprocess.DEVNULL) await p.wait() if p.returncode != 0: if not silent: logging.getLogger("Cover").warning("Crunching image failed") return image_data with open(tmp_out_filepath, "rb") as tmp_out_file: crunched_image_data = tmp_out_file.read() size_after = len(crunched_image_data) pct_saved = 100 * (size_before - size_after) / size_before if not silent: logging.getLogger("Cover").debug("Crunching image saved %.2f%% filesize" % (pct_saved)) return crunched_image_data
[ "async", "def", "crunch", "(", "image_data", ",", "format", ",", "silent", "=", "False", ")", ":", "if", "(", "(", "(", "format", "is", "CoverImageFormat", ".", "PNG", ")", "and", "(", "not", "HAS_OPTIPNG", ")", ")", "or", "(", "(", "format", "is", "CoverImageFormat", ".", "JPEG", ")", "and", "(", "not", "HAS_JPEGOPTIM", ")", ")", ")", ":", "return", "image_data", "with", "mkstemp_ctx", ".", "mkstemp", "(", "suffix", "=", "\".%s\"", "%", "(", "format", ".", "name", ".", "lower", "(", ")", ")", ")", "as", "tmp_out_filepath", ":", "if", "not", "silent", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "info", "(", "\"Crunching %s image...\"", "%", "(", "format", ".", "name", ".", "upper", "(", ")", ")", ")", "with", "open", "(", "tmp_out_filepath", ",", "\"wb\"", ")", "as", "tmp_out_file", ":", "tmp_out_file", ".", "write", "(", "image_data", ")", "size_before", "=", "len", "(", "image_data", ")", "if", "format", "is", "CoverImageFormat", ".", "PNG", ":", "cmd", "=", "[", "\"optipng\"", ",", "\"-quiet\"", ",", "\"-o1\"", "]", "elif", "format", "is", "CoverImageFormat", ".", "JPEG", ":", "cmd", "=", "[", "\"jpegoptim\"", ",", "\"-q\"", ",", "\"--strip-all\"", "]", "cmd", ".", "append", "(", "tmp_out_filepath", ")", "p", "=", "await", "asyncio", ".", "create_subprocess_exec", "(", "*", "cmd", ",", "stdin", "=", "asyncio", ".", "subprocess", ".", "DEVNULL", ",", "stdout", "=", "asyncio", ".", "subprocess", ".", "DEVNULL", ",", "stderr", "=", "asyncio", ".", "subprocess", ".", "DEVNULL", ")", "await", "p", ".", "wait", "(", ")", "if", "p", ".", "returncode", "!=", "0", ":", "if", "not", "silent", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"Crunching image failed\"", ")", "return", "image_data", "with", "open", "(", "tmp_out_filepath", ",", "\"rb\"", ")", "as", "tmp_out_file", ":", "crunched_image_data", "=", "tmp_out_file", ".", "read", "(", ")", "size_after", "=", "len", "(", "crunched_image_data", ")", "pct_saved", "=", "100", "*", "(", "size_before", "-", "size_after", ")", "/", "size_before", "if", "not", "silent", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"Crunching image saved %.2f%% filesize\"", "%", "(", "pct_saved", ")", ")", "return", "crunched_image_data" ]
Crunch image data and return the processed data, or the original data if the operation failed.
[ "Crunch", "image", "data", "and", "return", "the", "processed", "data", "or", "orignal", "data", "if", "operation", "failed", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L460-L491
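HAS_OPTIPNG and HAS_JPEGOPTIM are module-level flags not shown in this record; a plausible reconstruction checks for the external tools on PATH, since crunch silently degrades to a no-op without them:

import shutil

# Hypothetical derivation of the availability flags used by crunch.
HAS_OPTIPNG = shutil.which("optipng") is not None
HAS_JPEGOPTIM = shutil.which("jpegoptim") is not None

# The subprocess calls above are equivalent to running:
#   optipng -quiet -o1 cover.png        # lossless PNG recompression
#   jpegoptim -q --strip-all cover.jpg  # drop metadata, keep pixel data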
2,634
desbma/sacad
sacad/cover.py
CoverSourceResult.guessImageMetadataFromData
def guessImageMetadataFromData(img_data): """ Identify an image format and size from its first bytes. """ format, width, height = None, None, None img_stream = io.BytesIO(img_data) try: img = PIL.Image.open(img_stream) except IOError: format = imghdr.what(None, h=img_data) format = SUPPORTED_IMG_FORMATS.get(format, None) else: format = img.format.lower() format = SUPPORTED_IMG_FORMATS.get(format, None) width, height = img.size return format, width, height
python
def guessImageMetadataFromData(img_data): format, width, height = None, None, None img_stream = io.BytesIO(img_data) try: img = PIL.Image.open(img_stream) except IOError: format = imghdr.what(None, h=img_data) format = SUPPORTED_IMG_FORMATS.get(format, None) else: format = img.format.lower() format = SUPPORTED_IMG_FORMATS.get(format, None) width, height = img.size return format, width, height
[ "def", "guessImageMetadataFromData", "(", "img_data", ")", ":", "format", ",", "width", ",", "height", "=", "None", ",", "None", ",", "None", "img_stream", "=", "io", ".", "BytesIO", "(", "img_data", ")", "try", ":", "img", "=", "PIL", ".", "Image", ".", "open", "(", "img_stream", ")", "except", "IOError", ":", "format", "=", "imghdr", ".", "what", "(", "None", ",", "h", "=", "img_data", ")", "format", "=", "SUPPORTED_IMG_FORMATS", ".", "get", "(", "format", ",", "None", ")", "else", ":", "format", "=", "img", ".", "format", ".", "lower", "(", ")", "format", "=", "SUPPORTED_IMG_FORMATS", ".", "get", "(", "format", ",", "None", ")", "width", ",", "height", "=", "img", ".", "size", "return", "format", ",", "width", ",", "height" ]
Identify an image format and size from its first bytes.
[ "Identify", "an", "image", "format", "and", "size", "from", "its", "first", "bytes", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L494-L507
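A small demonstration of both identification paths, using an in-memory PNG (Pillow is required; imghdr is the same fallback the method uses when Pillow cannot parse the data):

import imghdr
import io

import PIL.Image

buf = io.BytesIO()
PIL.Image.new("RGB", (32, 16)).save(buf, format="PNG")
data = buf.getvalue()

# Primary path: Pillow yields both format and dimensions.
img = PIL.Image.open(io.BytesIO(data))
print(img.format.lower(), img.size)    # -> png (32, 16)

# Fallback path: imghdr names the format from the header bytes alone.
print(imghdr.what(None, h=data[:32]))  # -> png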
2,635
desbma/sacad
sacad/cover.py
CoverSourceResult.guessImageMetadataFromHttpData
async def guessImageMetadataFromHttpData(response): """ Identify an image format and size from the beginning of its HTTP data. """ metadata = None img_data = bytearray() while len(img_data) < CoverSourceResult.MAX_FILE_METADATA_PEEK_SIZE: new_img_data = await response.content.read(__class__.METADATA_PEEK_SIZE_INCREMENT) if not new_img_data: break img_data.extend(new_img_data) metadata = __class__.guessImageMetadataFromData(img_data) if (metadata is not None) and all(metadata): return metadata return metadata
python
async def guessImageMetadataFromHttpData(response): metadata = None img_data = bytearray() while len(img_data) < CoverSourceResult.MAX_FILE_METADATA_PEEK_SIZE: new_img_data = await response.content.read(__class__.METADATA_PEEK_SIZE_INCREMENT) if not new_img_data: break img_data.extend(new_img_data) metadata = __class__.guessImageMetadataFromData(img_data) if (metadata is not None) and all(metadata): return metadata return metadata
[ "async", "def", "guessImageMetadataFromHttpData", "(", "response", ")", ":", "metadata", "=", "None", "img_data", "=", "bytearray", "(", ")", "while", "len", "(", "img_data", ")", "<", "CoverSourceResult", ".", "MAX_FILE_METADATA_PEEK_SIZE", ":", "new_img_data", "=", "await", "response", ".", "content", ".", "read", "(", "__class__", ".", "METADATA_PEEK_SIZE_INCREMENT", ")", "if", "not", "new_img_data", ":", "break", "img_data", ".", "extend", "(", "new_img_data", ")", "metadata", "=", "__class__", ".", "guessImageMetadataFromData", "(", "img_data", ")", "if", "(", "metadata", "is", "not", "None", ")", "and", "all", "(", "metadata", ")", ":", "return", "metadata", "return", "metadata" ]
Identify an image format and size from the beginning of its HTTP data.
[ "Identify", "an", "image", "format", "and", "size", "from", "the", "beginning", "of", "its", "HTTP", "data", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L510-L525
2,636
desbma/sacad
sacad/cover.py
CoverSourceResult.guessImageFormatFromHttpResponse
def guessImageFormatFromHttpResponse(response): """ Guess file format from HTTP response, return format or None. """ extensions = [] # try to guess extension from response content-type header try: content_type = response.headers["Content-Type"] except KeyError: pass else: ext = mimetypes.guess_extension(content_type, strict=False) if ext is not None: extensions.append(ext) # try to extract extension from URL urls = list(response.history) + [response.url] for url in map(str, urls): ext = os.path.splitext(urllib.parse.urlsplit(url).path)[-1] if (ext is not None) and (ext not in extensions): extensions.append(ext) # now guess from the extensions for ext in extensions: try: return SUPPORTED_IMG_FORMATS[ext[1:]] except KeyError: pass
python
def guessImageFormatFromHttpResponse(response): extensions = [] # try to guess extension from response content-type header try: content_type = response.headers["Content-Type"] except KeyError: pass else: ext = mimetypes.guess_extension(content_type, strict=False) if ext is not None: extensions.append(ext) # try to extract extension from URL urls = list(response.history) + [response.url] for url in map(str, urls): ext = os.path.splitext(urllib.parse.urlsplit(url).path)[-1] if (ext is not None) and (ext not in extensions): extensions.append(ext) # now guess from the extensions for ext in extensions: try: return SUPPORTED_IMG_FORMATS[ext[1:]] except KeyError: pass
[ "def", "guessImageFormatFromHttpResponse", "(", "response", ")", ":", "extensions", "=", "[", "]", "# try to guess extension from response content-type header", "try", ":", "content_type", "=", "response", ".", "headers", "[", "\"Content-Type\"", "]", "except", "KeyError", ":", "pass", "else", ":", "ext", "=", "mimetypes", ".", "guess_extension", "(", "content_type", ",", "strict", "=", "False", ")", "if", "ext", "is", "not", "None", ":", "extensions", ".", "append", "(", "ext", ")", "# try to extract extension from URL", "urls", "=", "list", "(", "response", ".", "history", ")", "+", "[", "response", ".", "url", "]", "for", "url", "in", "map", "(", "str", ",", "urls", ")", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "urllib", ".", "parse", ".", "urlsplit", "(", "url", ")", ".", "path", ")", "[", "-", "1", "]", "if", "(", "ext", "is", "not", "None", ")", "and", "(", "ext", "not", "in", "extensions", ")", ":", "extensions", ".", "append", "(", "ext", ")", "# now guess from the extensions", "for", "ext", "in", "extensions", ":", "try", ":", "return", "SUPPORTED_IMG_FORMATS", "[", "ext", "[", "1", ":", "]", "]", "except", "KeyError", ":", "pass" ]
Guess the file format from an HTTP response; return the format or None.
[ "Guess", "file", "format", "from", "HTTP", "response", "return", "format", "or", "None", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L528-L554
2,637
desbma/sacad
sacad/cover.py
CoverSourceResult.preProcessForComparison
async def preProcessForComparison(results, target_size, size_tolerance_prct): """ Process results to prepare them for future comparison and sorting. """ # find reference (=image most likely to match target cover ignoring factors like size and format) reference = None for result in results: if result.source_quality is CoverSourceQuality.REFERENCE: if ((reference is None) or (CoverSourceResult.compare(result, reference, target_size=target_size, size_tolerance_prct=size_tolerance_prct) > 0)): reference = result # remove results that are only refs results = list(itertools.filterfalse(operator.attrgetter("is_only_reference"), results)) # remove duplicates no_dup_results = [] for result in results: is_dup = False for result_comp in results: if ((result_comp is not result) and (result_comp.urls == result.urls) and (__class__.compare(result, result_comp, target_size=target_size, size_tolerance_prct=size_tolerance_prct) < 0)): is_dup = True break if not is_dup: no_dup_results.append(result) dup_count = len(results) - len(no_dup_results) if dup_count > 0: logging.getLogger("Cover").info("Removed %u duplicate results" % (dup_count)) results = no_dup_results if reference is not None: logging.getLogger("Cover").info("Reference is: %s" % (reference)) reference.is_similar_to_reference = True # calculate sigs futures = [] for result in results: coroutine = result.updateSignature() future = asyncio.ensure_future(coroutine) futures.append(future) if reference.is_only_reference: assert(reference not in results) coroutine = reference.updateSignature() future = asyncio.ensure_future(coroutine) futures.append(future) if futures: await asyncio.wait(futures) for future in futures: future.result() # raise pending exception if any # compare other results to reference for result in results: if ((result is not reference) and (result.thumbnail_sig is not None) and (reference.thumbnail_sig is not None)): result.is_similar_to_reference = __class__.areImageSigsSimilar(result.thumbnail_sig, reference.thumbnail_sig) if result.is_similar_to_reference: logging.getLogger("Cover").debug("%s is similar to reference" % (result)) else: logging.getLogger("Cover").debug("%s is NOT similar to reference" % (result)) else: logging.getLogger("Cover").warning("No reference result found") return results
python
async def preProcessForComparison(results, target_size, size_tolerance_prct): # find reference (=image most likely to match target cover ignoring factors like size and format) reference = None for result in results: if result.source_quality is CoverSourceQuality.REFERENCE: if ((reference is None) or (CoverSourceResult.compare(result, reference, target_size=target_size, size_tolerance_prct=size_tolerance_prct) > 0)): reference = result # remove results that are only refs results = list(itertools.filterfalse(operator.attrgetter("is_only_reference"), results)) # remove duplicates no_dup_results = [] for result in results: is_dup = False for result_comp in results: if ((result_comp is not result) and (result_comp.urls == result.urls) and (__class__.compare(result, result_comp, target_size=target_size, size_tolerance_prct=size_tolerance_prct) < 0)): is_dup = True break if not is_dup: no_dup_results.append(result) dup_count = len(results) - len(no_dup_results) if dup_count > 0: logging.getLogger("Cover").info("Removed %u duplicate results" % (dup_count)) results = no_dup_results if reference is not None: logging.getLogger("Cover").info("Reference is: %s" % (reference)) reference.is_similar_to_reference = True # calculate sigs futures = [] for result in results: coroutine = result.updateSignature() future = asyncio.ensure_future(coroutine) futures.append(future) if reference.is_only_reference: assert(reference not in results) coroutine = reference.updateSignature() future = asyncio.ensure_future(coroutine) futures.append(future) if futures: await asyncio.wait(futures) for future in futures: future.result() # raise pending exception if any # compare other results to reference for result in results: if ((result is not reference) and (result.thumbnail_sig is not None) and (reference.thumbnail_sig is not None)): result.is_similar_to_reference = __class__.areImageSigsSimilar(result.thumbnail_sig, reference.thumbnail_sig) if result.is_similar_to_reference: logging.getLogger("Cover").debug("%s is similar to reference" % (result)) else: logging.getLogger("Cover").debug("%s is NOT similar to reference" % (result)) else: logging.getLogger("Cover").warning("No reference result found") return results
[ "async", "def", "preProcessForComparison", "(", "results", ",", "target_size", ",", "size_tolerance_prct", ")", ":", "# find reference (=image most likely to match target cover ignoring factors like size and format)", "reference", "=", "None", "for", "result", "in", "results", ":", "if", "result", ".", "source_quality", "is", "CoverSourceQuality", ".", "REFERENCE", ":", "if", "(", "(", "reference", "is", "None", ")", "or", "(", "CoverSourceResult", ".", "compare", "(", "result", ",", "reference", ",", "target_size", "=", "target_size", ",", "size_tolerance_prct", "=", "size_tolerance_prct", ")", ">", "0", ")", ")", ":", "reference", "=", "result", "# remove results that are only refs", "results", "=", "list", "(", "itertools", ".", "filterfalse", "(", "operator", ".", "attrgetter", "(", "\"is_only_reference\"", ")", ",", "results", ")", ")", "# remove duplicates", "no_dup_results", "=", "[", "]", "for", "result", "in", "results", ":", "is_dup", "=", "False", "for", "result_comp", "in", "results", ":", "if", "(", "(", "result_comp", "is", "not", "result", ")", "and", "(", "result_comp", ".", "urls", "==", "result", ".", "urls", ")", "and", "(", "__class__", ".", "compare", "(", "result", ",", "result_comp", ",", "target_size", "=", "target_size", ",", "size_tolerance_prct", "=", "size_tolerance_prct", ")", "<", "0", ")", ")", ":", "is_dup", "=", "True", "break", "if", "not", "is_dup", ":", "no_dup_results", ".", "append", "(", "result", ")", "dup_count", "=", "len", "(", "results", ")", "-", "len", "(", "no_dup_results", ")", "if", "dup_count", ">", "0", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "info", "(", "\"Removed %u duplicate results\"", "%", "(", "dup_count", ")", ")", "results", "=", "no_dup_results", "if", "reference", "is", "not", "None", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "info", "(", "\"Reference is: %s\"", "%", "(", "reference", ")", ")", "reference", ".", "is_similar_to_reference", "=", "True", "# calculate sigs", "futures", "=", "[", "]", "for", "result", "in", "results", ":", "coroutine", "=", "result", ".", "updateSignature", "(", ")", "future", "=", "asyncio", ".", "ensure_future", "(", "coroutine", ")", "futures", ".", "append", "(", "future", ")", "if", "reference", ".", "is_only_reference", ":", "assert", "(", "reference", "not", "in", "results", ")", "coroutine", "=", "reference", ".", "updateSignature", "(", ")", "future", "=", "asyncio", ".", "ensure_future", "(", "coroutine", ")", "futures", ".", "append", "(", "future", ")", "if", "futures", ":", "await", "asyncio", ".", "wait", "(", "futures", ")", "for", "future", "in", "futures", ":", "future", ".", "result", "(", ")", "# raise pending exception if any", "# compare other results to reference", "for", "result", "in", "results", ":", "if", "(", "(", "result", "is", "not", "reference", ")", "and", "(", "result", ".", "thumbnail_sig", "is", "not", "None", ")", "and", "(", "reference", ".", "thumbnail_sig", "is", "not", "None", ")", ")", ":", "result", ".", "is_similar_to_reference", "=", "__class__", ".", "areImageSigsSimilar", "(", "result", ".", "thumbnail_sig", ",", "reference", ".", "thumbnail_sig", ")", "if", "result", ".", "is_similar_to_reference", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"%s is similar to reference\"", "%", "(", "result", ")", ")", "else", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"%s is NOT similar to reference\"", "%", "(", "result", ")", ")", "else", ":", "logging", 
".", "getLogger", "(", "\"Cover\"", ")", ".", "warning", "(", "\"No reference result found\"", ")", "return", "results" ]
Process results to prepare them for future comparison and sorting.
[ "Process", "results", "to", "prepare", "them", "for", "future", "comparison", "and", "sorting", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L557-L627
2,638
desbma/sacad
sacad/cover.py
CoverSourceResult.computeImgSignature
def computeImgSignature(image_data): """ Calculate an image signature. This is similar to ahash but uses 3 color components. See: https://github.com/JohannesBuchner/imagehash/blob/4.0/imagehash/__init__.py#L125 """ parser = PIL.ImageFile.Parser() parser.feed(image_data) img = parser.close() target_size = (__class__.IMG_SIG_SIZE, __class__.IMG_SIG_SIZE) img.thumbnail(target_size, PIL.Image.BICUBIC) if img.size != target_size: logging.getLogger("Cover").debug("Non square thumbnail after resize to %ux%u, unable to compute signature" % target_size) return None img = img.convert(mode="RGB") pixels = img.getdata() pixel_count = target_size[0] * target_size[1] color_count = 3 r = bitarray.bitarray(pixel_count * color_count) r.setall(False) for ic in range(color_count): mean = sum(p[ic] for p in pixels) // pixel_count for ip, p in enumerate(pixels): if p[ic] > mean: r[pixel_count * ic + ip] = True return r
python
def computeImgSignature(image_data): parser = PIL.ImageFile.Parser() parser.feed(image_data) img = parser.close() target_size = (__class__.IMG_SIG_SIZE, __class__.IMG_SIG_SIZE) img.thumbnail(target_size, PIL.Image.BICUBIC) if img.size != target_size: logging.getLogger("Cover").debug("Non square thumbnail after resize to %ux%u, unable to compute signature" % target_size) return None img = img.convert(mode="RGB") pixels = img.getdata() pixel_count = target_size[0] * target_size[1] color_count = 3 r = bitarray.bitarray(pixel_count * color_count) r.setall(False) for ic in range(color_count): mean = sum(p[ic] for p in pixels) // pixel_count for ip, p in enumerate(pixels): if p[ic] > mean: r[pixel_count * ic + ip] = True return r
[ "def", "computeImgSignature", "(", "image_data", ")", ":", "parser", "=", "PIL", ".", "ImageFile", ".", "Parser", "(", ")", "parser", ".", "feed", "(", "image_data", ")", "img", "=", "parser", ".", "close", "(", ")", "target_size", "=", "(", "__class__", ".", "IMG_SIG_SIZE", ",", "__class__", ".", "IMG_SIG_SIZE", ")", "img", ".", "thumbnail", "(", "target_size", ",", "PIL", ".", "Image", ".", "BICUBIC", ")", "if", "img", ".", "size", "!=", "target_size", ":", "logging", ".", "getLogger", "(", "\"Cover\"", ")", ".", "debug", "(", "\"Non square thumbnail after resize to %ux%u, unable to compute signature\"", "%", "target_size", ")", "return", "None", "img", "=", "img", ".", "convert", "(", "mode", "=", "\"RGB\"", ")", "pixels", "=", "img", ".", "getdata", "(", ")", "pixel_count", "=", "target_size", "[", "0", "]", "*", "target_size", "[", "1", "]", "color_count", "=", "3", "r", "=", "bitarray", ".", "bitarray", "(", "pixel_count", "*", "color_count", ")", "r", ".", "setall", "(", "False", ")", "for", "ic", "in", "range", "(", "color_count", ")", ":", "mean", "=", "sum", "(", "p", "[", "ic", "]", "for", "p", "in", "pixels", ")", "//", "pixel_count", "for", "ip", ",", "p", "in", "enumerate", "(", "pixels", ")", ":", "if", "p", "[", "ic", "]", ">", "mean", ":", "r", "[", "pixel_count", "*", "ic", "+", "ip", "]", "=", "True", "return", "r" ]
Calculate an image signature. This is similar to ahash but uses 3 color components. See: https://github.com/JohannesBuchner/imagehash/blob/4.0/imagehash/__init__.py#L125
[ "Calculate", "an", "image", "signature", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/cover.py#L630-L657
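For illustration, a minimal sketch of how two such bitarray signatures could be compared (the dataset does not include areImageSigsSimilar itself, so the function name and the max_differing_bits_ratio threshold below are hypothetical):

import bitarray

def sigs_similar(sig1, sig2, max_differing_bits_ratio=0.1):
    # XOR leaves a set bit wherever the two equal-length signatures disagree;
    # count() of that bitarray is therefore the Hamming distance.
    differing_bits = (sig1 ^ sig2).count()
    return (differing_bits / len(sig1)) <= max_differing_bits_ratio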
2,639
desbma/sacad
sacad/recurse.py
get_metadata
def get_metadata(audio_filepaths): """ Return a tuple of artist, album, has_embedded_album_art from a list of audio files. """ artist, album, has_embedded_album_art = None, None, None for audio_filepath in audio_filepaths: try: mf = mutagen.File(audio_filepath) except Exception: continue if mf is None: continue # artist for key in ("albumartist", "artist", # ogg "TPE1", "TPE2", # mp3 "aART", "\xa9ART"): # mp4 try: val = mf.get(key, None) except ValueError: val = None if val is not None: artist = val[-1] break # album for key in ("_album", "album", # ogg "TALB", # mp3 "\xa9alb"): # mp4 try: val = mf.get(key, None) except ValueError: val = None if val is not None: album = val[-1] break if artist and album: # album art if isinstance(mf, mutagen.ogg.OggFileType): has_embedded_album_art = "metadata_block_picture" in mf elif isinstance(mf, mutagen.mp3.MP3): has_embedded_album_art = any(map(operator.methodcaller("startswith", "APIC:"), mf.keys())) elif isinstance(mf, mutagen.mp4.MP4): has_embedded_album_art = "covr" in mf # stop at the first file that succeeds (for performance) break return artist, album, has_embedded_album_art
python
def get_metadata(audio_filepaths): artist, album, has_embedded_album_art = None, None, None for audio_filepath in audio_filepaths: try: mf = mutagen.File(audio_filepath) except Exception: continue if mf is None: continue # artist for key in ("albumartist", "artist", # ogg "TPE1", "TPE2", # mp3 "aART", "\xa9ART"): # mp4 try: val = mf.get(key, None) except ValueError: val = None if val is not None: artist = val[-1] break # album for key in ("_album", "album", # ogg "TALB", # mp3 "\xa9alb"): # mp4 try: val = mf.get(key, None) except ValueError: val = None if val is not None: album = val[-1] break if artist and album: # album art if isinstance(mf, mutagen.ogg.OggFileType): has_embedded_album_art = "metadata_block_picture" in mf elif isinstance(mf, mutagen.mp3.MP3): has_embedded_album_art = any(map(operator.methodcaller("startswith", "APIC:"), mf.keys())) elif isinstance(mf, mutagen.mp4.MP4): has_embedded_album_art = "covr" in mf # stop at the first file that succeeds (for performance) break return artist, album, has_embedded_album_art
[ "def", "get_metadata", "(", "audio_filepaths", ")", ":", "artist", ",", "album", ",", "has_embedded_album_art", "=", "None", ",", "None", ",", "None", "for", "audio_filepath", "in", "audio_filepaths", ":", "try", ":", "mf", "=", "mutagen", ".", "File", "(", "audio_filepath", ")", "except", "Exception", ":", "continue", "if", "mf", "is", "None", ":", "continue", "# artist", "for", "key", "in", "(", "\"albumartist\"", ",", "\"artist\"", ",", "# ogg", "\"TPE1\"", ",", "\"TPE2\"", ",", "# mp3", "\"aART\"", ",", "\"\\xa9ART\"", ")", ":", "# mp4", "try", ":", "val", "=", "mf", ".", "get", "(", "key", ",", "None", ")", "except", "ValueError", ":", "val", "=", "None", "if", "val", "is", "not", "None", ":", "artist", "=", "val", "[", "-", "1", "]", "break", "# album", "for", "key", "in", "(", "\"_album\"", ",", "\"album\"", ",", "# ogg", "\"TALB\"", ",", "# mp3", "\"\\xa9alb\"", ")", ":", "# mp4", "try", ":", "val", "=", "mf", ".", "get", "(", "key", ",", "None", ")", "except", "ValueError", ":", "val", "=", "None", "if", "val", "is", "not", "None", ":", "album", "=", "val", "[", "-", "1", "]", "break", "if", "artist", "and", "album", ":", "# album art", "if", "isinstance", "(", "mf", ",", "mutagen", ".", "ogg", ".", "OggFileType", ")", ":", "has_embedded_album_art", "=", "\"metadata_block_picture\"", "in", "mf", "elif", "isinstance", "(", "mf", ",", "mutagen", ".", "mp3", ".", "MP3", ")", ":", "has_embedded_album_art", "=", "any", "(", "map", "(", "operator", ".", "methodcaller", "(", "\"startswith\"", ",", "\"APIC:\"", ")", ",", "mf", ".", "keys", "(", ")", ")", ")", "elif", "isinstance", "(", "mf", ",", "mutagen", ".", "mp4", ".", "MP4", ")", ":", "has_embedded_album_art", "=", "\"covr\"", "in", "mf", "# stop at the first file that succeeds (for performance)", "break", "return", "artist", ",", "album", ",", "has_embedded_album_art" ]
Return a tuple of artist, album, has_embedded_album_art from a list of audio files.
[ "Return", "a", "tuple", "of", "artist", "album", "has_embedded_album_art", "from", "a", "list", "of", "audio", "files", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/recurse.py#L59-L106
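A hedged usage sketch (the file paths are hypothetical; the function stops at the first file mutagen can parse):

artist, album, has_art = get_metadata(["/music/Album/01.flac", "/music/Album/02.flac"])
if artist and album and not has_art:
    print("Fetching cover for %s - %s" % (artist, album))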
2,640
desbma/sacad
sacad/recurse.py
embed_album_art
def embed_album_art(cover_filepath, path): """ Embed album art into audio files. """ with open(cover_filepath, "rb") as f: cover_data = f.read() for filename in os.listdir(path): try: ext = os.path.splitext(filename)[1][1:].lower() except IndexError: continue if ext in AUDIO_EXTENSIONS: filepath = os.path.join(path, filename) mf = mutagen.File(filepath) if (isinstance(mf.tags, mutagen._vorbis.VComment) or isinstance(mf, mutagen.ogg.OggFileType)): picture = mutagen.flac.Picture() picture.data = cover_data picture.type = mutagen.id3.PictureType.COVER_FRONT picture.mime = "image/jpeg" encoded_data = base64.b64encode(picture.write()) mf["metadata_block_picture"] = encoded_data.decode("ascii") elif (isinstance(mf.tags, mutagen.id3.ID3) or isinstance(mf, mutagen.id3.ID3FileType)): mf.tags.add(mutagen.id3.APIC(mime="image/jpeg", type=mutagen.id3.PictureType.COVER_FRONT, data=cover_data)) elif (isinstance(mf.tags, mutagen.mp4.MP4Tags) or isinstance(mf, mutagen.mp4.MP4)): mf["covr"] = [mutagen.mp4.MP4Cover(cover_data, imageformat=mutagen.mp4.AtomDataType.JPEG)] mf.save()
python
def embed_album_art(cover_filepath, path): with open(cover_filepath, "rb") as f: cover_data = f.read() for filename in os.listdir(path): try: ext = os.path.splitext(filename)[1][1:].lower() except IndexError: continue if ext in AUDIO_EXTENSIONS: filepath = os.path.join(path, filename) mf = mutagen.File(filepath) if (isinstance(mf.tags, mutagen._vorbis.VComment) or isinstance(mf, mutagen.ogg.OggFileType)): picture = mutagen.flac.Picture() picture.data = cover_data picture.type = mutagen.id3.PictureType.COVER_FRONT picture.mime = "image/jpeg" encoded_data = base64.b64encode(picture.write()) mf["metadata_block_picture"] = encoded_data.decode("ascii") elif (isinstance(mf.tags, mutagen.id3.ID3) or isinstance(mf, mutagen.id3.ID3FileType)): mf.tags.add(mutagen.id3.APIC(mime="image/jpeg", type=mutagen.id3.PictureType.COVER_FRONT, data=cover_data)) elif (isinstance(mf.tags, mutagen.mp4.MP4Tags) or isinstance(mf, mutagen.mp4.MP4)): mf["covr"] = [mutagen.mp4.MP4Cover(cover_data, imageformat=mutagen.mp4.AtomDataType.JPEG)] mf.save()
[ "def", "embed_album_art", "(", "cover_filepath", ",", "path", ")", ":", "with", "open", "(", "cover_filepath", ",", "\"rb\"", ")", "as", "f", ":", "cover_data", "=", "f", ".", "read", "(", ")", "for", "filename", "in", "os", ".", "listdir", "(", "path", ")", ":", "try", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", "[", "1", ":", "]", ".", "lower", "(", ")", "except", "IndexError", ":", "continue", "if", "ext", "in", "AUDIO_EXTENSIONS", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", "mf", "=", "mutagen", ".", "File", "(", "filepath", ")", "if", "(", "isinstance", "(", "mf", ".", "tags", ",", "mutagen", ".", "_vorbis", ".", "VComment", ")", "or", "isinstance", "(", "mf", ",", "mutagen", ".", "ogg", ".", "OggFileType", ")", ")", ":", "picture", "=", "mutagen", ".", "flac", ".", "Picture", "(", ")", "picture", ".", "data", "=", "cover_data", "picture", ".", "type", "=", "mutagen", ".", "id3", ".", "PictureType", ".", "COVER_FRONT", "picture", ".", "mime", "=", "\"image/jpeg\"", "encoded_data", "=", "base64", ".", "b64encode", "(", "picture", ".", "write", "(", ")", ")", "mf", "[", "\"metadata_block_picture\"", "]", "=", "encoded_data", ".", "decode", "(", "\"ascii\"", ")", "elif", "(", "isinstance", "(", "mf", ".", "tags", ",", "mutagen", ".", "id3", ".", "ID3", ")", "or", "isinstance", "(", "mf", ",", "mutagen", ".", "id3", ".", "ID3FileType", ")", ")", ":", "mf", ".", "tags", ".", "add", "(", "mutagen", ".", "id3", ".", "APIC", "(", "mime", "=", "\"image/jpeg\"", ",", "type", "=", "mutagen", ".", "id3", ".", "PictureType", ".", "COVER_FRONT", ",", "data", "=", "cover_data", ")", ")", "elif", "(", "isinstance", "(", "mf", ".", "tags", ",", "mutagen", ".", "mp4", ".", "MP4Tags", ")", "or", "isinstance", "(", "mf", ",", "mutagen", ".", "mp4", ".", "MP4", ")", ")", ":", "mf", "[", "\"covr\"", "]", "=", "[", "mutagen", ".", "mp4", ".", "MP4Cover", "(", "cover_data", ",", "imageformat", "=", "mutagen", ".", "mp4", ".", "AtomDataType", ".", "JPEG", ")", "]", "mf", ".", "save", "(", ")" ]
Embed album art into audio files.
[ "Embed", "album", "art", "into", "audio", "files", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/recurse.py#L142-L173
2,641
desbma/sacad
sacad/recurse.py
ichunk
def ichunk(iterable, n): """ Split an iterable into n-sized chunks. """ it = iter(iterable) while True: chunk = tuple(itertools.islice(it, n)) if not chunk: return yield chunk
python
def ichunk(iterable, n): it = iter(iterable) while True: chunk = tuple(itertools.islice(it, n)) if not chunk: return yield chunk
[ "def", "ichunk", "(", "iterable", ",", "n", ")", ":", "it", "=", "iter", "(", "iterable", ")", "while", "True", ":", "chunk", "=", "tuple", "(", "itertools", ".", "islice", "(", "it", ",", "n", ")", ")", "if", "not", "chunk", ":", "return", "yield", "chunk" ]
Split an iterable into n-sized chunks.
[ "Split", "an", "iterable", "into", "n", "-", "sized", "chunks", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/recurse.py#L176-L183
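In doctest form (chunks are tuples, and the final chunk may be shorter than n):

>>> list(ichunk(range(7), 3))
[(0, 1, 2), (3, 4, 5), (6,)]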
2,642
desbma/sacad
sacad/tqdm_logging.py
redirect_logging
def redirect_logging(tqdm_obj, logger=logging.getLogger()): """ Context manager to redirect logging to a TqdmLoggingHandler object and then restore the original. """ # remove current handler assert(len(logger.handlers) == 1) prev_handler = logger.handlers[0] logger.removeHandler(prev_handler) # add tqdm handler tqdm_handler = TqdmLoggingHandler(tqdm_obj) if prev_handler.formatter is not None: tqdm_handler.setFormatter(prev_handler.formatter) logger.addHandler(tqdm_handler) try: yield finally: # restore handler logger.removeHandler(tqdm_handler) logger.addHandler(prev_handler)
python
def redirect_logging(tqdm_obj, logger=logging.getLogger()): # remove current handler assert(len(logger.handlers) == 1) prev_handler = logger.handlers[0] logger.removeHandler(prev_handler) # add tqdm handler tqdm_handler = TqdmLoggingHandler(tqdm_obj) if prev_handler.formatter is not None: tqdm_handler.setFormatter(prev_handler.formatter) logger.addHandler(tqdm_handler) try: yield finally: # restore handler logger.removeHandler(tqdm_handler) logger.addHandler(prev_handler)
[ "def", "redirect_logging", "(", "tqdm_obj", ",", "logger", "=", "logging", ".", "getLogger", "(", ")", ")", ":", "# remove current handler", "assert", "(", "len", "(", "logger", ".", "handlers", ")", "==", "1", ")", "prev_handler", "=", "logger", ".", "handlers", "[", "0", "]", "logger", ".", "removeHandler", "(", "prev_handler", ")", "# add tqdm handler", "tqdm_handler", "=", "TqdmLoggingHandler", "(", "tqdm_obj", ")", "if", "prev_handler", ".", "formatter", "is", "not", "None", ":", "tqdm_handler", ".", "setFormatter", "(", "prev_handler", ".", "formatter", ")", "logger", ".", "addHandler", "(", "tqdm_handler", ")", "try", ":", "yield", "finally", ":", "# restore handler", "logger", ".", "removeHandler", "(", "tqdm_handler", ")", "logger", ".", "addHandler", "(", "prev_handler", ")" ]
Context manager to redirect logging to a TqdmLoggingHandler object and then restore the original.
[ "Context", "manager", "to", "redirect", "logging", "to", "a", "TqdmLoggingHandler", "object", "and", "then", "restore", "the", "original", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/tqdm_logging.py#L21-L39
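A usage sketch, assuming redirect_logging is decorated with contextlib.contextmanager in the source (the bare yield implies it) and that TqdmLoggingHandler writes through the tqdm object; the work list is hypothetical:

import logging
import tqdm

logging.basicConfig(level=logging.INFO)  # leaves the root logger with exactly one handler, as the assert requires
work_items = ["a", "b", "c"]  # hypothetical work list
with tqdm.tqdm(total=len(work_items)) as progress:
    with redirect_logging(progress):
        for item in work_items:
            # records go through the tqdm handler, so the progress bar is not mangled
            logging.getLogger().info("processing %r", item)
            progress.update(1)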
2,643
desbma/sacad
sacad/sources/base.py
CoverSource.probeUrl
async def probeUrl(self, url, response_headers=None): """ Probe URL reachability from cache or HEAD request. """ self.logger.debug("Probing URL '%s'..." % (url)) headers = {} self.updateHttpHeaders(headers) resp_headers = {} resp_ok = await self.http.isReachable(url, headers=headers, response_headers=resp_headers, cache=__class__.probe_cache) if response_headers is not None: response_headers.update(resp_headers) return resp_ok
python
async def probeUrl(self, url, response_headers=None): self.logger.debug("Probing URL '%s'..." % (url)) headers = {} self.updateHttpHeaders(headers) resp_headers = {} resp_ok = await self.http.isReachable(url, headers=headers, response_headers=resp_headers, cache=__class__.probe_cache) if response_headers is not None: response_headers.update(resp_headers) return resp_ok
[ "async", "def", "probeUrl", "(", "self", ",", "url", ",", "response_headers", "=", "None", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"Probing URL '%s'...\"", "%", "(", "url", ")", ")", "headers", "=", "{", "}", "self", ".", "updateHttpHeaders", "(", "headers", ")", "resp_headers", "=", "{", "}", "resp_ok", "=", "await", "self", ".", "http", ".", "isReachable", "(", "url", ",", "headers", "=", "headers", ",", "response_headers", "=", "resp_headers", ",", "cache", "=", "__class__", ".", "probe_cache", ")", "if", "response_headers", "is", "not", "None", ":", "response_headers", ".", "update", "(", "resp_headers", ")", "return", "resp_ok" ]
Probe URL reachability from cache or HEAD request.
[ "Probe", "URL", "reachability", "from", "cache", "or", "HEAD", "request", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/sources/base.py#L143-L157
2,644
desbma/sacad
sacad/sources/base.py
CoverSource.unaccentuate
def unaccentuate(s): """ Replace accented chars in a string with their non-accented equivalents. """ return "".join(c for c in unicodedata.normalize("NFKD", s) if not unicodedata.combining(c))
python
def unaccentuate(s): return "".join(c for c in unicodedata.normalize("NFKD", s) if not unicodedata.combining(c))
[ "def", "unaccentuate", "(", "s", ")", ":", "return", "\"\"", ".", "join", "(", "c", "for", "c", "in", "unicodedata", ".", "normalize", "(", "\"NFKD\"", ",", "s", ")", "if", "not", "unicodedata", ".", "combining", "(", "c", ")", ")" ]
Replace accented chars in a string with their non-accented equivalents.
[ "Replace", "accented", "chars", "in", "a", "string", "with", "their", "non", "-", "accented", "equivalents", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/sources/base.py#L165-L167
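In doctest form (NFKD decomposition splits off the combining marks, which are then dropped):

>>> unaccentuate("Beyoncé Noël")
'Beyonce Noel'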
2,645
desbma/sacad
sacad/sources/base.py
CoverSource.unpunctuate
def unpunctuate(s, *, char_blacklist=string.punctuation): """ Remove punctuation from string s. """ # remove punctuation s = "".join(c for c in s if c not in char_blacklist) # remove consecutive spaces return " ".join(filter(None, s.split(" ")))
python
def unpunctuate(s, *, char_blacklist=string.punctuation): # remove punctuation s = "".join(c for c in s if c not in char_blacklist) # remove consecutive spaces return " ".join(filter(None, s.split(" ")))
[ "def", "unpunctuate", "(", "s", ",", "*", ",", "char_blacklist", "=", "string", ".", "punctuation", ")", ":", "# remove punctuation", "s", "=", "\"\"", ".", "join", "(", "c", "for", "c", "in", "s", "if", "c", "not", "in", "char_blacklist", ")", "# remove consecutive spaces", "return", "\" \"", ".", "join", "(", "filter", "(", "None", ",", "s", ".", "split", "(", "\" \"", ")", ")", ")" ]
Remove punctuation from string s.
[ "Remove", "punctuation", "from", "string", "s", "." ]
a7a010c4d9618a0c90927f1acb530101ca05fac4
https://github.com/desbma/sacad/blob/a7a010c4d9618a0c90927f1acb530101ca05fac4/sacad/sources/base.py#L170-L175
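In doctest form (punctuation is stripped first, then runs of spaces are collapsed to one):

>>> unpunctuate("AC/DC: Back in Black!!")
'ACDC Back in Black'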
2,646
fogleman/pg
pg/glfw.py
_glfw_get_version
def _glfw_get_version(filename): ''' Queries and returns the library version tuple or None by using a subprocess. ''' version_checker_source = """ import sys import ctypes def get_version(library_handle): ''' Queries and returns the library version tuple or None. ''' major_value = ctypes.c_int(0) major = ctypes.pointer(major_value) minor_value = ctypes.c_int(0) minor = ctypes.pointer(minor_value) rev_value = ctypes.c_int(0) rev = ctypes.pointer(rev_value) if hasattr(library_handle, 'glfwGetVersion'): library_handle.glfwGetVersion(major, minor, rev) version = (major_value.value, minor_value.value, rev_value.value) return version else: return None try: input_func = raw_input except NameError: input_func = input filename = input_func().strip() try: library_handle = ctypes.CDLL(filename) except OSError: pass else: version = get_version(library_handle) print(version) """ args = [sys.executable, '-c', textwrap.dedent(version_checker_source)] process = subprocess.Popen(args, universal_newlines=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) out = process.communicate(_to_char_p(filename))[0] out = out.strip() if out: return eval(out) else: return None
python
def _glfw_get_version(filename): ''' Queries and returns the library version tuple or None by using a subprocess. ''' version_checker_source = """ import sys import ctypes def get_version(library_handle): ''' Queries and returns the library version tuple or None. ''' major_value = ctypes.c_int(0) major = ctypes.pointer(major_value) minor_value = ctypes.c_int(0) minor = ctypes.pointer(minor_value) rev_value = ctypes.c_int(0) rev = ctypes.pointer(rev_value) if hasattr(library_handle, 'glfwGetVersion'): library_handle.glfwGetVersion(major, minor, rev) version = (major_value.value, minor_value.value, rev_value.value) return version else: return None try: input_func = raw_input except NameError: input_func = input filename = input_func().strip() try: library_handle = ctypes.CDLL(filename) except OSError: pass else: version = get_version(library_handle) print(version) """ args = [sys.executable, '-c', textwrap.dedent(version_checker_source)] process = subprocess.Popen(args, universal_newlines=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) out = process.communicate(_to_char_p(filename))[0] out = out.strip() if out: return eval(out) else: return None
[ "def", "_glfw_get_version", "(", "filename", ")", ":", "version_checker_source", "=", "\"\"\"\n import sys\n import ctypes\n\n def get_version(library_handle):\n '''\n Queries and returns the library version tuple or None.\n '''\n major_value = ctypes.c_int(0)\n major = ctypes.pointer(major_value)\n minor_value = ctypes.c_int(0)\n minor = ctypes.pointer(minor_value)\n rev_value = ctypes.c_int(0)\n rev = ctypes.pointer(rev_value)\n if hasattr(library_handle, 'glfwGetVersion'):\n library_handle.glfwGetVersion(major, minor, rev)\n version = (major_value.value,\n minor_value.value,\n rev_value.value)\n return version\n else:\n return None\n\n try:\n input_func = raw_input\n except NameError:\n input_func = input\n filename = input_func().strip()\n\n try:\n library_handle = ctypes.CDLL(filename)\n except OSError:\n pass\n else:\n version = get_version(library_handle)\n print(version)\n \"\"\"", "args", "=", "[", "sys", ".", "executable", ",", "'-c'", ",", "textwrap", ".", "dedent", "(", "version_checker_source", ")", "]", "process", "=", "subprocess", ".", "Popen", "(", "args", ",", "universal_newlines", "=", "True", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "out", "=", "process", ".", "communicate", "(", "_to_char_p", "(", "filename", ")", ")", "[", "0", "]", "out", "=", "out", ".", "strip", "(", ")", "if", "out", ":", "return", "eval", "(", "out", ")", "else", ":", "return", "None" ]
Queries and returns the library version tuple or None by using a subprocess.
[ "Queries", "and", "returns", "the", "library", "version", "tuple", "or", "None", "by", "using", "a", "subprocess", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L84-L135
2,647
fogleman/pg
pg/glfw.py
set_error_callback
def set_error_callback(cbfun): ''' Sets the error callback. Wrapper for: GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun); ''' global _error_callback previous_callback = _error_callback if cbfun is None: cbfun = 0 c_cbfun = _GLFWerrorfun(cbfun) _error_callback = (cbfun, c_cbfun) cbfun = c_cbfun _glfw.glfwSetErrorCallback(cbfun) if previous_callback is not None and previous_callback[0] != 0: return previous_callback[0]
python
def set_error_callback(cbfun): ''' Sets the error callback. Wrapper for: GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun); ''' global _error_callback previous_callback = _error_callback if cbfun is None: cbfun = 0 c_cbfun = _GLFWerrorfun(cbfun) _error_callback = (cbfun, c_cbfun) cbfun = c_cbfun _glfw.glfwSetErrorCallback(cbfun) if previous_callback is not None and previous_callback[0] != 0: return previous_callback[0]
[ "def", "set_error_callback", "(", "cbfun", ")", ":", "global", "_error_callback", "previous_callback", "=", "_error_callback", "if", "cbfun", "is", "None", ":", "cbfun", "=", "0", "c_cbfun", "=", "_GLFWerrorfun", "(", "cbfun", ")", "_error_callback", "=", "(", "cbfun", ",", "c_cbfun", ")", "cbfun", "=", "c_cbfun", "_glfw", ".", "glfwSetErrorCallback", "(", "cbfun", ")", "if", "previous_callback", "is", "not", "None", "and", "previous_callback", "[", "0", "]", "!=", "0", ":", "return", "previous_callback", "[", "0", "]" ]
Sets the error callback. Wrapper for: GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);
[ "Sets", "the", "error", "callback", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L579-L595
2,648
fogleman/pg
pg/glfw.py
destroy_window
def destroy_window(window): ''' Destroys the specified window and its context. Wrapper for: void glfwDestroyWindow(GLFWwindow* window); ''' _glfw.glfwDestroyWindow(window) window_addr = ctypes.cast(ctypes.pointer(window), ctypes.POINTER(ctypes.c_ulong)).contents.value for callback_repository in _callback_repositories: if window_addr in callback_repository: del callback_repository[window_addr]
python
def destroy_window(window): ''' Destroys the specified window and its context. Wrapper for: void glfwDestroyWindow(GLFWwindow* window); ''' _glfw.glfwDestroyWindow(window) window_addr = ctypes.cast(ctypes.pointer(window), ctypes.POINTER(ctypes.c_ulong)).contents.value for callback_repository in _callback_repositories: if window_addr in callback_repository: del callback_repository[window_addr]
[ "def", "destroy_window", "(", "window", ")", ":", "_glfw", ".", "glfwDestroyWindow", "(", "window", ")", "window_addr", "=", "ctypes", ".", "cast", "(", "ctypes", ".", "pointer", "(", "window", ")", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_ulong", ")", ")", ".", "contents", ".", "value", "for", "callback_repository", "in", "_callback_repositories", ":", "if", "window_addr", "in", "callback_repository", ":", "del", "callback_repository", "[", "window_addr", "]" ]
Destroys the specified window and its context. Wrapper for: void glfwDestroyWindow(GLFWwindow* window);
[ "Destroys", "the", "specified", "window", "and", "its", "context", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L798-L810
2,649
fogleman/pg
pg/util.py
normalize
def normalize(vector): '''Normalizes the `vector` so that its length is 1. `vector` can have any number of components. ''' d = sum(x * x for x in vector) ** 0.5 return tuple(x / d for x in vector)
python
def normalize(vector): '''Normalizes the `vector` so that its length is 1. `vector` can have any number of components. ''' d = sum(x * x for x in vector) ** 0.5 return tuple(x / d for x in vector)
[ "def", "normalize", "(", "vector", ")", ":", "d", "=", "sum", "(", "x", "*", "x", "for", "x", "in", "vector", ")", "**", "0.5", "return", "tuple", "(", "x", "/", "d", "for", "x", "in", "vector", ")" ]
Normalizes the `vector` so that its length is 1. `vector` can have any number of components.
[ "Normalizes", "the", "vector", "so", "that", "its", "length", "is", "1", ".", "vector", "can", "have", "any", "number", "of", "components", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L14-L19
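In doctest form (a 3-4-5 triple keeps the printed floats exact):

>>> normalize((3, 0, 4))
(0.6, 0.0, 0.8)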
2,650
fogleman/pg
pg/util.py
distance
def distance(p1, p2): '''Computes and returns the distance between two points, `p1` and `p2`. The points can have any number of components. ''' return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
python
def distance(p1, p2): '''Computes and returns the distance between two points, `p1` and `p2`. The points can have any number of components. ''' return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
[ "def", "distance", "(", "p1", ",", "p2", ")", ":", "return", "sum", "(", "(", "a", "-", "b", ")", "**", "2", "for", "a", ",", "b", "in", "zip", "(", "p1", ",", "p2", ")", ")", "**", "0.5" ]
Computes and returns the distance between two points, `p1` and `p2`. The points can have any number of components.
[ "Computes", "and", "returns", "the", "distance", "between", "two", "points", "p1", "and", "p2", ".", "The", "points", "can", "have", "any", "number", "of", "components", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L21-L25
2,651
fogleman/pg
pg/util.py
cross
def cross(v1, v2): '''Computes the cross product of two vectors. ''' return ( v1[1] * v2[2] - v1[2] * v2[1], v1[2] * v2[0] - v1[0] * v2[2], v1[0] * v2[1] - v1[1] * v2[0], )
python
def cross(v1, v2): '''Computes the cross product of two vectors. ''' return ( v1[1] * v2[2] - v1[2] * v2[1], v1[2] * v2[0] - v1[0] * v2[2], v1[0] * v2[1] - v1[1] * v2[0], )
[ "def", "cross", "(", "v1", ",", "v2", ")", ":", "return", "(", "v1", "[", "1", "]", "*", "v2", "[", "2", "]", "-", "v1", "[", "2", "]", "*", "v2", "[", "1", "]", ",", "v1", "[", "2", "]", "*", "v2", "[", "0", "]", "-", "v1", "[", "0", "]", "*", "v2", "[", "2", "]", ",", "v1", "[", "0", "]", "*", "v2", "[", "1", "]", "-", "v1", "[", "1", "]", "*", "v2", "[", "0", "]", ",", ")" ]
Computes the cross product of two vectors.
[ "Computes", "the", "cross", "product", "of", "two", "vectors", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L27-L34
2,652
fogleman/pg
pg/util.py
dot
def dot(v1, v2): '''Computes the dot product of two vectors. ''' x1, y1, z1 = v1 x2, y2, z2 = v2 return x1 * x2 + y1 * y2 + z1 * z2
python
def dot(v1, v2): '''Computes the dot product of two vectors. ''' x1, y1, z1 = v1 x2, y2, z2 = v2 return x1 * x2 + y1 * y2 + z1 * z2
[ "def", "dot", "(", "v1", ",", "v2", ")", ":", "x1", ",", "y1", ",", "z1", "=", "v1", "x2", ",", "y2", ",", "z2", "=", "v2", "return", "x1", "*", "x2", "+", "y1", "*", "y2", "+", "z1", "*", "z2" ]
Computes the dot product of two vectors.
[ "Computes", "the", "dot", "product", "of", "two", "vectors", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L36-L41
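In doctest form, combining cross and dot: the cross product of two basis vectors is the third, and the dot product confirms it is orthogonal to its inputs:

>>> n = cross((1, 0, 0), (0, 1, 0))
>>> n
(0, 0, 1)
>>> dot(n, (1, 0, 0))
0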
2,653
fogleman/pg
pg/util.py
add
def add(v1, v2): '''Adds two vectors. ''' return tuple(a + b for a, b in zip(v1, v2))
python
def add(v1, v2): '''Adds two vectors. ''' return tuple(a + b for a, b in zip(v1, v2))
[ "def", "add", "(", "v1", ",", "v2", ")", ":", "return", "tuple", "(", "a", "+", "b", "for", "a", ",", "b", "in", "zip", "(", "v1", ",", "v2", ")", ")" ]
Adds two vectors.
[ "Adds", "two", "vectors", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L43-L46
2,654
fogleman/pg
pg/util.py
sub
def sub(v1, v2): '''Subtracts two vectors. ''' return tuple(a - b for a, b in zip(v1, v2))
python
def sub(v1, v2): '''Subtracts two vectors. ''' return tuple(a - b for a, b in zip(v1, v2))
[ "def", "sub", "(", "v1", ",", "v2", ")", ":", "return", "tuple", "(", "a", "-", "b", "for", "a", ",", "b", "in", "zip", "(", "v1", ",", "v2", ")", ")" ]
Subtracts two vectors.
[ "Subtracts", "two", "vectors", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L48-L51
2,655
fogleman/pg
pg/util.py
interpolate
def interpolate(v1, v2, t): '''Interpolate from one vector to another. ''' return add(v1, mul(sub(v2, v1), t))
python
def interpolate(v1, v2, t): '''Interpolate from one vector to another. ''' return add(v1, mul(sub(v2, v1), t))
[ "def", "interpolate", "(", "v1", ",", "v2", ",", "t", ")", ":", "return", "add", "(", "v1", ",", "mul", "(", "sub", "(", "v2", ",", "v1", ")", ",", "t", ")", ")" ]
Interpolate from one vector to another.
[ "Interpolate", "from", "one", "vector", "to", "another", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L63-L66
2,656
fogleman/pg
pg/util.py
normal_from_points
def normal_from_points(a, b, c): '''Computes a normal vector given three points. ''' x1, y1, z1 = a x2, y2, z2 = b x3, y3, z3 = c ab = (x2 - x1, y2 - y1, z2 - z1) ac = (x3 - x1, y3 - y1, z3 - z1) x, y, z = cross(ab, ac) d = (x * x + y * y + z * z) ** 0.5 return (x / d, y / d, z / d)
python
def normal_from_points(a, b, c): '''Computes a normal vector given three points. ''' x1, y1, z1 = a x2, y2, z2 = b x3, y3, z3 = c ab = (x2 - x1, y2 - y1, z2 - z1) ac = (x3 - x1, y3 - y1, z3 - z1) x, y, z = cross(ab, ac) d = (x * x + y * y + z * z) ** 0.5 return (x / d, y / d, z / d)
[ "def", "normal_from_points", "(", "a", ",", "b", ",", "c", ")", ":", "x1", ",", "y1", ",", "z1", "=", "a", "x2", ",", "y2", ",", "z2", "=", "b", "x3", ",", "y3", ",", "z3", "=", "c", "ab", "=", "(", "x2", "-", "x1", ",", "y2", "-", "y1", ",", "z2", "-", "z1", ")", "ac", "=", "(", "x3", "-", "x1", ",", "y3", "-", "y1", ",", "z3", "-", "z1", ")", "x", ",", "y", ",", "z", "=", "cross", "(", "ab", ",", "ac", ")", "d", "=", "(", "x", "*", "x", "+", "y", "*", "y", "+", "z", "*", "z", ")", "**", "0.5", "return", "(", "x", "/", "d", ",", "y", "/", "d", ",", "z", "/", "d", ")" ]
Computes a normal vector given three points.
[ "Computes", "a", "normal", "vector", "given", "three", "points", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L68-L78
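In doctest form (counter-clockwise winding in the XY plane yields the +Z unit normal):

>>> normal_from_points((0, 0, 0), (1, 0, 0), (0, 1, 0))
(0.0, 0.0, 1.0)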
2,657
fogleman/pg
pg/util.py
smooth_normals
def smooth_normals(positions, normals): '''Assigns an averaged normal to each position based on all of the normals originally used for the position. ''' lookup = defaultdict(list) for position, normal in zip(positions, normals): lookup[position].append(normal) result = [] for position in positions: tx = ty = tz = 0 for x, y, z in lookup[position]: tx += x ty += y tz += z d = (tx * tx + ty * ty + tz * tz) ** 0.5 result.append((tx / d, ty / d, tz / d)) return result
python
def smooth_normals(positions, normals): '''Assigns an averaged normal to each position based on all of the normals originally used for the position. ''' lookup = defaultdict(list) for position, normal in zip(positions, normals): lookup[position].append(normal) result = [] for position in positions: tx = ty = tz = 0 for x, y, z in lookup[position]: tx += x ty += y tz += z d = (tx * tx + ty * ty + tz * tz) ** 0.5 result.append((tx / d, ty / d, tz / d)) return result
[ "def", "smooth_normals", "(", "positions", ",", "normals", ")", ":", "lookup", "=", "defaultdict", "(", "list", ")", "for", "position", ",", "normal", "in", "zip", "(", "positions", ",", "normals", ")", ":", "lookup", "[", "position", "]", ".", "append", "(", "normal", ")", "result", "=", "[", "]", "for", "position", "in", "positions", ":", "tx", "=", "ty", "=", "tz", "=", "0", "for", "x", ",", "y", ",", "z", "in", "lookup", "[", "position", "]", ":", "tx", "+=", "x", "ty", "+=", "y", "tz", "+=", "z", "d", "=", "(", "tx", "*", "tx", "+", "ty", "*", "ty", "+", "tz", "*", "tz", ")", "**", "0.5", "result", ".", "append", "(", "(", "tx", "/", "d", ",", "ty", "/", "d", ",", "tz", "/", "d", ")", ")", "return", "result" ]
Assigns an averaged normal to each position based on all of the normals originally used for the position.
[ "Assigns", "an", "averaged", "normal", "to", "each", "position", "based", "on", "all", "of", "the", "normals", "originally", "used", "for", "the", "position", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L80-L96
2,658
fogleman/pg
pg/util.py
bounding_box
def bounding_box(positions): '''Computes the bounding box for a list of 3-dimensional points. ''' (x0, y0, z0) = (x1, y1, z1) = positions[0] for x, y, z in positions: x0 = min(x0, x) y0 = min(y0, y) z0 = min(z0, z) x1 = max(x1, x) y1 = max(y1, y) z1 = max(z1, z) return (x0, y0, z0), (x1, y1, z1)
python
def bounding_box(positions): '''Computes the bounding box for a list of 3-dimensional points. ''' (x0, y0, z0) = (x1, y1, z1) = positions[0] for x, y, z in positions: x0 = min(x0, x) y0 = min(y0, y) z0 = min(z0, z) x1 = max(x1, x) y1 = max(y1, y) z1 = max(z1, z) return (x0, y0, z0), (x1, y1, z1)
[ "def", "bounding_box", "(", "positions", ")", ":", "(", "x0", ",", "y0", ",", "z0", ")", "=", "(", "x1", ",", "y1", ",", "z1", ")", "=", "positions", "[", "0", "]", "for", "x", ",", "y", ",", "z", "in", "positions", ":", "x0", "=", "min", "(", "x0", ",", "x", ")", "y0", "=", "min", "(", "y0", ",", "y", ")", "z0", "=", "min", "(", "z0", ",", "z", ")", "x1", "=", "max", "(", "x1", ",", "x", ")", "y1", "=", "max", "(", "y1", ",", "y", ")", "z1", "=", "max", "(", "z1", ",", "z", ")", "return", "(", "x0", ",", "y0", ",", "z0", ")", ",", "(", "x1", ",", "y1", ",", "z1", ")" ]
Computes the bounding box for a list of 3-dimensional points.
[ "Computes", "the", "bounding", "box", "for", "a", "list", "of", "3", "-", "dimensional", "points", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L98-L109
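In doctest form (the result is the component-wise minimum corner followed by the maximum corner):

>>> bounding_box([(1, 2, 3), (-1, 5, 0), (4, 0, 2)])
((-1, 0, 0), (4, 5, 3))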
2,659
fogleman/pg
pg/util.py
recenter
def recenter(positions): '''Returns a list of new positions centered around the origin. ''' (x0, y0, z0), (x1, y1, z1) = bounding_box(positions) dx = x1 - (x1 - x0) / 2.0 dy = y1 - (y1 - y0) / 2.0 dz = z1 - (z1 - z0) / 2.0 result = [] for x, y, z in positions: result.append((x - dx, y - dy, z - dz)) return result
python
def recenter(positions): '''Returns a list of new positions centered around the origin. ''' (x0, y0, z0), (x1, y1, z1) = bounding_box(positions) dx = x1 - (x1 - x0) / 2.0 dy = y1 - (y1 - y0) / 2.0 dz = z1 - (z1 - z0) / 2.0 result = [] for x, y, z in positions: result.append((x - dx, y - dy, z - dz)) return result
[ "def", "recenter", "(", "positions", ")", ":", "(", "x0", ",", "y0", ",", "z0", ")", ",", "(", "x1", ",", "y1", ",", "z1", ")", "=", "bounding_box", "(", "positions", ")", "dx", "=", "x1", "-", "(", "x1", "-", "x0", ")", "/", "2.0", "dy", "=", "y1", "-", "(", "y1", "-", "y0", ")", "/", "2.0", "dz", "=", "z1", "-", "(", "z1", "-", "z0", ")", "/", "2.0", "result", "=", "[", "]", "for", "x", ",", "y", ",", "z", "in", "positions", ":", "result", ".", "append", "(", "(", "x", "-", "dx", ",", "y", "-", "dy", ",", "z", "-", "dz", ")", ")", "return", "result" ]
Returns a list of new positions centered around the origin.
[ "Returns", "a", "list", "of", "new", "positions", "centered", "around", "the", "origin", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L111-L121
2,660
fogleman/pg
pg/util.py
interleave
def interleave(*args): '''Interleaves the elements of the provided arrays. >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)] >>> b = [(0, 0), (0, 1), (0, 2), (0, 3)] >>> interleave(a, b) [(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)] This is useful for combining multiple vertex attributes into a single vertex buffer. The shader attributes can be assigned a slice of the vertex buffer. ''' result = [] for array in zip(*args): result.append(tuple(flatten(array))) return result
python
def interleave(*args): '''Interleaves the elements of the provided arrays. >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)] >>> b = [(0, 0), (0, 1), (0, 2), (0, 3)] >>> interleave(a, b) [(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)] This is useful for combining multiple vertex attributes into a single vertex buffer. The shader attributes can be assigned a slice of the vertex buffer. ''' result = [] for array in zip(*args): result.append(tuple(flatten(array))) return result
[ "def", "interleave", "(", "*", "args", ")", ":", "result", "=", "[", "]", "for", "array", "in", "zip", "(", "*", "args", ")", ":", "result", ".", "append", "(", "tuple", "(", "flatten", "(", "array", ")", ")", ")", "return", "result" ]
Interleaves the elements of the provided arrays. >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)] >>> b = [(0, 0), (0, 1), (0, 2), (0, 3)] >>> interleave(a, b) [(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)] This is useful for combining multiple vertex attributes into a single vertex buffer. The shader attributes can be assigned a slice of the vertex buffer.
[ "Interleaves", "the", "elements", "of", "the", "provided", "arrays", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L123-L138
2,661
fogleman/pg
pg/util.py
distinct
def distinct(iterable, keyfunc=None): '''Yields distinct items from `iterable` in the order that they appear. ''' seen = set() for item in iterable: key = item if keyfunc is None else keyfunc(item) if key not in seen: seen.add(key) yield item
python
def distinct(iterable, keyfunc=None): '''Yields distinct items from `iterable` in the order that they appear. ''' seen = set() for item in iterable: key = item if keyfunc is None else keyfunc(item) if key not in seen: seen.add(key) yield item
[ "def", "distinct", "(", "iterable", ",", "keyfunc", "=", "None", ")", ":", "seen", "=", "set", "(", ")", "for", "item", "in", "iterable", ":", "key", "=", "item", "if", "keyfunc", "is", "None", "else", "keyfunc", "(", "item", ")", "if", "key", "not", "in", "seen", ":", "seen", ".", "add", "(", "key", ")", "yield", "item" ]
Yields distinct items from `iterable` in the order that they appear.
[ "Yields", "distinct", "items", "from", "iterable", "in", "the", "order", "that", "they", "appear", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L154-L162
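In doctest form (when keyfunc is given, the first item seen for each key wins):

>>> list(distinct([3, 1, 3, 2, 1]))
[3, 1, 2]
>>> list(distinct(["a", "B", "A"], keyfunc=str.lower))
['a', 'B']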
2,662
fogleman/pg
pg/util.py
ray_triangle_intersection
def ray_triangle_intersection(v1, v2, v3, o, d): '''Computes the distance from a point to a triangle given a ray. ''' eps = 1e-6 e1 = sub(v2, v1) e2 = sub(v3, v1) p = cross(d, e2) det = dot(e1, p) if abs(det) < eps: return None inv = 1.0 / det t = sub(o, v1) u = dot(t, p) * inv if u < 0 or u > 1: return None q = cross(t, e1) v = dot(d, q) * inv if v < 0 or v > 1: return None t = dot(e2, q) * inv if t > eps: return t return None
python
def ray_triangle_intersection(v1, v2, v3, o, d): '''Computes the distance from a point to a triangle given a ray. ''' eps = 1e-6 e1 = sub(v2, v1) e2 = sub(v3, v1) p = cross(d, e2) det = dot(e1, p) if abs(det) < eps: return None inv = 1.0 / det t = sub(o, v1) u = dot(t, p) * inv if u < 0 or u > 1: return None q = cross(t, e1) v = dot(d, q) * inv if v < 0 or v > 1: return None t = dot(e2, q) * inv if t > eps: return t return None
[ "def", "ray_triangle_intersection", "(", "v1", ",", "v2", ",", "v3", ",", "o", ",", "d", ")", ":", "eps", "=", "1e-6", "e1", "=", "sub", "(", "v2", ",", "v1", ")", "e2", "=", "sub", "(", "v3", ",", "v1", ")", "p", "=", "cross", "(", "d", ",", "e2", ")", "det", "=", "dot", "(", "e1", ",", "p", ")", "if", "abs", "(", "det", ")", "<", "eps", ":", "return", "None", "inv", "=", "1.0", "/", "det", "t", "=", "sub", "(", "o", ",", "v1", ")", "u", "=", "dot", "(", "t", ",", "p", ")", "*", "inv", "if", "u", "<", "0", "or", "u", ">", "1", ":", "return", "None", "q", "=", "cross", "(", "t", ",", "e1", ")", "v", "=", "dot", "(", "d", ",", "q", ")", "*", "inv", "if", "v", "<", "0", "or", "v", ">", "1", ":", "return", "None", "t", "=", "dot", "(", "e2", ",", "q", ")", "*", "inv", "if", "t", ">", "eps", ":", "return", "t", "return", "None" ]
Computes the distance from a point to a triangle given a ray.
[ "Computes", "the", "distance", "from", "a", "point", "to", "a", "triangle", "given", "a", "ray", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L164-L186
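In doctest form: a ray fired straight down from z=5 at the unit triangle in the XY plane hits at distance 5.0. Note that unlike the canonical Möller-Trumbore test, this variant checks u <= 1 and v <= 1 separately rather than u + v <= 1, so it also accepts hits in the rest of the parallelogram spanned by the triangle's edges:

>>> ray_triangle_intersection((0, 0, 0), (1, 0, 0), (0, 1, 0), (0.25, 0.25, 5), (0, 0, -1))
5.0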
2,663
fogleman/pg
pg/util.py
pack_list
def pack_list(fmt, data): '''Convert a Python list into a ctypes buffer. This appears to be faster than the typical method of creating a ctypes array, e.g. (c_float * len(data))(*data) ''' func = struct.Struct(fmt).pack return create_string_buffer(''.join([func(x) for x in data]))
python
def pack_list(fmt, data): '''Convert a Python list into a ctypes buffer. This appears to be faster than the typical method of creating a ctypes array, e.g. (c_float * len(data))(*data) ''' func = struct.Struct(fmt).pack return create_string_buffer(''.join([func(x) for x in data]))
[ "def", "pack_list", "(", "fmt", ",", "data", ")", ":", "func", "=", "struct", ".", "Struct", "(", "fmt", ")", ".", "pack", "return", "create_string_buffer", "(", "''", ".", "join", "(", "[", "func", "(", "x", ")", "for", "x", "in", "data", "]", ")", ")" ]
Convert a Python list into a ctypes buffer. This appears to be faster than the typical method of creating a ctypes array, e.g. (c_float * len(data))(*data)
[ "Convert", "a", "Python", "list", "into", "a", "ctypes", "buffer", "." ]
124ea3803c788b2c98c4f3a428e5d26842a67b58
https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L188-L195
2,664
wgnet/webium
webium/controls/click.py
Clickable.click
def click(self, jquery=False): """ Click via JQuery when jquery=True, otherwise via the standard WebElement click. """ if jquery: e = JQuery(self) e.click() else: super(Clickable, self).click()
python
def click(self, jquery=False): if jquery: e = JQuery(self) e.click() else: super(Clickable, self).click()
[ "def", "click", "(", "self", ",", "jquery", "=", "False", ")", ":", "if", "jquery", ":", "e", "=", "JQuery", "(", "self", ")", "e", ".", "click", "(", ")", "else", ":", "super", "(", "Clickable", ",", "self", ")", ".", "click", "(", ")" ]
Click via JQuery when jquery=True, otherwise via the standard WebElement click.
[ "Click", "via", "JQuery", "when", "jquery", "=", "True", "otherwise", "via", "the", "standard", "WebElement", "click", "." ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/controls/click.py#L8-L16
2,665
wgnet/webium
webium/cookie.py
add_cookies_to_web_driver
def add_cookies_to_web_driver(driver, cookies): """ Sets cookies in an existing WebDriver session. """ for cookie in cookies: driver.add_cookie(convert_cookie_to_dict(cookie)) return driver
python
def add_cookies_to_web_driver(driver, cookies): for cookie in cookies: driver.add_cookie(convert_cookie_to_dict(cookie)) return driver
[ "def", "add_cookies_to_web_driver", "(", "driver", ",", "cookies", ")", ":", "for", "cookie", "in", "cookies", ":", "driver", ".", "add_cookie", "(", "convert_cookie_to_dict", "(", "cookie", ")", ")", "return", "driver" ]
Sets cookies in an existing WebDriver session.
[ "Sets", "cookies", "in", "an", "existing", "WebDriver", "session", "." ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/cookie.py#L41-L47
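A hedged usage sketch that carries a requests session's cookies into Selenium; convert_cookie_to_dict (defined alongside this helper but not shown here) is assumed to accept cookielib-style Cookie objects, and the URLs are hypothetical:

import requests
from selenium import webdriver

session = requests.Session()
session.get("https://example.com/login")  # populates session.cookies
driver = webdriver.Firefox()
driver.get("https://example.com")  # Selenium only accepts cookies for the currently loaded domain
add_cookies_to_web_driver(driver, session.cookies)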
2,666
wgnet/webium
webium/plugins/browser_closer.py
BrowserCloserPlugin.configure
def configure(self, options, conf): """Configure plugin. Plugin is enabled by default. """ self.conf = conf self.when = options.browser_closer_when
python
def configure(self, options, conf): self.conf = conf self.when = options.browser_closer_when
[ "def", "configure", "(", "self", ",", "options", ",", "conf", ")", ":", "self", ".", "conf", "=", "conf", "self", ".", "when", "=", "options", ".", "browser_closer_when" ]
Configure plugin. Plugin is enabled by default.
[ "Configure", "plugin", ".", "Plugin", "is", "enabled", "by", "default", "." ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/plugins/browser_closer.py#L25-L29
2,667
alecthomas/importmagic
importmagic/index.py
SymbolIndex.index_path
def index_path(self, root): """Index a path. :param root: Either a package directory, a .so or a .py module. """ basename = os.path.basename(root) if os.path.splitext(basename)[0] != '__init__' and basename.startswith('_'): return location = self._determine_location_for(root) if os.path.isfile(root): self._index_module(root, location) elif os.path.isdir(root) and os.path.exists(os.path.join(root, '__init__.py')): self._index_package(root, location)
python
def index_path(self, root): basename = os.path.basename(root) if os.path.splitext(basename)[0] != '__init__' and basename.startswith('_'): return location = self._determine_location_for(root) if os.path.isfile(root): self._index_module(root, location) elif os.path.isdir(root) and os.path.exists(os.path.join(root, '__init__.py')): self._index_package(root, location)
[ "def", "index_path", "(", "self", ",", "root", ")", ":", "basename", "=", "os", ".", "path", ".", "basename", "(", "root", ")", "if", "os", ".", "path", ".", "splitext", "(", "basename", ")", "[", "0", "]", "!=", "'__init__'", "and", "basename", ".", "startswith", "(", "'_'", ")", ":", "return", "location", "=", "self", ".", "_determine_location_for", "(", "root", ")", "if", "os", ".", "path", ".", "isfile", "(", "root", ")", ":", "self", ".", "_index_module", "(", "root", ",", "location", ")", "elif", "os", ".", "path", ".", "isdir", "(", "root", ")", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root", ",", "'__init__.py'", ")", ")", ":", "self", ".", "_index_package", "(", "root", ",", "location", ")" ]
Index a path. :param root: Either a package directory, a .so or a .py module.
[ "Index", "a", "path", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/index.py#L148-L160
2,668
alecthomas/importmagic
importmagic/index.py
SymbolIndex.get_or_create_index
def get_or_create_index(self, paths=None, name=None, refresh=False): """ Get index with given name from cache. Create it if it doesn't exist. """ if not paths: paths = sys.path if not name: name = 'default' self._name = name idx_dir = get_cache_dir() idx_file = os.path.join(idx_dir, name + '.json') if os.path.exists(idx_file) and not refresh: with open(idx_file) as fd: self.deserialize(fd) else: self.build_index(paths) with open(idx_file, 'w') as fd: self.serialize(fd) return self
python
def get_or_create_index(self, paths=None, name=None, refresh=False): if not paths: paths = sys.path if not name: name = 'default' self._name = name idx_dir = get_cache_dir() idx_file = os.path.join(idx_dir, name + '.json') if os.path.exists(idx_file) and not refresh: with open(idx_file) as fd: self.deserialize(fd) else: self.build_index(paths) with open(idx_file, 'w') as fd: self.serialize(fd) return self
[ "def", "get_or_create_index", "(", "self", ",", "paths", "=", "None", ",", "name", "=", "None", ",", "refresh", "=", "False", ")", ":", "if", "not", "paths", ":", "paths", "=", "sys", ".", "path", "if", "not", "name", ":", "name", "=", "'default'", "self", ".", "_name", "=", "name", "idx_dir", "=", "get_cache_dir", "(", ")", "idx_file", "=", "os", ".", "path", ".", "join", "(", "idx_dir", ",", "name", "+", "'.json'", ")", "if", "os", ".", "path", ".", "exists", "(", "idx_file", ")", "and", "not", "refresh", ":", "with", "open", "(", "idx_file", ")", "as", "fd", ":", "self", ".", "deserialize", "(", "fd", ")", "else", ":", "self", ".", "build_index", "(", "paths", ")", "with", "open", "(", "idx_file", ",", "'w'", ")", "as", "fd", ":", "self", ".", "serialize", "(", "fd", ")", "return", "self" ]
Get index with given name from cache. Create it if it doesn't exist.
[ "Get", "index", "with", "given", "name", "from", "cache", ".", "Create", "it", "if", "it", "doesn", "t", "exist", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/index.py#L208-L229
2,669
alecthomas/importmagic
importmagic/index.py
SymbolIndex.symbol_scores
def symbol_scores(self, symbol): """Find matches for symbol. :param symbol: A . separated symbol. eg. 'os.path.basename' :returns: A list of tuples of (score, package, reference|None), ordered by score from highest to lowest. """ scores = [] path = [] # sys.path sys path -> import sys # os.path.basename os.path basename -> import os.path # basename os.path basename -> from os.path import basename # path.basename os.path basename -> from os import path def fixup(module, variable): prefix = module.split('.') if variable is not None: prefix.append(variable) seeking = symbol.split('.') new_module = [] while prefix and seeking[0] != prefix[0]: new_module.append(prefix.pop(0)) if new_module: module, variable = '.'.join(new_module), prefix[0] else: variable = None return module, variable def score_walk(scope, scale): sub_path, score = self._score_key(scope, full_key) if score > 0.1: try: i = sub_path.index(None) sub_path, from_symbol = sub_path[:i], '.'.join(sub_path[i + 1:]) except ValueError: from_symbol = None package_path = '.'.join(path + sub_path) package_path, from_symbol = fixup(package_path, from_symbol) scores.append((score * scale, package_path, from_symbol)) for key, subscope in scope._tree.items(): if type(subscope) is not float: path.append(key) score_walk(subscope, subscope.score * scale - 0.1) path.pop() full_key = symbol.split('.') score_walk(self, 1.0) scores.sort(reverse=True) return scores
python
def symbol_scores(self, symbol): scores = [] path = [] # sys.path sys path -> import sys # os.path.basename os.path basename -> import os.path # basename os.path basename -> from os.path import basename # path.basename os.path basename -> from os import path def fixup(module, variable): prefix = module.split('.') if variable is not None: prefix.append(variable) seeking = symbol.split('.') new_module = [] while prefix and seeking[0] != prefix[0]: new_module.append(prefix.pop(0)) if new_module: module, variable = '.'.join(new_module), prefix[0] else: variable = None return module, variable def score_walk(scope, scale): sub_path, score = self._score_key(scope, full_key) if score > 0.1: try: i = sub_path.index(None) sub_path, from_symbol = sub_path[:i], '.'.join(sub_path[i + 1:]) except ValueError: from_symbol = None package_path = '.'.join(path + sub_path) package_path, from_symbol = fixup(package_path, from_symbol) scores.append((score * scale, package_path, from_symbol)) for key, subscope in scope._tree.items(): if type(subscope) is not float: path.append(key) score_walk(subscope, subscope.score * scale - 0.1) path.pop() full_key = symbol.split('.') score_walk(self, 1.0) scores.sort(reverse=True) return scores
[ "def", "symbol_scores", "(", "self", ",", "symbol", ")", ":", "scores", "=", "[", "]", "path", "=", "[", "]", "# sys.path sys path -> import sys", "# os.path.basename os.path basename -> import os.path", "# basename os.path basename -> from os.path import basename", "# path.basename os.path basename -> from os import path", "def", "fixup", "(", "module", ",", "variable", ")", ":", "prefix", "=", "module", ".", "split", "(", "'.'", ")", "if", "variable", "is", "not", "None", ":", "prefix", ".", "append", "(", "variable", ")", "seeking", "=", "symbol", ".", "split", "(", "'.'", ")", "new_module", "=", "[", "]", "while", "prefix", "and", "seeking", "[", "0", "]", "!=", "prefix", "[", "0", "]", ":", "new_module", ".", "append", "(", "prefix", ".", "pop", "(", "0", ")", ")", "if", "new_module", ":", "module", ",", "variable", "=", "'.'", ".", "join", "(", "new_module", ")", ",", "prefix", "[", "0", "]", "else", ":", "variable", "=", "None", "return", "module", ",", "variable", "def", "score_walk", "(", "scope", ",", "scale", ")", ":", "sub_path", ",", "score", "=", "self", ".", "_score_key", "(", "scope", ",", "full_key", ")", "if", "score", ">", "0.1", ":", "try", ":", "i", "=", "sub_path", ".", "index", "(", "None", ")", "sub_path", ",", "from_symbol", "=", "sub_path", "[", ":", "i", "]", ",", "'.'", ".", "join", "(", "sub_path", "[", "i", "+", "1", ":", "]", ")", "except", "ValueError", ":", "from_symbol", "=", "None", "package_path", "=", "'.'", ".", "join", "(", "path", "+", "sub_path", ")", "package_path", ",", "from_symbol", "=", "fixup", "(", "package_path", ",", "from_symbol", ")", "scores", ".", "append", "(", "(", "score", "*", "scale", ",", "package_path", ",", "from_symbol", ")", ")", "for", "key", ",", "subscope", "in", "scope", ".", "_tree", ".", "items", "(", ")", ":", "if", "type", "(", "subscope", ")", "is", "not", "float", ":", "path", ".", "append", "(", "key", ")", "score_walk", "(", "subscope", ",", "subscope", ".", "score", "*", "scale", "-", "0.1", ")", "path", ".", "pop", "(", ")", "full_key", "=", "symbol", ".", "split", "(", "'.'", ")", "score_walk", "(", "self", ",", "1.0", ")", "scores", ".", "sort", "(", "reverse", "=", "True", ")", "return", "scores" ]
Find matches for symbol. :param symbol: A . separated symbol. eg. 'os.path.basename' :returns: A list of tuples of (score, package, reference|None), ordered by score from highest to lowest.
[ "Find", "matches", "for", "symbol", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/index.py#L231-L280
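The (score, package, reference) tuples returned by symbol_scores map directly onto the two import forms sketched in the docstring's comment table. A minimal sketch of how a caller might render them; render_import and the sample tuples are illustrative, not actual library output:
import sys  # only for printing in this sketch

def render_import(package, reference):
    # reference is None  -> "import package"
    # reference set      -> "from package import reference"
    if reference is None:
        return 'import {}'.format(package)
    return 'from {} import {}'.format(package, reference)

results = [(1.0, 'os.path', 'basename'), (0.7, 'os', 'path')]  # hypothetical scores
for score, package, reference in results:
    print('{:.1f}  {}'.format(score, render_import(package, reference)))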
2,670
alecthomas/importmagic
importmagic/index.py
SymbolIndex.find
def find(self, path): """Return the node for a path, or None.""" path = path.split('.') node = self while node._parent: node = node._parent for name in path: node = node._tree.get(name, None) if node is None or type(node) is float: return None return node
python
def find(self, path): path = path.split('.') node = self while node._parent: node = node._parent for name in path: node = node._tree.get(name, None) if node is None or type(node) is float: return None return node
[ "def", "find", "(", "self", ",", "path", ")", ":", "path", "=", "path", ".", "split", "(", "'.'", ")", "node", "=", "self", "while", "node", ".", "_parent", ":", "node", "=", "node", ".", "_parent", "for", "name", "in", "path", ":", "node", "=", "node", ".", "_tree", ".", "get", "(", "name", ",", "None", ")", "if", "node", "is", "None", "or", "type", "(", "node", ")", "is", "float", ":", "return", "None", "return", "node" ]
Return the node for a path, or None.
[ "Return", "the", "node", "for", "a", "path", "or", "None", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/index.py#L302-L312
2,671
alecthomas/importmagic
importmagic/index.py
SymbolIndex.location_for
def location_for(self, path): """Return the location code for a path.""" path = path.split('.') node = self while node._parent: node = node._parent location = node.location for name in path: tree = node._tree.get(name, None) if tree is None or type(tree) is float: return location location = tree.location return location
python
def location_for(self, path): path = path.split('.') node = self while node._parent: node = node._parent location = node.location for name in path: tree = node._tree.get(name, None) if tree is None or type(tree) is float: return location location = tree.location return location
[ "def", "location_for", "(", "self", ",", "path", ")", ":", "path", "=", "path", ".", "split", "(", "'.'", ")", "node", "=", "self", "while", "node", ".", "_parent", ":", "node", "=", "node", ".", "_parent", "location", "=", "node", ".", "location", "for", "name", "in", "path", ":", "tree", "=", "node", ".", "_tree", ".", "get", "(", "name", ",", "None", ")", "if", "tree", "is", "None", "or", "type", "(", "tree", ")", "is", "float", ":", "return", "location", "location", "=", "tree", ".", "location", "return", "location" ]
Return the location code for a path.
[ "Return", "the", "location", "code", "for", "a", "path", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/index.py#L314-L326
2,672
wgnet/webium
webium/controls/select.py
Select.select_option
def select_option(self, option):
        """
        Performs selection of provided item from Web List
        @param option - string item name
        """
        items_list = self.get_options()
        for item in items_list:
            if item.get_attribute("value") == option:
                item.click()
                break
python
def select_option(self, option): items_list = self.get_options() for item in items_list: if item.get_attribute("value") == option: item.click() break
[ "def", "select_option", "(", "self", ",", "option", ")", ":", "items_list", "=", "self", ".", "get_options", "(", ")", "for", "item", "in", "items_list", ":", "if", "item", ".", "get_attribute", "(", "\"value\"", ")", "==", "option", ":", "item", ".", "click", "(", ")", "break" ]
Performs selection of provided item from Web List
        @param option - string item name
[ "Performs", "selection", "of", "provided", "item", "from", "Web", "List" ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/controls/select.py#L15-L25
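Note that select_option matches on each option's value attribute, not its visible text. A self-contained sketch with a fake option object (a hypothetical stand-in for a Selenium WebElement, exposing only the two methods the loop relies on) makes the distinction concrete:
class FakeOption:
    # Hypothetical stand-in for a WebElement with get_attribute() and click().
    def __init__(self, value, text):
        self.value, self.text, self.clicked = value, text, False

    def get_attribute(self, name):
        return self.value if name == 'value' else None

    def click(self):
        self.clicked = True

options = [FakeOption('DE', 'Germany'), FakeOption('FR', 'France')]
for item in options:  # mirrors the loop body of select_option
    if item.get_attribute('value') == 'FR':
        item.click()
        break
assert options[1].clicked and not options[0].clicked  # matched by value, not text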
2,673
wgnet/webium
webium/controls/select.py
Select.get_attribute_selected
def get_attribute_selected(self, attribute):
        """
        Performs search of selected item from Web List
        Return attribute of selected item
        @param attribute - string attribute name
        """
        items_list = self.get_options()
        return next(iter([item.get_attribute(attribute) for item in items_list if item.is_selected()]), None)
python
def get_attribute_selected(self, attribute): items_list = self.get_options() return next(iter([item.get_attribute(attribute) for item in items_list if item.is_selected()]), None)
[ "def", "get_attribute_selected", "(", "self", ",", "attribute", ")", ":", "items_list", "=", "self", ".", "get_options", "(", ")", "return", "next", "(", "iter", "(", "[", "item", ".", "get_attribute", "(", "attribute", ")", "for", "item", "in", "items_list", "if", "item", ".", "is_selected", "(", ")", "]", ")", ",", "None", ")" ]
Performs search of selected item from Web List
        Return attribute of selected item
        @param attribute - string attribute name
[ "Performs", "search", "of", "selected", "item", "from", "Web", "List", "Return", "attribute", "of", "selected", "item" ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/controls/select.py#L33-L41
2,674
wgnet/webium
webium/controls/select.py
Select.select_by_visible_text
def select_by_visible_text(self, text):
        """
        Performs selection of the option matching the given visible text
        @param text - string visible text
        """
        xpath = './/option[normalize-space(.) = {0}]'.format(self._escape_string(text))
        opts = self.find_elements_by_xpath(xpath)
        matched = False
        for opt in opts:
            self._set_selected(opt)
            if not self.is_multiple:
                return
            matched = True

        # in case the target option isn't found by xpath
        # attempt to find it by direct comparison among options which contain at least the longest token from the text
        if len(opts) == 0 and ' ' in text:
            sub_string_without_space = self._get_longest_token(text)
            if sub_string_without_space == "":
                candidates = self.get_options()
            else:
                xpath = ".//option[contains(.,{0})]".format(self._escape_string(sub_string_without_space))
                candidates = self.find_elements_by_xpath(xpath)
            for candidate in candidates:
                if text == candidate.text:
                    self._set_selected(candidate)
                    if not self.is_multiple:
                        return
                    matched = True

        if not matched:
            raise NoSuchElementException("Could not locate element with visible text: " + str(text))
python
def select_by_visible_text(self, text): xpath = './/option[normalize-space(.) = {0}]'.format(self._escape_string(text)) opts = self.find_elements_by_xpath(xpath) matched = False for opt in opts: self._set_selected(opt) if not self.is_multiple: return matched = True # in case the target option isn't found by xpath # attempt to find it by direct comparison among options which contain at least the longest token from the text if len(opts) == 0 and ' ' in text: sub_string_without_space = self._get_longest_token(text) if sub_string_without_space == "": candidates = self.get_options() else: xpath = ".//option[contains(.,{0})]".format(self._escape_string(sub_string_without_space)) candidates = self.find_elements_by_xpath(xpath) for candidate in candidates: if text == candidate.text: self._set_selected(candidate) if not self.is_multiple: return matched = True if not matched: raise NoSuchElementException("Could not locate element with visible text: " + str(text))
[ "def", "select_by_visible_text", "(", "self", ",", "text", ")", ":", "xpath", "=", "'.//option[normalize-space(.) = {0}]'", ".", "format", "(", "self", ".", "_escape_string", "(", "text", ")", ")", "opts", "=", "self", ".", "find_elements_by_xpath", "(", "xpath", ")", "matched", "=", "False", "for", "opt", "in", "opts", ":", "self", ".", "_set_selected", "(", "opt", ")", "if", "not", "self", ".", "is_multiple", ":", "return", "matched", "=", "True", "# in case the target option isn't found by xpath", "# attempt to find it by direct comparison among options which contain at least the longest token from the text", "if", "len", "(", "opts", ")", "==", "0", "and", "' '", "in", "text", ":", "sub_string_without_space", "=", "self", ".", "_get_longest_token", "(", "text", ")", "if", "sub_string_without_space", "==", "\"\"", ":", "candidates", "=", "self", ".", "get_options", "(", ")", "else", ":", "xpath", "=", "\".//option[contains(.,{0})]\"", ".", "format", "(", "self", ".", "_escape_string", "(", "sub_string_without_space", ")", ")", "candidates", "=", "self", ".", "find_elements_by_xpath", "(", "xpath", ")", "for", "candidate", "in", "candidates", ":", "if", "text", "==", "candidate", ".", "text", ":", "self", ".", "_set_selected", "(", "candidate", ")", "if", "not", "self", ".", "is_multiple", ":", "return", "matched", "=", "True", "if", "not", "matched", ":", "raise", "NoSuchElementException", "(", "\"Could not locate element with visible text: \"", "+", "str", "(", "text", ")", ")" ]
Performs selection of the option matching the given visible text
        @param text - string visible text
[ "Performs", "search", "of", "selected", "item", "from", "Web", "List" ]
ccb09876a201e75f5c5810392d4db7a8708b90cb
https://github.com/wgnet/webium/blob/ccb09876a201e75f5c5810392d4db7a8708b90cb/webium/controls/select.py#L57-L87
2,675
alecthomas/importmagic
importmagic/util.py
parse_ast
def parse_ast(source, filename=None): """Parse source into a Python AST, taking care of encoding.""" if isinstance(source, text_type) and sys.version_info[0] == 2: # ast.parse() on Python 2 doesn't like encoding declarations # in Unicode strings source = CODING_COOKIE_RE.sub(r'\1', source, 1) return ast.parse(source, filename or '<unknown>')
python
def parse_ast(source, filename=None): if isinstance(source, text_type) and sys.version_info[0] == 2: # ast.parse() on Python 2 doesn't like encoding declarations # in Unicode strings source = CODING_COOKIE_RE.sub(r'\1', source, 1) return ast.parse(source, filename or '<unknown>')
[ "def", "parse_ast", "(", "source", ",", "filename", "=", "None", ")", ":", "if", "isinstance", "(", "source", ",", "text_type", ")", "and", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "# ast.parse() on Python 2 doesn't like encoding declarations", "# in Unicode strings", "source", "=", "CODING_COOKIE_RE", ".", "sub", "(", "r'\\1'", ",", "source", ",", "1", ")", "return", "ast", ".", "parse", "(", "source", ",", "filename", "or", "'<unknown>'", ")" ]
Parse source into a Python AST, taking care of encoding.
[ "Parse", "source", "into", "a", "Python", "AST", "taking", "care", "of", "encoding", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/util.py#L14-L20
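On Python 3 the coding-cookie branch is skipped entirely (text_type and CODING_COOKIE_RE are assumed here to be the module's Python 2 shims), so parse_ast reduces to a plain ast.parse with a default filename. A small check of that happy path:
import ast

# A source string carrying an encoding declaration; Python 2's ast.parse()
# rejects this in a unicode string, which is the case parse_ast() works around.
source = '# -*- coding: utf-8 -*-\nx = 1\n'
tree = ast.parse(source, '<example>')        # what parse_ast reduces to on Python 3
print(isinstance(tree.body[0], ast.Assign))  # True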
2,676
alecthomas/importmagic
importmagic/symbols.py
Scope.find_unresolved_and_unreferenced_symbols
def find_unresolved_and_unreferenced_symbols(self): """Find any unresolved symbols, and unreferenced symbols from this scope. :returns: ({unresolved}, {unreferenced}) """ unresolved = set() unreferenced = self._definitions.copy() self._collect_unresolved_and_unreferenced(set(), set(), unresolved, unreferenced, frozenset(self._definitions), start=True) return unresolved, unreferenced - Scope.ALL_BUILTINS
python
def find_unresolved_and_unreferenced_symbols(self): unresolved = set() unreferenced = self._definitions.copy() self._collect_unresolved_and_unreferenced(set(), set(), unresolved, unreferenced, frozenset(self._definitions), start=True) return unresolved, unreferenced - Scope.ALL_BUILTINS
[ "def", "find_unresolved_and_unreferenced_symbols", "(", "self", ")", ":", "unresolved", "=", "set", "(", ")", "unreferenced", "=", "self", ".", "_definitions", ".", "copy", "(", ")", "self", ".", "_collect_unresolved_and_unreferenced", "(", "set", "(", ")", ",", "set", "(", ")", ",", "unresolved", ",", "unreferenced", ",", "frozenset", "(", "self", ".", "_definitions", ")", ",", "start", "=", "True", ")", "return", "unresolved", ",", "unreferenced", "-", "Scope", ".", "ALL_BUILTINS" ]
Find any unresolved symbols, and unreferenced symbols from this scope. :returns: ({unresolved}, {unreferenced})
[ "Find", "any", "unresolved", "symbols", "and", "unreferenced", "symbols", "from", "this", "scope", "." ]
c00f2b282d933e0a9780146a20792f9e31fc8e6f
https://github.com/alecthomas/importmagic/blob/c00f2b282d933e0a9780146a20792f9e31fc8e6f/importmagic/symbols.py#L116-L125
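A hedged usage example, assuming Scope.from_source() builds a Scope from source text, as importmagic's README suggests:
import importmagic  # assumes the importmagic package is installed

source = 'import sys\n\nprint(os.path.basename("x"))\n'
scope = importmagic.Scope.from_source(source)
unresolved, unreferenced = scope.find_unresolved_and_unreferenced_symbols()
print(unresolved)    # expected to contain 'os.path.basename' (never imported)
print(unreferenced)  # expected to contain 'sys' (imported but never used)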
2,677
netpieio/microgear-python
microgear/cache.py
get_item
def get_item(key): """Return content in cached file in JSON format""" CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) try: return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"] except (IOError, ValueError): return None
python
def get_item(key): CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) try: return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"] except (IOError, ValueError): return None
[ "def", "get_item", "(", "key", ")", ":", "CACHED_KEY_FILE", "=", "os", ".", "path", ".", "join", "(", "CURRENT_DIR", ",", "key", ")", "try", ":", "return", "json", ".", "loads", "(", "open", "(", "CACHED_KEY_FILE", ",", "\"rb\"", ")", ".", "read", "(", ")", ".", "decode", "(", "'UTF-8'", ")", ")", "[", "\"_\"", "]", "except", "(", "IOError", ",", "ValueError", ")", ":", "return", "None" ]
Return content in cached file in JSON format
[ "Return", "content", "in", "cached", "file", "in", "JSON", "format" ]
ea9bb352c7dd84b92f3462177645eaa4d448d50b
https://github.com/netpieio/microgear-python/blob/ea9bb352c7dd84b92f3462177645eaa4d448d50b/microgear/cache.py#L9-L16
2,678
netpieio/microgear-python
microgear/cache.py
set_item
def set_item(key,value): """Write JSON content from value argument to cached file and return""" CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8')) return value
python
def set_item(key,value): CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8')) return value
[ "def", "set_item", "(", "key", ",", "value", ")", ":", "CACHED_KEY_FILE", "=", "os", ".", "path", ".", "join", "(", "CURRENT_DIR", ",", "key", ")", "open", "(", "CACHED_KEY_FILE", ",", "\"wb\"", ")", ".", "write", "(", "json", ".", "dumps", "(", "{", "\"_\"", ":", "value", "}", ")", ".", "encode", "(", "'UTF-8'", ")", ")", "return", "value" ]
Write JSON content from value argument to cached file and return
[ "Write", "JSON", "content", "from", "value", "argument", "to", "cached", "file", "and", "return" ]
ea9bb352c7dd84b92f3462177645eaa4d448d50b
https://github.com/netpieio/microgear-python/blob/ea9bb352c7dd84b92f3462177645eaa4d448d50b/microgear/cache.py#L19-L25
2,679
netpieio/microgear-python
microgear/cache.py
delete_item
def delete_item(key): """Delete cached file if present""" CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) if os.path.isfile(CACHED_KEY_FILE): os.remove(CACHED_KEY_FILE)
python
def delete_item(key): CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key) if os.path.isfile(CACHED_KEY_FILE): os.remove(CACHED_KEY_FILE)
[ "def", "delete_item", "(", "key", ")", ":", "CACHED_KEY_FILE", "=", "os", ".", "path", ".", "join", "(", "CURRENT_DIR", ",", "key", ")", "if", "os", ".", "path", ".", "isfile", "(", "CACHED_KEY_FILE", ")", ":", "os", ".", "remove", "(", "CACHED_KEY_FILE", ")" ]
Delete cached file if present
[ "Delete", "cached", "file", "if", "present" ]
ea9bb352c7dd84b92f3462177645eaa4d448d50b
https://github.com/netpieio/microgear-python/blob/ea9bb352c7dd84b92f3462177645eaa4d448d50b/microgear/cache.py#L28-L33
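Taken together, the three cache helpers form a tiny file-backed key/value store in which the key doubles as a filename under CURRENT_DIR. A hedged round-trip, assuming the microgear package is importable and its install location is writable:
from microgear import cache  # assumed import path for these helpers

cache.set_item('demo_key', {'token': 'abc'})
print(cache.get_item('demo_key'))   # {'token': 'abc'}
cache.delete_item('demo_key')
print(cache.get_item('demo_key'))   # None once the file is gone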
2,680
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.__parse_json_data
def __parse_json_data(self, data): """Process Json data :@param data :@type data: json/dict :throws TypeError """ if isinstance(data, dict) or isinstance(data, list): self._raw_data = data self._json_data = copy.deepcopy(self._raw_data) else: raise TypeError("Provided Data is not json")
python
def __parse_json_data(self, data): if isinstance(data, dict) or isinstance(data, list): self._raw_data = data self._json_data = copy.deepcopy(self._raw_data) else: raise TypeError("Provided Data is not json")
[ "def", "__parse_json_data", "(", "self", ",", "data", ")", ":", "if", "isinstance", "(", "data", ",", "dict", ")", "or", "isinstance", "(", "data", ",", "list", ")", ":", "self", ".", "_raw_data", "=", "data", "self", ".", "_json_data", "=", "copy", ".", "deepcopy", "(", "self", ".", "_raw_data", ")", "else", ":", "raise", "TypeError", "(", "\"Provided Data is not json\"", ")" ]
Process Json data :@param data :@type data: json/dict :throws TypeError
[ "Process", "Json", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L32-L44
2,681
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.__parse_json_file
def __parse_json_file(self, file_path): """Process Json file data :@param file_path :@type file_path: string :@throws IOError """ if file_path == '' or os.path.splitext(file_path)[1] != '.json': raise IOError('Invalid Json file') with open(file_path) as json_file: self._raw_data = json.load(json_file) self._json_data = copy.deepcopy(self._raw_data)
python
def __parse_json_file(self, file_path): if file_path == '' or os.path.splitext(file_path)[1] != '.json': raise IOError('Invalid Json file') with open(file_path) as json_file: self._raw_data = json.load(json_file) self._json_data = copy.deepcopy(self._raw_data)
[ "def", "__parse_json_file", "(", "self", ",", "file_path", ")", ":", "if", "file_path", "==", "''", "or", "os", ".", "path", ".", "splitext", "(", "file_path", ")", "[", "1", "]", "!=", "'.json'", ":", "raise", "IOError", "(", "'Invalid Json file'", ")", "with", "open", "(", "file_path", ")", "as", "json_file", ":", "self", ".", "_raw_data", "=", "json", ".", "load", "(", "json_file", ")", "self", ".", "_json_data", "=", "copy", ".", "deepcopy", "(", "self", ".", "_raw_data", ")" ]
Process Json file data :@param file_path :@type file_path: string :@throws IOError
[ "Process", "Json", "file", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L46-L60
2,682
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.__get_value_from_data
def __get_value_from_data(self, key, data):
        """Find value from json data

        :@param key
        :@type key: string

        :@param data
        :@type data: dict

        :@return object
        :@throws KeyError
        """
        if key.isdigit():
            return data[int(key)]

        if key not in data:
            raise KeyError("Key not exists")

        return data.get(key)
python
def __get_value_from_data(self, key, data): if key.isdigit(): return data[int(key)] if key not in data: raise KeyError("Key not exists") return data.get(key)
[ "def", "__get_value_from_data", "(", "self", ",", "key", ",", "data", ")", ":", "if", "key", ".", "isdigit", "(", ")", ":", "return", "data", "[", "int", "(", "key", ")", "]", "if", "key", "not", "in", "data", ":", "raise", "KeyError", "(", "\"Key not exists\"", ")", "return", "data", ".", "get", "(", "key", ")" ]
Find value from json data

        :@param key
        :@type key: string

        :@param data
        :@type data: dict

        :@return object
        :@throws KeyError
[ "Find", "value", "from", "json", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L62-L80
2,683
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.at
def at(self, root):
        """Set the root where PyJsonq starts to prepare

        :@param root
        :@type root: string

        :@return self
        :@throws KeyError
        """
        leafs = root.strip(" ").split('.')
        for leaf in leafs:
            if leaf:
                self._json_data = self.__get_value_from_data(leaf, self._json_data)
        return self
python
def at(self, root): leafs = root.strip(" ").split('.') for leaf in leafs: if leaf: self._json_data = self.__get_value_from_data(leaf, self._json_data) return self
[ "def", "at", "(", "self", ",", "root", ")", ":", "leafs", "=", "root", ".", "strip", "(", "\" \"", ")", ".", "split", "(", "'.'", ")", "for", "leaf", "in", "leafs", ":", "if", "leaf", ":", "self", ".", "_json_data", "=", "self", ".", "__get_value_from_data", "(", "leaf", ",", "self", ".", "_json_data", ")", "return", "self" ]
Set the root where PyJsonq starts to prepare

        :@param root
        :@type root: string

        :@return self
        :@throws KeyError
[ "Set", "root", "where", "PyJsonq", "start", "to", "prepare" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L101-L114
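Because __get_value_from_data treats digit segments as list indexes, at() can walk through lists as well as dicts. A hedged example, assuming JsonQ accepts a data= keyword (as __parse_json_data implies) and that get() returns the prepared data:
from pyjsonq import JsonQ  # assumes py-jsonq is installed

data = {'users': [{'name': 'ada'}, {'name': 'lin'}]}
q = JsonQ(data=data)
print(q.at('users.1.name').get())  # 'lin'; the '1' indexes into the list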
2,684
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.reset
def reset(self, data={}):
        """JsonQuery object can be reset to new data
        according to given data or previously given raw Json data

        :@param data: {}
        :@type data: json/dict

        :@return self
        """
        if data and (isinstance(data, dict) or isinstance(data, list)):
            self._json_data = data
        else:
            self._json_data = copy.deepcopy(self._raw_data)

        self.__reset_queries()
        return self
python
def reset(self, data={}): if data and (isinstance(data, dict) or isinstance(data, list)): self._json_data = data else: self._json_data = copy.deepcopy(self._raw_data) self.__reset_queries() return self
[ "def", "reset", "(", "self", ",", "data", "=", "{", "}", ")", ":", "if", "data", "and", "(", "isinstance", "(", "data", ",", "dict", ")", "or", "isinstance", "(", "data", ",", "list", ")", ")", ":", "self", ".", "_json_data", "=", "data", "else", ":", "self", ".", "_json_data", "=", "copy", ".", "deepcopy", "(", "self", ".", "_raw_data", ")", "self", ".", "__reset_queries", "(", ")", "return", "self" ]
JsonQuery object can be reset to new data
        according to given data or previously given raw Json data

        :@param data: {}
        :@type data: json/dict

        :@return self
[ "JsonQuery", "object", "cen", "be", "reset", "to", "new", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L120-L136
2,685
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.__execute_queries
def __execute_queries(self):
        """Execute all conditions and filter the result data"""
        def func(item):
            or_check = False
            for queries in self._queries:
                and_check = True
                for query in queries:
                    and_check &= self._matcher._match(
                        item.get(query.get('key'), None),
                        query.get('operator'),
                        query.get('value')
                    )
                or_check |= and_check
            return or_check

        self._json_data = list(filter(lambda item: func(item), self._json_data))
python
def __execute_queries(self): def func(item): or_check = False for queries in self._queries: and_check = True for query in queries: and_check &= self._matcher._match( item.get(query.get('key'), None), query.get('operator'), query.get('value') ) or_check |= and_check return or_check self._json_data = list(filter(lambda item: func(item), self._json_data))
[ "def", "__execute_queries", "(", "self", ")", ":", "def", "func", "(", "item", ")", ":", "or_check", "=", "False", "for", "queries", "in", "self", ".", "_queries", ":", "and_check", "=", "True", "for", "query", "in", "queries", ":", "and_check", "&=", "self", ".", "_matcher", ".", "_match", "(", "item", ".", "get", "(", "query", ".", "get", "(", "'key'", ")", ",", "None", ")", ",", "query", ".", "get", "(", "'operator'", ")", ",", "query", ".", "get", "(", "'value'", ")", ")", "or_check", "|=", "and_check", "return", "or_check", "self", ".", "_json_data", "=", "list", "(", "filter", "(", "lambda", "item", ":", "func", "(", "item", ")", ",", "self", ".", "_json_data", ")", ")" ]
Execute all conditions and filter the result data
[ "Execute", "all", "condition", "and", "filter", "result", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L157-L173
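The nested loops implement OR-of-ANDs: every condition inside one group must hold, and any one group matching is enough. A standalone sketch of the same evaluation, using plain callables where the real code dispatches through Matcher._match:
import operator

# Hedged stand-in for the stored queries: each inner list is an AND group,
# and the groups are ORed together, exactly as in func() above.
def matches(item, query_groups):
    or_check = False
    for group in query_groups:
        and_check = True
        for key, op, value in group:
            and_check &= op(item.get(key), value)
        or_check |= and_check
    return or_check

groups = [
    [('age', operator.gt, 30), ('city', operator.eq, 'berlin')],  # AND group
    [('admin', operator.eq, True)],                               # ORed group
]
print(matches({'age': 25, 'city': 'berlin', 'admin': True}, groups))  # True, via group 2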
2,686
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.or_where
def or_where(self, key, operator, value): """Make or_where clause :@param key :@param operator :@param value :@type key, operator, value: string :@return self """ if len(self._queries) > 0: self._current_query_index += 1 self.__store_query({"key": key, "operator": operator, "value": value}) return self
python
def or_where(self, key, operator, value): if len(self._queries) > 0: self._current_query_index += 1 self.__store_query({"key": key, "operator": operator, "value": value}) return self
[ "def", "or_where", "(", "self", ",", "key", ",", "operator", ",", "value", ")", ":", "if", "len", "(", "self", ".", "_queries", ")", ">", "0", ":", "self", ".", "_current_query_index", "+=", "1", "self", ".", "__store_query", "(", "{", "\"key\"", ":", "key", ",", "\"operator\"", ":", "operator", ",", "\"value\"", ":", "value", "}", ")", "return", "self" ]
Make or_where clause :@param key :@param operator :@param value :@type key, operator, value: string :@return self
[ "Make", "or_where", "clause" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L190-L203
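In terms of that grouping, where() appends to the current AND group while or_where() bumps _current_query_index and opens a new one. A hedged chaining example, assuming the README-style where()/get() API and the data= keyword as above:
from pyjsonq import JsonQ  # assumed API, as above

people = [{'name': 'ada', 'age': 36}, {'name': 'lin', 'age': 28}]
q = JsonQ(data={'people': people})
result = q.at('people').where('age', '>', 30).or_where('name', '=', 'lin').get()
print(result)  # both records: 'ada' via the first group, 'lin' via the second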
2,687
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.nth
def nth(self, index): """Getting the nth element of the collection :@param index :@type index: int :@return object """ self.__prepare() return None if self.count() < math.fabs(index) else self._json_data[index]
python
def nth(self, index): self.__prepare() return None if self.count() < math.fabs(index) else self._json_data[index]
[ "def", "nth", "(", "self", ",", "index", ")", ":", "self", ".", "__prepare", "(", ")", "return", "None", "if", "self", ".", "count", "(", ")", "<", "math", ".", "fabs", "(", "index", ")", "else", "self", ".", "_json_data", "[", "index", "]" ]
Getting the nth element of the collection :@param index :@type index: int :@return object
[ "Getting", "the", "nth", "element", "of", "the", "collection" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L321-L330
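The math.fabs guard means negative indexes are accepted as long as their magnitude stays within the count, so nth(-1) addresses the last element. Hedged examples on a small list, under the same API assumptions as above:
from pyjsonq import JsonQ  # assumed API, as above

items = JsonQ(data={'xs': [10, 20, 30]}).at('xs')
print(items.nth(1))    # 20
print(items.nth(-1))   # 30; Python's negative indexing applies while |index| <= count
print(items.nth(5))    # None, since count() < |5|
# note: nth(3) would still raise IndexError, because the guard only
# rejects indexes whose magnitude exceeds the count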
2,688
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.sum
def sum(self, property): """Getting the sum according to the given property :@param property :@type property: string :@return int/float """ self.__prepare() total = 0 for i in self._json_data: total += i.get(property) return total
python
def sum(self, property): self.__prepare() total = 0 for i in self._json_data: total += i.get(property) return total
[ "def", "sum", "(", "self", ",", "property", ")", ":", "self", ".", "__prepare", "(", ")", "total", "=", "0", "for", "i", "in", "self", ".", "_json_data", ":", "total", "+=", "i", ".", "get", "(", "property", ")", "return", "total" ]
Getting the sum according to the given property :@param property :@type property: string :@return int/float
[ "Getting", "the", "sum", "according", "to", "the", "given", "property" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L332-L345
2,689
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.max
def max(self, property): """Getting the maximum value from the prepared data :@param property :@type property: string :@return object :@throws KeyError """ self.__prepare() try: return max(self._json_data, key=lambda x: x[property]).get(property) except KeyError: raise KeyError("Key is not exists")
python
def max(self, property): self.__prepare() try: return max(self._json_data, key=lambda x: x[property]).get(property) except KeyError: raise KeyError("Key is not exists")
[ "def", "max", "(", "self", ",", "property", ")", ":", "self", ".", "__prepare", "(", ")", "try", ":", "return", "max", "(", "self", ".", "_json_data", ",", "key", "=", "lambda", "x", ":", "x", "[", "property", "]", ")", ".", "get", "(", "property", ")", "except", "KeyError", ":", "raise", "KeyError", "(", "\"Key is not exists\"", ")" ]
Getting the maximum value from the prepared data :@param property :@type property: string :@return object :@throws KeyError
[ "Getting", "the", "maximum", "value", "from", "the", "prepared", "data" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L347-L360
2,690
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.avg
def avg(self, property):
        """Getting the average according to the given property

        :@param property
        :@type property: string

        :@return average: int/float
        """
        self.__prepare()
        return self.sum(property) / self.count()
python
def avg(self, property): self.__prepare() return self.sum(property) / self.count()
[ "def", "avg", "(", "self", ",", "property", ")", ":", "self", ".", "__prepare", "(", ")", "return", "self", ".", "sum", "(", "property", ")", "/", "self", ".", "count", "(", ")" ]
Getting the average according to the given property

        :@param property
        :@type property: string

        :@return average: int/float
[ "Getting", "average", "according", "to", "given", "property" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L377-L386
2,691
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.chunk
def chunk(self, size=0):
        """Group the resulting collection into multiple chunks

        :@param size: 0
        :@type size: integer

        :@return Chunked List
        """
        if size == 0:
            raise ValueError('Invalid chunk size')

        self.__prepare()
        _new_content = []

        while(len(self._json_data) > 0):
            _new_content.append(self._json_data[0:size])
            self._json_data = self._json_data[size:]

        self._json_data = _new_content
        return self._json_data
python
def chunk(self, size=0): if size == 0: raise ValueError('Invalid chunk size') self.__prepare() _new_content = [] while(len(self._json_data) > 0): _new_content.append(self._json_data[0:size]) self._json_data = self._json_data[size:] self._json_data = _new_content return self._json_data
[ "def", "chunk", "(", "self", ",", "size", "=", "0", ")", ":", "if", "size", "==", "0", ":", "raise", "ValueError", "(", "'Invalid chunk size'", ")", "self", ".", "__prepare", "(", ")", "_new_content", "=", "[", "]", "while", "(", "len", "(", "self", ".", "_json_data", ")", ">", "0", ")", ":", "_new_content", ".", "append", "(", "self", ".", "_json_data", "[", "0", ":", "size", "]", ")", "self", ".", "_json_data", "=", "self", ".", "_json_data", "[", "size", ":", "]", "self", ".", "_json_data", "=", "_new_content", "return", "self", ".", "_json_data" ]
Group the resulting collection into multiple chunks

        :@param size: 0
        :@type size: integer

        :@return Chunked List
[ "Group", "the", "resulted", "collection", "to", "multiple", "chunk" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L388-L409
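chunk() consumes the prepared list in size-d slices, so the last group carries any remainder. A hedged example, under the same API assumptions as above:
from pyjsonq import JsonQ  # assumed API, as above

print(JsonQ(data={'xs': [1, 2, 3, 4, 5]}).at('xs').chunk(2))
# [[1, 2], [3, 4], [5]]; the trailing chunk carries the remainder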
2,692
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.group_by
def group_by(self, property): """Getting the grouped result by the given property :@param property :@type property: string :@return self """ self.__prepare() group_data = {} for data in self._json_data: if data[property] not in group_data: group_data[data[property]] = [] group_data[data[property]].append(data) self._json_data = group_data return self
python
def group_by(self, property): self.__prepare() group_data = {} for data in self._json_data: if data[property] not in group_data: group_data[data[property]] = [] group_data[data[property]].append(data) self._json_data = group_data return self
[ "def", "group_by", "(", "self", ",", "property", ")", ":", "self", ".", "__prepare", "(", ")", "group_data", "=", "{", "}", "for", "data", "in", "self", ".", "_json_data", ":", "if", "data", "[", "property", "]", "not", "in", "group_data", ":", "group_data", "[", "data", "[", "property", "]", "]", "=", "[", "]", "group_data", "[", "data", "[", "property", "]", "]", ".", "append", "(", "data", ")", "self", ".", "_json_data", "=", "group_data", "return", "self" ]
Getting the grouped result by the given property :@param property :@type property: string :@return self
[ "Getting", "the", "grouped", "result", "by", "the", "given", "property" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L411-L427
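group_by() replaces the list with a dict keyed by the property's values, each mapping to the records that share that value. A hedged example, again assuming data= and get():
from pyjsonq import JsonQ  # assumed API, as above

users = [
    {'name': 'ada', 'city': 'berlin'},
    {'name': 'lin', 'city': 'paris'},
    {'name': 'kay', 'city': 'berlin'},
]
print(JsonQ(data={'users': users}).at('users').group_by('city').get())
# {'berlin': [ada and kay records], 'paris': [lin record]}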
2,693
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.sort
def sort(self, order="asc"): """Getting the sorted result of the given list :@param order: "asc" :@type order: string :@return self """ self.__prepare() if isinstance(self._json_data, list): if order == "asc": self._json_data = sorted(self._json_data) else: self._json_data = sorted(self._json_data, reverse=True) return self
python
def sort(self, order="asc"): self.__prepare() if isinstance(self._json_data, list): if order == "asc": self._json_data = sorted(self._json_data) else: self._json_data = sorted(self._json_data, reverse=True) return self
[ "def", "sort", "(", "self", ",", "order", "=", "\"asc\"", ")", ":", "self", ".", "__prepare", "(", ")", "if", "isinstance", "(", "self", ".", "_json_data", ",", "list", ")", ":", "if", "order", "==", "\"asc\"", ":", "self", ".", "_json_data", "=", "sorted", "(", "self", ".", "_json_data", ")", "else", ":", "self", ".", "_json_data", "=", "sorted", "(", "self", ".", "_json_data", ",", "reverse", "=", "True", ")", "return", "self" ]
Getting the sorted result of the given list :@param order: "asc" :@type order: string :@return self
[ "Getting", "the", "sorted", "result", "of", "the", "given", "list" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L429-L444
2,694
s1s1ty/py-jsonq
pyjsonq/query.py
JsonQ.sort_by
def sort_by(self, property, order="asc"): """Getting the sorted result by the given property :@param property, order: "asc" :@type property, order: string :@return self """ self.__prepare() if isinstance(self._json_data, list): if order == "asc": self._json_data = sorted( self._json_data, key=lambda x: x.get(property) ) else: self._json_data = sorted( self._json_data, key=lambda x: x.get(property), reverse=True ) return self
python
def sort_by(self, property, order="asc"): self.__prepare() if isinstance(self._json_data, list): if order == "asc": self._json_data = sorted( self._json_data, key=lambda x: x.get(property) ) else: self._json_data = sorted( self._json_data, key=lambda x: x.get(property), reverse=True ) return self
[ "def", "sort_by", "(", "self", ",", "property", ",", "order", "=", "\"asc\"", ")", ":", "self", ".", "__prepare", "(", ")", "if", "isinstance", "(", "self", ".", "_json_data", ",", "list", ")", ":", "if", "order", "==", "\"asc\"", ":", "self", ".", "_json_data", "=", "sorted", "(", "self", ".", "_json_data", ",", "key", "=", "lambda", "x", ":", "x", ".", "get", "(", "property", ")", ")", "else", ":", "self", ".", "_json_data", "=", "sorted", "(", "self", ".", "_json_data", ",", "key", "=", "lambda", "x", ":", "x", ".", "get", "(", "property", ")", ",", "reverse", "=", "True", ")", "return", "self" ]
Getting the sorted result by the given property :@param property, order: "asc" :@type property, order: string :@return self
[ "Getting", "the", "sorted", "result", "by", "the", "given", "property" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L446-L468
2,695
s1s1ty/py-jsonq
pyjsonq/matcher.py
Matcher._match
def _match(self, x, op, y): """Compare the given `x` and `y` based on `op` :@param x, y, op :@type x, y: mixed :@type op: string :@return bool :@throws ValueError """ if (op not in self.condition_mapper): raise ValueError('Invalid where condition given') func = getattr(self, self.condition_mapper.get(op)) return func(x, y)
python
def _match(self, x, op, y): if (op not in self.condition_mapper): raise ValueError('Invalid where condition given') func = getattr(self, self.condition_mapper.get(op)) return func(x, y)
[ "def", "_match", "(", "self", ",", "x", ",", "op", ",", "y", ")", ":", "if", "(", "op", "not", "in", "self", ".", "condition_mapper", ")", ":", "raise", "ValueError", "(", "'Invalid where condition given'", ")", "func", "=", "getattr", "(", "self", ",", "self", ".", "condition_mapper", ".", "get", "(", "op", ")", ")", "return", "func", "(", "x", ",", "y", ")" ]
Compare the given `x` and `y` based on `op` :@param x, y, op :@type x, y: mixed :@type op: string :@return bool :@throws ValueError
[ "Compare", "the", "given", "x", "and", "y", "based", "on", "op" ]
9625597a2578bddcbed4e540174d5253b1fc3b75
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/matcher.py#L162-L176
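_match is a dispatch table: operator strings map to method names that getattr() resolves at call time. A self-contained sketch of the pattern with an illustrative two-entry mapper (py-jsonq's real condition_mapper covers many more operators):
class MiniMatcher:
    # Hypothetical two-entry mapper; the library's real table is larger.
    condition_mapper = {'=': '_is_equal', '>': '_is_greater'}

    def _is_equal(self, x, y):
        return x == y

    def _is_greater(self, x, y):
        return x is not None and x > y

    def _match(self, x, op, y):
        if op not in self.condition_mapper:
            raise ValueError('Invalid where condition given')
        return getattr(self, self.condition_mapper[op])(x, y)

m = MiniMatcher()
print(m._match(5, '>', 3))   # True
print(m._match(5, '=', 3))   # False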
2,696
mkorpela/overrides
overrides/overrides.py
overrides
def overrides(method): """Decorator to indicate that the decorated method overrides a method in superclass. The decorator code is executed while loading class. Using this method should have minimal runtime performance implications. This is based on my idea about how to do this and fwc:s highly improved algorithm for the implementation fwc:s algorithm : http://stackoverflow.com/a/14631397/308189 my answer : http://stackoverflow.com/a/8313042/308189 How to use: from overrides import overrides class SuperClass(object): def method(self): return 2 class SubClass(SuperClass): @overrides def method(self): return 1 :raises AssertionError if no match in super classes for the method name :return method with possibly added (if the method doesn't have one) docstring from super class """ for super_class in _get_base_classes(sys._getframe(2), method.__globals__): if hasattr(super_class, method.__name__): super_method = getattr(super_class, method.__name__) if hasattr(super_method, "__finalized__"): finalized = getattr(super_method, "__finalized__") if finalized: raise AssertionError('Method "%s" is finalized' % method.__name__) if not method.__doc__: method.__doc__ = super_method.__doc__ return method raise AssertionError('No super class method found for "%s"' % method.__name__)
python
def overrides(method): for super_class in _get_base_classes(sys._getframe(2), method.__globals__): if hasattr(super_class, method.__name__): super_method = getattr(super_class, method.__name__) if hasattr(super_method, "__finalized__"): finalized = getattr(super_method, "__finalized__") if finalized: raise AssertionError('Method "%s" is finalized' % method.__name__) if not method.__doc__: method.__doc__ = super_method.__doc__ return method raise AssertionError('No super class method found for "%s"' % method.__name__)
[ "def", "overrides", "(", "method", ")", ":", "for", "super_class", "in", "_get_base_classes", "(", "sys", ".", "_getframe", "(", "2", ")", ",", "method", ".", "__globals__", ")", ":", "if", "hasattr", "(", "super_class", ",", "method", ".", "__name__", ")", ":", "super_method", "=", "getattr", "(", "super_class", ",", "method", ".", "__name__", ")", "if", "hasattr", "(", "super_method", ",", "\"__finalized__\"", ")", ":", "finalized", "=", "getattr", "(", "super_method", ",", "\"__finalized__\"", ")", "if", "finalized", ":", "raise", "AssertionError", "(", "'Method \"%s\" is finalized'", "%", "method", ".", "__name__", ")", "if", "not", "method", ".", "__doc__", ":", "method", ".", "__doc__", "=", "super_method", ".", "__doc__", "return", "method", "raise", "AssertionError", "(", "'No super class method found for \"%s\"'", "%", "method", ".", "__name__", ")" ]
Decorator to indicate that the decorated method overrides a method in superclass. The decorator code is executed while loading class. Using this method should have minimal runtime performance implications. This is based on my idea about how to do this and fwc:s highly improved algorithm for the implementation fwc:s algorithm : http://stackoverflow.com/a/14631397/308189 my answer : http://stackoverflow.com/a/8313042/308189 How to use: from overrides import overrides class SuperClass(object): def method(self): return 2 class SubClass(SuperClass): @overrides def method(self): return 1 :raises AssertionError if no match in super classes for the method name :return method with possibly added (if the method doesn't have one) docstring from super class
[ "Decorator", "to", "indicate", "that", "the", "decorated", "method", "overrides", "a", "method", "in", "superclass", ".", "The", "decorator", "code", "is", "executed", "while", "loading", "class", ".", "Using", "this", "method", "should", "have", "minimal", "runtime", "performance", "implications", "." ]
196c2fa3c79fe7a7d319d2ade25bb25f6d78f1c2
https://github.com/mkorpela/overrides/blob/196c2fa3c79fe7a7d319d2ade25bb25f6d78f1c2/overrides/overrides.py#L30-L70
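The docstring-copying branch is easy to see in isolation: a subclass method decorated with @overrides inherits the superclass docstring when it defines none of its own. A hedged example, assuming the overrides package is installed and used at module level as in its README:
from overrides import overrides  # assumes the overrides package is installed

class Base(object):
    def method(self):
        """Returns a number."""
        return 2

class Sub(Base):
    @overrides
    def method(self):
        return 1

print(Sub().method())      # 1
print(Sub.method.__doc__)  # 'Returns a number.', copied from Base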
2,697
mkorpela/overrides
overrides/overrides.py
_get_base_class_names
def _get_base_class_names(frame): """ Get baseclass names from the code object """ co, lasti = frame.f_code, frame.f_lasti code = co.co_code extends = [] for (op, oparg) in op_stream(code, lasti): if op in dis.hasconst: if type(co.co_consts[oparg]) == str: extends = [] elif op in dis.hasname: if dis.opname[op] == 'LOAD_NAME': extends.append(('name', co.co_names[oparg])) if dis.opname[op] == 'LOAD_ATTR': extends.append(('attr', co.co_names[oparg])) if dis.opname[op] == 'LOAD_GLOBAL': extends.append(('name', co.co_names[oparg])) items = [] previous_item = [] for t, s in extends: if t == 'name': if previous_item: items.append(previous_item) previous_item = [s] else: previous_item += [s] if previous_item: items.append(previous_item) return items
python
def _get_base_class_names(frame): co, lasti = frame.f_code, frame.f_lasti code = co.co_code extends = [] for (op, oparg) in op_stream(code, lasti): if op in dis.hasconst: if type(co.co_consts[oparg]) == str: extends = [] elif op in dis.hasname: if dis.opname[op] == 'LOAD_NAME': extends.append(('name', co.co_names[oparg])) if dis.opname[op] == 'LOAD_ATTR': extends.append(('attr', co.co_names[oparg])) if dis.opname[op] == 'LOAD_GLOBAL': extends.append(('name', co.co_names[oparg])) items = [] previous_item = [] for t, s in extends: if t == 'name': if previous_item: items.append(previous_item) previous_item = [s] else: previous_item += [s] if previous_item: items.append(previous_item) return items
[ "def", "_get_base_class_names", "(", "frame", ")", ":", "co", ",", "lasti", "=", "frame", ".", "f_code", ",", "frame", ".", "f_lasti", "code", "=", "co", ".", "co_code", "extends", "=", "[", "]", "for", "(", "op", ",", "oparg", ")", "in", "op_stream", "(", "code", ",", "lasti", ")", ":", "if", "op", "in", "dis", ".", "hasconst", ":", "if", "type", "(", "co", ".", "co_consts", "[", "oparg", "]", ")", "==", "str", ":", "extends", "=", "[", "]", "elif", "op", "in", "dis", ".", "hasname", ":", "if", "dis", ".", "opname", "[", "op", "]", "==", "'LOAD_NAME'", ":", "extends", ".", "append", "(", "(", "'name'", ",", "co", ".", "co_names", "[", "oparg", "]", ")", ")", "if", "dis", ".", "opname", "[", "op", "]", "==", "'LOAD_ATTR'", ":", "extends", ".", "append", "(", "(", "'attr'", ",", "co", ".", "co_names", "[", "oparg", "]", ")", ")", "if", "dis", ".", "opname", "[", "op", "]", "==", "'LOAD_GLOBAL'", ":", "extends", ".", "append", "(", "(", "'name'", ",", "co", ".", "co_names", "[", "oparg", "]", ")", ")", "items", "=", "[", "]", "previous_item", "=", "[", "]", "for", "t", ",", "s", "in", "extends", ":", "if", "t", "==", "'name'", ":", "if", "previous_item", ":", "items", ".", "append", "(", "previous_item", ")", "previous_item", "=", "[", "s", "]", "else", ":", "previous_item", "+=", "[", "s", "]", "if", "previous_item", ":", "items", ".", "append", "(", "previous_item", ")", "return", "items" ]
Get baseclass names from the code object
[ "Get", "baseclass", "names", "from", "the", "code", "object" ]
196c2fa3c79fe7a7d319d2ade25bb25f6d78f1c2
https://github.com/mkorpela/overrides/blob/196c2fa3c79fe7a7d319d2ade25bb25f6d78f1c2/overrides/overrides.py#L126-L155
2,698
firecat53/urlscan
urlscan/urlscan.py
load_tlds
def load_tlds(): """Load all legal TLD extensions from assets """ file = os.path.join(os.path.dirname(__file__), 'assets', 'tlds-alpha-by-domain.txt') with open(file) as fobj: return [elem for elem in fobj.read().lower().splitlines()[1:] if "--" not in elem]
python
def load_tlds(): file = os.path.join(os.path.dirname(__file__), 'assets', 'tlds-alpha-by-domain.txt') with open(file) as fobj: return [elem for elem in fobj.read().lower().splitlines()[1:] if "--" not in elem]
[ "def", "load_tlds", "(", ")", ":", "file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'assets'", ",", "'tlds-alpha-by-domain.txt'", ")", "with", "open", "(", "file", ")", "as", "fobj", ":", "return", "[", "elem", "for", "elem", "in", "fobj", ".", "read", "(", ")", ".", "lower", "(", ")", ".", "splitlines", "(", ")", "[", "1", ":", "]", "if", "\"--\"", "not", "in", "elem", "]" ]
Load all legal TLD extensions from assets
[ "Load", "all", "legal", "TLD", "extensions", "from", "assets" ]
2d10807d01167873733da3b478c784f8fa21bbc0
https://github.com/firecat53/urlscan/blob/2d10807d01167873733da3b478c784f8fa21bbc0/urlscan/urlscan.py#L268-L277
2,699
firecat53/urlscan
urlscan/urlscan.py
parse_text_urls
def parse_text_urls(mesg): """Parse a block of text, splitting it into its url and non-url components.""" rval = [] loc = 0 for match in URLRE.finditer(mesg): if loc < match.start(): rval.append(Chunk(mesg[loc:match.start()], None)) # Turn email addresses into mailto: links email = match.group("email") if email and "mailto" not in email: mailto = "mailto:{}".format(email) else: mailto = match.group(1) rval.append(Chunk(None, mailto)) loc = match.end() if loc < len(mesg): rval.append(Chunk(mesg[loc:], None)) return rval
python
def parse_text_urls(mesg): rval = [] loc = 0 for match in URLRE.finditer(mesg): if loc < match.start(): rval.append(Chunk(mesg[loc:match.start()], None)) # Turn email addresses into mailto: links email = match.group("email") if email and "mailto" not in email: mailto = "mailto:{}".format(email) else: mailto = match.group(1) rval.append(Chunk(None, mailto)) loc = match.end() if loc < len(mesg): rval.append(Chunk(mesg[loc:], None)) return rval
[ "def", "parse_text_urls", "(", "mesg", ")", ":", "rval", "=", "[", "]", "loc", "=", "0", "for", "match", "in", "URLRE", ".", "finditer", "(", "mesg", ")", ":", "if", "loc", "<", "match", ".", "start", "(", ")", ":", "rval", ".", "append", "(", "Chunk", "(", "mesg", "[", "loc", ":", "match", ".", "start", "(", ")", "]", ",", "None", ")", ")", "# Turn email addresses into mailto: links", "email", "=", "match", ".", "group", "(", "\"email\"", ")", "if", "email", "and", "\"mailto\"", "not", "in", "email", ":", "mailto", "=", "\"mailto:{}\"", ".", "format", "(", "email", ")", "else", ":", "mailto", "=", "match", ".", "group", "(", "1", ")", "rval", ".", "append", "(", "Chunk", "(", "None", ",", "mailto", ")", ")", "loc", "=", "match", ".", "end", "(", ")", "if", "loc", "<", "len", "(", "mesg", ")", ":", "rval", ".", "append", "(", "Chunk", "(", "mesg", "[", "loc", ":", "]", ",", "None", ")", ")", "return", "rval" ]
Parse a block of text, splitting it into its url and non-url components.
[ "Parse", "a", "block", "of", "text", "splitting", "it", "into", "its", "url", "and", "non", "-", "url", "components", "." ]
2d10807d01167873733da3b478c784f8fa21bbc0
https://github.com/firecat53/urlscan/blob/2d10807d01167873733da3b478c784f8fa21bbc0/urlscan/urlscan.py#L309-L332
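The alternating text/url chunking is easiest to see with a cut-down pattern. In the sketch below, Chunk and the regex are simplified stand-ins for urlscan's real Chunk type and URLRE, which additionally handle mailto rewriting and many more URL shapes:
import re
from collections import namedtuple

# Hypothetical simplified stand-ins; field names are this sketch's choice.
Chunk = namedtuple('Chunk', ['markup', 'url'])
URLRE = re.compile(r'(https?://\S+)')

def split_urls(mesg):
    rval, loc = [], 0
    for match in URLRE.finditer(mesg):
        if loc < match.start():
            rval.append(Chunk(mesg[loc:match.start()], None))  # plain text before the url
        rval.append(Chunk(None, match.group(1)))               # the url itself
        loc = match.end()
    if loc < len(mesg):
        rval.append(Chunk(mesg[loc:], None))                   # trailing plain text
    return rval

print(split_urls('see https://example.com for details'))
# [Chunk(markup='see ', url=None), Chunk(markup=None, url='https://example.com'),
#  Chunk(markup=' for details', url=None)]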