idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
---|---|---|
61,900 |
def startswith(self, prefix, start=None, end=None):
    """Return whether the current bitstring starts with prefix."""
    prefix = Bits(prefix)
    start, end = self._validate_slice(start, end)
    stop = start + prefix.len
    if end < stop:
        # Not enough room in the slice for the whole prefix.
        return False
    return self._slice(start, stop) == prefix
|
Return whether the current bitstring starts with prefix .
|
61,901 |
def endswith(self, suffix, start=None, end=None):
    """Return whether the current bitstring ends with suffix."""
    suffix = Bits(suffix)
    start, end = self._validate_slice(start, end)
    if end - start < suffix.len:
        # Slice is shorter than the suffix.
        return False
    return self._slice(end - suffix.len, end) == suffix
|
Return whether the current bitstring ends with suffix .
|
61,902 |
def all(self, value, pos=None):
    """Return True if one or many bits are all set to value.

    pos -- iterable of bit positions to check; None checks every bit.
    """
    value = bool(value)
    length = self.len
    if pos is None:
        pos = xrange(self.len)
    for bit_pos in pos:
        if bit_pos < 0:
            bit_pos += length
        if not 0 <= bit_pos < length:
            raise IndexError("Bit position {0} out of range.".format(bit_pos))
        if self._datastore.getbit(bit_pos) is not value:
            return False
    return True
|
Return True if one or many bits are all set to value .
|
61,903 |
def count(self, value):
    """Return count of total number of either zero or one bits.

    value -- truthy counts set bits, falsy counts unset bits.
    """
    if not self.len:
        return 0
    # Sum set bits in every whole byte except the last via lookup table.
    count = sum(BIT_COUNT[self._datastore.getbyte(i)] for i in xrange(self._datastore.bytelength - 1))
    if self._offset:
        # Remove bits of the first byte that precede the bitstring's start.
        count -= BIT_COUNT[self._datastore.getbyte(0) >> (8 - self._offset)]
    # Add the used bits of the final byte, shifting off the unused tail.
    endbits = self._datastore.bytelength * 8 - (self._offset + self.len)
    count += BIT_COUNT[self._datastore.getbyte(self._datastore.bytelength - 1) >> endbits]
    return count if value else self.len - count
|
Return count of total number of either zero or one bits .
|
61,904 |
def replace(self, old, new, start=None, end=None, count=None, bytealigned=None):
    """Replace all occurrences of old with new in place.

    Returns the number of replacements made.  Raises ValueError if old
    is empty.  If this object tracks a bit position it is adjusted so it
    stays on the same logical bit after the replacements.
    """
    old = Bits(old)
    new = Bits(new)
    if not old.len:
        raise ValueError("Empty bitstring cannot be replaced.")
    start, end = self._validate_slice(start, end)
    if bytealigned is None:
        # Fall back to the module-level default.
        bytealigned = globals()['bytealigned']
    if count is not None:
        # split() also yields the section before the first match.
        count += 1
    sections = self.split(old, start, end, count, bytealigned)
    lengths = [s.len for s in sections]
    if len(lengths) == 1:
        # No occurrences found.
        return 0
    if new is self:
        # Copy so that replacing ourselves with ourselves is safe.
        new = copy.copy(self)
    # Compute match positions from the section lengths.
    positions = [lengths[0] + start]
    for l in lengths[1:-1]:
        positions.append(positions[-1] + l)
    # Replace from the end backwards so earlier positions stay valid.
    positions.reverse()
    try:
        # If a bit position exists (bitstream types), keep it consistent.
        newpos = self._pos
        for p in positions:
            self[p:p + old.len] = new
        if old.len != new.len:
            diff = new.len - old.len
            for p in positions:
                if p >= newpos:
                    continue
                if p + old.len <= newpos:
                    newpos += diff
                else:
                    newpos = p
        self._pos = newpos
    except AttributeError:
        # No bit position to maintain.
        for p in positions:
            self[p:p + old.len] = new
    assert self._assertsanity()
    return len(lengths) - 1
|
Replace all occurrences of old with new in place .
|
61,905 |
def insert(self, bs, pos=None):
    """Insert bs at bit position pos.

    Raises TypeError if pos is omitted and there is no current bit
    position, and ValueError if pos is out of range.
    """
    bs = Bits(bs)
    if not bs.len:
        return self
    # Guard against inserting ourselves into ourselves.
    bs = self.__copy__() if bs is self else bs
    if pos is None:
        try:
            pos = self._pos
        except AttributeError:
            raise TypeError("insert require a bit position for this type.")
    bitlen = self.len
    if pos < 0:
        pos += bitlen
    if not 0 <= pos <= bitlen:
        raise ValueError("Invalid insert position.")
    self._insert(bs, pos)
|
Insert bs at bit position pos .
|
61,906 |
def overwrite(self, bs, pos=None):
    """Overwrite with bs at bit position pos."""
    bs = Bits(bs)
    if not bs.len:
        return
    if pos is None:
        try:
            pos = self._pos
        except AttributeError:
            raise TypeError("overwrite require a bit position for this type.")
    bitlen = self.len
    if pos < 0:
        pos += bitlen
    if pos < 0 or pos + bs.len > bitlen:
        raise ValueError("Overwrite exceeds boundary of bitstring.")
    self._overwrite(bs, pos)
    try:
        # Leave the bit position just past the overwritten data.
        self._pos = pos + bs.len
    except AttributeError:
        pass
|
Overwrite with bs at bit position pos .
|
61,907 |
def append(self, bs):
    """Append a bitstring to the current bitstring."""
    # Convert with an offset that lines the new data up on our tail.
    tail_offset = (self.len + self._offset) % 8
    self._append(self._converttobitstring(bs, offset=tail_offset))
|
Append a bitstring to the current bitstring .
|
61,908 |
def reverse(self, start=None, end=None):
    """Reverse bits in-place."""
    start, end = self._validate_slice(start, end)
    if start == 0 and end == self.len:
        # Whole bitstring: reverse directly.
        self._reverse()
        return
    part = self._slice(start, end)
    part._reverse()
    self[start:end] = part
|
Reverse bits in - place .
|
61,909 |
def set(self, value, pos=None):
    """Set one or many bits to 1 or 0.

    value -- truthy sets bits, falsy clears them.
    pos -- an iterable of bit positions, a single int, or None for all.
    """
    f = self._set if value else self._unset
    if pos is None:
        pos = xrange(self.len)
    try:
        length = self.len
        # Assume pos is iterable; a TypeError from the for-loop means it
        # was a single integer position and is handled below.
        for p in pos:
            if p < 0:
                p += length
            if not 0 <= p < length:
                raise IndexError("Bit position {0} out of range.".format(p))
            f(p)
    except TypeError:
        if pos < 0:
            pos += self.len
        if not 0 <= pos < length:
            raise IndexError("Bit position {0} out of range.".format(pos))
        f(pos)
|
Set one or many bits to 1 or 0 .
|
61,910 |
def invert(self, pos=None):
    """Invert one or many bits from 0 to 1 or vice versa.

    pos -- a bit position, an iterable of positions, or None for all.
    """
    if pos is None:
        self._invert_all()
        return
    if not isinstance(pos, collections.Iterable):
        pos = (pos,)
    length = self.len
    for bit_pos in pos:
        if bit_pos < 0:
            bit_pos += length
        if not 0 <= bit_pos < length:
            raise IndexError("Bit position {0} out of range.".format(bit_pos))
        self._invert(bit_pos)
|
Invert one or many bits from 0 to 1 or vice versa .
|
61,911 |
def ror(self, bits, start=None, end=None):
    """Rotate bits to the right in-place."""
    if not self.len:
        raise Error("Cannot rotate an empty bitstring.")
    if bits < 0:
        raise ValueError("Cannot rotate right by negative amount.")
    start, end = self._validate_slice(start, end)
    bits %= end - start
    if not bits:
        return
    # Move the rightmost `bits` bits around to the front of the slice.
    tail = self._slice(end - bits, end)
    self._delete(bits, end - bits)
    self._insert(tail, start)
|
Rotate bits to the right in - place .
|
61,912 |
def rol(self, bits, start=None, end=None):
    """Rotate bits to the left in-place."""
    if not self.len:
        raise Error("Cannot rotate an empty bitstring.")
    if bits < 0:
        raise ValueError("Cannot rotate left by negative amount.")
    start, end = self._validate_slice(start, end)
    bits %= end - start
    if not bits:
        return
    # Move the leftmost `bits` bits around to the back of the slice.
    head = self._slice(start, start + bits)
    self._delete(bits, start)
    self._insert(head, end - bits)
|
Rotate bits to the left in - place .
|
61,913 |
def byteswap(self, fmt=None, start=None, end=None, repeat=True):
    """Change the endianness in-place. Return number of repeats of fmt done.

    fmt -- None/0 (treat the whole slice as one unit), an int byte
    count, a struct-style format string, or an iterable of byte counts.
    repeat -- if True, apply the pattern repeatedly across the slice.
    """
    start, end = self._validate_slice(start, end)
    if fmt is None or fmt == 0:
        # Swap the whole slice as a single group of bytes.
        bytesizes = [(end - start) // 8]
    elif isinstance(fmt, numbers.Integral):
        if fmt < 0:
            raise ValueError("Improper byte length {0}.".format(fmt))
        bytesizes = [fmt]
    elif isinstance(fmt, basestring):
        m = STRUCT_PACK_RE.match(fmt)
        if not m:
            raise ValueError("Cannot parse format string {0}.".format(fmt))
        # Expand each count+code token, e.g. '2h' -> two 2-byte items.
        formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
        bytesizes = []
        for f in formatlist:
            if len(f) == 1:
                bytesizes.append(PACK_CODE_SIZE[f])
            else:
                bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1]))
    elif isinstance(fmt, collections.Iterable):
        bytesizes = fmt
        for bytesize in bytesizes:
            if not isinstance(bytesize, numbers.Integral) or bytesize < 0:
                raise ValueError("Improper byte length {0}.".format(bytesize))
    else:
        raise TypeError("Format must be an integer, string or iterable.")
    repeats = 0
    totalbitsize = 8 * sum(bytesizes)
    if not totalbitsize:
        return 0
    if repeat:
        finalbit = end
    else:
        finalbit = start + totalbitsize
    # Reverse the bytes of each group within every pattern occurrence.
    for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize):
        bytestart = patternend - totalbitsize
        for bytesize in bytesizes:
            byteend = bytestart + bytesize * 8
            self._reversebytes(bytestart, byteend)
            bytestart += bytesize * 8
        repeats += 1
    return repeats
|
Change the endianness in - place . Return number of repeats of fmt done .
|
61,914 |
def _setbitpos ( self , pos ) : if pos < 0 : raise ValueError ( "Bit position cannot be negative." ) if pos > self . len : raise ValueError ( "Cannot seek past the end of the data." ) self . _pos = pos
|
Move to absolute bit position in the bitstream .
|
61,915 |
def read(self, fmt):
    """Interpret next bits according to the format string and return result.

    fmt -- either an int (number of bits to read) or a single-token
    format string.  Advances the bit position past the data read.
    Raises ReadError if not enough bits are available.
    """
    if isinstance(fmt, numbers.Integral):
        # Plain bit count: return a slice and advance.
        if fmt < 0:
            raise ValueError("Cannot read negative amount.")
        if fmt > self.len - self._pos:
            raise ReadError("Cannot read {0} bits, only {1} available.",
                            fmt, self.len - self._pos)
        bs = self._slice(self._pos, self._pos + fmt)
        self._pos += fmt
        return bs
    p = self._pos
    _, token = tokenparser(fmt)
    if len(token) != 1:
        # Restore the position before complaining.
        self._pos = p
        raise ValueError("Format string should be a single token, not {0} "
                         "tokens - use readlist() instead.".format(len(token)))
    name, length, _ = token[0]
    if length is None:
        # An unbounded token reads everything that is left.
        length = self.len - self._pos
    value, self._pos = self._readtoken(name, self._pos, length)
    return value
|
Interpret next bits according to the format string and return result .
|
61,916 |
def readto(self, bs, bytealigned=None):
    """Read up to and including next occurrence of bs and return result."""
    if isinstance(bs, numbers.Integral):
        raise ValueError("Integers cannot be searched for")
    bs = Bits(bs)
    oldpos = self._pos
    found = self.find(bs, self._pos, bytealigned=bytealigned)
    if not found:
        raise ReadError("Substring not found")
    # Step past the match itself so it is included in the result.
    self._pos += bs.len
    return self._slice(oldpos, self._pos)
|
Read up to and including next occurrence of bs and return result .
|
61,917 |
def peek(self, fmt):
    """Interpret next bits according to format string and return result.

    Like read() but restores the bit position afterwards.
    """
    saved_pos = self._pos
    value = self.read(fmt)
    self._pos = saved_pos
    return value
|
Interpret next bits according to format string and return result .
|
61,918 |
def bytealign(self):
    """Align to next byte and return number of skipped bits."""
    # (8 - (p % 8)) % 8 is the same as -p % 8 for non-negative p.
    skipped = -self._pos % 8
    self.pos += self._offset + skipped
    assert self._assertsanity()
    return skipped
|
Align to next byte and return number of skipped bits .
|
61,919 |
def prepend(self, bs):
    """Prepend a bitstring to the current bitstring."""
    converted = self._converttobitstring(bs)
    self._prepend(converted)
    # Shift the read position so it still refers to the same bit.
    self._pos += converted.len
|
Prepend a bitstring to the current bitstring .
|
61,920 |
def find_inodes_in_use(fds):
    """Find which of these inodes are in use and give their open modes.

    fds -- file descriptors to check.  Yields (fd, use_info) pairs for
    every process found holding the same (st_dev, st_ino) open, scanned
    via /proc/*/fd symlinks and /proc/*/map_files.
    """
    self_pid = os.getpid()
    id_fd_assoc = collections.defaultdict(list)
    # Group our fds by device/inode identity.
    for fd in fds:
        st = os.fstat(fd)
        id_fd_assoc[(st.st_dev, st.st_ino)].append(fd)

    def st_id_candidates(it):
        # Yield proc paths whose inode matches one of our fds, skipping
        # entries that vanish while we scan.
        for proc_path in it:
            try:
                st = os.stat(proc_path)
            except OSError as e:
                if e.errno in (errno.ENOENT, errno.ESTALE):
                    continue
                raise
            st_id = (st.st_dev, st.st_ino)
            if st_id not in id_fd_assoc:
                continue
            yield proc_path, st_id

    for proc_path, st_id in st_id_candidates(glob.glob('/proc/[1-9]*/fd/*')):
        other_pid, other_fd = map(int, PROC_PATH_RE.match(proc_path).groups())
        original_fds = id_fd_assoc[st_id]
        if other_pid == self_pid:
            # Don't report our own probe fds as users.
            if other_fd in original_fds:
                continue
        use_info = proc_use_info(proc_path)
        if not use_info:
            continue
        for fd in original_fds:
            yield (fd, use_info)

    # Memory-mapped files keep an inode busy without an open fd.
    for proc_path, st_id in st_id_candidates(glob.glob('/proc/[1-9]*/map_files/*')):
        use_info = proc_use_info(proc_path)
        if not use_info:
            continue
        original_fds = id_fd_assoc[st_id]
        for fd in original_fds:
            yield (fd, use_info)
|
Find which of these inodes are in use and give their open modes .
|
61,921 |
def set_idle_priority(pid=None):
    """Puts a process in the idle io priority class.

    pid -- process id; defaults to the current process.
    """
    target = os.getpid() if pid is None else pid
    priority = lib.IOPRIO_PRIO_VALUE(lib.IOPRIO_CLASS_IDLE, 0)
    lib.ioprio_set(lib.IOPRIO_WHO_PROCESS, target, priority)
|
Puts a process in the idle io priority class .
|
61,922 |
def futimens(fd, ns):
    """set inode atime and mtime

    ns -- (atime, mtime) pair of struct timespec values.
    Raises IOError when the underlying futimens(2) call fails.
    """
    atime, mtime = ns
    assert 0 <= atime.tv_nsec < 1e9
    assert 0 <= mtime.tv_nsec < 1e9
    times = ffi.new('struct timespec[2]')
    times[0] = atime
    times[1] = mtime
    if lib.futimens(fd, times) != 0:
        raise IOError(
            ffi.errno, os.strerror(ffi.errno),
            (fd, atime.tv_sec, atime.tv_nsec, mtime.tv_sec, mtime.tv_nsec))
|
set inode atime and mtime
|
61,923 |
def fopenat(base_fd, path):
    """Does openat read-only, then fdopen to get a file object."""
    fd = openat(base_fd, path, os.O_RDONLY)
    return os.fdopen(fd, 'rb')
|
Does openat read - only then does fdopen to get a file object
|
61,924 |
def fopenat_rw(base_fd, path):
    """Does openat read-write, then fdopen to get a file object."""
    fd = openat(base_fd, path, os.O_RDWR)
    return os.fdopen(fd, 'rb+')
|
Does openat read - write then does fdopen to get a file object
|
61,925 |
def fiemap(fd):
    """Gets a map of file extents.

    Yields FiemapExtent values for fd by issuing the FS_IOC_FIEMAP
    ioctl repeatedly, `count` extents per batch.
    """
    count = 72
    fiemap_cbuf = ffi.new(
        'char[]',
        ffi.sizeof('struct fiemap') + count * ffi.sizeof('struct fiemap_extent'))
    fiemap_pybuf = ffi.buffer(fiemap_cbuf)
    fiemap_ptr = ffi.cast('struct fiemap*', fiemap_cbuf)
    # Keep the ioctl request buffer within a single page.
    assert ffi.sizeof(fiemap_cbuf) <= 4096
    while True:
        fiemap_ptr.fm_length = lib.FIEMAP_MAX_OFFSET
        fiemap_ptr.fm_extent_count = count
        fcntl.ioctl(fd, lib.FS_IOC_FIEMAP, fiemap_pybuf)
        if fiemap_ptr.fm_mapped_extents == 0:
            break
        for i in range(fiemap_ptr.fm_mapped_extents):
            extent = fiemap_ptr.fm_extents[i]
            yield FiemapExtent(extent.fe_logical, extent.fe_physical,
                               extent.fe_length, extent.fe_flags)
        # Resume the next batch just past the last extent seen.
        fiemap_ptr.fm_start = extent.fe_logical + extent.fe_length
|
Gets a map of file extents .
|
61,926 |
def getflags(fd):
    """Gets per-file filesystem flags."""
    flags_ptr = ffi.new('uint64_t*')
    flags_buf = ffi.buffer(flags_ptr)
    # The ioctl writes the flags into the buffer in place.
    fcntl.ioctl(fd, lib.FS_IOC_GETFLAGS, flags_buf)
    return flags_ptr[0]
|
Gets per - file filesystem flags .
|
61,927 |
def editflags(fd, add_flags=0, remove_flags=0):
    """Sets and unsets per-file filesystem flags.

    Returns which of the edited flags were previously set.
    """
    if add_flags & remove_flags != 0:
        raise ValueError(
            'Added and removed flags shouldn\'t overlap',
            add_flags, remove_flags)
    flags_ptr = ffi.new('uint64_t*')
    flags_buf = ffi.buffer(flags_ptr)
    fcntl.ioctl(fd, lib.FS_IOC_GETFLAGS, flags_buf)
    prev_flags = flags_ptr[0]
    flags_ptr[0] = (prev_flags | add_flags) & ~remove_flags
    fcntl.ioctl(fd, lib.FS_IOC_SETFLAGS, flags_buf)
    return prev_flags & (add_flags | remove_flags)
|
Sets and unsets per - file filesystem flags .
|
61,928 |
def connect(host, username, password, **kwargs):
    """Connect and login to routeros device. Upon success return a Api class.

    Tries each configured login method in turn: a TrapError /
    MultiTrapError means that method was rejected, so the next one is
    tried; connection-level failures close the transport and propagate.

    NOTE(review): if every login method raises a trap, this falls off
    the loop and returns None with the transport still open - confirm
    whether that is intended.
    """
    arguments = ChainMap(kwargs, defaults)
    transport = create_transport(host, **arguments)
    protocol = ApiProtocol(transport=transport, encoding=arguments['encoding'])
    api = arguments['subclass'](protocol=protocol)
    for method in arguments['login_methods']:
        try:
            method(api=api, username=username, password=password)
            return api
        except (TrapError, MultiTrapError):
            # This login method was rejected; try the next one.
            pass
        except (ConnectionError, FatalError):
            transport.close()
            raise
|
Connect and login to routeros device . Upon success return a Api class .
|
61,929 |
def _readSentence(self):
    """Read one sentence and parse words."""
    reply_word, raw_words = self.protocol.readSentence()
    parsed = dict(parseWord(word) for word in raw_words)
    return reply_word, parsed
|
Read one sentence and parse words .
|
61,930 |
def _readResponse ( self ) : traps = [ ] reply_word = None while reply_word != '!done' : reply_word , words = self . _readSentence ( ) if reply_word == '!trap' : traps . append ( TrapError ( ** words ) ) elif reply_word in ( '!re' , '!done' ) and words : yield words if len ( traps ) > 1 : raise MultiTrapError ( * traps ) elif len ( traps ) == 1 : raise traps [ 0 ]
|
Yield each row of response until !done is received .
|
61,931 |
def encodeSentence(self, *words):
    """Encode given sentence in API format.

    The sentence is terminated by a zero-length word (one null byte).
    """
    encoded_words = [self.encodeWord(word) for word in words]
    return b''.join(encoded_words) + b'\x00'
|
Encode given sentence in API format .
|
61,932 |
def encodeWord(self, word):
    """Encode word in API format.

    The API length prefix must count encoded *bytes*, not characters,
    so the length is taken from the encoded word.  (Using len(word)
    under-counts for multi-byte characters and corrupts the protocol
    stream.)
    """
    encoded_word = word.encode(encoding=self.encoding, errors='strict')
    return Encoder.encodeLength(len(encoded_word)) + encoded_word
|
Encode word in API format .
|
61,933 |
def encodeLength(length):
    """Encode given length in mikrotik format.

    Returns 1-4 bytes with the high control bits set per the API
    protocol.  Raises ConnectionError for lengths >= 2**28.
    """
    if length < 0x80:
        prefixed = length
        num_bytes = 1
    elif length < 0x4000:
        prefixed = length | 0x8000
        num_bytes = 2
    elif length < 0x200000:
        prefixed = length | 0xC00000
        num_bytes = 3
    elif length < 0x10000000:
        prefixed = length | 0xE0000000
        num_bytes = 4
    else:
        raise ConnectionError('Unable to encode length of {}'.format(length))
    # Pack as a big-endian uint32 and keep only the significant bytes.
    return pack('!I', prefixed)[-num_bytes:]
|
Encode given length in mikrotik format .
|
61,934 |
def determineLength(length):
    """Given first read byte, determine how many more bytes need to be
    read in order to get the fully encoded length."""
    first_byte = ord(length)
    # Control-bit thresholds map to 0-3 additional bytes.
    for threshold, extra_bytes in ((128, 0), (192, 1), (224, 2), (240, 3)):
        if first_byte < threshold:
            return extra_bytes
    raise ConnectionError('Unknown controll byte {}'.format(length))
|
Given the first byte read , determine how many more bytes need to be read in order to get the fully encoded length .
|
61,935 |
def decodeLength(length):
    """Decode length based on given bytes."""
    num_bytes = len(length)
    # Zero-pad up to 4 bytes and strip the control bits with XOR.
    if num_bytes < 2:
        padding, mask = b'\x00\x00\x00', 0
    elif num_bytes < 3:
        padding, mask = b'\x00\x00', 0x8000
    elif num_bytes < 4:
        padding, mask = b'\x00', 0xC00000
    elif num_bytes < 5:
        padding, mask = b'', 0xE0000000
    else:
        raise ConnectionError('Unable to decode length of {}'.format(length))
    return unpack('!I', padding + length)[0] ^ mask
|
Decode length based on given bytes .
|
61,936 |
def writeSentence(self, cmd, *words):
    """Write encoded sentence."""
    payload = self.encodeSentence(cmd, *words)
    self.log('<---', cmd, *words)
    self.transport.write(payload)
|
Write encoded sentence .
|
61,937 |
def read(self, length):
    """Read as many bytes from socket as specified in length.

    Loops until exactly `length` bytes have been received.  Raises
    ConnectionError if the peer closes the connection first.
    """
    data = bytearray()
    while len(data) != length:
        # recv() returning b'' means the connection closed.  The old
        # code checked `data`, which is never falsy after the first
        # partial read, so a mid-read disconnect looped forever.
        chunk = self.sock.recv(length - len(data))
        if not chunk:
            raise ConnectionError('Connection unexpectedly closed.')
        data += chunk
    return data
|
Read as many bytes from socket as specified in length . Loop as long as every byte is read unless exception is raised .
|
61,938 |
def parseWord(word):
    """Split given attribute word to key, value pair.

    Values are cast to python equivalents: ints where possible,
    yes/true/no/false to booleans, everything else left as a string.
    """
    _, key, value = word.split('=', 2)
    try:
        return (key, int(value))
    except ValueError:
        pass
    booleans = {'yes': True, 'true': True, 'no': False, 'false': False}
    return (key, booleans.get(value, value))
|
Split given attribute word to key value pair .
|
61,939 |
def composeWord(key, value):
    """Create an attribute word from key, value pair.

    Values are cast to api equivalents.
    """
    if type(value) == int:
        # Exact int check on purpose: bool is an int subclass but must
        # map to yes/no, not 1/0.
        text = str(value)
    else:
        text = {True: 'yes', False: 'no'}.get(value, str(value))
    return '={}={}'.format(key, text)
|
Create an attribute word from a key value pair . Values are cast to api equivalents .
|
61,940 |
def login_token(api, username, password):
    """Login using pre routeros 6.43 authorization method."""
    sentence = api('/login')
    challenge = tuple(sentence)[0]['ret']
    hashed = encode_password(challenge, password)
    # Consume the reply generator so the login request actually runs.
    tuple(api('/login', **{'name': username, 'response': hashed}))
|
Login using pre routeros 6 . 43 authorization method .
|
61,941 |
def check_relations(self, relations):
    """Recursive function which checks if a relation is valid."""
    for relation in relations:
        if not relation:
            continue
        local_field, _, remainder = relation.partition('.')
        if local_field not in self.fields:
            raise ValueError('Unknown field "{}"'.format(local_field))
        field = self.fields[local_field]
        if not isinstance(field, BaseRelationship):
            raise ValueError(
                'Can only include relationships. "{}" is a "{}"'.format(
                    field.name, field.__class__.__name__))
        field.include_data = True
        if remainder:
            # Validate the rest of the dotted path on the nested schema.
            field.schema.check_relations([remainder])
|
Recursive function which checks if a relation is valid .
|
61,942 |
def format_json_api_response(self, data, many):
    """Post-dump hook that formats serialized data as a top-level JSON API object."""
    response = self.format_items(data, many)
    response = self.wrap_response(response, many)
    response = self.render_included_data(response)
    return self.render_meta_document(response)
|
Post - dump hook that formats serialized data as a top - level JSON API object .
|
61,943 |
def _do_load(self, data, many=None, **kwargs):
    """Override marshmallow.Schema._do_load for custom JSON API handling.

    Stores the top-level `included` and `meta` payloads on the schema,
    then reformats any validation errors as JSON API error objects.
    """
    many = self.many if many is None else bool(many)
    self.included_data = data.get('included', {})
    self.document_meta = data.get('meta', {})
    try:
        result = super(Schema, self)._do_load(data, many, **kwargs)
    except ValidationError as err:
        # marshmallow 3 raises; reformat the messages and re-raise.
        error_messages = err.messages
        if '_schema' in error_messages:
            error_messages = error_messages['_schema']
        formatted_messages = self.format_errors(error_messages, many=many)
        err.messages = formatted_messages
        raise err
    else:
        if _MARSHMALLOW_VERSION_INFO[0] < 3:
            # marshmallow 2 returns a (data, errors) tuple instead.
            data, error_messages = result
            if '_schema' in error_messages:
                error_messages = error_messages['_schema']
            formatted_messages = self.format_errors(error_messages, many=many)
            return data, formatted_messages
        return result
|
Override marshmallow . Schema . _do_load for custom JSON API handling .
|
61,944 |
def _extract_from_included ( self , data ) : return ( item for item in self . included_data if item [ 'type' ] == data [ 'type' ] and str ( item [ 'id' ] ) == str ( data [ 'id' ] ) )
|
Extract included data matching the items in data .
|
61,945 |
def inflect(self, text):
    """Inflect text if the inflect class Meta option is defined,
    otherwise do nothing."""
    if self.opts.inflect:
        return self.opts.inflect(text)
    return text
|
Inflect text if the inflect class Meta option is defined otherwise do nothing .
|
61,946 |
def format_errors(self, errors, many):
    """Format validation errors as JSON Error objects."""
    if not errors:
        return {}
    if isinstance(errors, (list, tuple)):
        # Already a flat list of error objects.
        return {'errors': errors}
    formatted = []
    if many:
        # Keyed by item index first, then by field name.
        for index, item_errors in iteritems(errors):
            for field_name, field_errors in iteritems(item_errors):
                formatted.extend(
                    self.format_error(field_name, message, index=index)
                    for message in field_errors)
    else:
        for field_name, field_errors in iteritems(errors):
            formatted.extend(
                self.format_error(field_name, message)
                for message in field_errors)
    return {'errors': formatted}
|
Format validation errors as JSON Error objects .
|
61,947 |
def format_error(self, field_name, message, index=None):
    """Override-able hook to format a single error message as an Error object.

    Builds a JSON Pointer into the request document for `source`.
    """
    is_relationship = isinstance(
        self.declared_fields.get(field_name), BaseRelationship)
    pointer = ['/data']
    if index is not None:
        pointer.append(str(index))
    if is_relationship:
        pointer.append('relationships')
    elif field_name != 'id':
        # `id` lives at the resource root, everything else under attributes.
        pointer.append('attributes')
    pointer.append(self.inflect(field_name))
    if is_relationship:
        pointer.append('data')
    return {
        'detail': message,
        'source': {'pointer': '/'.join(pointer)},
    }
|
Override - able hook to format a single error message as an Error object .
|
61,948 |
def format_item(self, item):
    """Format a single datum as a Resource object.

    Splits the serialized fields into JSON API sections: `id`, `type`,
    `attributes`, `relationships`, resource `meta` and document meta.
    """
    if not item:
        return None
    ret = self.dict_class()
    ret[TYPE] = self.opts.type_
    # Map dump keys back to schema field names.
    attributes = {(get_dump_key(self.fields[field]) or field): field for field in self.fields}
    for field_name, value in iteritems(item):
        attribute = attributes[field_name]
        if attribute == ID:
            ret[ID] = value
        elif isinstance(self.fields[attribute], DocumentMeta):
            # Accumulated on the schema; rendered at the document level.
            if not self.document_meta:
                self.document_meta = self.dict_class()
            self.document_meta.update(value)
        elif isinstance(self.fields[attribute], ResourceMeta):
            if 'meta' not in ret:
                ret['meta'] = self.dict_class()
            ret['meta'].update(value)
        elif isinstance(self.fields[attribute], BaseRelationship):
            if value:
                if 'relationships' not in ret:
                    ret['relationships'] = self.dict_class()
                ret['relationships'][self.inflect(field_name)] = value
        else:
            if 'attributes' not in ret:
                ret['attributes'] = self.dict_class()
            ret['attributes'][self.inflect(field_name)] = value
    links = self.get_resource_links(item)
    if links:
        ret['links'] = links
    return ret
|
Format a single datum as a Resource object .
|
61,949 |
def format_items(self, data, many):
    """Format data as a Resource object or list of Resource objects."""
    if not many:
        return self.format_item(data)
    return [self.format_item(item) for item in data]
|
Format data as a Resource object or list of Resource objects .
|
61,950 |
def get_top_level_links(self, data, many):
    """Hook for adding links to the root of the response data."""
    self_link = None
    if many and self.opts.self_url_many:
        self_link = self.generate_url(self.opts.self_url_many)
    elif not many and self.opts.self_url:
        # Reuse the resource's own self link.
        self_link = data.get('links', {}).get('self', None)
    return {'self': self_link}
|
Hook for adding links to the root of the response data .
|
61,951 |
def get_resource_links(self, item):
    """Hook for adding links to a resource object."""
    if not self.opts.self_url:
        return None
    url_params = resolve_params(item, self.opts.self_url_kwargs or {})
    links = self.dict_class()
    links['self'] = self.generate_url(self.opts.self_url, **url_params)
    return links
|
Hook for adding links to a resource object .
|
61,952 |
def wrap_response(self, data, many):
    """Wrap data and links according to the JSON API"""
    ret = {'data': data}
    # Only compute links when there is something to link to.
    if many or data:
        links = self.get_top_level_links(data, many)
        if links['self']:
            ret['links'] = links
    return ret
|
Wrap data and links according to the JSON API
|
61,953 |
def extract_value(self, data):
    """Extract the id key and validate the request structure.

    data -- a JSON API resource identifier / resource object dict.
    Returns the fully loaded related object when `attributes` are
    present, otherwise just the (possibly deserialized) id value.
    Raises ValidationError on a malformed resource object.
    """
    errors = []
    if 'id' not in data:
        errors.append('Must have an `id` field')
    if 'type' not in data:
        errors.append('Must have a `type` field')
    elif data['type'] != self.type_:
        errors.append('Invalid `type` specified')
    if errors:
        raise ValidationError(errors)
    # A full resource object is loaded through the related schema,
    # passing along the top-level `included` data.
    if 'attributes' in data and self.__schema:
        result = self.schema.load({'data': data, 'included': self.root.included_data})
        # marshmallow 2 returns (data, errors); 3 returns data directly.
        return result.data if _MARSHMALLOW_VERSION_INFO[0] < 3 else result
    id_value = data.get('id')
    if self.__schema:
        # Deserialize the id through the related schema's id field.
        id_value = self.schema.fields['id'].deserialize(id_value)
    return id_value
|
Extract the id key and validate the request structure .
|
61,954 |
def fill_package_digests(generated_project: Project) -> Project:
    """Temporary fill package digests stated in Pipfile.lock.

    For every locked (dev-)package without hashes, query its own index
    if configured, otherwise fall back to scanning all Pipfile sources.
    Raises ValueError if no source can provide hashes.
    """
    for package_version in chain(generated_project.pipfile_lock.packages, generated_project.pipfile_lock.dev_packages):
        if package_version.hashes:
            # Already filled.
            continue
        if package_version.index:
            scanned_hashes = package_version.index.get_package_hashes(package_version.name, package_version.locked_version)
        else:
            # Best-effort scan: first source that answers wins.
            for source in generated_project.pipfile.meta.sources.values():
                try:
                    scanned_hashes = source.get_package_hashes(package_version.name, package_version.locked_version)
                    break
                except Exception:
                    continue
            else:
                raise ValueError("Unable to find package hashes")
        for entry in scanned_hashes:
            package_version.hashes.append("sha256:" + entry["sha256"])
    return generated_project
|
Temporary fill package digests stated in Pipfile . lock .
|
61,955 |
def fetch_digests(self, package_name: str, package_version: str) -> dict:
    """Fetch digests for the given package in specified version from the
    given package index.

    Returns a mapping of source url to the hashes reported by it;
    sources that do not know the package are skipped.
    """
    report = {}
    for source in self._sources:
        try:
            hashes = source.get_package_hashes(package_name, package_version)
        except NotFound as exc:
            _LOGGER.debug(
                f"Package {package_name} in version {package_version} not "
                f"found on index {source.name}: {str(exc)}"
            )
        else:
            report[source.url] = hashes
    return report
|
Fetch digests for the given package in specified version from the given package index .
|
61,956 |
def random_passphrase_from_wordlist(phrase_length, wordlist):
    """An extremely entropy efficient passphrase generator.

    Draws entropy from /dev/random (falling back to urandom) and maps
    fixed-size entropy chunks to wordlist indices.
    NOTE(review): Python 2 only - relies on str.encode('hex').
    """
    passphrase_words = []
    numbytes_of_entropy = phrase_length * 2
    entropy = list(dev_random_entropy(numbytes_of_entropy, fallback_to_urandom=True))
    # Bytes needed to index the whole wordlist.
    bytes_per_word = int(ceil(log(len(wordlist), 2) / 8))
    if (phrase_length * bytes_per_word > 64):
        raise Exception("Error! This operation requires too much entropy. \ Try a shorter phrase length or word list.")
    for i in range(phrase_length):
        current_entropy = entropy[i * bytes_per_word:(i + 1) * bytes_per_word]
        # Interpret the entropy chunk as a hex integer index into the list.
        index = int(''.join(current_entropy).encode('hex'), 16) % len(wordlist)
        word = wordlist[index]
        passphrase_words.append(word)
    return " ".join(passphrase_words)
|
An extremely entropy efficient passphrase generator .
|
61,957 |
def reverse_hash(hash, hex_format=True):
    """hash is in hex or binary format

    Returns the hex hash with its byte order reversed.
    """
    if not hex_format:
        hash = hexlify(hash)
    # Split into byte pairs (2 hex chars each), reverse, and rejoin.
    pairs = [hash[i:i + 2] for i in range(0, len(hash), 2)]
    return "".join(reversed(pairs))
|
hash is in hex or binary format
|
61,958 |
def get_num_words_with_entropy(bits_of_entropy, wordlist):
    """Gets the number of words randomly selected from a given wordlist
    that would result in the number of bits of entropy specified."""
    bits_per_word = math.log(len(wordlist)) / math.log(2)
    return int(math.ceil(bits_of_entropy / bits_per_word))
|
Gets the number of words randomly selected from a given wordlist that would result in the number of bits of entropy specified .
|
61,959 |
def create_passphrase(bits_of_entropy=None, num_words=None,
                      language='english', word_source='wiktionary'):
    """Creates a passphrase that has a certain number of bits of entropy
    OR a certain number of words."""
    wordlist = get_wordlist(language, word_source)
    if not num_words:
        # Default target is 80 bits of entropy.
        num_words = get_num_words_with_entropy(bits_of_entropy or 80, wordlist)
    return ' '.join(pick_random_words_from_wordlist(wordlist, num_words))
|
Creates a passphrase that has a certain number of bits of entropy OR a certain number of words .
|
61,960 |
def serialize_input(input, signature_script_hex=''):
    """Serializes a transaction input.

    input -- dict with 'transaction_hash', 'output_index' and optional
    'sequence' (defaults to UINT_MAX).  The hash may be hex (64 chars)
    or raw bytes (32 chars).  Returns a hex string.
    NOTE(review): Python 2 style - len(...)/2 float division and
    str-based raw hashes would need adjusting for Python 3.
    """
    if not (isinstance(input, dict) and 'transaction_hash' in input and 'output_index' in input):
        raise Exception('Required parameters: transaction_hash, output_index')
    if is_hex(str(input['transaction_hash'])) and len(str(input['transaction_hash'])) != 64:
        raise Exception("Transaction hash '%s' must be 32 bytes" % input['transaction_hash'])
    elif not is_hex(str(input['transaction_hash'])) and len(str(input['transaction_hash'])) != 32:
        raise Exception("Transaction hash '%s' must be 32 bytes" % hexlify(input['transaction_hash']))
    if not 'sequence' in input:
        input['sequence'] = UINT_MAX
    # Layout: little-endian hash | output index | script length | script | sequence.
    return ''.join([flip_endian(input['transaction_hash']), hexlify(struct.pack('<I', input['output_index'])), hexlify(variable_length_int(len(signature_script_hex) / 2)), signature_script_hex, hexlify(struct.pack('<I', input['sequence']))])
|
Serializes a transaction input .
|
61,961 |
def serialize_output(output):
    """Serializes a transaction output.

    output -- dict with 'value' (satoshis) and 'script_hex'.
    """
    if not ('value' in output and 'script_hex' in output):
        raise Exception('Invalid output')
    value_part = hexlify(struct.pack('<Q', output['value']))
    script_len_part = hexlify(variable_length_int(len(output['script_hex']) / 2))
    return ''.join([value_part, script_len_part, output['script_hex']])
|
Serializes a transaction output .
|
61,962 |
def serialize_transaction(inputs, outputs, lock_time=0, version=1):
    """Serializes a transaction.

    Layout: version | #inputs | inputs | #outputs | outputs | lock_time.
    """
    serialized_inputs = ''.join(serialize_input(input) for input in inputs)
    serialized_outputs = ''.join(serialize_output(output) for output in outputs)
    parts = [
        hexlify(struct.pack('<I', version)),
        hexlify(variable_length_int(len(inputs))),
        serialized_inputs,
        hexlify(variable_length_int(len(outputs))),
        serialized_outputs,
        hexlify(struct.pack('<I', lock_time)),
    ]
    return ''.join(parts)
|
Serializes a transaction .
|
61,963 |
def deserialize_transaction(tx_hex):
    """Given a serialized transaction, return its inputs, outputs,
    locktime and version."""
    tx = bitcoin.deserialize(str(tx_hex))
    ret_inputs = []
    for inp in tx["ins"]:
        entry = {
            "transaction_hash": inp["outpoint"]["hash"],
            "output_index": int(inp["outpoint"]["index"]),
        }
        if "sequence" in inp:
            entry["sequence"] = int(inp["sequence"])
        if "script" in inp:
            entry["script_sig"] = inp["script"]
        ret_inputs.append(entry)
    ret_outputs = [
        {"value": out["value"], "script_hex": out["script"]}
        for out in tx["outs"]
    ]
    return ret_inputs, ret_outputs, tx["locktime"], tx["version"]
|
Given a serialized transaction return its inputs outputs locktime and version
|
61,964 |
def variable_length_int(i):
    """Encodes integers into variable length integers, which are used in
    Bitcoin in order to save space.

    NOTE(review): Python 2 only - concatenates chr() strings with
    struct.pack output and references `long`.
    """
    if not isinstance(i, (int, long)):
        raise Exception('i must be an integer')
    if i < (2 ** 8 - 3):
        return chr(i)  # single byte, below the 0xfd marker
    elif i < (2 ** 16):
        return chr(253) + struct.pack('<H', i)  # 0xfd + uint16
    elif i < (2 ** 32):
        return chr(254) + struct.pack('<I', i)  # 0xfe + uint32
    elif i < (2 ** 64):
        return chr(255) + struct.pack('<Q', i)  # 0xff + uint64
    else:
        raise Exception('Integer cannot exceed 8 bytes in length.')
|
Encodes integers into variable length integers which are used in Bitcoin in order to save space .
|
61,965 |
def make_pay_to_address_script(address):
    """Takes in an address and returns the script"""
    hash160 = hexlify(b58check_decode(address))
    # Standard P2PKH script template.
    return script_to_hex(
        'OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG' % hash160)
|
Takes in an address and returns the script
|
61,966 |
def make_op_return_script(data, format='bin'):
    """Takes in raw data to be embedded and returns an OP_RETURN script hex.

    :param data: the payload; hex-encoded when format == 'hex', raw
        bytes when format == 'bin'.
    :param format: 'hex' or 'bin'.
    Raises an Exception if the payload exceeds MAX_BYTES_AFTER_OP_RETURN
    bytes or the format is unknown.
    """
    if format == 'hex':
        assert(is_hex(data))
        hex_data = data
    elif format == 'bin':
        hex_data = hexlify(data)
    else:
        raise Exception("Format must be either 'hex' or 'bin'")
    num_bytes = count_bytes(hex_data)
    if num_bytes > MAX_BYTES_AFTER_OP_RETURN:
        # Report the actual configured limit instead of the hard-coded "40"
        # the original message used, which went stale if the constant changed.
        raise Exception('Data is %i bytes - must not exceed %i.' % (
            num_bytes, MAX_BYTES_AFTER_OP_RETURN))
    script_string = 'OP_RETURN %s' % hex_data
    return script_to_hex(script_string)
|
Takes in raw ascii data to be embedded and returns a script .
|
61,967 |
def create_bitcoind_service_proxy(rpc_username, rpc_password, server='127.0.0.1', port=8332, use_https=False):
    """Create a bitcoind JSON-RPC service proxy from credentials and host."""
    if use_https:
        protocol = 'https'
    else:
        protocol = 'http'
    uri = '%s://%s:%s@%s:%s' % (
        protocol, rpc_username, rpc_password, server, port)
    return AuthServiceProxy(uri)
|
create a bitcoind service proxy
|
61,968 |
def get_unspents(address, blockchain_client=BlockchainInfoClient()):
    """Gets the unspent outputs for a given address.

    Dispatches to the backend module matching the concrete type of
    blockchain_client. NOTE: branch order matters — the specific client
    classes are checked before the duck-typed ``get_unspents`` fallback
    and before the generic BlockchainClient catch-all.
    """
    if isinstance(blockchain_client, BlockcypherClient):
        return blockcypher.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, BlockchainInfoClient):
        return blockchain_info.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, ChainComClient):
        return chain_com.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, (BitcoindClient, AuthServiceProxy)):
        return bitcoind.get_unspents(address, blockchain_client)
    elif hasattr(blockchain_client, "get_unspents"):
        # Duck-typed client: anything exposing its own get_unspents(address).
        return blockchain_client.get_unspents(address)
    elif isinstance(blockchain_client, BlockchainClient):
        raise Exception('That blockchain interface is not supported.')
    else:
        raise Exception('A BlockchainClient object is required')
|
Gets the unspent outputs for a given address .
|
61,969 |
def broadcast_transaction(hex_tx, blockchain_client):
    """Dispatches a raw hex transaction to the network.

    Mirrors the dispatch logic of get_unspents: concrete client classes
    first, then a duck-typed ``broadcast_transaction`` fallback, then the
    generic BlockchainClient catch-all. Branch order is significant.
    """
    if isinstance(blockchain_client, BlockcypherClient):
        return blockcypher.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, BlockchainInfoClient):
        return blockchain_info.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, ChainComClient):
        return chain_com.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, (BitcoindClient, AuthServiceProxy)):
        return bitcoind.broadcast_transaction(hex_tx, blockchain_client)
    elif hasattr(blockchain_client, "broadcast_transaction"):
        return blockchain_client.broadcast_transaction(hex_tx)
    elif isinstance(blockchain_client, BlockchainClient):
        raise Exception('That blockchain interface is not supported.')
    else:
        raise Exception('A BlockchainClient object is required')
|
Dispatches a raw hex transaction to the network .
|
61,970 |
def make_send_to_address_tx(recipient_address, amount, private_key, blockchain_client=BlockchainInfoClient(), fee=STANDARD_FEE, change_address=None):
    """Builds and signs a send-to-address transaction.

    :param recipient_address: destination address.
    :param amount: amount to send (satoshis).
    :param private_key: key used both to source inputs and to sign.
    :param change_address: defaults to the key's own address.
    :return: the signed transaction as a hex string.
    """
    private_key_obj, from_address, inputs = analyze_private_key(
        private_key, blockchain_client)
    if not change_address:
        change_address = from_address
    outputs = make_pay_to_address_outputs(
        recipient_address, amount, inputs, change_address, fee=fee)
    # Start from the unsigned serialization and sign each input in turn.
    # Initializing signed_tx here also means the function returns a defined
    # value even when there are no inputs (the original raised
    # UnboundLocalError in that case).
    signed_tx = serialize_transaction(inputs, outputs)
    for i in xrange(0, len(inputs)):
        signed_tx = sign_transaction(signed_tx, i, private_key_obj.to_hex())
    return signed_tx
|
Builds and signs a send to address transaction .
|
61,971 |
def make_op_return_tx(data, private_key, blockchain_client=BlockchainInfoClient(), fee=OP_RETURN_FEE, change_address=None, format='bin'):
    """Builds and signs an OP_RETURN transaction embedding ``data``.

    :param data: payload ('bin' raw bytes or 'hex' string, per ``format``).
    :param private_key: key used both to source inputs and to sign.
    :param change_address: defaults to the key's own address.
    :return: the signed transaction as a hex string.
    """
    private_key_obj, from_address, inputs = analyze_private_key(
        private_key, blockchain_client)
    if not change_address:
        change_address = from_address
    outputs = make_op_return_outputs(
        data, inputs, change_address, fee=fee, format=format)
    # Sign every input in sequence, starting from the unsigned serialization.
    # Initializing signed_tx here keeps the return value defined even when
    # inputs is empty (the original raised UnboundLocalError in that case).
    signed_tx = serialize_transaction(inputs, outputs)
    for i in xrange(0, len(inputs)):
        signed_tx = sign_transaction(signed_tx, i, private_key_obj.to_hex())
    return signed_tx
|
Builds and signs an OP_RETURN transaction .
|
61,972 |
def send_to_address(recipient_address, amount, private_key, blockchain_client=BlockchainInfoClient(), fee=STANDARD_FEE, change_address=None):
    """Builds, signs, and dispatches a send-to-address transaction."""
    signed_tx = make_send_to_address_tx(
        recipient_address, amount, private_key, blockchain_client,
        fee=fee, change_address=change_address)
    # Hand the fully-signed transaction to the network and return the
    # broadcaster's response.
    return broadcast_transaction(signed_tx, blockchain_client)
|
Builds signs and dispatches a send to address transaction .
|
61,973 |
def embed_data_in_blockchain(data, private_key, blockchain_client=BlockchainInfoClient(), fee=OP_RETURN_FEE, change_address=None, format='bin'):
    """Builds, signs, and dispatches an OP_RETURN transaction."""
    signed_tx = make_op_return_tx(
        data, private_key, blockchain_client,
        fee=fee, change_address=change_address, format=format)
    # Broadcast and return whatever response the client gives back.
    return broadcast_transaction(signed_tx, blockchain_client)
|
Builds signs and dispatches an OP_RETURN transaction .
|
61,974 |
def sign_all_unsigned_inputs(hex_privkey, unsigned_tx_hex):
    """Sign a serialized transaction's unsigned inputs.

    Only inputs whose script_sig is empty or missing are signed; inputs
    that already carry a signature script are left untouched.
    :return: the transaction hex after signing (unchanged if every input
        was already signed).
    """
    inputs, outputs, locktime, version = deserialize_transaction(unsigned_tx_hex)
    tx_hex = unsigned_tx_hex
    for index in xrange(0, len(inputs)):
        # Use .get(): deserialize_transaction only sets 'script_sig' when the
        # parsed input carried a script, so direct indexing could raise
        # KeyError; a missing script_sig means "unsigned" here.
        if not inputs[index].get('script_sig'):
            tx_hex = sign_transaction(str(unsigned_tx_hex), index, hex_privkey)
            unsigned_tx_hex = tx_hex
    return tx_hex
|
Sign a serialized transaction s unsigned inputs
|
61,975 |
def calculate_merkle_root(hashes, hash_function=bin_double_sha256, hex_format=True):
    """Reduce a list of hashes to their merkle root.

    With hex_format=True the input hex strings are converted to
    byte-reversed binary first and the root is converted back on the way
    out; otherwise everything stays binary.
    """
    working = hex_to_bin_reversed_hashes(hashes) if hex_format else hashes
    # Pair-and-hash until a single node remains.
    while len(working) > 1:
        working = calculate_merkle_pairs(working, hash_function)
    merkle_root = working[0]
    return bin_to_hex_reversed(merkle_root) if hex_format else merkle_root
|
Takes in a list of binary hashes and returns the binary merkle root hash.
|
61,976 |
def b58check_encode(bin_s, version_byte=0):
    """Takes in a binary string and converts it to a base 58 check string.

    :param bin_s: the binary payload (e.g. a hash160).
    :param version_byte: network/version prefix byte (0 = mainnet P2PKH).
    """
    # Prepend the version byte to the payload.
    bin_s = chr(int(version_byte)) + bin_s
    # Leading zero bytes are lost in the base conversion below, so count
    # them now and re-add them as leading base-58 zero digits at the end.
    num_leading_zeros = len(re.match(r'^\x00*', bin_s).group(0))
    # Append the checksum of the versioned payload (bin_checksum is defined
    # elsewhere; presumably the standard 4-byte double-SHA256 — confirm).
    bin_s = bin_s + bin_checksum(bin_s)
    hex_s = hexlify(bin_s)
    # Re-base the hex digits into the base-58 alphabet.
    b58_s = change_charset(hex_s, HEX_KEYSPACE, B58_KEYSPACE)
    return B58_KEYSPACE[0] * num_leading_zeros + b58_s
|
Takes in a binary string and converts it to a base 58 check string .
|
61,977 |
def make_pay_to_address_outputs(to_address, send_amount, inputs, change_address, fee=STANDARD_FEE):
    """Builds the outputs for a pay-to-address transaction: the payment
    output followed by the change output."""
    payment_output = {
        "script_hex": make_pay_to_address_script(to_address),
        "value": send_amount,
    }
    change_output = {
        "script_hex": make_pay_to_address_script(change_address),
        # Change = total inputs - amount sent - fee.
        "value": calculate_change_amount(inputs, send_amount, fee),
    }
    return [payment_output, change_output]
|
Builds the outputs for a pay to address transaction .
|
61,978 |
def make_op_return_outputs(data, inputs, change_address, fee=OP_RETURN_FEE, send_amount=0, format='bin'):
    """Builds the outputs for an OP_RETURN transaction: the data-carrier
    output followed by the change output."""
    data_output = {
        "script_hex": make_op_return_script(data, format=format),
        "value": send_amount,
    }
    change_output = {
        "script_hex": make_pay_to_address_script(change_address),
        # Change = total inputs - amount sent - fee.
        "value": calculate_change_amount(inputs, send_amount, fee),
    }
    return [data_output, change_output]
|
Builds the outputs for an OP_RETURN transaction .
|
61,979 |
def add_timezone(value, tz=None):
    """If the value is naive, then the timezone is added to it.

    Accepts a datetime or a date; a date is promoted to a datetime at
    midnight before being made timezone-aware. Aware datetimes pass
    through unchanged.
    """
    tz = tz or timezone.get_current_timezone()
    try:
        if timezone.is_naive(value):
            return timezone.make_aware(value, tz)
    except AttributeError:
        # A plain date has no tzinfo; combine it with midnight first.
        as_datetime = datetime.datetime.combine(value, datetime.time())
        return timezone.make_aware(as_datetime, tz)
    return value
|
If the value is naive then the timezone is added to it .
|
61,980 |
def get_active_entry(user, select_for_update=False):
    """Returns the user's currently-active entry, or None.

    Raises ActiveEntryError when more than one open entry exists.
    """
    open_entries = apps.get_model('entries', 'Entry').no_join
    if select_for_update:
        # Lock the rows for the duration of the transaction.
        open_entries = open_entries.select_for_update()
    open_entries = open_entries.filter(user=user, end_time__isnull=True)
    if not open_entries.exists():
        return None
    if open_entries.count() > 1:
        raise ActiveEntryError('Only one active entry is allowed.')
    return open_entries[0]
|
Returns the user s currently - active entry or None .
|
61,981 |
def get_month_start(day=None):
    """Returns the first day of the month containing ``day`` (today by
    default), timezone-aware."""
    if day is None:
        day = datetime.date.today()
    return add_timezone(day).replace(day=1)
|
Returns the first day of the given month .
|
61,982 |
def get_setting(name, **kwargs):
    """Returns the user-defined value for the setting, or a default value.

    Lookup order: project settings, then the caller-supplied ``default``
    keyword, then the app's defaults module. Raises AttributeError when
    none of the three provide a value.
    """
    if hasattr(settings, name):
        return getattr(settings, name)
    try:
        return kwargs['default']
    except KeyError:
        pass
    if hasattr(defaults, name):
        return getattr(defaults, name)
    raise AttributeError(
        '{0} must be specified in your project settings.'.format(name))
|
Returns the user - defined value for the setting or a default value .
|
61,983 |
def get_week_start(day=None):
    """Returns the Monday of the given week (today's week by default)."""
    day = add_timezone(day or datetime.date.today())
    offset = day.weekday()  # Monday == 0
    if offset:
        day = day - relativedelta(days=offset)
    return day
|
Returns the Monday of the given week .
|
61,984 |
def get_year_start(day=None):
    """Returns January 1 of the given year (this year by default)."""
    day = add_timezone(day or datetime.date.today())
    # A single replace() sets both fields at once.
    return day.replace(month=1, day=1)
|
Returns January 1 of the given year .
|
61,985 |
def to_datetime(date):
    """Transforms a date (or datetime) into a naive datetime at midnight."""
    year, month, day = date.year, date.month, date.day
    return datetime.datetime(year, month, day)
|
Transforms a date or datetime object into a date object .
|
61,986 |
def report_estimation_accuracy(request):
    """Plot contracted hours vs. actual hours for completed fixed-price
    project contracts.

    Idea from "Software Estimation: Demystifying the Black Art",
    McConnell 2006, Fig 3-3.
    """
    contracts = ProjectContract.objects.filter(
        status=ProjectContract.STATUS_COMPLETE,
        type=ProjectContract.PROJECT_FIXED)
    # First row is the chart's column headers; data rows follow.
    data = [('Target (hrs)', 'Actual (hrs)', 'Point Label')]
    for c in contracts:
        if c.contracted_hours() == 0:
            # Skip to avoid dividing by zero in the percentage below.
            continue
        pt_label = "%s (%.2f%%)" % (
            c.name, c.hours_worked / c.contracted_hours() * 100)
        data.append((c.contracted_hours(), c.hours_worked, pt_label))
    # Largest value on either axis, skipping the header row.
    # NOTE(review): raises ValueError if no contract survives the filter —
    # presumably acceptable upstream; confirm.
    chart_max = max([max(x[0], x[1]) for x in data[1:]])
    return render(request, 'timepiece/reports/estimation_accuracy.html', {
        'data': json.dumps(data, cls=DecimalEncoder),
        'chart_max': chart_max,
    })
|
Idea from "Software Estimation: Demystifying the Black Art", McConnell 2006, Fig 3-3.
|
61,987 |
def get_context_data(self, **kwargs):
    """Processes form data to get relevant entries & date_headers.

    When the filter form validates, the report window, the truncation
    level ('day'/'week'/'month'), and the matching entries are added to
    the context; otherwise empty placeholders are supplied so the
    template can still render.
    """
    context = super(ReportMixin, self).get_context_data(**kwargs)
    form = self.get_form()
    if form.is_valid():
        data = form.cleaned_data
        start, end = form.save()
        entryQ = self.get_entry_query(start, end, data)
        trunc = data['trunc']
        if entryQ:
            vals = ('pk', 'activity', 'project', 'project__name',
                    'project__status', 'project__type__label')
            entries = Entry.objects.date_trunc(
                trunc, extra_values=vals).filter(entryQ)
        else:
            # get_entry_query returned None: no entry types selected.
            entries = Entry.objects.none()
        # form.save() returns an exclusive end date; make it inclusive
        # for display purposes.
        end = end - relativedelta(days=1)
        date_headers = generate_dates(start, end, by=trunc)
        context.update({
            'from_date': start,
            'to_date': end,
            'date_headers': date_headers,
            'entries': entries,
            'filter_form': form,
            'trunc': trunc,
        })
    else:
        context.update({
            'from_date': None,
            'to_date': None,
            'date_headers': [],
            'entries': Entry.objects.none(),
            'filter_form': form,
            'trunc': '',
        })
    return context
|
Processes form data to get relevant entries & date_headers .
|
61,988 |
def get_entry_query(self, start, end, data):
    """Builds an Entry query (Q object) from cleaned filter-form data.

    Returns None when no entry category (billable / non-billable /
    paid leave) is selected at all.
    """
    incl_billable = data.get('billable', True)
    incl_nonbillable = data.get('non_billable', True)
    incl_leave = data.get('paid_leave', True)
    # Nothing selected means there is nothing to query.
    if not any((incl_billable, incl_nonbillable, incl_leave)):
        return None
    # Base window: entries that *ended* within [start, end).
    basicQ = Q(end_time__gte=start, end_time__lt=end)
    projects = data.get('projects', None)
    basicQ &= Q(project__in=projects) if projects else Q()
    if 'users' in data:
        basicQ &= Q(user__in=data.get('users'))
    if 'activities' in data:
        basicQ &= Q(activity__in=data.get('activities'))
    if 'project_types' in data:
        basicQ &= Q(project__type__in=data.get('project_types'))
    # All three categories selected: no further narrowing needed.
    if all((incl_billable, incl_nonbillable, incl_leave)):
        return basicQ
    # Exactly one of billable / non-billable selected: build its filter.
    billableQ = None
    if incl_billable and not incl_nonbillable:
        billableQ = Q(activity__billable=True, project__type__billable=True)
    if incl_nonbillable and not incl_billable:
        billableQ = Q(activity__billable=False) | Q(project__type__billable=False)
    leave_ids = utils.get_setting('TIMEPIECE_PAID_LEAVE_PROJECTS').values()
    leaveQ = Q(project__in=leave_ids)
    if incl_leave:
        extraQ = (leaveQ | billableQ) if billableQ else leaveQ
    else:
        # Paid leave excluded: negate the leave condition.
        extraQ = (~leaveQ & billableQ) if billableQ else ~leaveQ
    return basicQ & extraQ
|
Builds Entry query from form data .
|
61,989 |
def get_headers(self, date_headers, from_date, to_date, trunc):
    """Adjust date headers & derive (start, end) range headers.

    For non-daily truncation each header becomes a (period start,
    period end) pair; the final period ends at to_date.
    """
    date_headers = list(date_headers)
    # Clamp the first header so it never precedes the report start.
    if date_headers and date_headers[0] < from_date:
        date_headers[0] = from_date
    if date_headers and trunc != 'day':
        range_headers = [
            (date_headers[i], date_headers[i + 1] - relativedelta(days=1))
            for i in range(len(date_headers) - 1)
        ]
        range_headers.append((date_headers[-1], to_date))
    else:
        range_headers = date_headers
    return date_headers, range_headers
|
Adjust date headers & get range headers .
|
61,990 |
def get_previous_month(self):
    """Returns the (start, end) date range for the previous full month."""
    # Step back one day from the start of this month to land in the
    # previous month, then find that month's start.
    last_day = utils.to_datetime(utils.get_month_start() - relativedelta(days=1))
    first_day = utils.get_month_start(last_day)
    return first_day, last_day
|
Returns date range for the previous full month .
|
61,991 |
def convert_context_to_csv(self, context):
    """Convert the context dictionary into rows for a CSV file.

    Emits a header row ('Name', one column per date, 'Total'), one row
    per user in the exported summary, and a closing 'Totals' row.
    """
    header_row = ['Name']
    header_row.extend(
        [date.strftime('%m/%d/%Y') for date in context['date_headers']])
    header_row.append('Total')
    content = [header_row]
    # Only the summary selected for export (self.export) is written out.
    summary = context['summaries'].get(self.export, [])
    for rows, totals in summary:
        for name, user_id, hours in rows:
            content.append([name] + list(hours))
        content.append(['Totals'] + list(totals))
    return content
|
Convert the context dictionary into a CSV file .
|
61,992 |
def defaults(self):
    """Default filter form data when no GET data is provided.

    The window defaults to the full week containing the date seven days
    ago, truncated by day, billable entries only.
    """
    window_start, window_end = get_week_window(
        timezone.now() - relativedelta(days=7))
    return {
        'from_date': window_start,
        'to_date': window_end,
        'billable': True,
        'non_billable': False,
        'paid_leave': False,
        'trunc': 'day',
        'projects': [],
    }
|
Default filter form data when no GET data is provided .
|
61,993 |
def get_hours_data(self, entries, date_headers):
    """Sum billable and non-billable hours across all users, per day."""
    if entries:
        project_totals = get_project_totals(
            entries, date_headers, total_column=False)
    else:
        project_totals = []
    data_map = {}
    for rows, totals in project_totals:
        for user, user_id, periods in rows:
            for period in periods:
                # Lazily create the per-day accumulator.
                bucket = data_map.setdefault(
                    period['day'], {'billable': 0, 'nonbillable': 0})
                bucket['billable'] += period['billable']
                bucket['nonbillable'] += period['nonbillable']
    return data_map
|
Sum billable and non - billable hours across all users .
|
61,994 |
def add_parameters(url, parameters):
    """Appends URL-encoded parameters to the base URL.

    Joins with '&' when the URL already contains a '?', otherwise with
    '?'. Existing parameter values are not inspected, so a duplicate key
    may be appended alongside a pre-existing one.
    """
    if not parameters:
        return url
    separator = '&' if '?' in url else '?'
    return '{0}{1}{2}'.format(url, separator, urlencode(parameters))
|
Appends URL - encoded parameters to the base URL . It appends after & if ? is found in the URL ; otherwise it appends using ? . Keep in mind that this tag does not take into account the value of existing params ; it is therefore possible to add another value for a pre - existing parameter .
|
61,995 |
def get_max_hours(context):
    """Return the largest number of hours worked or assigned on any
    project (0 when there are no projects)."""
    peaks = [0]  # floor of 0 keeps max() safe on an empty progress list
    for project in context['project_progress']:
        peaks.append(max(project['worked'], project['assigned']))
    return max(peaks)
|
Return the largest number of hours worked or assigned on any project .
|
61,996 |
def get_uninvoiced_hours(entries, billable=None):
    """Given an iterable of entries, return the total un-invoiced hours
    as a two-decimal string.

    If billable is passed as 'billable' or 'nonbillable', limit the sum
    to the corresponding entries.
    """
    excluded_statuses = ('invoiced', 'not-invoiced')
    if billable is not None:
        want_billable = (billable.lower() == u'billable')
        entries = [entry for entry in entries
                   if entry.activity.billable == want_billable]
    total = sum([entry.hours for entry in entries
                 if entry.status not in excluded_statuses])
    return '{0:.2f}'.format(total)
|
Given an iterable of entries return the total hours that have not been invoiced . If billable is passed as billable or nonbillable limit to the corresponding entries .
|
61,997 |
def humanize_hours(total_hours, frmt='{hours:02d}:{minutes:02d}:{seconds:02d}', negative_frmt=None):
    """Given time in hours, return a string representing the time."""
    # Convert to whole seconds and delegate the formatting.
    total_seconds = int(float(total_hours) * 3600)
    return humanize_seconds(total_seconds, frmt, negative_frmt)
|
Given time in hours return a string representing the time .
|
61,998 |
def _timesheet_url(url_name, pk, date=None):
    """Utility to create a time sheet URL with optional month/year
    query parameters."""
    base_url = reverse(url_name, args=(pk,))
    if not date:
        return base_url
    query = urlencode({'month': date.month, 'year': date.year})
    return '?'.join((base_url, query))
|
Utility to create a time sheet URL with optional date parameters .
|
61,999 |
def reject_user_timesheet(request, user_id):
    """This allows admins to reject all entries, instead of just one.

    Flow: GET with a valid month/year renders a confirmation page;
    POSTing 'yes' flips every VERIFIED entry in that month back to
    UNVERIFIED; an invalid form produces an error message. All paths
    except the confirmation page redirect to the user's timesheet.
    """
    form = YearMonthForm(request.GET or request.POST)
    user = User.objects.get(pk=user_id)
    if form.is_valid():
        from_date, to_date = form.save()
        entries = Entry.no_join.filter(status=Entry.VERIFIED, user=user,
                                       start_time__gte=from_date,
                                       end_time__lte=to_date)
        if request.POST.get('yes'):
            if entries.exists():
                count = entries.count()
                Entry.no_join.filter(pk__in=entries).update(
                    status=Entry.UNVERIFIED)
                msg = 'You have rejected %d previously verified entries.' % count
            else:
                msg = 'There are no verified entries to reject.'
            messages.info(request, msg)
        else:
            # No confirmation yet: show the "are you sure?" page.
            return render(request, 'timepiece/user/timesheet/reject.html', {
                'date': from_date,
                'timesheet_user': user
            })
    else:
        msg = 'You must provide a month and year for entries to be rejected.'
        messages.error(request, msg)
    url = reverse('view_user_timesheet', args=(user_id,))
    return HttpResponseRedirect(url)
|
This allows admins to reject all entries instead of just one
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.