Dataset schema (column, type, min/max string length):

nwo                  stringlengths   5 - 106
sha                  stringlengths   40 - 40
path                 stringlengths   4 - 174
language             stringclasses   1 value
identifier           stringlengths   1 - 140
parameters           stringlengths   0 - 87.7k
argument_list        stringclasses   1 value
return_statement     stringlengths   0 - 426k
docstring            stringlengths   0 - 64.3k
docstring_summary    stringlengths   0 - 26.3k
docstring_tokens     list
function             stringlengths   18 - 4.83M
function_tokens      list
url                  stringlengths   83 - 304
datacenter/acitoolkit
629b84887dd0f0183b81efc8adb16817f985541a
acitoolkit/acibaseobject.py
python
BaseACIObject._check_attachment
(self, item, status)
return check in self._attachments
Internal function to return whether an attachment exists to the specified item with the given status. :returns: True or False, True indicates the attachment exists.
Internal function to return whether an attachment exists to the specified item with the given status.
[ "Internal", "function", "to", "return", "whether", "an", "attachment", "exists", "to", "the", "specified", "item", "with", "the", "given", "status", "." ]
def _check_attachment(self, item, status):
    """
    Internal function to return whether an attachment exists to the
    specified item with the given status.

    :returns: True or False, True indicates the attachment exists.
    """
    check = BaseRelation(item, status)
    return check in self._attachments
[ "def", "_check_attachment", "(", "self", ",", "item", ",", "status", ")", ":", "check", "=", "BaseRelation", "(", "item", ",", "status", ")", "return", "check", "in", "self", ".", "_attachments" ]
https://github.com/datacenter/acitoolkit/blob/629b84887dd0f0183b81efc8adb16817f985541a/acitoolkit/acibaseobject.py#L661-L669
log2timeline/dfvfs
4ca7bf06b15cdc000297a7122a065f0ca71de544
dfvfs/helpers/file_system_searcher.py
python
FindSpec._SplitPath
(self, path, path_separator)
return list(filter(None, path.split(path_separator)))
Splits the path into path segments. Args: path (str): path. path_separator (str): path separator. Returns: list[str]: path segments without the root path segment, which is an empty string.
Splits the path into path segments.
[ "Splits", "the", "path", "into", "path", "segments", "." ]
def _SplitPath(self, path, path_separator):
    """Splits the path into path segments.

    Args:
      path (str): path.
      path_separator (str): path separator.

    Returns:
      list[str]: path segments without the root path segment, which is
          an empty string.
    """
    # Split the path with the path separator and remove empty path segments.
    return list(filter(None, path.split(path_separator)))
[ "def", "_SplitPath", "(", "self", ",", "path", ",", "path_separator", ")", ":", "# Split the path with the path separator and remove empty path segments.", "return", "list", "(", "filter", "(", "None", ",", "path", ".", "split", "(", "path_separator", ")", ")", ")" ]
https://github.com/log2timeline/dfvfs/blob/4ca7bf06b15cdc000297a7122a065f0ca71de544/dfvfs/helpers/file_system_searcher.py#L318-L330
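A minimal usage sketch of the split-and-filter idiom above, outside the FindSpec class; the path value is invented for illustration. filter(None, ...) drops the empty strings produced by leading, trailing, or doubled separators:

# Same expression _SplitPath uses, applied to a made-up path.
path = '/usr//local/bin/'
segments = list(filter(None, path.split('/')))
print(segments)  # ['usr', 'local', 'bin'] -- root and empty segments removed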
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/email/_header_value_parser.py
python
WhiteSpaceTerminal.startswith_fws
(self)
return True
[]
def startswith_fws(self):
    return True
[ "def", "startswith_fws", "(", "self", ")", ":", "return", "True" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/email/_header_value_parser.py#L919-L920
achael/eht-imaging
bbd3aeb06bef52bf89fa1c06de71e5509a5b0015
ehtim/io/load.py
python
load_movie_dat
(basename, nframes, startframe=0, framedur_sec=1, psize=-1, interp=ehc.INTERP_DEFAULT, bounds_error=ehc.BOUNDS_ERROR, ra=ehc.RA_DEFAULT, dec=ehc.DEC_DEFAULT, rf=ehc.RF_DEFAULT, pulse=ehc.PULSE_DEFAULT)
return(ehtim.movie.Movie(sim, times, psize, ra, dec, rf, interp=interp, bounds_error=bounds_error))
Read in a movie from dat files and create a Movie object. Args: basename (str): The base name of individual movie frames. Files should have names basename + 000001, etc. nframes (int): The total number of frames startframe (int): The index of the first frame to load framedur_sec (float): The frame duration in seconds (default = 1) psize (float): The pixel size in radians ra (float): the right ascension of the source (default for SgrA*) dec (float): the declination of the source (default for SgrA*) rf (float): The reference frequency of the observation pulse (function): The function convolved with the pixel values for continuous image interp (str): Interpolation method, input to scipy.interpolate.interp1d kind keyword bounds_error (bool): if False, return nearest frame outside [start_hr, stop_hr] Returns: Movie: a Movie object
Read in a movie from dat files and create a Movie object.
[ "Read", "in", "a", "movie", "from", "dat", "files", "and", "create", "a", "Movie", "object", "." ]
def load_movie_dat(basename, nframes, startframe=0, framedur_sec=1, psize=-1,
                   interp=ehc.INTERP_DEFAULT, bounds_error=ehc.BOUNDS_ERROR,
                   ra=ehc.RA_DEFAULT, dec=ehc.DEC_DEFAULT, rf=ehc.RF_DEFAULT,
                   pulse=ehc.PULSE_DEFAULT):
    """Read in a movie from dat files and create a Movie object.

       Args:
           basename (str): The base name of individual movie frames.
                           Files should have names basename + 000001, etc.
           nframes (int): The total number of frames
           startframe (int): The index of the first frame to load
           framedur_sec (float): The frame duration in seconds (default = 1)
           psize (float): The pixel size in radians
           ra (float): the right ascension of the source (default for SgrA*)
           dec (float): the declination of the source (default for SgrA*)
           rf (float): The reference frequency of the observation
           pulse (function): The function convolved with the pixel values for continuous image
           interp (str): Interpolation method, input to scipy.interpolate.interp1d kind keyword
           bounds_error (bool): if False, return nearest frame outside [start_hr, stop_hr]

       Returns:
           Movie: a Movie object
    """
    for i in range(startframe, startframe + nframes):
        filename = basename + "%04d" % i + '.dat'
        sys.stdout.write('\rReading Movie Image %i/%i...' % (i - startframe, nframes))
        sys.stdout.flush()

        datatable = np.loadtxt(filename, dtype=np.float64)
        if i == startframe:
            sim = np.zeros([nframes, datatable.shape[0]])
        sim[i - startframe, :] = datatable[:, 2]

    npix = np.sqrt(sim.shape[1]).astype('int')
    sim = np.reshape(sim, [sim.shape[0], npix, npix])
    sim = np.array([im.transpose()[::-1, :] for im in sim])

    # TODO: read frame times from files?
    hour0 = 0
    framedur_hr = framedur_sec / 3600.
    nframes = len(sim)
    tstart = hour0
    tstop = hour0 + framedur_hr * nframes
    times = np.linspace(tstart, tstop, nframes)

    return(ehtim.movie.Movie(sim, times, psize, ra, dec, rf,
                             interp=interp, bounds_error=bounds_error))
[ "def", "load_movie_dat", "(", "basename", ",", "nframes", ",", "startframe", "=", "0", ",", "framedur_sec", "=", "1", ",", "psize", "=", "-", "1", ",", "interp", "=", "ehc", ".", "INTERP_DEFAULT", ",", "bounds_error", "=", "ehc", ".", "BOUNDS_ERROR", ",", "ra", "=", "ehc", ".", "RA_DEFAULT", ",", "dec", "=", "ehc", ".", "DEC_DEFAULT", ",", "rf", "=", "ehc", ".", "RF_DEFAULT", ",", "pulse", "=", "ehc", ".", "PULSE_DEFAULT", ")", ":", "for", "i", "in", "range", "(", "startframe", ",", "startframe", "+", "nframes", ")", ":", "filename", "=", "basename", "+", "\"%04d\"", "%", "i", "+", "'.dat'", "sys", ".", "stdout", ".", "write", "(", "'\\rReading Movie Image %i/%i...'", "%", "(", "i", "-", "startframe", ",", "nframes", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "datatable", "=", "np", ".", "loadtxt", "(", "filename", ",", "dtype", "=", "np", ".", "float64", ")", "if", "i", "==", "startframe", ":", "sim", "=", "np", ".", "zeros", "(", "[", "nframes", ",", "datatable", ".", "shape", "[", "0", "]", "]", ")", "sim", "[", "i", "-", "startframe", ",", ":", "]", "=", "datatable", "[", ":", ",", "2", "]", "npix", "=", "np", ".", "sqrt", "(", "sim", ".", "shape", "[", "1", "]", ")", ".", "astype", "(", "'int'", ")", "sim", "=", "np", ".", "reshape", "(", "sim", ",", "[", "sim", ".", "shape", "[", "0", "]", ",", "npix", ",", "npix", "]", ")", "sim", "=", "np", ".", "array", "(", "[", "im", ".", "transpose", "(", ")", "[", ":", ":", "-", "1", ",", ":", "]", "for", "im", "in", "sim", "]", ")", "# TODO: read frame times from files?", "hour0", "=", "0", "framedur_hr", "=", "framedur_sec", "/", "3600.", "nframes", "=", "len", "(", "sim", ")", "tstart", "=", "hour0", "tstop", "=", "hour0", "+", "framedur_hr", "*", "nframes", "times", "=", "np", ".", "linspace", "(", "tstart", ",", "tstop", ",", "nframes", ")", "return", "(", "ehtim", ".", "movie", ".", "Movie", "(", "sim", ",", "times", ",", "psize", ",", "ra", ",", "dec", ",", "rf", ",", "interp", "=", "interp", ",", "bounds_error", "=", "bounds_error", ")", ")" ]
https://github.com/achael/eht-imaging/blob/bbd3aeb06bef52bf89fa1c06de71e5509a5b0015/ehtim/io/load.py#L729-L780
pyside/pyside2-setup
d526f801ced4687d5413907a93dedcd782ef72fa
build_scripts/qp5_tool.py
python
expand_reference
(cache_dict, value)
return value
Expand references to other keys in config files $(name) by value.
Expand references to other keys in config files $(name) by value.
[ "Expand", "references", "to", "other", "keys", "in", "config", "files", "$", "(", "name", ")", "by", "value", "." ]
def expand_reference(cache_dict, value):
    """Expand references to other keys in config files $(name) by value."""
    pattern = re.compile(r"\$\([^)]+\)")
    while True:
        match = pattern.match(value)
        if not match:
            break
        key = match.group(0)[2:-1]
        value = value[:match.start(0)] + cache_dict[key] + value[match.end(0):]
    return value
[ "def", "expand_reference", "(", "cache_dict", ",", "value", ")", ":", "pattern", "=", "re", ".", "compile", "(", "r\"\\$\\([^)]+\\)\"", ")", "while", "True", ":", "match", "=", "pattern", ".", "match", "(", "value", ")", "if", "not", "match", ":", "break", "key", "=", "match", ".", "group", "(", "0", ")", "[", "2", ":", "-", "1", "]", "value", "=", "value", "[", ":", "match", ".", "start", "(", "0", ")", "]", "+", "cache_dict", "[", "key", "]", "+", "value", "[", "match", ".", "end", "(", "0", ")", ":", "]", "return", "value" ]
https://github.com/pyside/pyside2-setup/blob/d526f801ced4687d5413907a93dedcd782ef72fa/build_scripts/qp5_tool.py#L167-L176
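A small usage sketch for expand_reference, assuming it is defined as above (it needs `import re` in scope); the cache contents are hypothetical. Note that `pattern.match` anchors at the start of the string, so the loop keeps expanding as long as the value begins with a $(name) reference:

import re  # required by expand_reference above

cache = {'PREFIX': '/opt/qt', 'BIN': '$(PREFIX)/bin'}  # hypothetical cache entries
# '$(BIN)/qmake' -> '$(PREFIX)/bin/qmake' -> '/opt/qt/bin/qmake'
print(expand_reference(cache, '$(BIN)/qmake'))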
joelgrus/data-science-from-scratch
d5d0f117f41b3ccab3b07f1ee1fa21cfcf69afa1
first-edition/code/simple_linear_regression.py
python
total_sum_of_squares
(y)
return sum(v ** 2 for v in de_mean(y))
the total squared variation of y_i's from their mean
the total squared variation of y_i's from their mean
[ "the", "total", "squared", "variation", "of", "y_i", "s", "from", "their", "mean" ]
def total_sum_of_squares(y):
    """the total squared variation of y_i's from their mean"""
    return sum(v ** 2 for v in de_mean(y))
[ "def", "total_sum_of_squares", "(", "y", ")", ":", "return", "sum", "(", "v", "**", "2", "for", "v", "in", "de_mean", "(", "y", ")", ")" ]
https://github.com/joelgrus/data-science-from-scratch/blob/d5d0f117f41b3ccab3b07f1ee1fa21cfcf69afa1/first-edition/code/simple_linear_regression.py#L25-L27
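total_sum_of_squares relies on de_mean from earlier in the book's code; a minimal stand-in consistent with its name makes the record runnable on its own:

def mean(xs):
    return sum(xs) / len(xs)

def de_mean(xs):
    """translate xs by subtracting its mean (stand-in for the book's helper)"""
    return [x - mean(xs) for x in xs]

print(total_sum_of_squares([1, 2, 3]))  # (1-2)**2 + (2-2)**2 + (3-2)**2 = 2.0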
pymedusa/Medusa
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
ext3/bs4/element.py
python
PageElement.setup
(self, parent=None, previous_element=None, next_element=None, previous_sibling=None, next_sibling=None)
Sets up the initial relations between this element and other elements. :param parent: The parent of this element. :param previous_element: The element parsed immediately before this one. :param next_element: The element parsed immediately after this one. :param previous_sibling: The most recently encountered element on the same level of the parse tree as this one. :param next_sibling: The next element to be encountered on the same level of the parse tree as this one.
Sets up the initial relations between this element and other elements.
[ "Sets", "up", "the", "initial", "relations", "between", "this", "element", "and", "other", "elements", "." ]
def setup(self, parent=None, previous_element=None, next_element=None,
          previous_sibling=None, next_sibling=None):
    """Sets up the initial relations between this element and
    other elements.

    :param parent: The parent of this element.
    :param previous_element: The element parsed immediately before this one.
    :param next_element: The element parsed immediately after this one.
    :param previous_sibling: The most recently encountered element on
        the same level of the parse tree as this one.
    :param next_sibling: The next element to be encountered on the same
        level of the parse tree as this one.
    """
    self.parent = parent

    self.previous_element = previous_element
    if previous_element is not None:
        self.previous_element.next_element = self

    self.next_element = next_element
    if self.next_element is not None:
        self.next_element.previous_element = self

    self.next_sibling = next_sibling
    if self.next_sibling is not None:
        self.next_sibling.previous_sibling = self

    if (previous_sibling is None
            and self.parent is not None and self.parent.contents):
        previous_sibling = self.parent.contents[-1]

    self.previous_sibling = previous_sibling
    if previous_sibling is not None:
        self.previous_sibling.next_sibling = self
[ "def", "setup", "(", "self", ",", "parent", "=", "None", ",", "previous_element", "=", "None", ",", "next_element", "=", "None", ",", "previous_sibling", "=", "None", ",", "next_sibling", "=", "None", ")", ":", "self", ".", "parent", "=", "parent", "self", ".", "previous_element", "=", "previous_element", "if", "previous_element", "is", "not", "None", ":", "self", ".", "previous_element", ".", "next_element", "=", "self", "self", ".", "next_element", "=", "next_element", "if", "self", ".", "next_element", "is", "not", "None", ":", "self", ".", "next_element", ".", "previous_element", "=", "self", "self", ".", "next_sibling", "=", "next_sibling", "if", "self", ".", "next_sibling", "is", "not", "None", ":", "self", ".", "next_sibling", ".", "previous_sibling", "=", "self", "if", "(", "previous_sibling", "is", "None", "and", "self", ".", "parent", "is", "not", "None", "and", "self", ".", "parent", ".", "contents", ")", ":", "previous_sibling", "=", "self", ".", "parent", ".", "contents", "[", "-", "1", "]", "self", ".", "previous_sibling", "=", "previous_sibling", "if", "previous_sibling", "is", "not", "None", ":", "self", ".", "previous_sibling", ".", "next_sibling", "=", "self" ]
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/ext3/bs4/element.py#L158-L197
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/distutils/ccompiler.py
python
CCompiler._find_macro
(self, name)
return None
[]
def _find_macro(self, name):
    i = 0
    for defn in self.macros:
        if defn[0] == name:
            return i
        i += 1
    return None
[ "def", "_find_macro", "(", "self", ",", "name", ")", ":", "i", "=", "0", "for", "defn", "in", "self", ".", "macros", ":", "if", "defn", "[", "0", "]", "==", "name", ":", "return", "i", "i", "+=", "1", "return", "None" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/distutils/ccompiler.py#L159-L165
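An equivalent, more idiomatic formulation using enumerate, shown for comparison; this is an illustrative rewrite, not distutils' actual code, and the macro list is made up:

def find_macro(macros, name):
    # Same linear scan as _find_macro above, with enumerate replacing
    # the manual counter.
    for i, defn in enumerate(macros):
        if defn[0] == name:
            return i
    return None

print(find_macro([('DEBUG', '1'), ('NDEBUG', None)], 'NDEBUG'))  # 1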
MartinThoma/algorithms
6199cfa3446e1056c7b4d75ca6e306e9e56fd95b
ML/gtsdb/analyze_model.py
python
load_data
()
return X_train, X_val, y_train, y_val
Load data.
Load data.
[ "Load", "data", "." ]
def load_data():
    """Load data."""
    data = gtsdb.load_data()
    X_train = data['x_train']
    y_train = data['y_train']
    X_val = None
    y_val = None
    # X_train, X_val, y_train, y_val = train_test_split(X_train, y_train,
    #                                                   test_size=0.10,
    #                                                   random_state=42)
    X_train = X_train.astype('float32')
    # X_val = X_val.astype('float32')
    # X_test = X_test.astype('float32')
    X_train /= 255
    # X_val /= 255
    return X_train, X_val, y_train, y_val
[ "def", "load_data", "(", ")", ":", "data", "=", "gtsdb", ".", "load_data", "(", ")", "X_train", "=", "data", "[", "'x_train'", "]", "y_train", "=", "data", "[", "'y_train'", "]", "X_val", "=", "None", "y_val", "=", "None", "# X_train, X_val, y_train, y_val = train_test_split(X_train, y_train,", "# test_size=0.10,", "# random_state=42)", "X_train", "=", "X_train", ".", "astype", "(", "'float32'", ")", "# X_val = X_val.astype('float32')", "# X_test = X_test.astype('float32')", "X_train", "/=", "255", "# X_val /= 255", "return", "X_train", ",", "X_val", ",", "y_train", ",", "y_val" ]
https://github.com/MartinThoma/algorithms/blob/6199cfa3446e1056c7b4d75ca6e306e9e56fd95b/ML/gtsdb/analyze_model.py#L47-L63
mesalock-linux/mesapy
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
pypy/objspace/std/unicodeobject.py
python
W_UnicodeObject.__repr__
(self)
return "%s(%r)" % (self.__class__.__name__, self._value)
representation for debugging purposes
representation for debugging purposes
[ "representation", "for", "debugging", "purposes" ]
def __repr__(self):
    """representation for debugging purposes"""
    return "%s(%r)" % (self.__class__.__name__, self._value)
[ "def", "__repr__", "(", "self", ")", ":", "return", "\"%s(%r)\"", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "self", ".", "_value", ")" ]
https://github.com/mesalock-linux/mesapy/blob/ed546d59a21b36feb93e2309d5c6b75aa0ad95c9/pypy/objspace/std/unicodeobject.py#L39-L41
Samsung/cotopaxi
d19178b1235017257fec20d0a41edc918de55574
cotopaxi/protocol_fuzzer.py
python
FuzzingCase.test_payload
(self, test_params, test_timeouts, alive_before=True)
return True
Send payload for fuzzing. test_timeouts list is extended if applicable.
Send payload for fuzzing.
[ "Send", "payload", "for", "fuzzing", "." ]
def test_payload(self, test_params, test_timeouts, alive_before=True):
    """Send payload for fuzzing.

    test_timeouts list is extended if applicable.
    """
    if not alive_before:
        alive_before = service_ping(test_params)
    if not alive_before:
        print(
            "[+] Server {}:{} is not responding before sending payload".format(
                test_params.dst_endpoint.ip_addr, test_params.dst_endpoint.port
            )
        )
    else:
        print_verbose(
            test_params,
            "[+] Server {}:{} is alive before sending payload".format(
                test_params.dst_endpoint.ip_addr, test_params.dst_endpoint.port
            ),
        )
    if not alive_before and not test_params.ignore_ping_check:
        print(
            "[.] Fuzzing stopped for {}:{} because server is not responding\n"
            "    (use --ignore-ping-check if you want to continue anyway)!".format(
                test_params.dst_endpoint.ip_addr, test_params.dst_endpoint.port
            )
        )
        return False
    print_verbose(
        test_params, prepare_separator("-", post_separator_text="Request:")
    )
    payload_sent_time = time.time()
    test_result = sr1_file(test_params, self.payload_file, test_params.verbose)
    print_verbose(test_params, prepare_separator("-"))
    print("[.] Payload {} sent".format(self.payload_file))
    if test_result is not None:
        test_timeouts.append(
            (time.time() - payload_sent_time, self.payload_file, test_result)
        )
        print(prepare_separator("-", post_separator_text="Response:"))
        try:
            proto_handler = PROTOCOL_TESTERS[test_params.protocol].response_parser()
            packet = proto_handler(test_result[Raw].load)
            packet.show()
        except (TypeError, IndexError, struct.error):
            pass
        print(prepare_separator("-"))
    else:
        print("Received no response from server")
        print(prepare_separator("-"))
    alive_after = service_ping(test_params)
    flag = True
    if not alive_after:
        alive_after = service_ping(test_params)
        flag = False
    if not alive_after and alive_before and not test_params.ignore_ping_check:
        print(
            "[+] Server {}:{} is dead after sending payload".format(
                test_params.dst_endpoint.ip_addr, test_params.dst_endpoint.port
            )
        )
        test_params.test_stats.active_endpoints[test_params.protocol].append(
            "{}:{} - payload: {}".format(
                test_params.dst_endpoint.ip_addr,
                test_params.dst_endpoint.port,
                self.payload_file,
            )
        )
        if not self.wait_server_respawn(test_params):
            return False
        else:
            flag = True
    if flag and alive_after and not test_params.ignore_ping_check:
        print_verbose(
            test_params,
            "[+] Server {}:{} is alive after sending payload {}".format(
                test_params.dst_endpoint.ip_addr,
                test_params.dst_endpoint.port,
                self.payload_file,
            ),
        )
    print_verbose(
        test_params,
        "[+] Finished fuzzing with payload: {}".format(self.payload_file),
    )
    print_verbose(test_params, prepare_separator())
    return True
[ "def", "test_payload", "(", "self", ",", "test_params", ",", "test_timeouts", ",", "alive_before", "=", "True", ")", ":", "if", "not", "alive_before", ":", "alive_before", "=", "service_ping", "(", "test_params", ")", "if", "not", "alive_before", ":", "print", "(", "\"[+] Server {}:{} is not responding before sending payload\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ")", ")", "else", ":", "print_verbose", "(", "test_params", ",", "\"[+] Server {}:{} is alive before sending payload\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ")", ",", ")", "if", "not", "alive_before", "and", "not", "test_params", ".", "ignore_ping_check", ":", "print", "(", "\"[.] Fuzzing stopped for {}:{} because server is not responding\\n\"", "\" (use --ignore-ping-check if you want to continue anyway)!\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ")", ")", "return", "False", "print_verbose", "(", "test_params", ",", "prepare_separator", "(", "\"-\"", ",", "post_separator_text", "=", "\"Request:\"", ")", ")", "payload_sent_time", "=", "time", ".", "time", "(", ")", "test_result", "=", "sr1_file", "(", "test_params", ",", "self", ".", "payload_file", ",", "test_params", ".", "verbose", ")", "print_verbose", "(", "test_params", ",", "prepare_separator", "(", "\"-\"", ")", ")", "print", "(", "\"[.] Payload {} sent\"", ".", "format", "(", "self", ".", "payload_file", ")", ")", "if", "test_result", "is", "not", "None", ":", "test_timeouts", ".", "append", "(", "(", "time", ".", "time", "(", ")", "-", "payload_sent_time", ",", "self", ".", "payload_file", ",", "test_result", ")", ")", "print", "(", "prepare_separator", "(", "\"-\"", ",", "post_separator_text", "=", "\"Response:\"", ")", ")", "try", ":", "proto_handler", "=", "PROTOCOL_TESTERS", "[", "test_params", ".", "protocol", "]", ".", "response_parser", "(", ")", "packet", "=", "proto_handler", "(", "test_result", "[", "Raw", "]", ".", "load", ")", "packet", ".", "show", "(", ")", "except", "(", "TypeError", ",", "IndexError", ",", "struct", ".", "error", ")", ":", "pass", "print", "(", "prepare_separator", "(", "\"-\"", ")", ")", "else", ":", "print", "(", "\"Received no response from server\"", ")", "print", "(", "prepare_separator", "(", "\"-\"", ")", ")", "alive_after", "=", "service_ping", "(", "test_params", ")", "flag", "=", "True", "if", "not", "alive_after", ":", "alive_after", "=", "service_ping", "(", "test_params", ")", "flag", "=", "False", "if", "not", "alive_after", "and", "alive_before", "and", "not", "test_params", ".", "ignore_ping_check", ":", "print", "(", "\"[+] Server {}:{} is dead after sending payload\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ")", ")", "test_params", ".", "test_stats", ".", "active_endpoints", "[", "test_params", ".", "protocol", "]", ".", "append", "(", "\"{}:{} - payload: {}\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ",", "self", ".", "payload_file", ",", ")", ")", "if", "not", "self", ".", "wait_server_respawn", "(", "test_params", ")", ":", "return", "False", "else", ":", "flag", "=", "True", "if", "flag", "and", "alive_after", "and", "not", "test_params", ".", "ignore_ping_check", ":", 
"print_verbose", "(", "test_params", ",", "\"[+] Server {}:{} is alive after sending payload {}\"", ".", "format", "(", "test_params", ".", "dst_endpoint", ".", "ip_addr", ",", "test_params", ".", "dst_endpoint", ".", "port", ",", "self", ".", "payload_file", ",", ")", ",", ")", "print_verbose", "(", "test_params", ",", "\"[+] Finished fuzzing with payload: {}\"", ".", "format", "(", "self", ".", "payload_file", ")", ",", ")", "print_verbose", "(", "test_params", ",", "prepare_separator", "(", ")", ")", "return", "True" ]
https://github.com/Samsung/cotopaxi/blob/d19178b1235017257fec20d0a41edc918de55574/cotopaxi/protocol_fuzzer.py#L70-L156
CouchPotato/CouchPotatoServer
7260c12f72447ddb6f062367c6dfbda03ecd4e9c
libs/subliminal/subtitles.py
python
Subtitle.exists
(self)
return False
Whether the subtitle exists or not
Whether the subtitle exists or not
[ "Whether", "the", "subtitle", "exists", "or", "not" ]
def exists(self):
    """Whether the subtitle exists or not"""
    if self.path:
        return os.path.exists(self.path)
    return False
[ "def", "exists", "(", "self", ")", ":", "if", "self", ".", "path", ":", "return", "os", ".", "path", ".", "exists", "(", "self", ".", "path", ")", "return", "False" ]
https://github.com/CouchPotato/CouchPotatoServer/blob/7260c12f72447ddb6f062367c6dfbda03ecd4e9c/libs/subliminal/subtitles.py#L44-L48
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
example/ctp/stock/__init__.py
python
MdApi.OnRspError
(self, pRspInfo, nRequestID, bIsLast)
Error response
Error response
[ "Error", "response" ]
def OnRspError(self, pRspInfo, nRequestID, bIsLast):
    """Error response"""
[ "def", "OnRspError", "(", "self", ",", "pRspInfo", ",", "nRequestID", ",", "bIsLast", ")", ":" ]
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/stock/__init__.py#L98-L99
PyCQA/pylint
3fc855f9d0fa8e6410be5a23cf954ffd5471b4eb
pylint/extensions/typing.py
python
TypingChecker._check_for_typing_alias
( self, node: Union[nodes.Name, nodes.Attribute], )
Check if typing alias is deprecated or could be replaced. Requires - Python 3.9 - OR: Python 3.7+ with postponed evaluation in a type annotation context For Python 3.7+: Only emit message if change doesn't create any name collisions, only ever used in a type annotation context, and can safely be replaced.
Check if typing alias is deprecated or could be replaced.
[ "Check", "if", "typing", "alias", "is", "deprecated", "or", "could", "be", "replaced", "." ]
def _check_for_typing_alias(
    self,
    node: Union[nodes.Name, nodes.Attribute],
) -> None:
    """Check if typing alias is deprecated or could be replaced.

    Requires
    - Python 3.9
    - OR: Python 3.7+ with postponed evaluation in a type annotation context

    For Python 3.7+: Only emit message if change doesn't create any name
    collisions, only ever used in a type annotation context, and can safely
    be replaced.
    """
    inferred = safe_infer(node)
    if not isinstance(inferred, nodes.ClassDef):
        return
    alias = DEPRECATED_TYPING_ALIASES.get(inferred.qname(), None)
    if alias is None:
        return

    if self._py39_plus:
        self.add_message(
            "deprecated-typing-alias",
            node=node,
            args=(inferred.qname(), alias.name),
        )
        return

    # For PY37+, check for type annotation context first
    if not is_node_in_type_annotation_context(node) and isinstance(
        node.parent, nodes.Subscript
    ):
        if alias.name_collision is True:
            self._alias_name_collisions.add(inferred.qname())
        return
    self._consider_using_alias_msgs.append(
        DeprecatedTypingAliasMsg(
            node,
            inferred.qname(),
            alias.name,
            isinstance(node.parent, nodes.Subscript),
        )
    )
[ "def", "_check_for_typing_alias", "(", "self", ",", "node", ":", "Union", "[", "nodes", ".", "Name", ",", "nodes", ".", "Attribute", "]", ",", ")", "->", "None", ":", "inferred", "=", "safe_infer", "(", "node", ")", "if", "not", "isinstance", "(", "inferred", ",", "nodes", ".", "ClassDef", ")", ":", "return", "alias", "=", "DEPRECATED_TYPING_ALIASES", ".", "get", "(", "inferred", ".", "qname", "(", ")", ",", "None", ")", "if", "alias", "is", "None", ":", "return", "if", "self", ".", "_py39_plus", ":", "self", ".", "add_message", "(", "\"deprecated-typing-alias\"", ",", "node", "=", "node", ",", "args", "=", "(", "inferred", ".", "qname", "(", ")", ",", "alias", ".", "name", ")", ",", ")", "return", "# For PY37+, check for type annotation context first", "if", "not", "is_node_in_type_annotation_context", "(", "node", ")", "and", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "Subscript", ")", ":", "if", "alias", ".", "name_collision", "is", "True", ":", "self", ".", "_alias_name_collisions", ".", "add", "(", "inferred", ".", "qname", "(", ")", ")", "return", "self", ".", "_consider_using_alias_msgs", ".", "append", "(", "DeprecatedTypingAliasMsg", "(", "node", ",", "inferred", ".", "qname", "(", ")", ",", "alias", ".", "name", ",", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "Subscript", ")", ",", ")", ")" ]
https://github.com/PyCQA/pylint/blob/3fc855f9d0fa8e6410be5a23cf954ffd5471b4eb/pylint/extensions/typing.py#L212-L256
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1alpha1_cluster_role.py
python
V1alpha1ClusterRole.__eq__
(self, other)
return self.to_dict() == other.to_dict()
Returns true if both objects are equal
Returns true if both objects are equal
[ "Returns", "true", "if", "both", "objects", "are", "equal" ]
def __eq__(self, other):
    """Returns true if both objects are equal"""
    if not isinstance(other, V1alpha1ClusterRole):
        return False

    return self.to_dict() == other.to_dict()
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "V1alpha1ClusterRole", ")", ":", "return", "False", "return", "self", ".", "to_dict", "(", ")", "==", "other", ".", "to_dict", "(", ")" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1alpha1_cluster_role.py#L218-L223
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/os.py
python
execl
(file, *args)
execl(file, *args) Execute the executable file with argument list args, replacing the current process.
execl(file, *args)
[ "execl", "(", "file", "*", "args", ")" ]
def execl(file, *args):
    """execl(file, *args)

    Execute the executable file with argument list args, replacing the
    current process. """
    execv(file, args)
[ "def", "execl", "(", "file", ",", "*", "args", ")", ":", "execv", "(", "file", ",", "args", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/os.py#L309-L314
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/stat.py
python
S_ISPORT
(mode)
return False
Return True if mode is from an event port.
Return True if mode is from an event port.
[ "Return", "True", "if", "mode", "is", "from", "an", "event", "port", "." ]
def S_ISPORT(mode):
    """Return True if mode is from an event port."""
    return False
[ "def", "S_ISPORT", "(", "mode", ")", ":", "return", "False" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/stat.py#L82-L84
P1sec/pycrate
d12bbccf1df8c9c7891a26967a9d2635610ec5b8
pycrate_asn1c/tokenizer.py
python
scan_module_opt
(Tok)
return tag, ext
extract module options from given tokens
extract module options from given tokens
[ "extract", "module", "options", "from", "given", "tokens" ]
def scan_module_opt(Tok):
    """extract module options from given tokens
    """
    # scan TagDefault and ExtensionDefault
    # TODO: scan EncodingReferenceDefault first
    tag, ext = None, False
    if not Tok.has_next():
        return tag, ext
    tok = Tok.get_next()
    if tok in (TOK_AUTO, TOK_TEXP, TOK_TIMP):
        tag = Tok.get_tok()
        if Tok.get_next() != TOK_TAGS:
            raise(ASN1ProcTextErr('invalid module options, missing TAGS keyword'))
        if not Tok.has_next():
            return tag, ext
        tok = Tok.get_next()
    if tok == TOK_EXTI:
        ext = True
    else:
        raise(ASN1ProcTextErr('invalid module options'))
    if Tok.has_next():
        raise(ASN1ProcTextErr('invalid module options'))
    return tag, ext
[ "def", "scan_module_opt", "(", "Tok", ")", ":", "# scan TagDefault and ExtensionDefault", "# TODO: scan EncodingReferenceDefault first", "tag", ",", "ext", "=", "None", ",", "False", "if", "not", "Tok", ".", "has_next", "(", ")", ":", "return", "tag", ",", "next", "tok", "=", "Tok", ".", "get_next", "(", ")", "if", "tok", "in", "(", "TOK_AUTO", ",", "TOK_TEXP", ",", "TOK_TIMP", ")", ":", "tag", "=", "Tok", ".", "get_tok", "(", ")", "if", "Tok", ".", "get_next", "(", ")", "!=", "TOK_TAGS", ":", "raise", "(", "ASN1ProcTextErr", "(", "'invalid module options, missing TAGS keyword'", ")", ")", "if", "not", "Tok", ".", "has_next", "(", ")", ":", "return", "tag", ",", "ext", "tok", "=", "Tok", ".", "get_next", "(", ")", "if", "tok", "==", "TOK_EXTI", ":", "ext", "=", "True", "else", ":", "raise", "(", "ASN1ProcTextErr", "(", "'invalid module options'", ")", ")", "if", "Tok", ".", "has_next", "(", ")", ":", "raise", "(", "ASN1ProcTextErr", "(", "'invalid module options'", ")", ")", "return", "tag", ",", "ext" ]
https://github.com/P1sec/pycrate/blob/d12bbccf1df8c9c7891a26967a9d2635610ec5b8/pycrate_asn1c/tokenizer.py#L575-L597
OpenMDAO/OpenMDAO-Framework
f2e37b7de3edeaaeb2d251b375917adec059db9b
openmdao.lib/src/openmdao/lib/datatypes/domain/plot3d.py
python
_write_plot3d_coords
(zone, stream, planes, logger)
Write coordinates (& blanking) to Plot3D stream.
Write coordinates (& blanking) to Plot3D stream.
[ "Write", "coordinates", "(", "&", "blanking", ")", "to", "Plot3D", "stream", "." ]
def _write_plot3d_coords(zone, stream, planes, logger):
    """ Write coordinates (& blanking) to Plot3D stream. """
    if hasattr(zone.grid_coordinates, 'iblank'):
        raise NotImplementedError('blanking not supported yet')

    if planes:
        raise NotImplementedError('planar format not supported yet')

    shape = zone.shape
    dim = len(shape)
    if stream.unformatted:
        if dim > 2:
            imax, jmax, kmax = shape
            total = 3 * imax * jmax * kmax
        else:
            imax, jmax = shape
            total = 2 * imax * jmax
        reclen = stream.reclen_floats(total)
        stream.write_recordmark(reclen)

    logger.debug('    x min %g, max %g',
                 zone.grid_coordinates.x.min(), zone.grid_coordinates.x.max())
    _write_array(zone.grid_coordinates.x, zone.grid_coordinates.ghosts, stream)

    logger.debug('    y min %g, max %g',
                 zone.grid_coordinates.y.min(), zone.grid_coordinates.y.max())
    _write_array(zone.grid_coordinates.y, zone.grid_coordinates.ghosts, stream)

    if dim > 2:
        logger.debug('    z min %g, max %g',
                     zone.grid_coordinates.z.min(), zone.grid_coordinates.z.max())
        _write_array(zone.grid_coordinates.z, zone.grid_coordinates.ghosts, stream)

    if stream.unformatted:
        stream.write_recordmark(reclen)
[ "def", "_write_plot3d_coords", "(", "zone", ",", "stream", ",", "planes", ",", "logger", ")", ":", "if", "hasattr", "(", "zone", ".", "grid_coordinates", ",", "'iblank'", ")", ":", "raise", "NotImplementedError", "(", "'blanking not supported yet'", ")", "if", "planes", ":", "raise", "NotImplementedError", "(", "'planar format not supported yet'", ")", "shape", "=", "zone", ".", "shape", "dim", "=", "len", "(", "shape", ")", "if", "stream", ".", "unformatted", ":", "if", "dim", ">", "2", ":", "imax", ",", "jmax", ",", "kmax", "=", "shape", "total", "=", "3", "*", "imax", "*", "jmax", "*", "kmax", "else", ":", "imax", ",", "jmax", "=", "shape", "total", "=", "2", "*", "imax", "*", "jmax", "reclen", "=", "stream", ".", "reclen_floats", "(", "total", ")", "stream", ".", "write_recordmark", "(", "reclen", ")", "logger", ".", "debug", "(", "' x min %g, max %g'", ",", "zone", ".", "grid_coordinates", ".", "x", ".", "min", "(", ")", ",", "zone", ".", "grid_coordinates", ".", "x", ".", "max", "(", ")", ")", "_write_array", "(", "zone", ".", "grid_coordinates", ".", "x", ",", "zone", ".", "grid_coordinates", ".", "ghosts", ",", "stream", ")", "logger", ".", "debug", "(", "' y min %g, max %g'", ",", "zone", ".", "grid_coordinates", ".", "y", ".", "min", "(", ")", ",", "zone", ".", "grid_coordinates", ".", "y", ".", "max", "(", ")", ")", "_write_array", "(", "zone", ".", "grid_coordinates", ".", "y", ",", "zone", ".", "grid_coordinates", ".", "ghosts", ",", "stream", ")", "if", "dim", ">", "2", ":", "logger", ".", "debug", "(", "' z min %g, max %g'", ",", "zone", ".", "grid_coordinates", ".", "z", ".", "min", "(", ")", ",", "zone", ".", "grid_coordinates", ".", "z", ".", "max", "(", ")", ")", "_write_array", "(", "zone", ".", "grid_coordinates", ".", "z", ",", "zone", ".", "grid_coordinates", ".", "ghosts", ",", "stream", ")", "if", "stream", ".", "unformatted", ":", "stream", ".", "write_recordmark", "(", "reclen", ")" ]
https://github.com/OpenMDAO/OpenMDAO-Framework/blob/f2e37b7de3edeaaeb2d251b375917adec059db9b/openmdao.lib/src/openmdao/lib/datatypes/domain/plot3d.py#L698-L734
IronLanguages/ironpython2
51fdedeeda15727717fb8268a805f71b06c0b9f1
Src/StdLib/Lib/locale.py
python
atof
(string, func=float)
return func(string)
Parses a string as a float according to the locale settings.
Parses a string as a float according to the locale settings.
[ "Parses", "a", "string", "as", "a", "float", "according", "to", "the", "locale", "settings", "." ]
def atof(string, func=float):
    "Parses a string as a float according to the locale settings."
    #First, get rid of the grouping
    ts = localeconv()['thousands_sep']
    if ts:
        string = string.replace(ts, '')
    #next, replace the decimal point with a dot
    dd = localeconv()['decimal_point']
    if dd:
        string = string.replace(dd, '.')
    #finally, parse the string
    return func(string)
[ "def", "atof", "(", "string", ",", "func", "=", "float", ")", ":", "#First, get rid of the grouping", "ts", "=", "localeconv", "(", ")", "[", "'thousands_sep'", "]", "if", "ts", ":", "string", "=", "string", ".", "replace", "(", "ts", ",", "''", ")", "#next, replace the decimal point with a dot", "dd", "=", "localeconv", "(", ")", "[", "'decimal_point'", "]", "if", "dd", ":", "string", "=", "string", ".", "replace", "(", "dd", ",", "'.'", ")", "#finally, parse the string", "return", "func", "(", "string", ")" ]
https://github.com/IronLanguages/ironpython2/blob/51fdedeeda15727717fb8268a805f71b06c0b9f1/Src/StdLib/Lib/locale.py#L307-L318
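A quick sketch of atof's locale dependence, using the stdlib version shown above; the German locale is an assumption and must be installed on the system for setlocale to succeed:

import locale
from locale import atof  # the stdlib function shown above

locale.setlocale(locale.LC_NUMERIC, 'de_DE.UTF-8')  # assumes this locale is installed
print(atof('1.234,5'))  # 1234.5 -- '.' grouping stripped, ',' becomes the decimal point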
deanishe/alfred-reddit
2f7545e682fc1579489947baa679e19b4eb1900e
src/workflow/workflow.py
python
Workflow._create
(self, dirpath)
return dirpath
Create directory `dirpath` if it doesn't exist. :param dirpath: path to directory :type dirpath: ``unicode`` :returns: ``dirpath`` argument :rtype: ``unicode``
Create directory `dirpath` if it doesn't exist.
[ "Create", "directory", "dirpath", "if", "it", "doesn", "t", "exist", "." ]
def _create(self, dirpath):
    """Create directory `dirpath` if it doesn't exist.

    :param dirpath: path to directory
    :type dirpath: ``unicode``
    :returns: ``dirpath`` argument
    :rtype: ``unicode``

    """
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)
    return dirpath
[ "def", "_create", "(", "self", ",", "dirpath", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "dirpath", ")", ":", "os", ".", "makedirs", "(", "dirpath", ")", "return", "dirpath" ]
https://github.com/deanishe/alfred-reddit/blob/2f7545e682fc1579489947baa679e19b4eb1900e/src/workflow/workflow.py#L2769-L2780
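On Python 3, the same create-if-missing behavior is available directly from the standard library; a one-line equivalent for comparison (not Alfred-Workflow's code, and the path is invented):

import os

os.makedirs('/tmp/example-dir', exist_ok=True)  # no error if the directory already exists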
mayank93/Twitter-Sentiment-Analysis
f095c6ca6bf69787582b5dabb140fefaf278eb37
front-end/web2py/gluon/contrib/pymysql/connections.py
python
Connection.literal
(self, obj)
return escape_item(obj, self.charset)
Alias for escape()
Alias for escape()
[ "Alias", "for", "escape", "()" ]
def literal(self, obj):
    '''Alias for escape()'''
    return escape_item(obj, self.charset)
[ "def", "literal", "(", "self", ",", "obj", ")", ":", "return", "escape_item", "(", "obj", ",", "self", ".", "charset", ")" ]
https://github.com/mayank93/Twitter-Sentiment-Analysis/blob/f095c6ca6bf69787582b5dabb140fefaf278eb37/front-end/web2py/gluon/contrib/pymysql/connections.py#L646-L648
EventGhost/EventGhost
177be516849e74970d2e13cda82244be09f277ce
lib27/site-packages/requests/utils.py
python
default_user_agent
(name="python-requests")
return '%s/%s' % (name, __version__)
Return a string representing the default user agent.
Return a string representing the default user agent.
[ "Return", "a", "string", "representing", "the", "default", "user", "agent", "." ]
def default_user_agent(name="python-requests"):
    """Return a string representing the default user agent."""
    return '%s/%s' % (name, __version__)
[ "def", "default_user_agent", "(", "name", "=", "\"python-requests\"", ")", ":", "return", "'%s/%s'", "%", "(", "name", ",", "__version__", ")" ]
https://github.com/EventGhost/EventGhost/blob/177be516849e74970d2e13cda82244be09f277ce/lib27/site-packages/requests/utils.py#L591-L593
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit /tools/sqli/plugins/dbms/maxdb/syntax.py
python
Syntax.__init__
(self)
[]
def __init__(self):
    GenericSyntax.__init__(self)
[ "def", "__init__", "(", "self", ")", ":", "GenericSyntax", ".", "__init__", "(", "self", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/sqli/plugins/dbms/maxdb/syntax.py#L11-L12
rowliny/DiffHelper
ab3a96f58f9579d0023aed9ebd785f4edf26f8af
Tool/SitePackages/nltk/tokenize/punkt.py
python
PunktSentenceTokenizer.text_contains_sentbreak
(self, text)
return False
Returns True if the given text includes a sentence break.
Returns True if the given text includes a sentence break.
[ "Returns", "True", "if", "the", "given", "text", "includes", "a", "sentence", "break", "." ]
def text_contains_sentbreak(self, text):
    """
    Returns True if the given text includes a sentence break.
    """
    found = False  # used to ignore last token
    for tok in self._annotate_tokens(self._tokenize_words(text)):
        if found:
            return True
        if tok.sentbreak:
            found = True
    return False
[ "def", "text_contains_sentbreak", "(", "self", ",", "text", ")", ":", "found", "=", "False", "# used to ignore last token", "for", "tok", "in", "self", ".", "_annotate_tokens", "(", "self", ".", "_tokenize_words", "(", "text", ")", ")", ":", "if", "found", ":", "return", "True", "if", "tok", ".", "sentbreak", ":", "found", "=", "True", "return", "False" ]
https://github.com/rowliny/DiffHelper/blob/ab3a96f58f9579d0023aed9ebd785f4edf26f8af/Tool/SitePackages/nltk/tokenize/punkt.py#L1381-L1391
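A short usage sketch; an untrained PunktSentenceTokenizer falls back on default heuristics, so exact results can vary with input, but it illustrates that a break on the final token is deliberately ignored:

from nltk.tokenize.punkt import PunktSentenceTokenizer

tokenizer = PunktSentenceTokenizer()  # untrained; default heuristics only
print(tokenizer.text_contains_sentbreak("One sentence. Then another."))  # True
print(tokenizer.text_contains_sentbreak("Just one sentence."))  # False: last token's break ignored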
bookwyrm-social/bookwyrm
0c2537e27a2cdbc0136880dfbbf170d5fec72986
bookwyrm/models/user.py
python
User.deleted
(self)
return not self.is_active
for consistent naming
for consistent naming
[ "for", "consistent", "naming" ]
def deleted(self):
    """for consistent naming"""
    return not self.is_active
[ "def", "deleted", "(", "self", ")", ":", "return", "not", "self", ".", "is_active" ]
https://github.com/bookwyrm-social/bookwyrm/blob/0c2537e27a2cdbc0136880dfbbf170d5fec72986/bookwyrm/models/user.py#L200-L202
calebstewart/pwncat
d67865bdaac60dd0761d0698062e7b443a62c6db
pwncat/platform/__init__.py
python
Platform.chdir
(self, path: Union[str, Path])
Change directories to the given path. This method returns the current working directory prior to the change. :param path: a relative or absolute path to change to :type path: Union[str, Path] :return: current working directory prior to the change :raises: FileNotFoundError: the specified path doesn't exist NotADirectoryError: the specified path is not a directory
Change directories to the given path. This method returns the current working directory prior to the change.
[ "Change", "directories", "to", "the", "given", "path", ".", "This", "method", "returns", "the", "current", "working", "directory", "prior", "to", "the", "change", "." ]
def chdir(self, path: Union[str, Path]):
    """
    Change directories to the given path. This method returns the current
    working directory prior to the change.

    :param path: a relative or absolute path to change to
    :type path: Union[str, Path]
    :return: current working directory prior to the change
    :raises:
        FileNotFoundError: the specified path doesn't exist
        NotADirectoryError: the specified path is not a directory
    """
[ "def", "chdir", "(", "self", ",", "path", ":", "Union", "[", "str", ",", "Path", "]", ")", ":" ]
https://github.com/calebstewart/pwncat/blob/d67865bdaac60dd0761d0698062e7b443a62c6db/pwncat/platform/__init__.py#L812-L823
dmnfarrell/tkintertable
f3fc8950aaa0f087de100d671ce13c24006d9639
tkintertable/App.py
python
ToolBar.__init__
(self, parent=None, parentapp=None)
return
[]
def __init__(self, parent=None, parentapp=None):
    Frame.__init__(self, parent, width=600, height=40)
    from . import Table_images
    self.parentframe = parent
    self.parentapp = parentapp
    #add buttons
    img = Table_images.new_proj()
    self.add_button('New Project', self.parentapp.new_project, img)
    img = Table_images.open_proj()
    self.add_button('Open Project', self.parentapp.open_project, img)
    img = Table_images.save_proj()
    self.add_button('Save Project', self.parentapp.save_project, img)
    img = Table_images.add_row()
    self.add_button('Add record', self.parentapp.add_Row, img)
    img = Table_images.add_col()
    self.add_button('Add col', self.parentapp.add_Column, img)
    img = Table_images.del_row()
    self.add_button('Delete record', self.parentapp.delete_Row, img)
    img = Table_images.del_col()
    self.add_button('Delete col', self.parentapp.delete_Column, img)
    img = Table_images.plot()
    self.add_button('Plot', self.parentapp.plot, img)
    img = Table_images.plotprefs()
    self.add_button('Plot Prefs', self.parentapp.plotSetup, img)
    return
[ "def", "__init__", "(", "self", ",", "parent", "=", "None", ",", "parentapp", "=", "None", ")", ":", "Frame", ".", "__init__", "(", "self", ",", "parent", ",", "width", "=", "600", ",", "height", "=", "40", ")", "from", ".", "import", "Table_images", "self", ".", "parentframe", "=", "parent", "self", ".", "parentapp", "=", "parentapp", "#add buttons", "img", "=", "Table_images", ".", "new_proj", "(", ")", "self", ".", "add_button", "(", "'New Project'", ",", "self", ".", "parentapp", ".", "new_project", ",", "img", ")", "img", "=", "Table_images", ".", "open_proj", "(", ")", "self", ".", "add_button", "(", "'Open Project'", ",", "self", ".", "parentapp", ".", "open_project", ",", "img", ")", "img", "=", "Table_images", ".", "save_proj", "(", ")", "self", ".", "add_button", "(", "'Save Project'", ",", "self", ".", "parentapp", ".", "save_project", ",", "img", ")", "img", "=", "Table_images", ".", "add_row", "(", ")", "self", ".", "add_button", "(", "'Add record'", ",", "self", ".", "parentapp", ".", "add_Row", ",", "img", ")", "img", "=", "Table_images", ".", "add_col", "(", ")", "self", ".", "add_button", "(", "'Add col'", ",", "self", ".", "parentapp", ".", "add_Column", ",", "img", ")", "img", "=", "Table_images", ".", "del_row", "(", ")", "self", ".", "add_button", "(", "'Delete record'", ",", "self", ".", "parentapp", ".", "delete_Row", ",", "img", ")", "img", "=", "Table_images", ".", "del_col", "(", ")", "self", ".", "add_button", "(", "'Delete col'", ",", "self", ".", "parentapp", ".", "delete_Column", ",", "img", ")", "img", "=", "Table_images", ".", "plot", "(", ")", "self", ".", "add_button", "(", "'Plot'", ",", "self", ".", "parentapp", ".", "plot", ",", "img", ")", "img", "=", "Table_images", ".", "plotprefs", "(", ")", "self", ".", "add_button", "(", "'Plot Prefs'", ",", "self", ".", "parentapp", ".", "plotSetup", ",", "img", ")", "return" ]
https://github.com/dmnfarrell/tkintertable/blob/f3fc8950aaa0f087de100d671ce13c24006d9639/tkintertable/App.py#L503-L528
nlloyd/SubliminalCollaborator
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
libs/twisted/words/xish/utility.py
python
CallbackList.removeCallback
(self, method)
Remove callback. @param method: The callable to be removed.
Remove callback.
[ "Remove", "callback", "." ]
def removeCallback(self, method):
    """
    Remove callback.

    @param method: The callable to be removed.
    """
    if method in self.callbacks:
        del self.callbacks[method]
[ "def", "removeCallback", "(", "self", ",", "method", ")", ":", "if", "method", "in", "self", ".", "callbacks", ":", "del", "self", ".", "callbacks", "[", "method", "]" ]
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/words/xish/utility.py#L77-L85
robclewley/compneuro
b10edb3cb44b31389a03f5e408294a751e34afbb
phaseplane.py
python
plot_PP_vf
(gen, xname, yname, N=20, subdomain=None, scale_exp=0)
Draw 2D vector field in (xname, yname) coordinates of given Generator, sampling on a uniform grid of n by n points. Optional subdomain dictionary specifies axes limits in each variable, otherwise Generator's xdomain attribute will be used. For systems of dimension > 2, the non-phase plane variables will be held constant at their initial condition values set in the Generator. Optional scale_exp is an exponent (domain is all reals) which rescales size of arrows in case of disparate scales in the vector field. Larger values of scale magnify the arrow sizes. For stiff vector fields, values from -3 to 3 may be necessary to resolve arrows in certain regions. Requires matplotlib 0.99 or later
Draw 2D vector field in (xname, yname) coordinates of given Generator, sampling on a uniform grid of n by n points.
[ "Draw", "2D", "vector", "field", "in", "(", "xname", "yname", ")", "coordinates", "of", "given", "Generator", "sampling", "on", "a", "uniform", "grid", "of", "n", "by", "n", "points", "." ]
def plot_PP_vf(gen, xname, yname, N=20, subdomain=None, scale_exp=0):
    """Draw 2D vector field in (xname, yname) coordinates of given Generator,
    sampling on a uniform grid of n by n points.

    Optional subdomain dictionary specifies axes limits in each variable,
    otherwise Generator's xdomain attribute will be used.

    For systems of dimension > 2, the non-phase plane variables will be held
    constant at their initial condition values set in the Generator.

    Optional scale_exp is an exponent (domain is all reals) which rescales
    size of arrows in case of disparate scales in the vector field. Larger
    values of scale magnify the arrow sizes. For stiff vector fields, values
    from -3 to 3 may be necessary to resolve arrows in certain regions.

    Requires matplotlib 0.99 or later
    """
    assert N > 1
    xdom = gen.xdomain[xname]
    ydom = gen.xdomain[yname]
    if subdomain is not None:
        try:
            xdom = subdomain[xname]
        except KeyError:
            pass
        try:
            ydom = subdomain[yname]
        except KeyError:
            pass
    assert all(isfinite(xdom)), "Must specify a finite domain for x direction"
    assert all(isfinite(ydom)), "Must specify a finite domain for y direction"
    w = xdom[1]-xdom[0]
    h = ydom[1]-ydom[0]

    xdict = gen.initialconditions.copy()

    xix = gen.funcspec.vars.index(xname)
    yix = gen.funcspec.vars.index(yname)

    xs = np.linspace(xdom[0], xdom[1], N)
    ys = np.linspace(ydom[0], ydom[1], N)

    X, Y = np.meshgrid(xs, ys)
    dxs, dys = np.meshgrid(xs, ys)

    ## dx_big = 0
    ## dy_big = 0
    dz_big = 0
    vec_dict = {}
    # dxs = array((n,), float)
    # dys = array((n,), float)
    for xi, x in enumerate(xs):
        for yi, y in enumerate(ys):
            xdict.update({xname: x, yname: y})
            dx, dy = gen.Rhs(0, xdict)[[xix, yix]]  # note order of indices
            dxs[yi, xi] = dx
            dys[yi, xi] = dy
            dz = np.linalg.norm((dx, dy))
            ## vec_dict[ (x,y) ] = (dx, dy, dz)
            ## if dx > dx_big:
            ##     dx_big = dx
            ## if dy > dy_big:
            ##     dy_big = dy
            if dz > dz_big:
                dz_big = dz

    plt.quiver(X, Y, dxs, dys, angles='xy', pivot='middle', units='inches',
               scale=dz_big*max(h,w)/(10*exp(2*scale_exp)),
               lw=0.01/exp(scale_exp-1),
               headwidth=max(2,1.5/(exp(scale_exp-1))),
               #headlength=2*max(2,1.5/(exp(scale_exp-1))),
               width=0.001*max(h,w), minshaft=2, minlength=0.001)

    ## # Use 95% of interval size
    ## longest_x = w*0.95/(n-1)
    ## longest_y = h*0.95/(n-1)
    ## longest = min(longest_x, longest_y)
    ##
    ## scaling_x = longest_x/dx_big
    ## scaling_y = longest_y/dy_big
    ## scaling = min(scaling_x, scaling_y)

    ax = plt.gca()
    ## hw = longest/10
    ## hl = hw*2
    ## for x in xs:
    ##     for y in ys:
    ##         dx, dy, dz = vec_dict[ (x,y) ]
    ##         plt.arrow(x, y, scaling*dx, yscale*scaling*dy,
    ##                   head_length=hl, head_width=hw, length_includes_head=True)
    ax.set_xlim(xdom)
    ax.set_ylim(ydom)
    plt.draw()
[ "def", "plot_PP_vf", "(", "gen", ",", "xname", ",", "yname", ",", "N", "=", "20", ",", "subdomain", "=", "None", ",", "scale_exp", "=", "0", ")", ":", "assert", "N", ">", "1", "xdom", "=", "gen", ".", "xdomain", "[", "xname", "]", "ydom", "=", "gen", ".", "xdomain", "[", "yname", "]", "if", "subdomain", "is", "not", "None", ":", "try", ":", "xdom", "=", "subdomain", "[", "xname", "]", "except", "KeyError", ":", "pass", "try", ":", "ydom", "=", "subdomain", "[", "yname", "]", "except", "KeyError", ":", "pass", "assert", "all", "(", "isfinite", "(", "xdom", ")", ")", ",", "\"Must specify a finite domain for x direction\"", "assert", "all", "(", "isfinite", "(", "ydom", ")", ")", ",", "\"Must specify a finite domain for y direction\"", "w", "=", "xdom", "[", "1", "]", "-", "xdom", "[", "0", "]", "h", "=", "ydom", "[", "1", "]", "-", "ydom", "[", "0", "]", "xdict", "=", "gen", ".", "initialconditions", ".", "copy", "(", ")", "xix", "=", "gen", ".", "funcspec", ".", "vars", ".", "index", "(", "xname", ")", "yix", "=", "gen", ".", "funcspec", ".", "vars", ".", "index", "(", "yname", ")", "xs", "=", "np", ".", "linspace", "(", "xdom", "[", "0", "]", ",", "xdom", "[", "1", "]", ",", "N", ")", "ys", "=", "np", ".", "linspace", "(", "ydom", "[", "0", "]", ",", "ydom", "[", "1", "]", ",", "N", ")", "X", ",", "Y", "=", "np", ".", "meshgrid", "(", "xs", ",", "ys", ")", "dxs", ",", "dys", "=", "np", ".", "meshgrid", "(", "xs", ",", "ys", ")", "## dx_big = 0", "## dy_big = 0", "dz_big", "=", "0", "vec_dict", "=", "{", "}", "# dxs = array((n,), float)", "# dys = array((n,), float)", "for", "xi", ",", "x", "in", "enumerate", "(", "xs", ")", ":", "for", "yi", ",", "y", "in", "enumerate", "(", "ys", ")", ":", "xdict", ".", "update", "(", "{", "xname", ":", "x", ",", "yname", ":", "y", "}", ")", "dx", ",", "dy", "=", "gen", ".", "Rhs", "(", "0", ",", "xdict", ")", "[", "[", "xix", ",", "yix", "]", "]", "# note order of indices", "dxs", "[", "yi", ",", "xi", "]", "=", "dx", "dys", "[", "yi", ",", "xi", "]", "=", "dy", "dz", "=", "np", ".", "linalg", ".", "norm", "(", "(", "dx", ",", "dy", ")", ")", "## vec_dict[ (x,y) ] = (dx, dy, dz)", "## if dx > dx_big:", "## dx_big = dx", "## if dy > dy_big:", "## dy_big = dy", "if", "dz", ">", "dz_big", ":", "dz_big", "=", "dz", "plt", ".", "quiver", "(", "X", ",", "Y", ",", "dxs", ",", "dys", ",", "angles", "=", "'xy'", ",", "pivot", "=", "'middle'", ",", "units", "=", "'inches'", ",", "scale", "=", "dz_big", "*", "max", "(", "h", ",", "w", ")", "/", "(", "10", "*", "exp", "(", "2", "*", "scale_exp", ")", ")", ",", "lw", "=", "0.01", "/", "exp", "(", "scale_exp", "-", "1", ")", ",", "headwidth", "=", "max", "(", "2", ",", "1.5", "/", "(", "exp", "(", "scale_exp", "-", "1", ")", ")", ")", ",", "#headlength=2*max(2,1.5/(exp(scale_exp-1))),", "width", "=", "0.001", "*", "max", "(", "h", ",", "w", ")", ",", "minshaft", "=", "2", ",", "minlength", "=", "0.001", ")", "## # Use 95% of interval size", "## longest_x = w*0.95/(n-1)", "## longest_y = h*0.95/(n-1)", "## longest = min(longest_x, longest_y)", "##", "## scaling_x = longest_x/dx_big", "## scaling_y = longest_y/dy_big", "## scaling = min(scaling_x, scaling_y)", "ax", "=", "plt", ".", "gca", "(", ")", "## hw = longest/10", "## hl = hw*2", "## for x in xs:", "## for y in ys:", "## dx, dy, dz = vec_dict[ (x,y) ]", "## plt.arrow(x, y, scaling*dx, yscale*scaling*dy,", "## head_length=hl, head_width=hw, length_includes_head=True)", "ax", ".", "set_xlim", "(", "xdom", ")", "ax", ".", "set_ylim", "(", "ydom", ")", "plt", ".", 
"draw", "(", ")" ]
https://github.com/robclewley/compneuro/blob/b10edb3cb44b31389a03f5e408294a751e34afbb/phaseplane.py#L3775-L3868
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_clusterrole.py
python
Utils.filter_versions
(stdout)
return version_dict
filter the oc version output
filter the oc version output
[ "filter", "the", "oc", "version", "output" ]
def filter_versions(stdout):
    ''' filter the oc version output '''

    version_dict = {}
    version_search = ['oc', 'openshift', 'kubernetes']

    for line in stdout.strip().split('\n'):
        for term in version_search:
            if not line:
                continue
            if line.startswith(term):
                version_dict[term] = line.split()[-1]

    # horrible hack to get openshift version in Openshift 3.2
    # By default "oc version in 3.2 does not return an "openshift" version
    if "openshift" not in version_dict:
        version_dict["openshift"] = version_dict["oc"]

    return version_dict
[ "def", "filter_versions", "(", "stdout", ")", ":", "version_dict", "=", "{", "}", "version_search", "=", "[", "'oc'", ",", "'openshift'", ",", "'kubernetes'", "]", "for", "line", "in", "stdout", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", ":", "for", "term", "in", "version_search", ":", "if", "not", "line", ":", "continue", "if", "line", ".", "startswith", "(", "term", ")", ":", "version_dict", "[", "term", "]", "=", "line", ".", "split", "(", ")", "[", "-", "1", "]", "# horrible hack to get openshift version in Openshift 3.2", "# By default \"oc version in 3.2 does not return an \"openshift\" version", "if", "\"openshift\"", "not", "in", "version_dict", ":", "version_dict", "[", "\"openshift\"", "]", "=", "version_dict", "[", "\"oc\"", "]", "return", "version_dict" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_clusterrole.py#L1268-L1286
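A usage sketch, assuming filter_versions is in scope as defined above; the sample lines imitate typical `oc version` output and are invented for illustration:

sample = "oc v3.9.40\nopenshift v3.9.40\nkubernetes v1.9.1+a0ce1bc657"
print(filter_versions(sample))
# {'oc': 'v3.9.40', 'openshift': 'v3.9.40', 'kubernetes': 'v1.9.1+a0ce1bc657'}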
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/fax/v1/fax/fax_media.py
python
FaxMediaInstance.sid
(self)
return self._properties['sid']
:returns: The unique string that identifies the resource :rtype: unicode
:returns: The unique string that identifies the resource :rtype: unicode
[ ":", "returns", ":", "The", "unique", "string", "that", "identifies", "the", "resource", ":", "rtype", ":", "unicode" ]
def sid(self): """ :returns: The unique string that identifies the resource :rtype: unicode """ return self._properties['sid']
[ "def", "sid", "(", "self", ")", ":", "return", "self", ".", "_properties", "[", "'sid'", "]" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/fax/v1/fax/fax_media.py#L290-L295
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/south/creator/freezer.py
python
model_dependencies
(model, checked_models=None)
return depends
Returns a set of models this one depends on to be defined; things like OneToOneFields as ID, ForeignKeys everywhere, etc.
Returns a set of models this one depends on to be defined; things like OneToOneFields as ID, ForeignKeys everywhere, etc.
[ "Returns", "a", "set", "of", "models", "this", "one", "depends", "on", "to", "be", "defined", ";", "things", "like", "OneToOneFields", "as", "ID", "ForeignKeys", "everywhere", "etc", "." ]
def model_dependencies(model, checked_models=None): """ Returns a set of models this one depends on to be defined; things like OneToOneFields as ID, ForeignKeys everywhere, etc. """ depends = set() checked_models = checked_models or set() # Get deps for each field for field in model._meta.fields + model._meta.many_to_many: depends.update(field_dependencies(field, checked_models)) # Add in any non-abstract bases for base in model.__bases__: if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract: depends.add(base) # Now recurse new_to_check = depends - checked_models while new_to_check: checked_model = new_to_check.pop() if checked_model == model or checked_model in checked_models: continue checked_models.add(checked_model) deps = model_dependencies(checked_model, checked_models) # Loop through dependencies... for dep in deps: # If the new dep is not already checked, add to the queue if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models): new_to_check.add(dep) depends.add(dep) return depends
[ "def", "model_dependencies", "(", "model", ",", "checked_models", "=", "None", ")", ":", "depends", "=", "set", "(", ")", "checked_models", "=", "checked_models", "or", "set", "(", ")", "# Get deps for each field", "for", "field", "in", "model", ".", "_meta", ".", "fields", "+", "model", ".", "_meta", ".", "many_to_many", ":", "depends", ".", "update", "(", "field_dependencies", "(", "field", ",", "checked_models", ")", ")", "# Add in any non-abstract bases", "for", "base", "in", "model", ".", "__bases__", ":", "if", "issubclass", "(", "base", ",", "models", ".", "Model", ")", "and", "hasattr", "(", "base", ",", "'_meta'", ")", "and", "not", "base", ".", "_meta", ".", "abstract", ":", "depends", ".", "add", "(", "base", ")", "# Now recurse", "new_to_check", "=", "depends", "-", "checked_models", "while", "new_to_check", ":", "checked_model", "=", "new_to_check", ".", "pop", "(", ")", "if", "checked_model", "==", "model", "or", "checked_model", "in", "checked_models", ":", "continue", "checked_models", ".", "add", "(", "checked_model", ")", "deps", "=", "model_dependencies", "(", "checked_model", ",", "checked_models", ")", "# Loop through dependencies...", "for", "dep", "in", "deps", ":", "# If the new dep is not already checked, add to the queue", "if", "(", "dep", "not", "in", "depends", ")", "and", "(", "dep", "not", "in", "new_to_check", ")", "and", "(", "dep", "not", "in", "checked_models", ")", ":", "new_to_check", ".", "add", "(", "dep", ")", "depends", ".", "add", "(", "dep", ")", "return", "depends" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/south/creator/freezer.py#L85-L113
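The worklist recursion above generalizes beyond Django; a self-contained sketch of the same transitive-closure pattern on a plain dict graph (the graph and model names are invented for illustration):

# Transitive dependency closure in the style of model_dependencies,
# on a toy dict graph instead of Django model metadata.
graph = {
    'Invoice': {'Customer', 'Product'},
    'Customer': {'Address'},
    'Product': set(),
    'Address': set(),
}

def dependencies(node, checked=None):
    # Same worklist pattern: seed with direct deps, then drain a queue.
    depends = set(graph[node])
    checked = checked or set()
    to_check = depends - checked
    while to_check:
        current = to_check.pop()
        if current == node or current in checked:
            continue
        checked.add(current)
        for dep in graph[current]:
            if dep not in depends and dep not in to_check and dep not in checked:
                to_check.add(dep)
            depends.add(dep)
    return depends

print(sorted(dependencies('Invoice')))  # ['Address', 'Customer', 'Product']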
aaronportnoy/toolbag
2d39457a7617b2f334d203d8c8cf88a5a25ef1fa
toolbag/agent/dbg/vtrace/platforms/base.py
python
TracerBase.fireNotifiers
(self, event)
Fire the registered notifiers for the NOTIFY_* event.
Fire the registered notifiers for the NOTIFY_* event.
[ "Fire", "the", "registered", "notifiers", "for", "the", "NOTIFY_", "*", "event", "." ]
def fireNotifiers(self, event): """ Fire the registered notifiers for the NOTIFY_* event. """ if event == vtrace.NOTIFY_SIGNAL: signo = self.getCurrentSignal() if signo in self.getMeta("IgnoredSignals", []): if vtrace.verbose: print "Ignoring",signo self.runAgain() return alllist = self.getNotifiers(vtrace.NOTIFY_ALL) nlist = self.getNotifiers(event) trace = self # if the trace has a proxy it's notifiers # need that, cause we can't be pickled ;) if self.proxy: trace = self.proxy # First we notify ourself.... self.handleEvent(event, self) # The "NOTIFY_ALL" guys get priority for notifier in alllist: try: notifier.handleEvent(event,trace) except: print "WARNING: Notifier exception for",repr(notifier) traceback.print_exc() for notifier in nlist: try: notifier.handleEvent(event,trace) except: print "WARNING: Notifier exception for",repr(notifier) traceback.print_exc()
[ "def", "fireNotifiers", "(", "self", ",", "event", ")", ":", "if", "event", "==", "vtrace", ".", "NOTIFY_SIGNAL", ":", "signo", "=", "self", ".", "getCurrentSignal", "(", ")", "if", "signo", "in", "self", ".", "getMeta", "(", "\"IgnoredSignals\"", ",", "[", "]", ")", ":", "if", "vtrace", ".", "verbose", ":", "print", "\"Ignoring\"", ",", "signo", "self", ".", "runAgain", "(", ")", "return", "alllist", "=", "self", ".", "getNotifiers", "(", "vtrace", ".", "NOTIFY_ALL", ")", "nlist", "=", "self", ".", "getNotifiers", "(", "event", ")", "trace", "=", "self", "# if the trace has a proxy it's notifiers", "# need that, cause we can't be pickled ;)", "if", "self", ".", "proxy", ":", "trace", "=", "self", ".", "proxy", "# First we notify ourself....", "self", ".", "handleEvent", "(", "event", ",", "self", ")", "# The \"NOTIFY_ALL\" guys get priority", "for", "notifier", "in", "alllist", ":", "try", ":", "notifier", ".", "handleEvent", "(", "event", ",", "trace", ")", "except", ":", "print", "\"WARNING: Notifier exception for\"", ",", "repr", "(", "notifier", ")", "traceback", ".", "print_exc", "(", ")", "for", "notifier", "in", "nlist", ":", "try", ":", "notifier", ".", "handleEvent", "(", "event", ",", "trace", ")", "except", ":", "print", "\"WARNING: Notifier exception for\"", ",", "repr", "(", "notifier", ")", "traceback", ".", "print_exc", "(", ")" ]
https://github.com/aaronportnoy/toolbag/blob/2d39457a7617b2f334d203d8c8cf88a5a25ef1fa/toolbag/agent/dbg/vtrace/platforms/base.py#L338-L374
F8LEFT/DecLLVM
d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c
python/idc.py
python
GetPrevIndex
(tag, array_id, idx)
Get index of the previous existing array element. @param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR @param array_id: The array ID. @param idx: Index of the current element. @return: -1 if no more elements, otherwise returns index of the previous array element of given type.
Get index of the previous existing array element.
[ "Get", "index", "of", "the", "previous", "existing", "array", "element", "." ]
def GetPrevIndex(tag, array_id, idx): """ Get index of the previous existing array element. @param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR @param array_id: The array ID. @param idx: Index of the current element. @return: -1 if no more elements, otherwise returns index of the previous array element of given type. """ node = __GetArrayById(array_id) try: if tag == AR_LONG: return __l2m1(node.altprev(idx, tag)) elif tag == AR_STR: return __l2m1(node.supprev(idx, tag)) else: return -1 except OverflowError: # typically: An index of -1 was passed. return -1
[ "def", "GetPrevIndex", "(", "tag", ",", "array_id", ",", "idx", ")", ":", "node", "=", "__GetArrayById", "(", "array_id", ")", "try", ":", "if", "tag", "==", "AR_LONG", ":", "return", "__l2m1", "(", "node", ".", "altprev", "(", "idx", ",", "tag", ")", ")", "elif", "tag", "==", "AR_STR", ":", "return", "__l2m1", "(", "node", ".", "supprev", "(", "idx", ",", "tag", ")", ")", "else", ":", "return", "-", "1", "except", "OverflowError", ":", "# typically: An index of -1 was passed.", "return", "-", "1" ]
https://github.com/F8LEFT/DecLLVM/blob/d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c/python/idc.py#L6597-L6618
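A typical reverse walk pairs this with its companion helpers from the same idc module; the snippet below assumes GetArrayId, GetLastIndex and GetArrayElement exist alongside GetPrevIndex (the usual old-IDAPython array API) and only runs inside IDA:

# Reverse iteration over an IDA netnode array via GetPrevIndex (IDA-only sketch;
# the companion idc helpers used here are assumptions, 'my_array' is hypothetical).
array_id = GetArrayId('my_array')
idx = GetLastIndex(AR_LONG, array_id)
while idx != -1:                              # -1 signals no more elements
    print(idx, GetArrayElement(AR_LONG, array_id, idx))
    idx = GetPrevIndex(AR_LONG, array_id, idx)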
Jajcus/pyxmpp2
59e5fd7c8837991ac265dc6aad23a6bd256768a7
pyxmpp2/xmppparser.py
python
ParserTarget.end
(self, tag)
Handle an end tag. Call the handler's 'stream_end' method with the root element (built by the `start` method). On the first level below root, send the built element tree to the handler via the 'stanza methods'. Any tag below will just be added to the tree builder.
Handle an end tag.
[ "Handle", "an", "end", "tag", "." ]
def end(self, tag): """Handle an end tag. Call the handler's 'stream_end' method with an the root element (built by the `start` method). On the first level below root, sent the built element tree to the handler via the 'stanza methods'. Any tag below will be just added to the tree builder. """ self._level -= 1 if self._level < 0: self._handler.stream_parse_error(u"Unexpected end tag for: {0!r}" .format(tag)) return if self._level == 0: if tag != self._root.tag: self._handler.stream_parse_error(u"Unexpected end tag for:" " {0!r} (stream end tag expected)".format(tag)) return self._handler.stream_end() return element = self._builder.end(tag) if self._level == 1: self._handler.stream_element(element)
[ "def", "end", "(", "self", ",", "tag", ")", ":", "self", ".", "_level", "-=", "1", "if", "self", ".", "_level", "<", "0", ":", "self", ".", "_handler", ".", "stream_parse_error", "(", "u\"Unexpected end tag for: {0!r}\"", ".", "format", "(", "tag", ")", ")", "return", "if", "self", ".", "_level", "==", "0", ":", "if", "tag", "!=", "self", ".", "_root", ".", "tag", ":", "self", ".", "_handler", ".", "stream_parse_error", "(", "u\"Unexpected end tag for:\"", "\" {0!r} (stream end tag expected)\"", ".", "format", "(", "tag", ")", ")", "return", "self", ".", "_handler", ".", "stream_end", "(", ")", "return", "element", "=", "self", ".", "_builder", ".", "end", "(", "tag", ")", "if", "self", ".", "_level", "==", "1", ":", "self", ".", "_handler", ".", "stream_element", "(", "element", ")" ]
https://github.com/Jajcus/pyxmpp2/blob/59e5fd7c8837991ac265dc6aad23a6bd256768a7/pyxmpp2/xmppparser.py#L133-L158
OpenCobolIDE/OpenCobolIDE
c78d0d335378e5fe0a5e74f53c19b68b55e85388
open_cobol_ide/extlibs/pygments/formatters/terminal256.py
python
Terminal256Formatter._build_color_table
(self)
[]
def _build_color_table(self): # colors 0..15: 16 basic colors self.xterm_colors.append((0x00, 0x00, 0x00)) # 0 self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1 self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2 self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3 self.xterm_colors.append((0x00, 0x00, 0xee)) # 4 self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5 self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6 self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7 self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8 self.xterm_colors.append((0xff, 0x00, 0x00)) # 9 self.xterm_colors.append((0x00, 0xff, 0x00)) # 10 self.xterm_colors.append((0xff, 0xff, 0x00)) # 11 self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12 self.xterm_colors.append((0xff, 0x00, 0xff)) # 13 self.xterm_colors.append((0x00, 0xff, 0xff)) # 14 self.xterm_colors.append((0xff, 0xff, 0xff)) # 15 # colors 16..232: the 6x6x6 color cube valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff) for i in range(217): r = valuerange[(i // 36) % 6] g = valuerange[(i // 6) % 6] b = valuerange[i % 6] self.xterm_colors.append((r, g, b)) # colors 233..253: grayscale for i in range(1, 22): v = 8 + i * 10 self.xterm_colors.append((v, v, v))
[ "def", "_build_color_table", "(", "self", ")", ":", "# colors 0..15: 16 basic colors", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0x00", ",", "0x00", ")", ")", "# 0", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xcd", ",", "0x00", ",", "0x00", ")", ")", "# 1", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0xcd", ",", "0x00", ")", ")", "# 2", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xcd", ",", "0xcd", ",", "0x00", ")", ")", "# 3", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0x00", ",", "0xee", ")", ")", "# 4", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xcd", ",", "0x00", ",", "0xcd", ")", ")", "# 5", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0xcd", ",", "0xcd", ")", ")", "# 6", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xe5", ",", "0xe5", ",", "0xe5", ")", ")", "# 7", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x7f", ",", "0x7f", ",", "0x7f", ")", ")", "# 8", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xff", ",", "0x00", ",", "0x00", ")", ")", "# 9", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0xff", ",", "0x00", ")", ")", "# 10", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xff", ",", "0xff", ",", "0x00", ")", ")", "# 11", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x5c", ",", "0x5c", ",", "0xff", ")", ")", "# 12", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xff", ",", "0x00", ",", "0xff", ")", ")", "# 13", "self", ".", "xterm_colors", ".", "append", "(", "(", "0x00", ",", "0xff", ",", "0xff", ")", ")", "# 14", "self", ".", "xterm_colors", ".", "append", "(", "(", "0xff", ",", "0xff", ",", "0xff", ")", ")", "# 15", "# colors 16..232: the 6x6x6 color cube", "valuerange", "=", "(", "0x00", ",", "0x5f", ",", "0x87", ",", "0xaf", ",", "0xd7", ",", "0xff", ")", "for", "i", "in", "range", "(", "217", ")", ":", "r", "=", "valuerange", "[", "(", "i", "//", "36", ")", "%", "6", "]", "g", "=", "valuerange", "[", "(", "i", "//", "6", ")", "%", "6", "]", "b", "=", "valuerange", "[", "i", "%", "6", "]", "self", ".", "xterm_colors", ".", "append", "(", "(", "r", ",", "g", ",", "b", ")", ")", "# colors 233..253: grayscale", "for", "i", "in", "range", "(", "1", ",", "22", ")", ":", "v", "=", "8", "+", "i", "*", "10", "self", ".", "xterm_colors", ".", "append", "(", "(", "v", ",", "v", ",", "v", ")", ")" ]
https://github.com/OpenCobolIDE/OpenCobolIDE/blob/c78d0d335378e5fe0a5e74f53c19b68b55e85388/open_cobol_ide/extlibs/pygments/formatters/terminal256.py#L117-L151
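One common consumer of such a table is nearest-color quantization; a standalone sketch that rebuilds just the cube portion with the same loop and finds the closest xterm index for an RGB triple (illustrative only, not part of the formatter's API):

# Nearest-xterm-color lookup against the color cube built as above.
# The same range(217) is kept for fidelity (its last entry duplicates black).
valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
cube = [(valuerange[(i // 36) % 6], valuerange[(i // 6) % 6], valuerange[i % 6])
        for i in range(217)]

def closest_cube_index(r, g, b):
    # Squared Euclidean distance in RGB space; cube entries start at index 16.
    distances = [(r - cr) ** 2 + (g - cg) ** 2 + (b - cb) ** 2
                 for cr, cg, cb in cube]
    return 16 + distances.index(min(distances))

print(closest_cube_index(0x40, 0x80, 0xc0))  # 67, i.e. the entry (0x5f, 0x87, 0xaf)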
mrkipling/maraschino
c6be9286937783ae01df2d6d8cebfc8b2734a7d7
lib/sqlalchemy/dialects/mysql/base.py
python
FLOAT.__init__
(self, precision=None, scale=None, asdecimal=False, **kw)
Construct a FLOAT. :param precision: Total digits in this number. If scale and precision are both None, values are stored to limits allowed by the server. :param scale: The number of digits after the decimal point. :param unsigned: a boolean, optional. :param zerofill: Optional. If true, values will be stored as strings left-padded with zeros. Note that this does not affect the values returned by the underlying database API, which continue to be numeric.
Construct a FLOAT.
[ "Construct", "a", "FLOAT", "." ]
def __init__(self, precision=None, scale=None, asdecimal=False, **kw): """Construct a FLOAT. :param precision: Total digits in this number. If scale and precision are both None, values are stored to limits allowed by the server. :param scale: The number of digits after the decimal point. :param unsigned: a boolean, optional. :param zerofill: Optional. If true, values will be stored as strings left-padded with zeros. Note that this does not effect the values returned by the underlying database API, which continue to be numeric. """ super(FLOAT, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw)
[ "def", "__init__", "(", "self", ",", "precision", "=", "None", ",", "scale", "=", "None", ",", "asdecimal", "=", "False", ",", "*", "*", "kw", ")", ":", "super", "(", "FLOAT", ",", "self", ")", ".", "__init__", "(", "precision", "=", "precision", ",", "scale", "=", "scale", ",", "asdecimal", "=", "asdecimal", ",", "*", "*", "kw", ")" ]
https://github.com/mrkipling/maraschino/blob/c6be9286937783ae01df2d6d8cebfc8b2734a7d7/lib/sqlalchemy/dialects/mysql/base.py#L457-L474
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-50/skeinforge_application/skeinforge_plugins/craft.py
python
writeOutput
(fileName)
return skeinforge_craft.writeOutput(fileName)
Craft a gcode file.
Craft a gcode file.
[ "Craft", "a", "gcode", "file", "." ]
def writeOutput(fileName): "Craft a gcode file." return skeinforge_craft.writeOutput(fileName)
[ "def", "writeOutput", "(", "fileName", ")", ":", "return", "skeinforge_craft", ".", "writeOutput", "(", "fileName", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-50/skeinforge_application/skeinforge_plugins/craft.py#L61-L63
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py
python
split_first
(s, delims)
return s[:min_idx], s[min_idx + 1:], min_delim
Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims.
Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter.
[ "Given", "a", "string", "and", "an", "iterable", "of", "delimiters", "split", "on", "the", "first", "found", "delimiter", ".", "Return", "two", "split", "parts", "and", "the", "matched", "delimiter", "." ]
def split_first(s, delims): """ Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """ min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return s, '', None return s[:min_idx], s[min_idx + 1:], min_delim
[ "def", "split_first", "(", "s", ",", "delims", ")", ":", "min_idx", "=", "None", "min_delim", "=", "None", "for", "d", "in", "delims", ":", "idx", "=", "s", ".", "find", "(", "d", ")", "if", "idx", "<", "0", ":", "continue", "if", "min_idx", "is", "None", "or", "idx", "<", "min_idx", ":", "min_idx", "=", "idx", "min_delim", "=", "d", "if", "min_idx", "is", "None", "or", "min_idx", "<", "0", ":", "return", "s", ",", "''", ",", "None", "return", "s", "[", ":", "min_idx", "]", ",", "s", "[", "min_idx", "+", "1", ":", "]", ",", "min_delim" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py#L99-L129
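As the docstring concedes, the loop is linear in the number of delimiters; for a large delimiter set the same contract can be met with one regex scan. A sketch (not urllib3 code, and it assumes a non-empty delimiter set):

import re

def split_first_re(s, delims):
    # One-pass variant of split_first using a character class.
    match = re.search('[%s]' % re.escape(''.join(delims)), s)
    if not match:
        return s, '', None
    i = match.start()
    return s[:i], s[i + 1:], s[i]

print(split_first_re('foo/bar?baz', '?/='))  # ('foo', 'bar?baz', '/')
print(split_first_re('foo/bar?baz', '123'))  # ('foo/bar?baz', '', None)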
google/grr
8ad8a4d2c5a93c92729206b7771af19d92d4f915
grr/client/grr_response_client/windows/installers.py
python
_CreateService
(service_name: str, description: str, command_line: str)
Creates a Windows service.
Creates a Windows service.
[ "Creates", "a", "Windows", "service", "." ]
def _CreateService(service_name: str, description: str, command_line: str) -> None: """Creates a Windows service.""" logging.info("Creating service '%s'.", service_name) with contextlib.ExitStack() as stack: hscm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_ALL_ACCESS) stack.callback(win32service.CloseServiceHandle, hscm) hs = win32service.CreateService(hscm, service_name, service_name, win32service.SERVICE_ALL_ACCESS, win32service.SERVICE_WIN32_OWN_PROCESS, win32service.SERVICE_AUTO_START, win32service.SERVICE_ERROR_NORMAL, command_line, None, 0, None, None, None) stack.callback(win32service.CloseServiceHandle, hs) service_failure_actions = { "ResetPeriod": SERVICE_RESET_FAIL_COUNT_DELAY_SEC, "RebootMsg": u"", "Command": u"", "Actions": [ (win32service.SC_ACTION_RESTART, SERVICE_RESTART_DELAY_MSEC), (win32service.SC_ACTION_RESTART, SERVICE_RESTART_DELAY_MSEC), (win32service.SC_ACTION_RESTART, SERVICE_RESTART_DELAY_MSEC), ] } win32service.ChangeServiceConfig2( hs, win32service.SERVICE_CONFIG_FAILURE_ACTIONS, service_failure_actions) win32service.ChangeServiceConfig2(hs, win32service.SERVICE_CONFIG_DESCRIPTION, description) logging.info("Successfully created service '%s'.", service_name)
[ "def", "_CreateService", "(", "service_name", ":", "str", ",", "description", ":", "str", ",", "command_line", ":", "str", ")", "->", "None", ":", "logging", ".", "info", "(", "\"Creating service '%s'.\"", ",", "service_name", ")", "with", "contextlib", ".", "ExitStack", "(", ")", "as", "stack", ":", "hscm", "=", "win32service", ".", "OpenSCManager", "(", "None", ",", "None", ",", "win32service", ".", "SC_MANAGER_ALL_ACCESS", ")", "stack", ".", "callback", "(", "win32service", ".", "CloseServiceHandle", ",", "hscm", ")", "hs", "=", "win32service", ".", "CreateService", "(", "hscm", ",", "service_name", ",", "service_name", ",", "win32service", ".", "SERVICE_ALL_ACCESS", ",", "win32service", ".", "SERVICE_WIN32_OWN_PROCESS", ",", "win32service", ".", "SERVICE_AUTO_START", ",", "win32service", ".", "SERVICE_ERROR_NORMAL", ",", "command_line", ",", "None", ",", "0", ",", "None", ",", "None", ",", "None", ")", "stack", ".", "callback", "(", "win32service", ".", "CloseServiceHandle", ",", "hs", ")", "service_failure_actions", "=", "{", "\"ResetPeriod\"", ":", "SERVICE_RESET_FAIL_COUNT_DELAY_SEC", ",", "\"RebootMsg\"", ":", "u\"\"", ",", "\"Command\"", ":", "u\"\"", ",", "\"Actions\"", ":", "[", "(", "win32service", ".", "SC_ACTION_RESTART", ",", "SERVICE_RESTART_DELAY_MSEC", ")", ",", "(", "win32service", ".", "SC_ACTION_RESTART", ",", "SERVICE_RESTART_DELAY_MSEC", ")", ",", "(", "win32service", ".", "SC_ACTION_RESTART", ",", "SERVICE_RESTART_DELAY_MSEC", ")", ",", "]", "}", "win32service", ".", "ChangeServiceConfig2", "(", "hs", ",", "win32service", ".", "SERVICE_CONFIG_FAILURE_ACTIONS", ",", "service_failure_actions", ")", "win32service", ".", "ChangeServiceConfig2", "(", "hs", ",", "win32service", ".", "SERVICE_CONFIG_DESCRIPTION", ",", "description", ")", "logging", ".", "info", "(", "\"Successfully created service '%s'.\"", ",", "service_name", ")" ]
https://github.com/google/grr/blob/8ad8a4d2c5a93c92729206b7771af19d92d4f915/grr/client/grr_response_client/windows/installers.py#L142-L177
python-ivi/python-usbtmc
d9bfb20b2ef002da787adb6b093e1679705c00e2
usbtmc/usbtmc.py
python
Instrument._abort_bulk_in
(self, btag=None)
Abort bulk in
Abort bulk in
[ "Abort", "bulk", "in" ]
def _abort_bulk_in(self, btag=None): "Abort bulk in" if not self.connected: return if btag is None: btag = self.last_btag # Send INITIATE_ABORT_BULK_IN b = self.device.ctrl_transfer( bmRequestType=usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), bRequest=USBTMC_REQUEST_INITIATE_ABORT_BULK_IN, wValue=btag, wIndex=self.bulk_in_ep.bEndpointAddress, data_or_wLength=0x0002, timeout=self._timeout_ms ) if (b[0] == USBTMC_STATUS_SUCCESS): # Initiate abort bulk in succeeded, wait for completion while True: # Check status b = self.device.ctrl_transfer( bmRequestType=usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), bRequest=USBTMC_REQUEST_CHECK_ABORT_BULK_IN_STATUS, wValue=0x0000, wIndex=self.bulk_in_ep.bEndpointAddress, data_or_wLength=0x0008, timeout=self._timeout_ms ) time.sleep(0.1) if (b[0] != USBTMC_STATUS_PENDING): break else: # no transfer in progress; nothing to do pass
[ "def", "_abort_bulk_in", "(", "self", ",", "btag", "=", "None", ")", ":", "if", "not", "self", ".", "connected", ":", "return", "if", "btag", "is", "None", ":", "btag", "=", "self", ".", "last_btag", "# Send INITIATE_ABORT_BULK_IN", "b", "=", "self", ".", "device", ".", "ctrl_transfer", "(", "bmRequestType", "=", "usb", ".", "util", ".", "build_request_type", "(", "usb", ".", "util", ".", "CTRL_IN", ",", "usb", ".", "util", ".", "CTRL_TYPE_CLASS", ",", "usb", ".", "util", ".", "CTRL_RECIPIENT_ENDPOINT", ")", ",", "bRequest", "=", "USBTMC_REQUEST_INITIATE_ABORT_BULK_IN", ",", "wValue", "=", "btag", ",", "wIndex", "=", "self", ".", "bulk_in_ep", ".", "bEndpointAddress", ",", "data_or_wLength", "=", "0x0002", ",", "timeout", "=", "self", ".", "_timeout_ms", ")", "if", "(", "b", "[", "0", "]", "==", "USBTMC_STATUS_SUCCESS", ")", ":", "# Initiate abort bulk in succeeded, wait for completion", "while", "True", ":", "# Check status", "b", "=", "self", ".", "device", ".", "ctrl_transfer", "(", "bmRequestType", "=", "usb", ".", "util", ".", "build_request_type", "(", "usb", ".", "util", ".", "CTRL_IN", ",", "usb", ".", "util", ".", "CTRL_TYPE_CLASS", ",", "usb", ".", "util", ".", "CTRL_RECIPIENT_ENDPOINT", ")", ",", "bRequest", "=", "USBTMC_REQUEST_CHECK_ABORT_BULK_IN_STATUS", ",", "wValue", "=", "0x0000", ",", "wIndex", "=", "self", ".", "bulk_in_ep", ".", "bEndpointAddress", ",", "data_or_wLength", "=", "0x0008", ",", "timeout", "=", "self", ".", "_timeout_ms", ")", "time", ".", "sleep", "(", "0.1", ")", "if", "(", "b", "[", "0", "]", "!=", "USBTMC_STATUS_PENDING", ")", ":", "break", "else", ":", "# no transfer in progress; nothing to do", "pass" ]
https://github.com/python-ivi/python-usbtmc/blob/d9bfb20b2ef002da787adb6b093e1679705c00e2/usbtmc/usbtmc.py#L900-L935
playframework/play1
0ecac3bc2421ae2dbec27a368bf671eda1c9cba5
python/Lib/difflib.py
python
Differ._qformat
(self, aline, bline, atags, btags)
r""" Format "?" output and deal with leading tabs. Example: >>> d = Differ() >>> results = d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n', ... ' ^ ^ ^ ', ' ^ ^ ^ ') >>> for line in results: print repr(line) ... '- \tabcDefghiJkl\n' '? \t ^ ^ ^\n' '+ \tabcdefGhijkl\n' '? \t ^ ^ ^\n'
r""" Format "?" output and deal with leading tabs.
[ "r", "Format", "?", "output", "and", "deal", "with", "leading", "tabs", "." ]
def _qformat(self, aline, bline, atags, btags): r""" Format "?" output and deal with leading tabs. Example: >>> d = Differ() >>> results = d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n', ... ' ^ ^ ^ ', ' ^ ^ ^ ') >>> for line in results: print repr(line) ... '- \tabcDefghiJkl\n' '? \t ^ ^ ^\n' '+ \tabcdefGhijkl\n' '? \t ^ ^ ^\n' """ # Can hurt, but will probably help most of the time. common = min(_count_leading(aline, "\t"), _count_leading(bline, "\t")) common = min(common, _count_leading(atags[:common], " ")) common = min(common, _count_leading(btags[:common], " ")) atags = atags[common:].rstrip() btags = btags[common:].rstrip() yield "- " + aline if atags: yield "? %s%s\n" % ("\t" * common, atags) yield "+ " + bline if btags: yield "? %s%s\n" % ("\t" * common, btags)
[ "def", "_qformat", "(", "self", ",", "aline", ",", "bline", ",", "atags", ",", "btags", ")", ":", "# Can hurt, but will probably help most of the time.", "common", "=", "min", "(", "_count_leading", "(", "aline", ",", "\"\\t\"", ")", ",", "_count_leading", "(", "bline", ",", "\"\\t\"", ")", ")", "common", "=", "min", "(", "common", ",", "_count_leading", "(", "atags", "[", ":", "common", "]", ",", "\" \"", ")", ")", "common", "=", "min", "(", "common", ",", "_count_leading", "(", "btags", "[", ":", "common", "]", ",", "\" \"", ")", ")", "atags", "=", "atags", "[", "common", ":", "]", ".", "rstrip", "(", ")", "btags", "=", "btags", "[", "common", ":", "]", ".", "rstrip", "(", ")", "yield", "\"- \"", "+", "aline", "if", "atags", ":", "yield", "\"? %s%s\\n\"", "%", "(", "\"\\t\"", "*", "common", ",", "atags", ")", "yield", "\"+ \"", "+", "bline", "if", "btags", ":", "yield", "\"? %s%s\\n\"", "%", "(", "\"\\t\"", "*", "common", ",", "btags", ")" ]
https://github.com/playframework/play1/blob/0ecac3bc2421ae2dbec27a368bf671eda1c9cba5/python/Lib/difflib.py#L1054-L1085
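Although _qformat is private, its '?' guide lines are visible through the public difflib.Differ interface; this runs as-is against the standard library:

import difflib

# The '? ' intraline guide lines below are produced by Differ._qformat.
for line in difflib.Differ().compare(['abcDefghiJkl\n'], ['abcdefGhijkl\n']):
    print(repr(line))
# Expected output on CPython:
# '- abcDefghiJkl\n'
# '?    ^  ^  ^\n'
# '+ abcdefGhijkl\n'
# '?    ^  ^  ^\n'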
rwl/PYPOWER
f5be0406aa54dcebded075de075454f99e2a46e6
pypower/makeApq.py
python
makeApq
(baseMVA, gen)
return Apqh, ubpqh, Apql, ubpql, data
Construct linear constraints for generator capability curves. Constructs the parameters for the following linear constraints implementing trapezoidal generator capability curves, where C{Pg} and C{Qg} are the real and reactive generator injections:: Apqh * [Pg, Qg] <= ubpqh Apql * [Pg, Qg] <= ubpql C{data} contains additional information as shown below. Example:: Apqh, ubpqh, Apql, ubpql, data = makeApq(baseMVA, gen) data['h'] [Qc1max-Qc2max, Pc2-Pc1] data['l'] [Qc2min-Qc1min, Pc1-Pc2] data['ipqh'] indices of gens with general PQ cap curves (upper) data['ipql'] indices of gens with general PQ cap curves (lower) @author: Ray Zimmerman (PSERC Cornell) @author: Carlos E. Murillo-Sanchez (PSERC Cornell & Universidad Autonoma de Manizales)
Construct linear constraints for generator capability curves.
[ "Construct", "linear", "constraints", "for", "generator", "capability", "curves", "." ]
def makeApq(baseMVA, gen): """Construct linear constraints for generator capability curves. Constructs the parameters for the following linear constraints implementing trapezoidal generator capability curves, where C{Pg} and C{Qg} are the real and reactive generator injections:: Apqh * [Pg, Qg] <= ubpqh Apql * [Pg, Qg] <= ubpql C{data} constains additional information as shown below. Example:: Apqh, ubpqh, Apql, ubpql, data = makeApq(baseMVA, gen) data['h'] [Qc1max-Qc2max, Pc2-Pc1] data['l'] [Qc2min-Qc1min, Pc1-Pc2] data['ipqh'] indices of gens with general PQ cap curves (upper) data['ipql'] indices of gens with general PQ cap curves (lower) @author: Ray Zimmerman (PSERC Cornell) @author: Carlos E. Murillo-Sanchez (PSERC Cornell & Universidad Autonoma de Manizales) """ data = {} ## data dimensions ng = gen.shape[0] ## number of dispatchable injections ## which generators require additional linear constraints ## (in addition to simple box constraints) on (Pg,Qg) to correctly ## model their PQ capability curves ipqh = find( hasPQcap(gen, 'U') ) ipql = find( hasPQcap(gen, 'L') ) npqh = ipqh.shape[0] ## number of general PQ capability curves (upper) npql = ipql.shape[0] ## number of general PQ capability curves (lower) ## make Apqh if there is a need to add general PQ capability curves ## use normalized coefficient rows so multipliers have right scaling ## in $$/pu if npqh > 0: data["h"] = c_[gen[ipqh, QC1MAX] - gen[ipqh, QC2MAX], gen[ipqh, PC2] - gen[ipqh, PC1]] ubpqh = data["h"][:, 0] * gen[ipqh, PC1] + \ data["h"][:, 1] * gen[ipqh, QC1MAX] for i in range(npqh): tmp = linalg.norm(data["h"][i, :]) data["h"][i, :] = data["h"][i, :] / tmp ubpqh[i] = ubpqh[i] / tmp Apqh = sparse((data["h"].flatten('F'), (r_[arange(npqh), arange(npqh)], r_[ipqh, ipqh+ng])), (npqh, 2*ng)) ubpqh = ubpqh / baseMVA else: data["h"] = array([]) Apqh = zeros((0, 2*ng)) ubpqh = array([]) ## similarly Apql if npql > 0: data["l"] = c_[gen[ipql, QC2MIN] - gen[ipql, QC1MIN], gen[ipql, PC1] - gen[ipql, PC2]] ubpql = data["l"][:, 0] * gen[ipql, PC1] + \ data["l"][:, 1] * gen[ipql, QC1MIN] for i in range(npql): tmp = linalg.norm(data["l"][i, :]) data["l"][i, :] = data["l"][i, :] / tmp ubpql[i] = ubpql[i] / tmp Apql = sparse((data["l"].flatten('F'), (r_[arange(npql), arange(npql)], r_[ipql, ipql+ng])), (npql, 2*ng)) ubpql = ubpql / baseMVA else: data["l"] = array([]) Apql = zeros((0, 2*ng)) ubpql = array([]) data["ipql"] = ipql data["ipqh"] = ipqh return Apqh, ubpqh, Apql, ubpql, data
[ "def", "makeApq", "(", "baseMVA", ",", "gen", ")", ":", "data", "=", "{", "}", "## data dimensions", "ng", "=", "gen", ".", "shape", "[", "0", "]", "## number of dispatchable injections", "## which generators require additional linear constraints", "## (in addition to simple box constraints) on (Pg,Qg) to correctly", "## model their PQ capability curves", "ipqh", "=", "find", "(", "hasPQcap", "(", "gen", ",", "'U'", ")", ")", "ipql", "=", "find", "(", "hasPQcap", "(", "gen", ",", "'L'", ")", ")", "npqh", "=", "ipqh", ".", "shape", "[", "0", "]", "## number of general PQ capability curves (upper)", "npql", "=", "ipql", ".", "shape", "[", "0", "]", "## number of general PQ capability curves (lower)", "## make Apqh if there is a need to add general PQ capability curves", "## use normalized coefficient rows so multipliers have right scaling", "## in $$/pu", "if", "npqh", ">", "0", ":", "data", "[", "\"h\"", "]", "=", "c_", "[", "gen", "[", "ipqh", ",", "QC1MAX", "]", "-", "gen", "[", "ipqh", ",", "QC2MAX", "]", ",", "gen", "[", "ipqh", ",", "PC2", "]", "-", "gen", "[", "ipqh", ",", "PC1", "]", "]", "ubpqh", "=", "data", "[", "\"h\"", "]", "[", ":", ",", "0", "]", "*", "gen", "[", "ipqh", ",", "PC1", "]", "+", "data", "[", "\"h\"", "]", "[", ":", ",", "1", "]", "*", "gen", "[", "ipqh", ",", "QC1MAX", "]", "for", "i", "in", "range", "(", "npqh", ")", ":", "tmp", "=", "linalg", ".", "norm", "(", "data", "[", "\"h\"", "]", "[", "i", ",", ":", "]", ")", "data", "[", "\"h\"", "]", "[", "i", ",", ":", "]", "=", "data", "[", "\"h\"", "]", "[", "i", ",", ":", "]", "/", "tmp", "ubpqh", "[", "i", "]", "=", "ubpqh", "[", "i", "]", "/", "tmp", "Apqh", "=", "sparse", "(", "(", "data", "[", "\"h\"", "]", ".", "flatten", "(", "'F'", ")", ",", "(", "r_", "[", "arange", "(", "npqh", ")", ",", "arange", "(", "npqh", ")", "]", ",", "r_", "[", "ipqh", ",", "ipqh", "+", "ng", "]", ")", ")", ",", "(", "npqh", ",", "2", "*", "ng", ")", ")", "ubpqh", "=", "ubpqh", "/", "baseMVA", "else", ":", "data", "[", "\"h\"", "]", "=", "array", "(", "[", "]", ")", "Apqh", "=", "zeros", "(", "(", "0", ",", "2", "*", "ng", ")", ")", "ubpqh", "=", "array", "(", "[", "]", ")", "## similarly Apql", "if", "npql", ">", "0", ":", "data", "[", "\"l\"", "]", "=", "c_", "[", "gen", "[", "ipql", ",", "QC2MIN", "]", "-", "gen", "[", "ipql", ",", "QC1MIN", "]", ",", "gen", "[", "ipql", ",", "PC1", "]", "-", "gen", "[", "ipql", ",", "PC2", "]", "]", "ubpql", "=", "data", "[", "\"l\"", "]", "[", ":", ",", "0", "]", "*", "gen", "[", "ipql", ",", "PC1", "]", "+", "data", "[", "\"l\"", "]", "[", ":", ",", "1", "]", "*", "gen", "[", "ipql", ",", "QC1MIN", "]", "for", "i", "in", "range", "(", "npql", ")", ":", "tmp", "=", "linalg", ".", "norm", "(", "data", "[", "\"l\"", "]", "[", "i", ",", ":", "]", ")", "data", "[", "\"l\"", "]", "[", "i", ",", ":", "]", "=", "data", "[", "\"l\"", "]", "[", "i", ",", ":", "]", "/", "tmp", "ubpql", "[", "i", "]", "=", "ubpql", "[", "i", "]", "/", "tmp", "Apql", "=", "sparse", "(", "(", "data", "[", "\"l\"", "]", ".", "flatten", "(", "'F'", ")", ",", "(", "r_", "[", "arange", "(", "npql", ")", ",", "arange", "(", "npql", ")", "]", ",", "r_", "[", "ipql", ",", "ipql", "+", "ng", "]", ")", ")", ",", "(", "npql", ",", "2", "*", "ng", ")", ")", "ubpql", "=", "ubpql", "/", "baseMVA", "else", ":", "data", "[", "\"l\"", "]", "=", "array", "(", "[", "]", ")", "Apql", "=", "zeros", "(", "(", "0", ",", "2", "*", "ng", ")", ")", "ubpql", "=", "array", "(", "[", "]", ")", "data", "[", "\"ipql\"", "]", "=", "ipql", "data", "[", 
"\"ipqh\"", "]", "=", "ipqh", "return", "Apqh", ",", "ubpqh", ",", "Apql", ",", "ubpql", ",", "data" ]
https://github.com/rwl/PYPOWER/blob/f5be0406aa54dcebded075de075454f99e2a46e6/pypower/makeApq.py#L17-L96
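Each row of data['h'] is the normal of the line through the corner points (Pc1, Qc1max) and (Pc2, Qc2max), so both corners satisfy the row's constraint with equality; a quick numpy check with toy numbers (not a PYPOWER case):

import numpy as np

# Toy corner points of an upper capability curve (values invented).
pc1, qc1max = 50.0, 40.0
pc2, qc2max = 100.0, 10.0

row = np.array([qc1max - qc2max, pc2 - pc1])   # [Qc1max-Qc2max, Pc2-Pc1], as in makeApq
ub = row @ np.array([pc1, qc1max])             # right-hand side of the constraint

print(np.isclose(row @ np.array([pc1, qc1max]), ub))  # True
print(np.isclose(row @ np.array([pc2, qc2max]), ub))  # True: same line through both corners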
SCons/scons
309f0234d1d9cc76955818be47c5c722f577dac6
SCons/Memoize.py
python
CountMethodCall
(fn)
Decorator for counting memoizer hits/misses while retrieving a simple value in a class method. It wraps the given method fn and uses a CountValue object to keep track of the caching statistics. Wrapping gets enabled by calling EnableMemoization().
Decorator for counting memoizer hits/misses while retrieving a simple value in a class method. It wraps the given method fn and uses a CountValue object to keep track of the caching statistics. Wrapping gets enabled by calling EnableMemoization().
[ "Decorator", "for", "counting", "memoizer", "hits", "/", "misses", "while", "retrieving", "a", "simple", "value", "in", "a", "class", "method", ".", "It", "wraps", "the", "given", "method", "fn", "and", "uses", "a", "CountValue", "object", "to", "keep", "track", "of", "the", "caching", "statistics", ".", "Wrapping", "gets", "enabled", "by", "calling", "EnableMemoization", "()", "." ]
def CountMethodCall(fn): """ Decorator for counting memoizer hits/misses while retrieving a simple value in a class method. It wraps the given method fn and uses a CountValue object to keep track of the caching statistics. Wrapping gets enabled by calling EnableMemoization(). """ if use_memoizer: def wrapper(self, *args, **kwargs): global CounterList key = self.__class__.__name__+'.'+fn.__name__ if key not in CounterList: CounterList[key] = CountValue(self.__class__.__name__, fn.__name__) CounterList[key].count(self, *args, **kwargs) return fn(self, *args, **kwargs) wrapper.__name__= fn.__name__ return wrapper else: return fn
[ "def", "CountMethodCall", "(", "fn", ")", ":", "if", "use_memoizer", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "global", "CounterList", "key", "=", "self", ".", "__class__", ".", "__name__", "+", "'.'", "+", "fn", ".", "__name__", "if", "key", "not", "in", "CounterList", ":", "CounterList", "[", "key", "]", "=", "CountValue", "(", "self", ".", "__class__", ".", "__name__", ",", "fn", ".", "__name__", ")", "CounterList", "[", "key", "]", ".", "count", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "fn", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "wrapper", ".", "__name__", "=", "fn", ".", "__name__", "return", "wrapper", "else", ":", "return", "fn" ]
https://github.com/SCons/scons/blob/309f0234d1d9cc76955818be47c5c722f577dac6/SCons/Memoize.py#L193-L211
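The decorator leans on module internals (use_memoizer, CountValue, CounterList), so it will not run standalone; the counting pattern itself, reduced to a plain dict, looks like this self-contained sketch:

call_counts = {}

def count_method_call(fn):
    # Same wrapping pattern as CountMethodCall, with a dict standing in
    # for SCons' CountValue bookkeeping (standalone sketch).
    def wrapper(self, *args, **kwargs):
        key = self.__class__.__name__ + '.' + fn.__name__
        call_counts[key] = call_counts.get(key, 0) + 1
        return fn(self, *args, **kwargs)
    wrapper.__name__ = fn.__name__
    return wrapper

class Node:
    @count_method_call
    def str_for_display(self):
        return '<node>'

n = Node()
n.str_for_display()
n.str_for_display()
print(call_counts)  # {'Node.str_for_display': 2}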
maas/maas
db2f89970c640758a51247c59bf1ec6f60cf4ab5
src/provisioningserver/drivers/pod/virsh.py
python
VirshSSH.poweron
(self, machine)
return True
Poweron a VM.
Poweron a VM.
[ "Poweron", "a", "VM", "." ]
def poweron(self, machine): """Poweron a VM.""" try: self.run(["start", machine]) except VirshError: return False return True
[ "def", "poweron", "(", "self", ",", "machine", ")", ":", "try", ":", "self", ".", "run", "(", "[", "\"start\"", ",", "machine", "]", ")", "except", "VirshError", ":", "return", "False", "return", "True" ]
https://github.com/maas/maas/blob/db2f89970c640758a51247c59bf1ec6f60cf4ab5/src/provisioningserver/drivers/pod/virsh.py#L845-L851
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/twisted/twisted/web/http.py
python
_ChunkedTransferDecoder.dataReceived
(self, data)
Interpret data from a request or response body which uses the I{chunked} Transfer-Encoding.
Interpret data from a request or response body which uses the I{chunked} Transfer-Encoding.
[ "Interpret", "data", "from", "a", "request", "or", "response", "body", "which", "uses", "the", "I", "{", "chunked", "}", "Transfer", "-", "Encoding", "." ]
def dataReceived(self, data): """ Interpret data from a request or response body which uses the I{chunked} Transfer-Encoding. """ data = self._buffer + data self._buffer = '' while data: if self.state == 'chunk-length': if '\r\n' in data: line, rest = data.split('\r\n', 1) parts = line.split(';') self.length = int(parts[0], 16) if self.length == 0: self.state = 'trailer' self.finish = True else: self.state = 'body' data = rest else: self._buffer = data data = '' elif self.state == 'trailer': if data.startswith('\r\n'): data = data[2:] if self.finish: self.state = 'finished' self.finishCallback(data) data = '' else: self.state = 'chunk-length' else: self._buffer = data data = '' elif self.state == 'body': if len(data) >= self.length: chunk, data = data[:self.length], data[self.length:] self.dataCallback(chunk) self.state = 'trailer' elif len(data) < self.length: self.length -= len(data) self.dataCallback(data) data = '' elif self.state == 'finished': raise RuntimeError( "_ChunkedTransferDecoder.dataReceived called after last " "chunk was processed")
[ "def", "dataReceived", "(", "self", ",", "data", ")", ":", "data", "=", "self", ".", "_buffer", "+", "data", "self", ".", "_buffer", "=", "''", "while", "data", ":", "if", "self", ".", "state", "==", "'chunk-length'", ":", "if", "'\\r\\n'", "in", "data", ":", "line", ",", "rest", "=", "data", ".", "split", "(", "'\\r\\n'", ",", "1", ")", "parts", "=", "line", ".", "split", "(", "';'", ")", "self", ".", "length", "=", "int", "(", "parts", "[", "0", "]", ",", "16", ")", "if", "self", ".", "length", "==", "0", ":", "self", ".", "state", "=", "'trailer'", "self", ".", "finish", "=", "True", "else", ":", "self", ".", "state", "=", "'body'", "data", "=", "rest", "else", ":", "self", ".", "_buffer", "=", "data", "data", "=", "''", "elif", "self", ".", "state", "==", "'trailer'", ":", "if", "data", ".", "startswith", "(", "'\\r\\n'", ")", ":", "data", "=", "data", "[", "2", ":", "]", "if", "self", ".", "finish", ":", "self", ".", "state", "=", "'finished'", "self", ".", "finishCallback", "(", "data", ")", "data", "=", "''", "else", ":", "self", ".", "state", "=", "'chunk-length'", "else", ":", "self", ".", "_buffer", "=", "data", "data", "=", "''", "elif", "self", ".", "state", "==", "'body'", ":", "if", "len", "(", "data", ")", ">=", "self", ".", "length", ":", "chunk", ",", "data", "=", "data", "[", ":", "self", ".", "length", "]", ",", "data", "[", "self", ".", "length", ":", "]", "self", ".", "dataCallback", "(", "chunk", ")", "self", ".", "state", "=", "'trailer'", "elif", "len", "(", "data", ")", "<", "self", ".", "length", ":", "self", ".", "length", "-=", "len", "(", "data", ")", "self", ".", "dataCallback", "(", "data", ")", "data", "=", "''", "elif", "self", ".", "state", "==", "'finished'", ":", "raise", "RuntimeError", "(", "\"_ChunkedTransferDecoder.dataReceived called after last \"", "\"chunk was processed\"", ")" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/web/http.py#L1422-L1468
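The three states (chunk-length, body, trailer) are easiest to see by parsing a complete chunked body directly; a Python 3 sketch of the same framing, independent of Twisted's callback wiring:

def parse_chunked(data: bytes) -> bytes:
    # Read a hex chunk-length line, then that many bytes plus CRLF,
    # until a zero-length chunk (sketch: whole body in hand, no trailer headers).
    body = b''
    while True:
        line, _, data = data.partition(b'\r\n')
        length = int(line.split(b';')[0], 16)  # extensions after ';' are ignored
        if length == 0:
            return body
        body += data[:length]
        data = data[length + 2:]               # skip chunk bytes and trailing CRLF

print(parse_chunked(b'4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n'))  # b'Wikipedia'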
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_clusterrole.py
python
Rule.add_resource
(self, inc_resource)
add a resource to the resources array
add a resource to the resources array
[ "add", "a", "resource", "to", "the", "resources", "array" ]
def add_resource(self, inc_resource): '''add an resource to the resources array''' self.resources.append(inc_resource)
[ "def", "add_resource", "(", "self", ",", "inc_resource", ")", ":", "self", ".", "resources", ".", "append", "(", "inc_resource", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_clusterrole.py#L1542-L1544
Tautulli/Tautulli
2410eb33805aaac4bd1c5dad0f71e4f15afaf742
lib/httpagentparser/__init__.py
python
YandexBot.getVersion
(self, agent, word)
return agent[agent.index('Yandex'):].split('/')[-1].split(')')[0].strip()
[]
def getVersion(self, agent, word): return agent[agent.index('Yandex'):].split('/')[-1].split(')')[0].strip()
[ "def", "getVersion", "(", "self", ",", "agent", ",", "word", ")", ":", "return", "agent", "[", "agent", ".", "index", "(", "'Yandex'", ")", ":", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ".", "split", "(", "')'", ")", "[", "0", "]", ".", "strip", "(", ")" ]
https://github.com/Tautulli/Tautulli/blob/2410eb33805aaac4bd1c5dad0f71e4f15afaf742/lib/httpagentparser/__init__.py#L289-L290
xonsh/xonsh
b76d6f994f22a4078f602f8b386f4ec280c8461f
xonsh/procs/proxies.py
python
FileThreadDispatcher.seek
(self, offset, whence=io.SEEK_SET)
return self.handle.seek(offset, whence)
Seeks the current file.
Seeks the current file.
[ "Seeks", "the", "current", "file", "." ]
def seek(self, offset, whence=io.SEEK_SET): """Seeks the current file.""" return self.handle.seek(offset, whence)
[ "def", "seek", "(", "self", ",", "offset", ",", "whence", "=", "io", ".", "SEEK_SET", ")", ":", "return", "self", ".", "handle", ".", "seek", "(", "offset", ",", "whence", ")" ]
https://github.com/xonsh/xonsh/blob/b76d6f994f22a4078f602f8b386f4ec280c8461f/xonsh/procs/proxies.py#L169-L171
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_service.py
python
Yedit.create
(self, path, value)
return (False, self.yaml_dict)
create a yaml file
create a yaml file
[ "create", "a", "yaml", "file" ]
def create(self, path, value): ''' create a yaml file ''' if not self.file_exists(): # deepcopy didn't work # Try to use ruamel.yaml and fallback to pyyaml try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) # set the format attributes if available try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if result is not None: self.yaml_dict = tmp_copy return (True, self.yaml_dict) return (False, self.yaml_dict)
[ "def", "create", "(", "self", ",", "path", ",", "value", ")", ":", "if", "not", "self", ".", "file_exists", "(", ")", ":", "# deepcopy didn't work", "# Try to use ruamel.yaml and fallback to pyyaml", "try", ":", "tmp_copy", "=", "yaml", ".", "load", "(", "yaml", ".", "round_trip_dump", "(", "self", ".", "yaml_dict", ",", "default_flow_style", "=", "False", ")", ",", "yaml", ".", "RoundTripLoader", ")", "except", "AttributeError", ":", "tmp_copy", "=", "copy", ".", "deepcopy", "(", "self", ".", "yaml_dict", ")", "# set the format attributes if available", "try", ":", "tmp_copy", ".", "fa", ".", "set_block_style", "(", ")", "except", "AttributeError", ":", "pass", "result", "=", "Yedit", ".", "add_entry", "(", "tmp_copy", ",", "path", ",", "value", ",", "self", ".", "separator", ")", "if", "result", "is", "not", "None", ":", "self", ".", "yaml_dict", "=", "tmp_copy", "return", "(", "True", ",", "self", ".", "yaml_dict", ")", "return", "(", "False", ",", "self", ".", "yaml_dict", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_service.py#L691-L714
cuthbertLab/music21
bd30d4663e52955ed922c10fdf541419d8c67671
music21/stream/iterator.py
python
OffsetIterator.reset
(self)
runs before iteration
runs before iteration
[ "runs", "before", "iteration" ]
def reset(self): ''' runs before iteration ''' super().reset() self.nextToYield = [] self.nextOffsetToYield = None self.raiseStopIterationNext = False
[ "def", "reset", "(", "self", ")", ":", "super", "(", ")", ".", "reset", "(", ")", "self", ".", "nextToYield", "=", "[", "]", "self", ".", "nextOffsetToYield", "=", "None", "self", ".", "raiseStopIterationNext", "=", "False" ]
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/stream/iterator.py#L1496-L1503
otsaloma/gaupol
6dec7826654d223c71a8d3279dcd967e95c46714
aeidon/markups/ssa.py
python
SubStationAlpha.bolden
(self, text, bounds=None)
return "".join((text[:a], target, text[z:]))
Return bolded `text`.
Return bolded `text`.
[ "Return", "bolded", "text", "." ]
def bolden(self, text, bounds=None): """Return bolded `text`.""" a, z = bounds or (0, len(text)) target = "{{\\b1}}{}{{\\b0}}".format(text[a:z]) return "".join((text[:a], target, text[z:]))
[ "def", "bolden", "(", "self", ",", "text", ",", "bounds", "=", "None", ")", ":", "a", ",", "z", "=", "bounds", "or", "(", "0", ",", "len", "(", "text", ")", ")", "target", "=", "\"{{\\\\b1}}{}{{\\\\b0}}\"", ".", "format", "(", "text", "[", "a", ":", "z", "]", ")", "return", "\"\"", ".", "join", "(", "(", "text", "[", ":", "a", "]", ",", "target", ",", "text", "[", "z", ":", "]", ")", ")" ]
https://github.com/otsaloma/gaupol/blob/6dec7826654d223c71a8d3279dcd967e95c46714/aeidon/markups/ssa.py#L58-L62
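The tag arithmetic is plain string splicing, demonstrated standalone here with the same logic outside the class:

# What bolden produces for a bounded span (standalone demonstration).
text = 'Hello world'
a, z = 6, 11                                   # bounds around 'world'
target = '{{\\b1}}{}{{\\b0}}'.format(text[a:z])
print(''.join((text[:a], target, text[z:])))   # Hello {\b1}world{\b0}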
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/artist.py
python
setp
(obj, *args, **kwargs)
return list(cbook.flatten(ret))
Set a property on an artist object. matplotlib supports the use of :func:`setp` ("set property") and :func:`getp` to set and get object properties, as well as to do introspection on the object. For example, to set the linestyle of a line to be dashed, you can do:: >>> line, = plot([1,2,3]) >>> setp(line, linestyle='--') If you want to know the valid types of arguments, you can provide the name of the property you want to set without a value:: >>> setp(line, 'linestyle') linestyle: [ '-' | '--' | '-.' | ':' | 'steps' | 'None' ] If you want to see all the properties that can be set, and their possible values, you can do:: >>> setp(line) ... long output listing omitted You may specify another output file to `setp` if `sys.stdout` is not acceptable for some reason using the `file` keyword-only argument:: >>> with fopen('output.log') as f: >>> setp(line, file=f) :func:`setp` operates on a single instance or an iterable of instances. If you are in query mode introspecting the possible values, only the first instance in the sequence is used. When actually setting values, all the instances will be set. e.g., suppose you have a list of two lines, the following will make both lines thicker and red:: >>> x = arange(0,1.0,0.01) >>> y1 = sin(2*pi*x) >>> y2 = sin(4*pi*x) >>> lines = plot(x, y1, x, y2) >>> setp(lines, linewidth=2, color='r') :func:`setp` works with the MATLAB style string/value pairs or with python kwargs. For example, the following are equivalent:: >>> setp(lines, 'linewidth', 2, 'color', 'r') # MATLAB style >>> setp(lines, linewidth=2, color='r') # python style
Set a property on an artist object.
[ "Set", "a", "property", "on", "an", "artist", "object", "." ]
def setp(obj, *args, **kwargs): """ Set a property on an artist object. matplotlib supports the use of :func:`setp` ("set property") and :func:`getp` to set and get object properties, as well as to do introspection on the object. For example, to set the linestyle of a line to be dashed, you can do:: >>> line, = plot([1,2,3]) >>> setp(line, linestyle='--') If you want to know the valid types of arguments, you can provide the name of the property you want to set without a value:: >>> setp(line, 'linestyle') linestyle: [ '-' | '--' | '-.' | ':' | 'steps' | 'None' ] If you want to see all the properties that can be set, and their possible values, you can do:: >>> setp(line) ... long output listing omitted You may specify another output file to `setp` if `sys.stdout` is not acceptable for some reason using the `file` keyword-only argument:: >>> with fopen('output.log') as f: >>> setp(line, file=f) :func:`setp` operates on a single instance or a iterable of instances. If you are in query mode introspecting the possible values, only the first instance in the sequence is used. When actually setting values, all the instances will be set. e.g., suppose you have a list of two lines, the following will make both lines thicker and red:: >>> x = arange(0,1.0,0.01) >>> y1 = sin(2*pi*x) >>> y2 = sin(4*pi*x) >>> lines = plot(x, y1, x, y2) >>> setp(lines, linewidth=2, color='r') :func:`setp` works with the MATLAB style string/value pairs or with python kwargs. For example, the following are equivalent:: >>> setp(lines, 'linewidth', 2, 'color', 'r') # MATLAB style >>> setp(lines, linewidth=2, color='r') # python style """ if isinstance(obj, Artist): objs = [obj] else: objs = list(cbook.flatten(obj)) if not objs: return insp = ArtistInspector(objs[0]) # file has to be popped before checking if kwargs is empty printArgs = {} if 'file' in kwargs: printArgs['file'] = kwargs.pop('file') if not kwargs and len(args) < 2: if args: print(insp.pprint_setters(prop=args[0]), **printArgs) else: print('\n'.join(insp.pprint_setters()), **printArgs) return if len(args) % 2: raise ValueError('The set args must be string, value pairs') # put args into ordereddict to maintain order funcvals = OrderedDict((k, v) for k, v in zip(args[::2], args[1::2])) ret = [o.update(funcvals) for o in objs] + [o.set(**kwargs) for o in objs] return list(cbook.flatten(ret))
[ "def", "setp", "(", "obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "obj", ",", "Artist", ")", ":", "objs", "=", "[", "obj", "]", "else", ":", "objs", "=", "list", "(", "cbook", ".", "flatten", "(", "obj", ")", ")", "if", "not", "objs", ":", "return", "insp", "=", "ArtistInspector", "(", "objs", "[", "0", "]", ")", "# file has to be popped before checking if kwargs is empty", "printArgs", "=", "{", "}", "if", "'file'", "in", "kwargs", ":", "printArgs", "[", "'file'", "]", "=", "kwargs", ".", "pop", "(", "'file'", ")", "if", "not", "kwargs", "and", "len", "(", "args", ")", "<", "2", ":", "if", "args", ":", "print", "(", "insp", ".", "pprint_setters", "(", "prop", "=", "args", "[", "0", "]", ")", ",", "*", "*", "printArgs", ")", "else", ":", "print", "(", "'\\n'", ".", "join", "(", "insp", ".", "pprint_setters", "(", ")", ")", ",", "*", "*", "printArgs", ")", "return", "if", "len", "(", "args", ")", "%", "2", ":", "raise", "ValueError", "(", "'The set args must be string, value pairs'", ")", "# put args into ordereddict to maintain order", "funcvals", "=", "OrderedDict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "zip", "(", "args", "[", ":", ":", "2", "]", ",", "args", "[", "1", ":", ":", "2", "]", ")", ")", "ret", "=", "[", "o", ".", "update", "(", "funcvals", ")", "for", "o", "in", "objs", "]", "+", "[", "o", ".", "set", "(", "*", "*", "kwargs", ")", "for", "o", "in", "objs", "]", "return", "list", "(", "cbook", ".", "flatten", "(", "ret", ")", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/artist.py#L1430-L1508
Kismuz/btgym
7fb3316e67f1d7a17c620630fb62fb29428b2cec
btgym/rendering/plotter.py
python
BTgymPlotter.__init__
(self, **kwargs)
pass
pass
[ "pass" ]
def __init__(self, **kwargs): """ pass """ super(BTgymPlotter, self).__init__(**kwargs)
[ "def", "__init__", "(", "self", ",", "*", "*", "kwargs", ")", ":", "super", "(", "BTgymPlotter", ",", "self", ")", ".", "__init__", "(", "*", "*", "kwargs", ")" ]
https://github.com/Kismuz/btgym/blob/7fb3316e67f1d7a17c620630fb62fb29428b2cec/btgym/rendering/plotter.py#L31-L35
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/logging/__init__.py
python
getLogger
(name=None)
Return a logger with the specified name, creating it if necessary. If no name is specified, return the root logger.
Return a logger with the specified name, creating it if necessary.
[ "Return", "a", "logger", "with", "the", "specified", "name", "creating", "it", "if", "necessary", "." ]
def getLogger(name=None): """ Return a logger with the specified name, creating it if necessary. If no name is specified, return the root logger. """ if name: return Logger.manager.getLogger(name) else: return root
[ "def", "getLogger", "(", "name", "=", "None", ")", ":", "if", "name", ":", "return", "Logger", ".", "manager", ".", "getLogger", "(", "name", ")", "else", ":", "return", "root" ]
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/logging/__init__.py#L1258-L1267
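The modern standard-library logging module keeps this exact contract, so usage can be shown directly:

import logging

root = logging.getLogger()          # no name -> the root logger
log = logging.getLogger('app.db')   # named loggers are created on demand
print(log is logging.getLogger('app.db'))  # True: the same instance is returned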
llSourcell/AI_Artist
3038c06c2e389b9c919c881c9a169efe2fd7810e
lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
python
LegacySpecifier._compare_equal
(self, prospective, spec)
return prospective == self._coerce_version(spec)
[]
def _compare_equal(self, prospective, spec): return prospective == self._coerce_version(spec)
[ "def", "_compare_equal", "(", "self", ",", "prospective", ",", "spec", ")", ":", "return", "prospective", "==", "self", ".", "_coerce_version", "(", "spec", ")" ]
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py#L247-L248
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/twisted/twisted/conch/client/knownhosts.py
python
_BaseEntry.matchesKey
(self, keyObject)
return self.publicKey == keyObject
Check to see if this entry matches a given key object. @type keyObject: L{Key} @rtype: bool
Check to see if this entry matches a given key object.
[ "Check", "to", "see", "if", "this", "entry", "matches", "a", "given", "key", "object", "." ]
def matchesKey(self, keyObject): """ Check to see if this entry matches a given key object. @type keyObject: L{Key} @rtype: bool """ return self.publicKey == keyObject
[ "def", "matchesKey", "(", "self", ",", "keyObject", ")", ":", "return", "self", ".", "publicKey", "==", "keyObject" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/conch/client/knownhosts.py#L85-L93
liqd/adhocracy
a143e7101f788f56c78e00bd30b2fe2e15bf3552
src/adhocracy/lib/base.py
python
BaseController.__call__
(self, environ, start_response)
Invoke the Controller
Invoke the Controller
[ "Invoke", "the", "Controller" ]
def __call__(self, environ, start_response): """Invoke the Controller""" c.body_css_classes = [] c.body_css_classes.append('controller-' + self.identifier) if self.identifier in ['proposals', 'milestones', 'norms', 'category', 'members']: c.active_subheader_nav = self.identifier c.body_css_classes.append('area-' + self.identifier) c.instance = model.instance_filter.get_instance() if c.instance is not None: c.body_css_classes.append(u'instance-%s' % c.instance.key) # setup a global variable to mark the current item in # the global navigation global_nav = 'instances' if c.instance is not None else 'home' c.active_global_nav = global_nav c.body_css_classes.append('global_nav_' + global_nav) user_id = environ.get('repoze.who.identity', {}).get('user', None) user = None # make sure we're not using a detached user object if user_id is not None: user = model.meta.Session.merge(user_id) if user and (user.banned or user.delete_time): user = None if user is not None: c.body_css_classes.append('logged_in') else: c.body_css_classes.append('not_logged_in') c.user = user c.active_controller = request.environ.get('pylons.routes_dict')\ .get('controller') c.debug = config.get_bool('debug') i18n.handle_request() if h.site.is_local_url(request.params.get(u'came_from', u'')): c.came_from = request.params.get(u'came_from', u'') monitor_page_time_interval = config.get_int( 'adhocracy.monitor_page_time_interval', -1) c.page_stats_url = h.base_url('/stats/on_page') if monitor_page_time_interval > 0: c.monitor_page_time_interval = monitor_page_time_interval if config.get_bool('adhocracy.monitor_external_links'): c.monitor_external_links_url = h.base_url('/stats/record_external') if config.get_bool('adhocracy.monitor_browser_values'): c.monitor_browser_values = "enabled" if config.get_bool('adhocracy.monitor_extended'): c.monitor_extended = "enabled" if config.get_bool('adhocracy.monitor_page_performance'): c.monitor_page_performance = "enabled" if config.get_bool('adhocracy.monitor_pager_clicks'): c.monitor_pager_clicks = "enabled" h.add_rss("%s News" % h.site.name(), h.base_url('/feed.rss', None)) if c.instance: h.add_rss("%s News" % c.instance.label, h.base_url('/instance/%s.rss' % c.instance.key)) h.add_meta("description", config.get( 'adhocracy.site.description', _(u"A liquid democracy platform for making decisions in " u"distributed, open groups by cooperatively creating " u"proposals and voting on them to establish their " u"support."))) h.add_meta("keywords", _("adhocracy, direct democracy, liquid democracy, liqd, " "democracy, wiki, voting,participation, group decisions, " "decisions, decision-making")) try: return WSGIController.__call__(self, environ, start_response) except Exception, e: log.exception(e) model.meta.Session.rollback() raise finally: if isinstance(model.meta.Session, ScopedSession): model.meta.Session.remove()
[ "def", "__call__", "(", "self", ",", "environ", ",", "start_response", ")", ":", "c", ".", "body_css_classes", "=", "[", "]", "c", ".", "body_css_classes", ".", "append", "(", "'controller-'", "+", "self", ".", "identifier", ")", "if", "self", ".", "identifier", "in", "[", "'proposals'", ",", "'milestones'", ",", "'norms'", ",", "'category'", ",", "'members'", "]", ":", "c", ".", "active_subheader_nav", "=", "self", ".", "identifier", "c", ".", "body_css_classes", ".", "append", "(", "'area-'", "+", "self", ".", "identifier", ")", "c", ".", "instance", "=", "model", ".", "instance_filter", ".", "get_instance", "(", ")", "if", "c", ".", "instance", "is", "not", "None", ":", "c", ".", "body_css_classes", ".", "append", "(", "u'instance-%s'", "%", "c", ".", "instance", ".", "key", ")", "# setup a global variable to mark the current item in", "# the global navigation", "global_nav", "=", "'instances'", "if", "c", ".", "instance", "is", "not", "None", "else", "'home'", "c", ".", "active_global_nav", "=", "global_nav", "c", ".", "body_css_classes", ".", "append", "(", "'global_nav_'", "+", "global_nav", ")", "user_id", "=", "environ", ".", "get", "(", "'repoze.who.identity'", ",", "{", "}", ")", ".", "get", "(", "'user'", ",", "None", ")", "user", "=", "None", "# make sure we're not using a detached user object", "if", "user_id", "is", "not", "None", ":", "user", "=", "model", ".", "meta", ".", "Session", ".", "merge", "(", "user_id", ")", "if", "user", "and", "(", "user", ".", "banned", "or", "user", ".", "delete_time", ")", ":", "user", "=", "None", "if", "user", "is", "not", "None", ":", "c", ".", "body_css_classes", ".", "append", "(", "'logged_in'", ")", "else", ":", "c", ".", "body_css_classes", ".", "append", "(", "'not_logged_in'", ")", "c", ".", "user", "=", "user", "c", ".", "active_controller", "=", "request", ".", "environ", ".", "get", "(", "'pylons.routes_dict'", ")", ".", "get", "(", "'controller'", ")", "c", ".", "debug", "=", "config", ".", "get_bool", "(", "'debug'", ")", "i18n", ".", "handle_request", "(", ")", "if", "h", ".", "site", ".", "is_local_url", "(", "request", ".", "params", ".", "get", "(", "u'came_from'", ",", "u''", ")", ")", ":", "c", ".", "came_from", "=", "request", ".", "params", ".", "get", "(", "u'came_from'", ",", "u''", ")", "monitor_page_time_interval", "=", "config", ".", "get_int", "(", "'adhocracy.monitor_page_time_interval'", ",", "-", "1", ")", "c", ".", "page_stats_url", "=", "h", ".", "base_url", "(", "'/stats/on_page'", ")", "if", "monitor_page_time_interval", ">", "0", ":", "c", ".", "monitor_page_time_interval", "=", "monitor_page_time_interval", "if", "config", ".", "get_bool", "(", "'adhocracy.monitor_external_links'", ")", ":", "c", ".", "monitor_external_links_url", "=", "h", ".", "base_url", "(", "'/stats/record_external'", ")", "if", "config", ".", "get_bool", "(", "'adhocracy.monitor_browser_values'", ")", ":", "c", ".", "monitor_browser_values", "=", "\"enabled\"", "if", "config", ".", "get_bool", "(", "'adhocracy.monitor_extended'", ")", ":", "c", ".", "monitor_extended", "=", "\"enabled\"", "if", "config", ".", "get_bool", "(", "'adhocracy.monitor_page_performance'", ")", ":", "c", ".", "monitor_page_performance", "=", "\"enabled\"", "if", "config", ".", "get_bool", "(", "'adhocracy.monitor_pager_clicks'", ")", ":", "c", ".", "monitor_pager_clicks", "=", "\"enabled\"", "h", ".", "add_rss", "(", "\"%s News\"", "%", "h", ".", "site", ".", "name", "(", ")", ",", "h", ".", "base_url", "(", "'/feed.rss'", ",", "None", ")", ")", "if", 
"c", ".", "instance", ":", "h", ".", "add_rss", "(", "\"%s News\"", "%", "c", ".", "instance", ".", "label", ",", "h", ".", "base_url", "(", "'/instance/%s.rss'", "%", "c", ".", "instance", ".", "key", ")", ")", "h", ".", "add_meta", "(", "\"description\"", ",", "config", ".", "get", "(", "'adhocracy.site.description'", ",", "_", "(", "u\"A liquid democracy platform for making decisions in \"", "u\"distributed, open groups by cooperatively creating \"", "u\"proposals and voting on them to establish their \"", "u\"support.\"", ")", ")", ")", "h", ".", "add_meta", "(", "\"keywords\"", ",", "_", "(", "\"adhocracy, direct democracy, liquid democracy, liqd, \"", "\"democracy, wiki, voting,participation, group decisions, \"", "\"decisions, decision-making\"", ")", ")", "try", ":", "return", "WSGIController", ".", "__call__", "(", "self", ",", "environ", ",", "start_response", ")", "except", "Exception", ",", "e", ":", "log", ".", "exception", "(", "e", ")", "model", ".", "meta", ".", "Session", ".", "rollback", "(", ")", "raise", "finally", ":", "if", "isinstance", "(", "model", ".", "meta", ".", "Session", ",", "ScopedSession", ")", ":", "model", ".", "meta", ".", "Session", ".", "remove", "(", ")" ]
https://github.com/liqd/adhocracy/blob/a143e7101f788f56c78e00bd30b2fe2e15bf3552/src/adhocracy/lib/base.py#L27-L111
1040003585/WebScrapingWithPython
a770fa5b03894076c8c9539b1ffff34424ffc016
ResourceCode/wswp-places-c573d29efa3a/web2py/gluon/contrib/pg8000/core.py
python
Cursor.fetchall
(self)
Fetches all remaining rows of a query result. This method is part of the `DBAPI 2.0 specification <http://www.python.org/dev/peps/pep-0249/>`_. :returns: A sequence, each entry of which is a sequence of field values making up a row.
Fetches all remaining rows of a query result.
[ "Fetches", "all", "remaining", "rows", "of", "a", "query", "result", "." ]
def fetchall(self): """Fetches all remaining rows of a query result. This method is part of the `DBAPI 2.0 specification <http://www.python.org/dev/peps/pep-0249/>`_. :returns: A sequence, each entry of which is a sequence of field values making up a row. """ try: return tuple(self) except TypeError: raise ProgrammingError("attempting to use unexecuted cursor")
[ "def", "fetchall", "(", "self", ")", ":", "try", ":", "return", "tuple", "(", "self", ")", "except", "TypeError", ":", "raise", "ProgrammingError", "(", "\"attempting to use unexecuted cursor\"", ")" ]
https://github.com/1040003585/WebScrapingWithPython/blob/a770fa5b03894076c8c9539b1ffff34424ffc016/ResourceCode/wswp-places-c573d29efa3a/web2py/gluon/contrib/pg8000/core.py#L647-L661
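Since `fetchall` follows DB-API 2.0, any compliant driver behaves the same way; sqlite3 stands in for pg8000 below so the sketch runs without a PostgreSQL server. Table and rows are illustrative.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE t (id INTEGER, name TEXT)")
cur.executemany("INSERT INTO t VALUES (?, ?)", [(1, "a"), (2, "b")])
cur.execute("SELECT id, name FROM t ORDER BY id")
print(cur.fetchall())  # [(1, 'a'), (2, 'b')] -- all remaining rows of the result
conn.close()
```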
iclavera/learning_to_adapt
bd7d99ba402521c96631e7d09714128f549db0f1
learning_to_adapt/mujoco_py/glfw.py
python
get_monitor_physical_size
(monitor)
return width_value.value, height_value.value
Returns the physical size of the monitor. Wrapper for: void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
Returns the physical size of the monitor.
[ "Returns", "the", "physical", "size", "of", "the", "monitor", "." ]
def get_monitor_physical_size(monitor): ''' Returns the physical size of the monitor. Wrapper for: void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height); ''' width_value = ctypes.c_int(0) width = ctypes.pointer(width_value) height_value = ctypes.c_int(0) height = ctypes.pointer(height_value) _glfw.glfwGetMonitorPhysicalSize(monitor, width, height) return width_value.value, height_value.value
[ "def", "get_monitor_physical_size", "(", "monitor", ")", ":", "width_value", "=", "ctypes", ".", "c_int", "(", "0", ")", "width", "=", "ctypes", ".", "pointer", "(", "width_value", ")", "height_value", "=", "ctypes", ".", "c_int", "(", "0", ")", "height", "=", "ctypes", ".", "pointer", "(", "height_value", ")", "_glfw", ".", "glfwGetMonitorPhysicalSize", "(", "monitor", ",", "width", ",", "height", ")", "return", "width_value", ".", "value", ",", "height_value", ".", "value" ]
https://github.com/iclavera/learning_to_adapt/blob/bd7d99ba402521c96631e7d09714128f549db0f1/learning_to_adapt/mujoco_py/glfw.py#L642-L654
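A hedged sketch against the standalone `glfw` package (`pip install glfw`), which exposes the same wrapper signature as the vendored module above; it needs a real display, hence the guards:

```python
import glfw

if glfw.init():
    monitor = glfw.get_primary_monitor()
    if monitor:
        width_mm, height_mm = glfw.get_monitor_physical_size(monitor)
        print(f"primary monitor: {width_mm} x {height_mm} mm")
    glfw.terminate()
```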
Arelle/Arelle
20f3d8a8afd41668e1520799acd333349ce0ba17
arelle/ModelXbrl.py
python
ModelXbrl.prefixedNamespaces
(self)
return prefixedNamespaces
Dict of prefixes for namespaces defined in DTS
Dict of prefixes for namespaces defined in DTS
[ "Dict", "of", "prefixes", "for", "namespaces", "defined", "in", "DTS" ]
def prefixedNamespaces(self): """Dict of prefixes for namespaces defined in DTS """ prefixedNamespaces = {} for nsDocs in self.namespaceDocs.values(): for nsDoc in nsDocs: ns = nsDoc.targetNamespace if ns: prefix = XmlUtil.xmlnsprefix(nsDoc.xmlRootElement, ns) if prefix and prefix not in prefixedNamespaces: prefixedNamespaces[prefix] = ns return prefixedNamespaces
[ "def", "prefixedNamespaces", "(", "self", ")", ":", "prefixedNamespaces", "=", "{", "}", "for", "nsDocs", "in", "self", ".", "namespaceDocs", ".", "values", "(", ")", ":", "for", "nsDoc", "in", "nsDocs", ":", "ns", "=", "nsDoc", ".", "targetNamespace", "if", "ns", ":", "prefix", "=", "XmlUtil", ".", "xmlnsprefix", "(", "nsDoc", ".", "xmlRootElement", ",", "ns", ")", "if", "prefix", "and", "prefix", "not", "in", "prefixedNamespaces", ":", "prefixedNamespaces", "[", "prefix", "]", "=", "ns", "return", "prefixedNamespaces" ]
https://github.com/Arelle/Arelle/blob/20f3d8a8afd41668e1520799acd333349ce0ba17/arelle/ModelXbrl.py#L505-L516
deepgully/me
f7ad65edc2fe435310c6676bc2e322cfe5d4c8f0
libs/mako/runtime.py
python
Namespace.get_template
(self, uri)
return _lookup_template(self.context, uri, self._templateuri)
Return a :class:`.Template` from the given ``uri``. The ``uri`` resolution is relative to the ``uri`` of this :class:`.Namespace` object's :class:`.Template`.
Return a :class:`.Template` from the given ``uri``.
[ "Return", "a", ":", "class", ":", ".", "Template", "from", "the", "given", "uri", "." ]
def get_template(self, uri): """Return a :class:`.Template` from the given ``uri``. The ``uri`` resolution is relative to the ``uri`` of this :class:`.Namespace` object's :class:`.Template`. """ return _lookup_template(self.context, uri, self._templateuri)
[ "def", "get_template", "(", "self", ",", "uri", ")", ":", "return", "_lookup_template", "(", "self", ".", "context", ",", "uri", ",", "self", ".", "_templateuri", ")" ]
https://github.com/deepgully/me/blob/f7ad65edc2fe435310c6676bc2e322cfe5d4c8f0/libs/mako/runtime.py#L433-L440
janpipek/physt
e7bce911532fac5f96e4e2d54881152e7e668a41
physt/plotting/matplotlib.py
python
_add_colorbar
( ax: Axes, cmap: colors.Colormap, cmap_data: np.ndarray, norm: colors.Normalize )
Show a colorbar right of the plot.
Show a colorbar right of the plot.
[ "Show", "a", "colorbar", "right", "of", "the", "plot", "." ]
def _add_colorbar( ax: Axes, cmap: colors.Colormap, cmap_data: np.ndarray, norm: colors.Normalize ) -> None: """Show a colorbar right of the plot.""" fig = ax.get_figure() mappable = cm.ScalarMappable(cmap=cmap, norm=norm) mappable.set_array(cmap_data) # TODO: Or what??? fig.colorbar(mappable, ax=ax)
[ "def", "_add_colorbar", "(", "ax", ":", "Axes", ",", "cmap", ":", "colors", ".", "Colormap", ",", "cmap_data", ":", "np", ".", "ndarray", ",", "norm", ":", "colors", ".", "Normalize", ")", "->", "None", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "mappable", "=", "cm", ".", "ScalarMappable", "(", "cmap", "=", "cmap", ",", "norm", "=", "norm", ")", "mappable", ".", "set_array", "(", "cmap_data", ")", "# TODO: Or what???", "fig", ".", "colorbar", "(", "mappable", ",", "ax", "=", "ax", ")" ]
https://github.com/janpipek/physt/blob/e7bce911532fac5f96e4e2d54881152e7e668a41/physt/plotting/matplotlib.py#L957-L964
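The same ScalarMappable-plus-colorbar pattern as a self-contained matplotlib sketch; the random data is illustrative.

```python
import numpy as np
import matplotlib
matplotlib.use("Agg")  # headless backend
from matplotlib import cm, colors
import matplotlib.pyplot as plt

data = np.random.rand(10, 10)
fig, ax = plt.subplots()
ax.imshow(data, cmap="viridis")
norm = colors.Normalize(vmin=float(data.min()), vmax=float(data.max()))
mappable = cm.ScalarMappable(cmap="viridis", norm=norm)
mappable.set_array(data)
fig.colorbar(mappable, ax=ax)  # colorbar drawn to the right of the axes
```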
materialsproject/pymatgen
8128f3062a334a2edd240e4062b5b9bdd1ae6f58
pymatgen/io/vasp/outputs.py
python
Outcar.read_lepsilon
(self)
Reads an LEPSILON run. # TODO: Document the actual variables.
Reads an LEPSILON run.
[ "Reads", "an", "LEPSILON", "run", "." ]
def read_lepsilon(self): """ Reads an LEPSILON run. # TODO: Document the actual variables. """ try: search = [] def dielectric_section_start(results, match): results.dielectric_index = -1 search.append( [ r"MACROSCOPIC STATIC DIELECTRIC TENSOR \(", None, dielectric_section_start, ] ) def dielectric_section_start2(results, match): results.dielectric_index = 0 search.append( [ r"-------------------------------------", lambda results, line: results.dielectric_index == -1, dielectric_section_start2, ] ) def dielectric_data(results, match): results.dielectric_tensor[results.dielectric_index, :] = np.array( [float(match.group(i)) for i in range(1, 4)] ) results.dielectric_index += 1 search.append( [ r"^ *([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) *$", lambda results, line: results.dielectric_index >= 0 if results.dielectric_index is not None else None, dielectric_data, ] ) def dielectric_section_stop(results, match): results.dielectric_index = None search.append( [ r"-------------------------------------", lambda results, line: results.dielectric_index >= 1 if results.dielectric_index is not None else None, dielectric_section_stop, ] ) self.dielectric_index = None self.dielectric_tensor = np.zeros((3, 3)) def piezo_section_start(results, match): results.piezo_index = 0 search.append( [ r"PIEZOELECTRIC TENSOR for field in x, y, z " r"\(C/m\^2\)", None, piezo_section_start, ] ) def piezo_data(results, match): results.piezo_tensor[results.piezo_index, :] = np.array([float(match.group(i)) for i in range(1, 7)]) results.piezo_index += 1 search.append( [ r"^ *[xyz] +([-0-9.Ee+]+) +([-0-9.Ee+]+)" + r" +([-0-9.Ee+]+) *([-0-9.Ee+]+) +([-0-9.Ee+]+)" + r" +([-0-9.Ee+]+)*$", lambda results, line: results.piezo_index >= 0 if results.piezo_index is not None else None, piezo_data, ] ) def piezo_section_stop(results, match): results.piezo_index = None search.append( [ r"-------------------------------------", lambda results, line: results.piezo_index >= 1 if results.piezo_index is not None else None, piezo_section_stop, ] ) self.piezo_index = None self.piezo_tensor = np.zeros((3, 6)) def born_section_start(results, match): results.born_ion = -1 search.append([r"BORN EFFECTIVE CHARGES ", None, born_section_start]) def born_ion(results, match): results.born_ion = int(match.group(1)) - 1 results.born.append(np.zeros((3, 3))) search.append( [ r"ion +([0-9]+)", lambda results, line: results.born_ion is not None, born_ion, ] ) def born_data(results, match): results.born[results.born_ion][int(match.group(1)) - 1, :] = np.array( [float(match.group(i)) for i in range(2, 5)] ) search.append( [ r"^ *([1-3]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+)$", lambda results, line: results.born_ion >= 0 if results.born_ion is not None else results.born_ion, born_data, ] ) def born_section_stop(results, match): results.born_ion = None search.append( [ r"-------------------------------------", lambda results, line: results.born_ion >= 1 if results.born_ion is not None else results.born_ion, born_section_stop, ] ) self.born_ion = None self.born = [] micro_pyawk(self.filename, search, self) self.born = np.array(self.born) self.dielectric_tensor = self.dielectric_tensor.tolist() self.piezo_tensor = self.piezo_tensor.tolist() except Exception: raise Exception("LEPSILON OUTCAR could not be parsed.")
[ "def", "read_lepsilon", "(", "self", ")", ":", "try", ":", "search", "=", "[", "]", "def", "dielectric_section_start", "(", "results", ",", "match", ")", ":", "results", ".", "dielectric_index", "=", "-", "1", "search", ".", "append", "(", "[", "r\"MACROSCOPIC STATIC DIELECTRIC TENSOR \\(\"", ",", "None", ",", "dielectric_section_start", ",", "]", ")", "def", "dielectric_section_start2", "(", "results", ",", "match", ")", ":", "results", ".", "dielectric_index", "=", "0", "search", ".", "append", "(", "[", "r\"-------------------------------------\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "dielectric_index", "==", "-", "1", ",", "dielectric_section_start2", ",", "]", ")", "def", "dielectric_data", "(", "results", ",", "match", ")", ":", "results", ".", "dielectric_tensor", "[", "results", ".", "dielectric_index", ",", ":", "]", "=", "np", ".", "array", "(", "[", "float", "(", "match", ".", "group", "(", "i", ")", ")", "for", "i", "in", "range", "(", "1", ",", "4", ")", "]", ")", "results", ".", "dielectric_index", "+=", "1", "search", ".", "append", "(", "[", "r\"^ *([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) *$\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "dielectric_index", ">=", "0", "if", "results", ".", "dielectric_index", "is", "not", "None", "else", "None", ",", "dielectric_data", ",", "]", ")", "def", "dielectric_section_stop", "(", "results", ",", "match", ")", ":", "results", ".", "dielectric_index", "=", "None", "search", ".", "append", "(", "[", "r\"-------------------------------------\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "dielectric_index", ">=", "1", "if", "results", ".", "dielectric_index", "is", "not", "None", "else", "None", ",", "dielectric_section_stop", ",", "]", ")", "self", ".", "dielectric_index", "=", "None", "self", ".", "dielectric_tensor", "=", "np", ".", "zeros", "(", "(", "3", ",", "3", ")", ")", "def", "piezo_section_start", "(", "results", ",", "match", ")", ":", "results", ".", "piezo_index", "=", "0", "search", ".", "append", "(", "[", "r\"PIEZOELECTRIC TENSOR for field in x, y, z \"", "r\"\\(C/m\\^2\\)\"", ",", "None", ",", "piezo_section_start", ",", "]", ")", "def", "piezo_data", "(", "results", ",", "match", ")", ":", "results", ".", "piezo_tensor", "[", "results", ".", "piezo_index", ",", ":", "]", "=", "np", ".", "array", "(", "[", "float", "(", "match", ".", "group", "(", "i", ")", ")", "for", "i", "in", "range", "(", "1", ",", "7", ")", "]", ")", "results", ".", "piezo_index", "+=", "1", "search", ".", "append", "(", "[", "r\"^ *[xyz] +([-0-9.Ee+]+) +([-0-9.Ee+]+)\"", "+", "r\" +([-0-9.Ee+]+) *([-0-9.Ee+]+) +([-0-9.Ee+]+)\"", "+", "r\" +([-0-9.Ee+]+)*$\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "piezo_index", ">=", "0", "if", "results", ".", "piezo_index", "is", "not", "None", "else", "None", ",", "piezo_data", ",", "]", ")", "def", "piezo_section_stop", "(", "results", ",", "match", ")", ":", "results", ".", "piezo_index", "=", "None", "search", ".", "append", "(", "[", "r\"-------------------------------------\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "piezo_index", ">=", "1", "if", "results", ".", "piezo_index", "is", "not", "None", "else", "None", ",", "piezo_section_stop", ",", "]", ")", "self", ".", "piezo_index", "=", "None", "self", ".", "piezo_tensor", "=", "np", ".", "zeros", "(", "(", "3", ",", "6", ")", ")", "def", "born_section_start", "(", "results", ",", "match", ")", ":", "results", ".", "born_ion", "=", "-", "1", 
"search", ".", "append", "(", "[", "r\"BORN EFFECTIVE CHARGES \"", ",", "None", ",", "born_section_start", "]", ")", "def", "born_ion", "(", "results", ",", "match", ")", ":", "results", ".", "born_ion", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "-", "1", "results", ".", "born", ".", "append", "(", "np", ".", "zeros", "(", "(", "3", ",", "3", ")", ")", ")", "search", ".", "append", "(", "[", "r\"ion +([0-9]+)\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "born_ion", "is", "not", "None", ",", "born_ion", ",", "]", ")", "def", "born_data", "(", "results", ",", "match", ")", ":", "results", ".", "born", "[", "results", ".", "born_ion", "]", "[", "int", "(", "match", ".", "group", "(", "1", ")", ")", "-", "1", ",", ":", "]", "=", "np", ".", "array", "(", "[", "float", "(", "match", ".", "group", "(", "i", ")", ")", "for", "i", "in", "range", "(", "2", ",", "5", ")", "]", ")", "search", ".", "append", "(", "[", "r\"^ *([1-3]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+)$\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "born_ion", ">=", "0", "if", "results", ".", "born_ion", "is", "not", "None", "else", "results", ".", "born_ion", ",", "born_data", ",", "]", ")", "def", "born_section_stop", "(", "results", ",", "match", ")", ":", "results", ".", "born_ion", "=", "None", "search", ".", "append", "(", "[", "r\"-------------------------------------\"", ",", "lambda", "results", ",", "line", ":", "results", ".", "born_ion", ">=", "1", "if", "results", ".", "born_ion", "is", "not", "None", "else", "results", ".", "born_ion", ",", "born_section_stop", ",", "]", ")", "self", ".", "born_ion", "=", "None", "self", ".", "born", "=", "[", "]", "micro_pyawk", "(", "self", ".", "filename", ",", "search", ",", "self", ")", "self", ".", "born", "=", "np", ".", "array", "(", "self", ".", "born", ")", "self", ".", "dielectric_tensor", "=", "self", ".", "dielectric_tensor", ".", "tolist", "(", ")", "self", ".", "piezo_tensor", "=", "self", ".", "piezo_tensor", ".", "tolist", "(", ")", "except", "Exception", ":", "raise", "Exception", "(", "\"LEPSILON OUTCAR could not be parsed.\"", ")" ]
https://github.com/materialsproject/pymatgen/blob/8128f3062a334a2edd240e4062b5b9bdd1ae6f58/pymatgen/io/vasp/outputs.py#L2749-L2903
DeepPavlov/convai
54d921f99606960941ece4865a396925dfc264f4
2017/solutions/kaib/ParlAI/parlai/agents/seq2seq_v2/beam.py
python
Beam.advance_end
(self, word_lk)
return self.done
Advance the beam. Until each beam meets __eos__ Do not generate __unk__
Advance the beam. Until each beam meets __eos__ Do not generate __unk__
[ "Advance", "the", "beam", ".", "Until", "each", "beam", "meets", "__eos__", "Do", "not", "generate", "__unk__" ]
def advance_end(self, word_lk): """Advance the beam. Until each beam meets __eos__ Do not generate __unk__ """ num_words = word_lk.size(1) if debug: print("score mask") print(self.score_mask) # Sum the previous scores. if len(self.prevKs) > 0: beam_lk = self.score_mask.unsqueeze(1).expand_as(word_lk)*word_lk + self.scores.unsqueeze(1).expand_as(word_lk) beam_lk = beam_lk.index_select(0, self.active_idx) else: beam_lk = word_lk[0] # Avoid generating UNK token if not self.gen_unk: if beam_lk.dim() == 1: beam_lk[self.unk] = -100000 else: beam_lk[:, self.unk] = -100000 ## self.score_mask --> exclude the row ## and sorting flat_beam_lk = beam_lk.view(-1) bestScores, bestScoresId = flat_beam_lk.topk(len(self.active_idx_list), 0, True, True) ## self.size self.scores.scatter_(0, self.active_idx, bestScores) # bestScoresId is flattened beam x word array, so calculate which # word and beam each score came from prev_k = bestScoresId / num_words next_ys = bestScoresId - prev_k * num_words if self.tt == torch.cuda: prev_k1 = torch.arange(0,self.size).long().cuda().scatter_(0, self.active_idx, self.active_idx[prev_k]) else: prev_k1 = torch.arange(0,self.size).long().scatter_(0, self.active_idx, self.active_idx[prev_k]) next_ys1 = self.tt.LongTensor(self.size).fill_(self.pad).scatter_(0, self.active_idx, next_ys) self.prevKs.append(prev_k1) # trasform prev_k => original index self.nextYs.append(next_ys1) # mask done = True for i in range(self.size): if self.nextYs[-1][i] == self.eos: self.doneYs[i] = True self.score_mask[i] = 0 self.active_idx_list.remove(i) if debug: pdb.set_trace() print(i) print(self.active_idx_list) done *= self.doneYs[i] self.active_idx = self.tt.LongTensor(self.active_idx_list) self.done = done return self.done
[ "def", "advance_end", "(", "self", ",", "word_lk", ")", ":", "num_words", "=", "word_lk", ".", "size", "(", "1", ")", "if", "debug", ":", "print", "(", "\"score mask\"", ")", "print", "(", "self", ".", "score_mask", ")", "# Sum the previous scores.", "if", "len", "(", "self", ".", "prevKs", ")", ">", "0", ":", "beam_lk", "=", "self", ".", "score_mask", ".", "unsqueeze", "(", "1", ")", ".", "expand_as", "(", "word_lk", ")", "*", "word_lk", "+", "self", ".", "scores", ".", "unsqueeze", "(", "1", ")", ".", "expand_as", "(", "word_lk", ")", "beam_lk", "=", "beam_lk", ".", "index_select", "(", "0", ",", "self", ".", "active_idx", ")", "else", ":", "beam_lk", "=", "word_lk", "[", "0", "]", "# Avoid generating UNK token", "if", "not", "self", ".", "gen_unk", ":", "if", "beam_lk", ".", "dim", "(", ")", "==", "1", ":", "beam_lk", "[", "self", ".", "unk", "]", "=", "-", "100000", "else", ":", "beam_lk", "[", ":", ",", "self", ".", "unk", "]", "=", "-", "100000", "## self.score_mask --> exclude the row", "## and sorting ", "flat_beam_lk", "=", "beam_lk", ".", "view", "(", "-", "1", ")", "bestScores", ",", "bestScoresId", "=", "flat_beam_lk", ".", "topk", "(", "len", "(", "self", ".", "active_idx_list", ")", ",", "0", ",", "True", ",", "True", ")", "## self.size", "self", ".", "scores", ".", "scatter_", "(", "0", ",", "self", ".", "active_idx", ",", "bestScores", ")", "# bestScoresId is flattened beam x word array, so calculate which", "# word and beam each score came from", "prev_k", "=", "bestScoresId", "/", "num_words", "next_ys", "=", "bestScoresId", "-", "prev_k", "*", "num_words", "if", "self", ".", "tt", "==", "torch", ".", "cuda", ":", "prev_k1", "=", "torch", ".", "arange", "(", "0", ",", "self", ".", "size", ")", ".", "long", "(", ")", ".", "cuda", "(", ")", ".", "scatter_", "(", "0", ",", "self", ".", "active_idx", ",", "self", ".", "active_idx", "[", "prev_k", "]", ")", "else", ":", "prev_k1", "=", "torch", ".", "arange", "(", "0", ",", "self", ".", "size", ")", ".", "long", "(", ")", ".", "scatter_", "(", "0", ",", "self", ".", "active_idx", ",", "self", ".", "active_idx", "[", "prev_k", "]", ")", "next_ys1", "=", "self", ".", "tt", ".", "LongTensor", "(", "self", ".", "size", ")", ".", "fill_", "(", "self", ".", "pad", ")", ".", "scatter_", "(", "0", ",", "self", ".", "active_idx", ",", "next_ys", ")", "self", ".", "prevKs", ".", "append", "(", "prev_k1", ")", "# trasform prev_k => original index", "self", ".", "nextYs", ".", "append", "(", "next_ys1", ")", "# mask", "done", "=", "True", "for", "i", "in", "range", "(", "self", ".", "size", ")", ":", "if", "self", ".", "nextYs", "[", "-", "1", "]", "[", "i", "]", "==", "self", ".", "eos", ":", "self", ".", "doneYs", "[", "i", "]", "=", "True", "self", ".", "score_mask", "[", "i", "]", "=", "0", "self", ".", "active_idx_list", ".", "remove", "(", "i", ")", "if", "debug", ":", "pdb", ".", "set_trace", "(", ")", "print", "(", "i", ")", "print", "(", "self", ".", "active_idx_list", ")", "done", "*=", "self", ".", "doneYs", "[", "i", "]", "self", ".", "active_idx", "=", "self", ".", "tt", ".", "LongTensor", "(", "self", ".", "active_idx_list", ")", "self", ".", "done", "=", "done", "return", "self", ".", "done" ]
https://github.com/DeepPavlov/convai/blob/54d921f99606960941ece4865a396925dfc264f4/2017/solutions/kaib/ParlAI/parlai/agents/seq2seq_v2/beam.py#L107-L167
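The core bookkeeping in `advance_end` is decoding a flattened top-k over a (beam x vocab) score matrix back into (previous beam, chosen word). A worked PyTorch sketch of just that step; sizes are illustrative, and `//` replaces the Python 2-era `/` integer division used above:

```python
import torch

beam_size, num_words = 3, 5
scores = torch.randn(beam_size, num_words)
best_scores, best_ids = scores.view(-1).topk(beam_size)
prev_k = best_ids // num_words           # which beam each winner extends
next_ys = best_ids - prev_k * num_words  # which vocabulary index was picked
print(prev_k.tolist(), next_ys.tolist())
```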
HypothesisWorks/hypothesis
d1bfc4acc86899caa7a40f892322e1a69fbf36f4
hypothesis-python/src/hypothesis/extra/ghostwriter.py
python
_get_params
(func: Callable)
return OrderedDict((p.name, p) for p in params if p.kind not in var_param_kinds)
Get non-vararg parameters of `func` as an ordered dict.
Get non-vararg parameters of `func` as an ordered dict.
[ "Get", "non", "-", "vararg", "parameters", "of", "func", "as", "an", "ordered", "dict", "." ]
def _get_params(func: Callable) -> Dict[str, inspect.Parameter]: """Get non-vararg parameters of `func` as an ordered dict.""" var_param_kinds = (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD) try: params = list(inspect.signature(func).parameters.values()) except Exception: if ( isinstance(func, (types.BuiltinFunctionType, types.BuiltinMethodType)) and hasattr(func, "__doc__") and isinstance(func.__doc__, str) ): # inspect.signature doesn't work on all builtin functions or methods. # In such cases, we can try to reconstruct simple signatures from the docstring. match = re.match(rf"^{func.__name__}\((.+?)\)", func.__doc__) if match is None: raise args = match.group(1).replace("[", "").replace("]", "") params = [] # Even if the signature doesn't contain a /, we assume that arguments # are positional-only until shown otherwise - the / is often omitted. kind: inspect._ParameterKind = inspect.Parameter.POSITIONAL_ONLY for arg in args.split(", "): arg, *_ = arg.partition("=") if arg == "/": kind = inspect.Parameter.POSITIONAL_OR_KEYWORD continue if arg.startswith("*"): kind = inspect.Parameter.KEYWORD_ONLY continue # we omit *varargs, if there are any if arg.startswith("**"): break # and likewise omit **varkw params.append(inspect.Parameter(name=arg, kind=kind)) elif _is_probably_ufunc(func): # `inspect.signature` doesn't work on ufunc objects, but we can work out # what the required parameters would look like if it did. # Note that we use args named a, b, c... to match the `operator` module, # rather than x1, x2, x3... like the Numpy docs. Because they're pos-only # this doesn't make a runtime difference, and it's much nicer for use-cases # like `equivalent(numpy.add, operator.add)`. params = [ inspect.Parameter(name=name, kind=inspect.Parameter.POSITIONAL_ONLY) for name in ascii_lowercase[: func.nin] # type: ignore ] else: # If we haven't managed to recover a signature through the tricks above, # we're out of ideas and should just re-raise the exception. raise return OrderedDict((p.name, p) for p in params if p.kind not in var_param_kinds)
[ "def", "_get_params", "(", "func", ":", "Callable", ")", "->", "Dict", "[", "str", ",", "inspect", ".", "Parameter", "]", ":", "var_param_kinds", "=", "(", "inspect", ".", "Parameter", ".", "VAR_POSITIONAL", ",", "inspect", ".", "Parameter", ".", "VAR_KEYWORD", ")", "try", ":", "params", "=", "list", "(", "inspect", ".", "signature", "(", "func", ")", ".", "parameters", ".", "values", "(", ")", ")", "except", "Exception", ":", "if", "(", "isinstance", "(", "func", ",", "(", "types", ".", "BuiltinFunctionType", ",", "types", ".", "BuiltinMethodType", ")", ")", "and", "hasattr", "(", "func", ",", "\"__doc__\"", ")", "and", "isinstance", "(", "func", ".", "__doc__", ",", "str", ")", ")", ":", "# inspect.signature doesn't work on all builtin functions or methods.", "# In such cases, we can try to reconstruct simple signatures from the docstring.", "match", "=", "re", ".", "match", "(", "rf\"^{func.__name__}\\((.+?)\\)\"", ",", "func", ".", "__doc__", ")", "if", "match", "is", "None", ":", "raise", "args", "=", "match", ".", "group", "(", "1", ")", ".", "replace", "(", "\"[\"", ",", "\"\"", ")", ".", "replace", "(", "\"]\"", ",", "\"\"", ")", "params", "=", "[", "]", "# Even if the signature doesn't contain a /, we assume that arguments", "# are positional-only until shown otherwise - the / is often omitted.", "kind", ":", "inspect", ".", "_ParameterKind", "=", "inspect", ".", "Parameter", ".", "POSITIONAL_ONLY", "for", "arg", "in", "args", ".", "split", "(", "\", \"", ")", ":", "arg", ",", "", "*", "_", "=", "arg", ".", "partition", "(", "\"=\"", ")", "if", "arg", "==", "\"/\"", ":", "kind", "=", "inspect", ".", "Parameter", ".", "POSITIONAL_OR_KEYWORD", "continue", "if", "arg", ".", "startswith", "(", "\"*\"", ")", ":", "kind", "=", "inspect", ".", "Parameter", ".", "KEYWORD_ONLY", "continue", "# we omit *varargs, if there are any", "if", "arg", ".", "startswith", "(", "\"**\"", ")", ":", "break", "# and likewise omit **varkw", "params", ".", "append", "(", "inspect", ".", "Parameter", "(", "name", "=", "arg", ",", "kind", "=", "kind", ")", ")", "elif", "_is_probably_ufunc", "(", "func", ")", ":", "# `inspect.signature` doesn't work on ufunc objects, but we can work out", "# what the required parameters would look like if it did.", "# Note that we use args named a, b, c... to match the `operator` module,", "# rather than x1, x2, x3... like the Numpy docs. Because they're pos-only", "# this doesn't make a runtime difference, and it's much nicer for use-cases", "# like `equivalent(numpy.add, operator.add)`.", "params", "=", "[", "inspect", ".", "Parameter", "(", "name", "=", "name", ",", "kind", "=", "inspect", ".", "Parameter", ".", "POSITIONAL_ONLY", ")", "for", "name", "in", "ascii_lowercase", "[", ":", "func", ".", "nin", "]", "# type: ignore", "]", "else", ":", "# If we haven't managed to recover a signature through the tricks above,", "# we're out of ideas and should just re-raise the exception.", "raise", "return", "OrderedDict", "(", "(", "p", ".", "name", ",", "p", ")", "for", "p", "in", "params", "if", "p", ".", "kind", "not", "in", "var_param_kinds", ")" ]
https://github.com/HypothesisWorks/hypothesis/blob/d1bfc4acc86899caa7a40f892322e1a69fbf36f4/hypothesis-python/src/hypothesis/extra/ghostwriter.py#L324-L372
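The happy path of `_get_params` is plain `inspect.signature`; the docstring and ufunc branches above are fallbacks for objects where that call fails. A minimal sketch of the ordinary path:

```python
import inspect

def f(a, b, *args, c=1, **kwargs):
    pass

var_kinds = (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
params = {name: p for name, p in inspect.signature(f).parameters.items()
          if p.kind not in var_kinds}
print(list(params))  # ['a', 'b', 'c'] -- *args and **kwargs are dropped
```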
pilotmoon/PopClip-Extensions
29fc472befc09ee350092ac70283bd9fdb456cb6
source/InstantTranslate/requests/packages/urllib3/util/retry.py
python
Retry.is_forced_retry
(self, method, status_code)
return self.status_forcelist and status_code in self.status_forcelist
Is this method/status code retryable? (Based on method/codes whitelists)
Is this method/status code retryable? (Based on method/codes whitelists)
[ "Is", "this", "method", "/", "status", "code", "retryable?", "(", "Based", "on", "method", "/", "codes", "whitelists", ")" ]
def is_forced_retry(self, method, status_code): """ Is this method/status code retryable? (Based on method/codes whitelists) """ if self.method_whitelist and method.upper() not in self.method_whitelist: return False return self.status_forcelist and status_code in self.status_forcelist
[ "def", "is_forced_retry", "(", "self", ",", "method", ",", "status_code", ")", ":", "if", "self", ".", "method_whitelist", "and", "method", ".", "upper", "(", ")", "not", "in", "self", ".", "method_whitelist", ":", "return", "False", "return", "self", ".", "status_forcelist", "and", "status_code", "in", "self", ".", "status_forcelist" ]
https://github.com/pilotmoon/PopClip-Extensions/blob/29fc472befc09ee350092ac70283bd9fdb456cb6/source/InstantTranslate/requests/packages/urllib3/util/retry.py#L192-L198
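A stand-alone sketch mirroring the whitelist semantics above without pinning a urllib3 release (newer urllib3 renames `method_whitelist` to `allowed_methods` and removes `is_forced_retry`):

```python
def is_forced_retry(method, status_code, method_whitelist, status_forcelist):
    # Mirrors Retry.is_forced_retry: the method must be whitelisted (when a
    # whitelist is set) and the status code must appear in the force list.
    if method_whitelist and method.upper() not in method_whitelist:
        return False
    return bool(status_forcelist and status_code in status_forcelist)

print(is_forced_retry("GET", 503, {"GET", "PUT"}, {500, 502, 503}))   # True
print(is_forced_retry("POST", 503, {"GET", "PUT"}, {500, 502, 503}))  # False
```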
Dozed12/df-style-worldgen
937455d54f4b02df9c4b10ae6418f4c932fd97bf
dist/libtcodpy.py
python
parser_get_string_property
(parser, name)
return _lib.TCOD_parser_get_string_property(parser, c_char_p(name))
[]
def parser_get_string_property(parser, name): return _lib.TCOD_parser_get_string_property(parser, c_char_p(name))
[ "def", "parser_get_string_property", "(", "parser", ",", "name", ")", ":", "return", "_lib", ".", "TCOD_parser_get_string_property", "(", "parser", ",", "c_char_p", "(", "name", ")", ")" ]
https://github.com/Dozed12/df-style-worldgen/blob/937455d54f4b02df9c4b10ae6418f4c932fd97bf/dist/libtcodpy.py#L1376-L1377
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/kombu/transport/base.py
python
Transport.supports_heartbeats
(self)
return self.implements.heartbeats
[]
def supports_heartbeats(self): return self.implements.heartbeats
[ "def", "supports_heartbeats", "(", "self", ")", ":", "return", "self", ".", "implements", ".", "heartbeats" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/kombu/transport/base.py#L250-L251
smicallef/spiderfoot
fd4bf9394c9ab3ecc90adc3115c56349fb23165b
spiderfoot/plugin.py
python
SpiderFootPlugin.notifyListeners
(self, sfEvent)
Call the handleEvent() method of every other plug-in listening for events from this plug-in. Remember that those plug-ins will be called within the same execution context of this thread, not on their own. Args: sfEvent (SpiderFootEvent): event Raises: TypeError: sfEvent argument was invalid type
Call the handleEvent() method of every other plug-in listening for events from this plug-in. Remember that those plug-ins will be called within the same execution context of this thread, not on their own.
[ "Call", "the", "handleEvent", "()", "method", "of", "every", "other", "plug", "-", "in", "listening", "for", "events", "from", "this", "plug", "-", "in", ".", "Remember", "that", "those", "plug", "-", "ins", "will", "be", "called", "within", "the", "same", "execution", "context", "of", "this", "thread", "not", "on", "their", "own", "." ]
def notifyListeners(self, sfEvent): """Call the handleEvent() method of every other plug-in listening for events from this plug-in. Remember that those plug-ins will be called within the same execution context of this thread, not on their own. Args: sfEvent (SpiderFootEvent): event Raises: TypeError: sfEvent argument was invalid type """ from spiderfoot import SpiderFootEvent if not isinstance(sfEvent, SpiderFootEvent): raise TypeError(f"sfEvent is {type(sfEvent)}; expected SpiderFootEvent") eventName = sfEvent.eventType eventData = sfEvent.data if self.__outputFilter__: # Be strict about what events to pass on, unless they are # the ROOT event or the event type of the target. if eventName not in ('ROOT', self.getTarget().targetType): if eventName not in self.__outputFilter__: return storeOnly = False # Under some conditions, only store and don't notify if not eventData: return if self.checkForStop(): return # Look back to ensure the original notification for an element # is what's linked to children. For instance, sfp_dns may find # xyz.abc.com, and then sfp_ripe obtains some raw data for the # same, and then sfp_dns finds xyz.abc.com in there, we should # suppress the notification of that to other modules, as the # original xyz.abc.com notification from sfp_dns will trigger # those modules anyway. This also avoids messy iterations that # traverse many many levels. # storeOnly is used in this case so that the source to dest # relationship is made, but no further events are triggered # from dest, as we are already operating on dest's original # notification from one of the upstream events. prevEvent = sfEvent.sourceEvent while prevEvent is not None: if prevEvent.sourceEvent is not None: if prevEvent.sourceEvent.eventType == sfEvent.eventType and prevEvent.sourceEvent.data.lower() == eventData.lower(): storeOnly = True break prevEvent = prevEvent.sourceEvent # output to queue if applicable if self.outgoingEventQueue is not None: self.outgoingEventQueue.put(sfEvent) # otherwise, call other modules directly else: self._listenerModules.sort(key=lambda m: m._priority) for listener in self._listenerModules: if eventName not in listener.watchedEvents() and '*' not in listener.watchedEvents(): continue if storeOnly and "__stor" not in listener.__module__: continue listener._currentEvent = sfEvent # Check if we've been asked to stop in the meantime, so that # notifications stop triggering module activity. if self.checkForStop(): return try: listener.handleEvent(sfEvent) except Exception as e: self.sf.error(f"Module ({listener.__module__}) encountered an error: {e}") # set errorState self.errorState = True # clear incoming queue if self.incomingEventQueue: with suppress(queue.Empty): while 1: self.incomingEventQueue.get_nowait()
[ "def", "notifyListeners", "(", "self", ",", "sfEvent", ")", ":", "from", "spiderfoot", "import", "SpiderFootEvent", "if", "not", "isinstance", "(", "sfEvent", ",", "SpiderFootEvent", ")", ":", "raise", "TypeError", "(", "f\"sfEvent is {type(sfEvent)}; expected SpiderFootEvent\"", ")", "eventName", "=", "sfEvent", ".", "eventType", "eventData", "=", "sfEvent", ".", "data", "if", "self", ".", "__outputFilter__", ":", "# Be strict about what events to pass on, unless they are", "# the ROOT event or the event type of the target.", "if", "eventName", "not", "in", "(", "'ROOT'", ",", "self", ".", "getTarget", "(", ")", ".", "targetType", ")", ":", "if", "eventName", "not", "in", "self", ".", "__outputFilter__", ":", "return", "storeOnly", "=", "False", "# Under some conditions, only store and don't notify", "if", "not", "eventData", ":", "return", "if", "self", ".", "checkForStop", "(", ")", ":", "return", "# Look back to ensure the original notification for an element", "# is what's linked to children. For instance, sfp_dns may find", "# xyz.abc.com, and then sfp_ripe obtains some raw data for the", "# same, and then sfp_dns finds xyz.abc.com in there, we should", "# suppress the notification of that to other modules, as the", "# original xyz.abc.com notification from sfp_dns will trigger", "# those modules anyway. This also avoids messy iterations that", "# traverse many many levels.", "# storeOnly is used in this case so that the source to dest", "# relationship is made, but no further events are triggered", "# from dest, as we are already operating on dest's original", "# notification from one of the upstream events.", "prevEvent", "=", "sfEvent", ".", "sourceEvent", "while", "prevEvent", "is", "not", "None", ":", "if", "prevEvent", ".", "sourceEvent", "is", "not", "None", ":", "if", "prevEvent", ".", "sourceEvent", ".", "eventType", "==", "sfEvent", ".", "eventType", "and", "prevEvent", ".", "sourceEvent", ".", "data", ".", "lower", "(", ")", "==", "eventData", ".", "lower", "(", ")", ":", "storeOnly", "=", "True", "break", "prevEvent", "=", "prevEvent", ".", "sourceEvent", "# output to queue if applicable", "if", "self", ".", "outgoingEventQueue", "is", "not", "None", ":", "self", ".", "outgoingEventQueue", ".", "put", "(", "sfEvent", ")", "# otherwise, call other modules directly", "else", ":", "self", ".", "_listenerModules", ".", "sort", "(", "key", "=", "lambda", "m", ":", "m", ".", "_priority", ")", "for", "listener", "in", "self", ".", "_listenerModules", ":", "if", "eventName", "not", "in", "listener", ".", "watchedEvents", "(", ")", "and", "'*'", "not", "in", "listener", ".", "watchedEvents", "(", ")", ":", "continue", "if", "storeOnly", "and", "\"__stor\"", "not", "in", "listener", ".", "__module__", ":", "continue", "listener", ".", "_currentEvent", "=", "sfEvent", "# Check if we've been asked to stop in the meantime, so that", "# notifications stop triggering module activity.", "if", "self", ".", "checkForStop", "(", ")", ":", "return", "try", ":", "listener", ".", "handleEvent", "(", "sfEvent", ")", "except", "Exception", "as", "e", ":", "self", ".", "sf", ".", "error", "(", "f\"Module ({listener.__module__}) encountered an error: {e}\"", ")", "# set errorState", "self", ".", "errorState", "=", "True", "# clear incoming queue", "if", "self", ".", "incomingEventQueue", ":", "with", "suppress", "(", "queue", ".", "Empty", ")", ":", "while", "1", ":", "self", ".", "incomingEventQueue", ".", "get_nowait", "(", ")" ]
https://github.com/smicallef/spiderfoot/blob/fd4bf9394c9ab3ecc90adc3115c56349fb23165b/spiderfoot/plugin.py#L312-L400
bitcoin-core/HWI
6871946c2176f2f9777b6ac8f0614d96d99bfa0e
hwilib/devices/jadepy/jade.py
python
JadeAPI.run_remote_selfcheck
(self)
return self._jadeRpc('debug_selfcheck', long_timeout=True)
[]
def run_remote_selfcheck(self): return self._jadeRpc('debug_selfcheck', long_timeout=True)
[ "def", "run_remote_selfcheck", "(", "self", ")", ":", "return", "self", ".", "_jadeRpc", "(", "'debug_selfcheck'", ",", "long_timeout", "=", "True", ")" ]
https://github.com/bitcoin-core/HWI/blob/6871946c2176f2f9777b6ac8f0614d96d99bfa0e/hwilib/devices/jadepy/jade.py#L210-L211
Netflix/security_monkey
c28592ffd518fa399527d26262683fc860c30eef
security_monkey/watchers/vpc/networkacl.py
python
NetworkACL.slurp
(self)
return slurp_items()
:returns: item_list - list of network acls. :returns: exception_map - A dict where the keys are a tuple containing the location of the exception and the value is the actual exception
:returns: item_list - list of network acls. :returns: exception_map - A dict where the keys are a tuple containing the location of the exception and the value is the actual exception
[ ":", "returns", ":", "item_list", "-", "list", "of", "network", "acls", ".", ":", "returns", ":", "exception_map", "-", "A", "dict", "where", "the", "keys", "are", "a", "tuple", "containing", "the", "location", "of", "the", "exception", "and", "the", "value", "is", "the", "actual", "exception" ]
def slurp(self): """ :returns: item_list - list of network acls. :returns: exception_map - A dict where the keys are a tuple containing the location of the exception and the value is the actual exception """ self.prep_for_slurp() @iter_account_region(index=self.index, accounts=self.accounts, service_name='ec2') def slurp_items(**kwargs): item_list = [] exception_map = {} kwargs['exception_map'] = exception_map app.logger.debug("Checking {}/{}/{}".format(self.index, kwargs['account_name'], kwargs['region'])) networkacls = self.describe_network_acls(**kwargs) if networkacls: for nacl in networkacls: nacl_id = nacl.get('NetworkAclId') if self.check_ignore_list(nacl_id): continue config = { 'id': nacl_id, 'vpc_id': nacl.get('VpcId'), 'is_default': bool(nacl.get('IsDefault')), 'entries': nacl.get('Entries'), 'associations': nacl.get('Associations'), 'tags': nacl.get('Tags') } item = NetworkACLItem(region=kwargs['region'], account=kwargs['account_name'], name=nacl_id, config=config, source_watcher=self) item_list.append(item) return item_list, exception_map return slurp_items()
[ "def", "slurp", "(", "self", ")", ":", "self", ".", "prep_for_slurp", "(", ")", "@", "iter_account_region", "(", "index", "=", "self", ".", "index", ",", "accounts", "=", "self", ".", "accounts", ",", "service_name", "=", "'ec2'", ")", "def", "slurp_items", "(", "*", "*", "kwargs", ")", ":", "item_list", "=", "[", "]", "exception_map", "=", "{", "}", "kwargs", "[", "'exception_map'", "]", "=", "exception_map", "app", ".", "logger", ".", "debug", "(", "\"Checking {}/{}/{}\"", ".", "format", "(", "self", ".", "index", ",", "kwargs", "[", "'account_name'", "]", ",", "kwargs", "[", "'region'", "]", ")", ")", "networkacls", "=", "self", ".", "describe_network_acls", "(", "*", "*", "kwargs", ")", "if", "networkacls", ":", "for", "nacl", "in", "networkacls", ":", "nacl_id", "=", "nacl", ".", "get", "(", "'NetworkAclId'", ")", "if", "self", ".", "check_ignore_list", "(", "nacl_id", ")", ":", "continue", "config", "=", "{", "'id'", ":", "nacl_id", ",", "'vpc_id'", ":", "nacl", ".", "get", "(", "'VpcId'", ")", ",", "'is_default'", ":", "bool", "(", "nacl", ".", "get", "(", "'IsDefault'", ")", ")", ",", "'entries'", ":", "nacl", ".", "get", "(", "'Entries'", ")", ",", "'associations'", ":", "nacl", ".", "get", "(", "'Associations'", ")", ",", "'tags'", ":", "nacl", ".", "get", "(", "'Tags'", ")", "}", "item", "=", "NetworkACLItem", "(", "region", "=", "kwargs", "[", "'region'", "]", ",", "account", "=", "kwargs", "[", "'account_name'", "]", ",", "name", "=", "nacl_id", ",", "config", "=", "config", ",", "source_watcher", "=", "self", ")", "item_list", ".", "append", "(", "item", ")", "return", "item_list", ",", "exception_map", "return", "slurp_items", "(", ")" ]
https://github.com/Netflix/security_monkey/blob/c28592ffd518fa399527d26262683fc860c30eef/security_monkey/watchers/vpc/networkacl.py#L48-L90
xlcnd/isbntools
e7c85f0f4b3dd023b43b0b5daccbd8f6f62250f9
isbntools/_shelvecache.py
python
ShelveCache.__init__
(self, filepath, allow_empty=True)
Initialize attributes.
Initialize attributes.
[ "Initialize", "attributes", "." ]
def __init__(self, filepath, allow_empty=True): """Initialize attributes.""" self._sh = shelve self.filepath = filepath self._allow_empty = allow_empty self._allow_empty_default = allow_empty try: s = self._sh.open(self.filepath) try: self._keys = list(s.keys()) if len(self._keys) > self.MAXLEN: self.purge() except Exception: pass except Exception: s = self._sh.open(self.filepath, 'n') self._keys = [] finally: s.close()
[ "def", "__init__", "(", "self", ",", "filepath", ",", "allow_empty", "=", "True", ")", ":", "self", ".", "_sh", "=", "shelve", "self", ".", "filepath", "=", "filepath", "self", ".", "_allow_empty", "=", "allow_empty", "self", ".", "_allow_empty_default", "=", "allow_empty", "try", ":", "s", "=", "self", ".", "_sh", ".", "open", "(", "self", ".", "filepath", ")", "try", ":", "self", ".", "_keys", "=", "list", "(", "s", ".", "keys", "(", ")", ")", "if", "len", "(", "self", ".", "_keys", ")", ">", "self", ".", "MAXLEN", ":", "self", ".", "purge", "(", ")", "except", "Exception", ":", "pass", "except", "Exception", ":", "s", "=", "self", ".", "_sh", ".", "open", "(", "self", ".", "filepath", ",", "'n'", ")", "self", ".", "_keys", "=", "[", "]", "finally", ":", "s", ".", "close", "(", ")" ]
https://github.com/xlcnd/isbntools/blob/e7c85f0f4b3dd023b43b0b5daccbd8f6f62250f9/isbntools/_shelvecache.py#L51-L69
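Background sketch of the stdlib `shelve` persistence this cache wraps; the key is illustrative, and a temporary directory keeps the example self-cleaning:

```python
import os
import shelve
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "cache")
    with shelve.open(path) as s:   # backing file(s) created on first open
        s["isbn:9780132350884"] = {"source": "example"}
    with shelve.open(path) as s:   # reopened: keys persist on disk
        print(list(s.keys()))
```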
google/timesketch
1ce6b60e125d104e6644947c6f1dbe1b82ac76b6
api_client/python/timesketch_api_client/config.py
python
get_client
( config_dict: Optional[Dict[Text, Any]] = None, config_path: Optional[Text] = '', config_section: Optional[Text] = 'timesketch', token_password: Optional[Text] = '', confirm_choices: Optional[bool] = False, load_cli_config: Optional[bool] = False )
Returns a Timesketch API client using the configuration assistant. Args: config_dict (dict): optional dict that will be used to configure the client. config_path (str): optional path to the configuration file, if not supplied a default path will be used. config_section (str): The configuration section to read from. This is optional and defaults to timesketch. This can be useful if you have multiple Timesketch servers to connect to, with each one of them having a separate section in the config file. token_password (str): an optional password to decrypt the credential token file. confirm_choices (bool): an optional bool. if set to the user is given a choice to change the value for all already configured parameters. This defaults to False. load_cli_config (bool): Determine if the CLI config section should be loaded. This is optional and defaults to False. Returns: A timesketch client (TimesketchApi) or None if not possible.
Returns a Timesketch API client using the configuration assistant.
[ "Returns", "a", "Timesketch", "API", "client", "using", "the", "configuration", "assistant", "." ]
def get_client( config_dict: Optional[Dict[Text, Any]] = None, config_path: Optional[Text] = '', config_section: Optional[Text] = 'timesketch', token_password: Optional[Text] = '', confirm_choices: Optional[bool] = False, load_cli_config: Optional[bool] = False ) -> Optional[client.TimesketchApi]: """Returns a Timesketch API client using the configuration assistant. Args: config_dict (dict): optional dict that will be used to configure the client. config_path (str): optional path to the configuration file, if not supplied a default path will be used. config_section (str): The configuration section to read from. This is optional and defaults to timesketch. This can be useful if you have multiple Timesketch servers to connect to, with each one of them having a separate section in the config file. token_password (str): an optional password to decrypt the credential token file. confirm_choices (bool): an optional bool. if set to the user is given a choice to change the value for all already configured parameters. This defaults to False. load_cli_config (bool): Determine if the CLI config section should be loaded. This is optional and defaults to False. Returns: A timesketch client (TimesketchApi) or None if not possible. """ assistant = ConfigAssistant() try: assistant.load_config_file( config_path, section=config_section, load_cli_config=load_cli_config) if config_dict: assistant.load_config_dict(config_dict) except IOError as e: logger.error('Unable to load the config file, is it valid?') logger.error('Error: %s', e) try: configure_missing_parameters( config_assistant=assistant, token_password=token_password, confirm_choices=confirm_choices, config_section=config_section) return assistant.get_client(token_password=token_password) except (RuntimeError, requests.ConnectionError) as e: logger.error( 'Unable to connect to the Timesketch server, are you ' 'connected to the network? Is the timesketch server ' 'running and accessible from your host? The error ' 'message is %s', e) except IOError as e: logger.error('Unable to get a client, with error: %s', e) logger.error( 'If the issue is in the credentials then one solution ' 'is to remove the ~/.timesketch.token file and the ' 'credential section in ~/.timesketchrc or to remove ' 'both files. Or you could have supplied a wrong ' 'password to undecrypt the token file.')
[ "def", "get_client", "(", "config_dict", ":", "Optional", "[", "Dict", "[", "Text", ",", "Any", "]", "]", "=", "None", ",", "config_path", ":", "Optional", "[", "Text", "]", "=", "''", ",", "config_section", ":", "Optional", "[", "Text", "]", "=", "'timesketch'", ",", "token_password", ":", "Optional", "[", "Text", "]", "=", "''", ",", "confirm_choices", ":", "Optional", "[", "bool", "]", "=", "False", ",", "load_cli_config", ":", "Optional", "[", "bool", "]", "=", "False", ")", "->", "Optional", "[", "client", ".", "TimesketchApi", "]", ":", "assistant", "=", "ConfigAssistant", "(", ")", "try", ":", "assistant", ".", "load_config_file", "(", "config_path", ",", "section", "=", "config_section", ",", "load_cli_config", "=", "load_cli_config", ")", "if", "config_dict", ":", "assistant", ".", "load_config_dict", "(", "config_dict", ")", "except", "IOError", "as", "e", ":", "logger", ".", "error", "(", "'Unable to load the config file, is it valid?'", ")", "logger", ".", "error", "(", "'Error: %s'", ",", "e", ")", "try", ":", "configure_missing_parameters", "(", "config_assistant", "=", "assistant", ",", "token_password", "=", "token_password", ",", "confirm_choices", "=", "confirm_choices", ",", "config_section", "=", "config_section", ")", "return", "assistant", ".", "get_client", "(", "token_password", "=", "token_password", ")", "except", "(", "RuntimeError", ",", "requests", ".", "ConnectionError", ")", "as", "e", ":", "logger", ".", "error", "(", "'Unable to connect to the Timesketch server, are you '", "'connected to the network? Is the timesketch server '", "'running and accessible from your host? The error '", "'message is %s'", ",", "e", ")", "except", "IOError", "as", "e", ":", "logger", ".", "error", "(", "'Unable to get a client, with error: %s'", ",", "e", ")", "logger", ".", "error", "(", "'If the issue is in the credentials then one solution '", "'is to remove the ~/.timesketch.token file and the '", "'credential section in ~/.timesketchrc or to remove '", "'both files. Or you could have supplied a wrong '", "'password to undecrypt the token file.'", ")" ]
https://github.com/google/timesketch/blob/1ce6b60e125d104e6644947c6f1dbe1b82ac76b6/api_client/python/timesketch_api_client/config.py#L376-L438
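A minimal usage sketch for the get_client record above, assuming a valid ~/.timesketchrc and credential token already exist on disk; list_sketches() is part of the public TimesketchApi surface, and the printed fields are illustrative.

from timesketch_api_client import config

# Reads ~/.timesketchrc, prompts for any missing parameters, and
# returns an authenticated TimesketchApi client (or None on failure).
ts_client = config.get_client(config_section='timesketch')
if ts_client:
    for sketch in ts_client.list_sketches():
        print(sketch.id, sketch.name)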
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/dexcom/config_flow.py
python
DexcomOptionsFlowHandler.__init__
(self, config_entry: config_entries.ConfigEntry)
Initialize options flow.
Initialize options flow.
[ "Initialize", "options", "flow", "." ]
def __init__(self, config_entry: config_entries.ConfigEntry) -> None: """Initialize options flow.""" self.config_entry = config_entry
[ "def", "__init__", "(", "self", ",", "config_entry", ":", "config_entries", ".", "ConfigEntry", ")", "->", "None", ":", "self", ".", "config_entry", "=", "config_entry" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/dexcom/config_flow.py#L64-L66
couchbase/couchbase-python-client
58ccfd42af320bde6b733acf094fd5a4cf34e0ad
couchbase_version.py
python
gen_version
(do_write=True, txt=None)
Generate a version based on git tag info. This will write the couchbase/_version.py file. If not inside a git tree it will raise a CantInvokeGit exception - which is normal (and squashed by setup.py) if we are running from a tarball
Generate a version based on git tag info. This will write the couchbase/_version.py file. If not inside a git tree it will raise a CantInvokeGit exception - which is normal (and squashed by setup.py) if we are running from a tarball
[ "Generate", "a", "version", "based", "on", "git", "tag", "info", ".", "This", "will", "write", "the", "couchbase", "/", "_version", ".", "py", "file", ".", "If", "not", "inside", "a", "git", "tree", "it", "will", "raise", "a", "CantInvokeGit", "exception", "-", "which", "is", "normal", "(", "and", "squashed", "by", "setup", ".", "py", ")", "if", "we", "are", "running", "from", "a", "tarball" ]
def gen_version(do_write=True, txt=None): """ Generate a version based on git tag info. This will write the couchbase/_version.py file. If not inside a git tree it will raise a CantInvokeGit exception - which is normal (and squashed by setup.py) if we are running from a tarball """ if txt is None: txt = get_git_describe() try: info = VersionInfo(txt) vstr = info.package_version except MalformedGitTag: warnings.warn("Malformed input '{0}'".format(txt)) vstr = '0.0.0' + txt if not do_write: print(vstr) return lines = ( '# This file automatically generated by', '# {0}'.format(__file__), '# at', '# {0}'.format(datetime.datetime.now().isoformat(' ')), "__version__ = '{0}'".format(vstr) ) with open(VERSION_FILE, "w") as fp: fp.write("\n".join(lines))
[ "def", "gen_version", "(", "do_write", "=", "True", ",", "txt", "=", "None", ")", ":", "if", "txt", "is", "None", ":", "txt", "=", "get_git_describe", "(", ")", "try", ":", "info", "=", "VersionInfo", "(", "txt", ")", "vstr", "=", "info", ".", "package_version", "except", "MalformedGitTag", ":", "warnings", ".", "warn", "(", "\"Malformed input '{0}'\"", ".", "format", "(", "txt", ")", ")", "vstr", "=", "'0.0.0'", "+", "txt", "if", "not", "do_write", ":", "print", "(", "vstr", ")", "return", "lines", "=", "(", "'# This file automatically generated by'", ",", "'# {0}'", ".", "format", "(", "__file__", ")", ",", "'# at'", ",", "'# {0}'", ".", "format", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", "' '", ")", ")", ",", "\"__version__ = '{0}'\"", ".", "format", "(", "vstr", ")", ")", "with", "open", "(", "VERSION_FILE", ",", "\"w\"", ")", "as", "fp", ":", "fp", ".", "write", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")" ]
https://github.com/couchbase/couchbase-python-client/blob/58ccfd42af320bde6b733acf094fd5a4cf34e0ad/couchbase_version.py#L131-L161
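A usage sketch for gen_version; the describe string below follows the common TAG-COMMITCOUNT-gHASH shape, though the exact tag grammar VersionInfo accepts is an assumption here.

# Dry run: parse a git-describe string and print the derived package
# version without writing couchbase/_version.py.
gen_version(do_write=False, txt='3.0.0-12-gabc1234')

# Inside a real git checkout: derive the string via `git describe` and
# write VERSION_FILE; squash CantInvokeGit when run from a tarball.
try:
    gen_version()
except CantInvokeGit:
    pass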
llSourcell/3D_Pose_Estimation
87c9d77e3bb0c1105eae74046c26f8b7f101ca45
src/data_utils.py
python
project_to_cameras
( poses_set, cams, ncams=4 )
return t2d
Project 3d poses using camera parameters Args poses_set: dictionary with 3d poses cams: dictionary with camera parameters ncams: number of cameras per subject Returns t2d: dictionary with 2d poses
Project 3d poses using camera parameters
[ "Project", "3d", "poses", "using", "camera", "parameters" ]
def project_to_cameras( poses_set, cams, ncams=4 ): """ Project 3d poses using camera parameters Args poses_set: dictionary with 3d poses cams: dictionary with camera parameters ncams: number of cameras per subject Returns t2d: dictionary with 2d poses """ t2d = {} for t3dk in sorted( poses_set.keys() ): subj, a, seqname = t3dk t3d = poses_set[ t3dk ] for cam in range( ncams ): R, T, f, c, k, p, name = cams[ (subj, cam+1) ] pts2d, _, _, _, _ = cameras.project_point_radial( np.reshape(t3d, [-1, 3]), R, T, f, c, k, p ) pts2d = np.reshape( pts2d, [-1, len(H36M_NAMES)*2] ) sname = seqname[:-3]+"."+name+".h5" # e.g.: Waiting 1.58860488.h5 t2d[ (subj, a, sname) ] = pts2d return t2d
[ "def", "project_to_cameras", "(", "poses_set", ",", "cams", ",", "ncams", "=", "4", ")", ":", "t2d", "=", "{", "}", "for", "t3dk", "in", "sorted", "(", "poses_set", ".", "keys", "(", ")", ")", ":", "subj", ",", "a", ",", "seqname", "=", "t3dk", "t3d", "=", "poses_set", "[", "t3dk", "]", "for", "cam", "in", "range", "(", "ncams", ")", ":", "R", ",", "T", ",", "f", ",", "c", ",", "k", ",", "p", ",", "name", "=", "cams", "[", "(", "subj", ",", "cam", "+", "1", ")", "]", "pts2d", ",", "_", ",", "_", ",", "_", ",", "_", "=", "cameras", ".", "project_point_radial", "(", "np", ".", "reshape", "(", "t3d", ",", "[", "-", "1", ",", "3", "]", ")", ",", "R", ",", "T", ",", "f", ",", "c", ",", "k", ",", "p", ")", "pts2d", "=", "np", ".", "reshape", "(", "pts2d", ",", "[", "-", "1", ",", "len", "(", "H36M_NAMES", ")", "*", "2", "]", ")", "sname", "=", "seqname", "[", ":", "-", "3", "]", "+", "\".\"", "+", "name", "+", "\".h5\"", "# e.g.: Waiting 1.58860488.h5", "t2d", "[", "(", "subj", ",", "a", ",", "sname", ")", "]", "=", "pts2d", "return", "t2d" ]
https://github.com/llSourcell/3D_Pose_Estimation/blob/87c9d77e3bb0c1105eae74046c26f8b7f101ca45/src/data_utils.py#L337-L362
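A smoke-test sketch for project_to_cameras with synthetic inputs; the dictionary keys, the identity camera, and the 32-joint count are placeholders standing in for the real Human3.6M loading code, and the array shapes expected by cameras.project_point_radial are an assumption.

import numpy as np

# One random 3d frame for subject 1, action index 2 (32 joints x 3 coords).
poses_set = {(1, 2, 'Directions 1.h5'): np.random.randn(1, 32 * 3)}

# Four identity cameras: (R, T, f, c, k, p, name) per (subject, cam_id).
cams = {(1, cam + 1): (np.eye(3), np.zeros((3, 1)), np.ones((2, 1)),
                       np.zeros((2, 1)), np.zeros((3, 1)), np.zeros((2, 1)),
                       'cam%d' % (cam + 1))
        for cam in range(4)}

t2d = project_to_cameras(poses_set, cams, ncams=4)  # 4 entries of 2d poses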
RichardFrangenberg/Prism
09283b5146d9cdf9d489dcf252f7927083534a48
Prism/Plugins/ProjectManagers/Shotgun/external_modules/shotgun_api3/lib/httplib2/python3/__init__.py
python
HmacDigestAuthentication.request
(self, method, request_uri, headers, content)
Modify the request headers
Modify the request headers
[ "Modify", "the", "request", "headers" ]
def request(self, method, request_uri, headers, content): """Modify the request headers""" keys = _get_end2end_headers(headers) keylist = "".join(["%s " % k for k in keys]) headers_val = "".join([headers[k] for k in keys]) created = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) cnonce = _cnonce() request_digest = "%s:%s:%s:%s:%s" % ( method, request_uri, cnonce, self.challenge["snonce"], headers_val, ) request_digest = ( hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower() ) headers["authorization"] = ( 'HMACDigest username="%s", realm="%s", snonce="%s",' ' cnonce="%s", uri="%s", created="%s", ' 'response="%s", headers="%s"' ) % ( self.credentials[0], self.challenge["realm"], self.challenge["snonce"], cnonce, request_uri, created, request_digest, keylist, )
[ "def", "request", "(", "self", ",", "method", ",", "request_uri", ",", "headers", ",", "content", ")", ":", "keys", "=", "_get_end2end_headers", "(", "headers", ")", "keylist", "=", "\"\"", ".", "join", "(", "[", "\"%s \"", "%", "k", "for", "k", "in", "keys", "]", ")", "headers_val", "=", "\"\"", ".", "join", "(", "[", "headers", "[", "k", "]", "for", "k", "in", "keys", "]", ")", "created", "=", "time", ".", "strftime", "(", "\"%Y-%m-%dT%H:%M:%SZ\"", ",", "time", ".", "gmtime", "(", ")", ")", "cnonce", "=", "_cnonce", "(", ")", "request_digest", "=", "\"%s:%s:%s:%s:%s\"", "%", "(", "method", ",", "request_uri", ",", "cnonce", ",", "self", ".", "challenge", "[", "\"snonce\"", "]", ",", "headers_val", ",", ")", "request_digest", "=", "(", "hmac", ".", "new", "(", "self", ".", "key", ",", "request_digest", ",", "self", ".", "hashmod", ")", ".", "hexdigest", "(", ")", ".", "lower", "(", ")", ")", "headers", "[", "\"authorization\"", "]", "=", "(", "'HMACDigest username=\"%s\", realm=\"%s\", snonce=\"%s\",'", "' cnonce=\"%s\", uri=\"%s\", created=\"%s\", '", "'response=\"%s\", headers=\"%s\"'", ")", "%", "(", "self", ".", "credentials", "[", "0", "]", ",", "self", ".", "challenge", "[", "\"realm\"", "]", ",", "self", ".", "challenge", "[", "\"snonce\"", "]", ",", "cnonce", ",", "request_uri", ",", "created", ",", "request_digest", ",", "keylist", ",", ")" ]
https://github.com/RichardFrangenberg/Prism/blob/09283b5146d9cdf9d489dcf252f7927083534a48/Prism/Plugins/ProjectManagers/Shotgun/external_modules/shotgun_api3/lib/httplib2/python3/__init__.py#L788-L818
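A standalone sketch of the digest construction performed above, with hypothetical key and nonce values; note that on Python 3, hmac.new wants bytes, so the message is encoded explicitly here.

import hashlib
import hmac

key = b'shared-secret'                     # derived from the 401 challenge
method, request_uri = 'GET', '/protected'
cnonce, snonce, headers_val = 'abc123', 'srv456', ''

msg = '%s:%s:%s:%s:%s' % (method, request_uri, cnonce, snonce, headers_val)
request_digest = hmac.new(key, msg.encode('utf-8'),
                          hashlib.sha1).hexdigest().lower()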
alanhamlett/pip-update-requirements
ce875601ef278c8ce00ad586434a978731525561
pur/packages/pip/_vendor/pyparsing.py
python
ParserElement.__rxor__
(self, other )
return other ^ self
Implementation of ^ operator when left operand is not a :class:`ParserElement`
Implementation of ^ operator when left operand is not a :class:`ParserElement`
[ "Implementation", "of", "^", "operator", "when", "left", "operand", "is", "not", "a", ":", "class", ":", "ParserElement" ]
def __rxor__(self, other ): """ Implementation of ^ operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return other ^ self
[ "def", "__rxor__", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "basestring", ")", ":", "other", "=", "ParserElement", ".", "_literalStringClass", "(", "other", ")", "if", "not", "isinstance", "(", "other", ",", "ParserElement", ")", ":", "warnings", ".", "warn", "(", "\"Cannot combine element of type %s with ParserElement\"", "%", "type", "(", "other", ")", ",", "SyntaxWarning", ",", "stacklevel", "=", "2", ")", "return", "None", "return", "other", "^", "self" ]
https://github.com/alanhamlett/pip-update-requirements/blob/ce875601ef278c8ce00ad586434a978731525561/pur/packages/pip/_vendor/pyparsing.py#L2157-L2167
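A short demonstration of what the reflected operator enables: a plain string on the left of ^ is promoted to a Literal before the Or alternation is built (the expression itself is illustrative).

from pyparsing import Word, nums

# str.__xor__ is undefined for ParserElement, so Python falls back to
# Word.__rxor__, which wraps 'stop' in a Literal and returns Literal ^ Word.
expr = 'stop' ^ Word(nums)
print(expr.parseString('stop'))  # ['stop']
print(expr.parseString('42'))    # ['42']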
kuri65536/python-for-android
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
python-modules/zope/zope/interface/common/mapping.py
python
IEnumerableMapping.items
()
Return the items of the mapping object.
Return the items of the mapping object.
[ "Return", "the", "items", "of", "the", "mapping", "object", "." ]
def items(): """Return the items of the mapping object. """
[ "def", "items", "(", ")", ":" ]
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/zope/zope/interface/common/mapping.py#L71-L73
invesalius/invesalius3
0616d3e73bfe0baf7525877dbf6acab697395eb9
invesalius/data/coregistration.py
python
tracker_to_image
(m_change, m_probe_ref, r_obj_img, m_obj_raw, s0_dyn)
return m_img
Compute affine transformation matrix to the reference basis

    :param m_change: Coregistration transformation obtained from fiducials
    :type m_change: numpy.ndarray
    :param m_probe_ref: Object or probe in reference coordinate system
    :type m_probe_ref: numpy.ndarray
    :param r_obj_img: Object coordinate system in image space (3d model)
    :type r_obj_img: numpy.ndarray
    :param m_obj_raw: Object basis in raw coordinates from tracker
    :type m_obj_raw: numpy.ndarray
    :param s0_dyn: Initial alignment of probe fixed in the object in reference (or static) frame
    :type s0_dyn: numpy.ndarray
    :return: 4 x 4 numpy double array
    :rtype: numpy.ndarray
Compute affine transformation matrix to the reference basis
[ "Compute", "affine", "transformation", "matrix", "to", "the", "reference", "basis" ]
def tracker_to_image(m_change, m_probe_ref, r_obj_img, m_obj_raw, s0_dyn):
    """Compute affine transformation matrix to the reference basis

    :param m_change: Coregistration transformation obtained from fiducials
    :type m_change: numpy.ndarray
    :param m_probe_ref: Object or probe in reference coordinate system
    :type m_probe_ref: numpy.ndarray
    :param r_obj_img: Object coordinate system in image space (3d model)
    :type r_obj_img: numpy.ndarray
    :param m_obj_raw: Object basis in raw coordinates from tracker
    :type m_obj_raw: numpy.ndarray
    :param s0_dyn: Initial alignment of probe fixed in the object in reference (or static) frame
    :type s0_dyn: numpy.ndarray
    :return: 4 x 4 numpy double array
    :rtype: numpy.ndarray
    """

    m_img = m_change @ m_probe_ref
    r_obj = r_obj_img @ np.linalg.inv(m_obj_raw) @ np.linalg.inv(s0_dyn) @ m_probe_ref @ m_obj_raw
    m_img[:3, :3] = r_obj[:3, :3]

    return m_img
[ "def", "tracker_to_image", "(", "m_change", ",", "m_probe_ref", ",", "r_obj_img", ",", "m_obj_raw", ",", "s0_dyn", ")", ":", "m_img", "=", "m_change", "@", "m_probe_ref", "r_obj", "=", "r_obj_img", "@", "np", ".", "linalg", ".", "inv", "(", "m_obj_raw", ")", "@", "np", ".", "linalg", ".", "inv", "(", "s0_dyn", ")", "@", "m_probe_ref", "@", "m_obj_raw", "m_img", "[", ":", "3", ",", ":", "3", "]", "=", "r_obj", "[", ":", "3", ",", ":", "3", "]", "return", "m_img" ]
https://github.com/invesalius/invesalius3/blob/0616d3e73bfe0baf7525877dbf6acab697395eb9/invesalius/data/coregistration.py#L82-L102
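A smoke-test sketch: with every calibration input set to the 4x4 identity, the composed transform collapses to the identity as well (purely illustrative values, not real calibration data).

import numpy as np

eye = np.eye(4)
m_img = tracker_to_image(eye, eye, eye, eye, eye)
assert np.allclose(m_img, eye)  # identity in, identity out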
getsentry/sentry
83b1f25aac3e08075e0e2495bc29efaf35aca18a
src/sentry/charts/base.py
python
ChartRenderer.generate_chart
(self, style: ChartType, data: Any, upload: bool = True)
Produces a chart. You may specify the upload kwarg to have the chart uploaded to storage and receive a public URL for the chart
Produces a chart. You may specify the upload kwarg to have the chart uploaded to storage and receive a public URL for the chart
[ "Produces", "a", "chart", ".", "You", "may", "specify", "the", "upload", "kwarg", "to", "have", "the", "chart", "uploaded", "to", "storage", "and", "receive", "a", "public", "URL", "for", "the", "chart" ]
def generate_chart(self, style: ChartType, data: Any, upload: bool = True) -> Union[str, bytes]: """ Produces a chart. You may specify the upload kwarg to have the chart uploaded to storage and receive a public URL for the chart """ raise NotImplementedError
[ "def", "generate_chart", "(", "self", ",", "style", ":", "ChartType", ",", "data", ":", "Any", ",", "upload", ":", "bool", "=", "True", ")", "->", "Union", "[", "str", ",", "bytes", "]", ":", "raise", "NotImplementedError" ]
https://github.com/getsentry/sentry/blob/83b1f25aac3e08075e0e2495bc29efaf35aca18a/src/sentry/charts/base.py#L32-L37
jparkhill/TensorMol
d52104dc7ee46eec8301d332a95d672270ac0bd1
TensorMol/Simulations/SimpleMD.py
python
NoseThermostat.__init__
(self,m_,v_)
return
Velocity Verlet step with a Nose-Hoover Thermostat.
Velocity Verlet step with a Nose-Hoover Thermostat.
[ "Velocity", "Verlet", "step", "with", "a", "Nose", "-", "Hoover", "Thermostat", "." ]
def __init__(self,m_,v_): """ Velocity Verlet step with a Nose-Hoover Thermostat. """ self.m = m_.copy() self.N = len(m_) self.T = PARAMS["MDTemp"] # Length of NH chain. self.eta = 0.0 self.name = "Nose" self.Rescale(v_) print("Using ", self.name, " thermostat at ",self.T, " degrees Kelvin") return
[ "def", "__init__", "(", "self", ",", "m_", ",", "v_", ")", ":", "self", ".", "m", "=", "m_", ".", "copy", "(", ")", "self", ".", "N", "=", "len", "(", "m_", ")", "self", ".", "T", "=", "PARAMS", "[", "\"MDTemp\"", "]", "# Length of NH chain.", "self", ".", "eta", "=", "0.0", "self", ".", "name", "=", "\"Nose\"", "self", ".", "Rescale", "(", "v_", ")", "print", "(", "\"Using \"", ",", "self", ".", "name", ",", "\" thermostat at \"", ",", "self", ".", "T", ",", "\" degrees Kelvin\"", ")", "return" ]
https://github.com/jparkhill/TensorMol/blob/d52104dc7ee46eec8301d332a95d672270ac0bd1/TensorMol/Simulations/SimpleMD.py#L91-L102
LKI/chinese-calendar
bbbc7a2df493930f5824ac63e72f6c12160e3e8f
chinese_calendar/scripts/data.py
python
Arrangement.maf
(self)
return self.mark(chinese_calendar.Holiday.mid_autumn_festival)
中秋节 Mid-autumn Festival
中秋节 Mid-autumn Festival
[ "中秋节", "Mid", "-", "autumn", "Festival" ]
def maf(self): """中秋节 Mid-autumn Festival""" return self.mark(chinese_calendar.Holiday.mid_autumn_festival)
[ "def", "maf", "(", "self", ")", ":", "return", "self", ".", "mark", "(", "chinese_calendar", ".", "Holiday", ".", "mid_autumn_festival", ")" ]
https://github.com/LKI/chinese-calendar/blob/bbbc7a2df493930f5824ac63e72f6c12160e3e8f/chinese_calendar/scripts/data.py#L509-L511
dingjiansw101/RoITransformer_DOTA
9125055aed313dde7a68882e9931c9ea58c6f6ab
faster_rcnn/core/module.py
python
MutableModule.fit
(self, train_data, eval_data=None, eval_metric='acc', epoch_end_callback=None, batch_end_callback=None, kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),), eval_end_callback=None, eval_batch_end_callback=None, initializer=Uniform(0.01), arg_params=None, aux_params=None, allow_missing=False, force_rebind=False, force_init=False, begin_epoch=0, num_epoch=None, validation_metric=None, monitor=None, prefix=None, state=None)
Train the module parameters. Parameters ---------- train_data : DataIter eval_data : DataIter If not `None`, will be used as validation set and evaluate the performance after each epoch. eval_metric : str or EvalMetric Default `'acc'`. The performance measure used to display during training. epoch_end_callback : function or list of function Each callback will be called with the current `epoch`, `symbol`, `arg_params` and `aux_params`. batch_end_callback : function or list of function Each callback will be called with a `BatchEndParam`. kvstore : str or KVStore Default `'local'`. optimizer : str or Optimizer Default `'sgd'` optimizer_params : dict Default `(('learning_rate', 0.01),)`. The parameters for the optimizer constructor. The default value is not a `dict`, just to avoid pylint warning on dangerous default values. eval_end_callback : function or list of function These will be called at the end of each full evaluation, with the metrics over the entire evaluation set. eval_batch_end_callback : function or list of function These will be called at the end of each minibatch during evaluation initializer : Initializer Will be called to initialize the module parameters if not already initialized. arg_params : dict Default `None`, if not `None`, should be existing parameters from a trained model or loaded from a checkpoint (previously saved model). In this case, the value here will be used to initialize the module parameters, unless they are already initialized by the user via a call to `init_params` or `fit`. `arg_params` has higher priority to `initializer`. aux_params : dict Default `None`. Similar to `arg_params`, except for auxiliary states. allow_missing : bool Default `False`. Indicate whether we allow missing parameters when `arg_params` and `aux_params` are not `None`. If this is `True`, then the missing parameters will be initialized via the `initializer`. force_rebind : bool Default `False`. Whether to force rebinding the executors if already binded. force_init : bool Default `False`. Indicate whether we should force initialization even if the parameters are already initialized. begin_epoch : int Default `0`. Indicate the starting epoch. Usually, if we are resuming from a checkpoint saved at a previous training phase at epoch N, then we should specify this value as N+1. num_epoch : int Number of epochs to run training. Examples -------- An example of using fit for training:: >>> #Assume training dataIter and validation dataIter are ready >>> mod.fit(train_data=train_dataiter, eval_data=val_dataiter, optimizer_params={'learning_rate':0.01, 'momentum': 0.9}, num_epoch=10)
Train the module parameters.
[ "Train", "the", "module", "parameters", "." ]
def fit(self, train_data, eval_data=None, eval_metric='acc', epoch_end_callback=None, batch_end_callback=None, kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),), eval_end_callback=None, eval_batch_end_callback=None, initializer=Uniform(0.01), arg_params=None, aux_params=None, allow_missing=False, force_rebind=False, force_init=False, begin_epoch=0, num_epoch=None, validation_metric=None, monitor=None, prefix=None, state=None): """Train the module parameters. Parameters ---------- train_data : DataIter eval_data : DataIter If not `None`, will be used as validation set and evaluate the performance after each epoch. eval_metric : str or EvalMetric Default `'acc'`. The performance measure used to display during training. epoch_end_callback : function or list of function Each callback will be called with the current `epoch`, `symbol`, `arg_params` and `aux_params`. batch_end_callback : function or list of function Each callback will be called with a `BatchEndParam`. kvstore : str or KVStore Default `'local'`. optimizer : str or Optimizer Default `'sgd'` optimizer_params : dict Default `(('learning_rate', 0.01),)`. The parameters for the optimizer constructor. The default value is not a `dict`, just to avoid pylint warning on dangerous default values. eval_end_callback : function or list of function These will be called at the end of each full evaluation, with the metrics over the entire evaluation set. eval_batch_end_callback : function or list of function These will be called at the end of each minibatch during evaluation initializer : Initializer Will be called to initialize the module parameters if not already initialized. arg_params : dict Default `None`, if not `None`, should be existing parameters from a trained model or loaded from a checkpoint (previously saved model). In this case, the value here will be used to initialize the module parameters, unless they are already initialized by the user via a call to `init_params` or `fit`. `arg_params` has higher priority to `initializer`. aux_params : dict Default `None`. Similar to `arg_params`, except for auxiliary states. allow_missing : bool Default `False`. Indicate whether we allow missing parameters when `arg_params` and `aux_params` are not `None`. If this is `True`, then the missing parameters will be initialized via the `initializer`. force_rebind : bool Default `False`. Whether to force rebinding the executors if already binded. force_init : bool Default `False`. Indicate whether we should force initialization even if the parameters are already initialized. begin_epoch : int Default `0`. Indicate the starting epoch. Usually, if we are resuming from a checkpoint saved at a previous training phase at epoch N, then we should specify this value as N+1. num_epoch : int Number of epochs to run training. 
Examples -------- An example of using fit for training:: >>> #Assume training dataIter and validation dataIter are ready >>> mod.fit(train_data=train_dataiter, eval_data=val_dataiter, optimizer_params={'learning_rate':0.01, 'momentum': 0.9}, num_epoch=10) """ assert num_epoch is not None, 'please specify number of epochs' self.bind(data_shapes=train_data.provide_data, label_shapes=train_data.provide_label, for_training=True, force_rebind=force_rebind) if monitor is not None: self.install_monitor(monitor) self.init_params(initializer=initializer, arg_params=arg_params, aux_params=aux_params, allow_missing=allow_missing, force_init=force_init) self.init_optimizer(kvstore=kvstore, optimizer=optimizer, optimizer_params=optimizer_params) if state is not None: self._curr_module.load_optimizer_states(state) if validation_metric is None: validation_metric = eval_metric if not isinstance(eval_metric, metric.EvalMetric): eval_metric = metric.create(eval_metric) ################################################################################ # training loop ################################################################################ for epoch in range(begin_epoch, num_epoch): tic = time.time() eval_metric.reset() ct = 0 for nbatch, data_batch in enumerate(train_data): if monitor is not None: monitor.tic() self.forward_backward(data_batch) self.update() ct = ct + 1 # print 'ct: ', ct # pdb.set_trace() if ct % 50 == 0: ct = 0 self.update_metric(eval_metric, data_batch.label) sys.stdout.flush() if monitor is not None: monitor.toc_print() if batch_end_callback is not None: batch_end_params = BatchEndParam(epoch=epoch, nbatch=nbatch, eval_metric=eval_metric, locals=locals()) for callback in _as_list(batch_end_callback): callback(batch_end_params) # one epoch of training is finished for name, val in eval_metric.get_name_value(): self.logger.info('Epoch[%d] Train-%s=%f', epoch, name, val) toc = time.time() self.logger.info('Epoch[%d] Time cost=%.3f', epoch, (toc-tic)) # sync aux params across devices arg_params, aux_params = self.get_params() self.set_params(arg_params, aux_params) if epoch_end_callback is not None: for callback in _as_list(epoch_end_callback): callback(epoch, self.symbol, arg_params, aux_params) #---------------------------------------- # evaluation on validation set if eval_data: res = self.score(eval_data, validation_metric, score_end_callback=eval_end_callback, batch_end_callback=eval_batch_end_callback, epoch=epoch) #TODO: pull this into default for name, val in res: self.logger.info('Epoch[%d] Validation-%s=%f', epoch, name, val) # end of 1 epoch, reset the data-iter for another epoch train_data.reset()
[ "def", "fit", "(", "self", ",", "train_data", ",", "eval_data", "=", "None", ",", "eval_metric", "=", "'acc'", ",", "epoch_end_callback", "=", "None", ",", "batch_end_callback", "=", "None", ",", "kvstore", "=", "'local'", ",", "optimizer", "=", "'sgd'", ",", "optimizer_params", "=", "(", "(", "'learning_rate'", ",", "0.01", ")", ",", ")", ",", "eval_end_callback", "=", "None", ",", "eval_batch_end_callback", "=", "None", ",", "initializer", "=", "Uniform", "(", "0.01", ")", ",", "arg_params", "=", "None", ",", "aux_params", "=", "None", ",", "allow_missing", "=", "False", ",", "force_rebind", "=", "False", ",", "force_init", "=", "False", ",", "begin_epoch", "=", "0", ",", "num_epoch", "=", "None", ",", "validation_metric", "=", "None", ",", "monitor", "=", "None", ",", "prefix", "=", "None", ",", "state", "=", "None", ")", ":", "assert", "num_epoch", "is", "not", "None", ",", "'please specify number of epochs'", "self", ".", "bind", "(", "data_shapes", "=", "train_data", ".", "provide_data", ",", "label_shapes", "=", "train_data", ".", "provide_label", ",", "for_training", "=", "True", ",", "force_rebind", "=", "force_rebind", ")", "if", "monitor", "is", "not", "None", ":", "self", ".", "install_monitor", "(", "monitor", ")", "self", ".", "init_params", "(", "initializer", "=", "initializer", ",", "arg_params", "=", "arg_params", ",", "aux_params", "=", "aux_params", ",", "allow_missing", "=", "allow_missing", ",", "force_init", "=", "force_init", ")", "self", ".", "init_optimizer", "(", "kvstore", "=", "kvstore", ",", "optimizer", "=", "optimizer", ",", "optimizer_params", "=", "optimizer_params", ")", "if", "state", "is", "not", "None", ":", "self", ".", "_curr_module", ".", "load_optimizer_states", "(", "state", ")", "if", "validation_metric", "is", "None", ":", "validation_metric", "=", "eval_metric", "if", "not", "isinstance", "(", "eval_metric", ",", "metric", ".", "EvalMetric", ")", ":", "eval_metric", "=", "metric", ".", "create", "(", "eval_metric", ")", "################################################################################", "# training loop", "################################################################################", "for", "epoch", "in", "range", "(", "begin_epoch", ",", "num_epoch", ")", ":", "tic", "=", "time", ".", "time", "(", ")", "eval_metric", ".", "reset", "(", ")", "ct", "=", "0", "for", "nbatch", ",", "data_batch", "in", "enumerate", "(", "train_data", ")", ":", "if", "monitor", "is", "not", "None", ":", "monitor", ".", "tic", "(", ")", "self", ".", "forward_backward", "(", "data_batch", ")", "self", ".", "update", "(", ")", "ct", "=", "ct", "+", "1", "# print 'ct: ', ct", "# pdb.set_trace()", "if", "ct", "%", "50", "==", "0", ":", "ct", "=", "0", "self", ".", "update_metric", "(", "eval_metric", ",", "data_batch", ".", "label", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "monitor", "is", "not", "None", ":", "monitor", ".", "toc_print", "(", ")", "if", "batch_end_callback", "is", "not", "None", ":", "batch_end_params", "=", "BatchEndParam", "(", "epoch", "=", "epoch", ",", "nbatch", "=", "nbatch", ",", "eval_metric", "=", "eval_metric", ",", "locals", "=", "locals", "(", ")", ")", "for", "callback", "in", "_as_list", "(", "batch_end_callback", ")", ":", "callback", "(", "batch_end_params", ")", "# one epoch of training is finished", "for", "name", ",", "val", "in", "eval_metric", ".", "get_name_value", "(", ")", ":", "self", ".", "logger", ".", "info", "(", "'Epoch[%d] Train-%s=%f'", ",", "epoch", ",", "name", ",", "val", ")", 
"toc", "=", "time", ".", "time", "(", ")", "self", ".", "logger", ".", "info", "(", "'Epoch[%d] Time cost=%.3f'", ",", "epoch", ",", "(", "toc", "-", "tic", ")", ")", "# sync aux params across devices", "arg_params", ",", "aux_params", "=", "self", ".", "get_params", "(", ")", "self", ".", "set_params", "(", "arg_params", ",", "aux_params", ")", "if", "epoch_end_callback", "is", "not", "None", ":", "for", "callback", "in", "_as_list", "(", "epoch_end_callback", ")", ":", "callback", "(", "epoch", ",", "self", ".", "symbol", ",", "arg_params", ",", "aux_params", ")", "#----------------------------------------", "# evaluation on validation set", "if", "eval_data", ":", "res", "=", "self", ".", "score", "(", "eval_data", ",", "validation_metric", ",", "score_end_callback", "=", "eval_end_callback", ",", "batch_end_callback", "=", "eval_batch_end_callback", ",", "epoch", "=", "epoch", ")", "#TODO: pull this into default", "for", "name", ",", "val", "in", "res", ":", "self", ".", "logger", ".", "info", "(", "'Epoch[%d] Validation-%s=%f'", ",", "epoch", ",", "name", ",", "val", ")", "# end of 1 epoch, reset the data-iter for another epoch", "train_data", ".", "reset", "(", ")" ]
https://github.com/dingjiansw101/RoITransformer_DOTA/blob/9125055aed313dde7a68882e9931c9ea58c6f6ab/faster_rcnn/core/module.py#L884-L1026
open-mmlab/mmdetection
ff9bc39913cb3ff5dde79d3933add7dc2561bab7
mmdet/core/mask/structures.py
python
polygon_to_bitmap
(polygons, height, width)
return bitmap_mask
Convert masks from the form of polygons to bitmaps. Args: polygons (list[ndarray]): masks in polygon representation height (int): mask height width (int): mask width Return: ndarray: the converted masks in bitmap representation
Convert masks from the form of polygons to bitmaps.
[ "Convert", "masks", "from", "the", "form", "of", "polygons", "to", "bitmaps", "." ]
def polygon_to_bitmap(polygons, height, width): """Convert masks from the form of polygons to bitmaps. Args: polygons (list[ndarray]): masks in polygon representation height (int): mask height width (int): mask width Return: ndarray: the converted masks in bitmap representation """ rles = maskUtils.frPyObjects(polygons, height, width) rle = maskUtils.merge(rles) bitmap_mask = maskUtils.decode(rle).astype(np.bool) return bitmap_mask
[ "def", "polygon_to_bitmap", "(", "polygons", ",", "height", ",", "width", ")", ":", "rles", "=", "maskUtils", ".", "frPyObjects", "(", "polygons", ",", "height", ",", "width", ")", "rle", "=", "maskUtils", ".", "merge", "(", "rles", ")", "bitmap_mask", "=", "maskUtils", ".", "decode", "(", "rle", ")", ".", "astype", "(", "np", ".", "bool", ")", "return", "bitmap_mask" ]
https://github.com/open-mmlab/mmdetection/blob/ff9bc39913cb3ff5dde79d3933add7dc2561bab7/mmdet/core/mask/structures.py#L1057-L1071
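A usage sketch rasterizing a single triangle given as a flat [x0, y0, x1, y1, ...] list; note that on NumPy >= 1.24 the deprecated np.bool alias used above is removed, so np.bool_ would be needed instead.

import numpy as np

triangle = [np.array([1.0, 1.0, 8.0, 1.0, 4.0, 8.0])]  # one polygon
mask = polygon_to_bitmap(triangle, height=10, width=10)
print(mask.shape, mask.dtype, mask.sum())  # (10, 10), bool, filled pixels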
spyder-ide/spyder
55da47c032dfcf519600f67f8b30eab467f965e7
external-deps/spyder-kernels/spyder_kernels/console/kernel.py
python
SpyderKernel.is_defined
(self, obj, force_import=False)
return isdefined(obj, force_import=force_import, namespace=ns)
Return True if object is defined in current namespace
Return True if object is defined in current namespace
[ "Return", "True", "if", "object", "is", "defined", "in", "current", "namespace" ]
def is_defined(self, obj, force_import=False): """Return True if object is defined in current namespace""" from spyder_kernels.utils.dochelpers import isdefined ns = self._get_current_namespace(with_magics=True) return isdefined(obj, force_import=force_import, namespace=ns)
[ "def", "is_defined", "(", "self", ",", "obj", ",", "force_import", "=", "False", ")", ":", "from", "spyder_kernels", ".", "utils", ".", "dochelpers", "import", "isdefined", "ns", "=", "self", ".", "_get_current_namespace", "(", "with_magics", "=", "True", ")", "return", "isdefined", "(", "obj", ",", "force_import", "=", "force_import", ",", "namespace", "=", "ns", ")" ]
https://github.com/spyder-ide/spyder/blob/55da47c032dfcf519600f67f8b30eab467f965e7/external-deps/spyder-kernels/spyder_kernels/console/kernel.py#L403-L408
getalp/Flaubert
ded1cf89820a22dbf885c85ba3dccc8ab360681b
xlm/trainer.py
python
Trainer.__init__
(self, data, params)
Initialize trainer.
Initialize trainer.
[ "Initialize", "trainer", "." ]
def __init__(self, data, params): """ Initialize trainer. """ # epoch / iteration size self.epoch_size = params.epoch_size if self.epoch_size == -1: self.epoch_size = self.data assert self.epoch_size > 0 # data iterators self.iterators = {} # list memory components self.memory_list = [] self.ffn_list = [] for name in self.MODEL_NAMES: # find_modules(getattr(self, name), f'self.{name}', HashingMemory, self.memory_list) find_modules(getattr(self, name), "self.{}".format(name), HashingMemory, self.memory_list) # find_modules(getattr(self, name), f'self.{name}', TransformerFFN, self.ffn_list) find_modules(getattr(self, name), "self.{}".format(name), TransformerFFN, self.ffn_list) logger.info("Found %i memories." % len(self.memory_list)) logger.info("Found %i FFN." % len(self.ffn_list)) # set parameters self.set_parameters() # float16 / distributed (no AMP) assert params.amp >= 1 or not params.fp16 assert params.amp >= 0 or params.accumulate_gradients == 1 if params.multi_gpu and params.amp == -1: logger.info("Using nn.parallel.DistributedDataParallel ...") find_unused_parameters = False if params.layerdrop > 0.0: find_unused_parameters = True for name in self.MODEL_NAMES: setattr(self, name, nn.parallel.DistributedDataParallel(getattr(self, name), device_ids=[params.local_rank], output_device=params.local_rank, broadcast_buffers=True, find_unused_parameters=find_unused_parameters)) # set optimizers self.set_optimizers() # float16 / distributed (AMP) if params.amp >= 0: self.init_amp() if params.multi_gpu: if params.use_apex: logger.info("Using apex.parallel.DistributedDataParallel ...") for name in self.MODEL_NAMES: setattr(self, name, apex.parallel.DistributedDataParallel(getattr(self, name), delay_allreduce=True)) else: logger.info("Using nn.parallel.DistributedDataParallel ...") find_unused_parameters = False if params.layerdrop > 0.0: find_unused_parameters = True for name in self.MODEL_NAMES: setattr(self, name, nn.parallel.DistributedDataParallel(getattr(self, name), device_ids=[params.local_rank], output_device=params.local_rank, broadcast_buffers=True, find_unused_parameters=find_unused_parameters)) # stopping criterion used for early stopping if params.stopping_criterion != '': split = params.stopping_criterion.split(',') assert len(split) == 2 and split[1].isdigit() self.decrease_counts_max = int(split[1]) self.decrease_counts = 0 if split[0][0] == '_': self.stopping_criterion = (split[0][1:], False) else: self.stopping_criterion = (split[0], True) self.best_stopping_criterion = -1e12 if self.stopping_criterion[1] else 1e12 else: self.stopping_criterion = None self.best_stopping_criterion = None # probability of masking out / randomize / not modify words to predict params.pred_probs = torch.FloatTensor([params.word_mask, params.word_keep, params.word_rand]) # probabilty to predict a word counts = np.array(list(self.data['dico'].counts.values())) params.mask_scores = np.maximum(counts, 1) ** -params.sample_alpha params.mask_scores[params.pad_index] = 0 # do not predict <PAD> index params.mask_scores[counts == 0] = 0 # do not predict special tokens # validation metrics self.metrics = [] metrics = [m for m in params.validation_metrics.split(',') if m != ''] for m in metrics: m = (m[1:], False) if m[0] == '_' else (m, True) self.metrics.append(m) self.best_metrics = {metric: (-1e12 if biggest else 1e12) for (metric, biggest) in self.metrics} # training statistics self.epoch = 0 self.n_iter = 0 self.n_total_iter = 0 self.n_sentences = 0 self.stats = OrderedDict( [('processed_s', 0), 
('processed_w', 0)] + [('CLM-%s' % l, []) for l in params.langs] + [('CLM-%s-%s' % (l1, l2), []) for l1, l2 in data['para'].keys()] + [('CLM-%s-%s' % (l2, l1), []) for l1, l2 in data['para'].keys()] + [('MLM-%s' % l, []) for l in params.langs] + [('MLM-%s-%s' % (l1, l2), []) for l1, l2 in data['para'].keys()] + [('MLM-%s-%s' % (l2, l1), []) for l1, l2 in data['para'].keys()] + [('PC-%s-%s' % (l1, l2), []) for l1, l2 in params.pc_steps] + [('AE-%s' % lang, []) for lang in params.ae_steps] + [('MT-%s-%s' % (l1, l2), []) for l1, l2 in params.mt_steps] + [('BT-%s-%s-%s' % (l1, l2, l3), []) for l1, l2, l3 in params.bt_steps] ) self.last_time = time.time() # reload potential checkpoints self.reload_checkpoint() # initialize lambda coefficients and their configurations parse_lambda_config(params)
[ "def", "__init__", "(", "self", ",", "data", ",", "params", ")", ":", "# epoch / iteration size", "self", ".", "epoch_size", "=", "params", ".", "epoch_size", "if", "self", ".", "epoch_size", "==", "-", "1", ":", "self", ".", "epoch_size", "=", "self", ".", "data", "assert", "self", ".", "epoch_size", ">", "0", "# data iterators", "self", ".", "iterators", "=", "{", "}", "# list memory components", "self", ".", "memory_list", "=", "[", "]", "self", ".", "ffn_list", "=", "[", "]", "for", "name", "in", "self", ".", "MODEL_NAMES", ":", "# find_modules(getattr(self, name), f'self.{name}', HashingMemory, self.memory_list)", "find_modules", "(", "getattr", "(", "self", ",", "name", ")", ",", "\"self.{}\"", ".", "format", "(", "name", ")", ",", "HashingMemory", ",", "self", ".", "memory_list", ")", "# find_modules(getattr(self, name), f'self.{name}', TransformerFFN, self.ffn_list)", "find_modules", "(", "getattr", "(", "self", ",", "name", ")", ",", "\"self.{}\"", ".", "format", "(", "name", ")", ",", "TransformerFFN", ",", "self", ".", "ffn_list", ")", "logger", ".", "info", "(", "\"Found %i memories.\"", "%", "len", "(", "self", ".", "memory_list", ")", ")", "logger", ".", "info", "(", "\"Found %i FFN.\"", "%", "len", "(", "self", ".", "ffn_list", ")", ")", "# set parameters", "self", ".", "set_parameters", "(", ")", "# float16 / distributed (no AMP)", "assert", "params", ".", "amp", ">=", "1", "or", "not", "params", ".", "fp16", "assert", "params", ".", "amp", ">=", "0", "or", "params", ".", "accumulate_gradients", "==", "1", "if", "params", ".", "multi_gpu", "and", "params", ".", "amp", "==", "-", "1", ":", "logger", ".", "info", "(", "\"Using nn.parallel.DistributedDataParallel ...\"", ")", "find_unused_parameters", "=", "False", "if", "params", ".", "layerdrop", ">", "0.0", ":", "find_unused_parameters", "=", "True", "for", "name", "in", "self", ".", "MODEL_NAMES", ":", "setattr", "(", "self", ",", "name", ",", "nn", ".", "parallel", ".", "DistributedDataParallel", "(", "getattr", "(", "self", ",", "name", ")", ",", "device_ids", "=", "[", "params", ".", "local_rank", "]", ",", "output_device", "=", "params", ".", "local_rank", ",", "broadcast_buffers", "=", "True", ",", "find_unused_parameters", "=", "find_unused_parameters", ")", ")", "# set optimizers", "self", ".", "set_optimizers", "(", ")", "# float16 / distributed (AMP)", "if", "params", ".", "amp", ">=", "0", ":", "self", ".", "init_amp", "(", ")", "if", "params", ".", "multi_gpu", ":", "if", "params", ".", "use_apex", ":", "logger", ".", "info", "(", "\"Using apex.parallel.DistributedDataParallel ...\"", ")", "for", "name", "in", "self", ".", "MODEL_NAMES", ":", "setattr", "(", "self", ",", "name", ",", "apex", ".", "parallel", ".", "DistributedDataParallel", "(", "getattr", "(", "self", ",", "name", ")", ",", "delay_allreduce", "=", "True", ")", ")", "else", ":", "logger", ".", "info", "(", "\"Using nn.parallel.DistributedDataParallel ...\"", ")", "find_unused_parameters", "=", "False", "if", "params", ".", "layerdrop", ">", "0.0", ":", "find_unused_parameters", "=", "True", "for", "name", "in", "self", ".", "MODEL_NAMES", ":", "setattr", "(", "self", ",", "name", ",", "nn", ".", "parallel", ".", "DistributedDataParallel", "(", "getattr", "(", "self", ",", "name", ")", ",", "device_ids", "=", "[", "params", ".", "local_rank", "]", ",", "output_device", "=", "params", ".", "local_rank", ",", "broadcast_buffers", "=", "True", ",", "find_unused_parameters", "=", "find_unused_parameters", ")", ")", "# stopping criterion used for 
early stopping", "if", "params", ".", "stopping_criterion", "!=", "''", ":", "split", "=", "params", ".", "stopping_criterion", ".", "split", "(", "','", ")", "assert", "len", "(", "split", ")", "==", "2", "and", "split", "[", "1", "]", ".", "isdigit", "(", ")", "self", ".", "decrease_counts_max", "=", "int", "(", "split", "[", "1", "]", ")", "self", ".", "decrease_counts", "=", "0", "if", "split", "[", "0", "]", "[", "0", "]", "==", "'_'", ":", "self", ".", "stopping_criterion", "=", "(", "split", "[", "0", "]", "[", "1", ":", "]", ",", "False", ")", "else", ":", "self", ".", "stopping_criterion", "=", "(", "split", "[", "0", "]", ",", "True", ")", "self", ".", "best_stopping_criterion", "=", "-", "1e12", "if", "self", ".", "stopping_criterion", "[", "1", "]", "else", "1e12", "else", ":", "self", ".", "stopping_criterion", "=", "None", "self", ".", "best_stopping_criterion", "=", "None", "# probability of masking out / randomize / not modify words to predict", "params", ".", "pred_probs", "=", "torch", ".", "FloatTensor", "(", "[", "params", ".", "word_mask", ",", "params", ".", "word_keep", ",", "params", ".", "word_rand", "]", ")", "# probabilty to predict a word", "counts", "=", "np", ".", "array", "(", "list", "(", "self", ".", "data", "[", "'dico'", "]", ".", "counts", ".", "values", "(", ")", ")", ")", "params", ".", "mask_scores", "=", "np", ".", "maximum", "(", "counts", ",", "1", ")", "**", "-", "params", ".", "sample_alpha", "params", ".", "mask_scores", "[", "params", ".", "pad_index", "]", "=", "0", "# do not predict <PAD> index", "params", ".", "mask_scores", "[", "counts", "==", "0", "]", "=", "0", "# do not predict special tokens", "# validation metrics", "self", ".", "metrics", "=", "[", "]", "metrics", "=", "[", "m", "for", "m", "in", "params", ".", "validation_metrics", ".", "split", "(", "','", ")", "if", "m", "!=", "''", "]", "for", "m", "in", "metrics", ":", "m", "=", "(", "m", "[", "1", ":", "]", ",", "False", ")", "if", "m", "[", "0", "]", "==", "'_'", "else", "(", "m", ",", "True", ")", "self", ".", "metrics", ".", "append", "(", "m", ")", "self", ".", "best_metrics", "=", "{", "metric", ":", "(", "-", "1e12", "if", "biggest", "else", "1e12", ")", "for", "(", "metric", ",", "biggest", ")", "in", "self", ".", "metrics", "}", "# training statistics", "self", ".", "epoch", "=", "0", "self", ".", "n_iter", "=", "0", "self", ".", "n_total_iter", "=", "0", "self", ".", "n_sentences", "=", "0", "self", ".", "stats", "=", "OrderedDict", "(", "[", "(", "'processed_s'", ",", "0", ")", ",", "(", "'processed_w'", ",", "0", ")", "]", "+", "[", "(", "'CLM-%s'", "%", "l", ",", "[", "]", ")", "for", "l", "in", "params", ".", "langs", "]", "+", "[", "(", "'CLM-%s-%s'", "%", "(", "l1", ",", "l2", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", "in", "data", "[", "'para'", "]", ".", "keys", "(", ")", "]", "+", "[", "(", "'CLM-%s-%s'", "%", "(", "l2", ",", "l1", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", "in", "data", "[", "'para'", "]", ".", "keys", "(", ")", "]", "+", "[", "(", "'MLM-%s'", "%", "l", ",", "[", "]", ")", "for", "l", "in", "params", ".", "langs", "]", "+", "[", "(", "'MLM-%s-%s'", "%", "(", "l1", ",", "l2", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", "in", "data", "[", "'para'", "]", ".", "keys", "(", ")", "]", "+", "[", "(", "'MLM-%s-%s'", "%", "(", "l2", ",", "l1", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", "in", "data", "[", "'para'", "]", ".", "keys", "(", ")", "]", "+", "[", "(", "'PC-%s-%s'", "%", "(", "l1", ",", "l2", ")", ",", "[", "]", ")", 
"for", "l1", ",", "l2", "in", "params", ".", "pc_steps", "]", "+", "[", "(", "'AE-%s'", "%", "lang", ",", "[", "]", ")", "for", "lang", "in", "params", ".", "ae_steps", "]", "+", "[", "(", "'MT-%s-%s'", "%", "(", "l1", ",", "l2", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", "in", "params", ".", "mt_steps", "]", "+", "[", "(", "'BT-%s-%s-%s'", "%", "(", "l1", ",", "l2", ",", "l3", ")", ",", "[", "]", ")", "for", "l1", ",", "l2", ",", "l3", "in", "params", ".", "bt_steps", "]", ")", "self", ".", "last_time", "=", "time", ".", "time", "(", ")", "# reload potential checkpoints", "self", ".", "reload_checkpoint", "(", ")", "# initialize lambda coefficients and their configurations", "parse_lambda_config", "(", "params", ")" ]
https://github.com/getalp/Flaubert/blob/ded1cf89820a22dbf885c85ba3dccc8ab360681b/xlm/trainer.py#L34-L155
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/tools/inject/lib/core/common.py
python
Backend.isVersionGreaterOrEqualThan
(version)
return Backend.getVersion() is not None and str(Backend.getVersion()) >= str(version)
[]
def isVersionGreaterOrEqualThan(version): return Backend.getVersion() is not None and str(Backend.getVersion()) >= str(version)
[ "def", "isVersionGreaterOrEqualThan", "(", "version", ")", ":", "return", "Backend", ".", "getVersion", "(", ")", "is", "not", "None", "and", "str", "(", "Backend", ".", "getVersion", "(", ")", ")", ">=", "str", "(", "version", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/tools/inject/lib/core/common.py#L532-L533
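Caveat worth noting: the comparison above is lexicographic on strings, so '9' >= '10' evaluates True. A numeric-tuple sketch (assuming plain dotted-integer versions, no suffix handling) avoids that.

def version_tuple(version):
    # '10.2' -> (10, 2); purely illustrative helper.
    return tuple(int(part) for part in str(version).split('.'))

print('9' >= '10')                                 # True  (string order)
print(version_tuple('9') >= version_tuple('10'))   # False (numeric order)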
balajiln/mondrianforest
eac11fe085160daf56f4e8c6226d0400b78c3908
src/mondrianforest.py
python
MondrianTree.gen_node_ids_print
(self)
generate binary string label for each node root_node is denoted by empty string "e" all other node labels are defined as follows: left(j) = j0, right(j) = j1 e.g. left and right child of root_node are 0 and 1 respectively, left and right of node 0 are 00 and 01 respectively and so on.
generate binary string label for each node root_node is denoted by empty string "e" all other node labels are defined as follows: left(j) = j0, right(j) = j1 e.g. left and right child of root_node are 0 and 1 respectively, left and right of node 0 are 00 and 01 respectively and so on.
[ "generate", "binary", "string", "label", "for", "each", "node", "root_node", "is", "denoted", "by", "empty", "string", "e", "all", "other", "node", "labels", "are", "defined", "as", "follows", ":", "left", "(", "j", ")", "=", "j0", "right", "(", "j", ")", "=", "j1", "e", ".", "g", ".", "left", "and", "right", "child", "of", "root_node", "are", "0", "and", "1", "respectively", "left", "and", "right", "of", "node", "0", "are", "00", "and", "01", "respectively", "and", "so", "on", "." ]
def gen_node_ids_print(self): """ generate binary string label for each node root_node is denoted by empty string "e" all other node labels are defined as follows: left(j) = j0, right(j) = j1 e.g. left and right child of root_node are 0 and 1 respectively, left and right of node 0 are 00 and 01 respectively and so on. """ node_ids = [self.root] self.node_ids_print = {self.root: ''} while node_ids: node_id = node_ids.pop(0) try: feat_id, split = self.node_info[node_id] left, right = node_id.left, node_id.right node_ids.append(left) node_ids.append(right) self.node_ids_print[left] = self.node_ids_print[node_id] + '0' self.node_ids_print[right] = self.node_ids_print[node_id] + '1' except KeyError: continue
[ "def", "gen_node_ids_print", "(", "self", ")", ":", "node_ids", "=", "[", "self", ".", "root", "]", "self", ".", "node_ids_print", "=", "{", "self", ".", "root", ":", "''", "}", "while", "node_ids", ":", "node_id", "=", "node_ids", ".", "pop", "(", "0", ")", "try", ":", "feat_id", ",", "split", "=", "self", ".", "node_info", "[", "node_id", "]", "left", ",", "right", "=", "node_id", ".", "left", ",", "node_id", ".", "right", "node_ids", ".", "append", "(", "left", ")", "node_ids", ".", "append", "(", "right", ")", "self", ".", "node_ids_print", "[", "left", "]", "=", "self", ".", "node_ids_print", "[", "node_id", "]", "+", "'0'", "self", ".", "node_ids_print", "[", "right", "]", "=", "self", ".", "node_ids_print", "[", "node_id", "]", "+", "'1'", "except", "KeyError", ":", "continue" ]
https://github.com/balajiln/mondrianforest/blob/eac11fe085160daf56f4e8c6226d0400b78c3908/src/mondrianforest.py#L317-L337
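A standalone sketch of the same labelling scheme on a toy tree (integers as node ids, a dict for child lookup): breadth-first from the root, appending '0' for a left child and '1' for a right child.

from collections import deque

tree = {1: (2, 3), 2: (4, 5)}          # parent -> (left, right)
labels, queue = {1: ''}, deque([1])
while queue:
    node = queue.popleft()
    for bit, child in zip('01', tree.get(node, ())):
        labels[child] = labels[node] + bit
        queue.append(child)
print(labels)  # {1: '', 2: '0', 3: '1', 4: '00', 5: '01'}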
demisto/content
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandard/CarbonBlackEndpointStandard.py
python
Client.search_alerts_request
(self, suffix_url_path: str = None, minimum_severity: int = None, create_time: Dict = None, policy_id: List = None, device_username: List = None, device_id: List = None, query: str = None, alert_category: List = None, sort_field: str = "create_time", sort_order: str = "ASC", limit: int = 50)
return self._http_request('POST', suffix_url, headers=self.headers, json_data=body)
Searches for Carbon Black alerts using the '/appservices/v6/orgs/{org_key}/alerts/_search' API endpoint

All the parameters are passed directly to the API as HTTP POST parameters in the request

:type suffix_url_path: ``Optional[str]``
:param suffix_url_path: type of the alert to search for. Options are: 'all' or 'cbanalytics' or 'devicecontrol'

:type minimum_severity: ``Optional[int]``
:param minimum_severity: the minimum severity of the alert to search for.

:type create_time: ``Optional[Dict]``
:param create_time: A dict representing the time the alert was created.
    The syntax is {"start": "<dateTime>", "range": "<string>", "end": "<dateTime>" }.
    For example: {"start": "2010-09-25T00:10:50.00", "end": "2015-01-20T10:40:00.00Z", "range": "-1d"}.
    (s for seconds, m for minutes, h for hours, d for days, w for weeks, y for years).

:type policy_id: ``Optional[list]``
:param policy_id: The identifier for the policy associated with the device at the time of the alert.

:type device_username: ``Optional[list]``
:param device_username: The username of the logged on user during the alert.
    If the user is not available then it may be populated with the device owner.

:type device_id: ``Optional[list]``
:param device_id: The identifier assigned by Carbon Black Cloud to the device associated with the alert.

:type query: ``Optional[str]``
:param query: Query in lucene syntax and/or including value searches.

:type alert_category: ``Optional[list]``
:param alert_category: The category of the alert. Options are: 'THREAT' or 'MONITORED'

:type sort_field: ``Optional[str]``
:param sort_field: The field to sort by

:type sort_order: ``Optional[str]``
:param sort_order: The sort order (ASC, DESC)

:type limit: ``Optional[int]``
:param limit: The number of results to return. default is 50.

:return: Dict containing a List with the found Carbon Black alerts as dicts
:rtype: ``Dict[str, Any]``
Searches for Carbon Black alerts using the '/appservices/v6/orgs/{org_key}/alerts/_search' API endpoint
[ "Searches", "for", "Carbon", "Black", "alerts", "using", "the", "/", "appservices", "/", "v6", "/", "orgs", "/", "{", "org_key", "}", "/", "alerts", "/", "_search", "API", "endpoint" ]
def search_alerts_request(self, suffix_url_path: str = None, minimum_severity: int = None,
                          create_time: Dict = None, policy_id: List = None, device_username: List = None,
                          device_id: List = None, query: str = None, alert_category: List = None,
                          sort_field: str = "create_time", sort_order: str = "ASC", limit: int = 50) -> dict:
    """Searches for Carbon Black alerts using the '/appservices/v6/orgs/{org_key}/alerts/_search' API endpoint

    All the parameters are passed directly to the API as HTTP POST parameters in the request

    :type suffix_url_path: ``Optional[str]``
    :param suffix_url_path: type of the alert to search for. Options are: 'all' or 'cbanalytics' or 'devicecontrol'

    :type minimum_severity: ``Optional[int]``
    :param minimum_severity: the minimum severity of the alert to search for.

    :type create_time: ``Optional[Dict]``
    :param create_time: A dict representing the time the alert was created.
        The syntax is {"start": "<dateTime>", "range": "<string>", "end": "<dateTime>" }.
        For example: {"start": "2010-09-25T00:10:50.00", "end": "2015-01-20T10:40:00.00Z", "range": "-1d"}.
        (s for seconds, m for minutes, h for hours, d for days, w for weeks, y for years).

    :type policy_id: ``Optional[list]``
    :param policy_id: The identifier for the policy associated with the device at the time of the alert.

    :type device_username: ``Optional[list]``
    :param device_username: The username of the logged on user during the alert.
        If the user is not available then it may be populated with the device owner.

    :type device_id: ``Optional[list]``
    :param device_id: The identifier assigned by Carbon Black Cloud to the device associated with the alert.

    :type query: ``Optional[str]``
    :param query: Query in lucene syntax and/or including value searches.

    :type alert_category: ``Optional[list]``
    :param alert_category: The category of the alert. Options are: 'THREAT' or 'MONITORED'

    :type sort_field: ``Optional[str]``
    :param sort_field: The field to sort by

    :type sort_order: ``Optional[str]``
    :param sort_order: The sort order (ASC, DESC)

    :type limit: ``Optional[int]``
    :param limit: The number of results to return. default is 50.

    :return: Dict containing a List with the found Carbon Black alerts as dicts
    :rtype: ``Dict[str, Any]``
    """
    if not suffix_url_path or suffix_url_path == "all":
        suffix_url = f'appservices/v6/orgs/{self.organization_key}/alerts/_search'
    else:
        suffix_url = f'appservices/v6/orgs/{self.organization_key}/alerts/{suffix_url_path}/_search'
    body = {
        'criteria': assign_params(
            minimum_severity=minimum_severity,
            create_time=create_time,
            policy_id=policy_id,
            device_username=device_username,
            device_id=device_id,
            category=alert_category
        ),
        'sort': [
            {
                'field': sort_field,
                'order': sort_order
            }
        ],
        'rows': limit,
    }
    if query:
        body['query'] = query
    return self._http_request('POST', suffix_url, headers=self.headers, json_data=body)
[ "def", "search_alerts_request", "(", "self", ",", "suffix_url_path", ":", "str", "=", "None", ",", "minimum_severity", ":", "int", "=", "None", ",", "create_time", ":", "Dict", "=", "None", ",", "policy_id", ":", "List", "=", "None", ",", "device_username", ":", "List", "=", "None", ",", "device_id", ":", "List", "=", "None", ",", "query", ":", "str", "=", "None", ",", "alert_category", ":", "List", "=", "None", ",", "sort_field", ":", "str", "=", "\"create_time\"", ",", "sort_order", ":", "str", "=", "\"ASC\"", ",", "limit", ":", "int", "=", "50", ")", "->", "dict", ":", "if", "not", "suffix_url_path", "or", "suffix_url_path", "==", "\"all\"", ":", "suffix_url", "=", "f'appservices/v6/orgs/{self.organization_key}/alerts/_search'", "else", ":", "suffix_url", "=", "f'appservices/v6/orgs/{self.organization_key}/alerts/{suffix_url_path}/_search'", "body", "=", "{", "'criteria'", ":", "assign_params", "(", "minimum_severity", "=", "minimum_severity", ",", "create_time", "=", "create_time", ",", "policy_id", "=", "policy_id", ",", "device_username", "=", "device_username", ",", "device_id", "=", "device_id", ",", "category", "=", "alert_category", ")", ",", "'sort'", ":", "[", "{", "'field'", ":", "sort_field", ",", "'order'", ":", "sort_order", "}", "]", ",", "'rows'", ":", "limit", ",", "}", "if", "query", ":", "body", "[", "'query'", "]", "=", "query", "return", "self", ".", "_http_request", "(", "'POST'", ",", "suffix_url", ",", "headers", "=", "self", ".", "headers", ",", "json_data", "=", "body", ")" ]
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandard/CarbonBlackEndpointStandard.py#L65-L137
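A hypothetical call plus the request body it would produce; the client instance, org key, and filter values are placeholders.

response = client.search_alerts_request(
    suffix_url_path='cbanalytics',
    minimum_severity=7,
    create_time={'range': '-1d'},
    alert_category=['THREAT'],
    limit=25,
)
# POST appservices/v6/orgs/{org_key}/alerts/cbanalytics/_search
# {"criteria": {"minimum_severity": 7, "create_time": {"range": "-1d"},
#  "category": ["THREAT"]},
#  "sort": [{"field": "create_time", "order": "ASC"}], "rows": 25}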
kanzure/nanoengineer
874e4c9f8a9190f093625b267f9767e19f82e6c4
cad/src/dna/commands/OrderDna/OrderDna_PropertyManager.py
python
OrderDna_PropertyManager._update_UI_do_updates
(self)
return
Overrides superclass method.
Overrides superclass method.
[ "Overrides", "superclass", "method", "." ]
def _update_UI_do_updates(self): """ Overrides superclass method. """ self.update_includeStrands() return
[ "def", "_update_UI_do_updates", "(", "self", ")", ":", "self", ".", "update_includeStrands", "(", ")", "return" ]
https://github.com/kanzure/nanoengineer/blob/874e4c9f8a9190f093625b267f9767e19f82e6c4/cad/src/dna/commands/OrderDna/OrderDna_PropertyManager.py#L257-L262
CuriousAI/mean-teacher
546348ff863c998c26be4339021425df973b4a36
tensorflow/mean_teacher/nn.py
python
flip_randomly
(inputs, horizontally, vertically, is_training, name=None)
Flip images randomly. Make separate flipping decision for each image.

Args:
    inputs (4-D tensor): Input images (batch size, height, width, channels).
    horizontally (bool): If True, flip horizontally with 50% probability. Otherwise, don't.
    vertically (bool): If True, flip vertically with 50% probability. Otherwise, don't.
    is_training (bool): If False, no flip is performed.
    name: A name for the operation.
Flip images randomly. Make separate flipping decision for each image.
[ "Flip", "images", "randomly", ".", "Make", "separate", "flipping", "decision", "for", "each", "image", "." ]
def flip_randomly(inputs, horizontally, vertically, is_training, name=None):
    """Flip images randomly. Make separate flipping decision for each image.

    Args:
        inputs (4-D tensor): Input images (batch size, height, width, channels).
        horizontally (bool): If True, flip horizontally with 50% probability. Otherwise, don't.
        vertically (bool): If True, flip vertically with 50% probability. Otherwise, don't.
        is_training (bool): If False, no flip is performed.
        name: A name for the operation.
    """
    with tf.name_scope(name, "flip_randomly") as scope:
        batch_size, height, width, _ = tf.unstack(tf.shape(inputs))
        vertical_choices = (tf.random_uniform([batch_size], 0, 2, tf.int32) *
                            tf.to_int32(vertically) *
                            tf.to_int32(is_training))
        horizontal_choices = (tf.random_uniform([batch_size], 0, 2, tf.int32) *
                              tf.to_int32(horizontally) *
                              tf.to_int32(is_training))
        vertically_flipped = tf.reverse_sequence(inputs, vertical_choices * height, 1)
        both_flipped = tf.reverse_sequence(vertically_flipped, horizontal_choices * width, 2)
        return tf.identity(both_flipped, name=scope)
[ "def", "flip_randomly", "(", "inputs", ",", "horizontally", ",", "vertically", ",", "is_training", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "name_scope", "(", "name", ",", "\"flip_randomly\"", ")", "as", "scope", ":", "batch_size", ",", "height", ",", "width", ",", "_", "=", "tf", ".", "unstack", "(", "tf", ".", "shape", "(", "inputs", ")", ")", "vertical_choices", "=", "(", "tf", ".", "random_uniform", "(", "[", "batch_size", "]", ",", "0", ",", "2", ",", "tf", ".", "int32", ")", "*", "tf", ".", "to_int32", "(", "vertically", ")", "*", "tf", ".", "to_int32", "(", "is_training", ")", ")", "horizontal_choices", "=", "(", "tf", ".", "random_uniform", "(", "[", "batch_size", "]", ",", "0", ",", "2", ",", "tf", ".", "int32", ")", "*", "tf", ".", "to_int32", "(", "horizontally", ")", "*", "tf", ".", "to_int32", "(", "is_training", ")", ")", "vertically_flipped", "=", "tf", ".", "reverse_sequence", "(", "inputs", ",", "vertical_choices", "*", "height", ",", "1", ")", "both_flipped", "=", "tf", ".", "reverse_sequence", "(", "vertically_flipped", ",", "horizontal_choices", "*", "width", ",", "2", ")", "return", "tf", ".", "identity", "(", "both_flipped", ",", "name", "=", "scope", ")" ]
https://github.com/CuriousAI/mean-teacher/blob/546348ff863c998c26be4339021425df973b4a36/tensorflow/mean_teacher/nn.py#L31-L51
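A sketch of how flip_randomly might be wired into a TF1-style graph, consistent with the legacy tf.to_int32/tf.random_uniform calls in the record above; the placeholder shapes are assumptions:

import tensorflow as tf  # TF1-era API, as used by the function above

images = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
is_training = tf.placeholder(tf.bool, shape=[])
# Flip each image in the batch horizontally with 50% probability, at training time only.
augmented = flip_randomly(images, horizontally=True, vertically=False,
                          is_training=is_training)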
gammapy/gammapy
735b25cd5bbed35e2004d633621896dcd5295e8b
gammapy/data/event_list.py
python
EventList.plot_time
(self, ax=None, **kwargs)
return ax
Plots an event rate time curve. Parameters ---------- ax : `~matplotlib.axes.Axes` or None Axes **kwargs : dict Keyword arguments passed to `~matplotlib.pyplot.errorbar` Returns ------- ax : `~matplotlib.axes.Axes` Axes
Plots an event rate time curve.
[ "Plots", "an", "event", "rate", "time", "curve", "." ]
def plot_time(self, ax=None, **kwargs): """Plots an event rate time curve. Parameters ---------- ax : `~matplotlib.axes.Axes` or None Axes **kwargs : dict Keyword arguments passed to `~matplotlib.pyplot.errorbar` Returns ------- ax : `~matplotlib.axes.Axes` Axes """ import matplotlib.pyplot as plt ax = plt.gca() if ax is None else ax # Note the events are not necessarily in time order time = self.table["TIME"] time = time - np.min(time) ax.set_xlabel("Time (sec)") ax.set_ylabel("Counts") y, x_edges = np.histogram(time, bins=20) xerr = np.diff(x_edges) / 2 x = x_edges[:-1] + xerr yerr = np.sqrt(y) kwargs.setdefault("fmt", "none") ax.errorbar(x=x, y=y, xerr=xerr, yerr=yerr, **kwargs) return ax
[ "def", "plot_time", "(", "self", ",", "ax", "=", "None", ",", "*", "*", "kwargs", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "ax", "=", "plt", ".", "gca", "(", ")", "if", "ax", "is", "None", "else", "ax", "# Note the events are not necessarily in time order", "time", "=", "self", ".", "table", "[", "\"TIME\"", "]", "time", "=", "time", "-", "np", ".", "min", "(", "time", ")", "ax", ".", "set_xlabel", "(", "\"Time (sec)\"", ")", "ax", ".", "set_ylabel", "(", "\"Counts\"", ")", "y", ",", "x_edges", "=", "np", ".", "histogram", "(", "time", ",", "bins", "=", "20", ")", "xerr", "=", "np", ".", "diff", "(", "x_edges", ")", "/", "2", "x", "=", "x_edges", "[", ":", "-", "1", "]", "+", "xerr", "yerr", "=", "np", ".", "sqrt", "(", "y", ")", "kwargs", ".", "setdefault", "(", "\"fmt\"", ",", "\"none\"", ")", "ax", ".", "errorbar", "(", "x", "=", "x", ",", "y", "=", "y", ",", "xerr", "=", "xerr", ",", "yerr", "=", "yerr", ",", "*", "*", "kwargs", ")", "return", "ax" ]
https://github.com/gammapy/gammapy/blob/735b25cd5bbed35e2004d633621896dcd5295e8b/gammapy/data/event_list.py#L351-L386
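A usage sketch for plot_time, assuming an EventList loaded from an events FITS file (the file name is illustrative):

import matplotlib.pyplot as plt
from gammapy.data import EventList

events = EventList.read("events.fits")  # illustrative path
ax = events.plot_time(fmt="o")          # extra kwargs are forwarded to plt.errorbar
plt.show()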
inducer/pyopencl
71b29745dc023a4d3aa9ddf22ff65c0cb3e6d703
pyopencl/clrandom.py
python
fill_rand
(result, queue=None, luxury=None, a=0, b=1)
Fill *result* with random values of `dtype` in the range [0,1).
Fill *result* with random values of `dtype` in the range [0,1).
[ "Fill", "*", "result", "*", "with", "random", "values", "of", "dtype", "in", "the", "range", "[", "0", "1", ")", "." ]
def fill_rand(result, queue=None, luxury=None, a=0, b=1): """Fill *result* with random values of `dtype` in the range [0,1). """ if luxury is not None: from warnings import warn warn("Specifying the 'luxury' argument is deprecated and will stop being " "supported in PyOpenCL 2018.x", stacklevel=2) if queue is None: queue = result.queue gen = _get_generator(queue.context) gen.fill_uniform(result, a=a, b=b)
[ "def", "fill_rand", "(", "result", ",", "queue", "=", "None", ",", "luxury", "=", "None", ",", "a", "=", "0", ",", "b", "=", "1", ")", ":", "if", "luxury", "is", "not", "None", ":", "from", "warnings", "import", "warn", "warn", "(", "\"Specifying the 'luxury' argument is deprecated and will stop being \"", "\"supported in PyOpenCL 2018.x\"", ",", "stacklevel", "=", "2", ")", "if", "queue", "is", "None", ":", "queue", "=", "result", ".", "queue", "gen", "=", "_get_generator", "(", "queue", ".", "context", ")", "gen", ".", "fill_uniform", "(", "result", ",", "a", "=", "a", ",", "b", "=", "b", ")" ]
https://github.com/inducer/pyopencl/blob/71b29745dc023a4d3aa9ddf22ff65c0cb3e6d703/pyopencl/clrandom.py#L740-L751
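A usage sketch for fill_rand; the context/queue creation follows the standard PyOpenCL pattern:

import numpy as np
import pyopencl as cl
import pyopencl.array as cl_array
import pyopencl.clrandom as clrandom

ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)

result = cl_array.empty(queue, 10**6, dtype=np.float32)
clrandom.fill_rand(result)              # uniform in [0, 1)
clrandom.fill_rand(result, a=-1, b=1)   # uniform in [-1, 1)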
sumanj/frankencert
5bbaae85cab35882dec1672ef037a42e29180b40
pyopenssl-19.1.0/src/OpenSSL/crypto.py
python
load_pkcs12
(buffer, passphrase=None)
return pkcs12
Load pkcs12 data from the string *buffer*. If the pkcs12 structure is encrypted, a *passphrase* must be included. The MAC is always checked and thus required. See also the man page for the C function :py:func:`PKCS12_parse`. :param buffer: The buffer the certificate is stored in :param passphrase: (Optional) The password to decrypt the PKCS12 lump :returns: The PKCS12 object
Load pkcs12 data from the string *buffer*. If the pkcs12 structure is encrypted, a *passphrase* must be included. The MAC is always checked and thus required.
[ "Load", "pkcs12", "data", "from", "the", "string", "*", "buffer", "*", ".", "If", "the", "pkcs12", "structure", "is", "encrypted", "a", "*", "passphrase", "*", "must", "be", "included", ".", "The", "MAC", "is", "always", "checked", "and", "thus", "required", "." ]
def load_pkcs12(buffer, passphrase=None): """ Load pkcs12 data from the string *buffer*. If the pkcs12 structure is encrypted, a *passphrase* must be included. The MAC is always checked and thus required. See also the man page for the C function :py:func:`PKCS12_parse`. :param buffer: The buffer the certificate is stored in :param passphrase: (Optional) The password to decrypt the PKCS12 lump :returns: The PKCS12 object """ passphrase = _text_to_bytes_and_warn("passphrase", passphrase) if isinstance(buffer, _text_type): buffer = buffer.encode("ascii") bio = _new_mem_buf(buffer) # Use null passphrase if passphrase is None or empty string. With PKCS#12 # password based encryption no password and a zero length password are two # different things, but OpenSSL implementation will try both to figure out # which one works. if not passphrase: passphrase = _ffi.NULL p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL) if p12 == _ffi.NULL: _raise_current_error() p12 = _ffi.gc(p12, _lib.PKCS12_free) pkey = _ffi.new("EVP_PKEY**") cert = _ffi.new("X509**") cacerts = _ffi.new("Cryptography_STACK_OF_X509**") parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts) if not parse_result: _raise_current_error() cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free) # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the # queue for no particular reason. This error isn't interesting to anyone # outside this function. It's not even interesting to us. Get rid of it. try: _raise_current_error() except Error: pass if pkey[0] == _ffi.NULL: pykey = None else: pykey = PKey.__new__(PKey) pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free) if cert[0] == _ffi.NULL: pycert = None friendlyname = None else: pycert = X509._from_raw_x509_ptr(cert[0]) friendlyname_length = _ffi.new("int*") friendlyname_buffer = _lib.X509_alias_get0( cert[0], friendlyname_length ) friendlyname = _ffi.buffer( friendlyname_buffer, friendlyname_length[0] )[:] if friendlyname_buffer == _ffi.NULL: friendlyname = None pycacerts = [] for i in range(_lib.sk_X509_num(cacerts)): x509 = _lib.sk_X509_value(cacerts, i) pycacert = X509._from_raw_x509_ptr(x509) pycacerts.append(pycacert) if not pycacerts: pycacerts = None pkcs12 = PKCS12.__new__(PKCS12) pkcs12._pkey = pykey pkcs12._cert = pycert pkcs12._cacerts = pycacerts pkcs12._friendlyname = friendlyname return pkcs12
[ "def", "load_pkcs12", "(", "buffer", ",", "passphrase", "=", "None", ")", ":", "passphrase", "=", "_text_to_bytes_and_warn", "(", "\"passphrase\"", ",", "passphrase", ")", "if", "isinstance", "(", "buffer", ",", "_text_type", ")", ":", "buffer", "=", "buffer", ".", "encode", "(", "\"ascii\"", ")", "bio", "=", "_new_mem_buf", "(", "buffer", ")", "# Use null passphrase if passphrase is None or empty string. With PKCS#12", "# password based encryption no password and a zero length password are two", "# different things, but OpenSSL implementation will try both to figure out", "# which one works.", "if", "not", "passphrase", ":", "passphrase", "=", "_ffi", ".", "NULL", "p12", "=", "_lib", ".", "d2i_PKCS12_bio", "(", "bio", ",", "_ffi", ".", "NULL", ")", "if", "p12", "==", "_ffi", ".", "NULL", ":", "_raise_current_error", "(", ")", "p12", "=", "_ffi", ".", "gc", "(", "p12", ",", "_lib", ".", "PKCS12_free", ")", "pkey", "=", "_ffi", ".", "new", "(", "\"EVP_PKEY**\"", ")", "cert", "=", "_ffi", ".", "new", "(", "\"X509**\"", ")", "cacerts", "=", "_ffi", ".", "new", "(", "\"Cryptography_STACK_OF_X509**\"", ")", "parse_result", "=", "_lib", ".", "PKCS12_parse", "(", "p12", ",", "passphrase", ",", "pkey", ",", "cert", ",", "cacerts", ")", "if", "not", "parse_result", ":", "_raise_current_error", "(", ")", "cacerts", "=", "_ffi", ".", "gc", "(", "cacerts", "[", "0", "]", ",", "_lib", ".", "sk_X509_free", ")", "# openssl 1.0.0 sometimes leaves an X509_check_private_key error in the", "# queue for no particular reason. This error isn't interesting to anyone", "# outside this function. It's not even interesting to us. Get rid of it.", "try", ":", "_raise_current_error", "(", ")", "except", "Error", ":", "pass", "if", "pkey", "[", "0", "]", "==", "_ffi", ".", "NULL", ":", "pykey", "=", "None", "else", ":", "pykey", "=", "PKey", ".", "__new__", "(", "PKey", ")", "pykey", ".", "_pkey", "=", "_ffi", ".", "gc", "(", "pkey", "[", "0", "]", ",", "_lib", ".", "EVP_PKEY_free", ")", "if", "cert", "[", "0", "]", "==", "_ffi", ".", "NULL", ":", "pycert", "=", "None", "friendlyname", "=", "None", "else", ":", "pycert", "=", "X509", ".", "_from_raw_x509_ptr", "(", "cert", "[", "0", "]", ")", "friendlyname_length", "=", "_ffi", ".", "new", "(", "\"int*\"", ")", "friendlyname_buffer", "=", "_lib", ".", "X509_alias_get0", "(", "cert", "[", "0", "]", ",", "friendlyname_length", ")", "friendlyname", "=", "_ffi", ".", "buffer", "(", "friendlyname_buffer", ",", "friendlyname_length", "[", "0", "]", ")", "[", ":", "]", "if", "friendlyname_buffer", "==", "_ffi", ".", "NULL", ":", "friendlyname", "=", "None", "pycacerts", "=", "[", "]", "for", "i", "in", "range", "(", "_lib", ".", "sk_X509_num", "(", "cacerts", ")", ")", ":", "x509", "=", "_lib", ".", "sk_X509_value", "(", "cacerts", ",", "i", ")", "pycacert", "=", "X509", ".", "_from_raw_x509_ptr", "(", "x509", ")", "pycacerts", ".", "append", "(", "pycacert", ")", "if", "not", "pycacerts", ":", "pycacerts", "=", "None", "pkcs12", "=", "PKCS12", ".", "__new__", "(", "PKCS12", ")", "pkcs12", ".", "_pkey", "=", "pykey", "pkcs12", ".", "_cert", "=", "pycert", "pkcs12", ".", "_cacerts", "=", "pycacerts", "pkcs12", ".", "_friendlyname", "=", "friendlyname", "return", "pkcs12" ]
https://github.com/sumanj/frankencert/blob/5bbaae85cab35882dec1672ef037a42e29180b40/pyopenssl-19.1.0/src/OpenSSL/crypto.py#L3002-L3086
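A usage sketch for load_pkcs12; the file name and passphrase are placeholders:

from OpenSSL import crypto

with open("bundle.p12", "rb") as f:      # placeholder file name
    p12 = crypto.load_pkcs12(f.read(), passphrase=b"secret")

key = p12.get_privatekey()               # PKey, or None if absent
cert = p12.get_certificate()             # X509, or None if absent
ca_certs = p12.get_ca_certificates()     # list of X509, or None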
trailofbits/manticore
b050fdf0939f6c63f503cdf87ec0ab159dd41159
manticore/ethereum/detectors.py
python
DetectIntegerOverflow._unsigned_mul_overflow
(state, a, b)
return cond
Sign extend the value to 512 bits and check the result can be represented in 256.
Following there is a 32 bit excerpt of this condition (* marks products that no longer fit in the word size, i.e. an unsigned multiplication overflow):

a * b               +00000000000000000 +00000000000000001 +0000000003fffffff +0000000007fffffff +00000000080000001 +000000000bfffffff +000000000ffffffff
+0000000000000000    +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000
+0000000000000001    +0000000000000000  +0000000000000001  +000000003fffffff  +000000007fffffff  +0000000080000001  +00000000bfffffff  +00000000ffffffff
+000000003fffffff    +0000000000000000  +000000003fffffff *+0fffffff80000001 *+1fffffff40000001 *+1fffffffbfffffff *+2fffffff00000001 *+3ffffffec0000001
+000000007fffffff    +0000000000000000  +000000007fffffff *+1fffffff40000001 *+3fffffff00000001 *+3fffffffffffffff *+5ffffffec0000001 *+7ffffffe80000001
+0000000080000001    +0000000000000000  +0000000080000001 *+1fffffffbfffffff *+3fffffffffffffff *+4000000100000001 *+600000003fffffff *+800000007fffffff
+00000000bfffffff    +0000000000000000  +00000000bfffffff *+2fffffff00000001 *+5ffffffec0000001 *+600000003fffffff *+8ffffffe80000001 *+bffffffe40000001
+00000000ffffffff    +0000000000000000  +00000000ffffffff *+3ffffffec0000001 *+7ffffffe80000001 *+800000007fffffff *+bffffffe40000001 *+fffffffe00000001
Sign extend the value to 512 bits and check the result can be represented in 256. Following there is a 32 bit excerpt of this condition:
[ "Sign", "extend", "the", "value", "to", "512", "bits", "and", "check", "the", "result", "can", "be", "represented", "in", "256", ".", "Following", "there", "is", "a", "32", "bit", "excerpt", "of", "this", "condition", ":" ]
def _unsigned_mul_overflow(state, a, b):
    """
    Sign extend the value to 512 bits and check the result can be represented in 256.
    Following there is a 32 bit excerpt of this condition (* marks products that no longer fit in the word size, i.e. an unsigned multiplication overflow):

    a * b               +00000000000000000 +00000000000000001 +0000000003fffffff +0000000007fffffff +00000000080000001 +000000000bfffffff +000000000ffffffff
    +0000000000000000    +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000  +0000000000000000
    +0000000000000001    +0000000000000000  +0000000000000001  +000000003fffffff  +000000007fffffff  +0000000080000001  +00000000bfffffff  +00000000ffffffff
    +000000003fffffff    +0000000000000000  +000000003fffffff *+0fffffff80000001 *+1fffffff40000001 *+1fffffffbfffffff *+2fffffff00000001 *+3ffffffec0000001
    +000000007fffffff    +0000000000000000  +000000007fffffff *+1fffffff40000001 *+3fffffff00000001 *+3fffffffffffffff *+5ffffffec0000001 *+7ffffffe80000001
    +0000000080000001    +0000000000000000  +0000000080000001 *+1fffffffbfffffff *+3fffffffffffffff *+4000000100000001 *+600000003fffffff *+800000007fffffff
    +00000000bfffffff    +0000000000000000  +00000000bfffffff *+2fffffff00000001 *+5ffffffec0000001 *+600000003fffffff *+8ffffffe80000001 *+bffffffe40000001
    +00000000ffffffff    +0000000000000000  +00000000ffffffff *+3ffffffec0000001 *+7ffffffe80000001 *+800000007fffffff *+bffffffe40000001 *+fffffffe00000001
    """
    mul = Operators.SEXTEND(a, 256, 512) * Operators.SEXTEND(b, 256, 512)
    cond = Operators.UGE(mul, 1 << 256)
    return cond
[ "def", "_unsigned_mul_overflow", "(", "state", ",", "a", ",", "b", ")", ":", "mul", "=", "Operators", ".", "SEXTEND", "(", "a", ",", "256", ",", "512", ")", "*", "Operators", ".", "SEXTEND", "(", "b", ",", "256", ",", "512", ")", "cond", "=", "Operators", ".", "UGE", "(", "mul", ",", "1", "<<", "256", ")", "return", "cond" ]
https://github.com/trailofbits/manticore/blob/b050fdf0939f6c63f503cdf87ec0ab159dd41159/manticore/ethereum/detectors.py#L480-L497
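A concrete instance of the condition above, worked in plain Python on 256-bit values (independent of Manticore's symbolic Operators):

WIDTH = 256

a, b = 2**130, 2**130
assert a * b >= 1 << WIDTH   # product needs 260 bits -> overflow is flagged

a, b = (1 << 128) - 1, (1 << 128) + 1
assert a * b < 1 << WIDTH    # product is 2**256 - 1, still representable -> no overflow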
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/weakref.py
python
WeakValueDictionary.itervaluerefs
(self)
return self.data.itervalues()
Return an iterator that yields the weak references to the values. The references are not guaranteed to be 'live' at the time they are used, so the result of calling the references needs to be checked before being used. This can be used to avoid creating references that will cause the garbage collector to keep the values around longer than needed.
Return an iterator that yields the weak references to the values.
[ "Return", "an", "iterator", "that", "yields", "the", "weak", "references", "to", "the", "values", "." ]
def itervaluerefs(self): """Return an iterator that yields the weak references to the values. The references are not guaranteed to be 'live' at the time they are used, so the result of calling the references needs to be checked before being used. This can be used to avoid creating references that will cause the garbage collector to keep the values around longer than needed. """ return self.data.itervalues()
[ "def", "itervaluerefs", "(", "self", ")", ":", "return", "self", ".", "data", ".", "itervalues", "(", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/weakref.py#L134-L144
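A usage sketch for itervaluerefs under Python 2 (matching the CPython 2.7 path above); the Value class is a stand-in, since plain object() instances cannot be weakly referenced:

import weakref

class Value(object):
    pass

d = weakref.WeakValueDictionary()
v = Value()
d["k"] = v

for ref in d.itervaluerefs():
    obj = ref()              # may be None if the value has been collected
    if obj is not None:
        print(obj)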
internetarchive/openlibrary
33b9b005ecb0adeda690c67952f5ae5f1fe3a8d8
openlibrary/plugins/upstream/utils.py
python
kebab_case
(upper_camel_case)
return '-'.join(parts).lower()
:param str upper_camel_case: Text in upper camel case (e.g. "HelloWorld") :return: text in kebab case (e.g. 'hello-world') >>> kebab_case('HelloWorld') 'hello-world' >>> kebab_case("MergeUI") 'merge-u-i'
:param str upper_camel_case: Text in upper camel case (e.g. "HelloWorld") :return: text in kebab case (e.g. 'hello-world')
[ ":", "param", "str", "upper_camel_case", ":", "Text", "in", "upper", "camel", "case", "(", "e", ".", "g", ".", "HelloWorld", ")", ":", "return", ":", "text", "in", "kebab", "case", "(", "e", ".", "g", ".", "hello", "-", "world", ")" ]
def kebab_case(upper_camel_case): """ :param str upper_camel_case: Text in upper camel case (e.g. "HelloWorld") :return: text in kebab case (e.g. 'hello-world') >>> kebab_case('HelloWorld') 'hello-world' >>> kebab_case("MergeUI") 'merge-u-i' """ parts = re.findall(r'[A-Z][^A-Z]*', upper_camel_case) return '-'.join(parts).lower()
[ "def", "kebab_case", "(", "upper_camel_case", ")", ":", "parts", "=", "re", ".", "findall", "(", "r'[A-Z][^A-Z]*'", ",", "upper_camel_case", ")", "return", "'-'", ".", "join", "(", "parts", ")", ".", "lower", "(", ")" ]
https://github.com/internetarchive/openlibrary/blob/33b9b005ecb0adeda690c67952f5ae5f1fe3a8d8/openlibrary/plugins/upstream/utils.py#L123-L134