Column summary (name, dtype, observed range):

  body_hash                stringlengths   64 to 64
  body                     stringlengths   23 to 109k
  docstring                stringlengths   1 to 57k
  path                     stringlengths   4 to 198
  name                     stringlengths   1 to 115
  repository_name          stringlengths   7 to 111
  repository_stars         float64         0 to 191k
  lang                     stringclasses   1 value
  body_without_docstring   stringlengths   14 to 108k
  unified                  stringlengths   45 to 133k

Each record below lists its fields in this column order, one field per line.
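The columns above match the per-column summaries the Hugging Face dataset viewer produces. A minimal sketch of loading and inspecting one record with the datasets library, assuming the dump corresponds to a published dataset; the repository identifier and split name below are placeholders, not the real ones:

    from datasets import load_dataset

    # Placeholder repo id; substitute the actual dataset identifier.
    ds = load_dataset("user/python-docstrings", split="train")
    row = ds[0]
    print(row["name"], row["path"], int(row["repository_stars"]))
    print(row["docstring"])               # the extracted docstring text
    print(row["body_without_docstring"])  # the function body with the docstring stripped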
ae6bd1539ea125c119e9877eb6fe7501e71f9ff23b80ffd39360fa0680dd2e11
def perform_raw(self) -> Point: '\n Perform the scalar-multiplication of the key agreement.\n\n :return: The shared point.\n ' point = self.mult.multiply(int(self.privkey)) return point.to_affine()
Perform the scalar-multiplication of the key agreement. :return: The shared point.
pyecsca/ec/key_agreement.py
perform_raw
Tomko10/pyecsca
24
python
def perform_raw(self) -> Point: '\n Perform the scalar-multiplication of the key agreement.\n\n :return: The shared point.\n ' point = self.mult.multiply(int(self.privkey)) return point.to_affine()
def perform_raw(self) -> Point: '\n Perform the scalar-multiplication of the key agreement.\n\n :return: The shared point.\n ' point = self.mult.multiply(int(self.privkey)) return point.to_affine()<|docstring|>Perform the scalar-multiplication of the key agreement. :return: The shared point.<|endoftext|>
37487c6050bdaba23891d44b9a420b475ccd49cfb2fa50bcc4d0db450ee4d549
def perform(self) -> bytes: '\n Perform the key agreement operation.\n\n :return: The shared secret.\n ' with ECDHAction(self.params, self.hash_algo, self.privkey, self.pubkey) as action: affine_point = self.perform_raw() x = int(affine_point.x) p = self.params.curve.prime n = ((p.bit_length() + 7) // 8) result = x.to_bytes(n, byteorder='big') if (self.hash_algo is not None): result = self.hash_algo(result).digest() return action.exit(result)
Perform the key agreement operation. :return: The shared secret.
pyecsca/ec/key_agreement.py
perform
Tomko10/pyecsca
24
python
def perform(self) -> bytes: '\n Perform the key agreement operation.\n\n :return: The shared secret.\n ' with ECDHAction(self.params, self.hash_algo, self.privkey, self.pubkey) as action: affine_point = self.perform_raw() x = int(affine_point.x) p = self.params.curve.prime n = ((p.bit_length() + 7) // 8) result = x.to_bytes(n, byteorder='big') if (self.hash_algo is not None): result = self.hash_algo(result).digest() return action.exit(result)
def perform(self) -> bytes: '\n Perform the key agreement operation.\n\n :return: The shared secret.\n ' with ECDHAction(self.params, self.hash_algo, self.privkey, self.pubkey) as action: affine_point = self.perform_raw() x = int(affine_point.x) p = self.params.curve.prime n = ((p.bit_length() + 7) // 8) result = x.to_bytes(n, byteorder='big') if (self.hash_algo is not None): result = self.hash_algo(result).digest() return action.exit(result)<|docstring|>Perform the key agreement operation. :return: The shared secret.<|endoftext|>
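The shared secret here is the affine x-coordinate padded to the byte width of the curve prime; a small illustration of the (p.bit_length() + 7) // 8 step, using the NIST P-256 prime and a hypothetical x-coordinate:

    # NIST P-256 prime, 256 bits wide.
    p = 2**256 - 2**224 + 2**192 + 2**96 - 1
    n = (p.bit_length() + 7) // 8          # ceil(256 / 8) == 32 bytes
    x = 5                                  # hypothetical affine x-coordinate
    shared = x.to_bytes(n, byteorder='big')
    assert len(shared) == 32               # fixed-width, big-endian encoding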
de5baac508483483b330415f45044fd707fbe6e6f070effaceff65685321dd13
def path(synset1, synset2): 'Return the Path similarity of *synset1* and *synset2*.' distance = len(synset1.shortest_path(synset2, simulate_root=True)) return (1 / (distance + 1))
Return the Path similarity of *synset1* and *synset2*.
wn/similarity.py
path
fushinari/wn
0
python
def path(synset1, synset2): distance = len(synset1.shortest_path(synset2, simulate_root=True)) return (1 / (distance + 1))
def path(synset1, synset2): distance = len(synset1.shortest_path(synset2, simulate_root=True)) return (1 / (distance + 1))<|docstring|>Return the Path similarity of *synset1* and *synset2*.<|endoftext|>
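path() maps a shortest-path length d to 1 / (d + 1); whether wn's shortest_path() counts edges or intermediate nodes is not visible from this record, so the sketch below only illustrates the formula:

    def path_from_distance(distance: int) -> float:
        # Identical synsets (distance 0) score 1.0; the score decays hyperbolically.
        return 1 / (distance + 1)

    assert path_from_distance(0) == 1.0
    assert path_from_distance(3) == 0.25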
1c709f5d5818731873096147e201b49f864795372d07e264cc7a8ea669b55e30
def wup(synset1: Synset, synset2: Synset) -> float: 'Return the Wu-Palmer similarity of *synset1* and *synset2*.' lch = synset1.lowest_common_hypernyms(synset2, simulate_root=True)[0] n = (lch.max_depth() + 1) n1 = len(synset1.shortest_path(lch, simulate_root=True)) n2 = len(synset2.shortest_path(lch, simulate_root=True)) return ((2 * n) / ((n1 + n2) + (2 * n)))
Return the Wu-Palmer similarity of *synset1* and *synset2*.
wn/similarity.py
wup
fushinari/wn
0
python
def wup(synset1: Synset, synset2: Synset) -> float: lch = synset1.lowest_common_hypernyms(synset2, simulate_root=True)[0] n = (lch.max_depth() + 1) n1 = len(synset1.shortest_path(lch, simulate_root=True)) n2 = len(synset2.shortest_path(lch, simulate_root=True)) return ((2 * n) / ((n1 + n2) + (2 * n)))
def wup(synset1: Synset, synset2: Synset) -> float: lch = synset1.lowest_common_hypernyms(synset2, simulate_root=True)[0] n = (lch.max_depth() + 1) n1 = len(synset1.shortest_path(lch, simulate_root=True)) n2 = len(synset2.shortest_path(lch, simulate_root=True)) return ((2 * n) / ((n1 + n2) + (2 * n)))<|docstring|>Return the Wu-Palmer similarity of *synset1* and *synset2*.<|endoftext|>
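wup() rewards pairs whose lowest common hypernym (LCH) sits deep relative to the paths joining them; a purely numeric illustration with made-up depths:

    n, n1, n2 = 3, 1, 2                        # LCH depth + 1, path lengths to the LCH
    wup_score = (2 * n) / ((n1 + n2) + (2 * n))
    assert abs(wup_score - 2 / 3) < 1e-12      # 6 / 9, roughly 0.667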
89e29cde41cf79e104e5d2f7f677e29cf40f92c1e28a1374807fd6345906f497
def lch(synset1: Synset, synset2: Synset, max_depth: int=0) -> float: 'Return the Leacock-Chodorow similarity of *synset1* and *synset2*.' distance = len(synset1.shortest_path(synset2, simulate_root=True)) if (max_depth <= 0): raise wn.Error('max_depth must be greater than 0') return (- math.log(((distance + 1) / (2 * max_depth))))
Return the Leacock-Chodorow similarity of *synset1* and *synset2*.
wn/similarity.py
lch
fushinari/wn
0
python
def lch(synset1: Synset, synset2: Synset, max_depth: int=0) -> float: distance = len(synset1.shortest_path(synset2, simulate_root=True)) if (max_depth <= 0): raise wn.Error('max_depth must be greater than 0') return (- math.log(((distance + 1) / (2 * max_depth))))
def lch(synset1: Synset, synset2: Synset, max_depth: int=0) -> float: distance = len(synset1.shortest_path(synset2, simulate_root=True)) if (max_depth <= 0): raise wn.Error('max_depth must be greater than 0') return (- math.log(((distance + 1) / (2 * max_depth))))<|docstring|>Return the Leacock-Chodorow similarity of *synset1* and *synset2*.<|endoftext|>
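lch() computes -log((d + 1) / (2 * max_depth)), so shorter paths and deeper taxonomies give larger scores; a worked value:

    import math

    distance, max_depth = 2, 10
    lch_score = -math.log((distance + 1) / (2 * max_depth))
    assert abs(lch_score - math.log(20 / 3)) < 1e-12   # roughly 1.897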
5a6ed699d923991602c05de2a606eea776f8434e3458416979ebbf41fa9c1f66
def render_get(self, request, components, msg): '\n Handle a transaction request. There are four types of requests:\n empty path -- return a list of the committed transactions ids\n txnid -- return the contents of the specified transaction\n txnid and field name -- return the contents of the specified\n transaction\n txnid and HEAD request -- return success only if the transaction\n has been committed\n 404 -- transaction does not exist\n 302 -- transaction exists but has not been committed\n 200 -- transaction has been committed\n\n The request may specify additional parameters:\n blockcount -- the number of blocks (newest to oldest) from which to\n pull txns\n\n Transactions are returned from oldest to newest.\n ' if (components and (len(components[0]) == 0)): components.pop(0) if (len(components) == 0): blkcount = 0 if ('blockcount' in msg): blkcount = int(msg.get('blockcount').pop(0)) txnids = [] blockids = self.Ledger.committed_block_ids(blkcount) while blockids: blockid = blockids.pop() txnids.extend(self.Ledger.BlockStore[blockid].TransactionIDs) return txnids txnid = components.pop(0) if (txnid not in self.Ledger.TransactionStore): raise Error(http.NOT_FOUND, 'no such transaction {0}'.format(txnid)) txn = self.Ledger.TransactionStore[txnid] test_only = (request.method == 'HEAD') if test_only: if (txn.Status == transaction.Status.committed): return None else: raise Error(http.FOUND, 'transaction not committed {0}'.format(txnid)) tinfo = txn.dump() tinfo['Identifier'] = txnid tinfo['Status'] = txn.Status if (txn.Status == transaction.Status.committed): tinfo['InBlock'] = txn.InBlock if (not components): return tinfo field = components.pop(0) if (field not in tinfo): raise Error(http.BAD_REQUEST, 'unknown transaction field {0}'.format(field)) return tinfo[field]
Handle a transaction request. There are four types of requests: empty path -- return a list of the committed transactions ids txnid -- return the contents of the specified transaction txnid and field name -- return the contents of the specified transaction txnid and HEAD request -- return success only if the transaction has been committed 404 -- transaction does not exist 302 -- transaction exists but has not been committed 200 -- transaction has been committed The request may specify additional parameters: blockcount -- the number of blocks (newest to oldest) from which to pull txns Transactions are returned from oldest to newest.
validator/txnserver/web_pages/transaction_page.py
render_get
NunoEdgarGFlowHub/sawtooth-core
4
python
def render_get(self, request, components, msg): '\n Handle a transaction request. There are four types of requests:\n empty path -- return a list of the committed transactions ids\n txnid -- return the contents of the specified transaction\n txnid and field name -- return the contents of the specified\n transaction\n txnid and HEAD request -- return success only if the transaction\n has been committed\n 404 -- transaction does not exist\n 302 -- transaction exists but has not been committed\n 200 -- transaction has been committed\n\n The request may specify additional parameters:\n blockcount -- the number of blocks (newest to oldest) from which to\n pull txns\n\n Transactions are returned from oldest to newest.\n ' if (components and (len(components[0]) == 0)): components.pop(0) if (len(components) == 0): blkcount = 0 if ('blockcount' in msg): blkcount = int(msg.get('blockcount').pop(0)) txnids = [] blockids = self.Ledger.committed_block_ids(blkcount) while blockids: blockid = blockids.pop() txnids.extend(self.Ledger.BlockStore[blockid].TransactionIDs) return txnids txnid = components.pop(0) if (txnid not in self.Ledger.TransactionStore): raise Error(http.NOT_FOUND, 'no such transaction {0}'.format(txnid)) txn = self.Ledger.TransactionStore[txnid] test_only = (request.method == 'HEAD') if test_only: if (txn.Status == transaction.Status.committed): return None else: raise Error(http.FOUND, 'transaction not committed {0}'.format(txnid)) tinfo = txn.dump() tinfo['Identifier'] = txnid tinfo['Status'] = txn.Status if (txn.Status == transaction.Status.committed): tinfo['InBlock'] = txn.InBlock if (not components): return tinfo field = components.pop(0) if (field not in tinfo): raise Error(http.BAD_REQUEST, 'unknown transaction field {0}'.format(field)) return tinfo[field]
def render_get(self, request, components, msg): '\n Handle a transaction request. There are four types of requests:\n empty path -- return a list of the committed transactions ids\n txnid -- return the contents of the specified transaction\n txnid and field name -- return the contents of the specified\n transaction\n txnid and HEAD request -- return success only if the transaction\n has been committed\n 404 -- transaction does not exist\n 302 -- transaction exists but has not been committed\n 200 -- transaction has been committed\n\n The request may specify additional parameters:\n blockcount -- the number of blocks (newest to oldest) from which to\n pull txns\n\n Transactions are returned from oldest to newest.\n ' if (components and (len(components[0]) == 0)): components.pop(0) if (len(components) == 0): blkcount = 0 if ('blockcount' in msg): blkcount = int(msg.get('blockcount').pop(0)) txnids = [] blockids = self.Ledger.committed_block_ids(blkcount) while blockids: blockid = blockids.pop() txnids.extend(self.Ledger.BlockStore[blockid].TransactionIDs) return txnids txnid = components.pop(0) if (txnid not in self.Ledger.TransactionStore): raise Error(http.NOT_FOUND, 'no such transaction {0}'.format(txnid)) txn = self.Ledger.TransactionStore[txnid] test_only = (request.method == 'HEAD') if test_only: if (txn.Status == transaction.Status.committed): return None else: raise Error(http.FOUND, 'transaction not committed {0}'.format(txnid)) tinfo = txn.dump() tinfo['Identifier'] = txnid tinfo['Status'] = txn.Status if (txn.Status == transaction.Status.committed): tinfo['InBlock'] = txn.InBlock if (not components): return tinfo field = components.pop(0) if (field not in tinfo): raise Error(http.BAD_REQUEST, 'unknown transaction field {0}'.format(field)) return tinfo[field]<|docstring|>Handle a transaction request. There are four types of requests: empty path -- return a list of the committed transactions ids txnid -- return the contents of the specified transaction txnid and field name -- return the contents of the specified transaction txnid and HEAD request -- return success only if the transaction has been committed 404 -- transaction does not exist 302 -- transaction exists but has not been committed 200 -- transaction has been committed The request may specify additional parameters: blockcount -- the number of blocks (newest to oldest) from which to pull txns Transactions are returned from oldest to newest.<|endoftext|>
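The blockcount lookup assumes the twisted-style convention in which each query parameter maps to a list of values; a minimal illustration of that parsing step with a hypothetical msg dict:

    msg = {'blockcount': ['5']}     # hypothetical parsed query string
    blkcount = 0
    if 'blockcount' in msg:
        blkcount = int(msg.get('blockcount').pop(0))
    assert blkcount == 5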
ab9af6e9c914b41a2cfc6add2e0ebbe7a38a36789fddf19f24d410a0be92a487
def framesig(sig, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Frame a signal into overlapping frames.\n\n Args:\n sig: the audio signal to frame.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n an array of frames. Size is NUMFRAMES by frame_len.\n ' slen = len(sig) frame_len = int(round(frame_len)) frame_step = int(round(frame_step)) if (slen <= frame_len): numframes = 1 else: numframes = (1 + int(math.ceil((((1.0 * slen) - frame_len) / frame_step)))) padlen = int((((numframes - 1) * frame_step) + frame_len)) zeros = numpy.zeros(((padlen - slen),)) padsignal = numpy.concatenate((sig, zeros)) indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) frames = padsignal[indices] win = numpy.tile(winfunc(frame_len), (numframes, 1)) return (frames * win)
Frame a signal into overlapping frames. Args: sig: the audio signal to frame. frame_len: length of each frame measured in samples. frame_step: number of samples after the start of the previous frame that the next frame should begin. winfunc: the analysis window to apply to each frame. By default no window is applied. Returns: an array of frames. Size is NUMFRAMES by frame_len.
processing/sigproc.py
framesig
v0lta/tfkaldi
200
python
def framesig(sig, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Frame a signal into overlapping frames.\n\n Args:\n sig: the audio signal to frame.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n an array of frames. Size is NUMFRAMES by frame_len.\n ' slen = len(sig) frame_len = int(round(frame_len)) frame_step = int(round(frame_step)) if (slen <= frame_len): numframes = 1 else: numframes = (1 + int(math.ceil((((1.0 * slen) - frame_len) / frame_step)))) padlen = int((((numframes - 1) * frame_step) + frame_len)) zeros = numpy.zeros(((padlen - slen),)) padsignal = numpy.concatenate((sig, zeros)) indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) frames = padsignal[indices] win = numpy.tile(winfunc(frame_len), (numframes, 1)) return (frames * win)
def framesig(sig, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Frame a signal into overlapping frames.\n\n Args:\n sig: the audio signal to frame.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n an array of frames. Size is NUMFRAMES by frame_len.\n ' slen = len(sig) frame_len = int(round(frame_len)) frame_step = int(round(frame_step)) if (slen <= frame_len): numframes = 1 else: numframes = (1 + int(math.ceil((((1.0 * slen) - frame_len) / frame_step)))) padlen = int((((numframes - 1) * frame_step) + frame_len)) zeros = numpy.zeros(((padlen - slen),)) padsignal = numpy.concatenate((sig, zeros)) indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) frames = padsignal[indices] win = numpy.tile(winfunc(frame_len), (numframes, 1)) return (frames * win)<|docstring|>Frame a signal into overlapping frames. Args: sig: the audio signal to frame. frame_len: length of each frame measured in samples. frame_step: number of samples after the start of the previous frame that the next frame should begin. winfunc: the analysis window to apply to each frame. By default no window is applied. Returns: an array of frames. Size is NUMFRAMES by frame_len.<|endoftext|>
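A hypothetical call on one second of 16 kHz audio, assuming the framesig shown in this record (with its numpy and math imports) is in scope; 25 ms frames with a 10 ms hop give 1 + ceil((16000 - 400) / 160) = 99 frames:

    import numpy

    sig = numpy.random.randn(16000)                      # 1 s at 16 kHz
    frames = framesig(sig, 16000 * 0.025, 16000 * 0.010)
    assert frames.shape == (99, 400)                     # 99 frames of 400 samples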
1453bd657388c3ab7fce9b7f54e76a0f7674fe8afd0c0f94e62298e955b32dc4
def deframesig(frames, siglen, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Does overlap-add procedure to undo the action of framesig.\n\n Args:\n frames the: array of frames.\n siglen the: length of the desired signal, use 0 if unknown. Output will\n be truncated to siglen samples.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n a 1-D signal.\n ' frame_len = round(frame_len) frame_step = round(frame_step) numframes = numpy.shape(frames)[0] assert (numpy.shape(frames)[1] == frame_len), '"frames" matrix is wrong\n size, 2nd dim is not equal to frame_len' indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) padlen = (((numframes - 1) * frame_step) + frame_len) if (siglen <= 0): siglen = padlen rec_signal = numpy.zeros((padlen,)) window_correction = numpy.zeros((padlen,)) win = winfunc(frame_len) for i in range(0, numframes): window_correction[indices[(i, :)]] = ((window_correction[indices[(i, :)]] + win) + 1e-15) rec_signal[indices[(i, :)]] = (rec_signal[indices[(i, :)]] + frames[(i, :)]) rec_signal = (rec_signal / window_correction) return rec_signal[0:siglen]
Does overlap-add procedure to undo the action of framesig. Args: frames the: array of frames. siglen the: length of the desired signal, use 0 if unknown. Output will be truncated to siglen samples. frame_len: length of each frame measured in samples. frame_step: number of samples after the start of the previous frame that the next frame should begin. winfunc: the analysis window to apply to each frame. By default no window is applied. Returns: a 1-D signal.
processing/sigproc.py
deframesig
v0lta/tfkaldi
200
python
def deframesig(frames, siglen, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Does overlap-add procedure to undo the action of framesig.\n\n Args:\n frames the: array of frames.\n siglen the: length of the desired signal, use 0 if unknown. Output will\n be truncated to siglen samples.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n a 1-D signal.\n ' frame_len = round(frame_len) frame_step = round(frame_step) numframes = numpy.shape(frames)[0] assert (numpy.shape(frames)[1] == frame_len), '"frames" matrix is wrong\n size, 2nd dim is not equal to frame_len' indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) padlen = (((numframes - 1) * frame_step) + frame_len) if (siglen <= 0): siglen = padlen rec_signal = numpy.zeros((padlen,)) window_correction = numpy.zeros((padlen,)) win = winfunc(frame_len) for i in range(0, numframes): window_correction[indices[(i, :)]] = ((window_correction[indices[(i, :)]] + win) + 1e-15) rec_signal[indices[(i, :)]] = (rec_signal[indices[(i, :)]] + frames[(i, :)]) rec_signal = (rec_signal / window_correction) return rec_signal[0:siglen]
def deframesig(frames, siglen, frame_len, frame_step, winfunc=(lambda x: numpy.ones((x,)))): '\n Does overlap-add procedure to undo the action of framesig.\n\n Args:\n frames the: array of frames.\n siglen the: length of the desired signal, use 0 if unknown. Output will\n be truncated to siglen samples.\n frame_len: length of each frame measured in samples.\n frame_step: number of samples after the start of the previous frame that\n the next frame should begin.\n winfunc: the analysis window to apply to each frame. By default no\n window is applied.\n\n Returns:\n a 1-D signal.\n ' frame_len = round(frame_len) frame_step = round(frame_step) numframes = numpy.shape(frames)[0] assert (numpy.shape(frames)[1] == frame_len), '"frames" matrix is wrong\n size, 2nd dim is not equal to frame_len' indices = (numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(numpy.arange(0, (numframes * frame_step), frame_step), (frame_len, 1)).T) indices = numpy.array(indices, dtype=numpy.int32) padlen = (((numframes - 1) * frame_step) + frame_len) if (siglen <= 0): siglen = padlen rec_signal = numpy.zeros((padlen,)) window_correction = numpy.zeros((padlen,)) win = winfunc(frame_len) for i in range(0, numframes): window_correction[indices[(i, :)]] = ((window_correction[indices[(i, :)]] + win) + 1e-15) rec_signal[indices[(i, :)]] = (rec_signal[indices[(i, :)]] + frames[(i, :)]) rec_signal = (rec_signal / window_correction) return rec_signal[0:siglen]<|docstring|>Does overlap-add procedure to undo the action of framesig. Args: frames the: array of frames. siglen the: length of the desired signal, use 0 if unknown. Output will be truncated to siglen samples. frame_len: length of each frame measured in samples. frame_step: number of samples after the start of the previous frame that the next frame should begin. winfunc: the analysis window to apply to each frame. By default no window is applied. Returns: a 1-D signal.<|endoftext|>
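Continuing the sketch after the framesig record, and again assuming both functions are in scope: with the default rectangular window, deframesig's overlap-add (normalised by the accumulated window) inverts the framing almost exactly:

    import numpy

    sig = numpy.random.randn(16000)
    frames = framesig(sig, 400, 160)
    rec = deframesig(frames, siglen=len(sig), frame_len=400, frame_step=160)
    assert rec.shape == (16000,)
    assert numpy.allclose(rec, sig)     # reconstruction error on the order of 1e-15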
59dd27f65f3e2742e47e9a48194854b931efa3ba29c4647591d4e8ef06b47008
def magspec(frames, nfft): '\n Compute the magnitude spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n magnitude spectrum of the corresponding frame.\n ' complex_spec = numpy.fft.rfft(frames, nfft) return numpy.absolute(complex_spec)
Compute the magnitude spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the magnitude spectrum of the corresponding frame.
processing/sigproc.py
magspec
v0lta/tfkaldi
200
python
def magspec(frames, nfft): '\n Compute the magnitude spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n magnitude spectrum of the corresponding frame.\n ' complex_spec = numpy.fft.rfft(frames, nfft) return numpy.absolute(complex_spec)
def magspec(frames, nfft): '\n Compute the magnitude spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n magnitude spectrum of the corresponding frame.\n ' complex_spec = numpy.fft.rfft(frames, nfft) return numpy.absolute(complex_spec)<|docstring|>Compute the magnitude spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the magnitude spectrum of the corresponding frame.<|endoftext|>
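One detail worth noting: numpy.fft.rfft keeps only the non-negative frequencies, so each output row has nfft // 2 + 1 columns rather than the full NFFT the docstring suggests:

    import numpy

    frames = numpy.random.randn(99, 400)
    spec = numpy.absolute(numpy.fft.rfft(frames, 512))   # same computation as magspec
    assert spec.shape == (99, 257)                       # 512 // 2 + 1 bins per frame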
cc0cb93f78d58260c7b008ff1fd9742ba70801527649b7da38016951ecf4d227
def powspec(frames, nfft): '\n Compute the power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n power spectrum of the corresponding frame.\n ' return ((1.0 / nfft) * numpy.square(magspec(frames, nfft)))
Compute the power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the power spectrum of the corresponding frame.
processing/sigproc.py
powspec
v0lta/tfkaldi
200
python
def powspec(frames, nfft): '\n Compute the power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n power spectrum of the corresponding frame.\n ' return ((1.0 / nfft) * numpy.square(magspec(frames, nfft)))
def powspec(frames, nfft): '\n Compute the power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n power spectrum of the corresponding frame.\n ' return ((1.0 / nfft) * numpy.square(magspec(frames, nfft)))<|docstring|>Compute the power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the power spectrum of the corresponding frame.<|endoftext|>
3ae3c5fbe3c7043bfdde092202b506e0b0e0f0b0c10e8c8273b79032d3453da3
def logpowspec(frames, nfft, norm=1): '\n Compute the log power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n norm: If norm=1, the log power spectrum is normalised so that the max\n value (across all frames) is 1.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n log power spectrum of the corresponding frame.\n ' ps = powspec(frames, nfft) ps[(ps <= 1e-30)] = 1e-30 lps = (10 * numpy.log10(ps)) if norm: return (lps - numpy.max(lps)) else: return lps
Compute the log power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. norm: If norm=1, the log power spectrum is normalised so that the max value (across all frames) is 1. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the log power spectrum of the corresponding frame.
processing/sigproc.py
logpowspec
v0lta/tfkaldi
200
python
def logpowspec(frames, nfft, norm=1): '\n Compute the log power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n norm: If norm=1, the log power spectrum is normalised so that the max\n value (across all frames) is 1.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n log power spectrum of the corresponding frame.\n ' ps = powspec(frames, nfft) ps[(ps <= 1e-30)] = 1e-30 lps = (10 * numpy.log10(ps)) if norm: return (lps - numpy.max(lps)) else: return lps
def logpowspec(frames, nfft, norm=1): '\n Compute the log power spectrum of each frame in frames.\n\n If frames is an NxD matrix, output will be NxNFFT.\n\n Args:\n frames: the array of frames. Each row is a frame.\n nfft: the FFT length to use. If NFFT > frame_len, the frames are\n zero-padded.\n norm: If norm=1, the log power spectrum is normalised so that the max\n value (across all frames) is 1.\n\n Returns:\n If frames is an NxD matrix, output will be NxNFFT. Each row will be the\n log power spectrum of the corresponding frame.\n ' ps = powspec(frames, nfft) ps[(ps <= 1e-30)] = 1e-30 lps = (10 * numpy.log10(ps)) if norm: return (lps - numpy.max(lps)) else: return lps<|docstring|>Compute the log power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. Args: frames: the array of frames. Each row is a frame. nfft: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. norm: If norm=1, the log power spectrum is normalised so that the max value (across all frames) is 1. Returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the log power spectrum of the corresponding frame.<|endoftext|>
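With norm set, the log spectrum is shifted by its global maximum, so the peak comes out at 0 dB (a linear ratio of 1 relative to the loudest bin):

    import numpy

    ps = numpy.maximum(numpy.random.rand(4, 257), 1e-30)   # stand-in power spectra
    lps = 10 * numpy.log10(ps)
    normed = lps - numpy.max(lps)
    assert numpy.isclose(normed.max(), 0.0)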
21a19c75c99050c1ee902ef950a21cd39fec70c6270300c2c01e29fcf018ea69
def preemphasis(signal, coeff=0.95): '\n perform preemphasis on the input signal.\n\n Args:\n signal: The signal to filter.\n coeff: The preemphasis coefficient. 0 is no filter, default is 0.95.\n\n Returns:\n the filtered signal.\n ' return numpy.append(signal[0], (signal[1:] - (coeff * signal[:(- 1)])))
perform preemphasis on the input signal. Args: signal: The signal to filter. coeff: The preemphasis coefficient. 0 is no filter, default is 0.95. Returns: the filtered signal.
processing/sigproc.py
preemphasis
v0lta/tfkaldi
200
python
def preemphasis(signal, coeff=0.95): '\n perform preemphasis on the input signal.\n\n Args:\n signal: The signal to filter.\n coeff: The preemphasis coefficient. 0 is no filter, default is 0.95.\n\n Returns:\n the filtered signal.\n ' return numpy.append(signal[0], (signal[1:] - (coeff * signal[:(- 1)])))
def preemphasis(signal, coeff=0.95): '\n perform preemphasis on the input signal.\n\n Args:\n signal: The signal to filter.\n coeff: The preemphasis coefficient. 0 is no filter, default is 0.95.\n\n Returns:\n the filtered signal.\n ' return numpy.append(signal[0], (signal[1:] - (coeff * signal[:(- 1)])))<|docstring|>perform preemphasis on the input signal. Args: signal: The signal to filter. coeff: The preemphasis coefficient. 0 is no filter, default is 0.95. Returns: the filtered signal.<|endoftext|>
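The one-liner is the first-difference filter y[0] = x[0], y[n] = x[n] - coeff * x[n-1]; a tiny check:

    import numpy

    x = numpy.array([1.0, 2.0, 3.0, 4.0])
    y = numpy.append(x[0], x[1:] - 0.95 * x[:-1])
    assert numpy.allclose(y, [1.0, 1.05, 1.10, 1.15])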
d558534fb59ed65e019695628121081cb671d29a3f6272b42266ad52f6944c8f
def min_cost(self, costs: List[List[int]]) -> int: '\n Time: O(n), Space: O(1)\n\n :param costs:\n :return:\n ' if (not costs): return 0 pre0 = pre1 = pre2 = 0 for i in range(1, (len(costs) + 1)): cur0 = (min(pre1, pre2) + costs[(i - 1)][0]) cur1 = (min(pre0, pre2) + costs[(i - 1)][1]) cur2 = (min(pre0, pre1) + costs[(i - 1)][2]) (pre0, pre1, pre2) = (cur0, cur1, cur2) return min(pre0, pre1, pre2)
Time: O(n), Space: O(1) :param costs: :return:
python/src/problem/leetcode/easy/leetcode_256.py
min_cost
yipwinghong/Algorithm
9
python
def min_cost(self, costs: List[List[int]]) -> int: '\n Time: O(n), Space: O(1)\n\n :param costs:\n :return:\n ' if (not costs): return 0 pre0 = pre1 = pre2 = 0 for i in range(1, (len(costs) + 1)): cur0 = (min(pre1, pre2) + costs[(i - 1)][0]) cur1 = (min(pre0, pre2) + costs[(i - 1)][1]) cur2 = (min(pre0, pre1) + costs[(i - 1)][2]) (pre0, pre1, pre2) = (cur0, cur1, cur2) return min(pre0, pre1, pre2)
def min_cost(self, costs: List[List[int]]) -> int: '\n Time: O(n), Space: O(1)\n\n :param costs:\n :return:\n ' if (not costs): return 0 pre0 = pre1 = pre2 = 0 for i in range(1, (len(costs) + 1)): cur0 = (min(pre1, pre2) + costs[(i - 1)][0]) cur1 = (min(pre0, pre2) + costs[(i - 1)][1]) cur2 = (min(pre0, pre1) + costs[(i - 1)][2]) (pre0, pre1, pre2) = (cur0, cur1, cur2) return min(pre0, pre1, pre2)<|docstring|>Time: O(n), Space: O(1) :param costs: :return:<|endoftext|>
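The rolling variables pre0, pre1 and pre2 hold the cheapest running total that ends with each of the three colours; a worked run of the same recurrence on a standard Paint House test case:

    costs = [[17, 2, 17], [16, 16, 5], [14, 3, 19]]
    pre0 = pre1 = pre2 = 0
    for c0, c1, c2 in costs:
        pre0, pre1, pre2 = min(pre1, pre2) + c0, min(pre0, pre2) + c1, min(pre0, pre1) + c2
    assert min(pre0, pre1, pre2) == 10   # colours 1, 2, 1 -> 2 + 5 + 3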
ca7ea2756d50937c8a0984e077b8f1887d4c1a87ca9e01cb4fb5ebd002c6d478
def require(modulename, package=None): "\n Load, or reload a module.\n\n When under heavy development, a user's tool might consist of multiple\n modules. If those are imported using the standard 'import' mechanism,\n there is no guarantee that the Python implementation will re-read\n and re-evaluate the module's Python code. In fact, it usually doesn't.\n What should be done instead is 'reload()'-ing that module.\n\n This is a simple helper function that will do just that: In case the\n module doesn't exist, it 'import's it, and if it does exist,\n 'reload()'s it.\n\n The importing module (i.e., the module calling require()) will have\n the loaded module bound to its globals(), under the name 'modulename'.\n (If require() is called from the command line, the importing module\n will be '__main__'.)\n\n For more information, see: <http://www.hexblog.com/?p=749>.\n " import inspect (frame_obj, filename, line_number, function_name, lines, index) = inspect.stack()[1] importer_module = inspect.getmodule(frame_obj) if (importer_module is None): importer_module = sys.modules['__main__'] if (modulename in sys.modules.keys()): reload(sys.modules[modulename]) m = sys.modules[modulename] else: import importlib m = importlib.import_module(modulename, package) sys.modules[modulename] = m setattr(importer_module, modulename, m)
Load, or reload a module. When under heavy development, a user's tool might consist of multiple modules. If those are imported using the standard 'import' mechanism, there is no guarantee that the Python implementation will re-read and re-evaluate the module's Python code. In fact, it usually doesn't. What should be done instead is 'reload()'-ing that module. This is a simple helper function that will do just that: In case the module doesn't exist, it 'import's it, and if it does exist, 'reload()'s it. The importing module (i.e., the module calling require()) will have the loaded module bound to its globals(), under the name 'modulename'. (If require() is called from the command line, the importing module will be '__main__'.) For more information, see: <http://www.hexblog.com/?p=749>.
pywraps/py_idaapi.py
require
diamondo25/src
2
python
def require(modulename, package=None): "\n Load, or reload a module.\n\n When under heavy development, a user's tool might consist of multiple\n modules. If those are imported using the standard 'import' mechanism,\n there is no guarantee that the Python implementation will re-read\n and re-evaluate the module's Python code. In fact, it usually doesn't.\n What should be done instead is 'reload()'-ing that module.\n\n This is a simple helper function that will do just that: In case the\n module doesn't exist, it 'import's it, and if it does exist,\n 'reload()'s it.\n\n The importing module (i.e., the module calling require()) will have\n the loaded module bound to its globals(), under the name 'modulename'.\n (If require() is called from the command line, the importing module\n will be '__main__'.)\n\n For more information, see: <http://www.hexblog.com/?p=749>.\n " import inspect (frame_obj, filename, line_number, function_name, lines, index) = inspect.stack()[1] importer_module = inspect.getmodule(frame_obj) if (importer_module is None): importer_module = sys.modules['__main__'] if (modulename in sys.modules.keys()): reload(sys.modules[modulename]) m = sys.modules[modulename] else: import importlib m = importlib.import_module(modulename, package) sys.modules[modulename] = m setattr(importer_module, modulename, m)
def require(modulename, package=None): "\n Load, or reload a module.\n\n When under heavy development, a user's tool might consist of multiple\n modules. If those are imported using the standard 'import' mechanism,\n there is no guarantee that the Python implementation will re-read\n and re-evaluate the module's Python code. In fact, it usually doesn't.\n What should be done instead is 'reload()'-ing that module.\n\n This is a simple helper function that will do just that: In case the\n module doesn't exist, it 'import's it, and if it does exist,\n 'reload()'s it.\n\n The importing module (i.e., the module calling require()) will have\n the loaded module bound to its globals(), under the name 'modulename'.\n (If require() is called from the command line, the importing module\n will be '__main__'.)\n\n For more information, see: <http://www.hexblog.com/?p=749>.\n " import inspect (frame_obj, filename, line_number, function_name, lines, index) = inspect.stack()[1] importer_module = inspect.getmodule(frame_obj) if (importer_module is None): importer_module = sys.modules['__main__'] if (modulename in sys.modules.keys()): reload(sys.modules[modulename]) m = sys.modules[modulename] else: import importlib m = importlib.import_module(modulename, package) sys.modules[modulename] = m setattr(importer_module, modulename, m)<|docstring|>Load, or reload a module. When under heavy development, a user's tool might consist of multiple modules. If those are imported using the standard 'import' mechanism, there is no guarantee that the Python implementation will re-read and re-evaluate the module's Python code. In fact, it usually doesn't. What should be done instead is 'reload()'-ing that module. This is a simple helper function that will do just that: In case the module doesn't exist, it 'import's it, and if it does exist, 'reload()'s it. The importing module (i.e., the module calling require()) will have the loaded module bound to its globals(), under the name 'modulename'. (If require() is called from the command line, the importing module will be '__main__'.) For more information, see: <http://www.hexblog.com/?p=749>.<|endoftext|>
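require() relies on the Python 2 builtin reload and is meant to run inside IDA; a minimal Python 3 counterpart of the same idea using importlib (the name require_py3 is hypothetical, not part of the IDAPython API):

    import importlib
    import inspect
    import sys

    def require_py3(modulename, package=None):
        # Import or reload, then bind the module into the caller's namespace,
        # mirroring what require() does above.
        caller = inspect.getmodule(inspect.stack()[1].frame) or sys.modules['__main__']
        if modulename in sys.modules:
            module = importlib.reload(sys.modules[modulename])
        else:
            module = importlib.import_module(modulename, package)
        setattr(caller, modulename, module)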
41c5c34f574960874e5db2c0a810ddbe5a46a1dbefedcfd48f5960530989b141
def _bounded_getitem_iterator(self): 'Helper function, to be set as __iter__ method for qvector-, or array-based classes.' for i in range(len(self)): (yield self[i])
Helper function, to be set as __iter__ method for qvector-, or array-based classes.
pywraps/py_idaapi.py
_bounded_getitem_iterator
diamondo25/src
2
python
def _bounded_getitem_iterator(self): for i in range(len(self)): (yield self[i])
def _bounded_getitem_iterator(self): for i in range(len(self)): (yield self[i])<|docstring|>Helper function, to be set as __iter__ method for qvector-, or array-based classes.<|endoftext|>
9f1b39c59300acea70943ecbb062e64c0483b495ecdbbb112249393dd7ec61a5
def as_cstr(val): '\n Returns a C str from the passed value. The passed value can be of type refclass (returned by a call to buffer() or byref())\n It scans for the first \x00 and returns the string value up to that point.\n ' if isinstance(val, PyIdc_cvt_refclass__): val = val.value n = val.find('\x00') return (val if (n == (- 1)) else val[:n])
Returns a C str from the passed value. The passed value can be of type refclass (returned by a call to buffer() or byref()). It scans for the first \x00 and returns the string value up to that point.
pywraps/py_idaapi.py
as_cstr
diamondo25/src
2
python
def as_cstr(val): '\n Returns a C str from the passed value. The passed value can be of type refclass (returned by a call to buffer() or byref())\n It scans for the first \x00 and returns the string value up to that point.\n ' if isinstance(val, PyIdc_cvt_refclass__): val = val.value n = val.find('\x00') return (val if (n == (- 1)) else val[:n])
def as_cstr(val): '\n Returns a C str from the passed value. The passed value can be of type refclass (returned by a call to buffer() or byref())\n It scans for the first \x00 and returns the string value up to that point.\n ' if isinstance(val, PyIdc_cvt_refclass__): val = val.value n = val.find('\x00') return (val if (n == (- 1)) else val[:n])<|docstring|>Returns a C str from the passed value. The passed value can be of type refclass (returned by a call to buffer() or byref()) It scans for the first and returns the string value up to that point.<|endoftext|>
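The truncation step is just a find() on the first NUL byte; a standalone illustration of that logic (the helper name is hypothetical):

    def cstr_truncate(val: str) -> str:
        # Same rule as_cstr applies after unwrapping refclass values.
        n = val.find('\x00')
        return val if n == -1 else val[:n]

    assert cstr_truncate('hello\x00world') == 'hello'
    assert cstr_truncate('no terminator') == 'no terminator'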
f66883c22932ffe78ebb30f8ea3012fb304593598bb927719d790d36dfa84263
def as_unicode(s): 'Convenience function to convert a string into appropriate unicode format' import _ida_ida return unicode(s).encode(('UTF-16' + ('BE' if _ida_ida.cvar.inf.is_be() else 'LE')))
Convenience function to convert a string into appropriate unicode format
pywraps/py_idaapi.py
as_unicode
diamondo25/src
2
python
def as_unicode(s): import _ida_ida return unicode(s).encode(('UTF-16' + ('BE' if _ida_ida.cvar.inf.is_be() else 'LE')))
def as_unicode(s): import _ida_ida return unicode(s).encode(('UTF-16' + ('BE' if _ida_ida.cvar.inf.is_be() else 'LE')))<|docstring|>Convenience function to convert a string into appropriate unicode format<|endoftext|>
74af519fe573f4629db3a4e41ec7490138995b93511835b074e0d81039be68cd
def as_uint32(v): 'Returns a number as an unsigned int32 number' return (v & 4294967295)
Returns a number as an unsigned int32 number
pywraps/py_idaapi.py
as_uint32
diamondo25/src
2
python
def as_uint32(v): return (v & 4294967295)
def as_uint32(v): return (v & 4294967295)<|docstring|>Returns a number as an unsigned int32 number<|endoftext|>
6287a575a9c1da335300b80da3efabc6568f6dc6270909d8de2464cb6ca317be
def as_int32(v): 'Returns a number as a signed int32 number' return (- (((~ v) & 4294967295) + 1))
Returns a number as a signed int32 number
pywraps/py_idaapi.py
as_int32
diamondo25/src
2
python
def as_int32(v): return (- (((~ v) & 4294967295) + 1))
def as_int32(v): return (- (((~ v) & 4294967295) + 1))<|docstring|>Returns a number as a signed int32 number<|endoftext|>
e3546dba1dae5ecb5a6f252908daa065a1f2ab93153e8d436202b3f8ac0cfa5a
def as_signed(v, nbits=32): '\n Returns a number as signed. The number of bits are specified by the user.\n The MSB holds the sign.\n ' return ((- (((~ v) & ((1 << nbits) - 1)) + 1)) if (v & (1 << (nbits - 1))) else v)
Returns a number as signed. The number of bits are specified by the user. The MSB holds the sign.
pywraps/py_idaapi.py
as_signed
diamondo25/src
2
python
def as_signed(v, nbits=32): '\n Returns a number as signed. The number of bits are specified by the user.\n The MSB holds the sign.\n ' return ((- (((~ v) & ((1 << nbits) - 1)) + 1)) if (v & (1 << (nbits - 1))) else v)
def as_signed(v, nbits=32): '\n Returns a number as signed. The number of bits are specified by the user.\n The MSB holds the sign.\n ' return ((- (((~ v) & ((1 << nbits) - 1)) + 1)) if (v & (1 << (nbits - 1))) else v)<|docstring|>Returns a number as signed. The number of bits are specified by the user. The MSB holds the sign.<|endoftext|>
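Worked values for the three integer helpers, with the definitions repeated in compact form so the checks run standalone:

    def as_uint32(v): return v & 0xFFFFFFFF
    def as_int32(v): return -((~v & 0xFFFFFFFF) + 1)
    def as_signed(v, nbits=32):
        return -((~v & ((1 << nbits) - 1)) + 1) if v & (1 << (nbits - 1)) else v

    assert as_uint32(-1) == 0xFFFFFFFF        # wrap into 32 unsigned bits
    assert as_int32(0xFFFFFFFE) == -2         # equivalent to subtracting 2**32
    assert as_signed(0x80, nbits=8) == -128   # MSB set: negative
    assert as_signed(0x7F, nbits=8) == 127    # MSB clear: value unchanged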
71a5306740c2bf1e8907c5f2927b36de87653f7baedee3b1b383ce9e971b044f
def copy_bits(v, s, e=(- 1)): '\n Copy bits from a value\n @param v: the value\n @param s: starting bit (0-based)\n @param e: ending bit\n ' if (e == (- 1)): e = s if (s > e): (e, s) = (s, e) mask = (~ (((1 << ((e - s) + 1)) - 1) << s)) return ((v & mask) >> s)
Copy bits from a value @param v: the value @param s: starting bit (0-based) @param e: ending bit
pywraps/py_idaapi.py
copy_bits
diamondo25/src
2
python
def copy_bits(v, s, e=(- 1)): '\n Copy bits from a value\n @param v: the value\n @param s: starting bit (0-based)\n @param e: ending bit\n ' if (e == (- 1)): e = s if (s > e): (e, s) = (s, e) mask = (~ (((1 << ((e - s) + 1)) - 1) << s)) return ((v & mask) >> s)
def copy_bits(v, s, e=(- 1)): '\n Copy bits from a value\n @param v: the value\n @param s: starting bit (0-based)\n @param e: ending bit\n ' if (e == (- 1)): e = s if (s > e): (e, s) = (s, e) mask = (~ (((1 << ((e - s) + 1)) - 1) << s)) return ((v & mask) >> s)<|docstring|>Copy bits from a value @param v: the value @param s: starting bit (0-based) @param e: ending bit<|endoftext|>
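Note that the mask here is the complement of the selected range, so as written the function clears bits s..e and right-shifts what remains rather than extracting the range; whether that is intended is not clear from the record. A standalone check of the actual behaviour next to the conventional extraction:

    def copy_bits(v, s, e=-1):
        # Compact copy of the record's logic.
        if e == -1: e = s
        if s > e: e, s = s, e
        mask = ~(((1 << (e - s + 1)) - 1) << s)
        return (v & mask) >> s

    assert copy_bits(0b1101, 1, 2) == 0b100    # bits 1..2 cleared, remainder shifted
    assert (0b1101 >> 1) & 0b11 == 0b10        # conventional "extract bits 1..2"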
55ce6278a184609e1542b877c69b0baa8e94e8b5e09135797928e9769499410f
def struct_unpack(buffer, signed=False, offs=0): "\n Unpack a buffer given its length and offset using struct.unpack_from().\n This function will know how to unpack the given buffer by using the lookup table '__struct_unpack_table'\n If the buffer is of unknown length then None is returned. Otherwise the unpacked value is returned.\n " n = len(buffer) if (n not in __struct_unpack_table): return None signed = (1 if signed else 0) return struct.unpack_from(__struct_unpack_table[n][signed], buffer, offs)[0]
Unpack a buffer given its length and offset using struct.unpack_from(). This function will know how to unpack the given buffer by using the lookup table '__struct_unpack_table' If the buffer is of unknown length then None is returned. Otherwise the unpacked value is returned.
pywraps/py_idaapi.py
struct_unpack
diamondo25/src
2
python
def struct_unpack(buffer, signed=False, offs=0): "\n Unpack a buffer given its length and offset using struct.unpack_from().\n This function will know how to unpack the given buffer by using the lookup table '__struct_unpack_table'\n If the buffer is of unknown length then None is returned. Otherwise the unpacked value is returned.\n " n = len(buffer) if (n not in __struct_unpack_table): return None signed = (1 if signed else 0) return struct.unpack_from(__struct_unpack_table[n][signed], buffer, offs)[0]
def struct_unpack(buffer, signed=False, offs=0): "\n Unpack a buffer given its length and offset using struct.unpack_from().\n This function will know how to unpack the given buffer by using the lookup table '__struct_unpack_table'\n If the buffer is of unknown length then None is returned. Otherwise the unpacked value is returned.\n " n = len(buffer) if (n not in __struct_unpack_table): return None signed = (1 if signed else 0) return struct.unpack_from(__struct_unpack_table[n][signed], buffer, offs)[0]<|docstring|>Unpack a buffer given its length and offset using struct.unpack_from(). This function will know how to unpack the given buffer by using the lookup table '__struct_unpack_table' If the buffer is of unknown length then None is returned. Otherwise the unpacked value is returned.<|endoftext|>
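The lookup table itself is not part of this record; a stand-in consistent with the indexing above (index 0 for unsigned, index 1 for signed) might look like the following, where the format codes and little-endian prefix are assumptions rather than the IDAPython originals:

    import struct

    __struct_unpack_table = {
        1: ('<B', '<b'),
        2: ('<H', '<h'),
        4: ('<I', '<i'),
        8: ('<Q', '<q'),
    }

    def unpack(buf, signed=False, offs=0):
        fmt = __struct_unpack_table[len(buf)][1 if signed else 0]
        return struct.unpack_from(fmt, buf, offs)[0]

    assert unpack(b'\xfe\xff\xff\xff') == 0xFFFFFFFE
    assert unpack(b'\xfe\xff\xff\xff', signed=True) == -2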
d674e4b145772084fa59b8f57049bbe83d2e4493bbf624a1199e4df8a17127cb
def IDAPython_ExecSystem(cmd): '\n Executes a command with popen().\n ' try: cmd = _utf8_native(cmd) f = os.popen(cmd, 'r') s = ''.join(f.readlines()) f.close() return s except Exception as e: return ('%s\n%s' % (str(e), traceback.format_exc()))
Executes a command with popen().
pywraps/py_idaapi.py
IDAPython_ExecSystem
diamondo25/src
2
python
def IDAPython_ExecSystem(cmd): '\n \n ' try: cmd = _utf8_native(cmd) f = os.popen(cmd, 'r') s = ''.join(f.readlines()) f.close() return s except Exception as e: return ('%s\n%s' % (str(e), traceback.format_exc()))
def IDAPython_ExecSystem(cmd): '\n \n ' try: cmd = _utf8_native(cmd) f = os.popen(cmd, 'r') s = ''.join(f.readlines()) f.close() return s except Exception as e: return ('%s\n%s' % (str(e), traceback.format_exc()))<|docstring|>Executes a command with popen().<|endoftext|>
bfe580724ccca85005e16b3a47706895bc3bfed973fc61b691920d3827314244
def IDAPython_FormatExc(etype, value, tb, limit=None): '\n This function is used to format an exception given the\n values returned by a PyErr_Fetch()\n ' try: return ''.join(traceback.format_exception(etype, value, tb, limit)) except: return str(value)
This function is used to format an exception given the values returned by a PyErr_Fetch()
pywraps/py_idaapi.py
IDAPython_FormatExc
diamondo25/src
2
python
def IDAPython_FormatExc(etype, value, tb, limit=None): '\n This function is used to format an exception given the\n values returned by a PyErr_Fetch()\n ' try: return ''.join(traceback.format_exception(etype, value, tb, limit)) except: return str(value)
def IDAPython_FormatExc(etype, value, tb, limit=None): '\n This function is used to format an exception given the\n values returned by a PyErr_Fetch()\n ' try: return ''.join(traceback.format_exception(etype, value, tb, limit)) except: return str(value)<|docstring|>This function is used to format an exception given the values returned by a PyErr_Fetch()<|endoftext|>
8d446adb7ad278dc0c0d03ef7a455028ea3728f55e9939a1556c3bf5f7eaa3c8
def IDAPython_ExecScript(script, g, print_error=True): '\n Run the specified script.\n It also addresses http://code.google.com/p/idapython/issues/detail?id=42\n\n This function is used by the low-level plugin code.\n ' script = _utf8_native(script) scriptpath = os.path.dirname(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) argv = sys.argv sys.argv = [script] old__file__ = (g['__file__'] if ('__file__' in g) else '') g['__file__'] = script try: execfile(script, g) PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: g['__file__'] = old__file__ sys.argv = argv return PY_COMPILE_ERR
Run the specified script. It also addresses http://code.google.com/p/idapython/issues/detail?id=42 This function is used by the low-level plugin code.
pywraps/py_idaapi.py
IDAPython_ExecScript
diamondo25/src
2
python
def IDAPython_ExecScript(script, g, print_error=True): '\n Run the specified script.\n It also addresses http://code.google.com/p/idapython/issues/detail?id=42\n\n This function is used by the low-level plugin code.\n ' script = _utf8_native(script) scriptpath = os.path.dirname(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) argv = sys.argv sys.argv = [script] old__file__ = (g['__file__'] if ('__file__' in g) else '') g['__file__'] = script try: execfile(script, g) PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: g['__file__'] = old__file__ sys.argv = argv return PY_COMPILE_ERR
def IDAPython_ExecScript(script, g, print_error=True): '\n Run the specified script.\n It also addresses http://code.google.com/p/idapython/issues/detail?id=42\n\n This function is used by the low-level plugin code.\n ' script = _utf8_native(script) scriptpath = os.path.dirname(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) argv = sys.argv sys.argv = [script] old__file__ = (g['__file__'] if ('__file__' in g) else '') g['__file__'] = script try: execfile(script, g) PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: g['__file__'] = old__file__ sys.argv = argv return PY_COMPILE_ERR<|docstring|>Run the specified script. It also addresses http://code.google.com/p/idapython/issues/detail?id=42 This function is used by the low-level plugin code.<|endoftext|>
3dfe03dbe4f74a8621c9e95d540fe026f4d00d0bfb46ca57bf092f3514ef043e
def IDAPython_LoadProcMod(script, g, print_error=True): '\n Load processor module.\n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] (scriptpath, scriptname) = os.path.split(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) procmod_name = os.path.splitext(scriptname)[0] procobj = None fp = None try: (fp, pathname, description) = imp.find_module(procmod_name) procmod = imp.load_module(procmod_name, fp, pathname, description) if parent: setattr(parent, procmod_name, procmod) parent_attrs = getattr(parent, '__all__', (attr for attr in dir(parent) if (not attr.startswith('_')))) for pa in parent_attrs: setattr(procmod, pa, getattr(parent, pa)) if getattr(procmod, 'PROCESSOR_ENTRY', None): procobj = procmod.PROCESSOR_ENTRY() PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: if fp: fp.close() sys.path.remove(scriptpath) return (PY_COMPILE_ERR, procobj)
Load processor module.
pywraps/py_idaapi.py
IDAPython_LoadProcMod
diamondo25/src
2
python
def IDAPython_LoadProcMod(script, g, print_error=True): '\n \n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] (scriptpath, scriptname) = os.path.split(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) procmod_name = os.path.splitext(scriptname)[0] procobj = None fp = None try: (fp, pathname, description) = imp.find_module(procmod_name) procmod = imp.load_module(procmod_name, fp, pathname, description) if parent: setattr(parent, procmod_name, procmod) parent_attrs = getattr(parent, '__all__', (attr for attr in dir(parent) if (not attr.startswith('_')))) for pa in parent_attrs: setattr(procmod, pa, getattr(parent, pa)) if getattr(procmod, 'PROCESSOR_ENTRY', None): procobj = procmod.PROCESSOR_ENTRY() PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: if fp: fp.close() sys.path.remove(scriptpath) return (PY_COMPILE_ERR, procobj)
def IDAPython_LoadProcMod(script, g, print_error=True): '\n \n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] (scriptpath, scriptname) = os.path.split(script) if (len(scriptpath) and (scriptpath not in sys.path)): sys.path.append(scriptpath) procmod_name = os.path.splitext(scriptname)[0] procobj = None fp = None try: (fp, pathname, description) = imp.find_module(procmod_name) procmod = imp.load_module(procmod_name, fp, pathname, description) if parent: setattr(parent, procmod_name, procmod) parent_attrs = getattr(parent, '__all__', (attr for attr in dir(parent) if (not attr.startswith('_')))) for pa in parent_attrs: setattr(procmod, pa, getattr(parent, pa)) if getattr(procmod, 'PROCESSOR_ENTRY', None): procobj = procmod.PROCESSOR_ENTRY() PY_COMPILE_ERR = None except Exception as e: PY_COMPILE_ERR = ('%s\n%s' % (str(e), traceback.format_exc())) if print_error: print(PY_COMPILE_ERR) finally: if fp: fp.close() sys.path.remove(scriptpath) return (PY_COMPILE_ERR, procobj)<|docstring|>Load processor module.<|endoftext|>
28ccbb17fe810e7d5738071234c830af719fb118aa7ae7aeab1412765a6f36b2
def IDAPython_UnLoadProcMod(script, g, print_error=True): '\n Unload processor module.\n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] scriptname = os.path.split(script)[1] procmod_name = os.path.splitext(scriptname)[0] if getattr(parent, procmod_name, None): delattr(parent, procmod_name) del sys.modules[procmod_name] PY_COMPILE_ERR = None return PY_COMPILE_ERR
Unload processor module.
pywraps/py_idaapi.py
IDAPython_UnLoadProcMod
diamondo25/src
2
python
def IDAPython_UnLoadProcMod(script, g, print_error=True): '\n \n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] scriptname = os.path.split(script)[1] procmod_name = os.path.splitext(scriptname)[0] if getattr(parent, procmod_name, None): delattr(parent, procmod_name) del sys.modules[procmod_name] PY_COMPILE_ERR = None return PY_COMPILE_ERR
def IDAPython_UnLoadProcMod(script, g, print_error=True): '\n \n ' script = _utf8_native(script) pname = (g['__name__'] if (g and ('__name__' in g)) else '__main__') parent = sys.modules[pname] scriptname = os.path.split(script)[1] procmod_name = os.path.splitext(scriptname)[0] if getattr(parent, procmod_name, None): delattr(parent, procmod_name) del sys.modules[procmod_name] PY_COMPILE_ERR = None return PY_COMPILE_ERR<|docstring|>Unload processor module.<|endoftext|>
c15cc339218c0715001c09d61208a0d946cb92488a3a8051d3fda323c3eec2b9
def __del__(self): 'Delete the link upon object destruction (only if not static)' self._free()
Delete the link upon object destruction (only if not static)
pywraps/py_idaapi.py
__del__
diamondo25/src
2
python
def __del__(self): self._free()
def __del__(self): self._free()<|docstring|>Delete the link upon object destruction (only if not static)<|endoftext|>
8c0430d31db4dfe8f2e673c2c5a2d1bdc90443378529cbf157193fc16e1f1897
def _free(self): 'Explicitly delete the link (only if not static)' if ((not self.__static_clink__) and (self.__clink__ is not None)): self._del_clink(self.__clink__) self.__clink__ = None
Explicitly delete the link (only if not static)
pywraps/py_idaapi.py
_free
diamondo25/src
2
python
def _free(self): if ((not self.__static_clink__) and (self.__clink__ is not None)): self._del_clink(self.__clink__) self.__clink__ = None
def _free(self): if ((not self.__static_clink__) and (self.__clink__ is not None)): self._del_clink(self.__clink__) self.__clink__ = None<|docstring|>Explicitly delete the link (only if not static)<|endoftext|>
241c6a899853c8498c3e5d66ad856f8546408bb5bd1c0fc07ead6a714be7df68
def copy(self): 'Returns a new copy of this class' inst = self.__class__() inst.assign(self) return inst
Returns a new copy of this class
pywraps/py_idaapi.py
copy
diamondo25/src
2
python
def copy(self): inst = self.__class__() inst.assign(self) return inst
def copy(self): inst = self.__class__() inst.assign(self) return inst<|docstring|>Returns a new copy of this class<|endoftext|>
973ed44551e3917065fab51ab5bc04badac1bae973075e50c5af8f0a6208bc77
def _create_clink(self): '\n Overwrite me.\n Creates a new clink\n @return: PyCObject representing the C link\n ' pass
Overwrite me. Creates a new clink @return: PyCObject representing the C link
pywraps/py_idaapi.py
_create_clink
diamondo25/src
2
python
def _create_clink(self): '\n Overwrite me.\n Creates a new clink\n @return: PyCObject representing the C link\n ' pass
def _create_clink(self): '\n Overwrite me.\n Creates a new clink\n @return: PyCObject representing the C link\n ' pass<|docstring|>Overwrite me. Creates a new clink @return: PyCObject representing the C link<|endoftext|>
30ca0c44d7aa729ab1ed7fdd45011500352daffa3e0e942e6d486366cce1e4a6
def _del_clink(self, lnk): '\n Overwrite me.\n This method deletes the link\n ' pass
Overwrite me. This method deletes the link
pywraps/py_idaapi.py
_del_clink
diamondo25/src
2
python
def _del_clink(self, lnk): '\n Overwrite me.\n This method deletes the link\n ' pass
def _del_clink(self, lnk): '\n Overwrite me.\n This method deletes the link\n ' pass<|docstring|>Overwrite me. This method deletes the link<|endoftext|>
ecff10e13343752c8e15f72ee9f6773144574061442294289e694e169e1adf23
def _get_clink_ptr(self): '\n Overwrite me.\n Returns the C link pointer as a 64bit number\n ' pass
Overwrite me. Returns the C link pointer as a 64bit number
pywraps/py_idaapi.py
_get_clink_ptr
diamondo25/src
2
python
def _get_clink_ptr(self): '\n Overwrite me.\n Returns the C link pointer as a 64bit number\n ' pass
def _get_clink_ptr(self): '\n Overwrite me.\n Returns the C link pointer as a 64bit number\n ' pass<|docstring|>Overwrite me. Returns the C link pointer as a 64bit number<|endoftext|>
e74863946db3403c1c549e2ae2ec3b0817f3721efa58fd4b69e0c005632a0831
def assign(self, other): '\n Overwrite me.\n This method allows you to assign an instance contents to anothers\n @return: Boolean\n ' pass
Overwrite me. This method allows you to assign an instance contents to anothers @return: Boolean
pywraps/py_idaapi.py
assign
diamondo25/src
2
python
def assign(self, other): '\n Overwrite me.\n This method allows you to assign an instance contents to anothers\n @return: Boolean\n ' pass
def assign(self, other): '\n Overwrite me.\n This method allows you to assign an instance contents to anothers\n @return: Boolean\n ' pass<|docstring|>Overwrite me. This method allows you to assign an instance contents to anothers @return: Boolean<|endoftext|>
7897545e3ad17602fe11830ac1caf40753801fb8e4010801803017c5b6f04cdc
def __getitem__(self, idx): 'Allow access to object attributes by index (like dictionaries)' return getattr(self, idx)
Allow access to object attributes by index (like dictionaries)
pywraps/py_idaapi.py
__getitem__
diamondo25/src
2
python
def __getitem__(self, idx): return getattr(self, idx)
def __getitem__(self, idx): return getattr(self, idx)<|docstring|>Allow access to object attributes by index (like dictionaries)<|endoftext|>
3375f14742e298fb62978815cd4950e2384322b1957e0ddc62e2124bc9f81236
def cstr(self): 'Returns the string as a C string (up to the zero termination)' return as_cstr(self.value)
Returns the string as a C string (up to the zero termination)
pywraps/py_idaapi.py
cstr
diamondo25/src
2
python
def cstr(self): return as_cstr(self.value)
def cstr(self): return as_cstr(self.value)<|docstring|>Returns the string as a C string (up to the zero termination)<|endoftext|>
a19f766f5e846d3f1a9559ca44e0acd8b8fd26fe1595604e457aefc502b85e4e
def accuracy(model, questions, lowercase=True, restrict_vocab=30000): '\n Compute accuracy of the model. `questions` is a filename where lines are\n 4-tuples of words, split into sections by ": SECTION NAME" lines.\n See https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt for an example.\n\n The accuracy is reported (=printed to log and returned as a list) for each\n section separately, plus there\'s one aggregate summary at the end.\n\n Use `restrict_vocab` to ignore all questions containing a word whose frequency\n is not in the top-N most frequent words (default top 30,000).\n\n This method corresponds to the `compute-accuracy` script of the original C word2vec.\n\n ' ok_vocab = dict(sorted(model.wv.vocab.items(), key=(lambda item: (- item[1].count)))[:restrict_vocab]) ok_index = set((v.index for v in ok_vocab.values())) def log_accuracy(section): (correct, incorrect) = (section['correct'], section['incorrect']) if ((correct + incorrect) > 0): print(('%s: %.1f%% (%i/%i)' % (section['section'], ((100.0 * correct) / (correct + incorrect)), correct, (correct + incorrect)))) (sections, section) = ([], None) for (line_no, line) in enumerate(open(questions)): if line.startswith(': '): if section: sections.append(section) log_accuracy(section) section = {'section': line.lstrip(': ').strip(), 'correct': 0, 'incorrect': 0} else: if (not section): raise ValueError(('missing section header before line #%i in %s' % (line_no, questions))) try: if lowercase: (a, b, c, expected) = [word.lower() for word in line.split()] else: (a, b, c, expected) = [word for word in line.split()] except: print(('skipping invalid line #%i in %s' % (line_no, questions))) if ((a not in ok_vocab) or (b not in ok_vocab) or (c not in ok_vocab) or (expected not in ok_vocab)): continue ignore = set((model.wv.vocab[v].index for v in [a, b, c])) predicted = None for index in np.argsort(analogy(model, a, b, c))[::(- 1)]: if ((index in ok_index) and (index not in ignore)): predicted = model.wv.index2word[index] break section[('correct' if (predicted == expected) else 'incorrect')] += 1 if section: sections.append(section) log_accuracy(section) total = {'section': 'total', 'correct': sum((s['correct'] for s in sections)), 'incorrect': sum((s['incorrect'] for s in sections))} log_accuracy(total) sections.append(total) return sections
Compute accuracy of the model. `questions` is a filename where lines are 4-tuples of words, split into sections by ": SECTION NAME" lines. See https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt for an example. The accuracy is reported (=printed to log and returned as a list) for each section separately, plus there's one aggregate summary at the end. Use `restrict_vocab` to ignore all questions containing a word whose frequency is not in the top-N most frequent words (default top 30,000). This method corresponds to the `compute-accuracy` script of the original C word2vec.
examples/test_analogy.py
accuracy
cod3licious/conec
23
python
def accuracy(model, questions, lowercase=True, restrict_vocab=30000): '\n Compute accuracy of the model. `questions` is a filename where lines are\n 4-tuples of words, split into sections by ": SECTION NAME" lines.\n See https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt for an example.\n\n The accuracy is reported (=printed to log and returned as a list) for each\n section separately, plus there\'s one aggregate summary at the end.\n\n Use `restrict_vocab` to ignore all questions containing a word whose frequency\n is not in the top-N most frequent words (default top 30,000).\n\n This method corresponds to the `compute-accuracy` script of the original C word2vec.\n\n ' ok_vocab = dict(sorted(model.wv.vocab.items(), key=(lambda item: (- item[1].count)))[:restrict_vocab]) ok_index = set((v.index for v in ok_vocab.values())) def log_accuracy(section): (correct, incorrect) = (section['correct'], section['incorrect']) if ((correct + incorrect) > 0): print(('%s: %.1f%% (%i/%i)' % (section['section'], ((100.0 * correct) / (correct + incorrect)), correct, (correct + incorrect)))) (sections, section) = ([], None) for (line_no, line) in enumerate(open(questions)): if line.startswith(': '): if section: sections.append(section) log_accuracy(section) section = {'section': line.lstrip(': ').strip(), 'correct': 0, 'incorrect': 0} else: if (not section): raise ValueError(('missing section header before line #%i in %s' % (line_no, questions))) try: if lowercase: (a, b, c, expected) = [word.lower() for word in line.split()] else: (a, b, c, expected) = [word for word in line.split()] except: print(('skipping invalid line #%i in %s' % (line_no, questions))) if ((a not in ok_vocab) or (b not in ok_vocab) or (c not in ok_vocab) or (expected not in ok_vocab)): continue ignore = set((model.wv.vocab[v].index for v in [a, b, c])) predicted = None for index in np.argsort(analogy(model, a, b, c))[::(- 1)]: if ((index in ok_index) and (index not in ignore)): predicted = model.wv.index2word[index] break section[('correct' if (predicted == expected) else 'incorrect')] += 1 if section: sections.append(section) log_accuracy(section) total = {'section': 'total', 'correct': sum((s['correct'] for s in sections)), 'incorrect': sum((s['incorrect'] for s in sections))} log_accuracy(total) sections.append(total) return sections
def accuracy(model, questions, lowercase=True, restrict_vocab=30000): '\n Compute accuracy of the model. `questions` is a filename where lines are\n 4-tuples of words, split into sections by ": SECTION NAME" lines.\n See https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt for an example.\n\n The accuracy is reported (=printed to log and returned as a list) for each\n section separately, plus there\'s one aggregate summary at the end.\n\n Use `restrict_vocab` to ignore all questions containing a word whose frequency\n is not in the top-N most frequent words (default top 30,000).\n\n This method corresponds to the `compute-accuracy` script of the original C word2vec.\n\n ' ok_vocab = dict(sorted(model.wv.vocab.items(), key=(lambda item: (- item[1].count)))[:restrict_vocab]) ok_index = set((v.index for v in ok_vocab.values())) def log_accuracy(section): (correct, incorrect) = (section['correct'], section['incorrect']) if ((correct + incorrect) > 0): print(('%s: %.1f%% (%i/%i)' % (section['section'], ((100.0 * correct) / (correct + incorrect)), correct, (correct + incorrect)))) (sections, section) = ([], None) for (line_no, line) in enumerate(open(questions)): if line.startswith(': '): if section: sections.append(section) log_accuracy(section) section = {'section': line.lstrip(': ').strip(), 'correct': 0, 'incorrect': 0} else: if (not section): raise ValueError(('missing section header before line #%i in %s' % (line_no, questions))) try: if lowercase: (a, b, c, expected) = [word.lower() for word in line.split()] else: (a, b, c, expected) = [word for word in line.split()] except: print(('skipping invalid line #%i in %s' % (line_no, questions))) if ((a not in ok_vocab) or (b not in ok_vocab) or (c not in ok_vocab) or (expected not in ok_vocab)): continue ignore = set((model.wv.vocab[v].index for v in [a, b, c])) predicted = None for index in np.argsort(analogy(model, a, b, c))[::(- 1)]: if ((index in ok_index) and (index not in ignore)): predicted = model.wv.index2word[index] break section[('correct' if (predicted == expected) else 'incorrect')] += 1 if section: sections.append(section) log_accuracy(section) total = {'section': 'total', 'correct': sum((s['correct'] for s in sections)), 'incorrect': sum((s['incorrect'] for s in sections))} log_accuracy(total) sections.append(total) return sections<|docstring|>Compute accuracy of the model. `questions` is a filename where lines are 4-tuples of words, split into sections by ": SECTION NAME" lines. See https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt for an example. The accuracy is reported (=printed to log and returned as a list) for each section separately, plus there's one aggregate summary at the end. Use `restrict_vocab` to ignore all questions containing a word whose frequency is not in the top-N most frequent words (default top 30,000). This method corresponds to the `compute-accuracy` script of the original C word2vec.<|endoftext|>
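Note on the record above: it scores word-analogy questions the way the original word2vec compute-accuracy script does, where a line such as "man king woman queen" under a ": section" header asks for the word completing b - a + c. The `analogy` helper it calls is not shown in the record; the sketch below only illustrates that offset idea with made-up toy vectors, it is not the repository's implementation.

    import numpy as np

    def analogy_scores(vectors, a, b, c):
        # Score every vocabulary word for the analogy a : b :: c : ?
        # `vectors` maps word -> unit-normalised 1-D numpy array; higher is better.
        target = vectors[b] - vectors[a] + vectors[c]
        target = target / np.linalg.norm(target)
        words = list(vectors)
        scores = [float(np.dot(vectors[w], target)) for w in words]
        return words, scores

    raw = {"king": np.array([0.9, 0.8]), "man": np.array([1.0, 0.1]),
           "woman": np.array([0.1, 1.0]), "queen": np.array([0.2, 1.6])}
    vecs = {w: v / np.linalg.norm(v) for w, v in raw.items()}
    words, scores = analogy_scores(vecs, "man", "king", "woman")
    best = max((w for w in words if w not in {"man", "king", "woman"}),
               key=lambda w: scores[words.index(w)])
    print(best)  # "queen" for these toy vectors

As in the record, the three query words themselves are excluded before picking the best-scoring candidate.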
a329f70d10cfdd1fcfdf2c92ca87b03e75d72ee931e50a480ff27c68c15e9c49
def init_ui(self, posterize): '\n Create user interface for :class:`posterize.Posterize`.\n\n The method creates the widget objects in the proper containers\n and assigns the object names to them.\n\n :param posterize: The dialog posterize window\n :type posterize: :class:`posterize.Posterize`\n ' self.operation_ui(self) posterize.setObjectName('posterize') icon = QIcon() icon.addPixmap(QPixmap('icons/posterize.png'), QIcon.Normal, QIcon.Off) posterize.setWindowIcon(icon) self.label_bins_num = QLabel(posterize) self.label_bins_num.setObjectName('label_bins_num') self.label_bins_num.setAlignment(Qt.AlignCenter) self.bins_slider = QSlider(posterize) self.bins_slider.setOrientation(Qt.Horizontal) self.bins_slider.setPageStep(0) self.bins_slider.setObjectName('bins_slider') self.layout.addWidget(self.label_bins_num) self.layout.addWidget(self.bins_slider) self.layout.addWidget(self.show_hist_widget) self.layout.addWidget(self.preview_widget) self.layout.addWidget(self.button_box) posterize.setLayout(self.layout) QMetaObject.connectSlotsByName(posterize)
Create user interface for :class:`posterize.Posterize`. The method creates the widget objects in the proper containers and assigns the object names to them. :param posterize: The dialog posterize window :type posterize: :class:`posterize.Posterize`
src/operations/point/posterize_ui.py
init_ui
vmariiechko/python-image-processing
0
python
def init_ui(self, posterize): '\n Create user interface for :class:`posterize.Posterize`.\n\n The method creates the widget objects in the proper containers\n and assigns the object names to them.\n\n :param posterize: The dialog posterize window\n :type posterize: :class:`posterize.Posterize`\n ' self.operation_ui(self) posterize.setObjectName('posterize') icon = QIcon() icon.addPixmap(QPixmap('icons/posterize.png'), QIcon.Normal, QIcon.Off) posterize.setWindowIcon(icon) self.label_bins_num = QLabel(posterize) self.label_bins_num.setObjectName('label_bins_num') self.label_bins_num.setAlignment(Qt.AlignCenter) self.bins_slider = QSlider(posterize) self.bins_slider.setOrientation(Qt.Horizontal) self.bins_slider.setPageStep(0) self.bins_slider.setObjectName('bins_slider') self.layout.addWidget(self.label_bins_num) self.layout.addWidget(self.bins_slider) self.layout.addWidget(self.show_hist_widget) self.layout.addWidget(self.preview_widget) self.layout.addWidget(self.button_box) posterize.setLayout(self.layout) QMetaObject.connectSlotsByName(posterize)
def init_ui(self, posterize): '\n Create user interface for :class:`posterize.Posterize`.\n\n The method creates the widget objects in the proper containers\n and assigns the object names to them.\n\n :param posterize: The dialog posterize window\n :type posterize: :class:`posterize.Posterize`\n ' self.operation_ui(self) posterize.setObjectName('posterize') icon = QIcon() icon.addPixmap(QPixmap('icons/posterize.png'), QIcon.Normal, QIcon.Off) posterize.setWindowIcon(icon) self.label_bins_num = QLabel(posterize) self.label_bins_num.setObjectName('label_bins_num') self.label_bins_num.setAlignment(Qt.AlignCenter) self.bins_slider = QSlider(posterize) self.bins_slider.setOrientation(Qt.Horizontal) self.bins_slider.setPageStep(0) self.bins_slider.setObjectName('bins_slider') self.layout.addWidget(self.label_bins_num) self.layout.addWidget(self.bins_slider) self.layout.addWidget(self.show_hist_widget) self.layout.addWidget(self.preview_widget) self.layout.addWidget(self.button_box) posterize.setLayout(self.layout) QMetaObject.connectSlotsByName(posterize)<|docstring|>Create user interface for :class:`posterize.Posterize`. The method creates the widget objects in the proper containers and assigns the object names to them. :param posterize: The dialog posterize window :type posterize: :class:`posterize.Posterize`<|endoftext|>
83f60e670b502b3a98997e9e4455a3c64386da5be9d1af2336b4e08ee9b92eeb
def expand(self, action_priors): 'Expand tree by creating new children.\n action_priors: a list of tuples of actions and their prior probability\n according to the policy function.\n ' for (action, prob) in action_priors: if (action not in self._children): self._children[action] = TreeNode(self, prob)
Expand tree by creating new children. action_priors: a list of tuples of actions and their prior probability according to the policy function.
mcts_alphaZero.py
expand
quietsmile/AlphaZero_Gomoku
2,876
python
def expand(self, action_priors): 'Expand tree by creating new children.\n action_priors: a list of tuples of actions and their prior probability\n according to the policy function.\n ' for (action, prob) in action_priors: if (action not in self._children): self._children[action] = TreeNode(self, prob)
def expand(self, action_priors): 'Expand tree by creating new children.\n action_priors: a list of tuples of actions and their prior probability\n according to the policy function.\n ' for (action, prob) in action_priors: if (action not in self._children): self._children[action] = TreeNode(self, prob)<|docstring|>Expand tree by creating new children. action_priors: a list of tuples of actions and their prior probability according to the policy function.<|endoftext|>
5a30224b39866d70ec2a701ebbd517eb279d972d2d3ff2fcd5e69590a45262a5
def select(self, c_puct): 'Select action among children that gives maximum action value Q\n plus bonus u(P).\n Return: A tuple of (action, next_node)\n ' return max(self._children.items(), key=(lambda act_node: act_node[1].get_value(c_puct)))
Select action among children that gives maximum action value Q plus bonus u(P). Return: A tuple of (action, next_node)
mcts_alphaZero.py
select
quietsmile/AlphaZero_Gomoku
2,876
python
def select(self, c_puct): 'Select action among children that gives maximum action value Q\n plus bonus u(P).\n Return: A tuple of (action, next_node)\n ' return max(self._children.items(), key=(lambda act_node: act_node[1].get_value(c_puct)))
def select(self, c_puct): 'Select action among children that gives maximum action value Q\n plus bonus u(P).\n Return: A tuple of (action, next_node)\n ' return max(self._children.items(), key=(lambda act_node: act_node[1].get_value(c_puct)))<|docstring|>Select action among children that gives maximum action value Q plus bonus u(P). Return: A tuple of (action, next_node)<|endoftext|>
e386850f568b732bc7f512af109f714db75c302989a9259e19c2e49aba09ec3c
def update(self, leaf_value): "Update node values from leaf evaluation.\n leaf_value: the value of subtree evaluation from the current player's\n perspective.\n " self._n_visits += 1 self._Q += ((1.0 * (leaf_value - self._Q)) / self._n_visits)
Update node values from leaf evaluation. leaf_value: the value of subtree evaluation from the current player's perspective.
mcts_alphaZero.py
update
quietsmile/AlphaZero_Gomoku
2,876
python
def update(self, leaf_value): "Update node values from leaf evaluation.\n leaf_value: the value of subtree evaluation from the current player's\n perspective.\n " self._n_visits += 1 self._Q += ((1.0 * (leaf_value - self._Q)) / self._n_visits)
def update(self, leaf_value): "Update node values from leaf evaluation.\n leaf_value: the value of subtree evaluation from the current player's\n perspective.\n " self._n_visits += 1 self._Q += ((1.0 * (leaf_value - self._Q)) / self._n_visits)<|docstring|>Update node values from leaf evaluation. leaf_value: the value of subtree evaluation from the current player's perspective.<|endoftext|>
181b778f31a1d04f414398b63d7f675de04299d2863026f390ab61ddccbbfcfb
def update_recursive(self, leaf_value): 'Like a call to update(), but applied recursively for all ancestors.\n ' if self._parent: self._parent.update_recursive((- leaf_value)) self.update(leaf_value)
Like a call to update(), but applied recursively for all ancestors.
mcts_alphaZero.py
update_recursive
quietsmile/AlphaZero_Gomoku
2,876
python
def update_recursive(self, leaf_value): '\n ' if self._parent: self._parent.update_recursive((- leaf_value)) self.update(leaf_value)
def update_recursive(self, leaf_value): '\n ' if self._parent: self._parent.update_recursive((- leaf_value)) self.update(leaf_value)<|docstring|>Like a call to update(), but applied recursively for all ancestors.<|endoftext|>
e5086e940ef4161c69b8182e12716aa005d736e05f8b9dd57ac23b3f6aa84bfa
def get_value(self, c_puct): "Calculate and return the value for this node.\n It is a combination of leaf evaluations Q, and this node's prior\n adjusted for its visit count, u.\n c_puct: a number in (0, inf) controlling the relative impact of\n value Q, and prior probability P, on this node's score.\n " self._u = (((c_puct * self._P) * np.sqrt(self._parent._n_visits)) / (1 + self._n_visits)) return (self._Q + self._u)
Calculate and return the value for this node. It is a combination of leaf evaluations Q, and this node's prior adjusted for its visit count, u. c_puct: a number in (0, inf) controlling the relative impact of value Q, and prior probability P, on this node's score.
mcts_alphaZero.py
get_value
quietsmile/AlphaZero_Gomoku
2,876
python
def get_value(self, c_puct): "Calculate and return the value for this node.\n It is a combination of leaf evaluations Q, and this node's prior\n adjusted for its visit count, u.\n c_puct: a number in (0, inf) controlling the relative impact of\n value Q, and prior probability P, on this node's score.\n " self._u = (((c_puct * self._P) * np.sqrt(self._parent._n_visits)) / (1 + self._n_visits)) return (self._Q + self._u)
def get_value(self, c_puct): "Calculate and return the value for this node.\n It is a combination of leaf evaluations Q, and this node's prior\n adjusted for its visit count, u.\n c_puct: a number in (0, inf) controlling the relative impact of\n value Q, and prior probability P, on this node's score.\n " self._u = (((c_puct * self._P) * np.sqrt(self._parent._n_visits)) / (1 + self._n_visits)) return (self._Q + self._u)<|docstring|>Calculate and return the value for this node. It is a combination of leaf evaluations Q, and this node's prior adjusted for its visit count, u. c_puct: a number in (0, inf) controlling the relative impact of value Q, and prior probability P, on this node's score.<|endoftext|>
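Note on the record above: this is the PUCT score used by AlphaZero-style MCTS, an exploitation term Q plus an exploration bonus u = c_puct * P * sqrt(N_parent) / (1 + n_child) that shrinks as the child accumulates visits. A standalone sketch of the same formula, illustrative only:

    import math

    def puct(q, prior, parent_visits, child_visits, c_puct=5.0):
        # PUCT score: mean value Q plus a prior-weighted exploration bonus.
        u = c_puct * prior * math.sqrt(parent_visits) / (1 + child_visits)
        return q + u

    # an unvisited child with a strong prior can outrank a well-visited one
    print(puct(q=0.0, prior=0.4, parent_visits=100, child_visits=0))   # 20.0
    print(puct(q=0.6, prior=0.2, parent_visits=100, child_visits=30))  # ~0.92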
d6bb6d8093185401373633d5616a64c748667c5134c695dc543748fbc5b1a23c
def is_leaf(self): 'Check if leaf node (i.e. no nodes below this have been expanded).' return (self._children == {})
Check if leaf node (i.e. no nodes below this have been expanded).
mcts_alphaZero.py
is_leaf
quietsmile/AlphaZero_Gomoku
2,876
python
def is_leaf(self): return (self._children == {})
def is_leaf(self): return (self._children == {})<|docstring|>Check if leaf node (i.e. no nodes below this have been expanded).<|endoftext|>
8e89b20e206e7ed90b8b4dbffb1e0ca05a00c3c65c3afea305029ccf062c9ebe
def __init__(self, policy_value_fn, c_puct=5, n_playout=10000): "\n policy_value_fn: a function that takes in a board state and outputs\n a list of (action, probability) tuples and also a score in [-1, 1]\n (i.e. the expected value of the end game score from the current\n player's perspective) for the current player.\n c_puct: a number in (0, inf) that controls how quickly exploration\n converges to the maximum-value policy. A higher value means\n relying on the prior more.\n " self._root = TreeNode(None, 1.0) self._policy = policy_value_fn self._c_puct = c_puct self._n_playout = n_playout
policy_value_fn: a function that takes in a board state and outputs a list of (action, probability) tuples and also a score in [-1, 1] (i.e. the expected value of the end game score from the current player's perspective) for the current player. c_puct: a number in (0, inf) that controls how quickly exploration converges to the maximum-value policy. A higher value means relying on the prior more.
mcts_alphaZero.py
__init__
quietsmile/AlphaZero_Gomoku
2,876
python
def __init__(self, policy_value_fn, c_puct=5, n_playout=10000): "\n policy_value_fn: a function that takes in a board state and outputs\n a list of (action, probability) tuples and also a score in [-1, 1]\n (i.e. the expected value of the end game score from the current\n player's perspective) for the current player.\n c_puct: a number in (0, inf) that controls how quickly exploration\n converges to the maximum-value policy. A higher value means\n relying on the prior more.\n " self._root = TreeNode(None, 1.0) self._policy = policy_value_fn self._c_puct = c_puct self._n_playout = n_playout
def __init__(self, policy_value_fn, c_puct=5, n_playout=10000): "\n policy_value_fn: a function that takes in a board state and outputs\n a list of (action, probability) tuples and also a score in [-1, 1]\n (i.e. the expected value of the end game score from the current\n player's perspective) for the current player.\n c_puct: a number in (0, inf) that controls how quickly exploration\n converges to the maximum-value policy. A higher value means\n relying on the prior more.\n " self._root = TreeNode(None, 1.0) self._policy = policy_value_fn self._c_puct = c_puct self._n_playout = n_playout<|docstring|>policy_value_fn: a function that takes in a board state and outputs a list of (action, probability) tuples and also a score in [-1, 1] (i.e. the expected value of the end game score from the current player's perspective) for the current player. c_puct: a number in (0, inf) that controls how quickly exploration converges to the maximum-value policy. A higher value means relying on the prior more.<|endoftext|>
f95faa6191d72d519c9dd3f145030c649b9a3d467989b81bf8f1c3417bc338c8
def _playout(self, state): 'Run a single playout from the root to the leaf, getting a value at\n the leaf and propagating it back through its parents.\n State is modified in-place, so a copy must be provided.\n ' node = self._root while 1: if node.is_leaf(): break (action, node) = node.select(self._c_puct) state.do_move(action) (action_probs, leaf_value) = self._policy(state) (end, winner) = state.game_end() if (not end): node.expand(action_probs) elif (winner == (- 1)): leaf_value = 0.0 else: leaf_value = (1.0 if (winner == state.get_current_player()) else (- 1.0)) node.update_recursive((- leaf_value))
Run a single playout from the root to the leaf, getting a value at the leaf and propagating it back through its parents. State is modified in-place, so a copy must be provided.
mcts_alphaZero.py
_playout
quietsmile/AlphaZero_Gomoku
2,876
python
def _playout(self, state): 'Run a single playout from the root to the leaf, getting a value at\n the leaf and propagating it back through its parents.\n State is modified in-place, so a copy must be provided.\n ' node = self._root while 1: if node.is_leaf(): break (action, node) = node.select(self._c_puct) state.do_move(action) (action_probs, leaf_value) = self._policy(state) (end, winner) = state.game_end() if (not end): node.expand(action_probs) elif (winner == (- 1)): leaf_value = 0.0 else: leaf_value = (1.0 if (winner == state.get_current_player()) else (- 1.0)) node.update_recursive((- leaf_value))
def _playout(self, state): 'Run a single playout from the root to the leaf, getting a value at\n the leaf and propagating it back through its parents.\n State is modified in-place, so a copy must be provided.\n ' node = self._root while 1: if node.is_leaf(): break (action, node) = node.select(self._c_puct) state.do_move(action) (action_probs, leaf_value) = self._policy(state) (end, winner) = state.game_end() if (not end): node.expand(action_probs) elif (winner == (- 1)): leaf_value = 0.0 else: leaf_value = (1.0 if (winner == state.get_current_player()) else (- 1.0)) node.update_recursive((- leaf_value))<|docstring|>Run a single playout from the root to the leaf, getting a value at the leaf and propagating it back through its parents. State is modified in-place, so a copy must be provided.<|endoftext|>
19fc78f59089dddcd8325840db2443fea2cb52c84df5c66b9526399cc575cf0b
def get_move_probs(self, state, temp=0.001): 'Run all playouts sequentially and return the available actions and\n their corresponding probabilities.\n state: the current game state\n temp: temperature parameter in (0, 1] controls the level of exploration\n ' for n in range(self._n_playout): state_copy = copy.deepcopy(state) self._playout(state_copy) act_visits = [(act, node._n_visits) for (act, node) in self._root._children.items()] (acts, visits) = zip(*act_visits) act_probs = softmax(((1.0 / temp) * np.log((np.array(visits) + 1e-10)))) return (acts, act_probs)
Run all playouts sequentially and return the available actions and their corresponding probabilities. state: the current game state temp: temperature parameter in (0, 1] controls the level of exploration
mcts_alphaZero.py
get_move_probs
quietsmile/AlphaZero_Gomoku
2,876
python
def get_move_probs(self, state, temp=0.001): 'Run all playouts sequentially and return the available actions and\n their corresponding probabilities.\n state: the current game state\n temp: temperature parameter in (0, 1] controls the level of exploration\n ' for n in range(self._n_playout): state_copy = copy.deepcopy(state) self._playout(state_copy) act_visits = [(act, node._n_visits) for (act, node) in self._root._children.items()] (acts, visits) = zip(*act_visits) act_probs = softmax(((1.0 / temp) * np.log((np.array(visits) + 1e-10)))) return (acts, act_probs)
def get_move_probs(self, state, temp=0.001): 'Run all playouts sequentially and return the available actions and\n their corresponding probabilities.\n state: the current game state\n temp: temperature parameter in (0, 1] controls the level of exploration\n ' for n in range(self._n_playout): state_copy = copy.deepcopy(state) self._playout(state_copy) act_visits = [(act, node._n_visits) for (act, node) in self._root._children.items()] (acts, visits) = zip(*act_visits) act_probs = softmax(((1.0 / temp) * np.log((np.array(visits) + 1e-10)))) return (acts, act_probs)<|docstring|>Run all playouts sequentially and return the available actions and their corresponding probabilities. state: the current game state temp: temperature parameter in (0, 1] controls the level of exploration<|endoftext|>
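Note on the record above: it converts visit counts into move probabilities with softmax(log(visits) / temp); at temp = 1 the probabilities are simply proportional to the counts, and as temp approaches 0 the mass concentrates on the most-visited move. The `softmax` it imports is assumed here to be the usual numerically stable one; the sketch below is self-contained and only illustrates the temperature effect.

    import numpy as np

    def softmax(x):
        x = np.asarray(x, dtype=float)
        e = np.exp(x - x.max())       # subtract the max for numerical stability
        return e / e.sum()

    visits = np.array([400, 90, 10])

    print(softmax(np.log(visits + 1e-10)))          # temp = 1: ~[0.80, 0.18, 0.02]
    print(softmax(np.log(visits + 1e-10) / 1e-3))   # temp -> 0: ~[1, 0, 0]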
c4786e0852cb71a204030345a17655f8c27702ec4f4bd7074c07f85b130adef3
def update_with_move(self, last_move): 'Step forward in the tree, keeping everything we already know\n about the subtree.\n ' if (last_move in self._root._children): self._root = self._root._children[last_move] self._root._parent = None else: self._root = TreeNode(None, 1.0)
Step forward in the tree, keeping everything we already know about the subtree.
mcts_alphaZero.py
update_with_move
quietsmile/AlphaZero_Gomoku
2,876
python
def update_with_move(self, last_move): 'Step forward in the tree, keeping everything we already know\n about the subtree.\n ' if (last_move in self._root._children): self._root = self._root._children[last_move] self._root._parent = None else: self._root = TreeNode(None, 1.0)
def update_with_move(self, last_move): 'Step forward in the tree, keeping everything we already know\n about the subtree.\n ' if (last_move in self._root._children): self._root = self._root._children[last_move] self._root._parent = None else: self._root = TreeNode(None, 1.0)<|docstring|>Step forward in the tree, keeping everything we already know about the subtree.<|endoftext|>
dcb92f2d8cd2467c2f66642315704573a1ab47f0092553a11835cb7ffada6a26
def coding_blk(): 'Input: node dict\n Output: TensorType([1, hyper.word_dim])\n ' Wcomb1 = param.get('Wcomb1') Wcomb2 = param.get('Wcomb2') blk = td.Composition() with blk.scope(): direct = embedding.direct_embed_blk().reads(blk.input) composed = embedding.composed_embed_blk().reads(blk.input) Wcomb1 = td.FromTensor(param.get('Wcomb1')) Wcomb2 = td.FromTensor(param.get('Wcomb2')) direct = td.Function(embedding.batch_mul).reads(direct, Wcomb1) composed = td.Function(embedding.batch_mul).reads(composed, Wcomb2) added = td.Function(tf.add).reads(direct, composed) blk.output.reads(added) return blk
Input: node dict Output: TensorType([1, hyper.word_dim])
tbcnn/tbcnn.py
coding_blk
Aetf/tensorflow-tbcnn
34
python
def coding_blk(): 'Input: node dict\n Output: TensorType([1, hyper.word_dim])\n ' Wcomb1 = param.get('Wcomb1') Wcomb2 = param.get('Wcomb2') blk = td.Composition() with blk.scope(): direct = embedding.direct_embed_blk().reads(blk.input) composed = embedding.composed_embed_blk().reads(blk.input) Wcomb1 = td.FromTensor(param.get('Wcomb1')) Wcomb2 = td.FromTensor(param.get('Wcomb2')) direct = td.Function(embedding.batch_mul).reads(direct, Wcomb1) composed = td.Function(embedding.batch_mul).reads(composed, Wcomb2) added = td.Function(tf.add).reads(direct, composed) blk.output.reads(added) return blk
def coding_blk(): 'Input: node dict\n Output: TensorType([1, hyper.word_dim])\n ' Wcomb1 = param.get('Wcomb1') Wcomb2 = param.get('Wcomb2') blk = td.Composition() with blk.scope(): direct = embedding.direct_embed_blk().reads(blk.input) composed = embedding.composed_embed_blk().reads(blk.input) Wcomb1 = td.FromTensor(param.get('Wcomb1')) Wcomb2 = td.FromTensor(param.get('Wcomb2')) direct = td.Function(embedding.batch_mul).reads(direct, Wcomb1) composed = td.Function(embedding.batch_mul).reads(composed, Wcomb2) added = td.Function(tf.add).reads(direct, composed) blk.output.reads(added) return blk<|docstring|>Input: node dict Output: TensorType([1, hyper.word_dim])<|endoftext|>
dfe713751ce23202f71501fea8e187986e70a9a037f5fd6fbe63eb9cea0c338e
def collect_node_for_conv_patch_blk(max_depth=2): 'Input: node dict\n Output: flattened list of all collected nodes, in the format\n [(node, idx, pclen, depth, max_depth), ...]\n ' def _collect_patch(node): collected = [(node, 1, 1, 0, max_depth)] def recurse_helper(node, depth): if (depth > max_depth): return for (idx, c) in enumerate(node['children'], 1): collected.append((c, idx, node['clen'], (depth + 1), max_depth)) recurse_helper(c, (depth + 1)) recurse_helper(node, 0) return collected return td.InputTransform(_collect_patch)
Input: node dict Output: flattened list of all collected nodes, in the format [(node, idx, pclen, depth, max_depth), ...]
tbcnn/tbcnn.py
collect_node_for_conv_patch_blk
Aetf/tensorflow-tbcnn
34
python
def collect_node_for_conv_patch_blk(max_depth=2): 'Input: node dict\n Output: flattened list of all collected nodes, in the format\n [(node, idx, pclen, depth, max_depth), ...]\n ' def _collect_patch(node): collected = [(node, 1, 1, 0, max_depth)] def recurse_helper(node, depth): if (depth > max_depth): return for (idx, c) in enumerate(node['children'], 1): collected.append((c, idx, node['clen'], (depth + 1), max_depth)) recurse_helper(c, (depth + 1)) recurse_helper(node, 0) return collected return td.InputTransform(_collect_patch)
def collect_node_for_conv_patch_blk(max_depth=2): 'Input: node dict\n Output: flattened list of all collected nodes, in the format\n [(node, idx, pclen, depth, max_depth), ...]\n ' def _collect_patch(node): collected = [(node, 1, 1, 0, max_depth)] def recurse_helper(node, depth): if (depth > max_depth): return for (idx, c) in enumerate(node['children'], 1): collected.append((c, idx, node['clen'], (depth + 1), max_depth)) recurse_helper(c, (depth + 1)) recurse_helper(node, 0) return collected return td.InputTransform(_collect_patch)<|docstring|>Input: node dict Output: flattened list of all collected nodes, in the format [(node, idx, pclen, depth, max_depth), ...]<|endoftext|>
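Note on the record above: the collector flattens one convolution patch into (node, idx, pclen, depth, max_depth) tuples, where idx is the child's 1-based position under its parent and pclen is the parent's child count. Running the same recursion in plain Python on a made-up toy tree (the "clen"/"children" keys follow the record's node format; the "name" key is added here only so the output is readable):

    def collect_patch(node, max_depth=2):
        collected = [(node["name"], 1, 1, 0, max_depth)]
        def recurse(parent, depth):
            if depth > max_depth:
                return
            for idx, child in enumerate(parent["children"], 1):
                collected.append((child["name"], idx, parent["clen"], depth + 1, max_depth))
                recurse(child, depth + 1)
        recurse(node, 0)
        return collected

    leaf = lambda n: {"name": n, "clen": 0, "children": []}
    root = {"name": "root", "clen": 2,
            "children": [leaf("a"), {"name": "b", "clen": 1, "children": [leaf("c")]}]}
    for row in collect_patch(root):
        print(row)
    # ('root', 1, 1, 0, 2), ('a', 1, 2, 1, 2), ('b', 2, 2, 1, 2), ('c', 1, 1, 2, 2)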
6a43a82cd37a2784983d22c7306ace83efd7aa70e7c9ea1eaee47b84a4a257d0
def tri_combined(idx, pclen, depth, max_depth): 'TF function, input: idx, pclen, depth, max_depth as batch (1D Tensor)\n Output: weight tensor (3D Tensor), first dim is batch\n ' Wconvt = param.get('Wconvt') Wconvl = param.get('Wconvl') Wconvr = param.get('Wconvr') dim = tf.unstack(tf.shape(Wconvt))[0] batch_shape = tf.shape(idx) tmp = ((idx - 1) / (pclen - 1)) tmp = tf.where(tf.is_nan(tmp), (tf.ones_like(tmp) * 0.5), tmp) t = ((max_depth - depth) / max_depth) r = ((1 - t) * tmp) l = ((1 - t) * (1 - r)) lb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * l)) rb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * r)) tb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * t)) lb = tf.reshape(lb, [(- 1), dim]) rb = tf.reshape(rb, [(- 1), dim]) tb = tf.reshape(tb, [(- 1), dim]) tmp = ((tf.matmul(lb, Wconvl) + tf.matmul(rb, Wconvr)) + tf.matmul(tb, Wconvt)) tmp = tf.reshape(tmp, [(- 1), hyper.word_dim, hyper.conv_dim]) return tmp
TF function, input: idx, pclen, depth, max_depth as batch (1D Tensor) Output: weight tensor (3D Tensor), first dim is batch
tbcnn/tbcnn.py
tri_combined
Aetf/tensorflow-tbcnn
34
python
def tri_combined(idx, pclen, depth, max_depth): 'TF function, input: idx, pclen, depth, max_depth as batch (1D Tensor)\n Output: weight tensor (3D Tensor), first dim is batch\n ' Wconvt = param.get('Wconvt') Wconvl = param.get('Wconvl') Wconvr = param.get('Wconvr') dim = tf.unstack(tf.shape(Wconvt))[0] batch_shape = tf.shape(idx) tmp = ((idx - 1) / (pclen - 1)) tmp = tf.where(tf.is_nan(tmp), (tf.ones_like(tmp) * 0.5), tmp) t = ((max_depth - depth) / max_depth) r = ((1 - t) * tmp) l = ((1 - t) * (1 - r)) lb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * l)) rb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * r)) tb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * t)) lb = tf.reshape(lb, [(- 1), dim]) rb = tf.reshape(rb, [(- 1), dim]) tb = tf.reshape(tb, [(- 1), dim]) tmp = ((tf.matmul(lb, Wconvl) + tf.matmul(rb, Wconvr)) + tf.matmul(tb, Wconvt)) tmp = tf.reshape(tmp, [(- 1), hyper.word_dim, hyper.conv_dim]) return tmp
def tri_combined(idx, pclen, depth, max_depth): 'TF function, input: idx, pclen, depth, max_depth as batch (1D Tensor)\n Output: weight tensor (3D Tensor), first dim is batch\n ' Wconvt = param.get('Wconvt') Wconvl = param.get('Wconvl') Wconvr = param.get('Wconvr') dim = tf.unstack(tf.shape(Wconvt))[0] batch_shape = tf.shape(idx) tmp = ((idx - 1) / (pclen - 1)) tmp = tf.where(tf.is_nan(tmp), (tf.ones_like(tmp) * 0.5), tmp) t = ((max_depth - depth) / max_depth) r = ((1 - t) * tmp) l = ((1 - t) * (1 - r)) lb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * l)) rb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * r)) tb = tf.transpose((tf.transpose(tf.eye(dim, batch_shape=batch_shape)) * t)) lb = tf.reshape(lb, [(- 1), dim]) rb = tf.reshape(rb, [(- 1), dim]) tb = tf.reshape(tb, [(- 1), dim]) tmp = ((tf.matmul(lb, Wconvl) + tf.matmul(rb, Wconvr)) + tf.matmul(tb, Wconvt)) tmp = tf.reshape(tmp, [(- 1), hyper.word_dim, hyper.conv_dim]) return tmp<|docstring|>TF function, input: idx, pclen, depth, max_depth as batch (1D Tensor) Output: weight tensor (3D Tensor), first dim is batch<|endoftext|>
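Note on the record above: it blends the three trained matrices Wconvt/Wconvl/Wconvr with position-dependent coefficients: t grows toward the top of the patch, r is (1 - t) scaled by the child's relative index (idx - 1)/(pclen - 1) (0.5 for an only child), and l = (1 - t)(1 - r). A NumPy sketch of just that coefficient logic; the dimensions and random matrices below are made up, not the model's parameters.

    import numpy as np

    def tri_coefficients(idx, pclen, depth, max_depth):
        # Return the (l, r, t) mixing coefficients, mirroring the record above.
        pos = 0.5 if pclen == 1 else (idx - 1) / (pclen - 1)
        t = (max_depth - depth) / max_depth
        r = (1 - t) * pos
        l = (1 - t) * (1 - r)
        return l, r, t

    rng = np.random.default_rng(0)
    dim_in, dim_out = 4, 3
    Wl, Wr, Wt = (rng.standard_normal((dim_in, dim_out)) for _ in range(3))

    l, r, t = tri_coefficients(idx=1, pclen=3, depth=1, max_depth=2)
    W = l * Wl + r * Wr + t * Wt      # the blended convolution weight
    print(l, r, t)                    # 0.5 0.0 0.5 for a leftmost child at depth 1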
732265aa411605847109385a891b13d3ce8b096f538cac273f8063a8c63f940d
def weighted_feature_blk(): 'Input: (feature , idx , pclen, depth, max_depth)\n (TensorType([hyper.word_dim, ]), Scalar, Scalar, Scalar, Scalar)\n Output: weighted_feature\n TensorType([hyper.conv_dim, ])\n ' blk = td.Composition() with blk.scope(): fea = blk.input[0] Wi = tri_combined_blk().reads(blk.input[1], blk.input[2], blk.input[3], blk.input[4]) weighted_fea = td.Function(embedding.batch_mul).reads(fea, Wi) blk.output.reads(weighted_fea) return blk
Input: (feature , idx , pclen, depth, max_depth) (TensorType([hyper.word_dim, ]), Scalar, Scalar, Scalar, Scalar) Output: weighted_feature TensorType([hyper.conv_dim, ])
tbcnn/tbcnn.py
weighted_feature_blk
Aetf/tensorflow-tbcnn
34
python
def weighted_feature_blk(): 'Input: (feature , idx , pclen, depth, max_depth)\n (TensorType([hyper.word_dim, ]), Scalar, Scalar, Scalar, Scalar)\n Output: weighted_feature\n TensorType([hyper.conv_dim, ])\n ' blk = td.Composition() with blk.scope(): fea = blk.input[0] Wi = tri_combined_blk().reads(blk.input[1], blk.input[2], blk.input[3], blk.input[4]) weighted_fea = td.Function(embedding.batch_mul).reads(fea, Wi) blk.output.reads(weighted_fea) return blk
def weighted_feature_blk(): 'Input: (feature , idx , pclen, depth, max_depth)\n (TensorType([hyper.word_dim, ]), Scalar, Scalar, Scalar, Scalar)\n Output: weighted_feature\n TensorType([hyper.conv_dim, ])\n ' blk = td.Composition() with blk.scope(): fea = blk.input[0] Wi = tri_combined_blk().reads(blk.input[1], blk.input[2], blk.input[3], blk.input[4]) weighted_fea = td.Function(embedding.batch_mul).reads(fea, Wi) blk.output.reads(weighted_fea) return blk<|docstring|>Input: (feature , idx , pclen, depth, max_depth) (TensorType([hyper.word_dim, ]), Scalar, Scalar, Scalar, Scalar) Output: weighted_feature TensorType([hyper.conv_dim, ])<|endoftext|>
52293ed58beeea3775f22e1c848e78af70d5c6deb15683e971120524065c788f
def feature_detector_blk(max_depth=2): 'Input: node dict\n Output: TensorType([hyper.conv_dim, ])\n Single patch of the conv. Depth is max_depth\n ' blk = td.Composition() with blk.scope(): nodes_in_patch = collect_node_for_conv_patch_blk(max_depth=max_depth).reads(blk.input) mapped = td.Map(td.Record((coding_blk(), td.Scalar(), td.Scalar(), td.Scalar(), td.Scalar()))).reads(nodes_in_patch) weighted = td.Map(weighted_feature_blk()).reads(mapped) added = td.Reduce(td.Function(tf.add)).reads(weighted) biased = td.Function(tf.add).reads(added, td.FromTensor(param.get('Bconv'))) tanh = td.Function(tf.nn.tanh).reads(biased) blk.output.reads(tanh) return blk
Input: node dict Output: TensorType([hyper.conv_dim, ]) Single patch of the conv. Depth is max_depth
tbcnn/tbcnn.py
feature_detector_blk
Aetf/tensorflow-tbcnn
34
python
def feature_detector_blk(max_depth=2): 'Input: node dict\n Output: TensorType([hyper.conv_dim, ])\n Single patch of the conv. Depth is max_depth\n ' blk = td.Composition() with blk.scope(): nodes_in_patch = collect_node_for_conv_patch_blk(max_depth=max_depth).reads(blk.input) mapped = td.Map(td.Record((coding_blk(), td.Scalar(), td.Scalar(), td.Scalar(), td.Scalar()))).reads(nodes_in_patch) weighted = td.Map(weighted_feature_blk()).reads(mapped) added = td.Reduce(td.Function(tf.add)).reads(weighted) biased = td.Function(tf.add).reads(added, td.FromTensor(param.get('Bconv'))) tanh = td.Function(tf.nn.tanh).reads(biased) blk.output.reads(tanh) return blk
def feature_detector_blk(max_depth=2): 'Input: node dict\n Output: TensorType([hyper.conv_dim, ])\n Single patch of the conv. Depth is max_depth\n ' blk = td.Composition() with blk.scope(): nodes_in_patch = collect_node_for_conv_patch_blk(max_depth=max_depth).reads(blk.input) mapped = td.Map(td.Record((coding_blk(), td.Scalar(), td.Scalar(), td.Scalar(), td.Scalar()))).reads(nodes_in_patch) weighted = td.Map(weighted_feature_blk()).reads(mapped) added = td.Reduce(td.Function(tf.add)).reads(weighted) biased = td.Function(tf.add).reads(added, td.FromTensor(param.get('Bconv'))) tanh = td.Function(tf.nn.tanh).reads(biased) blk.output.reads(tanh) return blk<|docstring|>Input: node dict Output: TensorType([hyper.conv_dim, ]) Single patch of the conv. Depth is max_depth<|endoftext|>
64dfef4b9bb6a87ceaad9885f6bcaa29996e329c4e8e81e85c94f5506de37589
def dynamic_pooling_blk(): 'Input: root node dic\n Output: pooled, TensorType([hyper.conv_dim, ])\n ' leaf_case = feature_detector_blk() pool_fwd = td.ForwardDeclaration(td.PyObjectType(), td.TensorType([hyper.conv_dim])) pool = td.Composition() with pool.scope(): cur_fea = feature_detector_blk().reads(pool.input) children = td.GetItem('children').reads(pool.input) mapped = td.Map(pool_fwd()).reads(children) summed = td.Reduce(td.Function(tf.maximum)).reads(mapped) summed = td.Function(tf.maximum).reads(summed, cur_fea) pool.output.reads(summed) pool = td.OneOf((lambda x: (x['clen'] == 0)), {True: leaf_case, False: pool}) pool_fwd.resolve_to(pool) return pool
Input: root node dic Output: pooled, TensorType([hyper.conv_dim, ])
tbcnn/tbcnn.py
dynamic_pooling_blk
Aetf/tensorflow-tbcnn
34
python
def dynamic_pooling_blk(): 'Input: root node dic\n Output: pooled, TensorType([hyper.conv_dim, ])\n ' leaf_case = feature_detector_blk() pool_fwd = td.ForwardDeclaration(td.PyObjectType(), td.TensorType([hyper.conv_dim])) pool = td.Composition() with pool.scope(): cur_fea = feature_detector_blk().reads(pool.input) children = td.GetItem('children').reads(pool.input) mapped = td.Map(pool_fwd()).reads(children) summed = td.Reduce(td.Function(tf.maximum)).reads(mapped) summed = td.Function(tf.maximum).reads(summed, cur_fea) pool.output.reads(summed) pool = td.OneOf((lambda x: (x['clen'] == 0)), {True: leaf_case, False: pool}) pool_fwd.resolve_to(pool) return pool
def dynamic_pooling_blk(): 'Input: root node dic\n Output: pooled, TensorType([hyper.conv_dim, ])\n ' leaf_case = feature_detector_blk() pool_fwd = td.ForwardDeclaration(td.PyObjectType(), td.TensorType([hyper.conv_dim])) pool = td.Composition() with pool.scope(): cur_fea = feature_detector_blk().reads(pool.input) children = td.GetItem('children').reads(pool.input) mapped = td.Map(pool_fwd()).reads(children) summed = td.Reduce(td.Function(tf.maximum)).reads(mapped) summed = td.Function(tf.maximum).reads(summed, cur_fea) pool.output.reads(summed) pool = td.OneOf((lambda x: (x['clen'] == 0)), {True: leaf_case, False: pool}) pool_fwd.resolve_to(pool) return pool<|docstring|>Input: root node dic Output: pooled, TensorType([hyper.conv_dim, ])<|endoftext|>
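Note on the record above: the block performs element-wise max pooling of the feature-detector output over every node of the tree, with the leaf case being just the node's own features. The same recursion in plain Python on a toy tree, with short lists standing in for the conv-dim vectors (illustrative only; the Fold wiring in the record is what actually runs):

    def feature(node):
        # stand-in for feature_detector_blk: each toy node already carries a vector
        return node["fea"]

    def dynamic_pool(node):
        pooled = feature(node)
        for child in node["children"]:
            pooled = [max(a, b) for a, b in zip(pooled, dynamic_pool(child))]
        return pooled

    tree = {"fea": [0.1, 0.9], "children": [
        {"fea": [0.5, 0.2], "children": []},
        {"fea": [-1.0, 1.5], "children": []},
    ]}
    print(dynamic_pool(tree))  # [0.5, 1.5]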
7c71d46e0660e4e3e9b82dca084db044c757d1e7786dab74067fe91517748e5d
def runMethod(x_i): "\n I'm going to solve the root of a function using Newton-Raphson Method\n " iterator = 0 while True: f = function_ORIGINAL(x_i) f_d = function_DERIVED(x_i) x_next = (x_i - (f / f_d)) print('the value N°', iterator, ':', x_next) ERROR = (math.fabs(((x_next - x_i) / x_next)) * 100) if (ERROR == 0): print('the answer:', x_next) break x_i = x_next iterator = (iterator + 1)
I'm going to solve the root of a function using Newton-Raphson Method
exercises/newton_raphson_method.py
runMethod
leonel-123/python-fundamentals
0
python
def runMethod(x_i): "\n \n " iterator = 0 while True: f = function_ORIGINAL(x_i) f_d = function_DERIVED(x_i) x_next = (x_i - (f / f_d)) print('the value N°', iterator, ':', x_next) ERROR = (math.fabs(((x_next - x_i) / x_next)) * 100) if (ERROR == 0): print('the answer:', x_next) break x_i = x_next iterator = (iterator + 1)
def runMethod(x_i): "\n \n " iterator = 0 while True: f = function_ORIGINAL(x_i) f_d = function_DERIVED(x_i) x_next = (x_i - (f / f_d)) print('the value N°', iterator, ':', x_next) ERROR = (math.fabs(((x_next - x_i) / x_next)) * 100) if (ERROR == 0): print('the answer:', x_next) break x_i = x_next iterator = (iterator + 1)<|docstring|>I'm going to solve the root of a function using Newton-Raphson Method<|endoftext|>
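Note on the record above: it iterates x_{n+1} = x_n - f(x_n)/f'(x_n) and stops only when the relative error is exactly zero, which may never happen in floating point. A more defensive version of the same iteration, written as a sketch with a made-up target function; it is not a correction of the dataset entry itself.

    import math

    def newton_raphson(f, f_prime, x0, tol=1e-10, max_iter=100):
        # Return a root of f near x0, or raise if the iteration stalls.
        x = x0
        for _ in range(max_iter):
            d = f_prime(x)
            if d == 0:
                raise ZeroDivisionError("derivative vanished at x = %r" % x)
            x_next = x - f(x) / d
            if math.isclose(x_next, x, rel_tol=tol, abs_tol=tol):
                return x_next
            x = x_next
        raise RuntimeError("did not converge in %d iterations" % max_iter)

    # example: root of x**2 - 2 starting from 1.0
    print(newton_raphson(lambda x: x * x - 2, lambda x: 2 * x, 1.0))  # ~1.414213562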
81b6d7abc5c2ee522cc890fe3a8acccb0234670231e61c1d9816656724d81954
def plot_cluster_assignments(evl, data, atts=None, inst_no=False, size=10, title=None, outfile=None, wait=True): '\n Plots the cluster assignments against the specified attributes.\n\n TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html\n\n :param evl: the cluster evaluation to obtain the cluster assignments from\n :type evl: ClusterEvaluation\n :param data: the dataset the clusterer was evaluated against\n :type data: Instances\n :param atts: the list of attribute indices to plot, None for all\n :type atts: list\n :param inst_no: whether to include a fake attribute with the instance number\n :type inst_no: bool\n :param size: the size of the circles in point\n :type size: int\n :param title: an optional title\n :type title: str\n :param outfile: the (optional) file to save the generated plot to. The extension determines the file format.\n :type outfile: str\n :param wait: whether to wait for the user to close the plot\n :type wait: bool\n ' if (not plot.matplotlib_available): logger.error('Matplotlib is not installed, plotting unavailable!') return fig = plt.figure() if (data.class_index == (- 1)): c = None else: c = [] for i in xrange(data.num_instances): inst = data.get_instance(i) c.append(inst.get_value(inst.class_index)) if (atts is None): atts = [] for i in xrange(data.num_attributes): atts.append(i) num_plots = len(atts) if inst_no: num_plots += 1 clusters = evl.cluster_assignments for (index, att) in enumerate(atts): x = data.values(att) ax = fig.add_subplot(1, num_plots, (index + 1)) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title(data.attribute(att).name) ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if inst_no: x = [] for i in xrange(data.num_instances): x.append((i + 1)) ax = fig.add_subplot(1, num_plots, num_plots) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title('Instance number') ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if (title is None): title = data.relationname fig.canvas.set_window_title(title) plt.draw() if (not (outfile is None)): plt.savefig(outfile) if wait: plt.show()
Plots the cluster assignments against the specified attributes. TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html :param evl: the cluster evaluation to obtain the cluster assignments from :type evl: ClusterEvaluation :param data: the dataset the clusterer was evaluated against :type data: Instances :param atts: the list of attribute indices to plot, None for all :type atts: list :param inst_no: whether to include a fake attribute with the instance number :type inst_no: bool :param size: the size of the circles in point :type size: int :param title: an optional title :type title: str :param outfile: the (optional) file to save the generated plot to. The extension determines the file format. :type outfile: str :param wait: whether to wait for the user to close the plot :type wait: bool
flasky2/venv/Lib/site-packages/weka/plot/clusterers.py
plot_cluster_assignments
akshat0109/kisan_backend
0
python
def plot_cluster_assignments(evl, data, atts=None, inst_no=False, size=10, title=None, outfile=None, wait=True): '\n Plots the cluster assignments against the specified attributes.\n\n TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html\n\n :param evl: the cluster evaluation to obtain the cluster assignments from\n :type evl: ClusterEvaluation\n :param data: the dataset the clusterer was evaluated against\n :type data: Instances\n :param atts: the list of attribute indices to plot, None for all\n :type atts: list\n :param inst_no: whether to include a fake attribute with the instance number\n :type inst_no: bool\n :param size: the size of the circles in point\n :type size: int\n :param title: an optional title\n :type title: str\n :param outfile: the (optional) file to save the generated plot to. The extension determines the file format.\n :type outfile: str\n :param wait: whether to wait for the user to close the plot\n :type wait: bool\n ' if (not plot.matplotlib_available): logger.error('Matplotlib is not installed, plotting unavailable!') return fig = plt.figure() if (data.class_index == (- 1)): c = None else: c = [] for i in xrange(data.num_instances): inst = data.get_instance(i) c.append(inst.get_value(inst.class_index)) if (atts is None): atts = [] for i in xrange(data.num_attributes): atts.append(i) num_plots = len(atts) if inst_no: num_plots += 1 clusters = evl.cluster_assignments for (index, att) in enumerate(atts): x = data.values(att) ax = fig.add_subplot(1, num_plots, (index + 1)) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title(data.attribute(att).name) ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if inst_no: x = [] for i in xrange(data.num_instances): x.append((i + 1)) ax = fig.add_subplot(1, num_plots, num_plots) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title('Instance number') ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if (title is None): title = data.relationname fig.canvas.set_window_title(title) plt.draw() if (not (outfile is None)): plt.savefig(outfile) if wait: plt.show()
def plot_cluster_assignments(evl, data, atts=None, inst_no=False, size=10, title=None, outfile=None, wait=True): '\n Plots the cluster assignments against the specified attributes.\n\n TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html\n\n :param evl: the cluster evaluation to obtain the cluster assignments from\n :type evl: ClusterEvaluation\n :param data: the dataset the clusterer was evaluated against\n :type data: Instances\n :param atts: the list of attribute indices to plot, None for all\n :type atts: list\n :param inst_no: whether to include a fake attribute with the instance number\n :type inst_no: bool\n :param size: the size of the circles in point\n :type size: int\n :param title: an optional title\n :type title: str\n :param outfile: the (optional) file to save the generated plot to. The extension determines the file format.\n :type outfile: str\n :param wait: whether to wait for the user to close the plot\n :type wait: bool\n ' if (not plot.matplotlib_available): logger.error('Matplotlib is not installed, plotting unavailable!') return fig = plt.figure() if (data.class_index == (- 1)): c = None else: c = [] for i in xrange(data.num_instances): inst = data.get_instance(i) c.append(inst.get_value(inst.class_index)) if (atts is None): atts = [] for i in xrange(data.num_attributes): atts.append(i) num_plots = len(atts) if inst_no: num_plots += 1 clusters = evl.cluster_assignments for (index, att) in enumerate(atts): x = data.values(att) ax = fig.add_subplot(1, num_plots, (index + 1)) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title(data.attribute(att).name) ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if inst_no: x = [] for i in xrange(data.num_instances): x.append((i + 1)) ax = fig.add_subplot(1, num_plots, num_plots) if (c is None): ax.scatter(clusters, x, s=size, alpha=0.5) else: ax.scatter(clusters, x, c=c, s=size, alpha=0.5) ax.set_xlabel('Clusters') ax.set_title('Instance number') ax.get_xaxis().set_ticks(list(set(clusters))) ax.grid(True) if (title is None): title = data.relationname fig.canvas.set_window_title(title) plt.draw() if (not (outfile is None)): plt.savefig(outfile) if wait: plt.show()<|docstring|>Plots the cluster assignments against the specified attributes. TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html :param evl: the cluster evaluation to obtain the cluster assignments from :type evl: ClusterEvaluation :param data: the dataset the clusterer was evaluated against :type data: Instances :param atts: the list of attribute indices to plot, None for all :type atts: list :param inst_no: whether to include a fake attribute with the instance number :type inst_no: bool :param size: the size of the circles in point :type size: int :param title: an optional title :type title: str :param outfile: the (optional) file to save the generated plot to. The extension determines the file format. :type outfile: str :param wait: whether to wait for the user to close the plot :type wait: bool<|endoftext|>
78f50ad48516f44f303939f645bc9d76018604de70b2e33575be5a1e905e5bc1
def answer(self, results: List[InlineQueryResult], cache_time: Optional[int]=None, is_personal: Optional[bool]=None, next_offset: Optional[str]=None, switch_pm_text: Optional[str]=None, switch_pm_parameter: Optional[str]=None) -> AnswerInlineQuery: '\n :param results:\n :param cache_time:\n :param is_personal:\n :param next_offset:\n :param switch_pm_text:\n :param switch_pm_parameter:\n :return:\n ' from ..methods import AnswerInlineQuery return AnswerInlineQuery(inline_query_id=self.id, results=results, cache_time=cache_time, is_personal=is_personal, next_offset=next_offset, switch_pm_text=switch_pm_text, switch_pm_parameter=switch_pm_parameter)
:param results: :param cache_time: :param is_personal: :param next_offset: :param switch_pm_text: :param switch_pm_parameter: :return:
tgtypes/models/inline_query.py
answer
autogram/tgtypes
0
python
def answer(self, results: List[InlineQueryResult], cache_time: Optional[int]=None, is_personal: Optional[bool]=None, next_offset: Optional[str]=None, switch_pm_text: Optional[str]=None, switch_pm_parameter: Optional[str]=None) -> AnswerInlineQuery: '\n :param results:\n :param cache_time:\n :param is_personal:\n :param next_offset:\n :param switch_pm_text:\n :param switch_pm_parameter:\n :return:\n ' from ..methods import AnswerInlineQuery return AnswerInlineQuery(inline_query_id=self.id, results=results, cache_time=cache_time, is_personal=is_personal, next_offset=next_offset, switch_pm_text=switch_pm_text, switch_pm_parameter=switch_pm_parameter)
def answer(self, results: List[InlineQueryResult], cache_time: Optional[int]=None, is_personal: Optional[bool]=None, next_offset: Optional[str]=None, switch_pm_text: Optional[str]=None, switch_pm_parameter: Optional[str]=None) -> AnswerInlineQuery: '\n :param results:\n :param cache_time:\n :param is_personal:\n :param next_offset:\n :param switch_pm_text:\n :param switch_pm_parameter:\n :return:\n ' from ..methods import AnswerInlineQuery return AnswerInlineQuery(inline_query_id=self.id, results=results, cache_time=cache_time, is_personal=is_personal, next_offset=next_offset, switch_pm_text=switch_pm_text, switch_pm_parameter=switch_pm_parameter)<|docstring|>:param results: :param cache_time: :param is_personal: :param next_offset: :param switch_pm_text: :param switch_pm_parameter: :return:<|endoftext|>
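A minimal usage sketch for the answer() helper above. It only constructs the AnswerInlineQuery method object; dispatching it is left to the surrounding bot framework, and the handler and result names below are assumptions for illustration, not part of the tgtypes API shown here.

# `query` is an InlineQuery instance and `results` is a pre-built
# List[InlineQueryResult]; both are assumed to come from the caller.
def handle_inline_query(query, results):
    method = query.answer(
        results=results,
        cache_time=300,    # let Telegram cache this answer for 5 minutes
        is_personal=True,  # results are specific to the querying user
        next_offset='',    # empty string: no further pagination
    )
    return method          # the caller is expected to actually send this request object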
392783ab59f3231d491b6fd2ed430d5224baa27703f88fe0ede71c58a201a00f
def synthesize(evaluator: Callable[([List[Command]], bool)], initial: List[Command], opcodes: List[Opcode], size: int) -> Optional[List[Command]]: '\n Synthesizes a series of commands of exactly the given size\n using the provided set of opcodes and the initial set of commands.\n\n Either returns a list of commands that passes the given evaluator,\n or returns None otherwise.\n ' def helper(current: List[Command]) -> Tuple[(bool, Optional[List[Command]])]: if (len(current) == size): return (evaluator(current), current) else: prev_args = list(range(len(current))) for op in opcodes: for args in product(prev_args, repeat=op.num_args): current.append(Command(op, list(args))) (res, possible) = helper(current) if res: return (res, possible) current.pop() return (False, None) copy = list(initial) (res, out) = helper(copy) return out
Synthesizes a series of commands of exactly the given size using the provided set of opcodes and the initial set of commands. Either returns a list of commands that passes the given evaluator, or returns None otherwise.
synthesis.py
synthesize
Michael0x2a/test_logic
0
python
def synthesize(evaluator: Callable[([List[Command]], bool)], initial: List[Command], opcodes: List[Opcode], size: int) -> Optional[List[Command]]: '\n Synthesizes a series of commands of exactly the given size\n using the provided set of opcodes and the initial set of commands.\n\n Either returns a list of commands that passes the given evaluator,\n or returns None otherwise.\n ' def helper(current: List[Command]) -> Tuple[(bool, Optional[List[Command]])]: if (len(current) == size): return (evaluator(current), current) else: prev_args = list(range(len(current))) for op in opcodes: for args in product(prev_args, repeat=op.num_args): current.append(Command(op, list(args))) (res, possible) = helper(current) if res: return (res, possible) current.pop() return (False, None) copy = list(initial) (res, out) = helper(copy) return out
def synthesize(evaluator: Callable[([List[Command]], bool)], initial: List[Command], opcodes: List[Opcode], size: int) -> Optional[List[Command]]: '\n Synthesizes a series of commands of exactly the given size\n using the provided set of opcodes and the initial set of commands.\n\n Either returns a list of commands that passes the given evaluator,\n or returns None otherwise.\n ' def helper(current: List[Command]) -> Tuple[(bool, Optional[List[Command]])]: if (len(current) == size): return (evaluator(current), current) else: prev_args = list(range(len(current))) for op in opcodes: for args in product(prev_args, repeat=op.num_args): current.append(Command(op, list(args))) (res, possible) = helper(current) if res: return (res, possible) current.pop() return (False, None) copy = list(initial) (res, out) = helper(copy) return out<|docstring|>Synthesizes a series of commands of exactly the given size using the provided set of opcodes and the initial set of commands. Either returns a list of commands that passes the given evaluator, or returns None otherwise.<|endoftext|>
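The Opcode, Variable and Command classes come from elsewhere in synthesis.py; the sketch below assumes their obvious shapes (opcodes expose num_args and eval(list) -> bool, Command(op, args) holds indices into earlier results) and uses trace(), defined next, to write an evaluator. The Nand class is a hypothetical stand-in for a concrete Opcode.

class Nand:                      # duck-typed opcode: two inputs, NAND of them
    num_args = 2
    def eval(self, args):
        return not (args[0] and args[1])

initial = [Command(Variable('a'), []), Command(Variable('b'), [])]

def wants_and(cmds):
    # Accept only programs that behave like `a and b` on every assignment.
    return all(trace({'a': a, 'b': b}, cmds) == (a and b)
               for a in (True, False) for b in (True, False))

# Two variables plus exactly two NAND gates: NAND(NAND(a, b), NAND(a, b)) == a and b.
program = synthesize(wants_and, initial, [Nand()], size=4)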
caf5ee7e6237be17ee0a0a392db6aabd5d50ad499f33d2942bf5f38e4926663b
def trace(var_assigns: Dict[(str, bool)], commands: List[Command]) -> bool: 'Evaluates the given commands using the provided variable assignments.' results = [] for cmd in commands: if isinstance(cmd.op, Variable): results.append(var_assigns[cmd.op.name]) else: arg_vals = [results[i] for i in cmd.args] results.append(cmd.op.eval(arg_vals)) return results[(- 1)]
Evaluates the given commands using the provided variable assignments.
synthesis.py
trace
Michael0x2a/test_logic
0
python
def trace(var_assigns: Dict[(str, bool)], commands: List[Command]) -> bool: results = [] for cmd in commands: if isinstance(cmd.op, Variable): results.append(var_assigns[cmd.op.name]) else: arg_vals = [results[i] for i in cmd.args] results.append(cmd.op.eval(arg_vals)) return results[(- 1)]
def trace(var_assigns: Dict[(str, bool)], commands: List[Command]) -> bool: results = [] for cmd in commands: if isinstance(cmd.op, Variable): results.append(var_assigns[cmd.op.name]) else: arg_vals = [results[i] for i in cmd.args] results.append(cmd.op.eval(arg_vals)) return results[(- 1)]<|docstring|>Evaluates the given commands using the provided variable assignments.<|endoftext|>
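A worked example of the evaluation model: each Command's args index earlier results, so the list below encodes (a and b) or c. The And/Or classes are hypothetical stand-ins for whatever concrete Opcode subclasses the module actually defines.

class And:
    num_args = 2
    def eval(self, args): return args[0] and args[1]

class Or:
    num_args = 2
    def eval(self, args): return args[0] or args[1]

cmds = [
    Command(Variable('a'), []),   # result 0
    Command(Variable('b'), []),   # result 1
    Command(Variable('c'), []),   # result 2
    Command(And(), [0, 1]),       # result 3 = a and b
    Command(Or(), [3, 2]),        # result 4 = (a and b) or c; trace returns this
]
assert trace({'a': True, 'b': False, 'c': True}, cmds)       # (True and False) or True  -> True
assert not trace({'a': True, 'b': False, 'c': False}, cmds)  # (True and False) or False -> False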
157355497e34ad2b57274a0fa4a427f4cdab6f6f12a150dbfc930f299be816bb
def get_vars(commands: List[Command]) -> List[str]: 'Extracts free variables from a command list.' out = [] for cmd in commands: if isinstance(cmd.op, Variable): out.append(cmd.op.name) return out
Extracts free variables from a command list.
synthesis.py
get_vars
Michael0x2a/test_logic
0
python
def get_vars(commands: List[Command]) -> List[str]: out = [] for cmd in commands: if isinstance(cmd.op, Variable): out.append(cmd.op.name) return out
def get_vars(commands: List[Command]) -> List[str]: out = [] for cmd in commands: if isinstance(cmd.op, Variable): out.append(cmd.op.name) return out<|docstring|>Extracts free variables from a command list.<|endoftext|>
480802aaffa22febfdcbdee17d627dd6221412185fd5bfb05a3caa9c538b0cac
def truth_table(commands: List[Command]) -> List[Tuple[(Dict[(str, bool)], bool)]]: 'Constructs a truth table of sorts, for debugging.' out = [] variables = get_vars(commands) for assignments in product([True, False], repeat=len(variables)): asgns = {a: b for (a, b) in zip(variables, assignments)} out.append((asgns, trace(asgns, commands))) return out
Constructs a truth table of sorts, for debugging.
synthesis.py
truth_table
Michael0x2a/test_logic
0
python
def truth_table(commands: List[Command]) -> List[Tuple[(Dict[(str, bool)], bool)]]: out = [] variables = get_vars(commands) for assignments in product([True, False], repeat=len(variables)): asgns = {a: b for (a, b) in zip(variables, assignments)} out.append((asgns, trace(asgns, commands))) return out
def truth_table(commands: List[Command]) -> List[Tuple[(Dict[(str, bool)], bool)]]: out = [] variables = get_vars(commands) for assignments in product([True, False], repeat=len(variables)): asgns = {a: b for (a, b) in zip(variables, assignments)} out.append((asgns, trace(asgns, commands))) return out<|docstring|>Constructs a truth table of sorts, for debugging.<|endoftext|>
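truth_table() is just a debugging aid; reusing the (a and b) or c command list sketched above, the full 2**3 = 8 rows can be dumped like this.

for assignment, value in truth_table(cmds):
    print(assignment, '->', value)
# e.g. {'a': True, 'b': True, 'c': False} -> True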
c021464a757b3008b4d3c18bfc25abf994e6a26a2f35cecac14f8ebf88e36d83
def make_evaluator(var_names: List[str], func: Callable[(..., bool)]) -> Callable[([List[Command]], bool)]: 'Takes a list of variable names, a regular function, and constructs\n the corresponding evaluator.' def eval(commands: List[Command]) -> bool: for assignments in product([True, False], repeat=len(var_names)): asgns = {a: b for (a, b) in zip(var_names, assignments)} given = trace(asgns, commands) expected = func(*assignments) if (given != expected): return False return True return eval
Takes a list of variable names, a regular function, and constructs the corresponding evaluator.
synthesis.py
make_evaluator
Michael0x2a/test_logic
0
python
def make_evaluator(var_names: List[str], func: Callable[(..., bool)]) -> Callable[([List[Command]], bool)]: 'Takes a list of variable names, a regular function, and constructs\n the corresponding evaluator.' def eval(commands: List[Command]) -> bool: for assignments in product([True, False], repeat=len(var_names)): asgns = {a: b for (a, b) in zip(var_names, assignments)} given = trace(asgns, commands) expected = func(*assignments) if (given != expected): return False return True return eval
def make_evaluator(var_names: List[str], func: Callable[(..., bool)]) -> Callable[([List[Command]], bool)]: 'Takes a list of variable names, a regular function, and constructs\n the corresponding evaluator.' def eval(commands: List[Command]) -> bool: for assignments in product([True, False], repeat=len(var_names)): asgns = {a: b for (a, b) in zip(var_names, assignments)} given = trace(asgns, commands) expected = func(*assignments) if (given != expected): return False return True return eval<|docstring|>Takes a list of variable names, a regular function, and constructs the corresponding evaluator.<|endoftext|>
686e1205fe7f5b2e5cc850e5e42a8076e3b53e2300ba91261563a23889316961
def full_synthesize(variables: List[str], func: Callable[(..., bool)], opcodes: List[Opcode], limit: int) -> Optional[List[Command]]: 'Attempts to synthesize a set of commands that match the given function\n using only the provided opcodes. Will give up if the number of commands\n exceeds the provided limit.' cmds = [Command(Variable(v), []) for v in variables] evaluator = make_evaluator(variables, func) for i in range((len(cmds) + 1), limit): res = synthesize(evaluator, cmds, opcodes, i) if (res is not None): return res return None
Attempts to synthesize a set of commands that match the given function using only the provided opcodes. Will give up if the number of commands exceeds the provided limit.
synthesis.py
full_synthesize
Michael0x2a/test_logic
0
python
def full_synthesize(variables: List[str], func: Callable[(..., bool)], opcodes: List[Opcode], limit: int) -> Optional[List[Command]]: 'Attempts to synthesize a set of commands that match the given function\n using only the provided opcodes. Will give up if the number of commands\n exceeds the provided limit.' cmds = [Command(Variable(v), []) for v in variables] evaluator = make_evaluator(variables, func) for i in range((len(cmds) + 1), limit): res = synthesize(evaluator, cmds, opcodes, i) if (res is not None): return res return None
def full_synthesize(variables: List[str], func: Callable[(..., bool)], opcodes: List[Opcode], limit: int) -> Optional[List[Command]]: 'Attempts to synthesize a set of commands that match the given function\n using only the provided opcodes. Will give up if the number of commands\n exceeds the provided limit.' cmds = [Command(Variable(v), []) for v in variables] evaluator = make_evaluator(variables, func) for i in range((len(cmds) + 1), limit): res = synthesize(evaluator, cmds, opcodes, i) if (res is not None): return res return None<|docstring|>Attempts to synthesize a set of commands that match the given function using only the provided opcodes. Will give up if the number of commands exceeds the provided limit.<|endoftext|>
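Putting the pieces together: name the free variables, hand full_synthesize a reference Python function plus an opcode set, and it grows candidate programs one command at a time until the limit. The sketch below synthesizes XOR from the hypothetical And/Or opcodes above plus a Not stand-in.

class Not:
    num_args = 1
    def eval(self, args): return not args[0]

xor_program = full_synthesize(
    variables=['a', 'b'],
    func=lambda a, b: a != b,       # reference implementation of XOR
    opcodes=[And(), Or(), Not()],
    limit=8,                        # program sizes 3..7 are tried, then it gives up
)
display(xor_program)                # prints one command per line, or N\A if nothing was found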
7059c52c119043bb784922407582e7f40cc66b1ebc898b31f5f00996b9a251ea
def display(cmds: Optional[List[Command]]) -> None: 'Nicely displays a command list.' if (cmds is None): print('N\\A') else: for (idx, cmd) in enumerate(cmds): print(idx, cmd) print()
Nicely displays a command list.
synthesis.py
display
Michael0x2a/test_logic
0
python
def display(cmds: Optional[List[Command]]) -> None: if (cmds is None): print('N\\A') else: for (idx, cmd) in enumerate(cmds): print(idx, cmd) print()
def display(cmds: Optional[List[Command]]) -> None: if (cmds is None): print('N\\A') else: for (idx, cmd) in enumerate(cmds): print(idx, cmd) print()<|docstring|>Nicely displays a command list.<|endoftext|>
d8054d0795da4ba3acac56595dd4b070d2bbecc3809b00db1a840a3b352059c0
def get_permission_actions(self): '\n Permissions supported by the plugin.\n ' return ['JANUS_VIEW']
Permissions supported by the plugin.
tracjanusgateway/web_ui.py
get_permission_actions
t-kenji/trac-janus-plugin
0
python
def get_permission_actions(self): '\n \n ' return ['JANUS_VIEW']
def get_permission_actions(self): '\n \n ' return ['JANUS_VIEW']<|docstring|>Permissions supported by the plugin.<|endoftext|>
b6a44da36c7334a3b8915c1cb39dab374624eb857d515dba1ed1294cdb4da56b
def get_active_navigation_item(self, req): '\n This method is only called for the `IRequestHandler` processing the\n request.\n ' return 'janus'
This method is only called for the `IRequestHandler` processing the request.
tracjanusgateway/web_ui.py
get_active_navigation_item
t-kenji/trac-janus-plugin
0
python
def get_active_navigation_item(self, req): '\n This method is only called for the `IRequestHandler` processing the\n request.\n ' return 'janus'
def get_active_navigation_item(self, req): '\n This method is only called for the `IRequestHandler` processing the\n request.\n ' return 'janus'<|docstring|>This method is only called for the `IRequestHandler` processing the request.<|endoftext|>
6263248203c28866789ad2a5bcbc2d7d17b50f06a33b4dd2c8fef862589befad
def process_request(self, req): '\n Processing the request.\n ' req.perm('janus').assert_permission('JANUS_VIEW') plugins = ('echo', 'videocall', 'videoroom', 'audioroom', 'screensharing') m = re.match('/janus/(?P<handler>[\\w/-]+)', req.path_info) if (m is not None): handler = m.group('handler') if (handler in plugins): return self._process_plugin(req, handler) if handler.startswith('event/'): return self._process_event(req, handler[6:]) add_stylesheet(req, 'janus/css/janus.css') return ('janus.html', {}, None)
Processing the request.
tracjanusgateway/web_ui.py
process_request
t-kenji/trac-janus-plugin
0
python
def process_request(self, req): '\n \n ' req.perm('janus').assert_permission('JANUS_VIEW') plugins = ('echo', 'videocall', 'videoroom', 'audioroom', 'screensharing') m = re.match('/janus/(?P<handler>[\\w/-]+)', req.path_info) if (m is not None): handler = m.group('handler') if (handler in plugins): return self._process_plugin(req, handler) if handler.startswith('event/'): return self._process_event(req, handler[6:]) add_stylesheet(req, 'janus/css/janus.css') return ('janus.html', {}, None)
def process_request(self, req): '\n \n ' req.perm('janus').assert_permission('JANUS_VIEW') plugins = ('echo', 'videocall', 'videoroom', 'audioroom', 'screensharing') m = re.match('/janus/(?P<handler>[\\w/-]+)', req.path_info) if (m is not None): handler = m.group('handler') if (handler in plugins): return self._process_plugin(req, handler) if handler.startswith('event/'): return self._process_event(req, handler[6:]) add_stylesheet(req, 'janus/css/janus.css') return ('janus.html', {}, None)<|docstring|>Processing the request.<|endoftext|>
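Dispatch in process_request() hinges entirely on that one regular expression; the pattern below is copied from the code and the loop simply illustrates how a few request paths are routed.

import re

pattern = r'/janus/(?P<handler>[\w/-]+)'
for path_info in ('/janus/videoroom', '/janus/event/room-1234', '/janus'):
    m = re.match(pattern, path_info)
    print(path_info, '->', m.group('handler') if m else 'no match, render janus.html')
# /janus/videoroom       -> videoroom        (goes to _process_plugin)
# /janus/event/room-1234 -> event/room-1234  (goes to _process_event)
# /janus                 -> no match, render janus.html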
65225dbe3fa50ca1d65edfbeecc72f4dcf54e2c6a168c3e7d9fb81d664b712ea
def _process_plugin(self, req, plugin): '\n Processing the plugin.\n ' data = {} template = 'janus.html' add_stylesheet(req, 'janus/css/jquery-confirm.min.css') add_stylesheet(req, 'janus/css/purecss-base-min.css') add_stylesheet(req, 'janus/css/purecss-forms-min.css') add_stylesheet(req, 'janus/css/purecss-grids-min.css') add_stylesheet(req, 'janus/css/purecss-grids-responsive-min.css') add_stylesheet(req, 'janus/css/purecss-buttons-min.css') add_stylesheet(req, 'janus/css/purecss-menus-min.css') add_stylesheet(req, 'janus/css/font-awesome.min.css') add_script(req, 'janus/js/adapter.min.js') add_script(req, 'janus/js/jquery.blockUI.min.js') add_script(req, 'janus/js/jquery-confirm.min.js') add_script(req, 'janus/js/purecss-menus.js') add_script(req, 'janus/js/spin.min.js') add_script(req, 'janus/js/compat.js') add_script(req, 'janus/js/janus.js') if (req.locale is not None): add_script(req, 'janus/js/tracjanusgateway/{}.js'.format(req.locale)) if isinstance(req.remote_user, basestring): username = req.remote_user elif isinstance(req.authname, basestring): username = req.authname elif ('name' in req.session): username = req.session.get('name', '') else: username = '' data['username'] = username data['video_rooms'] = self.video_rooms data['audio_rooms'] = self.audio_rooms add_script_data(req, {'debug': req.args.get('debug', 'false'), 'event_uri': req.href.janus('event')}) if plugin.startswith('echo'): add_ctxtnav(req, _('Echo')) template = 'echo.html' add_script(req, 'janus/js/echo.js') else: add_ctxtnav(req, _('Echo'), href=req.href.janus('echo')) if plugin.startswith('videocall'): add_ctxtnav(req, _('VideoCall')) template = 'videocall.html' add_script(req, 'janus/js/videocall.js') add_script_data(req, {'avatar_url': req.href.avatar('')}) else: add_ctxtnav(req, _('VideoCall'), href=req.href.janus('videocall')) if plugin.startswith('videoroom'): add_ctxtnav(req, _('VideoRoom')) template = 'videoroom.html' add_script(req, 'janus/js/videoroom.js') else: add_ctxtnav(req, _('VideoRoom'), href=req.href.janus('videoroom')) if plugin.startswith('audioroom'): add_ctxtnav(req, _('AudioRoom')) template = 'audioroom.html' add_script(req, 'janus/js/audiobridge.js') else: add_ctxtnav(req, _('AudioRoom'), href=req.href.janus('audioroom')) if plugin.startswith('screensharing'): add_ctxtnav(req, _('ScreenSharing')) template = 'screensharing.html' add_script(req, 'janus/js/screensharing.js') else: add_ctxtnav(req, _('ScreenSharing'), href=req.href.janus('screensharing')) add_stylesheet(req, 'janus/css/janus.css') return (template, data, None)
Processing the plugin.
tracjanusgateway/web_ui.py
_process_plugin
t-kenji/trac-janus-plugin
0
python
def _process_plugin(self, req, plugin): '\n \n ' data = {} template = 'janus.html' add_stylesheet(req, 'janus/css/jquery-confirm.min.css') add_stylesheet(req, 'janus/css/purecss-base-min.css') add_stylesheet(req, 'janus/css/purecss-forms-min.css') add_stylesheet(req, 'janus/css/purecss-grids-min.css') add_stylesheet(req, 'janus/css/purecss-grids-responsive-min.css') add_stylesheet(req, 'janus/css/purecss-buttons-min.css') add_stylesheet(req, 'janus/css/purecss-menus-min.css') add_stylesheet(req, 'janus/css/font-awesome.min.css') add_script(req, 'janus/js/adapter.min.js') add_script(req, 'janus/js/jquery.blockUI.min.js') add_script(req, 'janus/js/jquery-confirm.min.js') add_script(req, 'janus/js/purecss-menus.js') add_script(req, 'janus/js/spin.min.js') add_script(req, 'janus/js/compat.js') add_script(req, 'janus/js/janus.js') if (req.locale is not None): add_script(req, 'janus/js/tracjanusgateway/{}.js'.format(req.locale)) if isinstance(req.remote_user, basestring): username = req.remote_user elif isinstance(req.authname, basestring): username = req.authname elif ('name' in req.session): username = req.session.get('name', ) else: username = data['username'] = username data['video_rooms'] = self.video_rooms data['audio_rooms'] = self.audio_rooms add_script_data(req, {'debug': req.args.get('debug', 'false'), 'event_uri': req.href.janus('event')}) if plugin.startswith('echo'): add_ctxtnav(req, _('Echo')) template = 'echo.html' add_script(req, 'janus/js/echo.js') else: add_ctxtnav(req, _('Echo'), href=req.href.janus('echo')) if plugin.startswith('videocall'): add_ctxtnav(req, _('VideoCall')) template = 'videocall.html' add_script(req, 'janus/js/videocall.js') add_script_data(req, {'avatar_url': req.href.avatar()}) else: add_ctxtnav(req, _('VideoCall'), href=req.href.janus('videocall')) if plugin.startswith('videoroom'): add_ctxtnav(req, _('VideoRoom')) template = 'videoroom.html' add_script(req, 'janus/js/videoroom.js') else: add_ctxtnav(req, _('VideoRoom'), href=req.href.janus('videoroom')) if plugin.startswith('audioroom'): add_ctxtnav(req, _('AudioRoom')) template = 'audioroom.html' add_script(req, 'janus/js/audiobridge.js') else: add_ctxtnav(req, _('AudioRoom'), href=req.href.janus('audioroom')) if plugin.startswith('screensharing'): add_ctxtnav(req, _('ScreenSharing')) template = 'screensharing.html' add_script(req, 'janus/js/screensharing.js') else: add_ctxtnav(req, _('ScreenSharing'), href=req.href.janus('screensharing')) add_stylesheet(req, 'janus/css/janus.css') return (template, data, None)
def _process_plugin(self, req, plugin): '\n \n ' data = {} template = 'janus.html' add_stylesheet(req, 'janus/css/jquery-confirm.min.css') add_stylesheet(req, 'janus/css/purecss-base-min.css') add_stylesheet(req, 'janus/css/purecss-forms-min.css') add_stylesheet(req, 'janus/css/purecss-grids-min.css') add_stylesheet(req, 'janus/css/purecss-grids-responsive-min.css') add_stylesheet(req, 'janus/css/purecss-buttons-min.css') add_stylesheet(req, 'janus/css/purecss-menus-min.css') add_stylesheet(req, 'janus/css/font-awesome.min.css') add_script(req, 'janus/js/adapter.min.js') add_script(req, 'janus/js/jquery.blockUI.min.js') add_script(req, 'janus/js/jquery-confirm.min.js') add_script(req, 'janus/js/purecss-menus.js') add_script(req, 'janus/js/spin.min.js') add_script(req, 'janus/js/compat.js') add_script(req, 'janus/js/janus.js') if (req.locale is not None): add_script(req, 'janus/js/tracjanusgateway/{}.js'.format(req.locale)) if isinstance(req.remote_user, basestring): username = req.remote_user elif isinstance(req.authname, basestring): username = req.authname elif ('name' in req.session): username = req.session.get('name', ) else: username = data['username'] = username data['video_rooms'] = self.video_rooms data['audio_rooms'] = self.audio_rooms add_script_data(req, {'debug': req.args.get('debug', 'false'), 'event_uri': req.href.janus('event')}) if plugin.startswith('echo'): add_ctxtnav(req, _('Echo')) template = 'echo.html' add_script(req, 'janus/js/echo.js') else: add_ctxtnav(req, _('Echo'), href=req.href.janus('echo')) if plugin.startswith('videocall'): add_ctxtnav(req, _('VideoCall')) template = 'videocall.html' add_script(req, 'janus/js/videocall.js') add_script_data(req, {'avatar_url': req.href.avatar()}) else: add_ctxtnav(req, _('VideoCall'), href=req.href.janus('videocall')) if plugin.startswith('videoroom'): add_ctxtnav(req, _('VideoRoom')) template = 'videoroom.html' add_script(req, 'janus/js/videoroom.js') else: add_ctxtnav(req, _('VideoRoom'), href=req.href.janus('videoroom')) if plugin.startswith('audioroom'): add_ctxtnav(req, _('AudioRoom')) template = 'audioroom.html' add_script(req, 'janus/js/audiobridge.js') else: add_ctxtnav(req, _('AudioRoom'), href=req.href.janus('audioroom')) if plugin.startswith('screensharing'): add_ctxtnav(req, _('ScreenSharing')) template = 'screensharing.html' add_script(req, 'janus/js/screensharing.js') else: add_ctxtnav(req, _('ScreenSharing'), href=req.href.janus('screensharing')) add_stylesheet(req, 'janus/css/janus.css') return (template, data, None)<|docstring|>Processing the plugin.<|endoftext|>
f027960704ae5e9ba5fc19489ad6a20392f895162af4438f57726f0f1527b9ca
def _ex_connection_class_kwargs(self): '\n Add the region to the kwargs before the connection is instantiated\n ' kwargs = super(DimensionDataLBDriver, self)._ex_connection_class_kwargs() kwargs['region'] = self.selected_region return kwargs
Add the region to the kwargs before the connection is instantiated
libcloud/loadbalancer/drivers/dimensiondata.py
_ex_connection_class_kwargs
gig-tech/libcloud
1,435
python
def _ex_connection_class_kwargs(self): '\n \n ' kwargs = super(DimensionDataLBDriver, self)._ex_connection_class_kwargs() kwargs['region'] = self.selected_region return kwargs
def _ex_connection_class_kwargs(self): '\n \n ' kwargs = super(DimensionDataLBDriver, self)._ex_connection_class_kwargs() kwargs['region'] = self.selected_region return kwargs<|docstring|>Add the region to the kwargs before the connection is instantiated<|endoftext|>
3ad5ed5f24226bb0e247961d6491d8d82114cf4e5834333cd23e7bef410e8ba9
def create_balancer(self, name, port=None, protocol=None, algorithm=None, members=None, ex_listener_ip_address=None): "\n Create a new load balancer instance\n\n :param name: Name of the new load balancer (required)\n :type name: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param protocol: Loadbalancer protocol, defaults to http.\n :type protocol: ``str``\n\n :param members: list of Members to attach to balancer (optional)\n :type members: ``list`` of :class:`Member`\n\n :param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN.\n :type algorithm: :class:`.Algorithm`\n\n :param ex_listener_ip_address: Must be a valid IPv4 in dot-decimal\n notation (x.x.x.x).\n :type ex_listener_ip_address: ``str``\n\n :rtype: :class:`LoadBalancer`\n " network_domain_id = self.network_domain_id if (protocol is None): protocol = 'http' if (algorithm is None): algorithm = Algorithm.ROUND_ROBIN pool = self.ex_create_pool(network_domain_id=network_domain_id, name=name, ex_description=None, balancer_method=self._ALGORITHM_TO_VALUE_MAP[algorithm]) if (members is not None): for member in members: node = self.ex_create_node(network_domain_id=network_domain_id, name=member.ip, ip=member.ip, ex_description=None) self.ex_create_pool_member(pool=pool, node=node, port=port) listener = self.ex_create_virtual_listener(network_domain_id=network_domain_id, name=name, ex_description=name, port=port, pool=pool, listener_ip_address=ex_listener_ip_address) return LoadBalancer(id=listener.id, name=listener.name, state=State.RUNNING, ip=listener.ip, port=port, driver=self, extra={'pool_id': pool.id, 'network_domain_id': network_domain_id, 'listener_ip_address': ex_listener_ip_address})
Create a new load balancer instance :param name: Name of the new load balancer (required) :type name: ``str`` :param port: An integer in the range of 1-65535. If not supplied, it will be taken to mean 'Any Port' :type port: ``int`` :param protocol: Loadbalancer protocol, defaults to http. :type protocol: ``str`` :param members: list of Members to attach to balancer (optional) :type members: ``list`` of :class:`Member` :param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN. :type algorithm: :class:`.Algorithm` :param ex_listener_ip_address: Must be a valid IPv4 in dot-decimal notation (x.x.x.x). :type ex_listener_ip_address: ``str`` :rtype: :class:`LoadBalancer`
libcloud/loadbalancer/drivers/dimensiondata.py
create_balancer
gig-tech/libcloud
1,435
python
def create_balancer(self, name, port=None, protocol=None, algorithm=None, members=None, ex_listener_ip_address=None): "\n Create a new load balancer instance\n\n :param name: Name of the new load balancer (required)\n :type name: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param protocol: Loadbalancer protocol, defaults to http.\n :type protocol: ``str``\n\n :param members: list of Members to attach to balancer (optional)\n :type members: ``list`` of :class:`Member`\n\n :param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN.\n :type algorithm: :class:`.Algorithm`\n\n :param ex_listener_ip_address: Must be a valid IPv4 in dot-decimal\n notation (x.x.x.x).\n :type ex_listener_ip_address: ``str``\n\n :rtype: :class:`LoadBalancer`\n " network_domain_id = self.network_domain_id if (protocol is None): protocol = 'http' if (algorithm is None): algorithm = Algorithm.ROUND_ROBIN pool = self.ex_create_pool(network_domain_id=network_domain_id, name=name, ex_description=None, balancer_method=self._ALGORITHM_TO_VALUE_MAP[algorithm]) if (members is not None): for member in members: node = self.ex_create_node(network_domain_id=network_domain_id, name=member.ip, ip=member.ip, ex_description=None) self.ex_create_pool_member(pool=pool, node=node, port=port) listener = self.ex_create_virtual_listener(network_domain_id=network_domain_id, name=name, ex_description=name, port=port, pool=pool, listener_ip_address=ex_listener_ip_address) return LoadBalancer(id=listener.id, name=listener.name, state=State.RUNNING, ip=listener.ip, port=port, driver=self, extra={'pool_id': pool.id, 'network_domain_id': network_domain_id, 'listener_ip_address': ex_listener_ip_address})
def create_balancer(self, name, port=None, protocol=None, algorithm=None, members=None, ex_listener_ip_address=None): "\n Create a new load balancer instance\n\n :param name: Name of the new load balancer (required)\n :type name: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param protocol: Loadbalancer protocol, defaults to http.\n :type protocol: ``str``\n\n :param members: list of Members to attach to balancer (optional)\n :type members: ``list`` of :class:`Member`\n\n :param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN.\n :type algorithm: :class:`.Algorithm`\n\n :param ex_listener_ip_address: Must be a valid IPv4 in dot-decimal\n notation (x.x.x.x).\n :type ex_listener_ip_address: ``str``\n\n :rtype: :class:`LoadBalancer`\n " network_domain_id = self.network_domain_id if (protocol is None): protocol = 'http' if (algorithm is None): algorithm = Algorithm.ROUND_ROBIN pool = self.ex_create_pool(network_domain_id=network_domain_id, name=name, ex_description=None, balancer_method=self._ALGORITHM_TO_VALUE_MAP[algorithm]) if (members is not None): for member in members: node = self.ex_create_node(network_domain_id=network_domain_id, name=member.ip, ip=member.ip, ex_description=None) self.ex_create_pool_member(pool=pool, node=node, port=port) listener = self.ex_create_virtual_listener(network_domain_id=network_domain_id, name=name, ex_description=name, port=port, pool=pool, listener_ip_address=ex_listener_ip_address) return LoadBalancer(id=listener.id, name=listener.name, state=State.RUNNING, ip=listener.ip, port=port, driver=self, extra={'pool_id': pool.id, 'network_domain_id': network_domain_id, 'listener_ip_address': ex_listener_ip_address})<|docstring|>Create a new load balancer instance :param name: Name of the new load balancer (required) :type name: ``str`` :param port: An integer in the range of 1-65535. If not supplied, it will be taken to mean 'Any Port' :type port: ``int`` :param protocol: Loadbalancer protocol, defaults to http. :type protocol: ``str`` :param members: list of Members to attach to balancer (optional) :type members: ``list`` of :class:`Member` :param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN. :type algorithm: :class:`.Algorithm` :param ex_listener_ip_address: Must be a valid IPv4 in dot-decimal notation (x.x.x.x). :type ex_listener_ip_address: ``str`` :rtype: :class:`LoadBalancer`<|endoftext|>
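A usage sketch at the driver level, assuming valid Dimension Data credentials; the user name, password, region and network-domain UUID below are placeholders.

from libcloud.loadbalancer.base import Algorithm, Member
from libcloud.loadbalancer.providers import get_driver
from libcloud.loadbalancer.types import Provider

cls = get_driver(Provider.DIMENSIONDATA)
driver = cls('my-user', 'my-password', region='dd-au')          # placeholder credentials/region

# The driver must know which network domain to build the listener in.
driver.ex_set_current_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')

balancer = driver.create_balancer(
    name='web-tier',
    port=80,
    protocol='http',
    algorithm=Algorithm.ROUND_ROBIN,
    members=[Member(id=None, ip='10.1.1.10', port=80),
             Member(id=None, ip='10.1.1.11', port=80)],
)
print(balancer.ip, balancer.extra['pool_id'])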
e44d2244d6c609fb677b2f427a725c2643b9b79b7e3ee1015a53a051230b94c4
def list_balancers(self, ex_network_domain_id=None): '\n List all loadbalancers inside a geography or in given network.\n\n In Dimension Data terminology these are known as virtual listeners\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :rtype: ``list`` of :class:`LoadBalancer`\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} return self._to_balancers(self.connection.request_with_orgId_api_2('networkDomainVip/virtualListener', params=params).object)
List all loadbalancers inside a geography or in given network. In Dimension Data terminology these are known as virtual listeners :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :rtype: ``list`` of :class:`LoadBalancer`
libcloud/loadbalancer/drivers/dimensiondata.py
list_balancers
gig-tech/libcloud
1,435
python
def list_balancers(self, ex_network_domain_id=None): '\n List all loadbalancers inside a geography or in given network.\n\n In Dimension Data terminology these are known as virtual listeners\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :rtype: ``list`` of :class:`LoadBalancer`\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} return self._to_balancers(self.connection.request_with_orgId_api_2('networkDomainVip/virtualListener', params=params).object)
def list_balancers(self, ex_network_domain_id=None): '\n List all loadbalancers inside a geography or in given network.\n\n In Dimension Data terminology these are known as virtual listeners\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :rtype: ``list`` of :class:`LoadBalancer`\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} return self._to_balancers(self.connection.request_with_orgId_api_2('networkDomainVip/virtualListener', params=params).object)<|docstring|>List all loadbalancers inside a geography or in given network. In Dimension Data terminology these are known as virtual listeners :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :rtype: ``list`` of :class:`LoadBalancer`<|endoftext|>
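Continuing the sketch above, listing can be scoped either to the whole geography or to a single network domain.

for lb in driver.list_balancers():                     # every virtual listener in the geography
    print(lb.id, lb.name, lb.ip, lb.port)

scoped = driver.list_balancers(                        # only one network domain (placeholder UUID)
    ex_network_domain_id='8cdfd607-f429-4df6-9352-162cfc0891be')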
dc82a569aa068e2f2ad29ec32900f5d59a6a8a87d756a576cbf4b6764aaa70b4
def get_balancer(self, balancer_id): '\n Return a :class:`LoadBalancer` object.\n\n :param balancer_id: id of a load balancer you want to fetch\n :type balancer_id: ``str``\n\n :rtype: :class:`LoadBalancer`\n ' bal = self.connection.request_with_orgId_api_2(('networkDomainVip/virtualListener/%s' % balancer_id)).object return self._to_balancer(bal)
Return a :class:`LoadBalancer` object. :param balancer_id: id of a load balancer you want to fetch :type balancer_id: ``str`` :rtype: :class:`LoadBalancer`
libcloud/loadbalancer/drivers/dimensiondata.py
get_balancer
gig-tech/libcloud
1,435
python
def get_balancer(self, balancer_id): '\n Return a :class:`LoadBalancer` object.\n\n :param balancer_id: id of a load balancer you want to fetch\n :type balancer_id: ``str``\n\n :rtype: :class:`LoadBalancer`\n ' bal = self.connection.request_with_orgId_api_2(('networkDomainVip/virtualListener/%s' % balancer_id)).object return self._to_balancer(bal)
def get_balancer(self, balancer_id): '\n Return a :class:`LoadBalancer` object.\n\n :param balancer_id: id of a load balancer you want to fetch\n :type balancer_id: ``str``\n\n :rtype: :class:`LoadBalancer`\n ' bal = self.connection.request_with_orgId_api_2(('networkDomainVip/virtualListener/%s' % balancer_id)).object return self._to_balancer(bal)<|docstring|>Return a :class:`LoadBalancer` object. :param balancer_id: id of a load balancer you want to fetch :type balancer_id: ``str`` :rtype: :class:`LoadBalancer`<|endoftext|>
cdf78f52f24e739df3adc63af933389509455e644df1cf88faa90cace88c30d8
def list_protocols(self): '\n Return a list of supported protocols.\n\n Since all protocols are supported by Dimension Data, this is a list\n of common protocols.\n\n :rtype: ``list`` of ``str``\n ' return ['http', 'https', 'tcp', 'udp', 'ftp', 'smtp']
Return a list of supported protocols. Since all protocols are supported by Dimension Data, this is a list of common protocols. :rtype: ``list`` of ``str``
libcloud/loadbalancer/drivers/dimensiondata.py
list_protocols
gig-tech/libcloud
1,435
python
def list_protocols(self): '\n Return a list of supported protocols.\n\n Since all protocols are supported by Dimension Data, this is a list\n of common protocols.\n\n :rtype: ``list`` of ``str``\n ' return ['http', 'https', 'tcp', 'udp', 'ftp', 'smtp']
def list_protocols(self): '\n Return a list of supported protocols.\n\n Since all protocols are supported by Dimension Data, this is a list\n of common protocols.\n\n :rtype: ``list`` of ``str``\n ' return ['http', 'https', 'tcp', 'udp', 'ftp', 'smtp']<|docstring|>Return a list of supported protocols. Since all protocols are supported by Dimension Data, this is a list of common protocols. :rtype: ``list`` of ``str``<|endoftext|>
14b4a3b180fd532a428a3ac3680cd64ce9be0bce30efd7eee8dba19233c69144
def balancer_list_members(self, balancer): '\n Return list of members attached to balancer.\n\n In Dimension Data terminology these are the members of the pools\n within a virtual listener.\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :rtype: ``list`` of :class:`Member`\n ' pool_members = self.ex_get_pool_members(balancer.extra['pool_id']) members = [] for pool_member in pool_members: members.append(Member(id=pool_member.id, ip=pool_member.ip, port=pool_member.port, balancer=balancer, extra=None)) return members
Return list of members attached to balancer. In Dimension Data terminology these are the members of the pools within a virtual listener. :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :rtype: ``list`` of :class:`Member`
libcloud/loadbalancer/drivers/dimensiondata.py
balancer_list_members
gig-tech/libcloud
1,435
python
def balancer_list_members(self, balancer): '\n Return list of members attached to balancer.\n\n In Dimension Data terminology these are the members of the pools\n within a virtual listener.\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :rtype: ``list`` of :class:`Member`\n ' pool_members = self.ex_get_pool_members(balancer.extra['pool_id']) members = [] for pool_member in pool_members: members.append(Member(id=pool_member.id, ip=pool_member.ip, port=pool_member.port, balancer=balancer, extra=None)) return members
def balancer_list_members(self, balancer): '\n Return list of members attached to balancer.\n\n In Dimension Data terminology these are the members of the pools\n within a virtual listener.\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :rtype: ``list`` of :class:`Member`\n ' pool_members = self.ex_get_pool_members(balancer.extra['pool_id']) members = [] for pool_member in pool_members: members.append(Member(id=pool_member.id, ip=pool_member.ip, port=pool_member.port, balancer=balancer, extra=None)) return members<|docstring|>Return list of members attached to balancer. In Dimension Data terminology these are the members of the pools within a virtual listener. :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :rtype: ``list`` of :class:`Member`<|endoftext|>
520298df1364f7ae1cc2bb21c0edbee1bf0fec9148bbc8bfc9c856ed5f5e0e5c
def balancer_attach_member(self, balancer, member): '\n Attach a member to balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member to join to the balancer\n :type member: :class:`Member`\n\n :return: Member after joining the balancer.\n :rtype: :class:`Member`\n ' node = self.ex_create_node(network_domain_id=balancer.extra['network_domain_id'], name=('Member.' + member.ip), ip=member.ip, ex_description='') if (node is False): return False pool = self.ex_get_pool(balancer.extra['pool_id']) pool_member = self.ex_create_pool_member(pool=pool, node=node, port=member.port) member.id = pool_member.id return member
Attach a member to balancer :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :param member: Member to join to the balancer :type member: :class:`Member` :return: Member after joining the balancer. :rtype: :class:`Member`
libcloud/loadbalancer/drivers/dimensiondata.py
balancer_attach_member
gig-tech/libcloud
1,435
python
def balancer_attach_member(self, balancer, member): '\n Attach a member to balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member to join to the balancer\n :type member: :class:`Member`\n\n :return: Member after joining the balancer.\n :rtype: :class:`Member`\n ' node = self.ex_create_node(network_domain_id=balancer.extra['network_domain_id'], name=('Member.' + member.ip), ip=member.ip, ex_description='') if (node is False): return False pool = self.ex_get_pool(balancer.extra['pool_id']) pool_member = self.ex_create_pool_member(pool=pool, node=node, port=member.port) member.id = pool_member.id return member
def balancer_attach_member(self, balancer, member): '\n Attach a member to balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member to join to the balancer\n :type member: :class:`Member`\n\n :return: Member after joining the balancer.\n :rtype: :class:`Member`\n ' node = self.ex_create_node(network_domain_id=balancer.extra['network_domain_id'], name=('Member.' + member.ip), ip=member.ip, ex_description='') if (node is False): return False pool = self.ex_get_pool(balancer.extra['pool_id']) pool_member = self.ex_create_pool_member(pool=pool, node=node, port=member.port) member.id = pool_member.id return member<|docstring|>Attach a member to balancer :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :param member: Member to join to the balancer :type member: :class:`Member` :return: Member after joining the balancer. :rtype: :class:`Member`<|endoftext|>
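Member management operates on the LoadBalancer object from get_balancer() or create_balancer(); a sketch reusing the driver from the earlier example, with a placeholder listener UUID.

lb = driver.get_balancer('6115469d-a8bb-445b-bb23-d23b5283f2b9')

# Attaching creates the VIP node and pool member behind the scenes.
new_member = driver.balancer_attach_member(lb, Member(id=None, ip='10.1.1.12', port=80))

for m in driver.balancer_list_members(lb):
    print(m.id, m.ip, m.port)

driver.balancer_detach_member(lb, new_member)   # True once the API accepts the removal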
60c419c95ccda9db52003660fb1464e53f23c9ecbf2a7705e9d687262a678889
def balancer_detach_member(self, balancer, member): '\n Detach member from balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member which should be used\n :type member: :class:`Member`\n\n :return: ``True`` if member detach was successful, otherwise ``False``.\n :rtype: ``bool``\n ' create_pool_m = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/removePoolMember', method='POST', data=ET.tostring(create_pool_m)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Detach member from balancer :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :param member: Member which should be used :type member: :class:`Member` :return: ``True`` if member detach was successful, otherwise ``False``. :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
balancer_detach_member
gig-tech/libcloud
1,435
python
def balancer_detach_member(self, balancer, member): '\n Detach member from balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member which should be used\n :type member: :class:`Member`\n\n :return: ``True`` if member detach was successful, otherwise ``False``.\n :rtype: ``bool``\n ' create_pool_m = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/removePoolMember', method='POST', data=ET.tostring(create_pool_m)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def balancer_detach_member(self, balancer, member): '\n Detach member from balancer\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :param member: Member which should be used\n :type member: :class:`Member`\n\n :return: ``True`` if member detach was successful, otherwise ``False``.\n :rtype: ``bool``\n ' create_pool_m = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/removePoolMember', method='POST', data=ET.tostring(create_pool_m)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Detach member from balancer :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :param member: Member which should be used :type member: :class:`Member` :return: ``True`` if member detach was successful, otherwise ``False``. :rtype: ``bool``<|endoftext|>
ac9b4d70ff7bc837e60c1172822f6d24a434b82d344388c18614f0b10c6471c6
def destroy_balancer(self, balancer): '\n Destroy a load balancer (virtual listener)\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :return: ``True`` if the destroy was successful, otherwise ``False``.\n :rtype: ``bool``\n ' delete_listener = ET.Element('deleteVirtualListener', {'xmlns': TYPES_URN, 'id': balancer.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/deleteVirtualListener', method='POST', data=ET.tostring(delete_listener)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Destroy a load balancer (virtual listener) :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :return: ``True`` if the destroy was successful, otherwise ``False``. :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
destroy_balancer
gig-tech/libcloud
1,435
python
def destroy_balancer(self, balancer): '\n Destroy a load balancer (virtual listener)\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :return: ``True`` if the destroy was successful, otherwise ``False``.\n :rtype: ``bool``\n ' delete_listener = ET.Element('deleteVirtualListener', {'xmlns': TYPES_URN, 'id': balancer.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/deleteVirtualListener', method='POST', data=ET.tostring(delete_listener)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def destroy_balancer(self, balancer): '\n Destroy a load balancer (virtual listener)\n\n :param balancer: LoadBalancer which should be used\n :type balancer: :class:`LoadBalancer`\n\n :return: ``True`` if the destroy was successful, otherwise ``False``.\n :rtype: ``bool``\n ' delete_listener = ET.Element('deleteVirtualListener', {'xmlns': TYPES_URN, 'id': balancer.id}) result = self.connection.request_with_orgId_api_2('networkDomainVip/deleteVirtualListener', method='POST', data=ET.tostring(delete_listener)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Destroy a load balancer (virtual listener) :param balancer: LoadBalancer which should be used :type balancer: :class:`LoadBalancer` :return: ``True`` if the destroy was successful, otherwise ``False``. :rtype: ``bool``<|endoftext|>
c552f41a067657dd2ba71455461bf6192bee98263f170284709e2b84cfc8f6e3
def ex_set_current_network_domain(self, network_domain_id): '\n Set the network domain (part of the network) of the driver\n\n :param network_domain_id: ID of the network domain (required)\n :type network_domain_id: ``str``\n ' self.network_domain_id = network_domain_id
Set the network domain (part of the network) of the driver :param network_domain_id: ID of the network domain (required) :type network_domain_id: ``str``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_set_current_network_domain
gig-tech/libcloud
1,435
python
def ex_set_current_network_domain(self, network_domain_id): '\n Set the network domain (part of the network) of the driver\n\n :param network_domain_id: ID of the network domain (required)\n :type network_domain_id: ``str``\n ' self.network_domain_id = network_domain_id
def ex_set_current_network_domain(self, network_domain_id): '\n Set the network domain (part of the network) of the driver\n\n :param network_domain_id: ID of the network domain (required)\n :type network_domain_id: ``str``\n ' self.network_domain_id = network_domain_id<|docstring|>Set the network domain (part of the network) of the driver :param network_domain_id: ID of the network domain (required) :type network_domain_id: ``str``<|endoftext|>
cf40f134f88b6d3fdb071028843e5bdf327498833c00247442f13717181220da
def ex_get_current_network_domain(self): '\n Get the current network domain ID of the driver.\n\n :return: ID of the network domain\n :rtype: ``str``\n ' return self.network_domain_id
Get the current network domain ID of the driver. :return: ID of the network domain :rtype: ``str``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_current_network_domain
gig-tech/libcloud
1,435
python
def ex_get_current_network_domain(self): '\n Get the current network domain ID of the driver.\n\n :return: ID of the network domain\n :rtype: ``str``\n ' return self.network_domain_id
def ex_get_current_network_domain(self): '\n Get the current network domain ID of the driver.\n\n :return: ID of the network domain\n :rtype: ``str``\n ' return self.network_domain_id<|docstring|>Get the current network domain ID of the driver. :return: ID of the network domain :rtype: ``str``<|endoftext|>
228537ec9de290a68b3c39f193f014ffe67814a65551020f1869d09c7884eeaa
def ex_create_pool_member(self, pool, node, port=None): '\n Create a new member in an existing pool from an existing node\n\n :param pool: Instance of ``DimensionDataPool`` (required)\n :type pool: ``DimensionDataPool``\n\n :param node: Instance of ``DimensionDataVIPNode`` (required)\n :type node: ``DimensionDataVIPNode``\n\n :param port: Port the service will listen on\n :type port: ``str``\n\n :return: The node member, instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' create_pool_m = ET.Element('addPoolMember', {'xmlns': TYPES_URN}) ET.SubElement(create_pool_m, 'poolId').text = pool.id ET.SubElement(create_pool_m, 'nodeId').text = node.id if (port is not None): ET.SubElement(create_pool_m, 'port').text = str(port) ET.SubElement(create_pool_m, 'status').text = 'ENABLED' response = self.connection.request_with_orgId_api_2('networkDomainVip/addPoolMember', method='POST', data=ET.tostring(create_pool_m)).object member_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolMemberId'): member_id = info.get('value') if (info.get('name') == 'nodeName'): node_name = info.get('value') return DimensionDataPoolMember(id=member_id, name=node_name, status=State.RUNNING, ip=node.ip, port=port, node_id=node.id)
Create a new member in an existing pool from an existing node :param pool: Instance of ``DimensionDataPool`` (required) :type pool: ``DimensionDataPool`` :param node: Instance of ``DimensionDataVIPNode`` (required) :type node: ``DimensionDataVIPNode`` :param port: Port the service will listen on :type port: ``str`` :return: The node member, instance of ``DimensionDataPoolMember`` :rtype: ``DimensionDataPoolMember``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_create_pool_member
gig-tech/libcloud
1,435
python
def ex_create_pool_member(self, pool, node, port=None): '\n Create a new member in an existing pool from an existing node\n\n :param pool: Instance of ``DimensionDataPool`` (required)\n :type pool: ``DimensionDataPool``\n\n :param node: Instance of ``DimensionDataVIPNode`` (required)\n :type node: ``DimensionDataVIPNode``\n\n :param port: Port the service will listen on\n :type port: ``str``\n\n :return: The node member, instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' create_pool_m = ET.Element('addPoolMember', {'xmlns': TYPES_URN}) ET.SubElement(create_pool_m, 'poolId').text = pool.id ET.SubElement(create_pool_m, 'nodeId').text = node.id if (port is not None): ET.SubElement(create_pool_m, 'port').text = str(port) ET.SubElement(create_pool_m, 'status').text = 'ENABLED' response = self.connection.request_with_orgId_api_2('networkDomainVip/addPoolMember', method='POST', data=ET.tostring(create_pool_m)).object member_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolMemberId'): member_id = info.get('value') if (info.get('name') == 'nodeName'): node_name = info.get('value') return DimensionDataPoolMember(id=member_id, name=node_name, status=State.RUNNING, ip=node.ip, port=port, node_id=node.id)
def ex_create_pool_member(self, pool, node, port=None): '\n Create a new member in an existing pool from an existing node\n\n :param pool: Instance of ``DimensionDataPool`` (required)\n :type pool: ``DimensionDataPool``\n\n :param node: Instance of ``DimensionDataVIPNode`` (required)\n :type node: ``DimensionDataVIPNode``\n\n :param port: Port the service will listen on\n :type port: ``str``\n\n :return: The node member, instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' create_pool_m = ET.Element('addPoolMember', {'xmlns': TYPES_URN}) ET.SubElement(create_pool_m, 'poolId').text = pool.id ET.SubElement(create_pool_m, 'nodeId').text = node.id if (port is not None): ET.SubElement(create_pool_m, 'port').text = str(port) ET.SubElement(create_pool_m, 'status').text = 'ENABLED' response = self.connection.request_with_orgId_api_2('networkDomainVip/addPoolMember', method='POST', data=ET.tostring(create_pool_m)).object member_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolMemberId'): member_id = info.get('value') if (info.get('name') == 'nodeName'): node_name = info.get('value') return DimensionDataPoolMember(id=member_id, name=node_name, status=State.RUNNING, ip=node.ip, port=port, node_id=node.id)<|docstring|>Create a new member in an existing pool from an existing node :param pool: Instance of ``DimensionDataPool`` (required) :type pool: ``DimensionDataPool`` :param node: Instance of ``DimensionDataVIPNode`` (required) :type node: ``DimensionDataVIPNode`` :param port: Port the service will listen on :type port: ``str`` :return: The node member, instance of ``DimensionDataPoolMember`` :rtype: ``DimensionDataPoolMember``<|endoftext|>
dbf4d47d40dcef6eaa1fba64f7665af8bbd42bec5067255b81833998eb88ed27
def ex_create_node(self, network_domain_id, name, ip, ex_description, connection_limit=25000, connection_rate_limit=2000): '\n Create a new node\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param ip: IPv4 address of the node (required)\n :type ip: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :return: Instance of ``DimensionDataVIPNode``\n :rtype: ``DimensionDataVIPNode``\n ' create_node_elm = ET.Element('createNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'ipv4Address').text = ip ET.SubElement(create_node_elm, 'status').text = 'ENABLED' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object node_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'nodeId'): node_id = info.get('value') if (info.get('name') == 'name'): node_name = info.get('value') return DimensionDataVIPNode(id=node_id, name=node_name, status=State.RUNNING, ip=ip)
Create a new node :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the node (required) :type name: ``str`` :param ip: IPv4 address of the node (required) :type ip: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param connection_limit: Maximum number of concurrent connections per sec :type connection_limit: ``int`` :param connection_rate_limit: Maximum number of concurrent sessions :type connection_rate_limit: ``int`` :return: Instance of ``DimensionDataVIPNode`` :rtype: ``DimensionDataVIPNode``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_create_node
gig-tech/libcloud
1,435
python
def ex_create_node(self, network_domain_id, name, ip, ex_description, connection_limit=25000, connection_rate_limit=2000): '\n Create a new node\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param ip: IPv4 address of the node (required)\n :type ip: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :return: Instance of ``DimensionDataVIPNode``\n :rtype: ``DimensionDataVIPNode``\n ' create_node_elm = ET.Element('createNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'ipv4Address').text = ip ET.SubElement(create_node_elm, 'status').text = 'ENABLED' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object node_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'nodeId'): node_id = info.get('value') if (info.get('name') == 'name'): node_name = info.get('value') return DimensionDataVIPNode(id=node_id, name=node_name, status=State.RUNNING, ip=ip)
def ex_create_node(self, network_domain_id, name, ip, ex_description, connection_limit=25000, connection_rate_limit=2000): '\n Create a new node\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param ip: IPv4 address of the node (required)\n :type ip: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :return: Instance of ``DimensionDataVIPNode``\n :rtype: ``DimensionDataVIPNode``\n ' create_node_elm = ET.Element('createNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'ipv4Address').text = ip ET.SubElement(create_node_elm, 'status').text = 'ENABLED' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object node_id = None node_name = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'nodeId'): node_id = info.get('value') if (info.get('name') == 'name'): node_name = info.get('value') return DimensionDataVIPNode(id=node_id, name=node_name, status=State.RUNNING, ip=ip)<|docstring|>Create a new node :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the node (required) :type name: ``str`` :param ip: IPv4 address of the node (required) :type ip: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param connection_limit: Maximum number of concurrent connections per sec :type connection_limit: ``int`` :param connection_rate_limit: Maximum number of concurrent sessions :type connection_rate_limit: ``int`` :return: Instance of ``DimensionDataVIPNode`` :rtype: ``DimensionDataVIPNode``<|endoftext|>
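A sketch of ex_create_node, reusing the `driver` from the previous sketch; the network domain UUID and the IP address are placeholders.

# Placeholder network domain UUID; VIP nodes are created inside a network domain.
net_domain_id = '8cdfd607-f429-4df6-9352-162cfc0891be'
node = driver.ex_create_node(
    network_domain_id=net_domain_id,
    name='web_backend_01',
    ip='10.1.1.20',
    ex_description='Backend web server node',
    connection_limit=20000,
    connection_rate_limit=1000)
print(node.id, node.name, node.ip)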
29641b25face12efe1bf553a27e31d69de4093fb9277cb83d551bf6557dbb4a5
def ex_update_node(self, node): '\n Update the properties of a node\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'connectionLimit').text = str(node.connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(node.connection_rate_limit) self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object return node
Update the properties of a node :param pool: The instance of ``DimensionDataNode`` to update :type pool: ``DimensionDataNode`` :return: The instance of ``DimensionDataNode`` :rtype: ``DimensionDataNode``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_update_node
gig-tech/libcloud
1,435
python
def ex_update_node(self, node): '\n Update the properties of a node\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'connectionLimit').text = str(node.connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(node.connection_rate_limit) self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object return node
def ex_update_node(self, node): '\n Update the properties of a node\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'connectionLimit').text = str(node.connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(node.connection_rate_limit) self.connection.request_with_orgId_api_2(action='networkDomainVip/createNode', method='POST', data=ET.tostring(create_node_elm)).object return node<|docstring|>Update the properties of a node :param pool: The instance of ``DimensionDataNode`` to update :type pool: ``DimensionDataNode`` :return: The instance of ``DimensionDataNode`` :rtype: ``DimensionDataNode``<|endoftext|>
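A sketch of ex_update_node, continuing with the same driver; the node is re-fetched by its placeholder UUID, its limits are changed locally and the edit is submitted. Note that this snapshot builds an editNode payload but posts it to networkDomainVip/createNode, which looks like a slip for the editNode action used by ex_set_node_state below.

node = driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')  # placeholder UUID
node.connection_limit = 10000       # these two fields are what ex_update_node sends
node.connection_rate_limit = 500
driver.ex_update_node(node)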
3ed7edcc5502022308f675568e252f6e8966240fe43130deb1de6f7c9d703d16
def ex_set_node_state(self, node, enabled): '\n Change the state of a node (enable/disable)\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :param enabled: The target state of the node\n :type enabled: ``bool``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'status').text = ('ENABLED' if (enabled is True) else 'DISABLED') self.connection.request_with_orgId_api_2(action='networkDomainVip/editNode', method='POST', data=ET.tostring(create_node_elm)).object return node
Change the state of a node (enable/disable) :param pool: The instance of ``DimensionDataNode`` to update :type pool: ``DimensionDataNode`` :param enabled: The target state of the node :type enabled: ``bool`` :return: The instance of ``DimensionDataNode`` :rtype: ``DimensionDataNode``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_set_node_state
gig-tech/libcloud
1,435
python
def ex_set_node_state(self, node, enabled): '\n Change the state of a node (enable/disable)\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :param enabled: The target state of the node\n :type enabled: ``bool``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'status').text = ('ENABLED' if (enabled is True) else 'DISABLED') self.connection.request_with_orgId_api_2(action='networkDomainVip/editNode', method='POST', data=ET.tostring(create_node_elm)).object return node
def ex_set_node_state(self, node, enabled): '\n Change the state of a node (enable/disable)\n\n :param pool: The instance of ``DimensionDataNode`` to update\n :type pool: ``DimensionDataNode``\n\n :param enabled: The target state of the node\n :type enabled: ``bool``\n\n :return: The instance of ``DimensionDataNode``\n :rtype: ``DimensionDataNode``\n ' create_node_elm = ET.Element('editNode', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'status').text = ('ENABLED' if (enabled is True) else 'DISABLED') self.connection.request_with_orgId_api_2(action='networkDomainVip/editNode', method='POST', data=ET.tostring(create_node_elm)).object return node<|docstring|>Change the state of a node (enable/disable) :param pool: The instance of ``DimensionDataNode`` to update :type pool: ``DimensionDataNode`` :param enabled: The target state of the node :type enabled: ``bool`` :return: The instance of ``DimensionDataNode`` :rtype: ``DimensionDataNode``<|endoftext|>
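A sketch of ex_set_node_state, draining the node from the previous sketch and then re-enabling it.

driver.ex_set_node_state(node, enabled=False)   # take the node out of service
# ... perform maintenance on the backend ...
driver.ex_set_node_state(node, enabled=True)    # put it back in service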
4a0711f1dff730e693b5fd5156003d03645fe29bd3d7a63d1ac185d79934c471
def ex_create_pool(self, network_domain_id, name, balancer_method, ex_description, health_monitors=None, service_down_action='NONE', slow_ramp_time=30): '\n Create a new pool\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param balancer_method: The load balancer algorithm (required)\n :type balancer_method: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param health_monitors: A list of health monitors to use for the pool.\n :type health_monitors: ``list`` of\n :class:`DimensionDataDefaultHealthMonitor`\n\n :param service_down_action: What to do when node\n is unavailable NONE, DROP or RESELECT\n :type service_down_action: ``str``\n\n :param slow_ramp_time: Number of seconds to stagger ramp up of nodes\n :type slow_ramp_time: ``int``\n\n :return: Instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' name.replace(' ', '_') create_node_elm = ET.Element('createPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(balancer_method) if (health_monitors is not None): for monitor in health_monitors: ET.SubElement(create_node_elm, 'healthMonitorId').text = str(monitor.id) ET.SubElement(create_node_elm, 'serviceDownAction').text = service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createPool', method='POST', data=ET.tostring(create_node_elm)).object pool_id = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolId'): pool_id = info.get('value') return DimensionDataPool(id=pool_id, name=name, description=ex_description, status=State.RUNNING, load_balance_method=str(balancer_method), health_monitor_id=None, service_down_action=service_down_action, slow_ramp_time=str(slow_ramp_time))
Create a new pool :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the node (required) :type name: ``str`` :param balancer_method: The load balancer algorithm (required) :type balancer_method: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param health_monitors: A list of health monitors to use for the pool. :type health_monitors: ``list`` of :class:`DimensionDataDefaultHealthMonitor` :param service_down_action: What to do when node is unavailable NONE, DROP or RESELECT :type service_down_action: ``str`` :param slow_ramp_time: Number of seconds to stagger ramp up of nodes :type slow_ramp_time: ``int`` :return: Instance of ``DimensionDataPool`` :rtype: ``DimensionDataPool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_create_pool
gig-tech/libcloud
1,435
python
def ex_create_pool(self, network_domain_id, name, balancer_method, ex_description, health_monitors=None, service_down_action='NONE', slow_ramp_time=30): '\n Create a new pool\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param balancer_method: The load balancer algorithm (required)\n :type balancer_method: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param health_monitors: A list of health monitors to use for the pool.\n :type health_monitors: ``list`` of\n :class:`DimensionDataDefaultHealthMonitor`\n\n :param service_down_action: What to do when node\n is unavailable NONE, DROP or RESELECT\n :type service_down_action: ``str``\n\n :param slow_ramp_time: Number of seconds to stagger ramp up of nodes\n :type slow_ramp_time: ``int``\n\n :return: Instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' name.replace(' ', '_') create_node_elm = ET.Element('createPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(balancer_method) if (health_monitors is not None): for monitor in health_monitors: ET.SubElement(create_node_elm, 'healthMonitorId').text = str(monitor.id) ET.SubElement(create_node_elm, 'serviceDownAction').text = service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createPool', method='POST', data=ET.tostring(create_node_elm)).object pool_id = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolId'): pool_id = info.get('value') return DimensionDataPool(id=pool_id, name=name, description=ex_description, status=State.RUNNING, load_balance_method=str(balancer_method), health_monitor_id=None, service_down_action=service_down_action, slow_ramp_time=str(slow_ramp_time))
def ex_create_pool(self, network_domain_id, name, balancer_method, ex_description, health_monitors=None, service_down_action='NONE', slow_ramp_time=30): '\n Create a new pool\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the node (required)\n :type name: ``str``\n\n :param balancer_method: The load balancer algorithm (required)\n :type balancer_method: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param health_monitors: A list of health monitors to use for the pool.\n :type health_monitors: ``list`` of\n :class:`DimensionDataDefaultHealthMonitor`\n\n :param service_down_action: What to do when node\n is unavailable NONE, DROP or RESELECT\n :type service_down_action: ``str``\n\n :param slow_ramp_time: Number of seconds to stagger ramp up of nodes\n :type slow_ramp_time: ``int``\n\n :return: Instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' name.replace(' ', '_') create_node_elm = ET.Element('createPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(balancer_method) if (health_monitors is not None): for monitor in health_monitors: ET.SubElement(create_node_elm, 'healthMonitorId').text = str(monitor.id) ET.SubElement(create_node_elm, 'serviceDownAction').text = service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createPool', method='POST', data=ET.tostring(create_node_elm)).object pool_id = None for info in findall(response, 'info', TYPES_URN): if (info.get('name') == 'poolId'): pool_id = info.get('value') return DimensionDataPool(id=pool_id, name=name, description=ex_description, status=State.RUNNING, load_balance_method=str(balancer_method), health_monitor_id=None, service_down_action=service_down_action, slow_ramp_time=str(slow_ramp_time))<|docstring|>Create a new pool :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the node (required) :type name: ``str`` :param balancer_method: The load balancer algorithm (required) :type balancer_method: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param health_monitors: A list of health monitors to use for the pool. :type health_monitors: ``list`` of :class:`DimensionDataDefaultHealthMonitor` :param service_down_action: What to do when node is unavailable NONE, DROP or RESELECT :type service_down_action: ``str`` :param slow_ramp_time: Number of seconds to stagger ramp up of nodes :type slow_ramp_time: ``int`` :return: Instance of ``DimensionDataPool`` :rtype: ``DimensionDataPool``<|endoftext|>
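A sketch of ex_create_pool, reusing the driver and the placeholder network domain UUID from the ex_create_node sketch; 'ROUND_ROBIN' is assumed to be an accepted loadBalanceMethod value, and the health monitor comes from ex_get_default_health_monitors (listed further down). Also note that the `name.replace(' ', '_')` call in the body has no effect, since its result is never assigned.

monitors = driver.ex_get_default_health_monitors(net_domain_id)
pool = driver.ex_create_pool(
    network_domain_id=net_domain_id,
    name='web_pool',
    balancer_method='ROUND_ROBIN',        # assumed valid method name
    ex_description='Pool for web backend nodes',
    health_monitors=monitors[:1],         # attach one default monitor
    service_down_action='RESELECT',
    slow_ramp_time=10)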
694d3207c7ea03fd85d73f76999c4b9ebc2e6d792ce436ae7b33c990dd76eb0e
def ex_create_virtual_listener(self, network_domain_id, name, ex_description, port=None, pool=None, listener_ip_address=None, persistence_profile=None, fallback_persistence_profile=None, irule=None, protocol='TCP', connection_limit=25000, connection_rate_limit=2000, source_port_preservation='PRESERVE'): "\n Create a new virtual listener (load balancer)\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the listener (required)\n :type name: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param pool: The pool to use for the listener\n :type pool: :class:`DimensionDataPool`\n\n :param listener_ip_address: The IPv4 Address of the virtual listener\n :type listener_ip_address: ``str``\n\n :param persistence_profile: Persistence profile\n :type persistence_profile: :class:`DimensionDataPersistenceProfile`\n\n :param fallback_persistence_profile: Fallback persistence profile\n :type fallback_persistence_profile:\n :class:`DimensionDataPersistenceProfile`\n\n :param irule: The iRule to apply\n :type irule: :class:`DimensionDataDefaultiRule`\n\n :param protocol: For STANDARD type, ANY, TCP or UDP\n for PERFORMANCE_LAYER_4 choice of ANY, TCP, UDP, HTTP\n :type protcol: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :param source_port_preservation: Choice of PRESERVE,\n PRESERVE_STRICT or CHANGE\n :type source_port_preservation: ``str``\n\n :return: Instance of the listener\n :rtype: ``DimensionDataVirtualListener``\n " if ((port == 80) or (port == 443)): listener_type = 'PERFORMANCE_LAYER_4' else: listener_type = 'STANDARD' create_node_elm = ET.Element('createVirtualListener', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'type').text = listener_type ET.SubElement(create_node_elm, 'protocol').text = protocol if (listener_ip_address is not None): ET.SubElement(create_node_elm, 'listenerIpAddress').text = str(listener_ip_address) if (port is not None): ET.SubElement(create_node_elm, 'port').text = str(port) ET.SubElement(create_node_elm, 'enabled').text = 'true' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) ET.SubElement(create_node_elm, 'sourcePortPreservation').text = source_port_preservation if (pool is not None): ET.SubElement(create_node_elm, 'poolId').text = pool.id if (persistence_profile is not None): ET.SubElement(create_node_elm, 'persistenceProfileId').text = persistence_profile.id if (fallback_persistence_profile is not None): ET.SubElement(create_node_elm, 'fallbackPersistenceProfileId').text = fallback_persistence_profile.id if (irule is not None): ET.SubElement(create_node_elm, 'iruleId').text = irule.id response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createVirtualListener', method='POST', data=ET.tostring(create_node_elm)).object virtual_listener_id = None virtual_listener_ip = None for info in findall(response, 
'info', TYPES_URN): if (info.get('name') == 'virtualListenerId'): virtual_listener_id = info.get('value') if (info.get('name') == 'listenerIpAddress'): virtual_listener_ip = info.get('value') return DimensionDataVirtualListener(id=virtual_listener_id, name=name, ip=virtual_listener_ip, status=State.RUNNING)
Create a new virtual listener (load balancer) :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the listener (required) :type name: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param port: An integer in the range of 1-65535. If not supplied, it will be taken to mean 'Any Port' :type port: ``int`` :param pool: The pool to use for the listener :type pool: :class:`DimensionDataPool` :param listener_ip_address: The IPv4 Address of the virtual listener :type listener_ip_address: ``str`` :param persistence_profile: Persistence profile :type persistence_profile: :class:`DimensionDataPersistenceProfile` :param fallback_persistence_profile: Fallback persistence profile :type fallback_persistence_profile: :class:`DimensionDataPersistenceProfile` :param irule: The iRule to apply :type irule: :class:`DimensionDataDefaultiRule` :param protocol: For STANDARD type, ANY, TCP or UDP for PERFORMANCE_LAYER_4 choice of ANY, TCP, UDP, HTTP :type protcol: ``str`` :param connection_limit: Maximum number of concurrent connections per sec :type connection_limit: ``int`` :param connection_rate_limit: Maximum number of concurrent sessions :type connection_rate_limit: ``int`` :param source_port_preservation: Choice of PRESERVE, PRESERVE_STRICT or CHANGE :type source_port_preservation: ``str`` :return: Instance of the listener :rtype: ``DimensionDataVirtualListener``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_create_virtual_listener
gig-tech/libcloud
1,435
python
def ex_create_virtual_listener(self, network_domain_id, name, ex_description, port=None, pool=None, listener_ip_address=None, persistence_profile=None, fallback_persistence_profile=None, irule=None, protocol='TCP', connection_limit=25000, connection_rate_limit=2000, source_port_preservation='PRESERVE'): "\n Create a new virtual listener (load balancer)\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the listener (required)\n :type name: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param pool: The pool to use for the listener\n :type pool: :class:`DimensionDataPool`\n\n :param listener_ip_address: The IPv4 Address of the virtual listener\n :type listener_ip_address: ``str``\n\n :param persistence_profile: Persistence profile\n :type persistence_profile: :class:`DimensionDataPersistenceProfile`\n\n :param fallback_persistence_profile: Fallback persistence profile\n :type fallback_persistence_profile:\n :class:`DimensionDataPersistenceProfile`\n\n :param irule: The iRule to apply\n :type irule: :class:`DimensionDataDefaultiRule`\n\n :param protocol: For STANDARD type, ANY, TCP or UDP\n for PERFORMANCE_LAYER_4 choice of ANY, TCP, UDP, HTTP\n :type protcol: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :param source_port_preservation: Choice of PRESERVE,\n PRESERVE_STRICT or CHANGE\n :type source_port_preservation: ``str``\n\n :return: Instance of the listener\n :rtype: ``DimensionDataVirtualListener``\n " if ((port == 80) or (port == 443)): listener_type = 'PERFORMANCE_LAYER_4' else: listener_type = 'STANDARD' create_node_elm = ET.Element('createVirtualListener', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'type').text = listener_type ET.SubElement(create_node_elm, 'protocol').text = protocol if (listener_ip_address is not None): ET.SubElement(create_node_elm, 'listenerIpAddress').text = str(listener_ip_address) if (port is not None): ET.SubElement(create_node_elm, 'port').text = str(port) ET.SubElement(create_node_elm, 'enabled').text = 'true' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) ET.SubElement(create_node_elm, 'sourcePortPreservation').text = source_port_preservation if (pool is not None): ET.SubElement(create_node_elm, 'poolId').text = pool.id if (persistence_profile is not None): ET.SubElement(create_node_elm, 'persistenceProfileId').text = persistence_profile.id if (fallback_persistence_profile is not None): ET.SubElement(create_node_elm, 'fallbackPersistenceProfileId').text = fallback_persistence_profile.id if (irule is not None): ET.SubElement(create_node_elm, 'iruleId').text = irule.id response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createVirtualListener', method='POST', data=ET.tostring(create_node_elm)).object virtual_listener_id = None virtual_listener_ip = None for info in findall(response, 
'info', TYPES_URN): if (info.get('name') == 'virtualListenerId'): virtual_listener_id = info.get('value') if (info.get('name') == 'listenerIpAddress'): virtual_listener_ip = info.get('value') return DimensionDataVirtualListener(id=virtual_listener_id, name=name, ip=virtual_listener_ip, status=State.RUNNING)
def ex_create_virtual_listener(self, network_domain_id, name, ex_description, port=None, pool=None, listener_ip_address=None, persistence_profile=None, fallback_persistence_profile=None, irule=None, protocol='TCP', connection_limit=25000, connection_rate_limit=2000, source_port_preservation='PRESERVE'): "\n Create a new virtual listener (load balancer)\n\n :param network_domain_id: Network Domain ID (required)\n :type name: ``str``\n\n :param name: name of the listener (required)\n :type name: ``str``\n\n :param ex_description: Description of the node (required)\n :type ex_description: ``str``\n\n :param port: An integer in the range of 1-65535. If not supplied,\n it will be taken to mean 'Any Port'\n :type port: ``int``\n\n :param pool: The pool to use for the listener\n :type pool: :class:`DimensionDataPool`\n\n :param listener_ip_address: The IPv4 Address of the virtual listener\n :type listener_ip_address: ``str``\n\n :param persistence_profile: Persistence profile\n :type persistence_profile: :class:`DimensionDataPersistenceProfile`\n\n :param fallback_persistence_profile: Fallback persistence profile\n :type fallback_persistence_profile:\n :class:`DimensionDataPersistenceProfile`\n\n :param irule: The iRule to apply\n :type irule: :class:`DimensionDataDefaultiRule`\n\n :param protocol: For STANDARD type, ANY, TCP or UDP\n for PERFORMANCE_LAYER_4 choice of ANY, TCP, UDP, HTTP\n :type protcol: ``str``\n\n :param connection_limit: Maximum number\n of concurrent connections per sec\n :type connection_limit: ``int``\n\n :param connection_rate_limit: Maximum number of concurrent sessions\n :type connection_rate_limit: ``int``\n\n :param source_port_preservation: Choice of PRESERVE,\n PRESERVE_STRICT or CHANGE\n :type source_port_preservation: ``str``\n\n :return: Instance of the listener\n :rtype: ``DimensionDataVirtualListener``\n " if ((port == 80) or (port == 443)): listener_type = 'PERFORMANCE_LAYER_4' else: listener_type = 'STANDARD' create_node_elm = ET.Element('createVirtualListener', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'networkDomainId').text = network_domain_id ET.SubElement(create_node_elm, 'name').text = name ET.SubElement(create_node_elm, 'description').text = str(ex_description) ET.SubElement(create_node_elm, 'type').text = listener_type ET.SubElement(create_node_elm, 'protocol').text = protocol if (listener_ip_address is not None): ET.SubElement(create_node_elm, 'listenerIpAddress').text = str(listener_ip_address) if (port is not None): ET.SubElement(create_node_elm, 'port').text = str(port) ET.SubElement(create_node_elm, 'enabled').text = 'true' ET.SubElement(create_node_elm, 'connectionLimit').text = str(connection_limit) ET.SubElement(create_node_elm, 'connectionRateLimit').text = str(connection_rate_limit) ET.SubElement(create_node_elm, 'sourcePortPreservation').text = source_port_preservation if (pool is not None): ET.SubElement(create_node_elm, 'poolId').text = pool.id if (persistence_profile is not None): ET.SubElement(create_node_elm, 'persistenceProfileId').text = persistence_profile.id if (fallback_persistence_profile is not None): ET.SubElement(create_node_elm, 'fallbackPersistenceProfileId').text = fallback_persistence_profile.id if (irule is not None): ET.SubElement(create_node_elm, 'iruleId').text = irule.id response = self.connection.request_with_orgId_api_2(action='networkDomainVip/createVirtualListener', method='POST', data=ET.tostring(create_node_elm)).object virtual_listener_id = None virtual_listener_ip = None for info in findall(response, 
'info', TYPES_URN): if (info.get('name') == 'virtualListenerId'): virtual_listener_id = info.get('value') if (info.get('name') == 'listenerIpAddress'): virtual_listener_ip = info.get('value') return DimensionDataVirtualListener(id=virtual_listener_id, name=name, ip=virtual_listener_ip, status=State.RUNNING)<|docstring|>Create a new virtual listener (load balancer) :param network_domain_id: Network Domain ID (required) :type name: ``str`` :param name: name of the listener (required) :type name: ``str`` :param ex_description: Description of the node (required) :type ex_description: ``str`` :param port: An integer in the range of 1-65535. If not supplied, it will be taken to mean 'Any Port' :type port: ``int`` :param pool: The pool to use for the listener :type pool: :class:`DimensionDataPool` :param listener_ip_address: The IPv4 Address of the virtual listener :type listener_ip_address: ``str`` :param persistence_profile: Persistence profile :type persistence_profile: :class:`DimensionDataPersistenceProfile` :param fallback_persistence_profile: Fallback persistence profile :type fallback_persistence_profile: :class:`DimensionDataPersistenceProfile` :param irule: The iRule to apply :type irule: :class:`DimensionDataDefaultiRule` :param protocol: For STANDARD type, ANY, TCP or UDP for PERFORMANCE_LAYER_4 choice of ANY, TCP, UDP, HTTP :type protcol: ``str`` :param connection_limit: Maximum number of concurrent connections per sec :type connection_limit: ``int`` :param connection_rate_limit: Maximum number of concurrent sessions :type connection_rate_limit: ``int`` :param source_port_preservation: Choice of PRESERVE, PRESERVE_STRICT or CHANGE :type source_port_preservation: ``str`` :return: Instance of the listener :rtype: ``DimensionDataVirtualListener``<|endoftext|>
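A sketch of ex_create_virtual_listener, exposing the pool from the previous sketch; the listener IP is left unset so the API allocates one, and port 80 selects the PERFORMANCE_LAYER_4 type per the branch at the top of the method.

listener = driver.ex_create_virtual_listener(
    network_domain_id=net_domain_id,
    name='web_listener',
    ex_description='Public entry point for web_pool',
    port=80,                              # 80 or 443 selects PERFORMANCE_LAYER_4
    pool=pool,
    protocol='HTTP',
    source_port_preservation='PRESERVE')
print(listener.id, listener.ip)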
eb1adcd6851919c7ef18c5f7de97ff447944e319b85eb27a49169bcbc0e1e3ef
def ex_get_pools(self, ex_network_domain_id=None): '\n Get all of the pools inside the current geography or\n in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns a ``list`` of type ``DimensionDataPool``\n :rtype: ``list`` of ``DimensionDataPool``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} pools = self.connection.request_with_orgId_api_2('networkDomainVip/pool', params=params).object return self._to_pools(pools)
Get all of the pools inside the current geography or in given network. :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :return: Returns a ``list`` of type ``DimensionDataPool`` :rtype: ``list`` of ``DimensionDataPool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_pools
gig-tech/libcloud
1,435
python
def ex_get_pools(self, ex_network_domain_id=None): '\n Get all of the pools inside the current geography or\n in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns a ``list`` of type ``DimensionDataPool``\n :rtype: ``list`` of ``DimensionDataPool``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} pools = self.connection.request_with_orgId_api_2('networkDomainVip/pool', params=params).object return self._to_pools(pools)
def ex_get_pools(self, ex_network_domain_id=None): '\n Get all of the pools inside the current geography or\n in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns a ``list`` of type ``DimensionDataPool``\n :rtype: ``list`` of ``DimensionDataPool``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} pools = self.connection.request_with_orgId_api_2('networkDomainVip/pool', params=params).object return self._to_pools(pools)<|docstring|>Get all of the pools inside the current geography or in given network. :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :return: Returns a ``list`` of type ``DimensionDataPool`` :rtype: ``list`` of ``DimensionDataPool``<|endoftext|>
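A sketch of ex_get_pools, listing pools for the whole geography and then scoped to the placeholder network domain.

all_pools = driver.ex_get_pools()
domain_pools = driver.ex_get_pools(ex_network_domain_id=net_domain_id)
for p in domain_pools:
    print(p.id, p.name, p.load_balance_method)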
3f1d49d5951ef18577b178ddf013863baef6d52a0173f5026078ccfc2a261c85
def ex_get_pool(self, pool_id): '\n Get a specific pool inside the current geography\n\n :param pool_id: The identifier of the pool\n :type pool_id: ``str``\n\n :return: Returns an instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' pool = self.connection.request_with_orgId_api_2(('networkDomainVip/pool/%s' % pool_id)).object return self._to_pool(pool)
Get a specific pool inside the current geography :param pool_id: The identifier of the pool :type pool_id: ``str`` :return: Returns an instance of ``DimensionDataPool`` :rtype: ``DimensionDataPool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_pool
gig-tech/libcloud
1,435
python
def ex_get_pool(self, pool_id): '\n Get a specific pool inside the current geography\n\n :param pool_id: The identifier of the pool\n :type pool_id: ``str``\n\n :return: Returns an instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' pool = self.connection.request_with_orgId_api_2(('networkDomainVip/pool/%s' % pool_id)).object return self._to_pool(pool)
def ex_get_pool(self, pool_id): '\n Get a specific pool inside the current geography\n\n :param pool_id: The identifier of the pool\n :type pool_id: ``str``\n\n :return: Returns an instance of ``DimensionDataPool``\n :rtype: ``DimensionDataPool``\n ' pool = self.connection.request_with_orgId_api_2(('networkDomainVip/pool/%s' % pool_id)).object return self._to_pool(pool)<|docstring|>Get a specific pool inside the current geography :param pool_id: The identifier of the pool :type pool_id: ``str`` :return: Returns an instance of ``DimensionDataPool`` :rtype: ``DimensionDataPool``<|endoftext|>
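Fetching a single pool by UUID mirrors the listing call above; the identifier is a placeholder.

pool = driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
print(pool.name, pool.status)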
28cc9667b997d4fd1774a97f7a4d76d978267d7407fb0f447f5c3d01b82397c4
def ex_update_pool(self, pool): '\n Update the properties of an existing pool\n only method, serviceDownAction and slowRampTime are updated\n\n :param pool: The instance of ``DimensionDataPool`` to update\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' create_node_elm = ET.Element('editPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(pool.load_balance_method) ET.SubElement(create_node_elm, 'serviceDownAction').text = pool.service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(pool.slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/editPool', method='POST', data=ET.tostring(create_node_elm)).object response_code = findtext(response, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Update the properties of an existing pool only method, serviceDownAction and slowRampTime are updated :param pool: The instance of ``DimensionDataPool`` to update :type pool: ``DimensionDataPool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_update_pool
gig-tech/libcloud
1,435
python
def ex_update_pool(self, pool): '\n Update the properties of an existing pool\n only method, serviceDownAction and slowRampTime are updated\n\n :param pool: The instance of ``DimensionDataPool`` to update\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' create_node_elm = ET.Element('editPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(pool.load_balance_method) ET.SubElement(create_node_elm, 'serviceDownAction').text = pool.service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(pool.slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/editPool', method='POST', data=ET.tostring(create_node_elm)).object response_code = findtext(response, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def ex_update_pool(self, pool): '\n Update the properties of an existing pool\n only method, serviceDownAction and slowRampTime are updated\n\n :param pool: The instance of ``DimensionDataPool`` to update\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' create_node_elm = ET.Element('editPool', {'xmlns': TYPES_URN}) ET.SubElement(create_node_elm, 'loadBalanceMethod').text = str(pool.load_balance_method) ET.SubElement(create_node_elm, 'serviceDownAction').text = pool.service_down_action ET.SubElement(create_node_elm, 'slowRampTime').text = str(pool.slow_ramp_time) response = self.connection.request_with_orgId_api_2(action='networkDomainVip/editPool', method='POST', data=ET.tostring(create_node_elm)).object response_code = findtext(response, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Update the properties of an existing pool only method, serviceDownAction and slowRampTime are updated :param pool: The instance of ``DimensionDataPool`` to update :type pool: ``DimensionDataPool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``<|endoftext|>
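A sketch of ex_update_pool; per its docstring only the load-balance method, serviceDownAction and slowRampTime are sent, so those are the attributes changed on the local object first. 'LEAST_CONNECTIONS_MEMBER' is assumed to be an accepted method name.

pool.load_balance_method = 'LEAST_CONNECTIONS_MEMBER'  # assumed valid value
pool.service_down_action = 'DROP'
pool.slow_ramp_time = 20
if driver.ex_update_pool(pool):
    print('pool update accepted')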
3da36720c64440cb046b01a117bef95024389f85f98d37db39bd76c3047ba26d
def ex_destroy_pool(self, pool): '\n Destroy an existing pool\n\n :param pool: The instance of ``DimensionDataPool`` to destroy\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deletePool', {'xmlns': TYPES_URN, 'id': pool.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deletePool', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Destroy an existing pool :param pool: The instance of ``DimensionDataPool`` to destroy :type pool: ``DimensionDataPool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_destroy_pool
gig-tech/libcloud
1,435
python
def ex_destroy_pool(self, pool): '\n Destroy an existing pool\n\n :param pool: The instance of ``DimensionDataPool`` to destroy\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deletePool', {'xmlns': TYPES_URN, 'id': pool.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deletePool', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def ex_destroy_pool(self, pool): '\n Destroy an existing pool\n\n :param pool: The instance of ``DimensionDataPool`` to destroy\n :type pool: ``DimensionDataPool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deletePool', {'xmlns': TYPES_URN, 'id': pool.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deletePool', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Destroy an existing pool :param pool: The instance of ``DimensionDataPool`` to destroy :type pool: ``DimensionDataPool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``<|endoftext|>
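A sketch of ex_destroy_pool, deleting the pool object and checking the boolean result.

if driver.ex_destroy_pool(pool):
    print('pool deletion accepted')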
36c98b9aaf37acb5b6656daf8225609091ae4c68783af3e254a8b7a32fda5aa0
def ex_get_pool_members(self, pool_id): '\n Get the members of a pool\n\n :param pool: The instance of a pool\n :type pool: ``DimensionDataPool``\n\n :return: Returns an ``list`` of ``DimensionDataPoolMember``\n :rtype: ``list`` of ``DimensionDataPoolMember``\n ' members = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember?poolId=%s' % pool_id)).object return self._to_members(members)
Get the members of a pool :param pool: The instance of a pool :type pool: ``DimensionDataPool`` :return: Returns an ``list`` of ``DimensionDataPoolMember`` :rtype: ``list`` of ``DimensionDataPoolMember``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_pool_members
gig-tech/libcloud
1,435
python
def ex_get_pool_members(self, pool_id): '\n Get the members of a pool\n\n :param pool: The instance of a pool\n :type pool: ``DimensionDataPool``\n\n :return: Returns an ``list`` of ``DimensionDataPoolMember``\n :rtype: ``list`` of ``DimensionDataPoolMember``\n ' members = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember?poolId=%s' % pool_id)).object return self._to_members(members)
def ex_get_pool_members(self, pool_id): '\n Get the members of a pool\n\n :param pool: The instance of a pool\n :type pool: ``DimensionDataPool``\n\n :return: Returns an ``list`` of ``DimensionDataPoolMember``\n :rtype: ``list`` of ``DimensionDataPoolMember``\n ' members = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember?poolId=%s' % pool_id)).object return self._to_members(members)<|docstring|>Get the members of a pool :param pool: The instance of a pool :type pool: ``DimensionDataPool`` :return: Returns an ``list`` of ``DimensionDataPoolMember`` :rtype: ``list`` of ``DimensionDataPoolMember``<|endoftext|>
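A sketch of ex_get_pool_members; note it takes the pool's UUID rather than the pool object.

members = driver.ex_get_pool_members(pool.id)
for m in members:
    print(m.id, m.ip, m.port, m.status)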
559266c9224d537d920fdb190353060d37eb2a7c0a26f126c68c724e5e26b4bb
def ex_get_pool_member(self, pool_member_id): '\n Get a specific member of a pool\n\n :param pool: The id of a pool member\n :type pool: ``str``\n\n :return: Returns an instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' member = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember/%s' % pool_member_id)).object return self._to_member(member)
Get a specific member of a pool :param pool: The id of a pool member :type pool: ``str`` :return: Returns an instance of ``DimensionDataPoolMember`` :rtype: ``DimensionDataPoolMember``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_pool_member
gig-tech/libcloud
1,435
python
def ex_get_pool_member(self, pool_member_id): '\n Get a specific member of a pool\n\n :param pool: The id of a pool member\n :type pool: ``str``\n\n :return: Returns an instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' member = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember/%s' % pool_member_id)).object return self._to_member(member)
def ex_get_pool_member(self, pool_member_id): '\n Get a specific member of a pool\n\n :param pool: The id of a pool member\n :type pool: ``str``\n\n :return: Returns an instance of ``DimensionDataPoolMember``\n :rtype: ``DimensionDataPoolMember``\n ' member = self.connection.request_with_orgId_api_2(('networkDomainVip/poolMember/%s' % pool_member_id)).object return self._to_member(member)<|docstring|>Get a specific member of a pool :param pool: The id of a pool member :type pool: ``str`` :return: Returns an instance of ``DimensionDataPoolMember`` :rtype: ``DimensionDataPoolMember``<|endoftext|>
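Fetching a single pool member by its UUID (a placeholder value is shown).

member = driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
print(member.name, member.port)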
82a71a9d78a39e8a1c5ad8aa824603a81342b405c4057124c54181bcb20e622c
def ex_destroy_pool_member(self, member, destroy_node=False): '\n Destroy a specific member of a pool\n\n :param pool: The instance of a pool member\n :type pool: ``DimensionDataPoolMember``\n\n :param destroy_node: Also destroy the associated node\n :type destroy_node: ``bool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/removePoolMember', method='POST', data=ET.tostring(destroy_request)).object if ((member.node_id is not None) and (destroy_node is True)): return self.ex_destroy_node(member.node_id) else: response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Destroy a specific member of a pool :param pool: The instance of a pool member :type pool: ``DimensionDataPoolMember`` :param destroy_node: Also destroy the associated node :type destroy_node: ``bool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_destroy_pool_member
gig-tech/libcloud
1,435
python
def ex_destroy_pool_member(self, member, destroy_node=False): '\n Destroy a specific member of a pool\n\n :param pool: The instance of a pool member\n :type pool: ``DimensionDataPoolMember``\n\n :param destroy_node: Also destroy the associated node\n :type destroy_node: ``bool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/removePoolMember', method='POST', data=ET.tostring(destroy_request)).object if ((member.node_id is not None) and (destroy_node is True)): return self.ex_destroy_node(member.node_id) else: response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def ex_destroy_pool_member(self, member, destroy_node=False): '\n Destroy a specific member of a pool\n\n :param pool: The instance of a pool member\n :type pool: ``DimensionDataPoolMember``\n\n :param destroy_node: Also destroy the associated node\n :type destroy_node: ``bool``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('removePoolMember', {'xmlns': TYPES_URN, 'id': member.id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/removePoolMember', method='POST', data=ET.tostring(destroy_request)).object if ((member.node_id is not None) and (destroy_node is True)): return self.ex_destroy_node(member.node_id) else: response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Destroy a specific member of a pool :param pool: The instance of a pool member :type pool: ``DimensionDataPoolMember`` :param destroy_node: Also destroy the associated node :type destroy_node: ``bool`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``<|endoftext|>
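A sketch of ex_destroy_pool_member; passing destroy_node=True would also delete the underlying VIP node through ex_destroy_node.

driver.ex_destroy_pool_member(member, destroy_node=False)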
293a9fd962dafa0a2a4fbf8ef9da3d4b5693867639cdeccf01dc07ad4e7697e8
def ex_get_nodes(self, ex_network_domain_id=None): '\n Get the nodes within this geography or in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns an ``list`` of ``DimensionDataVIPNode``\n :rtype: ``list`` of ``DimensionDataVIPNode``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} nodes = self.connection.request_with_orgId_api_2('networkDomainVip/node', params=params).object return self._to_nodes(nodes)
Get the nodes within this geography or in given network. :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :return: Returns an ``list`` of ``DimensionDataVIPNode`` :rtype: ``list`` of ``DimensionDataVIPNode``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_nodes
gig-tech/libcloud
1,435
python
def ex_get_nodes(self, ex_network_domain_id=None): '\n Get the nodes within this geography or in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns an ``list`` of ``DimensionDataVIPNode``\n :rtype: ``list`` of ``DimensionDataVIPNode``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} nodes = self.connection.request_with_orgId_api_2('networkDomainVip/node', params=params).object return self._to_nodes(nodes)
def ex_get_nodes(self, ex_network_domain_id=None): '\n Get the nodes within this geography or in given network.\n\n :param ex_network_domain_id: UUID of Network Domain\n if not None returns only balancers in the given network\n if None then returns all pools for the organization\n :type ex_network_domain_id: ``str``\n\n :return: Returns an ``list`` of ``DimensionDataVIPNode``\n :rtype: ``list`` of ``DimensionDataVIPNode``\n ' params = None if (ex_network_domain_id is not None): params = {'networkDomainId': ex_network_domain_id} nodes = self.connection.request_with_orgId_api_2('networkDomainVip/node', params=params).object return self._to_nodes(nodes)<|docstring|>Get the nodes within this geography or in given network. :param ex_network_domain_id: UUID of Network Domain if not None returns only balancers in the given network if None then returns all pools for the organization :type ex_network_domain_id: ``str`` :return: Returns an ``list`` of ``DimensionDataVIPNode`` :rtype: ``list`` of ``DimensionDataVIPNode``<|endoftext|>
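A sketch of ex_get_nodes, scoped to the placeholder network domain used above.

nodes = driver.ex_get_nodes(ex_network_domain_id=net_domain_id)
for n in nodes:
    print(n.id, n.name, n.ip)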
916a3aa83ca53dbaa1d9c1450f105f6cc90d154ff9a2ff0c3c158f12ded8fc1b
def ex_get_node(self, node_id): '\n Get the node specified by node_id\n\n :return: Returns an instance of ``DimensionDataVIPNode``\n :rtype: Instance of ``DimensionDataVIPNode``\n ' nodes = self.connection.request_with_orgId_api_2(('networkDomainVip/node/%s' % node_id)).object return self._to_node(nodes)
Get the node specified by node_id :return: Returns an instance of ``DimensionDataVIPNode`` :rtype: Instance of ``DimensionDataVIPNode``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_node
gig-tech/libcloud
1,435
python
def ex_get_node(self, node_id): '\n Get the node specified by node_id\n\n :return: Returns an instance of ``DimensionDataVIPNode``\n :rtype: Instance of ``DimensionDataVIPNode``\n ' nodes = self.connection.request_with_orgId_api_2(('networkDomainVip/node/%s' % node_id)).object return self._to_node(nodes)
def ex_get_node(self, node_id): '\n Get the node specified by node_id\n\n :return: Returns an instance of ``DimensionDataVIPNode``\n :rtype: Instance of ``DimensionDataVIPNode``\n ' nodes = self.connection.request_with_orgId_api_2(('networkDomainVip/node/%s' % node_id)).object return self._to_node(nodes)<|docstring|>Get the node specified by node_id :return: Returns an instance of ``DimensionDataVIPNode`` :rtype: Instance of ``DimensionDataVIPNode``<|endoftext|>
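Fetching a single VIP node by UUID, as also done in the ex_update_node sketch above; the identifier is a placeholder.

node = driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
print(node.name, node.ip)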
52ec8abcb973db68b35116223856890a2b6aa0e3080583f471cf4e670a7e78a6
def ex_destroy_node(self, node_id): '\n Destroy a specific node\n\n :param node_id: The ID of of a ``DimensionDataVIPNode``\n :type node_id: ``str``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deleteNode', {'xmlns': TYPES_URN, 'id': node_id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deleteNode', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
Destroy a specific node :param node_id: The ID of of a ``DimensionDataVIPNode`` :type node_id: ``str`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``
libcloud/loadbalancer/drivers/dimensiondata.py
ex_destroy_node
gig-tech/libcloud
1,435
python
def ex_destroy_node(self, node_id): '\n Destroy a specific node\n\n :param node_id: The ID of of a ``DimensionDataVIPNode``\n :type node_id: ``str``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deleteNode', {'xmlns': TYPES_URN, 'id': node_id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deleteNode', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])
def ex_destroy_node(self, node_id): '\n Destroy a specific node\n\n :param node_id: The ID of of a ``DimensionDataVIPNode``\n :type node_id: ``str``\n\n :return: ``True`` for success, ``False`` for failure\n :rtype: ``bool``\n ' destroy_request = ET.Element('deleteNode', {'xmlns': TYPES_URN, 'id': node_id}) result = self.connection.request_with_orgId_api_2(action='networkDomainVip/deleteNode', method='POST', data=ET.tostring(destroy_request)).object response_code = findtext(result, 'responseCode', TYPES_URN) return (response_code in ['IN_PROGRESS', 'OK'])<|docstring|>Destroy a specific node :param node_id: The ID of of a ``DimensionDataVIPNode`` :type node_id: ``str`` :return: ``True`` for success, ``False`` for failure :rtype: ``bool``<|endoftext|>
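A sketch of ex_destroy_node; it takes the node's UUID rather than the object. The CaaS API is generally expected to reject deletion while the node is still attached to a pool, so remove its pool members first; that ordering is an assumption rather than something shown in this file.

driver.ex_destroy_node(node.id)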
134565b9731d96fceab7635ad824ac650077ed783ec68f7a6250a3fe54f1431c
def ex_wait_for_state(self, state, func, poll_interval=2, timeout=60, *args, **kwargs): '\n Wait for the function which returns an instance\n with a status field matching the desired state\n\n Keep polling func until one of the desired states is matched\n\n :param state: Either the desired state (`str`) or a `list` of states\n :type state: ``str`` or ``list``\n\n :param func: The function to call, e.g. ex_get_vlan\n :type func: ``function``\n\n :param poll_interval: The number of seconds to wait between checks\n :type poll_interval: `int`\n\n :param timeout: The total number of seconds to wait to reach a state\n :type timeout: `int`\n\n :param args: The positional arguments for func\n :type args: Positional arguments\n\n :param kwargs: The keyword arguments for func\n :type kwargs: Keyword arguments\n ' return self.connection.wait_for_state(state, func, poll_interval, timeout, *args, **kwargs)
Wait for the function which returns an instance with a status field matching the desired state Keep polling func until one of the desired states is matched :param state: Either the desired state (`str`) or a `list` of states :type state: ``str`` or ``list`` :param func: The function to call, e.g. ex_get_vlan :type func: ``function`` :param poll_interval: The number of seconds to wait between checks :type poll_interval: `int` :param timeout: The total number of seconds to wait to reach a state :type timeout: `int` :param args: The positional arguments for func :type args: Positional arguments :param kwargs: The keyword arguments for func :type kwargs: Keyword arguments
libcloud/loadbalancer/drivers/dimensiondata.py
ex_wait_for_state
gig-tech/libcloud
1,435
python
def ex_wait_for_state(self, state, func, poll_interval=2, timeout=60, *args, **kwargs): '\n Wait for the function which returns an instance\n with a status field matching the desired state\n\n Keep polling func until one of the desired states is matched\n\n :param state: Either the desired state (`str`) or a `list` of states\n :type state: ``str`` or ``list``\n\n :param func: The function to call, e.g. ex_get_vlan\n :type func: ``function``\n\n :param poll_interval: The number of seconds to wait between checks\n :type poll_interval: `int`\n\n :param timeout: The total number of seconds to wait to reach a state\n :type timeout: `int`\n\n :param args: The positional arguments for func\n :type args: Positional arguments\n\n :param kwargs: The keyword arguments for func\n :type kwargs: Keyword arguments\n ' return self.connection.wait_for_state(state, func, poll_interval, timeout, *args, **kwargs)
def ex_wait_for_state(self, state, func, poll_interval=2, timeout=60, *args, **kwargs): '\n Wait for the function which returns an instance\n with a status field matching the desired state\n\n Keep polling func until one of the desired states is matched\n\n :param state: Either the desired state (`str`) or a `list` of states\n :type state: ``str`` or ``list``\n\n :param func: The function to call, e.g. ex_get_vlan\n :type func: ``function``\n\n :param poll_interval: The number of seconds to wait between checks\n :type poll_interval: `int`\n\n :param timeout: The total number of seconds to wait to reach a state\n :type timeout: `int`\n\n :param args: The positional arguments for func\n :type args: Positional arguments\n\n :param kwargs: The keyword arguments for func\n :type kwargs: Keyword arguments\n ' return self.connection.wait_for_state(state, func, poll_interval, timeout, *args, **kwargs)<|docstring|>Wait for the function which returns an instance with a status field matching the desired state Keep polling func until one of the desired states is matched :param state: Either the desired state (`str`) or a `list` of states :type state: ``str`` or ``list`` :param func: The function to call, e.g. ex_get_vlan :type func: ``function`` :param poll_interval: The number of seconds to wait between checks :type poll_interval: `int` :param timeout: The total number of seconds to wait to reach a state :type timeout: `int` :param args: The positional arguments for func :type args: Positional arguments :param kwargs: The keyword arguments for func :type kwargs: Keyword arguments<|endoftext|>
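A sketch of how ex_wait_for_state can wrap ex_get_node, reusing the driver object from the previous sketch. The state string 'NORMAL' and the node UUID are assumptions for illustration, as is the expectation that the call hands back the polled object once its status matches; the getter only needs to return an object with a status attribute, which DimensionDataVIPNode provides.

# Poll every 5 seconds, give up after 5 minutes; the trailing positional
# argument is forwarded to ex_get_node as its node_id.
node = driver.ex_wait_for_state(
    'NORMAL',               # a single state or a list of acceptable states
    driver.ex_get_node,     # the function that gets polled
    5,                      # poll_interval in seconds
    300,                    # timeout in seconds
    '34de6ed9-46a4-4d2f-bc1c-09f9c7f9b4fe')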
cd67b0b03e1f37fd1d66f03ada01b9df065a40102bbd0c8aa926510c470bf77d
def ex_get_default_health_monitors(self, network_domain_id): '\n Get the default health monitors available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultHealthMonitor`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultHealthMonitor', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_health_monitors(result)
Get the default health monitors available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataDefaultHealthMonitor`
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_default_health_monitors
gig-tech/libcloud
1,435
python
def ex_get_default_health_monitors(self, network_domain_id): '\n Get the default health monitors available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultHealthMonitor`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultHealthMonitor', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_health_monitors(result)
def ex_get_default_health_monitors(self, network_domain_id): '\n Get the default health monitors available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultHealthMonitor`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultHealthMonitor', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_health_monitors(result)<|docstring|>Get the default health monitors available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataDefaultHealthMonitor`<|endoftext|>
a118ed458561280c6a1e4a6bc1ca051b05e85436f98195a4621767028460c811
def ex_get_default_persistence_profiles(self, network_domain_id): '\n Get the default persistence profiles available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataPersistenceProfile`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultPersistenceProfile', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_persistence_profiles(result)
Get the default persistence profiles available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataPersistenceProfile`
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_default_persistence_profiles
gig-tech/libcloud
1,435
python
def ex_get_default_persistence_profiles(self, network_domain_id): '\n Get the default persistence profiles available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataPersistenceProfile`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultPersistenceProfile', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_persistence_profiles(result)
def ex_get_default_persistence_profiles(self, network_domain_id): '\n Get the default persistence profiles available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataPersistenceProfile`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultPersistenceProfile', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_persistence_profiles(result)<|docstring|>Get the default persistence profiles available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataPersistenceProfile`<|endoftext|>
ea3ccdcfa51b8dd175578b7ff7da0975c0fe409b21ca307291cbe84719913e8f
def ex_get_default_irules(self, network_domain_id): '\n Get the default iRules available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultiRule`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultIrule', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_irules(result)
Get the default iRules available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataDefaultiRule`
libcloud/loadbalancer/drivers/dimensiondata.py
ex_get_default_irules
gig-tech/libcloud
1,435
python
def ex_get_default_irules(self, network_domain_id): '\n Get the default iRules available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultiRule`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultIrule', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_irules(result)
def ex_get_default_irules(self, network_domain_id): '\n Get the default iRules available for a network domain\n\n :param network_domain_id: The ID of a ``DimensionDataNetworkDomain``\n :type network_domain_id: ``str``\n\n :rtype: `list` of :class:`DimensionDataDefaultiRule`\n ' result = self.connection.request_with_orgId_api_2(action='networkDomainVip/defaultIrule', params={'networkDomainId': network_domain_id}, method='GET').object return self._to_irules(result)<|docstring|>Get the default iRules available for a network domain :param network_domain_id: The ID of a ``DimensionDataNetworkDomain`` :type network_domain_id: ``str`` :rtype: `list` of :class:`DimensionDataDefaultiRule`<|endoftext|>
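The three default-object getters above (health monitors, persistence profiles, iRules) share one calling convention, so a single sketch covers them. The network domain UUID is a placeholder and the driver object from the first sketch is reused; printing id and name relies on DimensionDataDefaultHealthMonitor exposing those attributes.

domain_id = '8cdfd607-f429-4df6-9352-162cfc0891be'  # hypothetical network domain

monitors = driver.ex_get_default_health_monitors(domain_id)
profiles = driver.ex_get_default_persistence_profiles(domain_id)
irules = driver.ex_get_default_irules(domain_id)

for monitor in monitors:
    print(monitor.id, monitor.name)
print('%d persistence profiles, %d default iRules' % (len(profiles), len(irules)))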
75d695bb040ef21b3d02f47aedfb9be23cf40e9c0eb0d6370c8f9db3d16e1f0b
def read_unsigned(self) -> int: 'Get the stored value as a 256-bit unsigned value' raise NotImplementedError()
Get the stored value as a 256-bit unsigned value
hw/ip/otbn/dv/otbnsim/sim/wsr.py
read_unsigned
sha-ron/opentitan
1
python
def read_unsigned(self) -> int: raise NotImplementedError()
def read_unsigned(self) -> int: raise NotImplementedError()<|docstring|>Get the stored value as a 256-bit unsigned value<|endoftext|>
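Because read_unsigned above is only an abstract stub, the following is a hypothetical sketch of the contract it documents (a value readable as a 256-bit unsigned integer); the concrete register classes in the real wsr.py differ in detail.

class ConstWSR:
    # Hypothetical wide status register that always reads back one value.
    def __init__(self, value: int) -> None:
        self._value = value & ((1 << 256) - 1)  # clamp to 256 bits

    def read_unsigned(self) -> int:
        # Honours the documented contract: a 256-bit unsigned value.
        return self._value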