_id (stringlengths 2-7) | title (stringlengths 1-88) | partition (stringclasses, 3 values) | text (stringlengths 31-13.1k) | language (stringclasses, 1 value) | meta_information (dict) |
---|---|---|---|---|---|
q276900
|
SignalHandler.default_handler
|
test
|
def default_handler(self, signum, frame):
""" Default handler, a generic callback method for signal processing"""
self.log.debug("Signal handler called with signal: {0}".format(signum))
# 1. If signal is HUP restart the python process
# 2. If signal is TERM, INT or QUIT we try to cleanup then exit with -1
# 3. If signal is STOP or TSTP we pause
# 4. If signal is CONT or USR1 we continue
# 5. If signal is INFO we print status
# 6. If signal is USR2 we abort and then exit with -1
if signum in self.restart_signals:
self.set_handler(self.handled_signals, self.pseudo_handler)
self._cleanup()
os.execl('python', 'python', * sys.argv)
elif signum in self.abort_signals:
|
python
|
{
"resource": ""
}
|
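The restart branch above re-executes the interpreter in place. A minimal standalone sketch of the same re-exec-on-SIGHUP idea (the handler and registration here are illustrative, not the dataset's SignalHandler):
import os
import signal
import sys

def restart_on_hup(signum, frame):
    # Replace the current process image with a fresh interpreter,
    # keeping the original command-line arguments.
    os.execl(sys.executable, sys.executable, *sys.argv)

signal.signal(signal.SIGHUP, restart_on_hup)  # SIGHUP is Unix-only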
q276901
|
SignalHandler.pause
|
test
|
def pause(self, signum, seconds=0, callback_function=None):
"""
Pause execution, execution will resume in X seconds or when the
appropriate resume signal is received. Execution will jump to the
callback_function, the default callback function is the handler
method which will run all tasks registered with the reg_on_resume
method.
Returns True if timer expired, otherwise returns False
"""
if callback_function is None:
callback_function = self.default_handler
if seconds > 0:
self.log.info("Signal handler pausing for {0} seconds or until it receives SIGALRM or SIGCONT".format(seconds))
|
python
|
{
"resource": ""
}
|
q276902
|
SignalHandler.abort
|
test
|
def abort(self, signum):
""" Run all abort tasks, then all exit tasks, then exit with error
|
python
|
{
"resource": ""
}
|
q276903
|
SignalHandler.status
|
test
|
def status(self, signum):
""" Run all status tasks, then run all tasks in the resume queue"""
self.log.debug('Signal handler got status signal')
new_status_callbacks = []
for status_call in self.status_callbacks:
# If callback is non persistent we remove it
try:
self.log.debug("Calling {0}({1},{2})".format(status_call['function'].__name__, status_call['args'], status_call['kwargs']))
except AttributeError:
self.log.debug("Calling unbound function/method
|
python
|
{
"resource": ""
}
|
q276904
|
SignalHandler._unreg_event
|
test
|
def _unreg_event(self, event_list, event):
""" Tries to remove a registered event without triggering it """
try:
self.log.debug("Removing event {0}({1},{2})".format(event['function'].__name__, event['args'], event['kwargs']))
except AttributeError:
self.log.debug("Removing event {0}".format(str(event)))
try:
event_list.remove(event)
except ValueError:
try:
self.log.warn("Unable to remove event {0}({1},{2}) , not found in list: {3}".format(event['function'].__name__,
|
python
|
{
"resource": ""
}
|
q276905
|
Connection.fetch_metric
|
test
|
def fetch_metric(self, metric, start, end, tags={}, aggregator="sum",
downsample=None, ms_resolution=True):
"""Fetch time series data from OpenTSDB
Parameters:
metric:
A string representing a valid OpenTSDB metric.
tags:
A dict mapping tag names to tag values. Tag names and values are
always strings.
{ 'user_id': '44' }
start:
A datetime.datetime-like object representing the start of the
range to query over.
end:
A datetime.datetime-like object representing the end of the
range to query over.
aggregator:
The function for merging multiple time series together. For
example, if the "user_id" tag is not specified, this aggregator
function is used to combine all heart rate time series into one
time series. (Yes, this isn't very useful.)
For queries that return only one time series, this parameter is
not relevant.
Valid values: "sum", "min", "max", "avg", "dev"
See: http://opentsdb.net/docs/build/html/user_guide/query/aggregators.html
downsample:
A relative time interval to "downsample". This isn't true
downsampling; rather, if you specify a downsampling of "5m"
(five minutes), OpenTSDB will split data into five minute
intervals, and return one data point in the middle of each
interval whose value is the average of all data points within
that interval.
Valid relative time values are strings of the following format:
"<amount><time_unit>"
Valid time units: "ms", "s", "m", "h", "d", "w", "n", "y"
Date and time format: http://opentsdb.net/docs/build/html/user_guide/query/dates.html
ms_resolution:
Whether or
|
python
|
{
"resource": ""
}
|
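A hypothetical call matching the documented parameters (the Connection constructor arguments are assumptions; only fetch_metric's signature is shown above):
from datetime import datetime, timedelta

conn = Connection('localhost', 4242)  # assumed host/port constructor
end = datetime.now()
start = end - timedelta(hours=1)
points = conn.fetch_metric('sys.cpu.user', start, end,
                           tags={'host': 'web01'},
                           aggregator='avg',
                           downsample='5m')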
q276906
|
Connection.fetch_sorted_metric
|
test
|
def fetch_sorted_metric(self, *args, **kwargs):
"""Fetch and sort time series data from OpenTSDB
Takes the same parameters as `fetch_metric`, but returns a list of
(timestamp, value) tuples sorted
|
python
|
{
"resource": ""
}
|
q276907
|
pfcollect
|
test
|
def pfcollect(iterable, n=None):
"""Collects and returns a list of values from the given iterable. If
the n parameter is not specified, collects all values from the
iterable.
:param iterable: An iterable yielding values for the list
:param n: An optional maximum number of items to collect
:rtype: List of values from the iterable
Example::
>>> @pointfree
... def fibonaccis():
... a, b = 0, 1
... while
|
python
|
{
"resource": ""
}
|
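The body is cut off above; a minimal sketch of the documented behavior (not the library's actual implementation) using itertools.islice:
from itertools import islice

def collect(iterable, n=None):
    # Collect every value, or at most n values when n is given.
    if n is None:
        return list(iterable)
    return list(islice(iterable, n))

assert collect(iter(range(10)), n=3) == [0, 1, 2]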
q276908
|
pfprint
|
test
|
def pfprint(item, end='\n', file=None):
"""Prints an item.
:param item: The item to print
:param end: String to append to the end of printed output
:param file: File to which output is printed
:rtype: None
Example::
>>> from operator import add
|
python
|
{
"resource": ""
}
|
q276909
|
pfprint_all
|
test
|
def pfprint_all(iterable, end='\n', file=None):
"""Prints each item from an iterable.
:param iterable: An iterable yielding values to print
:param end: String to append to the end of printed output
:param file: File to which output is printed
:rtype: None
Example::
>>> @pointfree
... def prefix_all(prefix, iterable):
... for item in iterable:
... yield "%s%s" % (prefix, item)
|
python
|
{
"resource": ""
}
|
q276910
|
partial.__sig_from_func
|
test
|
def __sig_from_func(self, func):
"""Extract function signature, default arguments, keyword-only
arguments, and whether or not variable positional or keyword
arguments are allowed. This also supports calling unbound instance
methods by passing an object instance as the first argument;
however, unbound classmethod and staticmethod objects are not
callable, so we do not attempt to support them here."""
if isinstance(func, types.MethodType):
# A bound instance or class method.
argspec = getfullargspec(func.__func__)
|
python
|
{
"resource": ""
}
|
q276911
|
partial.__sig_from_partial
|
test
|
def __sig_from_partial(self, inst):
"""Extract function signature from an existing partial instance."""
self.pargl
|
python
|
{
"resource": ""
}
|
q276912
|
partial.__new_argv
|
test
|
def __new_argv(self, *new_pargs, **new_kargs):
"""Calculate new argv and extra_argv values resulting from adding
the specified positional and keyword arguments."""
new_argv = self.argv.copy()
new_extra_argv = list(self.extra_argv)
for v in new_pargs:
arg_name = None
for name in self.pargl:
if not name in new_argv:
arg_name = name
break
if arg_name:
new_argv[arg_name] = v
elif self.var_pargs:
new_extra_argv.append(v)
else:
num_prev_pargs = len([name for name in self.pargl if name in self.argv])
raise TypeError("%s() takes exactly %d positional arguments (%d given)" \
% (self.__name__,
|
python
|
{
"resource": ""
}
|
q276913
|
ignore_certain_metainf_files
|
test
|
def ignore_certain_metainf_files(filename):
"""
We do not support multiple signatures in XPI signing because the client
side code makes some pretty reasonable assumptions about a single signature
on any given JAR. This function returns True if the file name given is one
that we dispose of to prevent multiple signatures.
"""
ignore = ("META-INF/manifest.mf",
"META-INF/*.sf",
"META-INF/*.rsa",
"META-INF/*.dsa",
|
python
|
{
"resource": ""
}
|
q276914
|
file_key
|
test
|
def file_key(filename):
'''Sort keys for xpi files
The filenames in a manifest are ordered so that files not in a
directory come before files in any directory, ordered
alphabetically but ignoring case, with a few exceptions
(install.rdf, chrome.manifest, icon.png and icon64.png come at the
beginning; licenses come at the end).
This order does not appear to affect anything in any way, but it
looks nicer.
'''
prio = 4
if filename == 'install.rdf':
prio = 1
elif filename in
|
python
|
{
"resource": ""
}
|
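The priority table is truncated above; a sketch of a sort key consistent with the docstring (the priorities after install.rdf are assumptions):
def manifest_key(filename):
    # Lower priorities sort first; names compare case-insensitively,
    # and files inside directories sort after top-level files.
    prio = 4
    if filename == 'install.rdf':
        prio = 1
    elif filename in ('chrome.manifest', 'icon.png', 'icon64.png'):
        prio = 2
    elif filename.upper().startswith('LICENSE'):
        prio = 5
    return (prio, '/' in filename, filename.lower())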
q276915
|
vlq2int
|
test
|
def vlq2int(data):
"""Read one VLQ-encoded integer value from an input data stream."""
# The VLQ is little-endian.
byte = ord(data.read(1))
value = byte & 0x7F
shift = 1
while byte & 0x80 != 0:
byte
|
python
|
{
"resource": ""
}
|
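The decode loop is truncated above; a self-contained little-endian VLQ decoder along the same lines:
import io

def vlq_decode(data):
    # Little-endian VLQ: 7 value bits per byte; the high bit is set
    # on every byte except the last.
    byte = ord(data.read(1))
    value = byte & 0x7F
    shift = 1
    while byte & 0x80:
        byte = ord(data.read(1))
        value |= (byte & 0x7F) << (7 * shift)
        shift += 1
    return value

assert vlq_decode(io.BytesIO(b'\x96\x01')) == 150  # 22 + (1 << 7)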
q276916
|
read_table
|
test
|
def read_table(data, fields):
"""Read a table structure.
These are used by Blizzard to collect pieces of data together. Each
value is prefixed by two bytes, first denoting (doubled) index and the
second denoting some sort of key -- so far it has always been '09'. The
actual value follows as a Variable-Length Quantity, also known as uintvar.
The actual value is also doubled.
|
python
|
{
"resource": ""
}
|
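The body is cut off above; a sketch matching the described layout, reusing vlq_decode from the previous sketch (error handling omitted):
from collections import OrderedDict

def read_table_sketch(data, fields):
    # Per the docstring: one doubled-index byte, one key byte (always
    # 0x09), then a VLQ value that is itself stored doubled.
    table = OrderedDict()
    for field in fields:
        data.read(2)                          # skip the index and key bytes
        table[field] = vlq_decode(data) // 2  # undo the doubling
    return table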
q276917
|
SC2Replay._parse_header
|
test
|
def _parse_header(self):
"""Parse the user data header portion of the replay."""
header = OrderedDict()
user_data_header = self.archive.header['user_data_header']['content']
if re.search(r'StarCraft II replay', user_data_header):
user_data_header = StringIO.StringIO(user_data_header)
user_data_header.seek(30) # Just skip the beginning.
header.update(read_table(user_data_header, ['release_flag',
'major_version',
'minor_version',
'maintenance_version',
'build_number',
'unknown',
'unknown',
'duration']))
# Some post processing is required.
header['version'] = '%s.%s.%s.%s' % (header['major_version'],
|
python
|
{
"resource": ""
}
|
q276918
|
SC2Replay.get_duration
|
test
|
def get_duration(self, seconds):
"""Transform duration into a human-readable form."""
duration = ""
minutes, seconds = divmod(seconds, 60)
if minutes >= 60:
hours, minutes = divmod(minutes, 60)
|
python
|
{
"resource": ""
}
|
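The formatting branch is truncated above; a sketch completing the described behavior (the exact output format is an assumption):
def human_duration(seconds):
    # Render seconds as 'Hh Mm Ss', omitting hours when zero.
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    if hours:
        return '%dh %dm %ds' % (hours, minutes, seconds)
    return '%dm %ds' % (minutes, seconds)

assert human_duration(3725) == '1h 2m 5s'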
q276919
|
SC2Replay.print_details
|
test
|
def print_details(self):
"""Print a summary of the game details."""
print 'Map ', self.map
print 'Duration ', self.duration
print 'Version ', self.version
print 'Team Player Race Color'
|
python
|
{
"resource": ""
}
|
q276920
|
FormEvents.data
|
test
|
def data(self):
"""
This function retrieves the data that the user typed.
"""
self.batch_name_value = self.ui.batch_name_value.text()
self.saa_values = self.ui.saa_values.text()
self.sza_values = self.ui.sza_values.text()
self.p_values = self.ui.p_values.text()
self.x_value = self.ui.x_value.text()
self.y_value = self.ui.y_value.text()
|
python
|
{
"resource": ""
}
|
q276921
|
FormEvents.search_file_result
|
test
|
def search_file_result(self):
"""
Once the file is found, this function displays the file's data and the associated graphic.
"""
if self.ui.tabWidget.currentIndex() == TabWidget.NORMAL_MODE:
self.result_file = self.file_dialog.getOpenFileName(caption=str("Open Report File"), directory="./outputs")
if not self.result_file == '':
|
python
|
{
"resource": ""
}
|
q276922
|
FormEvents.write_to_file
|
test
|
def write_to_file(self):
"""
This function calls "gui_batch.py" with the input values to write the batch file.
"""
bt = BatchFile(self.batch_name_value, self.p_values, self.x_value, self.y_value, self.g_value, self.s_value,
self.z_value, self.wavelength_values, self.verbose_value, self.phytoplankton_path,
self.bottom_path,
|
python
|
{
"resource": ""
}
|
q276923
|
FormEvents.data_processing
|
test
|
def data_processing(self):
"""
This function separates the data from the file used to display curves and puts it into the appropriate arrays.
"""
the_file_name = str(self.result_file)
the_file = open(the_file_name, 'r')
lines = the_file.readlines()
# We put all lines in an array and we put each cell of the line in a column.
lines_array = []
for line in lines:
line = line.split(',')  # Each comma starts a new cell
lines_array.append(line)
labels_line = lines_array[0]
cell_labels_line = 0 # Iterator on each cell of the line labels_line.
flag = True  # Becomes False when we find the word that separates data from wavelength values.
try:
while flag:  # Loops while the flag is True; if the word never matches, the IndexError below ends it.
if "wave length (nm)" in labels_line[cell_labels_line]:
index = labels_line.index(labels_line[cell_labels_line]) # Find the index of the string searched.
flag = False
else:
cell_labels_line += 1
except IndexError: # In
|
python
|
{
"resource": ""
}
|
q276924
|
FormEvents.display_the_graphic_connection
|
test
|
def display_the_graphic_connection(self):
"""
This attaches the function "display_the_graphic" to the slider.
To make a connection we cannot pass parameters to the function, but "display_the_graphic"
|
python
|
{
"resource": ""
}
|
q276925
|
FormEvents.print_graphic_information
|
test
|
def print_graphic_information(self, num_curve, information):
"""
This function displays information about curves.
Inputs: num_curve: the index of the curve's line to display.
information: the array containing the information of all curves to display.
"""
"""In this function, the best would to create labels each time we need to create one,
following the number of labels in label_information.
#self.essai = QtGui.QLabel(self.ui.tab)
#self.essai.setGeometry(PyQt4.QtCore.QRect(870,650,111,16))
#self.essai.setText("ESSAI")
"""
label_information = information[0]
data_information = information[1:]
count_nb_label = 0 # Iterator on all labels of label_information
nb_label = len(label_information)
while count_nb_label <= nb_label:
self.ui.column1_label.setText(label_information[0].strip('\"'))
self.ui.column2_label.setText(label_information[1].strip('\"'))
self.ui.column3_label.setText(label_information[2].strip('\"'))
self.ui.column4_label.setText(label_information[3].strip('\"'))
self.ui.column5_label.setText(label_information[4].strip('\"'))
self.ui.column6_label.setText(label_information[5].strip('\"'))
self.ui.column7_label.setText(label_information[6].strip('\"'))
self.ui.column8_label.setText(label_information[7].strip('\"'))
count_nb_label += 1
line_of_data = 0 # Iterator on each line of data_information.
while line_of_data < len(data_information):
|
python
|
{
"resource": ""
}
|
q276926
|
FormEvents.display_error_message
|
test
|
def display_error_message(self):
"""
This function displays an error message when a wrong value is typed.
"""
self.ui.error_label.setScaledContents(True) # Warning image shown.
|
python
|
{
"resource": ""
}
|
q276927
|
FormEvents.hide_error_message
|
test
|
def hide_error_message(self):
"""
This function hides the error message when all values are correct.
"""
|
python
|
{
"resource": ""
}
|
q276928
|
FormEvents.run
|
test
|
def run(self):
"""
This function executes planarRad using the batch file.
"""
"""
Error when planarRad starts: /bin/sh: 1: ../planarrad.py: not found
"""
print('Executing planarrad')
# If we are not in the reverse_mode :
if self.ui.tabWidget.currentIndex() == TabWidget.NORMAL_MODE:
self.data()
self.check_values()
if self.without_error == False:
self.display_error_message()
elif self.without_error ==
|
python
|
{
"resource": ""
}
|
q276929
|
FormEvents.cancel_planarrad
|
test
|
def cancel_planarrad(self):
"""
This function cancels PlanarRad.
"""
"""
This function needs to be tested. We don't know if it works.
"""
if (self.is_running == True) & (self.ui.tabWidget.currentIndex() == TabWidget.NORMAL_MODE):
cancel = QtGui.QMessageBox.question(self.ui.cancel, 'Cancel PlanarRad', "Are you sure to cancel ?",
QtGui.QMessageBox.Yes,
|
python
|
{
"resource": ""
}
|
q276930
|
FormEvents.quit
|
test
|
def quit(self):
"""
This function quits PlanarRad, after checking whether PlanarRad is running.
"""
"""
Nothing is programmed yet to display a message box when the user clicks the window's close button to quit.
"""
if self.is_running == True:
warning_planarrad_running = QtGui.QMessageBox.warning(self.ui.quit, 'Warning !',
"PlanarRad is running. Stop it before quit !",
QtGui.QMessageBox.Ok)
|
python
|
{
"resource": ""
}
|
q276931
|
FormEvents.save_figure
|
test
|
def save_figure(self):
"""
This function programs the button to save the figure displayed
and save it in a png file in the current repository.
"""
"""
The figure name is incremented so that the previous figure is not overwritten when the user reuses this method.
The png file is put in the "Artists_saved" directory located in the "planarradpy" folder.
|
python
|
{
"resource": ""
}
|
q276932
|
FormEvents.open_log_file
|
test
|
def open_log_file(self):
"""
The following opens the log file of PlanarRad.
"""
"""
TO DO.
"""
# webbrowser.open('https://marrabld.github.io/planarradpy/')
f = open(os.path.expanduser('~/.planarradpy/log/libplanarradpy.log'))
|
python
|
{
"resource": ""
}
|
q276933
|
FormEvents.open_documentation
|
test
|
def open_documentation(self):
"""
The following opens the documentation file.
"""
"""
TO DO.
"""
# webbrowser.open('https://marrabld.github.io/planarradpy/')
window = Window()
|
python
|
{
"resource": ""
}
|
q276934
|
FormEvents.prerequisite_actions
|
test
|
def prerequisite_actions(self):
"""
This function performs all the required actions when the GUI starts.
"""
self.hide_error_message()
self.ui.show_all_curves.setDisabled(True)
self.ui.sens.setDisabled(True)
self.ui.show_grid.setDisabled(True)
pathname = os.path.dirname(sys.argv[0])
path = os.path.abspath(pathname)
# self.phytoplankton_path = self.ui.phyto_path.setText(path.replace('gui', 'inputs/iop_files'))
# self.bottom_path = self.ui.bottom_path.setText(path.replace('gui',
|
python
|
{
"resource": ""
}
|
q276935
|
FormEvents.click
|
test
|
def click(self, event):
"""
This function intercepts the mouse's right click and its position.
"""
if event.button == 3:
if self.ui.tabWidget.currentIndex()
|
python
|
{
"resource": ""
}
|
q276936
|
FormEvents.mouse_move
|
test
|
def mouse_move(self, event):
"""
The following retrieves the mouse coordinates on the canvas.
"""
if (self.ui.tabWidget.currentIndex() == TabWidget.NORMAL_MODE):
|
python
|
{
"resource": ""
}
|
q276937
|
FormEvents.graphic_target
|
test
|
def graphic_target(self, x, y):
"""
The following updates the labels with the mouse coordinates.
"""
if self.authorized_display == True:
|
python
|
{
"resource": ""
}
|
q276938
|
genesis_signing_lockset
|
test
|
def genesis_signing_lockset(genesis, privkey):
"""
in order to avoid a complicated bootstrapping, we define
the genesis_signing_lockset as a lockset with one vote by any validator.
"""
|
python
|
{
"resource": ""
}
|
q276939
|
Signed.sign
|
test
|
def sign(self, privkey):
"""Sign this with a private key"""
if self.v:
raise InvalidSignature("already signed")
if privkey in (0, '', '\x00' * 32):
raise InvalidSignature("Zero privkey cannot sign")
rawhash = sha3(rlp.encode(self, self.__class__.exclude(['v', 'r', 's'])))
if len(privkey) == 64:
privkey = encode_privkey(privkey, 'bin')
|
python
|
{
"resource": ""
}
|
q276940
|
Signed.hash
|
test
|
def hash(self):
"signatures are non deterministic"
if self.sender is None:
raise MissingSignatureError()
class HashSerializable(rlp.Serializable):
fields = [(field, sedes) for field, sedes in self.fields
|
python
|
{
"resource": ""
}
|
q276941
|
LockSet.check
|
test
|
def check(self):
"either invalid or one of quorum, noquorum, quorumpossible"
if not self.is_valid:
return True
|
python
|
{
"resource": ""
}
|
q276942
|
IOU.issue_funds
|
test
|
def issue_funds(ctx, amount='uint256', rtgs_hash='bytes32', returns=STATUS):
"In the IOU fungible the supply is set by Issuer, who issue funds."
# allocate new issue as result of a new cash entry
ctx.accounts[ctx.msg_sender]
|
python
|
{
"resource": ""
}
|
q276943
|
HeightManager.last_lock
|
test
|
def last_lock(self):
"highest lock on height"
rs = list(self.rounds)
assert len(rs) < 2 or rs[0] > rs[1] # FIXME REMOVE
for r in self.rounds:
|
python
|
{
"resource": ""
}
|
q276944
|
HeightManager.last_voted_blockproposal
|
test
|
def last_voted_blockproposal(self):
"the last block proposal node voted on"
for r in self.rounds:
if isinstance(self.rounds[r].proposal, BlockProposal):
assert isinstance(self.rounds[r].lock, Vote)
|
python
|
{
"resource": ""
}
|
q276945
|
HeightManager.last_valid_lockset
|
test
|
def last_valid_lockset(self):
"highest valid lockset on height"
for r in self.rounds:
ls = self.rounds[r].lockset
|
python
|
{
"resource": ""
}
|
q276946
|
RoundManager.get_timeout
|
test
|
def get_timeout(self):
"setup a timeout for waiting for a proposal"
if self.timeout_time is not None or self.proposal:
return
now = self.cm.chainservice.now
round_timeout = ConsensusManager.round_timeout
|
python
|
{
"resource": ""
}
|
q276947
|
Synchronizer.on_proposal
|
test
|
def on_proposal(self, proposal, proto):
"called to inform about synced peers"
assert isinstance(proto, HDCProtocol)
assert isinstance(proposal, Proposal)
|
python
|
{
"resource": ""
}
|
q276948
|
mk_privkeys
|
test
|
def mk_privkeys(num):
"make privkeys that support coloring, see utils.cstr"
privkeys = []
assert num <= num_colors
for i in range(num):
j = 0
while True:
k = sha3(str(j))
|
python
|
{
"resource": ""
}
|
q276949
|
Transport.delay
|
test
|
def delay(self, sender, receiver, packet, add_delay=0):
"""
bandwidths are inaccurate, as we don't account for parallel transfers here
|
python
|
{
"resource": ""
}
|
q276950
|
SlowTransport.deliver
|
test
|
def deliver(self, sender, receiver, packet):
"deliver on edge of timeout_window"
to = ConsensusManager.round_timeout
assert to > 0
|
python
|
{
"resource": ""
}
|
q276951
|
chain_nac_proxy
|
test
|
def chain_nac_proxy(chain, sender, contract_address, value=0):
"create an object which acts as a proxy for the contract on the chain"
klass = registry[contract_address].im_self
assert issubclass(klass, NativeABIContract)
def mk_method(method):
def m(s, *args):
data = abi_encode_args(method, args)
block = chain.head_candidate
output = test_call(block, sender, contract_address, data)
if output is not None:
|
python
|
{
"resource": ""
}
|
q276952
|
Registry.address_to_native_contract_class
|
test
|
def address_to_native_contract_class(self, address):
"returns class._on_msg_unsafe, use x.im_self to get class"
assert isinstance(address, bytes) and len(address) == 20
assert self.is_instance_address(address)
|
python
|
{
"resource": ""
}
|
q276953
|
Registry.register
|
test
|
def register(self, contract):
"registers NativeContract classes"
assert issubclass(contract, NativeContractBase)
assert len(contract.address) == 20
assert contract.address.startswith(self.native_contract_address_prefix)
if self.native_contracts.get(contract.address) == contract._on_msg:
log.debug("already registered", contract=contract, address=contract.address)
return
|
python
|
{
"resource": ""
}
|
q276954
|
DuplicatesFilter.update
|
test
|
def update(self, data):
"returns True if unknown"
if data not in self.filter:
self.filter.append(data)
if len(self.filter) > self.max_items:
self.filter.pop(0)
|
python
|
{
"resource": ""
}
|
q276955
|
ChainService.on_receive_transactions
|
test
|
def on_receive_transactions(self, proto, transactions):
"receives rlp.decoded serialized"
log.debug('----------------------------------')
log.debug('remote_transactions_received', count=len(transactions), remote_id=proto)
def
|
python
|
{
"resource": ""
}
|
q276956
|
img_from_vgg
|
test
|
def img_from_vgg(x):
'''Decondition an image from the VGG16 model.'''
x = x.transpose((1, 2, 0))
x[:, :, 0] += 103.939
x[:, :, 1]
|
python
|
{
"resource": ""
}
|
q276957
|
img_to_vgg
|
test
|
def img_to_vgg(x):
'''Condition an image for use with the VGG16 model.'''
x = x[:,:,::-1] # to BGR
x[:, :, 0] -= 103.939
x[:,
|
python
|
{
"resource": ""
}
|
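The two helpers above add and subtract the ImageNet BGR channel means (103.939, 116.779, 123.68); both snippets are truncated before the remaining channels. A round-trip sketch of the same conditioning:
import numpy as np

VGG_BGR_MEANS = (103.939, 116.779, 123.68)

def to_vgg(x):
    x = x[:, :, ::-1].astype('float64')  # RGB -> BGR
    for c, mean in enumerate(VGG_BGR_MEANS):
        x[:, :, c] -= mean
    return x.transpose((2, 0, 1))        # HWC -> CHW

def from_vgg(x):
    x = x.transpose((1, 2, 0)).copy()    # CHW -> HWC
    for c, mean in enumerate(VGG_BGR_MEANS):
        x[:, :, c] += mean
    return x[:, :, ::-1]                 # BGR -> RGB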
q276958
|
VGG16.get_f_layer
|
test
|
def get_f_layer(self, layer_name):
'''Create a function for the response of a layer.'''
inputs = [self.net_input]
if self.learning_phase is not None:
|
python
|
{
"resource": ""
}
|
q276959
|
VGG16.get_layer_output
|
test
|
def get_layer_output(self, name):
'''Get symbolic output of a layer.'''
if not name in self._f_layer_outputs:
layer = self.net.get_layer(name)
|
python
|
{
"resource": ""
}
|
q276960
|
VGG16.get_features
|
test
|
def get_features(self, x, layers):
'''Evaluate layer outputs for `x`'''
if not layers:
return None
inputs = [self.net.input]
if self.learning_phase is not None:
inputs.append(self.learning_phase)
|
python
|
{
"resource": ""
}
|
q276961
|
create_key_file
|
test
|
def create_key_file(path):
"""
Creates a new encryption key in the path provided and sets the file
permissions. Setting the file permissions currently does not work
on Windows platforms because of the differences in how file
permissions are read and modified.
|
python
|
{
"resource": ""
}
|
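A sketch of the Unix-only permission step the docstring describes (os.urandom stands in for the real key material, which the snippet does not show):
import os
import platform

def write_key_file(path):
    with open(path, 'wb') as f:
        f.write(os.urandom(32))      # stand-in for the real key
    if platform.system() != 'Windows':
        os.chmod(path, 0o600)        # owner read/write only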
q276962
|
TeradataBulkLoad.finish
|
test
|
def finish(self):
"""
Finishes the load job. Called automatically when the connection closes.
:return: The exit code returned when applying rows to the table
"""
if self.finished:
return self.exit_code
checkpoint_status = self.checkpoint()
self.exit_code = self._exit_code()
if self.exit_code != 0:
raise TeradataPTError("BulkLoad job finished with return code '{}'".format(self.exit_code))
# TODO(chris): should this happen every time?
if self.applied_count > 0:
self._end_acquisition()
|
python
|
{
"resource": ""
}
|
q276963
|
TeradataBulkLoad.from_file
|
test
|
def from_file(self, filename, table=None, delimiter='|', null='NULL',
panic=True, quotechar='"', parse_dates=False):
"""
Load from a file into the target table, handling each step of the
load process.
Can load from text files, and properly formatted giraffez archive
files. In both cases, if Gzip compression is detected the file will be
decompressed while reading and handled appropriately. The encoding is
determined automatically by the contents of the file.
It is not necessary to set the columns in use prior to loading from a file.
In the case of a text file, the header is used to determine column names
and their order. Valid delimiters include '|', ',', and '\\t' (tab). When
loading an archive file, the column information is decoded alongside the data.
:param str filename: The location of the file to be loaded
:param str table: The name of the target table, if it was not specified
to the constructor for the instance
:param str null: The string that indicates a null value in the rows being
inserted from a file. Defaults to 'NULL'
:param str delimiter: When loading a file, indicates that fields are
separated by this delimiter. Defaults to :code:`None`, which causes the
delimiter to be determined from the header of the file. In most
cases, this behavior is sufficient
:param str quotechar: The character used to quote fields containing special characters,
like the delimiter.
:param bool panic: If :code:`True`, when an error is encountered it will be
raised. Otherwise, the error will be logged and :code:`self.error_count`
is incremented.
:return: The output of the call to
:meth:`~giraffez.load.TeradataBulkLoad.finish`
:raises `giraffez.errors.GiraffeError`: if table was not set and :code:`table`
is :code:`None`, or if a Teradata error occurred while retrieving table info.
:raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is :code:`True` and there
are format errors in the row values.
"""
if not self.table:
if not table:
raise GiraffeError("Table must be set or specified to load a file.")
self.table = table
if not isinstance(null, basestring):
raise GiraffeError("Expected 'null' to be
|
python
|
{
"resource": ""
}
|
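A hypothetical invocation matching the documented signature (table and file names are illustrative):
import giraffez

with giraffez.BulkLoad('database.table_name') as load:
    exit_code = load.from_file('rows.txt', delimiter='|', null='NULL')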
q276964
|
TeradataBulkLoad.put
|
test
|
def put(self, items, panic=True):
"""
Load a single row into the target table.
:param list items: A list of values in the row corresponding to the
fields specified by :code:`self.columns`
:param bool panic: If :code:`True`, when an error is encountered it will be
raised. Otherwise, the error will be logged and :code:`self.error_count`
is incremented.
:raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is :code:`True` and there
are format errors in the row values.
:raises `giraffez.errors.GiraffeError`: if table name is not set.
:raises `giraffez.TeradataPTError`: if there is a problem
connecting to Teradata.
|
python
|
{
"resource": ""
}
|
q276965
|
TeradataBulkLoad.release
|
test
|
def release(self):
"""
Attempt release of target mload table.
:raises `giraffez.errors.GiraffeError`: if table was not set by
the constructor, the :code:`TeradataBulkLoad.table`, or
:meth:`~giraffez.load.TeradataBulkLoad.from_file`.
"""
if self.table is None:
|
python
|
{
"resource": ""
}
|
q276966
|
TeradataBulkLoad.tables
|
test
|
def tables(self):
"""
The names of the work tables used for loading.
:return: A list of four tables, each the name of the target table
with the added suffixes, "_wt", "_log", "_e1", and "_e2"
:raises `giraffez.errors.GiraffeError`: if table was not set by
|
python
|
{
"resource": ""
}
|
q276967
|
fix_compile
|
test
|
def fix_compile(remove_flags):
"""
Monkey-patch compiler to allow for removal of default compiler flags.
"""
import distutils.ccompiler
def _fix_compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
for flag in remove_flags:
if flag in self.compiler_so:
|
python
|
{
"resource": ""
}
|
q276968
|
find_teradata_home
|
test
|
def find_teradata_home():
"""
Attempts to find the Teradata install directory with the defaults
for a given platform. Should always return `None` when the defaults
are not present and the TERADATA_HOME environment variable wasn't
explicitly set to the correct install location.
"""
if platform.system() == 'Windows':
# The default installation path for Windows is split between the
# Windows directories for 32-bit/64-bit applications. It is
# worth noting that the Teradata architecture installed should match
# the architecture of the Python interpreter being used (i.e.
# TTU 32-bit is required /w Python 32-bit and TTU 64-bit is
# required for Python 64-bit).
if is_64bit():
|
python
|
{
"resource": ""
}
|
q276969
|
Secret.get
|
test
|
def get(self, key):
"""
Retrieve the decrypted value of a key in a giraffez
configuration file.
:param str key: The key used to lookup the encrypted value
"""
if not key.startswith("secure.") and not key.startswith("connections."):
key
|
python
|
{
"resource": ""
}
|
q276970
|
Secret.set
|
test
|
def set(self, key, value):
"""
Set a decrypted value by key in a giraffez configuration file.
:param str key: The key used to lookup the encrypted value
:param value: Value to set at the given key, can be any value that is
YAML serializable.
|
python
|
{
"resource": ""
}
|
q276971
|
GiraffeShell.do_table
|
test
|
def do_table(self, line):
"""Display results in table format"""
if len(line) > 0:
if line.strip().lower() == "on":
log.write("Table ON")
|
python
|
{
"resource": ""
}
|
q276972
|
TeradataCmd.execute
|
test
|
def execute(self, command, coerce_floats=True, parse_dates=False, header=False, sanitize=True,
silent=False, panic=None, multi_statement=False, prepare_only=False):
"""
Execute commands using CLIv2.
:param str command: The SQL command to be executed
:param bool coerce_floats: Coerce Teradata decimal types into Python floats
:param bool parse_dates: Parses Teradata datetime types into Python datetimes
:param bool header: Include row header
:param bool sanitize: Whether or not to call :func:`~giraffez.sql.prepare_statement`
on the command
:param bool silent: Silence console logging (within this function only)
:param bool panic: If :code:`True`, when an error is encountered it will be
raised.
:param bool multi_statement: Execute in multi-statement mode
:param bool prepare_only: Only prepare the command (no results)
:return: a cursor over the results of each statement in the command
:rtype: :class:`~giraffez.cmd.Cursor`
:raises `giraffez.TeradataError`: if the query is invalid
:raises `giraffez.errors.GiraffeError`: if the return data could not be decoded
"""
if panic is None:
panic = self.panic
self.options("panic", panic)
self.options("multi-statement mode", multi_statement, 3)
if isfile(command):
|
python
|
{
"resource": ""
}
|
q276973
|
Config.get_value
|
test
|
def get_value(self, key, default={}, nested=True, decrypt=True):
"""
Retrieve a value from the configuration based on its key. The key
may be nested.
:param str key: A path to the value, with nested levels joined by '.'
:param default: Value to return if the key does not exist (defaults to :code:`dict()`)
:param bool decrypt: If :code:`True`, decrypt an encrypted value before returning
(if encrypted). Defaults to :code:`True`.
"""
key = key.lstrip()
if key.endswith("."):
key = key[:-1]
|
python
|
{
"resource": ""
}
|
q276974
|
Config.write_default
|
test
|
def write_default(self, conf=None):
"""
A class method to write a default configuration file structure to a file.
Note that the contents of the file will be overwritten if it already exists.
:param str conf: The name of the file to write to. Defaults to
|
python
|
{
"resource": ""
}
|
q276975
|
Columns.set_filter
|
test
|
def set_filter(self, names=None):
"""
Set the names of columns to be used when iterating through the list,
retrieving names, etc.
:param list names: A list of names to be used, or :code:`None` for all
"""
_names = []
if names:
for name in names:
_safe_name = safe_name(name)
if _safe_name not in self._column_map:
|
python
|
{
"resource": ""
}
|
q276976
|
TeradataBulkExport.to_archive
|
test
|
def to_archive(self, writer):
"""
Writes export archive files in the Giraffez archive format.
This takes a `giraffez.io.Writer` and writes archive chunks to
file until all rows for a given statement have been exhausted.
.. code-block:: python
with giraffez.BulkExport("database.table_name") as export:
with giraffez.Writer("database.table_name.tar.gz", 'wb', use_gzip=True) as out:
for n in export.to_archive(out):
print("Rows: {}".format(n))
:param `giraffez.io.Writer` writer: A writer handling the archive output
|
python
|
{
"resource": ""
}
|
q276977
|
TeradataBulkExport.to_str
|
test
|
def to_str(self, delimiter='|', null='NULL'):
"""
Sets the current encoder output to Python `str` and returns
a row iterator.
:param str null: The string representation of null values
:param str delimiter: The string delimiting values in the output
string
:rtype: iterator (yields ``str``)
|
python
|
{
"resource": ""
}
|
q276978
|
float_with_multiplier
|
test
|
def float_with_multiplier(string):
"""Convert string with optional k, M, G, T multiplier to float"""
match = re_float_with_multiplier.search(string)
if not match or not match.group('num'):
|
python
|
{
"resource": ""
}
|
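The regex and multiplier table are not shown above; a self-contained sketch of the described conversion:
import re

MULTIPLIERS = {'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12}
_re_num = re.compile(r'^(?P<num>[-+]?\d*\.?\d+)(?P<mult>[kMGT])?$')

def parse_float(string):
    match = _re_num.match(string)
    if not match:
        raise ValueError('not a number: {}'.format(string))
    return float(match.group('num')) * MULTIPLIERS.get(match.group('mult'), 1)

assert parse_float('2.5M') == 2.5e6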
q276979
|
specific_gains
|
test
|
def specific_gains(string):
"""Convert string with gains of individual amplification elements to dict"""
if not string:
|
python
|
{
"resource": ""
}
|
q276980
|
device_settings
|
test
|
def device_settings(string):
"""Convert string with SoapySDR device settings to dict"""
if not string:
return {}
settings = {}
for setting in string.split(','):
|
python
|
{
"resource": ""
}
|
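The key=value split is truncated above; a sketch of the full parsing:
def parse_settings(string):
    # 'driver=rtlsdr,buffers=16' -> {'driver': 'rtlsdr', 'buffers': '16'}
    if not string:
        return {}
    settings = {}
    for setting in string.split(','):
        key, _, value = setting.partition('=')
        settings[key.strip()] = value.strip()
    return settings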
q276981
|
wrap
|
test
|
def wrap(text, indent=' '):
"""Wrap text to terminal width with default indentation"""
wrapper = textwrap.TextWrapper(
width=int(os.environ.get('COLUMNS', 80)),
|
python
|
{
"resource": ""
}
|
q276982
|
detect_devices
|
test
|
def detect_devices(soapy_args=''):
"""Returns detected SoapySDR devices"""
devices = simplesoapy.detect_devices(soapy_args, as_string=True)
text = []
text.append('Detected SoapySDR devices:')
if devices:
for i, d in enumerate(devices):
|
python
|
{
"resource": ""
}
|
q276983
|
PSD.set_center_freq
|
test
|
def set_center_freq(self, center_freq):
"""Set center frequency and clear averaged PSD data"""
psd_state = {
'repeats': 0,
|
python
|
{
"resource": ""
}
|
q276984
|
PSD.result
|
test
|
def result(self, psd_state):
"""Return freqs and averaged PSD for given center frequency"""
freq_array = numpy.fft.fftshift(psd_state['freq_array'])
pwr_array = numpy.fft.fftshift(psd_state['pwr_array'])
if self._crop_factor:
crop_bins_half = round((self._crop_factor * self._bins) / 2)
freq_array = freq_array[crop_bins_half:-crop_bins_half]
pwr_array = pwr_array[crop_bins_half:-crop_bins_half]
|
python
|
{
"resource": ""
}
|
q276985
|
PSD.wait_for_result
|
test
|
def wait_for_result(self, psd_state):
"""Wait for all PSD threads to finish and return result"""
if len(psd_state['futures']) > 1:
concurrent.futures.wait(psd_state['futures'])
|
python
|
{
"resource": ""
}
|
q276986
|
PSD.update
|
test
|
def update(self, psd_state, samples_array):
"""Compute PSD from samples and update average for given center frequency"""
freq_array, pwr_array = simplespectral.welch(samples_array, self._sample_rate, nperseg=self._bins,
window=self._fft_window, noverlap=self._fft_overlap_bins,
detrend=self._detrend)
if self._remove_dc:
|
python
|
{
"resource": ""
}
|
q276987
|
SoapyPowerBinFormat.read
|
test
|
def read(self, f):
"""Read data from file-like object"""
magic = f.read(len(self.magic))
if not magic:
return None
if magic != self.magic:
raise ValueError('Magic bytes
|
python
|
{
"resource": ""
}
|
q276988
|
SoapyPowerBinFormat.write
|
test
|
def write(self, f, time_start, time_stop, start, stop, step, samples, pwr_array):
"""Write data to file-like object"""
f.write(self.magic)
f.write(self.header_struct.pack(
|
python
|
{
"resource": ""
}
|
q276989
|
ThreadPoolExecutor.submit
|
test
|
def submit(self, fn, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Tracks the maximum work queue size reached in ThreadPoolExecutor.max_queue_size_reached.
|
python
|
{
"resource": ""
}
|
q276990
|
SoapyPower.time_to_repeats
|
test
|
def time_to_repeats(self, bins, integration_time):
"""Convert integration time to number of repeats"""
|
python
|
{
"resource": ""
}
|
q276991
|
SoapyPower.freq_plan
|
test
|
def freq_plan(self, min_freq, max_freq, bins, overlap=0, quiet=False):
"""Returns list of frequencies for frequency hopping"""
bin_size = self.bins_to_bin_size(bins)
bins_crop = round((1 - overlap) * bins)
sample_rate_crop = (1 - overlap) * self.device.sample_rate
freq_range = max_freq - min_freq
hopping = True if freq_range >= sample_rate_crop else False
hop_size = self.nearest_freq(sample_rate_crop, bin_size)
hops = math.ceil(freq_range / hop_size) if hopping else 1
min_center_freq = min_freq + (hop_size / 2) if hopping else min_freq + (freq_range / 2)
max_center_freq = min_center_freq + ((hops - 1) * hop_size)
freq_list = [min_center_freq + (i * hop_size) for i in range(hops)]
if not quiet:
logger.info('overlap: {:.5f}'.format(overlap))
logger.info('bin_size: {:.2f} Hz'.format(bin_size))
logger.info('bins: {}'.format(bins))
logger.info('bins (after crop): {}'.format(bins_crop))
logger.info('sample_rate: {:.3f} MHz'.format(self.device.sample_rate / 1e6))
logger.info('sample_rate (after crop): {:.3f} MHz'.format(sample_rate_crop / 1e6))
logger.info('freq_range: {:.3f} MHz'.format(freq_range / 1e6))
logger.info('hopping: {}'.format('YES' if hopping else 'NO'))
logger.info('hop_size: {:.3f} MHz'.format(hop_size / 1e6))
logger.info('hops: {}'.format(hops))
logger.info('min_center_freq: {:.3f} MHz'.format(min_center_freq / 1e6))
|
python
|
{
"resource": ""
}
|
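A worked instance of the plan arithmetic above, under assumed numbers (bins_to_bin_size and nearest_freq are taken to be sample_rate / bins and exact rounding, respectively):
import math

sample_rate = 2.56e6                 # assumed device sample rate, Hz
bins, overlap = 256, 0
min_freq, max_freq = 100e6, 110e6

bin_size = sample_rate / bins                    # 10 kHz
sample_rate_crop = (1 - overlap) * sample_rate   # 2.56 MHz
hop_size = sample_rate_crop                      # already a multiple of bin_size
hops = math.ceil((max_freq - min_freq) / hop_size)                     # 4
centers = [min_freq + hop_size / 2 + i * hop_size for i in range(hops)]
# -> centers at 101.28, 103.84, 106.40 and 108.96 MHz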
q276992
|
SoapyPower.create_buffer
|
test
|
def create_buffer(self, bins, repeats, base_buffer_size, max_buffer_size=0):
"""Create buffer for reading samples"""
samples = bins * repeats
buffer_repeats = 1
buffer_size = math.ceil(samples / base_buffer_size) * base_buffer_size
if not max_buffer_size:
# Max buffer size about 100 MB
max_buffer_size = (100 * 1024**2) / 8
if max_buffer_size > 0:
max_buffer_size = math.ceil(max_buffer_size / base_buffer_size) * base_buffer_size
if buffer_size > max_buffer_size:
logger.warning('Required buffer size ({}) will be shrunk to max_buffer_size ({})!'.format(
buffer_size, max_buffer_size
))
buffer_repeats = math.ceil(buffer_size / max_buffer_size)
buffer_size = max_buffer_size
logger.info('repeats: {}'.format(repeats))
logger.info('samples: {} (time: {:.5f} s)'.format(samples, samples / self.device.sample_rate))
if max_buffer_size > 0:
logger.info('max_buffer_size (samples): {} (repeats: {:.2f}, time: {:.5f} s)'.format(
|
python
|
{
"resource": ""
}
|
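A worked instance of the sizing above: 8 bytes per complex64 sample makes the default cap 100 MB, and oversized requests are split into repeated reads of a capped buffer:
import math

bins, repeats, base = 4096, 10000, 32768         # assumed inputs
samples = bins * repeats                         # 40_960_000
buffer_size = math.ceil(samples / base) * base   # 40_960_000 (already a multiple)
max_buffer = math.ceil((100 * 1024**2) / 8 / base) * base  # 13_107_200 samples
buffer_repeats = math.ceil(buffer_size / max_buffer)       # 4 reads
buffer_size = max_buffer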
q276993
|
SoapyPower.setup
|
test
|
def setup(self, bins, repeats, base_buffer_size=0, max_buffer_size=0, fft_window='hann',
fft_overlap=0.5, crop_factor=0, log_scale=True, remove_dc=False, detrend=None,
lnb_lo=0, tune_delay=0, reset_stream=False, max_threads=0, max_queue_size=0):
"""Prepare samples buffer and start streaming samples from device"""
if self.device.is_streaming:
self.device.stop_stream()
base_buffer = self.device.start_stream(buffer_size=base_buffer_size)
self._bins = bins
self._repeats = repeats
self._base_buffer_size = len(base_buffer)
self._max_buffer_size = max_buffer_size
self._buffer_repeats, self._buffer = self.create_buffer(
bins, repeats, self._base_buffer_size, self._max_buffer_size
)
|
python
|
{
"resource": ""
}
|
q276994
|
SoapyPower.stop
|
test
|
def stop(self):
"""Stop streaming samples from device and delete samples buffer"""
if not self.device.is_streaming:
return
self.device.stop_stream()
self._writer.close()
self._bins = None
self._repeats = None
self._base_buffer_size = None
|
python
|
{
"resource": ""
}
|
q276995
|
SoapyPower.psd
|
test
|
def psd(self, freq):
"""Tune to specified center frequency and compute Power Spectral Density"""
if not self.device.is_streaming:
raise RuntimeError('Streaming is not initialized, you must run setup() first!')
# Tune to new frequency in main thread
logger.debug(' Frequency hop: {:.2f} Hz'.format(freq))
t_freq = time.time()
if self.device.freq != freq:
# Deactivate streaming before tuning
if self._reset_stream:
self.device.device.deactivateStream(self.device.stream)
# Actually tune to new center frequency
self.device.freq = freq
# Reactivate streaming after tuning
if self._reset_stream:
self.device.device.activateStream(self.device.stream)
# Delay reading samples after tuning
if self._tune_delay:
t_delay = time.time()
while True:
|
python
|
{
"resource": ""
}
|
q276996
|
SoapyPower.sweep
|
test
|
def sweep(self, min_freq, max_freq, bins, repeats, runs=0, time_limit=0, overlap=0,
fft_window='hann', fft_overlap=0.5, crop=False, log_scale=True, remove_dc=False, detrend=None, lnb_lo=0,
tune_delay=0, reset_stream=False, base_buffer_size=0, max_buffer_size=0, max_threads=0, max_queue_size=0):
"""Sweep spectrum using frequency hopping"""
self.setup(
bins, repeats, base_buffer_size, max_buffer_size,
fft_window=fft_window, fft_overlap=fft_overlap, crop_factor=overlap if crop else 0,
log_scale=log_scale, remove_dc=remove_dc, detrend=detrend, lnb_lo=lnb_lo, tune_delay=tune_delay,
reset_stream=reset_stream, max_threads=max_threads, max_queue_size=max_queue_size
)
try:
freq_list = self.freq_plan(min_freq - lnb_lo, max_freq - lnb_lo, bins, overlap)
t_start = time.time()
run = 0
while not _shutdown and (runs == 0 or run < runs):
run += 1
t_run_start = time.time()
logger.debug('Run: {}'.format(run))
for freq in freq_list:
# Tune to new frequency, acquire samples and compute Power Spectral Density
psd_future, acq_time_start, acq_time_stop = self.psd(freq)
# Write PSD to stdout (in another thread)
self._writer.write_async(psd_future, acq_time_start, acq_time_stop,
len(self._buffer) * self._buffer_repeats)
if _shutdown:
break
# Write end of measurement marker (in another thread)
write_next_future = self._writer.write_next_async()
t_run = time.time()
logger.debug(' Total run time: {:.3f} s'.format(t_run - t_run_start))
# End measurement if time limit is exceeded
if time_limit and (time.time() - t_start) >= time_limit:
|
python
|
{
"resource": ""
}
|
q276997
|
SMBus._set_addr
|
test
|
def _set_addr(self, addr):
"""private helper method"""
if self._addr != addr:
ioctl(self._fd,
|
python
|
{
"resource": ""
}
|
q276998
|
run_cmake
|
test
|
def run_cmake(arg=""):
"""
Forcing to run cmake
"""
if ds.find_executable('cmake') is None:
print "CMake is required to build zql"
print "Please install cmake version >= 2.8 and re-run setup"
sys.exit(-1)
print "Configuring zql build with CMake.... "
cmake_args = arg
try:
build_dir = op.join(op.split(__file__)[0], 'build')
|
python
|
{
"resource": ""
}
|
q276999
|
Filter.filter
|
test
|
def filter(cls, datetimes, number, now=None, **options):
"""Return a set of datetimes, after filtering ``datetimes``.
The result will be the ``datetimes`` which are ``number`` of
units before ``now``, until ``now``, with approximately one
unit between each of them. The first datetime for any unit is
kept, later duplicates are removed.
If there are ``datetimes`` after ``now``, they will be
returned unfiltered.
"""
if not isinstance(number, int) or number < 0:
raise ValueError('Invalid number: %s' % number)
datetimes = tuple(datetimes)
# Sample the first datetime to see if it is timezone-aware
tzinfo = None
if datetimes and datetimes[0].tzinfo is not None:
tzinfo = UTC()
if now is None:
now = datetime.now(tzinfo)
if not hasattr(now, 'second'):
# now looks like a date, so convert it into a datetime
now = datetime.combine(now, time(23, 59, 59, 999999, tzinfo=tzinfo))
# Always keep
|
python
|
{
"resource": ""
}
|
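A hypothetical invocation, assuming a concrete Filter subclass called Days whose unit is one day (no such subclass is shown in the snippet):
from datetime import datetime, timedelta

now = datetime(2024, 1, 10)
stamps = [now - timedelta(hours=h) for h in range(0, 72, 6)]
kept = Days.filter(stamps, number=3, now=now)
# keeps roughly one datetime per day for the 3 days before `now`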