_id (stringlengths 2–7) | title (stringlengths 1–88) | partition (stringclasses, 3 values) | text (stringlengths 31–13.1k) | language (stringclasses, 1 value) | meta_information (dict) |
---|---|---|---|---|---|
q277000
|
Days.mask
|
test
|
def mask(cls, dt, **options):
"""
Return a datetime with the same value as ``dt``, to a
resolution of days.
|
python
|
{
"resource": ""
}
|
q277001
|
Weeks.mask
|
test
|
def mask(cls, dt, firstweekday=calendar.SATURDAY, **options):
"""
Return a datetime with the same value as ``dt``, to a
resolution of weeks.
``firstweekday`` determines when the week starts. It defaults
to Saturday.
"""
correction =
|
python
|
{
"resource": ""
}
|
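The truncated ``correction`` in ``Weeks.mask`` presumably counts days elapsed since the most recent ``firstweekday``; a minimal sketch under that assumption (the function name and structure are illustrative, not from the source):
import calendar
import datetime

def week_start(dt, firstweekday=calendar.SATURDAY):
    # days elapsed since the most recent `firstweekday`
    correction = (dt.weekday() - firstweekday) % 7
    start = dt.date() - datetime.timedelta(days=correction)
    return datetime.datetime.combine(start, datetime.time())

# 2024-01-31 is a Wednesday; the enclosing week starts on Saturday 2024-01-27
assert week_start(datetime.datetime(2024, 1, 31)).weekday() == calendar.SATURDAY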
q277002
|
to_keep
|
test
|
def to_keep(datetimes,
years=0, months=0, weeks=0, days=0,
hours=0, minutes=0, seconds=0,
firstweekday=SATURDAY, now=None):
"""
Return a set of datetimes that should be kept, out of ``datetimes``.
Keeps up to ``years``, ``months``, ``weeks``, ``days``,
``hours``, ``minutes``, and ``seconds`` in the past.
When keeping weeks, it prefers to keep ``firstweekday``, which
defaults to Saturday.
If ``now`` is None, it will base its calculations on
``datetime.datetime.now()``. Datetimes after this point will always be
kept.
"""
datetimes = set(datetimes)
return (filters.Years.filter(datetimes, number=years, now=now) |
filters.Months.filter(datetimes, number=months, now=now) |
filters.Weeks.filter(datetimes,
|
python
|
{
"resource": ""
}
|
q277003
|
to_delete
|
test
|
def to_delete(datetimes,
years=0, months=0, weeks=0, days=0,
hours=0, minutes=0, seconds=0,
firstweekday=SATURDAY, now=None):
"""
Return a set of datetimes that should be deleted, out of ``datetimes``.
See ``to_keep`` for a description
|
python
|
{
"resource": ""
}
|
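The truncated body of ``to_delete`` presumably mirrors ``dates_to_delete`` further down: a set difference against ``to_keep``. A sketch assuming the module's own ``to_keep`` and ``SATURDAY``:
def to_delete(datetimes,
              years=0, months=0, weeks=0, days=0,
              hours=0, minutes=0, seconds=0,
              firstweekday=SATURDAY, now=None):
    """Return a set of datetimes that should be deleted, out of ``datetimes``."""
    datetimes = set(datetimes)
    # everything not kept is a candidate for deletion
    return datetimes - to_keep(datetimes,
                               years=years, months=months, weeks=weeks,
                               days=days, hours=hours, minutes=minutes,
                               seconds=seconds, firstweekday=firstweekday,
                               now=now)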
q277004
|
dates_to_keep
|
test
|
def dates_to_keep(dates,
years=0, months=0, weeks=0, days=0, firstweekday=SATURDAY,
now=None):
"""
Return a set of dates that should be kept, out of ``dates``.
See ``to_keep`` for a description of arguments.
"""
datetimes = to_keep((datetime.combine(d, time()) for d in dates),
|
python
|
{
"resource": ""
}
|
q277005
|
dates_to_delete
|
test
|
def dates_to_delete(dates,
years=0, months=0, weeks=0, days=0, firstweekday=SATURDAY,
now=None):
"""
Return a set of dates that should be deleted, out of ``dates``.
See ``to_keep`` for a description of arguments.
"""
dates = set(dates)
return dates - dates_to_keep(dates,
|
python
|
{
"resource": ""
}
|
q277006
|
MCP23S17._get_spi_control_byte
|
test
|
def _get_spi_control_byte(self, read_write_cmd):
"""Returns an SPI control byte.
The MCP23S17 is a slave SPI device. The slave address contains
four fixed bits and three user-defined hardware address bits
(if enabled via IOCON.HAEN) (pins A2, A1 and A0) with the
read/write bit filling out the control byte::
+--------------------+
|0|1|0|0|A2|A1|A0|R/W|
+--------------------+
7 6 5 4 3 2 1 0
|
python
|
{
"resource": ""
}
|
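For concreteness, the control-byte layout diagrammed above can be reproduced with a few bit operations; a minimal sketch (the standalone function name is illustrative, not from the source):
def spi_control_byte(hardware_addr, read_write_cmd):
    # fixed bits 0100, then A2 A1 A0, then R/W
    return 0b01000000 | ((hardware_addr & 0b111) << 1) | (read_write_cmd & 1)

assert spi_control_byte(0b000, 1) == 0b01000001  # read (R/W=1), address 0
assert spi_control_byte(0b101, 0) == 0b01001010  # write (R/W=0), address 5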
q277007
|
MCP23S17.read_bit
|
test
|
def read_bit(self, bit_num, address):
"""Returns the bit specified from the address.
:param bit_num: The bit number to read from.
:type bit_num: int
:param address: The address to read from.
:type address: int
:returns: int -- the
|
python
|
{
"resource": ""
}
|
q277008
|
MCP23S17.write_bit
|
test
|
def write_bit(self, value, bit_num, address):
"""Writes the value given to the bit in the address specified.
:param value: The value to write.
:type value: int
:param bit_num: The bit number to write to.
:type bit_num: int
|
python
|
{
"resource": ""
}
|
q277009
|
get_bit_num
|
test
|
def get_bit_num(bit_pattern):
"""Returns the lowest bit num from a given bit pattern. Returns None if no
bits set.
:param bit_pattern: The bit pattern.
:type bit_pattern: int
:returns: int -- the bit number
:returns: None -- no bits set
>>> pifacecommon.core.get_bit_num(0)
None
>>> pifacecommon.core.get_bit_num(0b1)
0
>>> pifacecommon.core.get_bit_num(0b11000)
3
"""
|
python
|
{
"resource": ""
}
|
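A self-contained implementation consistent with the doctests above (a sketch, not necessarily the pifacecommon original):
def get_bit_num(bit_pattern):
    if bit_pattern == 0:
        return None
    # n & -n isolates the lowest set bit; bit_length() - 1 is its index
    return (bit_pattern & -bit_pattern).bit_length() - 1

assert get_bit_num(0) is None
assert get_bit_num(0b1) == 0
assert get_bit_num(0b11000) == 3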
q277010
|
watch_port_events
|
test
|
def watch_port_events(port, chip, pin_function_maps, event_queue,
return_after_kbdint=False):
"""Waits for a port event. When a port event occurs it is placed onto the
event queue.
:param port: The port we are waiting for interrupts on (GPIOA/GPIOB).
:type port: int
:param chip: The chip we are waiting for interrupts on.
:type chip: :class:`pifacecommon.mcp23s17.MCP23S17`
:param pin_function_maps: A list of classes that have inherited from
:class:`FunctionMap`\ s describing what to do with events.
:type pin_function_maps: list
:param event_queue: A queue to put events on.
:type event_queue: :py:class:`multiprocessing.Queue`
"""
# set up epoll
gpio25 = open(GPIO_INTERRUPT_DEVICE_VALUE, 'r') # change to use 'with'?
epoll = select.epoll()
epoll.register(gpio25, select.EPOLLIN | select.EPOLLET)
while True:
# wait here until input
try:
events = epoll.poll()
except KeyboardInterrupt as e:
if return_after_kbdint:
return
else:
raise e
except IOError as e:
|
python
|
{
"resource": ""
}
|
q277011
|
handle_events
|
test
|
def handle_events(
function_maps, event_queue, event_matches_function_map,
terminate_signal):
"""Waits for events on the event queue and calls the registered functions.
:param function_maps: A list of classes that have inherited from
:class:`FunctionMap`\ s describing what to do with events.
:type function_maps: list
:param event_queue: A queue to put events on.
:type event_queue: :py:class:`multiprocessing.Queue`
:param event_matches_function_map: A function that determines if the given
event and :class:`FunctionMap` match.
:type event_matches_function_map: function
:param terminate_signal: The signal that, when placed on the event queue,
causes this function to exit.
"""
while True:
# print("HANDLE: Waiting for events!")
event = event_queue.get()
|
python
|
{
"resource": ""
}
|
q277012
|
bring_gpio_interrupt_into_userspace
|
test
|
def bring_gpio_interrupt_into_userspace(): # activate gpio interrupt
"""Bring the interrupt pin on the GPIO into Linux userspace."""
try:
# is it already there?
with open(GPIO_INTERRUPT_DEVICE_VALUE):
return
except IOError:
# no, bring it into userspace
|
python
|
{
"resource": ""
}
|
q277013
|
set_gpio_interrupt_edge
|
test
|
def set_gpio_interrupt_edge(edge='falling'):
"""Set the interrupt edge on the userspace GPIO pin.
:param edge: The interrupt edge ('none', 'falling', 'rising').
:type edge: string
"""
# we're only interested in the falling edge (1 -> 0)
start_time = time.time()
time_limit = start_time + FILE_IO_TIMEOUT
while time.time() < time_limit:
|
python
|
{
"resource": ""
}
|
q277014
|
wait_until_file_exists
|
test
|
def wait_until_file_exists(filename):
"""Wait until a file exists.
:param filename: The name of the file to wait for.
:type filename: string
"""
start_time = time.time()
time_limit = start_time + FILE_IO_TIMEOUT
while time.time() < time_limit:
try:
|
python
|
{
"resource": ""
}
|
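A plausible completion of the polling loop, following the ``FILE_IO_TIMEOUT`` pattern seen in the previous snippet; the poll interval, the stand-in constant value, and the final exception are assumptions (the module raises its own ``Timeout``):
import time

FILE_IO_TIMEOUT = 5  # stand-in value; the module defines its own constant

def wait_until_file_exists(filename):
    time_limit = time.time() + FILE_IO_TIMEOUT
    while time.time() < time_limit:
        try:
            with open(filename):
                return
        except IOError:
            time.sleep(0.05)  # assumed poll interval
    raise RuntimeError('Timed out waiting for %s.' % filename)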
q277015
|
PortEventListener.register
|
test
|
def register(self, pin_num, direction, callback,
settle_time=DEFAULT_SETTLE_TIME):
"""Registers a pin number and direction to a callback function.
:param pin_num: The pin number.
:type pin_num: int
:param direction: The event direction
|
python
|
{
"resource": ""
}
|
q277016
|
PortEventListener.deregister
|
test
|
def deregister(self, pin_num=None, direction=None):
"""De-registers callback functions
:param pin_num: The pin number. If None then all functions are de-registered
:type pin_num: int
:param direction: The event direction. If None then all functions for the
given pin are de-registered
:type direction:int
"""
to_delete = []
for i, function_map in enumerate(self.pin_function_maps):
if (pin_num is None
|
python
|
{
"resource": ""
}
|
q277017
|
GPIOInterruptDevice.gpio_interrupts_enable
|
test
|
def gpio_interrupts_enable(self):
"""Enables GPIO interrupts."""
try:
bring_gpio_interrupt_into_userspace()
set_gpio_interrupt_edge()
except Timeout as e:
raise InterruptEnableException(
|
python
|
{
"resource": ""
}
|
q277018
|
SPIDevice.spisend
|
test
|
def spisend(self, bytes_to_send):
"""Sends bytes via the SPI bus.
:param bytes_to_send: The bytes to send on the SPI device.
:type bytes_to_send: bytes
:returns: bytes -- returned bytes from SPI device
:raises: InitError
"""
# make some buffer space to store reading/writing
wbuffer = ctypes.create_string_buffer(bytes_to_send,
len(bytes_to_send))
rbuffer = ctypes.create_string_buffer(len(bytes_to_send))
# create the spi transfer struct
transfer = spi_ioc_transfer(
|
python
|
{
"resource": ""
}
|
q277019
|
TabHolder.render
|
test
|
def render(self, form, form_style, context, template_pack=TEMPLATE_PACK):
"""
Re-implement almost the same code from crispy_forms but passing
``form`` instance to item ``render_link`` method.
"""
links, content = '', ''
# accordion group needs the parent div id to set `data-parent` (I don't
# know why). This needs to be a unique id
if not self.css_id:
self.css_id = "-".join(["tabsholder",
|
python
|
{
"resource": ""
}
|
q277020
|
TabItem.has_errors
|
test
|
def has_errors(self, form):
"""
Find tab fields listed as invalid
|
python
|
{
"resource": ""
}
|
q277021
|
TabItem.render_link
|
test
|
def render_link(self, form, template_pack=TEMPLATE_PACK, **kwargs):
"""
Render the link for the tab-pane. It must be called after render so
``css_class`` is updated with ``active`` class name if needed.
|
python
|
{
"resource": ""
}
|
q277022
|
_extract_version
|
test
|
def _extract_version(package_name):
"""
Get package version from installed distribution or configuration file if not
installed
"""
try:
|
python
|
{
"resource": ""
}
|
q277023
|
FormContainersMixin.get_form_kwargs
|
test
|
def get_form_kwargs(self):
"""
Pass template pack argument
"""
kwargs = super(FormContainersMixin, self).get_form_kwargs()
kwargs.update({
|
python
|
{
"resource": ""
}
|
q277024
|
OpenLoad._check_status
|
test
|
def _check_status(cls, response_json):
"""Check the status of the incoming response, raise exception if status is not 200.
Args:
response_json (dict): results of the response of the GET request.
Returns:
None
"""
status = response_json['status']
msg = response_json['msg']
if status == 400:
raise BadRequestException(msg)
elif status == 403:
raise PermissionDeniedException(msg)
elif status == 404:
|
python
|
{
"resource": ""
}
|
q277025
|
OpenLoad._get
|
test
|
def _get(self, url, params=None):
"""Used by every other method, it makes a GET request with the given params.
Args:
url (str): relative path of a specific service (account_info, ...).
params (:obj:`dict`, optional): contains parameters to be sent in the GET request.
Returns:
dict: results of the response of the GET request.
"""
if not params:
|
python
|
{
"resource": ""
}
|
q277026
|
OpenLoad.get_download_link
|
test
|
def get_download_link(self, file_id, ticket, captcha_response=None):
"""Requests direct download link for requested file,
this method makes use of the response of prepare_download; prepare_download must be called first.
Args:
file_id (str): id of the file to be downloaded.
ticket (str): preparation ticket is found in prepare_download response,\
this is why we need to call prepare_download before get_download_link.
captcha_response (:obj:`str`, optional): sometimes prepare_download returns a captcha \
url to be solved first; this is the solution of that captcha.
Returns:
dict: dictionary containing (file info, download url, ...). ::
{
"name": "The quick brown fox.txt",
|
python
|
{
"resource": ""
}
|
q277027
|
OpenLoad.upload_link
|
test
|
def upload_link(self, folder_id=None, sha1=None, httponly=False):
"""Makes a request to prepare for file upload.
Note:
If folder_id is not provided, it will make an upload link to the ``Home`` folder.
Args:
folder_id (:obj:`str`, optional): folder-ID to upload to.
sha1 (:obj:`str`, optional): expected sha1. If the sha1 of the uploaded file doesn't match this value, the upload fails.
httponly (:obj:`bool`, optional): If this is set to true, use only http upload links.
Returns:
dict: dictionary containing (url: will be used in actual upload, valid_until). ::
|
python
|
{
"resource": ""
}
|
q277028
|
OpenLoad.upload_file
|
test
|
def upload_file(self, file_path, folder_id=None, sha1=None, httponly=False):
"""Calls upload_link request to get valid url, then it makes a post request with given file to be uploaded.
No need to call upload_link explicitly since upload_file calls it.
Note:
If folder_id is not provided, the file will be uploaded to ``Home`` folder.
Args:
file_path (str): full path of the file to be uploaded.
folder_id (:obj:`str`, optional): folder-ID to upload to.
sha1 (:obj:`str`, optional): expected sha1. If the sha1 of the uploaded file doesn't match this value, the upload fails.
httponly (:obj:`bool`, optional): If this is set to true, use only http upload links.
Returns:
dict: dictionary containing uploaded file info. ::
{
"content_type": "application/zip",
"id": "0yiQTPzi4Y4",
"name": 'favicons.zip',
|
python
|
{
"resource": ""
}
|
q277029
|
OpenLoad.remote_upload
|
test
|
def remote_upload(self, remote_url, folder_id=None, headers=None):
"""Used to make a remote file upload to openload.co
Note:
If folder_id is not provided, the file will be uploaded to ``Home`` folder.
Args:
remote_url (str): direct link of file to be remotely downloaded.
folder_id (:obj:`str`, optional): folder-ID to upload to.
headers (:obj:`dict`, optional): additional HTTP headers (e.g. Cookies or HTTP Basic-Auth)
Returns:
dict: dictionary containing ("id": uploaded file id, "folderid"). ::
|
python
|
{
"resource": ""
}
|
q277030
|
OpenLoad.remote_upload_status
|
test
|
def remote_upload_status(self, limit=None, remote_upload_id=None):
"""Checks a remote file upload to status.
Args:
limit (:obj:`int`, optional): Maximum number of results (Default: 5, Maximum: 100).
remote_upload_id (:obj:`str`, optional): Remote Upload ID.
Returns:
dict: dictionary containing all remote uploads, each dictionary element is a dictionary. ::
{
"24": {
"id": "24",
"remoteurl": "http://proof.ovh.net/files/100Mio.dat",
"status": "new",
"folderid": "4248",
|
python
|
{
"resource": ""
}
|
q277031
|
OpenLoad.list_folder
|
test
|
def list_folder(self, folder_id=None):
"""Request a list of files and folders in specified folder.
Note:
If folder_id is not provided, the ``Home`` folder will be listed.
Args:
folder_id (:obj:`str`, optional): id of the folder to be listed.
Returns:
dict: dictionary containing only two keys ("folders", "files"), \
each key represents a list of dictionaries. ::
{
"folders": [
{
"id": "5144",
"name": ".videothumb"
},
{
"id": "5792",
"name": ".subtitles"
},
...
],
"files": [
{
"name": "big_buck_bunny.mp4.mp4",
"sha1": "c6531f5ce9669d6547023d92aea4805b7c45d133",
"folderid": "4258",
"upload_at": "1419791256",
|
python
|
{
"resource": ""
}
|
q277032
|
OpenLoad.running_conversions
|
test
|
def running_conversions(self, folder_id=None):
"""Shows running file converts by folder
Note:
If folder_id is not provided, ``Home`` folder will be used.
Args:
folder_id (:obj:`str`, optional): id of the folder to list conversions of files exist in it.
Returns:
list: list of dictionaries, each dictionary represents a file conversion info. ::
[
{
"name": "Geysir.AVI",
"id": "3565411",
"status": "pending",
"last_update": "2015-08-23 19:41:40",
|
python
|
{
"resource": ""
}
|
q277033
|
calc_humidity
|
test
|
def calc_humidity(temp, dewpoint):
'''
calculates the humidity via the formula from weatherwise.org
return the relative humidity
'''
t = fahrenheit_to_celsius(temp)
td
|
python
|
{
"resource": ""
}
|
q277034
|
calc_dewpoint
|
test
|
def calc_dewpoint(temp, hum):
'''
calculates the dewpoint via the formula from weatherwise.org
return the dewpoint in degrees F.
'''
c = fahrenheit_to_celsius(temp)
x = 1 - 0.01 * hum
dewpoint = (14.55 + 0.114 * c) * x
dewpoint = dewpoint + ((2.5 + 0.007
|
python
|
{
"resource": ""
}
|
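The weatherwise.org approximation that the truncated body appears to implement computes a dewpoint depression in Celsius, then converts back to Fahrenheit. A hedged reconstruction (the polynomial terms beyond the lines shown are assumptions based on the published formula):
def fahrenheit_to_celsius(f):
    return (f - 32.0) * 5.0 / 9.0

def celsius_to_fahrenheit(c):
    return c * 9.0 / 5.0 + 32.0

def calc_dewpoint(temp, hum):
    '''dewpoint in degrees F (sketch of the weatherwise.org polynomial)'''
    c = fahrenheit_to_celsius(temp)
    x = 1 - 0.01 * hum
    depression = (14.55 + 0.114 * c) * x
    depression += ((2.5 + 0.007 * c) * x) ** 3
    depression += (15.9 + 0.117 * c) * x ** 14
    return celsius_to_fahrenheit(c - depression)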
q277035
|
HttpPublisher.publish
|
test
|
def publish(self):
'''
Perform HTTP session to transmit defined
|
python
|
{
"resource": ""
}
|
q277036
|
VProCRC.get
|
test
|
def get(data):
'''
return CRC calc value from raw serial data
'''
crc
|
python
|
{
"resource": ""
}
|
q277037
|
VProCRC.verify
|
test
|
def verify(data):
'''
perform CRC check on raw serial data, return true if valid.
a valid CRC == 0.
'''
if len(data) == 0:
return False
crc =
|
python
|
{
"resource": ""
}
|
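Davis consoles use CRC-CCITT (polynomial 0x1021, initial value 0); appending a frame's big-endian CRC makes the CRC of the whole buffer zero, which is the "a valid CRC == 0" property ``verify`` checks. A bitwise sketch (the original most likely uses a lookup table):
import struct

def crc_ccitt(data, crc=0):
    for byte in bytearray(data):
        crc ^= byte << 8
        for _ in range(8):
            if crc & 0x8000:
                crc = ((crc << 1) ^ 0x1021) & 0xFFFF
            else:
                crc = (crc << 1) & 0xFFFF
    return crc

frame = b'\x01\x02\x03'
tail = struct.pack('>H', crc_ccitt(frame))  # CRC appended big-endian, as in _dmpaft_cmd
assert crc_ccitt(frame + tail) == 0         # "a valid CRC == 0"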
q277038
|
LoopStruct._unpack_storm_date
|
test
|
def _unpack_storm_date(date):
'''
given a packed storm date field, unpack and return 'YYYY-MM-DD' string.
'''
year = (date & 0x7f) + 2000 # 7 bits
day = (date >> 7)
|
python
|
{
"resource": ""
}
|
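A plausible completion, assuming the Davis storm-date layout (month in bits 15-12, day in bits 11-7, year offset from 2000 in bits 6-0); the day and month masks are assumptions consistent with the first line shown:
def _unpack_storm_date(date):
    '''given a packed storm date field, unpack and return 'YYYY-MM-DD' string.'''
    year = (date & 0x7f) + 2000   # 7 bits
    day = (date >> 7) & 0x1f      # 5 bits (mask assumed)
    month = (date >> 12) & 0x0f   # 4 bits (mask assumed)
    return '%04d-%02d-%02d' % (year, month, day)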
q277039
|
VantagePro._use_rev_b_archive
|
test
|
def _use_rev_b_archive(self, records, offset):
'''
return True if weather station returns Rev.B archives
'''
# if pre-determined, return result
if type(self._ARCHIVE_REV_B) is bool:
return self._ARCHIVE_REV_B
# assume Rev.B and check 'RecType' field
data = ArchiveBStruct.unpack_from(records, offset)
if
|
python
|
{
"resource": ""
}
|
q277040
|
VantagePro._wakeup
|
test
|
def _wakeup(self):
'''
issue wakeup command to device to take out of standby mode.
'''
log.info("send: WAKEUP")
for i in xrange(3):
self.port.write('\n') # wakeup device
ack = self.port.read(len(self.WAKE_ACK)) # read wakeup string
|
python
|
{
"resource": ""
}
|
q277041
|
VantagePro._cmd
|
test
|
def _cmd(self, cmd, *args, **kw):
'''
write a single command, with variable number of arguments. after the
command, the device must return ACK
'''
ok = kw.setdefault('ok', False)
self._wakeup()
if args:
cmd = "%s %s" % (cmd, ' '.join(str(a) for a in args))
for i in xrange(3):
log.info("send: " + cmd)
self.port.write(cmd + '\n')
if ok:
ack = self.port.read(len(self.OK)) # read OK
|
python
|
{
"resource": ""
}
|
q277042
|
VantagePro._dmpaft_cmd
|
test
|
def _dmpaft_cmd(self, time_fields):
'''
issue a command to read the archive records after a known time stamp.
'''
records = []
# convert time stamp fields to buffer
tbuf = struct.pack('2H', *time_fields)
# 1. send 'DMPAFT' cmd
self._cmd('DMPAFT')
# 2. send time stamp + crc
crc = VProCRC.get(tbuf)
crc = struct.pack('>H', crc) # crc in big-endian format
log_raw('send', tbuf + crc)
self.port.write(tbuf + crc) # send time stamp + crc
ack = self.port.read(len(self.ACK)) # read ACK
log_raw('read', ack)
if ack != self.ACK: return # if bad ack, return
# 3. read pre-amble data
raw = self.port.read(DmpStruct.size)
log_raw('read', raw)
if not VProCRC.verify(raw): # check CRC value
log_raw('send ESC', self.ESC)
self.port.write(self.ESC) # if bad, escape and abort
return
log_raw('send ACK', self.ACK)
self.port.write(self.ACK) # send ACK
# 4. loop
|
python
|
{
"resource": ""
}
|
q277043
|
VantagePro._get_new_archive_fields
|
test
|
def _get_new_archive_fields(self):
'''
returns a dictionary of fields from the newest archive record in the
device. return None when no records are new.
'''
for i in xrange(3):
records = self._dmpaft_cmd(self._archive_time)
if records is not None: break
time.sleep(1)
if records is None:
raise NoDeviceException('Can not access weather station')
# find the newest record
new_rec =
|
python
|
{
"resource": ""
}
|
q277044
|
VantagePro.parse
|
test
|
def parse(self):
'''
read and parse a set of data read from the console. after the
data is parsed it is available in the fields variable.
'''
fields = self._get_loop_fields()
fields['Archive'] = self._get_new_archive_fields()
|
python
|
{
"resource": ""
}
|
q277045
|
weather_update
|
test
|
def weather_update(station, pub_sites, interval):
'''
main execution loop. query weather data and post to online service.
'''
station.parse() # read weather data
# sanity check weather data
if station.fields['TempOut'] > 200:
raise NoSensorException(
'Out of range temperature value: %.1f, check sensors' %
(station.fields['TempOut'],))
gust, gust_dir = WindGust.get( station, interval )
# upload data in the following order:
for ps in pub_sites:
try: # try block necessary to attempt every publisher
ps.set(
pressure = station.fields['Pressure'],
dewpoint = station.fields['DewPoint'],
humidity = station.fields['HumOut'],
tempf = station.fields['TempOut'],
rainin = station.fields['RainRate'],
rainday
|
python
|
{
"resource": ""
}
|
q277046
|
init_log
|
test
|
def init_log( quiet, debug ):
'''
setup system logging to desired verbosity.
'''
from logging.handlers import SysLogHandler
fmt = logging.Formatter( os.path.basename(sys.argv[0]) +
".%(name)s %(levelname)s - %(message)s")
|
python
|
{
"resource": ""
}
|
q277047
|
get_pub_services
|
test
|
def get_pub_services(opts):
'''
use values in opts data to generate instances of publication services.
'''
sites = []
for p_key in vars(opts).keys():
args = getattr(opts,p_key)
if p_key in PUB_SERVICES and args:
if isinstance(args,tuple):
|
python
|
{
"resource": ""
}
|
q277048
|
WindGust.get
|
test
|
def get( self, station, interval ):
'''
return gust data if above the threshold value and the current time is
inside the reporting window period
'''
rec = station.fields['Archive']
# process new data
if rec:
threshold = station.fields['WindSpeed10Min'] + GUST_MPH_MIN
if rec['WindHi'] >= threshold:
self.value = (rec['WindHi'],rec['WindHiDir'])
self.count = GUST_TTL * 60 / interval
else:
self.value = self.NO_VALUE
|
python
|
{
"resource": ""
}
|
q277049
|
Wunderground.set
|
test
|
def set( self, pressure='NA', dewpoint='NA', humidity='NA', tempf='NA',
rainin='NA', rainday='NA', dateutc='NA', windgust='NA',
windgustdir='NA', windspeed='NA', winddir='NA',
clouds='NA', weather='NA', *args, **kw):
'''
Useful for defining weather data published to the server. Parameters
not set will be reset and not sent to server. Unknown keyword args will
be silently ignored, so be careful. This is necessary for publishers
that support more fields than others.
'''
# see: http://wiki.wunderground.com/index.php/PWS_-_Upload_Protocol
# unused, but valid, parameters are:
# windspdmph_avg2m, winddir_avg2m, windgustmph_10m, windgusdir_10m
# soiltempf, soilmoisture, leafwetness, solarradiation, UV
# indoortempf, indoorhumidity
self.args.update( {
|
python
|
{
"resource": ""
}
|
q277050
|
TextFile.set
|
test
|
def set( self, **kw):
'''
Store keyword args to be written to output file.
'''
|
python
|
{
"resource": ""
}
|
q277051
|
TextFile.publish
|
test
|
def publish(self):
'''
Write output file.
'''
with open( self.file_name, 'w') as fh:
for k,v in self.args.iteritems():
buf = StringIO.StringIO()
buf.write(k)
|
python
|
{
"resource": ""
}
|
q277052
|
wants_request
|
test
|
def wants_request(f):
"""
Helper decorator for transitioning to user-only requirements; this aids
in situations where the request may be marked optional and causes an
incorrect flow into user-only requirements.
This decorator causes the requirement to look like a user-only requirement
but passes the current request context internally to the requirement.
This decorator is intended only to assist during a
|
python
|
{
"resource": ""
}
|
q277053
|
Allows.init_app
|
test
|
def init_app(self, app):
"""
Initializes the Flask-Allows object against the provided application
"""
if not hasattr(app, "extensions"): # pragma: no cover
app.extensions = {}
app.extensions["allows"] = self
@app.before_request
def start_context(*a, **k):
self.overrides.push(Override())
|
python
|
{
"resource": ""
}
|
q277054
|
Allows.fulfill
|
test
|
def fulfill(self, requirements, identity=None):
"""
Checks that the provided or current identity meets each requirement
passed to this method.
This method takes into account both additional and overridden
requirements, with overridden requirements taking precedence::
allows.additional.push(Additional(Has('foo')))
allows.overrides.push(Override(Has('foo')))
allows.fulfill([], user_without_foo) # returns True
:param requirements: The requirements to check the identity against.
:param identity: Optional. Identity to use in place of the current
identity.
"""
identity =
|
python
|
{
"resource": ""
}
|
q277055
|
OverrideManager.push
|
test
|
def push(self, override, use_parent=False):
"""
Binds an override to the current context, optionally using the
current overrides in conjunction with this override.
If ``use_parent`` is true, a new override is created from the
parent and child overrides rather than manipulating either
directly.
|
python
|
{
"resource": ""
}
|
q277056
|
OverrideManager.pop
|
test
|
def pop(self):
"""
Pops the latest override context.
If the override context was pushed by a different override manager,
a ``RuntimeError`` is raised.
"""
rv = _override_ctx_stack.pop()
if rv is None or rv[0]
|
python
|
{
"resource": ""
}
|
q277057
|
OverrideManager.override
|
test
|
def override(self, override, use_parent=False):
"""
Allows temporarily pushing an override context, yielding the new context
into the following block.
|
python
|
{
"resource": ""
}
|
q277058
|
AdditionalManager.push
|
test
|
def push(self, additional, use_parent=False):
"""
Binds an additional to the current context, optionally using the
current additionals in conjunction with this additional.
If ``use_parent`` is true, a new additional is created from the
parent and child additionals rather than manipulating either
directly.
|
python
|
{
"resource": ""
}
|
q277059
|
AdditionalManager.pop
|
test
|
def pop(self):
"""
Pops the latest additional context.
If the additional context was pushed by a different additional manager,
a ``RuntimeError`` is raised.
"""
rv = _additional_ctx_stack.pop()
if rv is None or rv[0]
|
python
|
{
"resource": ""
}
|
q277060
|
AdditionalManager.additional
|
test
|
def additional(self, additional, use_parent=False):
"""
Allows temporarily pushing an additional context, yielding the new context
into the following block.
|
python
|
{
"resource": ""
}
|
q277061
|
unduplicate_field_names
|
test
|
def unduplicate_field_names(field_names):
"""Append a number to duplicate field names to make them unique. """
res = []
for k in field_names:
if k in res:
i = 1
|
python
|
{
"resource": ""
}
|
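One plausible completion of the truncated helper; the underscore-number suffix scheme is an assumption, only the intent (appending a number to duplicates) comes from the docstring:
def unduplicate_field_names(field_names):
    """Append a number to duplicate field names to make them unique. """
    res = []
    for k in field_names:
        if k in res:
            i = 1
            # find the first unused suffix for this name
            while k + '_' + str(i) in res:
                i += 1
            k += '_' + str(i)
        res.append(k)
    return res

assert unduplicate_field_names(['n', 'name', 'n', 'n']) == ['n', 'name', 'n_1', 'n_2']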
q277062
|
interpret_stats
|
test
|
def interpret_stats(results):
"""Generates the string to be shown as updates after the execution of a
Cypher query
:param results: ``ResultSet`` with the raw results of the execution of
the Cypher query
"""
stats = results.stats
contains_updates = stats.pop("contains_updates", False) if stats else False
if not contains_updates:
result = '{} rows affected.'.format(len(results))
else:
|
python
|
{
"resource": ""
}
|
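The truncated else-branch presumably renders the non-zero counters in ``stats``; a hedged sketch of that rendering (key names like ``nodes_created`` follow Neo4j's stats vocabulary, but the exact formatting is an assumption):
def format_stats(stats):
    # e.g. {'nodes_created': 2, 'properties_set': 0} -> '2 nodes created.'
    return '\n'.join('{} {}.'.format(value, name.replace('_', ' '))
                     for name, value in stats.items() if value)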
q277063
|
extract_params_from_query
|
test
|
def extract_params_from_query(query, user_ns):
"""Generates a dictionary with safe keys and values to pass onto Neo4j
:param query: string with the Cypher query to execute
:param user_ns: dictionary with the IPython user space
"""
# TODO: Optimize this function
params = {}
|
python
|
{
"resource": ""
}
|
q277064
|
run
|
test
|
def run(query, params=None, config=None, conn=None, **kwargs):
"""Executes a query and depending on the options of the extensions will
return raw data, a ``ResultSet``, a Pandas ``DataFrame`` or a
NetworkX graph.
:param query: string with the Cypher query
:param params: dictionary with parameters for the query (default=``None``)
:param config: Configurable or NamedTuple with extra IPython configuration
details. If ``None``, a new object will be created
(defaults=``None``)
:param conn: connection dictionary or string for the Neo4j backend.
If ``None``, a new connection will be created
(default=``None``)
:param **kwargs: Any of the cell configuration options.
"""
|
python
|
{
"resource": ""
}
|
q277065
|
ResultSet.get_dataframe
|
test
|
def get_dataframe(self):
"""Returns a Pandas DataFrame instance built from the result set."""
if pd is None:
raise ImportError("Try installing
|
python
|
{
"resource": ""
}
|
q277066
|
ResultSet.get_graph
|
test
|
def get_graph(self, directed=True):
"""Returns a NetworkX multi-graph instance built from the result set
:param directed: boolean, optional (default=`True`).
Whether to create a directed or an undirected graph.
"""
if nx is None:
raise ImportError("Try installing NetworkX first.")
if directed:
graph = nx.MultiDiGraph()
else:
graph = nx.MultiGraph()
for item in self._results.graph:
for node in item['nodes']:
properties = copy.deepcopy(node['properties'])
properties['labels'] = node['labels']
|
python
|
{
"resource": ""
}
|
q277067
|
ResultSet.pie
|
test
|
def pie(self, key_word_sep=" ", title=None, **kwargs):
"""Generates a pylab pie chart from the result set.
``matplotlib`` must be installed, and in an
IPython Notebook, inlining must be on::
%%matplotlib inline
Values (pie slice sizes) are taken from the
rightmost column (numerical values required).
All other columns are used to label the pie slices.
:param key_word_sep: string used to separate column values
from each other in pie labels
:param title: plot title, defaults to name of value column
|
python
|
{
"resource": ""
}
|
q277068
|
ResultSet.plot
|
test
|
def plot(self, title=None, **kwargs):
"""Generates a pylab plot from the result set.
``matplotlib`` must be installed, and in an
IPython Notebook, inlining must be on::
%%matplotlib inline
The first and last columns are taken as the X and Y
values. Any columns between are ignored.
:param title: plot title, defaults to names of Y value columns
Any additional keyword arguments will be passed
through to ``matplotlib.pylab.plot``.
"""
if not plt:
raise ImportError("Try installing matplotlib first.")
self.guess_plot_columns()
|
python
|
{
"resource": ""
}
|
q277069
|
ResultSet.bar
|
test
|
def bar(self, key_word_sep=" ", title=None, **kwargs):
"""Generates a pylab bar plot from the result set.
``matplotlib`` must be installed, and in an
IPython Notebook, inlining must be on::
%%matplotlib inline
The last quantitative column is taken as the Y values;
all other columns are combined to label the X axis.
:param title: plot title, defaults to names of Y value columns
:param key_word_sep: string used to separate column values
from each other in labels
Any additional keyword arguments will be passed
|
python
|
{
"resource": ""
}
|
q277070
|
ResultSet.csv
|
test
|
def csv(self, filename=None, **format_params):
"""Generates results in comma-separated form. Write to ``filename``
if given. Any other parameter will be passed on to ``csv.writer``.
:param filename: if given, the CSV will be written to filename.
Any additional keyword arguments will be passed
through to ``csv.writer``.
"""
if not self.pretty:
return None # no results
if filename:
outfile = open(filename, 'w')
|
python
|
{
"resource": ""
}
|
q277071
|
permission_required
|
test
|
def permission_required(perm, login_url=None, raise_exception=False):
"""
Re-implementation of the permission_required decorator, honors settings.
If ``DASHBOARD_REQUIRE_LOGIN`` is False, this decorator will always return
``True``, otherwise it will check for the permission as usual.
"""
def check_perms(user):
if not getattr(settings, 'DASHBOARD_REQUIRE_LOGIN',
app_settings.REQUIRE_LOGIN):
return True
# First check if the user has the permission (even anon users)
if user.has_perm(perm):
|
python
|
{
"resource": ""
}
|
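Django's stock decorator finishes this pattern by wrapping the predicate with ``user_passes_test``; a sketch of how the truncated tail likely continues, mirroring ``django.contrib.auth.decorators.permission_required`` (the ``DASHBOARD_REQUIRE_LOGIN`` short-circuit shown above is omitted here for brevity):
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied

def permission_required(perm, login_url=None, raise_exception=False):
    def check_perms(user):
        if user.has_perm(perm):
            return True
        if raise_exception:
            # raise instead of redirecting to login_url
            raise PermissionDenied
        return False
    return user_passes_test(check_perms, login_url=login_url)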
q277072
|
RenderWidgetMixin.get_context_data
|
test
|
def get_context_data(self, **kwargs):
"""
Adds ``is_rendered`` to the context and the widget's context data.
``is_rendered`` signals that the AJAX view has been called and that
we are displaying the full widget now. When ``is_rendered`` is not
found
|
python
|
{
"resource": ""
}
|
q277073
|
DashboardWidgetPool.get_widgets_sorted
|
test
|
def get_widgets_sorted(self):
"""Returns the widgets sorted by position."""
result = []
for widget_name, widget
|
python
|
{
"resource": ""
}
|
q277074
|
DashboardWidgetPool.get_widgets_that_need_update
|
test
|
def get_widgets_that_need_update(self):
"""
Returns all widgets that need an update.
This should be scheduled every minute via crontab.
|
python
|
{
"resource": ""
}
|
q277075
|
DashboardWidgetPool.register_widget
|
test
|
def register_widget(self, widget_cls, **widget_kwargs):
"""
Registers the given widget.
Widgets must inherit ``DashboardWidgetBase`` and you cannot register
the same widget twice.
:widget_cls: A class that inherits ``DashboardWidgetBase``.
"""
if not issubclass(widget_cls, DashboardWidgetBase):
raise ImproperlyConfigured(
'DashboardWidgets must be subclasses of DashboardWidgetBase,'
' {0} is not.'.format(widget_cls))
widget = widget_cls(**widget_kwargs)
|
python
|
{
"resource": ""
}
|
q277076
|
DashboardWidgetPool.unregister_widget
|
test
|
def unregister_widget(self, widget_cls):
"""Unregisters the given widget."""
if
|
python
|
{
"resource": ""
}
|
q277077
|
DashboardWidgetBase.get_last_update
|
test
|
def get_last_update(self):
"""Gets or creates the last update object for this widget."""
instance, created
|
python
|
{
"resource": ""
}
|
q277078
|
DashboardWidgetBase.get_setting
|
test
|
def get_setting(self, setting_name, default=None):
"""
Returns the setting for this widget from the database.
:setting_name: The name of the setting.
:default: Optional default value if the setting cannot be found.
"""
try:
|
python
|
{
"resource": ""
}
|
q277079
|
DashboardWidgetBase.save_setting
|
test
|
def save_setting(self, setting_name, value):
"""Saves the setting value into the database."""
setting = self.get_setting(setting_name)
if setting is None:
setting = models.DashboardWidgetSettings.objects.create(
widget_name=self.get_name(),
|
python
|
{
"resource": ""
}
|
q277080
|
DashboardWidgetBase.should_update
|
test
|
def should_update(self):
"""
Checks if an update is needed.
Checks against ``self.update_interval`` and this widget's
``DashboardWidgetLastUpdate`` instance if an update is overdue.
This should be called by
``DashboardWidgetPool.get_widgets_that_need_update()``, which in turn
should be called by an admin command which should be scheduled every
minute via crontab.
|
python
|
{
"resource": ""
}
|
q277081
|
ConstructSpark.array
|
test
|
def array(a, context=None, axis=(0,), dtype=None, npartitions=None):
"""
Create a spark bolt array from a local array.
Parameters
----------
a : array-like
An array, any object exposing the array interface, an
object whose __array__ method returns an array, or any
(nested) sequence.
context : SparkContext
A context running Spark. (see pyspark)
axis : tuple, optional, default=(0,)
Which axes to distribute the array along. The resulting
distributed object will use keys to represent these axes,
with the remaining axes represented by values.
dtype : data-type, optional, default=None
The desired data-type for the array. If None, will
be determined from the data. (see numpy)
npartitions : int
Number of partitions for parallelization.
Returns
-------
BoltArraySpark
"""
if dtype is None:
arry = asarray(a)
dtype = arry.dtype
else:
arry = asarray(a, dtype)
shape =
|
python
|
{
"resource": ""
}
|
q277082
|
ConstructSpark.ones
|
test
|
def ones(shape, context=None, axis=(0,), dtype=float64, npartitions=None):
"""
Create a spark bolt array of ones.
Parameters
----------
shape : tuple
The desired shape of the array.
context : SparkContext
A context running Spark. (see pyspark)
axis : tuple, optional, default=(0,)
Which axes to distribute the array along. The resulting
distributed object will use keys to represent these axes,
with the remaining axes represented by values.
dtype : data-type, optional, default=float64
The desired
|
python
|
{
"resource": ""
}
|
q277083
|
ConstructSpark.concatenate
|
test
|
def concatenate(arrays, axis=0):
"""
Join two bolt arrays together, at least one of which is in spark.
Parameters
----------
arrays : tuple
A pair of arrays. At least one must be a spark array,
the other can be a local bolt array, a local numpy array,
or an array-like.
axis : int, optional, default=0
The axis along which the arrays will be joined.
Returns
-------
BoltArraySpark
"""
if not isinstance(arrays, tuple):
raise ValueError("data type not understood")
if
|
python
|
{
"resource": ""
}
|
q277084
|
ConstructSpark._argcheck
|
test
|
def _argcheck(*args, **kwargs):
"""
Check that arguments are consistent with spark array construction.
Conditions are:
(1) a positional argument is a SparkContext
(2) keyword arg 'context' is a SparkContext
(3) an argument is a BoltArraySpark, or
(4) an argument is a nested list containing a BoltArraySpark
"""
try:
from pyspark import SparkContext
except ImportError:
return False
cond1 = any([isinstance(arg, SparkContext) for arg
|
python
|
{
"resource": ""
}
|
q277085
|
ConstructSpark._format_axes
|
test
|
def _format_axes(axes, shape):
"""
Format target axes given an array shape
"""
if isinstance(axes, int):
axes = (axes,)
elif isinstance(axes, list) or hasattr(axes, '__iter__'):
axes = tuple(axes)
if not isinstance(axes, tuple):
|
python
|
{
"resource": ""
}
|
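A plausible completion of the normalization above: after coercion, reject anything that is not a tuple of valid axis indices (the error messages are assumptions):
def _format_axes(axes, shape):
    """Format target axes given an array shape (sketch)."""
    if isinstance(axes, int):
        axes = (axes,)
    elif isinstance(axes, list) or hasattr(axes, '__iter__'):
        axes = tuple(axes)
    if not isinstance(axes, tuple):
        raise ValueError("axes must be an int, list, or tuple, got %s" % str(axes))
    if any(a < 0 or a >= len(shape) for a in axes):
        raise ValueError("axes %s out of bounds for shape %s" % (str(axes), str(shape)))
    return axes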
q277086
|
ConstructSpark._wrap
|
test
|
def _wrap(func, shape, context=None, axis=(0,), dtype=None, npartitions=None):
"""
Wrap an existing numpy constructor in a parallelized construction
"""
if isinstance(shape, int):
shape = (shape,)
key_shape, value_shape = get_kv_shape(shape, ConstructSpark._format_axes(axis, shape))
split = len(key_shape)
# make the keys
rdd =
|
python
|
{
"resource": ""
}
|
q277087
|
BoltArrayLocal._align
|
test
|
def _align(self, axes, key_shape=None):
"""
Align local bolt array so that axes for iteration are in the keys.
This operation is applied before most functional operators.
It ensures that the specified axes are valid, and might transpose/reshape
the underlying array so that the functional operators can be applied
over the correct records.
Parameters
----------
axes: tuple[int]
One or more axes that will be iterated over by a functional operator
Returns
|
python
|
{
"resource": ""
}
|
q277088
|
BoltArrayLocal.tospark
|
test
|
def tospark(self, sc, axis=0):
"""
Converts a BoltArrayLocal into a BoltArraySpark
Parameters
----------
sc : SparkContext
The SparkContext which will be used to create the BoltArraySpark
axis : tuple or int, optional, default=0
The axis (or
|
python
|
{
"resource": ""
}
|
q277089
|
BoltArrayLocal.tordd
|
test
|
def tordd(self, sc, axis=0):
"""
Converts a BoltArrayLocal into an RDD
Parameters
----------
sc : SparkContext
The SparkContext which will be used to create the BoltArraySpark
axis : tuple or int, optional, default=0
The axis (or axes) across which
|
python
|
{
"resource": ""
}
|
q277090
|
StackedArray.stack
|
test
|
def stack(self, size):
"""
Make an intermediate RDD where all records are combined into a
list of keys and larger ndarray along a new 0th dimension.
"""
def tostacks(partition):
keys = []
arrs = []
for key, arr in partition:
keys.append(key)
arrs.append(arr)
if size and 0 <= size <= len(keys):
|
python
|
{
"resource": ""
}
|
q277091
|
StackedArray.map
|
test
|
def map(self, func):
"""
Apply a function on each subarray.
Parameters
----------
func : function
This is applied to each value in the intermediate RDD.
Returns
-------
StackedArray
"""
vshape = self.shape[self.split:]
x = self._rdd.values().first()
if x.shape == vshape:
a, b = asarray([x]), asarray([x, x])
else:
a, b = x, concatenate((x, x))
try:
atest = func(a)
btest = func(b)
except Exception as e:
raise RuntimeError("Error evaluating function on test array, got error:\n %s" % e)
if not (isinstance(atest, ndarray) and isinstance(btest, ndarray)):
raise ValueError("Function must return ndarray")
# different shapes map to the same new shape
elif atest.shape == btest.shape:
if self._rekeyed is True:
# we've already rekeyed
rdd = self._rdd.map(lambda kv: (kv[0], func(kv[1])))
shape = (self.shape[0],) + atest.shape
else:
#
|
python
|
{
"resource": ""
}
|
q277092
|
ChunkedArray._chunk
|
test
|
def _chunk(self, size="150", axis=None, padding=None):
"""
Split values of distributed array into chunks.
Transforms an underlying pair RDD of (key, value) into
records of the form: (key, chunk id), (chunked value).
Here, chunk id is a tuple identifying the chunk and
chunked value is a subset of the data from each original value,
that has been divided along the specified dimensions.
Parameters
----------
size : str or tuple or int
If str, the average size (in KB) of the chunks in all value dimensions.
If int or tuple, an explicit specification of the number of chunks in
each value dimension.
axis : tuple, optional, default=None
One or more axes to estimate chunks for, if provided any
other axes will use one chunk.
padding: tuple or int, default = None
Number of elements per dimension that will overlap with the adjacent chunk.
If a tuple, specifies padding along each chunked dimension; if an int, the same
|
python
|
{
"resource": ""
}
|
q277093
|
ChunkedArray.map
|
test
|
def map(self, func, value_shape=None, dtype=None):
"""
Apply an array -> array function on each subarray.
The function can change the shape of the subarray, but only along
dimensions that are not chunked.
Parameters
----------
func : function
Function of a single subarray to apply
value_shape:
Known shape of chunking plan after the map
dtype: numpy.dtype, optional, default=None
Known dtype of values resulting from operation
Returns
-------
ChunkedArray
"""
if value_shape is None or dtype is None:
# try to compute the size of each mapped element by applying func to a random array
try:
mapped = func(random.randn(*self.plan).astype(self.dtype))
except Exception:
first = self._rdd.first()
if first:
# eval func on the first element
mapped = func(first[1])
if value_shape is None:
value_shape = mapped.shape
if dtype is None:
dtype = mapped.dtype
chunked_dims = where(self.plan != self.vshape)[0]
unchunked_dims = where(self.plan == self.vshape)[0]
# check that no dimensions are dropped
if len(value_shape) != len(self.plan):
raise NotImplementedError('map on ChunkedArray cannot drop dimensions')
|
python
|
{
"resource": ""
}
|
q277094
|
ChunkedArray.map_generic
|
test
|
def map_generic(self, func):
"""
Apply a generic array -> object function to each subarray.
The resulting object is a BoltArraySpark of dtype object where the
blocked dimensions are replaced with indices indicating block ID.
"""
def process_record(val):
|
python
|
{
"resource": ""
}
|
q277095
|
ChunkedArray.getplan
|
test
|
def getplan(self, size="150", axes=None, padding=None):
"""
Identify a plan for chunking values along each dimension.
Generates an ndarray with the size (in number of elements) of chunks
in each dimension. If provided, will estimate chunks for only a
subset of axes, leaving all others to the full size of the axis.
Parameters
----------
size : string or tuple
If str, the average size (in KB) of the chunks in all value dimensions.
If int/tuple, an explicit specification of the number of chunks in
each moving value dimension.
axes : tuple, optional, default=None
One or more axes to estimate chunks for, if provided any
other axes will use one chunk.
padding : tuple or int, optional, default=None
Size of overlapping padding between chunks in each dimension.
If tuple, specifies padding along each chunked dimension; if int,
all dimensions use same padding; if None, no padding
"""
from numpy import dtype as gettype
# initialize with all elements in one chunk
plan = self.vshape
# check for subset of axes
if axes is None:
if isinstance(size, str):
axes = arange(len(self.vshape))
else:
axes = arange(len(size))
else:
axes = asarray(axes, 'int')
# set padding
pad = array(len(self.vshape)*[0, ])
if padding is not None:
pad[axes] = padding
# set the plan
if isinstance(size, tuple):
plan[axes] = size
elif isinstance(size, str):
# convert from kilobytes
|
python
|
{
"resource": ""
}
|
q277096
|
ChunkedArray.removepad
|
test
|
def removepad(idx, value, number, padding, axes=None):
"""
Remove the padding from chunks.
Given a chunk and its corresponding index, use the plan and padding to remove any
padding from the chunk along the specified axes.
Parameters
----------
idx: tuple or array-like
The chunk index, indicating which chunk this is.
value: ndarray
The chunk that goes along with the index.
number: ndarray or array-like
|
python
|
{
"resource": ""
}
|
q277097
|
ChunkedArray.getnumber
|
test
|
def getnumber(plan, shape):
"""
Obtain number of chunks for the given dimensions and chunk sizes.
Given a plan for the number of chunks along each dimension,
calculate the number of chunks that this will lead to.
Parameters
|
python
|
{
"resource": ""
}
|
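The counting itself is a per-dimension ceiling division; a minimal sketch consistent with the docstring (assumed, not verified against the bolt source):
from math import ceil

def getnumber(plan, shape):
    # number of chunks along each dimension given chunk sizes in `plan`
    return [int(ceil(float(d) / size)) for size, d in zip(plan, shape)]

assert getnumber((5, 4), (10, 10)) == [2, 3]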
q277098
|
ChunkedArray.getslices
|
test
|
def getslices(plan, padding, shape):
"""
Obtain slices for the given dimensions, padding, and chunks.
Given a plan for the number of chunks along each dimension and the amount of padding,
calculate a list of slices required to generate those chunks.
Parameters
----------
plan: tuple or array-like
Size of chunks (in number of elements) along each dimensions.
Length must be equal to the number of dimensions.
padding: tuple or array-like
Size of overlap (in number of elements) between chunks along each dimension.
Length must be equal to the number of dimensions.
shape: tuple
Dimensions of axes to be chunked.
"""
slices = []
for size, pad, d in zip(plan, padding, shape):
nchunks = int(floor(d/size))
remainder = d % size
start = 0
dimslices = []
|
python
|
{
"resource": ""
}
|
q277099
|
ChunkedArray.getmask
|
test
|
def getmask(inds, n):
"""
Obtain a binary mask by setting a subset of entries to true.
Parameters
----------
inds : array-like
Which indices to set as true.
n : int
|
python
|
{
"resource": ""
}
|
|
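A minimal sketch of the mask construction the docstring describes (assumed, not verified against the bolt source):
from numpy import zeros

def getmask(inds, n):
    # boolean vector of length n, True at the given indices
    mask = zeros(n, dtype=bool)
    mask[inds] = True
    return mask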