_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q277600
|
NADReceiverTCP.power_off
|
test
|
def power_off(self):
    """Switch the receiver off if it is currently powered on."""
    # Sending the power-off command while the device is already off can
    # cause hangs, so only transmit when the reported state is on.
    if self.status()['power']:
        self._send(self.CMD_POWERSAVE + self.CMD_OFF)
|
python
|
{
"resource": ""
}
|
q277601
|
NADReceiverTCP.power_on
|
test
|
def power_on(self):
    """Switch the receiver on if it is currently powered off."""
    if not self.status()['power']:
        self._send(self.CMD_ON, read_reply=True)
        # Give the device a moment to come up before further commands.
        sleep(0.5)
|
python
|
{
"resource": ""
}
|
q277602
|
NADReceiverTCP.set_volume
|
test
|
def set_volume(self, volume):
    """Set the volume level; integers 0-200, others silently ignored."""
    if not 0 <= volume <= 200:
        return
    # The device protocol expects the level as a two-digit hex string.
    self._send(self.CMD_VOLUME + format(volume, "02x"))
|
python
|
{
"resource": ""
}
|
q277603
|
NADReceiverTCP.select_source
|
test
|
def select_source(self, source):
    """Select an input source by name from ``self.SOURCES``."""
    state = self.status()
    # The NAD7050 can hang if a source change is sent while powered off,
    # or if the requested source is already the active one — guard both.
    if not state['power'] or state['source'] == source:
        return
    if source in self.SOURCES:
        self._send(self.CMD_SOURCE + self.SOURCES[source], read_reply=True)
|
python
|
{
"resource": ""
}
|
q277604
|
deobfuscate
|
test
|
def deobfuscate(request, key, juice=None):
    """
    Deobfuscates the URL and returns HttpResponse from source view.
    SEO juice is mostly ignored as it is intended for display purposes only.
    """
    # Decrypt the obfuscated key back into the original URL bytes;
    # tampering shows up as a checksum/key error → plain 404.
    try:
        url = decrypt(str(key),
                      settings.UNFRIENDLY_SECRET,
                      settings.UNFRIENDLY_IV,
                      checksum=settings.UNFRIENDLY_ENFORCE_CHECKSUM)
    except (CheckSumError, InvalidKeyError):
        return HttpResponseNotFound()
    try:
        url = url.decode('utf-8')
    except UnicodeDecodeError:
        return HttpResponseNotFound()
    url_parts = urlparse(unquote(url))
    path = url_parts.path
    query = url_parts.query
    # Map the decrypted path back onto a view via normal URL resolution.
    try:
        view, args, kwargs = resolve(path)
    except Resolver404:
        return HttpResponseNotFound()
    # fix-up the environ object so the patched request looks as if the
    # client had requested the hidden path directly
    environ = request.environ.copy()
    environ['PATH_INFO'] = path[len(environ['SCRIPT_NAME']):]
    environ['QUERY_STRING'] = query
    # init a new request
    patched_request = request.__class__(environ)
    # copy over any missing request attributes - this feels hackish
    missing_items = set(dir(request)) - set(dir(patched_request))
    while missing_items:
        missing_item = missing_items.pop()
        patched_request.__setattr__(missing_item,
                                    request.__getattribute__(missing_item))
    # mark this request as obfuscated
    patched_request.META['obfuscated'] = True
    response = view(patched_request, *args, **kwargs)
    # offer up a friendlier juice-powered filename if downloaded
    if juice and not response.has_header('Content-Disposition'):
        response['Content-Disposition'] = 'inline; filename=%s' % juice
    return response
|
python
|
{
"resource": ""
}
|
q277605
|
_crc
|
test
|
def _crc(plaintext):
    """Generates crc32. Modulo keep the value within int range."""
    data = plaintext if isinstance(plaintext, six.binary_type) else six.b(plaintext)
    return (zlib.crc32(data) % 2147483647) & 0xffffffff
|
python
|
{
"resource": ""
}
|
q277606
|
obfuscate
|
test
|
def obfuscate(value, juice=None):
    """
    Template filter that obfuscates whatever text it is applied to. The text is
    supposed to be a URL, but it will obfuscate anything.
    Usage:
        Extremely unfriendly URL:
            {{ "/my-secret-path/"|obfuscate }}
        Include some SEO juice:
            {{ "/my-secret-path/"|obfuscate:"some SEO friendly text" }}
    """
    # The filter can be disabled globally; pass the value through untouched.
    if not settings.UNFRIENDLY_ENABLE_FILTER:
        return value
    # Encrypt the value into the 'key' kwarg consumed by the
    # 'unfriendly-deobfuscate' view (the counterpart of this filter).
    kwargs = {
        'key': encrypt(value,
                       settings.UNFRIENDLY_SECRET,
                       settings.UNFRIENDLY_IV,
                       checksum=settings.UNFRIENDLY_ENFORCE_CHECKSUM),
    }
    if juice:
        # Optional SEO text rides along as a slug; it is display-only.
        kwargs['juice'] = slugify(juice)
    return reverse('unfriendly-deobfuscate', kwargs=kwargs)
|
python
|
{
"resource": ""
}
|
q277607
|
MrJattParser.missing_schema
|
test
|
def missing_schema(self, html, song_name):
    '''
    It will print the list of songs that can be downloaded
    '''
    # NOTE: Python 2 code (print statements).
    soup = BeautifulSoup(html)
    name = ' '.join(song_name)
    print '%s not found' % name
    print "But you can download any of the following songs :"
    # Song entries are anchor tags with CSS class 'touch'.
    a_list = soup.findAll('a', 'touch')
    # NOTE(review): the last anchor is skipped — presumably it is a
    # navigation link rather than a song; confirm against the page markup.
    for x in xrange(len(a_list) - 1):
        r = a_list[x]
        p = str(r)
        # Strip the surrounding markup so only the song title remains.
        q = re.sub(r'<a.*/>|<span.*">|</span>|</a>|<a.*html">|<font.*">|</font>', '', p)
        print q
|
python
|
{
"resource": ""
}
|
q277608
|
MrJattParser.list_of_all_href
|
test
|
def list_of_all_href(self, html):
    '''
    It will return all hyper links found in the mr-jatt page for download.
    Returns a list of [href, song-name] pairs.
    '''
    # NOTE: Python 2 code (xrange).
    soup = BeautifulSoup(html)
    links = []
    a_list = soup.findAll('a', 'touch')
    # NOTE(review): the last anchor is skipped — presumably navigation,
    # not a song; confirm against the page markup.
    for x in xrange(len(a_list) - 1):
        link = a_list[x].get('href')
        name = a_list[x]
        name = str(name)
        # Drop the surrounding markup, then the leading track number ("12.").
        name = re.sub(r'<a.*/>|<span.*">|</span>|</a>|<a.*html">|<font.*">|</font>', '', name)
        name = re.sub(r'^[0-9]+\.', '', name)
        links.append([link, name])
    return links
|
python
|
{
"resource": ""
}
|
q277609
|
MrJattParser.check_if_song_name
|
test
|
def check_if_song_name(self, html):
    '''
    Returns true if user entered artist or movie name
    Concretely: (True, 'nothing') when no per-bitrate download links are
    present on the page, otherwise (False, href) with the best available
    bitrate link.
    '''
    soup = BeautifulSoup(html)
    a_list = soup.findAll('a', 'touch')
    text = [str(x) for x in a_list]
    text = ''.join(text)
    text = text.lower()
    string1 = 'download in 48 kbps'
    string2 = 'download in 128 kbps'
    string3 = 'download in 320 kbps'
    href = ''
    # Prefer the highest bitrate: 320 > 128 > 48.
    # NOTE(review): the anchor indices assume the links appear in
    # ascending bitrate order 48/128/320 — confirm against the page.
    if string3 in text:
        href = a_list[2].get('href')
    elif string2 in text:
        href = a_list[1].get('href')
    elif string1 in text:
        href = a_list[0].get('href')
    else:
        return (True, 'nothing')
    return (False, href)
|
python
|
{
"resource": ""
}
|
q277610
|
MrJattParser.Parse
|
test
|
def Parse(self, url, song_name, flag):
    '''
    It will the resource URL if song is found,
    Otherwise it will return the list of songs that can be downloaded
    flag == False: expect a single-song page; return the best-bitrate
    href, or print the alternatives and quit().
    flag == True: treat the query as artist/movie; return candidate
    links, or download immediately when a single song matched.
    '''
    # NOTE: Python 2 code (print statements).
    file_download = FileDownload()
    html = file_download.get_html_response(url)
    if flag == False:
        soup = BeautifulSoup(html)
        a_list = soup.findAll('a', 'touch')
        text = [str(x) for x in a_list]
        text = ''.join(text)
        text = text.lower()
        string1 = 'download in 48 kbps'
        string2 = 'download in 128 kbps'
        string3 = 'download in 320 kbps'
        href = ''
        # Prefer the highest bitrate: 320 > 128 > 48.
        # NOTE(review): anchor indices assume links appear in ascending
        # bitrate order 48/128/320 — confirm against the page markup.
        if string3 in text:
            print 'Downloading in 320 kbps'
            href = a_list[2].get('href')
        elif string2 in text:
            print 'Downloading in 128 kbps'
            href = a_list[1].get('href')
        elif string1 in text:
            print 'Downloading in 48 kbps'
            href = a_list[0].get('href')
        else:
            # No direct download links: show alternatives and stop.
            self.missing_schema(html, song_name)
            quit()
        return href
    else:
        x, href = self.check_if_song_name(html)
        links = []
        if x == True:
            links = self.list_of_all_href(html)
        else:
            # Exact song match: download right away and stop.
            file_download = FileDownload()
            file_download.file_download_cross_platform(href)
            quit()
        return links
|
python
|
{
"resource": ""
}
|
q277611
|
GoogleParser.google_url
|
test
|
def google_url(self, song_name, website):
    '''It will return the google url to be searched.

    song_name is a list of words; website is a whitespace-separated
    string of site terms. Returns the full Google search URL.
    '''
    name = '+'.join(song_name)
    site_terms = '+'.join(website.split(" "))
    # Bug fix: insert a '+' between the song terms and the site terms.
    # Previously they were concatenated directly, gluing the last song
    # word to the first site word in the query string.
    return 'https://www.google.co.in/search?q=' + name + '+' + site_terms
|
python
|
{
"resource": ""
}
|
q277612
|
GoogleParser.parse_google
|
test
|
def parse_google(self, html):
    '''Parse a Google results page and return the first result URL.'''
    soup = BeautifulSoup(html)
    first_href = soup.find('div', 'g').find('a').get('href')
    # Google wraps results as /url?q=<target>&<tracking params>;
    # keep only the target: drop params, whitespace, and the wrapper.
    target = first_href.split('&')[0].strip()
    return target.replace('/url?q=', '')
|
python
|
{
"resource": ""
}
|
q277613
|
GoogleParser.Parse
|
test
|
def Parse(self, song_name, website):
    '''
    song_name is a list of strings
    website is a string
    It will return the url from where music file needs to be downloaded
    '''
    downloader = FileDownload()
    search_url = self.google_url(song_name, website)
    results_html = downloader.get_html_response(search_url)
    return self.parse_google(results_html)
|
python
|
{
"resource": ""
}
|
q277614
|
FileDownload.get_html_response
|
test
|
def get_html_response(self, url):
    '''It will download the html page specified by url and return the html response '''
    # NOTE: Python 2 code (print statements).
    print "Downloading page %s .." % url
    try:
        response = requests.get(url, timeout=50)
    except requests.exceptions.SSLError:
        # Certificate problem: retry once without verification
        # (best-effort page fetch, deliberately lenient).
        try:
            response = requests.get(url, verify=False, timeout=50)
        except requests.exceptions.RequestException as e:
            print e
            quit()
    except requests.exceptions.RequestException as e:
        # Any other network failure aborts the program.
        print e
        quit()
    return response.content
|
python
|
{
"resource": ""
}
|
q277615
|
FileDownload.file_download_using_requests
|
test
|
def file_download_using_requests(self,url):
'''It will download file specified by url using requests module'''
file_name=url.split('/')[-1]
if os.path.exists(os.path.join(os.getcwd(),file_name)):
print 'File already exists'
return
#print 'Downloading file %s '%file_name
#print 'Downloading from %s'%url
try:
r=requests.get(url,stream=True,timeout=200)
except requests.exceptions.SSLError:
try:
response=requests.get(url,stream=True,verify=False,timeout=200)
except requests.exceptions.RequestException as e:
print e
quit()
except requests.exceptions.RequestException as e:
print e
quit()
chunk_size = 1024
total_size = int(r.headers['Content-Length'])
total_chunks = total_size/chunk_size
file_iterable = r.iter_content(chunk_size = chunk_size)
tqdm_iter = tqdm(iterable = file_iterable,total = total_chunks,unit = 'KB',
leave = False
)
with open(file_name,'wb') as f:
for data in tqdm_iter:
f.write(data)
#total_size=float(r.headers['Content-Length'])/(1024*1024)
'''print 'Total size of file to be downloaded %.2f MB '%total_size
total_downloaded_size=0.0
with open(file_name,'wb') as f:
for chunk in r.iter_content(chunk_size=1*1024*1024):
if chunk:
size_of_chunk=float(len(chunk))/(1024*1024)
total_downloaded_size+=size_of_chunk
print '{0:.0%} Downloaded'.format(total_downloaded_size/total_size)
f.write(chunk)'''
print 'Downloaded file %s '%file_name
|
python
|
{
"resource": ""
}
|
q277616
|
FileDownload.file_download_using_wget
|
test
|
def file_download_using_wget(self, url):
    '''It will download file specified by url using wget utility of linux '''
    # NOTE: Python 2 code (print statements).
    file_name = url.split('/')[-1]
    print 'Downloading file %s ' % file_name
    # -c resume, 3 retries, quiet except the progress bar, no TLS checks.
    command = 'wget -c --read-timeout=50 --tries=3 -q --show-progress --no-check-certificate '
    # SECURITY(review): url is interpolated into a shell command; the
    # double quotes do not prevent injection for a hostile url — prefer
    # subprocess.call with an argument list.
    url = '"' + url + '"'
    command = command + url
    os.system(command)
|
python
|
{
"resource": ""
}
|
q277617
|
findStationCodesByCity
|
test
|
def findStationCodesByCity(city_name, token):
    """Lookup AQI database for station codes in a given city.

    :param city_name: search keyword for the station lookup
    :param token: AQICN API token
    :returns: list of station UIDs, or [] on any HTTP/API failure
    """
    req = requests.get(
        API_ENDPOINT_SEARCH,
        params={
            'token': token,
            'keyword': city_name
        })
    if req.status_code != 200:
        return []
    # Decode the body once instead of re-parsing it for every access.
    payload = req.json()
    if payload.get("status") != "ok":
        return []
    return [result["uid"] for result in payload["data"]]
|
python
|
{
"resource": ""
}
|
q277618
|
get_location_observation
|
test
|
def get_location_observation(lat, lng, token):
    """Lookup observations by geo coordinates.

    Returns the parsed observation dict, or {} on failure.
    """
    req = requests.get(
        API_ENDPOINT_GEO % (lat, lng),
        params={
            'token': token
        })
    if req.status_code == 200:
        # Decode the body once instead of calling req.json() repeatedly.
        payload = req.json()
        if payload.get("status") == "ok":
            return parse_observation_response(payload["data"])
    return {}
|
python
|
{
"resource": ""
}
|
q277619
|
parse_observation_response
|
test
|
def parse_observation_response(json):
    """Decode AQICN observation response JSON into python object."""
    logging.debug(json)
    pollutants = json['iaqi']
    # Flatten the per-pollutant mapping into a list of {'p': name, 'v': value}.
    return {
        'idx': json['idx'],
        'city': json.get('city', ''),
        'aqi': json['aqi'],
        'dominentpol': json.get("dominentpol", ''),
        'time': json['time']['s'],
        'iaqi': [{'p': name, 'v': pollutants[name]['v']} for name in pollutants],
    }
|
python
|
{
"resource": ""
}
|
q277620
|
get_station_observation
|
test
|
def get_station_observation(station_code, token):
    """Request station data for a specific station identified by code.

    Returns the parsed observation dict, or {} on failure.
    NOTE(review): the original docstring mentioned a language parameter,
    but no such parameter is accepted by this function.
    """
    req = requests.get(
        API_ENDPOINT_OBS % (station_code),
        params={
            'token': token
        })
    if req.status_code == 200:
        # Decode the body once instead of calling req.json() twice.
        payload = req.json()
        if payload.get('status') == "ok":
            return parse_observation_response(payload['data'])
    return {}
|
python
|
{
"resource": ""
}
|
q277621
|
AssetAttributes.search_paths
|
test
|
def search_paths(self):
    """The list of logical paths which are used to search for an asset.
    This property makes sense only if the attributes was created with
    logical path.
    It is assumed that the logical path can be a directory containing a
    file named ``index`` with the same suffix.
    Example::
        >>> attrs = AssetAttributes(environment, 'js/app.js')
        >>> attrs.search_paths
        ['js/app.js', 'js/app/index.js']
        >>> attrs = AssetAttributes(environment, 'js/app/index.js')
        >>> attrs.search_paths
        ['js/app/index.js']
    """
    # NOTE(review): the second doctest previously claimed
    # ['js/models/index.js'], which matches neither the input path nor
    # the code: a path already named 'index' is returned unchanged.
    paths = [self.path]
    # Only add the '<dir>/index.<suffix>' variant when the path itself
    # is not already an index file.
    if os.path.basename(self.path_without_suffix) != 'index':
        path = os.path.join(self.path_without_suffix, 'index')
        paths.append(path + ''.join(self.suffix))
    return paths
|
python
|
{
"resource": ""
}
|
q277622
|
AssetAttributes.compilers
|
test
|
def compilers(self):
    """Compiler instances matching this asset's compiler extensions."""
    get_compiler = self.environment.compilers.get
    return [get_compiler(extension) for extension in self.compiler_extensions]
|
python
|
{
"resource": ""
}
|
q277623
|
AssetAttributes.mimetype
|
test
|
def mimetype(self):
    """MIME type of the asset."""
    # Explicit mapping from the format extension wins; fall back to the
    # compiler-implied type, then to the generic binary type.
    explicit = self.environment.mimetypes.get(self.format_extension)
    if explicit:
        return explicit
    return self.compiler_mimetype or 'application/octet-stream'
|
python
|
{
"resource": ""
}
|
q277624
|
AssetAttributes.compiler_mimetype
|
test
|
def compiler_mimetype(self):
    """Implicit MIME type of the asset by its compilers."""
    # The last compiler in the chain wins, hence the reversed scan.
    return next(
        (c.result_mimetype for c in reversed(self.compilers) if c.result_mimetype),
        None,
    )
|
python
|
{
"resource": ""
}
|
q277625
|
AssetAttributes.compiler_format_extension
|
test
|
def compiler_format_extension(self):
    """Implicit format extension on the asset by its compilers."""
    # Reverse-lookup the extension registered for the compiler MIME type.
    target = self.compiler_mimetype
    for extension, mimetype in self.environment.mimetypes.items():
        if mimetype == target:
            return extension
    return None
|
python
|
{
"resource": ""
}
|
q277626
|
Processors.register
|
test
|
def register(self, mimetype, processor):
    """Register passed `processor` for passed `mimetype`."""
    registered = self.setdefault(mimetype, [])
    # Keep the list duplicate-free while preserving insertion order.
    if processor not in registered:
        registered.append(processor)
|
python
|
{
"resource": ""
}
|
q277627
|
Processors.unregister
|
test
|
def unregister(self, mimetype, processor):
    """Remove passed `processor` for passed `mimetype`. If processor for
    this MIME type does not found in the registry, nothing happens.
    """
    # EAFP: a missing MIME type raises KeyError, a missing processor
    # raises ValueError — both mean "nothing to remove".
    try:
        self[mimetype].remove(processor)
    except (KeyError, ValueError):
        pass
|
python
|
{
"resource": ""
}
|
q277628
|
Environment.paths
|
test
|
def paths(self):
    """The list of search paths. It is built from registered finders, which
    has ``paths`` property. Can be useful for compilers to resolve internal
    dependencies.
    """
    # Computed lazily and memoized on first access.
    if not hasattr(self, '_paths'):
        self._paths = [
            path
            for finder in self.finders
            if hasattr(finder, 'paths')
            for path in finder.paths
        ]
    return self._paths
|
python
|
{
"resource": ""
}
|
q277629
|
Environment.register_defaults
|
test
|
def register_defaults(self):
    """Register default compilers, preprocessors and MIME types."""
    for registry in (self.mimetypes, self.preprocessors, self.postprocessors):
        registry.register_defaults()
|
python
|
{
"resource": ""
}
|
q277630
|
import_qtcore
|
test
|
def import_qtcore():
    """
    This nasty piece of code is here to force the loading of IDA's
    Qt bindings.
    Without it, Python attempts to load PySide from the site-packages
    directory, and failing, as it does not play nicely with IDA.
    via: github.com/tmr232/Cute
    Returns the QtCore module; raises ImportError if no binding is found.
    """
    has_ida = False
    try:
        # if we're running under IDA,
        # then we'll use IDA's Qt bindings
        import idaapi
        has_ida = True
    except ImportError:
        # not running under IDA,
        # so use default Qt installation
        has_ida = False
    if has_ida:
        # Make IDA's bundled bindings win the import race by prepending
        # the idaapi directory to sys.path; restore the path afterwards.
        old_path = sys.path[:]
        try:
            ida_python_path = os.path.dirname(idaapi.__file__)
            sys.path.insert(0, ida_python_path)
            # IDA 6.9 switched its bundled bindings from PySide to PyQt5.
            if idaapi.IDA_SDK_VERSION >= 690:
                from PyQt5 import QtCore
                return QtCore
            else:
                from PySide import QtCore
                return QtCore
        finally:
            sys.path = old_path
    else:
        # Outside IDA: prefer PyQt5, fall back to PySide.
        try:
            from PyQt5 import QtCore
            return QtCore
        except ImportError:
            pass
        try:
            from PySide import QtCore
            return QtCore
        except ImportError:
            pass
        raise ImportError("No module named PySide or PyQt")
|
python
|
{
"resource": ""
}
|
q277631
|
get_meta_netnode
|
test
|
def get_meta_netnode():
    """
    Get the netnode used to store settings metadata in the current IDB.
    Note that this implicitly uses the open IDB via the idc interface.
    """
    # The '$ ' prefix marks this as a tool-private netnode name.
    node_name = "$ {org:s}.{application:s}".format(
        org=IDA_SETTINGS_ORGANIZATION,
        application=IDA_SETTINGS_APPLICATION)
    return netnode.Netnode(node_name)
|
python
|
{
"resource": ""
}
|
q277632
|
add_netnode_plugin_name
|
test
|
def add_netnode_plugin_name(plugin_name):
    """
    Add the given plugin name to the list of plugin names registered in
    the current IDB.
    Note that this implicitly uses the open IDB via the idc interface.
    """
    names = set(get_netnode_plugin_names())
    # Only rewrite the netnode when the name is actually new.
    if plugin_name not in names:
        names.add(plugin_name)
        get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(names))
|
python
|
{
"resource": ""
}
|
q277633
|
del_netnode_plugin_name
|
test
|
def del_netnode_plugin_name(plugin_name):
    """
    Remove the given plugin name from the list of plugin names registered
    in the current IDB.
    Note that this implicitly uses the open IDB via the idc interface.
    """
    current_names = set(get_netnode_plugin_names())
    if plugin_name not in current_names:
        return
    # The membership test above makes remove() infallible on this local
    # set, so the original's extra try/except KeyError guard (which
    # re-checked the same condition) has been dropped.
    current_names.remove(plugin_name)
    get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(current_names))
|
python
|
{
"resource": ""
}
|
q277634
|
import_settings
|
test
|
def import_settings(settings, config_path):
    """
    Import settings from the given file system path to given settings instance.
    type settings: IDASettingsInterface
    type config_path: str
    """
    # Read the INI file via QSettings and copy every key across,
    # overwriting existing entries in the target settings.
    other = QtCore.QSettings(config_path, QtCore.QSettings.IniFormat)
    for k in other.allKeys():
        settings[k] = other.value(k)
|
python
|
{
"resource": ""
}
|
q277635
|
export_settings
|
test
|
def export_settings(settings, config_path):
    """
    Export the given settings instance to the given file system path.
    type settings: IDASettingsInterface
    type config_path: str
    """
    # Write every key/value pair into an INI file via QSettings.
    # NOTE: iteritems() means this function targets Python 2.
    other = QtCore.QSettings(config_path, QtCore.QSettings.IniFormat)
    for k, v in settings.iteritems():
        other.setValue(k, v)
|
python
|
{
"resource": ""
}
|
q277636
|
IDASettings.directory
|
test
|
def directory(self):
    """
    Fetch the IDASettings instance for the current plugin with directory scope.
    rtype: IDASettingsInterface
    """
    # NOTE(review): ensure_ida_loaded() is only invoked when no explicit
    # config directory was supplied, yet DirectoryIDASettings still
    # receives the (then-None) directory — presumably it falls back to
    # an IDB-derived default in that case; confirm.
    if self._config_directory is None:
        ensure_ida_loaded()
    return DirectoryIDASettings(self._plugin_name, directory=self._config_directory)
|
python
|
{
"resource": ""
}
|
q277637
|
IDASettings.iterkeys
|
test
|
def iterkeys(self):
    """
    Enumerate the keys found at any scope for the current plugin.
    Keys are yielded in scope-priority order (idb, directory, user,
    system) and deduplicated across scopes; any scope that cannot be
    read is silently skipped.
    rtype: Generator[str]
    """
    visited_keys = set()
    # The original repeated this try/for block verbatim for each scope;
    # a single loop keeps the behavior and removes the duplication. The
    # scope attribute is resolved inside the try block so that scope
    # construction failures are swallowed too, exactly as before.
    for scope_name in ('idb', 'directory', 'user', 'system'):
        try:
            for key in getattr(self, scope_name).iterkeys():
                if key not in visited_keys:
                    yield key
                    visited_keys.add(key)
        except (PermissionError, EnvironmentError):
            pass
|
python
|
{
"resource": ""
}
|
q277638
|
simple_error_handler
|
test
|
def simple_error_handler(exc, *args):
    """
    Returns the response that should be used for any given exception.
    By default we handle the REST framework `APIException`, and also
    Django's builtin `Http404` and `PermissionDenied` exceptions.
    Any unhandled exceptions may return `None`, which will cause a 500 error
    to be raised.
    """
    if isinstance(exc, exceptions.APIException):
        headers = {}
        auth_header = getattr(exc, 'auth_header', None)
        if auth_header:
            headers['WWW-Authenticate'] = auth_header
        wait = getattr(exc, 'wait', None)
        if wait:
            headers['X-Throttle-Wait-Seconds'] = '%d' % wait
        return Response({'error': exc.detail},
                        status=exc.status_code,
                        headers=headers)
    if isinstance(exc, Http404):
        return Response({'error': 'Not found'},
                        status=status.HTTP_404_NOT_FOUND)
    if isinstance(exc, PermissionDenied):
        return Response({'error': 'Permission denied'},
                        status=status.HTTP_403_FORBIDDEN)
    # Note: Unhandled exceptions will raise a 500 error.
    return None
|
python
|
{
"resource": ""
}
|
q277639
|
table
|
test
|
def table(name, auth=None, eager=True):
    """Returns a given table for the given user."""
    connection = boto.connect_dynamodb(*(auth or []))
    return Table(table=connection.get_table(name), eager=eager)
|
python
|
{
"resource": ""
}
|
q277640
|
tables
|
test
|
def tables(auth=None, eager=True):
    """Returns a list of tables for the given user."""
    credentials = auth or []
    connection = boto.connect_dynamodb(*credentials)
    # Delegate per-table wrapping to the sibling table() helper.
    return [table(name, credentials, eager=eager) for name in connection.list_tables()]
|
python
|
{
"resource": ""
}
|
q277641
|
Crates.fetch_items
|
test
|
def fetch_items(self, category, **kwargs):
    """Fetch packages and summary from Crates.io
    :param category: the category of items to fetch
    :param kwargs: backend arguments ('from_date' is required for, and
        only consumed by, the crates category)
    :returns: a generator of items
    """
    if category == CATEGORY_CRATES:
        return self.__fetch_crates(kwargs['from_date'])
    # Summary fetches no longer require a 'from_date' kwarg — the
    # original read it unconditionally and so raised KeyError even for
    # categories that never use it.
    return self.__fetch_summary()
|
python
|
{
"resource": ""
}
|
q277642
|
Crates.metadata_id
|
test
|
def metadata_id(item):
    """Extracts the identifier from an item depending on its type."""
    if Crates.metadata_category(item) == CATEGORY_CRATES:
        return str(item['id'])
    # Summary items carry no id; use their fetch timestamp instead.
    fetched_on = str_to_datetime(item['fetched_on'])
    return str(fetched_on.timestamp())
|
python
|
{
"resource": ""
}
|
q277643
|
Crates.metadata_updated_on
|
test
|
def metadata_updated_on(item):
    """Extracts the update time from an item.
    Depending on the item, the timestamp is extracted from the
    'updated_at' or 'fetched_on' fields.
    This date is converted to UNIX timestamp format.
    :param item: item generated by the backend
    :returns: a UNIX timestamp
    """
    is_crate = Crates.metadata_category(item) == CATEGORY_CRATES
    field = 'updated_at' if is_crate else 'fetched_on'
    return str_to_datetime(item[field]).timestamp()
|
python
|
{
"resource": ""
}
|
q277644
|
Crates.__fetch_crate_owner_team
|
test
|
def __fetch_crate_owner_team(self, crate_id):
    """Get crate team owner"""
    # Fetch the raw JSON attribute and decode it in one step.
    return json.loads(self.client.crate_attribute(crate_id, 'owner_team'))
|
python
|
{
"resource": ""
}
|
q277645
|
Crates.__fetch_crate_owner_user
|
test
|
def __fetch_crate_owner_user(self, crate_id):
    """Get crate user owners"""
    # Fetch the raw JSON attribute and decode it in one step.
    return json.loads(self.client.crate_attribute(crate_id, 'owner_user'))
|
python
|
{
"resource": ""
}
|
q277646
|
Crates.__fetch_crate_versions
|
test
|
def __fetch_crate_versions(self, crate_id):
    """Get crate versions data"""
    # Fetch the raw JSON attribute and decode it in one step.
    return json.loads(self.client.crate_attribute(crate_id, "versions"))
|
python
|
{
"resource": ""
}
|
q277647
|
Crates.__fetch_crate_version_downloads
|
test
|
def __fetch_crate_version_downloads(self, crate_id):
    """Get crate version downloads"""
    # Fetch the raw JSON attribute and decode it in one step.
    return json.loads(self.client.crate_attribute(crate_id, "downloads"))
|
python
|
{
"resource": ""
}
|
q277648
|
Crates.__fetch_crate_data
|
test
|
def __fetch_crate_data(self, crate_id):
    """Get crate data"""
    # The API wraps the payload in a top-level 'crate' object.
    return json.loads(self.client.crate(crate_id))['crate']
|
python
|
{
"resource": ""
}
|
q277649
|
CratesClient.summary
|
test
|
def summary(self):
    """Get Crates.io summary"""
    return self.fetch(urijoin(CRATES_API_URL, CATEGORY_SUMMARY))
|
python
|
{
"resource": ""
}
|
q277650
|
CratesClient.crates
|
test
|
def crates(self, from_page=1):
    """Get crates in alphabetical order"""
    listing_path = urijoin(CRATES_API_URL, CATEGORY_CRATES)
    return self.__fetch_items(listing_path, from_page)
|
python
|
{
"resource": ""
}
|
q277651
|
CratesClient.crate
|
test
|
def crate(self, crate_id):
    """Get a crate by its ID"""
    return self.fetch(urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id))
|
python
|
{
"resource": ""
}
|
q277652
|
CratesClient.crate_attribute
|
test
|
def crate_attribute(self, crate_id, attribute):
    """Get crate attribute"""
    attribute_path = urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id, attribute)
    return self.fetch(attribute_path)
|
python
|
{
"resource": ""
}
|
q277653
|
CratesClient.__fetch_items
|
test
|
def __fetch_items(self, path, page=1):
    """Return the items from Crates.io API using pagination"""
    fetch_data = True
    parsed_crates = 0   # crates seen so far across pages
    total_crates = 0    # grand total reported by the API ('meta.total')
    while fetch_data:
        logger.debug("Fetching page: %i", page)
        try:
            payload = {'sort': 'alphabetical', 'page': page}
            raw_content = self.fetch(path, payload=payload)
            content = json.loads(raw_content)
            parsed_crates += len(content['crates'])
            # The total is only captured from the first successful page.
            if not total_crates:
                total_crates = content['meta']['total']
        except requests.exceptions.HTTPError as e:
            logger.error("HTTP exception raised - %s", e.response.text)
            raise e
        # Yield the raw JSON page; callers decode it themselves.
        yield raw_content
        page += 1
        if parsed_crates >= total_crates:
            fetch_data = False
|
python
|
{
"resource": ""
}
|
q277654
|
Kitsune.fetch
|
test
|
def fetch(self, category=CATEGORY_QUESTION, offset=DEFAULT_OFFSET):
    """Fetch questions from the Kitsune url.
    :param category: the category of items to fetch
    :offset: obtain questions after offset
    :returns: a generator of questions
    """
    # A falsy offset (None or 0) is normalized to the default.
    return super().fetch(category, offset=offset or DEFAULT_OFFSET)
|
python
|
{
"resource": ""
}
|
q277655
|
Kitsune.fetch_items
|
test
|
def fetch_items(self, category, **kwargs):
    """Fetch questions from the Kitsune url
    :param category: the category of items to fetch
    :param kwargs: backend arguments
    :returns: a generator of items
    """
    offset = kwargs['offset']
    logger.info("Looking for questions at url '%s' using offset %s",
                self.url, str(offset))
    nquestions = 0  # number of questions processed
    tquestions = 0  # number of questions from API data
    equestions = 0  # number of questions dropped by errors
    # Always get complete pages so the first item is always
    # the first one in the page
    page = int(offset / KitsuneClient.ITEMS_PER_PAGE)
    page_offset = page * KitsuneClient.ITEMS_PER_PAGE
    # drop questions from page before the offset
    drop_questions = offset - page_offset
    current_offset = offset
    questions_page = self.client.get_questions(offset)
    while True:
        try:
            raw_questions = next(questions_page)
        except StopIteration:
            break
        except requests.exceptions.HTTPError as e:
            # Continue with the next page if it is a 500 error
            if e.response.status_code == 500:
                logger.exception(e)
                logger.error("Problem getting Kitsune questions. "
                             "Loosing %i questions. Going to the next page.",
                             KitsuneClient.ITEMS_PER_PAGE)
                # Assume the whole failed page is lost and restart the
                # pagination generator from the next page's offset.
                equestions += KitsuneClient.ITEMS_PER_PAGE
                current_offset += KitsuneClient.ITEMS_PER_PAGE
                questions_page = self.client.get_questions(current_offset)
                continue
            else:
                # If it is another error just propagate the exception
                raise e
        try:
            questions_data = json.loads(raw_questions)
            tquestions = questions_data['count']
            questions = questions_data['results']
        except (ValueError, KeyError) as ex:
            logger.error(ex)
            cause = ("Bad JSON format for mozilla_questions: %s" % (raw_questions))
            raise ParseError(cause=cause)
        for question in questions:
            if drop_questions > 0:
                # Remove extra questions due to page base retrieval
                drop_questions -= 1
                continue
            question['offset'] = current_offset
            current_offset += 1
            # Inline every answers page for this question into the item.
            question['answers_data'] = []
            for raw_answers in self.client.get_question_answers(question['id']):
                answers = json.loads(raw_answers)['results']
                question['answers_data'] += answers
            yield question
            nquestions += 1
            logger.debug("Questions: %i/%i", nquestions + offset, tquestions)
    logger.info("Total number of questions: %i (%i total)", nquestions, tquestions)
    logger.info("Questions with errors dropped: %i", equestions)
|
python
|
{
"resource": ""
}
|
q277656
|
KitsuneClient.get_questions
|
test
|
def get_questions(self, offset=None):
    """Retrieve questions from older to newer updated starting offset"""
    page = KitsuneClient.FIRST_PAGE
    if offset:
        # Translate the item offset into a page number.
        page += int(offset / KitsuneClient.ITEMS_PER_PAGE)
    while True:
        api_questions_url = urijoin(self.base_url, '/question') + '/'
        params = {
            "page": page,
            "ordering": "updated"
        }
        questions = self.fetch(api_questions_url, params)
        # Yield the raw page body; callers decode the JSON themselves.
        yield questions
        # Stop once the API reports there is no further page.
        questions_json = json.loads(questions)
        next_uri = questions_json['next']
        if not next_uri:
            break
        page += 1
|
python
|
{
"resource": ""
}
|
q277657
|
ReMo.fetch
|
test
|
def fetch(self, category=CATEGORY_EVENT, offset=REMO_DEFAULT_OFFSET):
    """Fetch items from the ReMo url.
    The method retrieves, from a ReMo URL, the set of items
    of the given `category`.
    :param category: the category of items to fetch
    :param offset: obtain items after offset
    :returns: a generator of items
    """
    # A falsy offset (None or 0) is normalized to the default.
    return super().fetch(category, offset=offset or REMO_DEFAULT_OFFSET)
|
python
|
{
"resource": ""
}
|
q277658
|
ReMo.metadata_updated_on
|
test
|
def metadata_updated_on(item):
    """Extracts the update time from a ReMo item.
    The timestamp is taken from the first of 'end' (events),
    'date_joined_program' (users) or 'report_date' (activities) that is
    present, converted to a float UNIX timestamp.
    :param item: item generated by the backend
    :returns: a UNIX timestamp
    """
    for field in ('end', 'date_joined_program', 'report_date'):
        if field in item:
            return float(str_to_datetime(item[field]).timestamp())
    raise ValueError("Can't find updated field for item " + str(item))
|
python
|
{
"resource": ""
}
|
q277659
|
ReMo.metadata_category
|
test
|
def metadata_category(item):
    """Extracts the category from a ReMo item.
    This backend generates items types 'event', 'activity'
    or 'user'. To guess the type of item, the code will look
    for unique fields.
    """
    if 'estimated_attendance' in item:
        return CATEGORY_EVENT
    if 'activity' in item:
        return CATEGORY_ACTIVITY
    if 'first_name' in item:
        return CATEGORY_USER
    raise TypeError("Could not define the category of item " + str(item))
|
python
|
{
"resource": ""
}
|
q277660
|
ReMoClient.get_items
|
test
|
def get_items(self, category=CATEGORY_EVENT, offset=REMO_DEFAULT_OFFSET):
    """Retrieve all items for category using pagination """
    more = True       # There are more items to be processed
    next_uri = None   # URI for the next items page query
    page = ReMoClient.FIRST_PAGE
    # Translate the item offset into a page index.
    page += int(offset / ReMoClient.ITEMS_PER_PAGE)
    if category == CATEGORY_EVENT:
        api = self.api_events_url
    elif category == CATEGORY_ACTIVITY:
        api = self.api_activities_url
    elif category == CATEGORY_USER:
        api = self.api_users_url
    else:
        raise ValueError(category + ' not supported in ReMo')
    while more:
        params = {
            "page": page,
            "orderby": "ASC"
        }
        logger.debug("ReMo client calls APIv2: %s params: %s",
                     api, str(params))
        raw_items = self.fetch(api, payload=params)
        # Yield the raw page body; callers decode the JSON themselves.
        yield raw_items
        items_data = json.loads(raw_items)
        next_uri = items_data['next']
        if not next_uri:
            more = False
        else:
            # https://reps.mozilla.org/remo/api/remo/v1/events/?orderby=ASC&page=269
            # NOTE(review): parse_qs yields strings, so `page` becomes a
            # str from here on; it is only used as a request parameter
            # afterwards, which tolerates that.
            parsed_uri = urllib.parse.urlparse(next_uri)
            parsed_params = urllib.parse.parse_qs(parsed_uri.query)
            page = parsed_params['page'][0]
|
python
|
{
"resource": ""
}
|
q277661
|
AIOBlock.buffer_list
|
test
|
def buffer_list(self):
    """
    The buffer list this instance operates on.
    Only available in mode != AIOBLOCK_MODE_POLL.
    Changes on a submitted transfer are not fully applied until its
    next submission: kernel will still be using original buffer list.
    """
    opcode = self._iocb.aio_lio_opcode
    # Poll transfers carry no buffers; surface that as a missing attribute.
    if opcode == libaio.IO_CMD_POLL:
        raise AttributeError
    return self._buffer_list
|
python
|
{
"resource": ""
}
|
q277662
|
AIOBlock.io_priority
|
test
|
def io_priority(self):
    """
    IO priority for this instance, or None when no explicit priority
    flag (IOCB_FLAG_IOPRIO) is set.
    """
    if self._iocb.u.c.flags & libaio.IOCB_FLAG_IOPRIO:
        return self._iocb.aio_reqprio
    return None
|
python
|
{
"resource": ""
}
|
q277663
|
AIOContext.close
|
test
|
def close(self):
    """
    Cancel all pending IO blocks, wait until the non-cancellable ones
    finish, then release the kernel AIO context (same as io_destroy).
    """
    if self._ctx is None:
        return
    self._io_queue_release(self._ctx)
    del self._ctx
|
python
|
{
"resource": ""
}
|
q277664
|
AIOContext.submit
|
test
|
def submit(self, block_list):
    """
    Submits transfers.
    block_list (list of AIOBlock)
        The IO blocks to hand off to kernel.
    Returns the number of successfully submitted blocks.
    """
    # io_submit only raises for a problem with the first block; for a
    # later block it stops and returns how many were accepted. So it is
    # safe to update self._submitted only after the call returns.
    count = len(block_list)
    # pylint: disable=protected-access
    iocb_pointers = (libaio.iocb_p * count)(
        *[pointer(block._iocb) for block in block_list]
    )
    accepted = libaio.io_submit(self._ctx, count, iocb_pointers)
    tracked = self._submitted
    for block in block_list[:accepted]:
        tracked[addressof(block._iocb)] = (block, block._getSubmissionState())
    # pylint: enable=protected-access
    return accepted
|
python
|
{
"resource": ""
}
|
q277665
|
AIOContext.cancel
|
test
|
def cancel(self, block):
    """
    Cancel an IO block.
    block (AIOBlock)
        The IO block to cancel.
    Returns the cancelled block's event data (see getEvents), or None
    when the kernel answered EINPROGRESS; in that case the completion
    event surfaces through a later getEvents call.
    """
    event = libaio.io_event()
    try:
        # pylint: disable=protected-access
        libaio.io_cancel(self._ctx, byref(block._iocb), byref(event))
        # pylint: enable=protected-access
    except OSError as exc:
        if exc.errno != errno.EINPROGRESS:
            raise
        return None
    return self._eventToPython(event)
|
python
|
{
"resource": ""
}
|
q277666
|
AIOContext.cancelAll
|
test
|
def cancelAll(self):
    """
    Cancel all submitted IO blocks.
    Blocks until all submitted transfers have been finalised.
    Submitting more transfers or processing completion events while this
    method is running produces undefined behaviour.
    Returns the list of values returned by individual cancellations.
    See "cancel" documentation.
    """
    # Bind once to avoid repeated attribute lookups in the loop.
    cancel = self.cancel
    result = []
    # NOTE: Python 2 style iteration over the (block, state) values in
    # _submitted; entries are not removed here, which is why concurrent
    # event processing is documented as undefined behaviour above.
    for block, _ in self._submitted.itervalues():
        try:
            result.append(cancel(block))
        except OSError as exc:
            # EINVAL should mean we requested to cancel a not-in-flight
            # transfer - maybe it was just completed and we just did
            # not process its completion event yet.
            if exc.errno != errno.EINVAL:
                raise
    return result
|
python
|
{
"resource": ""
}
|
q277667
|
AIOContext.getEvents
|
test
|
def getEvents(self, min_nr=1, nr=None, timeout=None):
    """
    Returns a list of event data from submitted IO blocks.
    min_nr (int, None)
        When timeout is None, minimum number of events to collect before
        returning.
        If None, waits for all submitted events.
    nr (int, None)
        Maximum number of events to return.
        If None, set to maxevents given at construction or to the number of
        currently submitted events, whichever is larger.
    timeout (float, None):
        Time to wait for events.
        If None, become blocking.
    Returns a list of 3-tuples, containing:
    - completed AIOBlock instance
    - res, file-object-type-dependent value
    - res2, another file-object-type-dependent value
    """
    if min_nr is None:
        min_nr = len(self._submitted)
    if nr is None:
        nr = max(len(self._submitted), self._maxevents)
    if timeout is None:
        timeoutp = None
    else:
        # Split the float timeout into whole seconds plus nanoseconds
        # for the kernel timespec structure.
        sec = int(timeout)
        timeout = libaio.timespec(sec, int((timeout - sec) * 1e9))
        timeoutp = byref(timeout)
    # io_getevents fills this ctypes array; keep it referenced until the
    # call returns.
    event_buffer = (libaio.io_event * nr)()
    actual_nr = libaio.io_getevents(
        self._ctx,
        min_nr,
        nr,
        event_buffer,
        timeoutp,
    )
    # Only the first actual_nr entries of the buffer are valid.
    return [
        self._eventToPython(event_buffer[x])
        for x in xrange(actual_nr)
    ]
|
python
|
{
"resource": ""
}
|
q277668
|
MozillaClub.fetch
|
test
|
def fetch(self, category=CATEGORY_EVENT):
    """Fetch events from the MozillaClub URL.

    Events live in a Google spreadsheet which is retrieved through the
    feed REST API.

    :param category: the category of items to fetch
    :returns: a generator of events
    """
    return super().fetch(category)
|
python
|
{
"resource": ""
}
|
q277669
|
MozillaClubClient.get_cells
|
test
|
def get_cells(self):
    """Retrieve all cells from the spreadsheet as raw text."""
    logger.info("Retrieving all cells spreadsheet data ...")
    logger.debug("MozillaClub client calls API: %s", self.base_url)
    response = self.fetch(self.base_url)
    return response.text
|
python
|
{
"resource": ""
}
|
q277670
|
MozillaClubParser.parse
|
test
|
def parse(self):
    """Parse the MozillaClub spreadsheet feed cells json, yielding events."""
    wrong_events = 0
    feed_json = json.loads(self.feed)
    if 'entry' not in feed_json['feed']:
        return
    self.cells = feed_json['feed']['entry']
    self.ncell = 0
    event_fields = self.__get_event_fields()
    # Rows arrive flattened into a cell stream: the end of a row is only
    # detectable by reaching the maximum column number (cell_cols), so
    # events are rebuilt by consuming cells per the event template.
    while self.ncell < len(self.cells):
        event = self.__get_next_event(event_fields)
        if event['Date of Event'] is None or event['Club Name'] is None:
            logger.warning("Wrong event data: %s", event)
            wrong_events += 1
            continue
        yield event
    logger.info("Total number of wrong events: %i", wrong_events)
|
python
|
{
"resource": ""
}
|
q277671
|
_RecordUIState.export_formats
|
test
|
def export_formats(self, pid_type):
    """Return the cached, order-sorted export formats for a PID type."""
    if pid_type not in self._export_formats:
        configured = self.app.config.get('RECORDS_UI_EXPORT_FORMATS', {})
        available = configured.get(pid_type, {})
        # Drop falsy (deprecated/disabled) entries and sort by 'order'.
        self._export_formats[pid_type] = sorted(
            ((name, fmt) for name, fmt in available.items() if fmt),
            key=lambda entry: entry[1]['order'],
        )
    return self._export_formats[pid_type]
|
python
|
{
"resource": ""
}
|
q277672
|
_RecordUIState.permission_factory
|
test
|
def permission_factory(self):
    """Load (and cache) the default permission factory."""
    if self._permission_factory is None:
        import_path = self.app.config['RECORDS_UI_DEFAULT_PERMISSION_FACTORY']
        self._permission_factory = obj_or_import_string(import_path)
    return self._permission_factory
|
python
|
{
"resource": ""
}
|
q277673
|
create_blueprint
|
test
|
def create_blueprint(endpoints):
    """Create Invenio-Records-UI blueprint.

    One URL route is installed per configured endpoint, plus an error
    handler that renders tombstone pages for deleted records.

    :param endpoints: Dictionary of endpoints to be installed. See usage
        documentation for further details.
    :returns: The initialized blueprint.
    """
    records_ui = Blueprint(
        'invenio_records_ui',
        __name__,
        url_prefix='',
        template_folder='templates',
        static_folder='static',
    )

    @records_ui.errorhandler(PIDDeletedError)
    def tombstone_errorhandler(error):
        # 410 Gone with the configured tombstone page.
        return render_template(
            current_app.config['RECORDS_UI_TOMBSTONE_TEMPLATE'],
            pid=error.pid,
            record=error.record or {},
        ), 410

    @records_ui.context_processor
    def inject_export_formats():
        return dict(
            export_formats=(
                current_app.extensions['invenio-records-ui'].export_formats)
        )

    for name, options in (endpoints or {}).items():
        records_ui.add_url_rule(**create_url_rule(name, **options))
    return records_ui
|
python
|
{
"resource": ""
}
|
q277674
|
create_url_rule
|
test
|
def create_url_rule(endpoint, route=None, pid_type=None, template=None,
                    permission_factory_imp=None, view_imp=None,
                    record_class=None, methods=None):
    """Create Werkzeug URL rule for a specific endpoint.

    A persistent identifier resolver is created for the given persistent
    identifier type.

    :param endpoint: Name of endpoint.
    :param route: URL route (must include ``<pid_value>`` pattern). Required.
    :param pid_type: Persistent identifier type for endpoint. Required.
    :param template: Template to render.
        (Default: ``invenio_records_ui/detail.html``)
    :param permission_factory_imp: Import path to factory that creates a
        permission object for a given record.
    :param view_imp: Import path to view function. (Default: ``None``)
    :param record_class: Name of the record API class.
    :param methods: Method allowed for the endpoint.
    :returns: A dictionary that can be passed as keywords arguments to
        ``Blueprint.add_url_rule``.
    """
    assert route
    assert pid_type

    permission_factory = (
        import_string(permission_factory_imp)
        if permission_factory_imp else None
    )
    view_method = import_string(view_imp) if view_imp else default_view_method
    record_cls = import_string(record_class) if record_class else Record

    view_func = partial(
        record_view,
        resolver=Resolver(pid_type=pid_type, object_type='rec',
                          getter=record_cls.get_record),
        template=template or 'invenio_records_ui/detail.html',
        permission_factory=permission_factory,
        view_method=view_method)
    # A partial has no __module__/__name__; fill them in so the view is
    # well-behaved for Flask-DebugToolbar.
    view_func.__module__ = record_view.__module__
    view_func.__name__ = record_view.__name__

    return dict(
        endpoint=endpoint,
        rule=route,
        view_func=view_func,
        methods=methods or ['GET'],
    )
|
python
|
{
"resource": ""
}
|
q277675
|
record_view
|
test
|
def record_view(pid_value=None, resolver=None, template=None,
                permission_factory=None, view_method=None, **kwargs):
    """Display record view.
    The two parameters ``resolver`` and ``template`` should not be included
    in the URL rule, but instead set by creating a partially evaluated function
    of the view.
    The template being rendered is passed two variables in the template
    context:
    - ``pid``
    - ``record``.
    Procedure followed:
    #. PID and record are resolved.
    #. Permission are checked.
    #. ``view_method`` is called.
    :param pid_value: Persistent identifier value.
    :param resolver: An instance of a persistent identifier resolver. A
        persistent identifier resolver takes care of resolving persistent
        identifiers into internal objects.
    :param template: Template to render.
    :param permission_factory: Permission factory called to check if user has
        enough power to execute the action.
    :param view_method: Function that is called.
    :returns: Tuple (pid object, record object).
    """
    try:
        pid, record = resolver.resolve(pid_value)
    except (PIDDoesNotExistError, PIDUnregistered):
        # Unknown or never-registered PID -> not found.
        abort(404)
    except PIDMissingObjectError as e:
        current_app.logger.exception(
            "No object assigned to {0}.".format(e.pid),
            extra={'pid': e.pid})
        abort(500)
    except PIDRedirectedError as e:
        # The PID points at another PID; redirect the client there.
        try:
            return redirect(url_for(
                '.{0}'.format(e.destination_pid.pid_type),
                pid_value=e.destination_pid.pid_value))
        except BuildError:
            # No endpoint is registered for the destination pid_type.
            current_app.logger.exception(
                "Invalid redirect - pid_type '{0}' endpoint missing.".format(
                    e.destination_pid.pid_type),
                extra={
                    'pid': e.pid,
                    'destination_pid': e.destination_pid,
                })
            abort(500)
    # Check permissions
    permission_factory = permission_factory or current_permission_factory
    if permission_factory:
        # Note, cannot be done in one line due to overloading of boolean
        # operations in permission object.
        if not permission_factory(record).can():
            from flask_login import current_user
            if not current_user.is_authenticated:
                # Anonymous user: send to login and come back afterwards.
                return redirect(url_for(
                    current_app.config['RECORDS_UI_LOGIN_ENDPOINT'],
                    next=request.url))
            abort(403)
    return view_method(pid, record, template=template, **kwargs)
|
python
|
{
"resource": ""
}
|
q277676
|
default_view_method
|
test
|
def default_view_method(pid, record, template=None, **kwargs):
    r"""Display default view.

    Sends the ``record_viewed`` signal and renders the template.

    :param pid: PID object.
    :param record: Record object.
    :param template: Template to render.
    :param \*\*kwargs: Additional view arguments based on URL rule.
    :returns: The rendered template.
    """
    record_viewed.send(current_app._get_current_object(), pid=pid,
                       record=record)
    return render_template(template, pid=pid, record=record)
|
python
|
{
"resource": ""
}
|
q277677
|
export
|
test
|
def export(pid, record, template=None, **kwargs):
    r"""Record serialization view.

    Serializes record with given format and renders record export template.

    :param pid: PID object.
    :param record: Record object.
    :param template: Template to render.
    :param \*\*kwargs: Additional view arguments based on URL rule.
    :return: The rendered template.
    """
    # Default to an empty mapping so a pid_type with no configured formats
    # yields a 404 below instead of an AttributeError on None.
    formats = current_app.config.get('RECORDS_UI_EXPORT_FORMATS', {}).get(
        pid.pid_type, {})
    fmt = formats.get(request.view_args.get('format'))

    if fmt is False:
        # If value is set to False, it means it was deprecated.
        abort(410)
    elif fmt is None:
        abort(404)
    else:
        serializer = obj_or_import_string(fmt['serializer'])
        data = serializer.serialize(pid, record)
        if isinstance(data, six.binary_type):
            data = data.decode('utf8')
        return render_template(
            template, pid=pid, record=record, data=data,
            format_title=fmt['title'],
        )
|
python
|
{
"resource": ""
}
|
q277678
|
Chronometer.time_callable
|
test
|
def time_callable(self, name, target, rate=None, args=(), kwargs=None):
    # type: (str, Callable, float, Tuple, Dict) -> Any
    """Call ``target`` and send a Timer metric with its execution duration.

    :param name: metric name
    :param target: callable to invoke and time
    :param rate: sample rate; defaults to the client's configured rate
    :param args: positional arguments passed to ``target``
    :param kwargs: keyword arguments passed to ``target``
    :returns: whatever ``target`` returns
    """
    assert callable(target)
    if rate is None:
        rate = self._rate
    else:
        assert_sample_rate(rate)
    # None sentinel instead of a mutable {} default, which would be shared
    # across calls.
    if kwargs is None:
        kwargs = {}
    start_time = time()  # type: float
    result = target(*args, **kwargs)
    self.since(name, start_time, rate)
    return result
|
python
|
{
"resource": ""
}
|
q277679
|
AutoClosingSharedSocket.close
|
test
|
def close(self):
    # type: () -> None
    """Close the socket to free system resources.

    After closing, further socket operations fail. Calling close more
    than once has no effect.
    """
    if not self._closed:
        self._socket.close()
        self._closed = True
|
python
|
{
"resource": ""
}
|
q277680
|
AutoClosingSharedSocket.remove_client
|
test
|
def remove_client(self, client):
    # type: (object) -> None
    """Detach a client from the shared socket.

    Once no client uses the socket any more, it closes automatically.
    """
    client_id = id(client)
    if client_id in self._clients:
        self._clients.remove(client_id)
    if len(self._clients) < 1:
        self.close()
|
python
|
{
"resource": ""
}
|
q277681
|
AbstractClient.increment
|
test
|
def increment(self, name, count=1, rate=1):
    # type: (str, int, float) -> None
    """Increment a Counter metric"""
    if not self._should_send_metric(name, rate):
        return
    metric = Counter(
        self._create_metric_name_for_request(name), int(count), rate)
    self._request(metric.to_request())
|
python
|
{
"resource": ""
}
|
q277682
|
AbstractClient.timing
|
test
|
def timing(self, name, milliseconds, rate=1):
    # type: (str, float, float) -> None
    """Send a Timer metric with the specified duration in milliseconds"""
    if not self._should_send_metric(name, rate):
        return
    metric = Timer(
        self._create_metric_name_for_request(name), int(milliseconds), rate)
    self._request(metric.to_request())
|
python
|
{
"resource": ""
}
|
q277683
|
AbstractClient.timing_since
|
test
|
def timing_since(self, name, start_time, rate=1):
    # type: (str, Union[float, datetime], float) -> None
    """Send a Timer metric calculating the duration from the start time"""
    if isinstance(start_time, datetime):
        elapsed = datetime.now(start_time.tzinfo) - start_time
        duration = elapsed.total_seconds() * 1000
    elif is_numeric(start_time):
        assert start_time > 0
        duration = (time() - start_time) * 1000
    else:
        raise ValueError("start time should be a timestamp or a datetime")
    self.timing(name, duration, rate)
|
python
|
{
"resource": ""
}
|
q277684
|
AbstractClient.gauge
|
test
|
def gauge(self, name, value, rate=1):
    # type: (str, float, float) -> None
    """Send a Gauge metric with the specified value"""
    if not self._should_send_metric(name, rate):
        return
    if not is_numeric(value):
        value = float(value)
    metric = Gauge(self._create_metric_name_for_request(name), value, rate)
    self._request(metric.to_request())
|
python
|
{
"resource": ""
}
|
q277685
|
AbstractClient.gauge_delta
|
test
|
def gauge_delta(self, name, delta, rate=1):
    # type: (str, float, float) -> None
    """Send a GaugeDelta metric to change a Gauge by the specified value"""
    if not self._should_send_metric(name, rate):
        return
    if not is_numeric(delta):
        delta = float(delta)
    metric = GaugeDelta(
        self._create_metric_name_for_request(name), delta, rate)
    self._request(metric.to_request())
|
python
|
{
"resource": ""
}
|
q277686
|
AbstractClient.set
|
test
|
def set(self, name, value, rate=1):
    # type: (str, str, float) -> None
    """Send a Set metric with the specified unique value"""
    if not self._should_send_metric(name, rate):
        return
    metric = Set(
        self._create_metric_name_for_request(name), str(value), rate)
    self._request(metric.to_request())
|
python
|
{
"resource": ""
}
|
q277687
|
BatchClientMixIn._request
|
test
|
def _request(self, data):
# type: (str) -> None
"""Override parent by buffering the metric instead of sending now"""
data = bytearray("{}\n".format(data).encode())
self._prepare_batches_for_storage(len(data))
self._batches[-1].extend(data)
|
python
|
{
"resource": ""
}
|
q277688
|
Client.batch_client
|
test
|
def batch_client(self, size=512):
    # type: (int) -> BatchClient
    """Return a batch client configured like this client"""
    client = BatchClient(self.host, self.port, self.prefix, size)
    self._configure_client(client)
    return client
|
python
|
{
"resource": ""
}
|
q277689
|
BatchClient.unit_client
|
test
|
def unit_client(self):
    # type: () -> Client
    """Return a plain client configured like this batch client"""
    plain = Client(self.host, self.port, self.prefix)
    self._configure_client(plain)
    return plain
|
python
|
{
"resource": ""
}
|
q277690
|
BatchClient.flush
|
test
|
def flush(self):
    # type: () -> BatchClient
    """Send buffered metrics in batch requests"""
    destination = self.remote_address
    while self._batches:
        self._socket.sendto(self._batches[0], destination)
        self._batches.popleft()
    return self
|
python
|
{
"resource": ""
}
|
q277691
|
my_permission_factory
|
test
|
def my_permission_factory(record, *args, **kwargs):
    """My permission factory."""
    def can(self):
        # Grant access only when the stored record is marked open.
        stored = Record.get_record(record.id)
        return stored.get('access', '') == 'open'
    checker_cls = type('MyPermissionChecker', (), {'can': can})
    return checker_cls()
|
python
|
{
"resource": ""
}
|
q277692
|
TCPClient.batch_client
|
test
|
def batch_client(self, size=512):
    # type: (int) -> TCPBatchClient
    """Return a TCP batch client configured like this TCP client"""
    client = TCPBatchClient(self.host, self.port, self.prefix, size)
    self._configure_client(client)
    return client
|
python
|
{
"resource": ""
}
|
q277693
|
TCPBatchClient.flush
|
test
|
def flush(self):
    # type: () -> TCPBatchClient
    """Send buffered metrics in batch requests over TCP"""
    while self._batches:
        self._socket.sendall(self._batches[0])
        self._batches.popleft()
    return self
|
python
|
{
"resource": ""
}
|
q277694
|
TCPBatchClient.unit_client
|
test
|
def unit_client(self):
    # type: () -> TCPClient
    """Return a TCPClient configured like this batch TCP client"""
    plain = TCPClient(self.host, self.port, self.prefix)
    self._configure_client(plain)
    return plain
|
python
|
{
"resource": ""
}
|
q277695
|
any_user
|
test
|
def any_user(password=None, permissions=None, groups=None, **kwargs):
    """
    Shortcut for creating Users.

    ``permissions`` may be a list of 'app_label.codename' permission
    names; ``groups`` a list of group names. If not specified otherwise,
    creates an active, non-superuser and non-staff user.
    """
    # None sentinels instead of mutable [] defaults, which are shared
    # between calls.
    permissions = [] if permissions is None else permissions
    groups = [] if groups is None else groups
    is_active = kwargs.pop('is_active', True)
    is_superuser = kwargs.pop('is_superuser', False)
    is_staff = kwargs.pop('is_staff', False)
    user = any_model(User, is_active=is_active, is_superuser=is_superuser,
                     is_staff=is_staff, **kwargs)
    for group_name in groups:
        user.groups.add(Group.objects.get(name=group_name))
    for permission_name in permissions:
        app_label, codename = permission_name.split('.')
        permission = Permission.objects.get(
            content_type__app_label=app_label,
            codename=codename)
        user.user_permissions.add(permission)
    if password:
        user.set_password(password)
    user.save()
    return user
|
python
|
{
"resource": ""
}
|
q277696
|
interpretAsOpenMath
|
test
|
def interpretAsOpenMath(x):
    """Tries to convert a Python object into an OpenMath object.

    This is not a replacement for using a Converter for exporting Python
    objects; instead, it is used for conveniently building OM objects in a
    DSL embedded in Python. In particular, it converts Python functions
    into OMBinding objects using a "lambda" symbol as the binder.
    """
    if hasattr(x, "_ishelper") and x._ishelper:
        # wrapped things in this class -> unwrap
        return x._toOM()
    elif isinstance(x, om.OMAny):
        # already OM
        return x
    elif isinstance(x, six.integer_types):
        # integers -> OMI
        return om.OMInteger(x)
    elif isinstance(x, float):
        # floats -> OMF
        return om.OMFloat(x)
    elif isinstance(x, six.string_types):
        # strings -> OMSTR
        return om.OMString(x)
    elif isinstance(x, WrappedHelper):
        # wrapper -> wrapped object
        return x.toOM()
    elif inspect.isfunction(x):
        # function -> OMBIND(lambda,...)
        # get all the parameters of the function
        paramMap = inspect.signature(x).parameters
        params = [v for k, v in six.iteritems(paramMap)]
        # make sure that all of them are positional
        posArgKinds = [inspect.Parameter.POSITIONAL_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD]
        if not all([p.kind in posArgKinds for p in params]):
            raise CannotInterpretAsOpenMath("no sequence arguments allowed")
        # call the function with appropriate OMVariables to obtain its body
        paramsOM = [om.OMVariable(name=p.name) for p in params]
        bodyOM = interpretAsOpenMath(x(*paramsOM))
        return OMBinding(om.OMSymbol(name="lambda", cd="python", cdbase="http://python.org"), paramsOM, bodyOM)
    else:
        # fail
        raise CannotInterpretAsOpenMath("unknown kind of object: " + str(x))
|
python
|
{
"resource": ""
}
|
q277697
|
convertAsOpenMath
|
test
|
def convertAsOpenMath(term, converter):
    """ Converts a term into OpenMath, using either a converter or the interpretAsOpenMath method """
    # OpenMath objects and our magic helpers are handled directly.
    is_helper = hasattr(term, "_ishelper") and term._ishelper
    if is_helper or isinstance(term, om.OMAny):
        return interpretAsOpenMath(term)
    if converter is not None:
        # Best effort: any converter failure simply falls through.
        try:
            candidate = converter.to_openmath(term)
        except Exception:
            candidate = None
        if isinstance(candidate, om.OMAny):
            return candidate
    # fallback to the openmath helper
    return interpretAsOpenMath(term)
|
python
|
{
"resource": ""
}
|
q277698
|
Converter.to_python
|
test
|
def to_python(self, omobj):
    """ Convert OpenMath object to Python """
    cls = omobj.__class__
    # Explicit per-class overrides win over the generic handling below.
    if cls in self._omclass_to_py:
        return self._omclass_to_py[cls](omobj)
    if isinstance(omobj, om.OMSymbol):
        # Symbols resolve through the (cdbase, cd, name) lookup table.
        return self._lookup_to_python(omobj.cdbase, omobj.cd, omobj.name)
    if isinstance(omobj, om.OMApplication):
        # Applications: convert head and arguments, then apply.
        head = self.to_python(omobj.elem)
        arguments = [self.to_python(arg) for arg in omobj.arguments]
        return head(*arguments)
    raise ValueError('Cannot convert object of class %s to Python.' % cls.__name__)
|
python
|
{
"resource": ""
}
|
q277699
|
Converter.to_openmath
|
test
|
def to_openmath(self, obj):
    """ Convert Python object to OpenMath """
    # Most recently registered converters take precedence.
    for klass, converter in reversed(self._conv_to_om):
        if klass is not None and not isinstance(obj, klass):
            continue
        try:
            return converter(obj)
        except CannotConvertError:
            pass
    if hasattr(obj, '__openmath__'):
        return obj.__openmath__()
    raise ValueError('Cannot convert %r to OpenMath.' % obj)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.