| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (3 classes) | text (string, 75-19.8k chars) | language (1 class) | meta_information (dict) |
---|---|---|---|---|---|
q278600
|
CopyDoc._parse_attr
|
test
|
def _parse_attr(self, tagname, attr, value):
"""
Parse attribute. Delegate to href parser for hrefs, otherwise return
value.
"""
if tagname == 'a' and attr == 'href':
return self._parse_href(value)
else:
return value
|
python
|
{
"resource": ""
}
|
q278601
|
translate_key_values
|
test
|
def translate_key_values(adict, translations, default=''):
"""Modify the keys in adict to the ones in translations.
Be careful, this will modify your input dictionary.
The keys not present in translations will be left intact.
Parameters
----------
adict: a dictionary
translations: iterable of 2-tuples
Each 2-tuple must have the following format:
(<adict existing key>, <desired key name for the existing key>)
Returns
-------
Translated adict
"""
for src_key, dst_key in translations:
adict[dst_key] = adict.pop(src_key, default)
return adict
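A minimal usage sketch, assuming the translate_key_values function above is in scope; the record dict and key names are made up:
record = {'fname': 'Ada', 'lname': 'Lovelace', 'age': 36}
renamed = translate_key_values(record, [('fname', 'first_name'), ('lname', 'last_name')])
print(renamed)   # {'age': 36, 'first_name': 'Ada', 'last_name': 'Lovelace'}
# Note: `record` itself is mutated, since the keys are popped in place.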
|
python
|
{
"resource": ""
}
|
q278602
|
JSONMixin.to_json_str
|
test
|
def to_json_str(self):
"""Convert data to json string representation.
Returns:
json representation as string.
"""
    adict = dict(vars(self))
    adict['type'] = self.__class__.__name__
    return json.dumps(adict, sort_keys=True)
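A standalone sketch of the same vars()-to-JSON pattern, using a hypothetical Point class:
import json

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

payload = dict(vars(Point(1, 2)))            # instance attributes as a dict
payload['type'] = Point.__name__
print(json.dumps(payload, sort_keys=True))   # {"type": "Point", "x": 1, "y": 2}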
|
python
|
{
"resource": ""
}
|
q278603
|
find_file_match
|
test
|
def find_file_match(folder_path, regex=''):
"""
Returns absolute paths of files that match the regex within folder_path and
all its children folders.
Note: The regex matching is done using the match function
of the re module.
Parameters
----------
folder_path: string
regex: string
Returns
-------
A list of strings.
"""
outlist = []
for root, dirs, files in os.walk(folder_path):
outlist.extend([os.path.join(root, f) for f in files
if re.match(regex, f)])
return outlist
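Because the docstring notes that matching uses re.match, patterns are anchored at the start of each file name; a quick illustration (the call at the end is hypothetical):
import re

print(bool(re.match(r'\.py$', 'main.py')))    # False: match() anchors at position 0
print(bool(re.match(r'.*\.py$', 'main.py')))  # True
# python_files = find_file_match('/tmp/project', r'.*\.py$')  # hypothetical call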
|
python
|
{
"resource": ""
}
|
q278604
|
concat
|
test
|
def concat(ctx, *strings):
'''
Yields one string, concatenation of argument strings
'''
strings = flatten([ (s.compute(ctx) if callable(s) else s) for s in strings ])
strings = (next(string_arg(ctx, s), '') for s in strings)
#assert(all(map(lambda x: isinstance(x, str), strings)))
#FIXME: Check arg types
yield ''.join(strings)
|
python
|
{
"resource": ""
}
|
q278605
|
starts_with
|
test
|
def starts_with(ctx, full, part):
'''
Yields one boolean, whether the first string starts with the second
'''
full = next(string_arg(ctx, full), '')
part = next(string_arg(ctx, part), '')
yield full.startswith(part)
|
python
|
{
"resource": ""
}
|
q278606
|
contains
|
test
|
def contains(ctx, full, part):
'''
Yields one boolean, whether the first string contains the second
'''
full = next(string_arg(ctx, full), '')
part = next(string_arg(ctx, part), '')
yield part in full
|
python
|
{
"resource": ""
}
|
q278607
|
string_length
|
test
|
def string_length(ctx, s=None):
'''
Yields one number
'''
if s is None:
s = ctx.node
elif callable(s):
s = next(s.compute(ctx), '')
yield len(s)
|
python
|
{
"resource": ""
}
|
q278608
|
boolean
|
test
|
def boolean(ctx, obj):
'''
Yields one boolean, false if the argument sequence is empty, otherwise
* false if the first item is a boolean and false
* false if the first item is a number and positive or negative zero or NaN
* false if the first item is a string and ''
* true in all other cases
'''
    if hasattr(obj, 'compute'):
        obj = next(obj.compute(ctx), '')
    yield next(to_boolean(obj), '')
|
python
|
{
"resource": ""
}
|
q278609
|
foreach_
|
test
|
def foreach_(ctx, seq, expr):
'''
Yields the result of applying an expression to each item in the input sequence.
* seq: input sequence
* expr: expression to be converted to string, then dynamically evaluated for each item on the sequence to produce the result
'''
from . import context, parse as uxpathparse
if hasattr(seq, 'compute'):
seq = seq.compute(ctx)
expr = next(string_arg(ctx, expr), '')
pexpr = uxpathparse(expr)
for item in seq:
innerctx = ctx.copy(item=item)
yield from pexpr.compute(innerctx)
|
python
|
{
"resource": ""
}
|
q278610
|
lookup_
|
test
|
def lookup_(ctx, tableid, key):
'''
Yields a sequence of a single value, the result of looking up a value from the tables provided in the context, or an empty sequence if lookup is unsuccessful
* tableid: id of the lookup table to use
    * key: the lookup key, converted to string before the table lookup
'''
tableid = next(string_arg(ctx, tableid), '')
key = next(string_arg(ctx, key), '')
#value = ctx.
for item in seq:
innerctx = ctx.copy(item=item)
yield from pexpr.compute(innerctx)
|
python
|
{
"resource": ""
}
|
q278611
|
replace_chars_for_svg_code
|
test
|
def replace_chars_for_svg_code(svg_content):
""" Replace known special characters to SVG code.
Parameters
----------
svg_content: str
Returns
-------
corrected_svg: str
Corrected SVG content
"""
result = svg_content
    svg_char = [
        ('&', '&amp;'),
        ('>', '&gt;'),
        ('<', '&lt;'),
        ('"', '&quot;'),
    ]
for c, entity in svg_char:
result = result.replace(c, entity)
return result
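A small check of the escape table above; '&' is replaced first so the entities produced by the later replacements are not double-escaped:
print(replace_chars_for_svg_code('size < 10 & "big"'))
# size &lt; 10 &amp; &quot;big&quot;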
|
python
|
{
"resource": ""
}
|
q278612
|
_check_svg_file
|
test
|
def _check_svg_file(svg_file):
""" Try to read a SVG file if `svg_file` is a string.
Raise an exception in case of error or return the svg object.
If `svg_file` is a svgutils svg object, will just return it.
Parameters
----------
svg_file: str or svgutils.transform.SVGFigure object
If a `str`: path to a '.svg' file,
otherwise a svgutils svg object is expected.
Returns
-------
svgutils svg object
Raises
------
Exception if any error happens.
"""
if isinstance(svg_file, str):
try:
svg = sg.fromfile(svg_file)
except Exception as exc:
raise Exception('Error reading svg file {}.'.format(svg_file)) from exc
else:
return svg
if isinstance(svg_file, sg.SVGFigure):
return svg_file
raise ValueError('Expected `svg_file` to be `str` or `svgutils.SVG`, got {}.'.format(type(svg_file)))
|
python
|
{
"resource": ""
}
|
q278613
|
merge_svg_files
|
test
|
def merge_svg_files(svg_file1, svg_file2, x_coord, y_coord, scale=1):
""" Merge `svg_file2` in `svg_file1` in the given positions `x_coord`, `y_coord` and `scale`.
Parameters
----------
svg_file1: str or svgutils svg document object
Path to a '.svg' file.
svg_file2: str or svgutils svg document object
Path to a '.svg' file.
x_coord: float
Horizontal axis position of the `svg_file2` content.
y_coord: float
Vertical axis position of the `svg_file2` content.
scale: float
Scale to apply to `svg_file2` content.
Returns
-------
`svg1` svgutils object with the content of 'svg_file2'
"""
svg1 = _check_svg_file(svg_file1)
svg2 = _check_svg_file(svg_file2)
svg2_root = svg2.getroot()
svg1.append([svg2_root])
svg2_root.moveto(x_coord, y_coord, scale=scale)
return svg1
|
python
|
{
"resource": ""
}
|
q278614
|
merge_pdfs
|
test
|
def merge_pdfs(pdf_filepaths, out_filepath):
""" Merge all the PDF files in `pdf_filepaths` in a new PDF file `out_filepath`.
Parameters
----------
pdf_filepaths: list of str
Paths to PDF files.
out_filepath: str
Path to the result PDF file.
Returns
-------
path: str
The output file path.
"""
merger = PdfFileMerger()
for pdf in pdf_filepaths:
merger.append(PdfFileReader(open(pdf, 'rb')))
merger.write(out_filepath)
return out_filepath
|
python
|
{
"resource": ""
}
|
q278615
|
_embed_font_to_svg
|
test
|
def _embed_font_to_svg(filepath, font_files):
""" Return the ElementTree of the SVG content in `filepath`
with the font content embedded.
"""
with open(filepath, 'r') as svgf:
tree = etree.parse(svgf)
if not font_files:
return tree
fontfaces = FontFaceGroup()
for font_file in font_files:
fontfaces.append(FontFace(font_file))
for element in tree.iter():
if element.tag.split("}")[1] == 'svg':
break
element.insert(0, fontfaces.xml_elem)
return tree
|
python
|
{
"resource": ""
}
|
q278616
|
embed_font_to_svg
|
test
|
def embed_font_to_svg(filepath, outfile, font_files):
""" Write ttf and otf font content from `font_files`
in the svg file in `filepath` and write the result in
`outfile`.
Parameters
----------
filepath: str
The SVG file whose content must be modified.
outfile: str
The file path where the result will be written.
font_files: iterable of str
List of paths to .ttf or .otf files.
"""
tree = _embed_font_to_svg(filepath, font_files)
tree.write(outfile, encoding='utf-8', pretty_print=True)
|
python
|
{
"resource": ""
}
|
q278617
|
SearchMethod._check_inputs
|
test
|
def _check_inputs(self):
''' make some basic checks on the inputs to make sure they are valid'''
try:
_ = self._inputs[0]
except TypeError:
raise RuntimeError(
"inputs should be iterable but found type='{0}', value="
"'{1}'".format(type(self._inputs), str(self._inputs)))
from melody.inputs import Input
for check_input in self._inputs:
if not isinstance(check_input, Input):
raise RuntimeError(
"input should be a subclass of the Input class but "
"found type='{0}', value='{1}'".format(type(check_input),
str(check_input)))
|
python
|
{
"resource": ""
}
|
q278618
|
SearchMethod._check_function
|
test
|
def _check_function(self):
''' make some basic checks on the function to make sure it is valid'''
# note, callable is valid for Python 2 and Python 3.2 onwards but
# not inbetween
if not callable(self._function):
raise RuntimeError(
"provided function '{0}' is not callable".
format(str(self._function)))
from inspect import getargspec
arg_info = getargspec(self._function)
if len(arg_info.args) != 1:
print str(arg_info)
raise RuntimeError(
"provided function should have one argument but found "
"{0}".format(len(arg_info.args)))
|
python
|
{
"resource": ""
}
|
q278619
|
BruteForce._recurse
|
test
|
def _recurse(self, inputs, output):
'''internal recursion routine called by the run method that generates
all input combinations'''
if inputs:
my_input = inputs[0]
name = my_input.name
if my_input.state:
my_options = my_input.options(self.state)
else:
my_options = my_input.options
for option in my_options:
my_output = list(output)
my_output.append({name: option})
self._recurse(inputs[1:], my_output)
else:
try:
valid, result = self._function(output)
except ValueError:
raise RuntimeError("function must return 2 values")
print output, valid, result
|
python
|
{
"resource": ""
}
|
q278620
|
create_input
|
test
|
def create_input(option, template_name, template_location="template"):
'''create an input file using jinja2 by filling a template
with the values from the option variable passed in.'''
# restructure option list into jinja2 input format
jinja2_input = {}
for item in option:
try:
jinja2_input.update(item)
except ValueError:
raise RuntimeError(
("inputs.py, create_input : format of item '{0}' is not "
"supported. Expecting a dictionary.".format(str(item))))
# load the template and fill it with the option variable contents
import jinja2
try:
template_loader = jinja2.FileSystemLoader(searchpath=template_location)
template_env = jinja2.Environment(loader=template_loader)
template = template_env.get_template(template_name)
output_text = template.render(jinja2_input)
except jinja2.TemplateNotFound:
raise RuntimeError("template '{0}' not found".format(template_name))
# return the particular input file as a string
return output_text
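A minimal sketch of the same idea with an in-memory jinja2 template instead of a FileSystemLoader; the option values and template text are made up:
import jinja2

option = [{'timestep': 0.002}, {'temperature': 300}]
params = {}
for item in option:
    params.update(item)                      # flatten the option list into one dict
template = jinja2.Template('dt = {{ timestep }}\nref_t = {{ temperature }}')
print(template.render(params))               # dt = 0.002 / ref_t = 300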
|
python
|
{
"resource": ""
}
|
q278621
|
Subsets._recurse
|
test
|
def _recurse(self, inputs, output, depth, max_depth):
'''We work out all combinations using this internal recursion method'''
if depth < max_depth:
for index, option in enumerate(inputs):
my_output = list(output)
my_output.append(option)
self._recurse(inputs[index + 1:], my_output, depth + 1,
max_depth)
else:
self._options.append(output)
|
python
|
{
"resource": ""
}
|
q278622
|
to_string
|
test
|
def to_string(obj):
'''
Cast an arbitrary object or sequence to a string type
'''
if isinstance(obj, LiteralWrapper):
val = obj.obj
elif isinstance(obj, Iterable) and not isinstance(obj, str):
val = next(obj, None)
else:
val = obj
if val is None:
yield ''
elif isinstance(val, str):
yield val
elif isinstance(val, node):
yield strval(val)
    elif isinstance(val, bool):
        yield 'true' if val else 'false'
    elif isinstance(val, int) or isinstance(val, float):
        yield str(val)
else:
raise RuntimeError('Unknown type for string conversion: {}'.format(val))
|
python
|
{
"resource": ""
}
|
q278623
|
to_number
|
test
|
def to_number(obj):
'''
Cast an arbitrary object or sequence to a number type
'''
if isinstance(obj, LiteralWrapper):
val = obj.obj
elif isinstance(obj, Iterable) and not isinstance(obj, str):
val = next(obj, None)
else:
val = obj
if val is None:
#FIXME: Should be NaN, not 0
yield 0
elif isinstance(val, str):
yield float(val)
elif isinstance(val, node):
yield float(strval(val))
elif isinstance(val, int) or isinstance(val, float):
yield val
else:
raise RuntimeError('Unknown type for number conversion: {}'.format(val))
|
python
|
{
"resource": ""
}
|
q278624
|
to_boolean
|
test
|
def to_boolean(obj):
'''
Cast an arbitrary sequence to a boolean type
'''
#if hasattr(obj, '__iter__'):
if isinstance(obj, LiteralWrapper):
val = obj.obj
elif isinstance(obj, Iterable) and not isinstance(obj, str):
val = next(obj, None)
else:
val = obj
if val is None:
yield False
elif isinstance(val, bool):
yield val
elif isinstance(val, str):
        yield bool(val)
elif isinstance(val, node):
yield True
elif isinstance(val, float) or isinstance(val, int):
yield bool(val)
else:
raise RuntimeError('Unknown type for boolean conversion: {}'.format(val))
|
python
|
{
"resource": ""
}
|
q278625
|
_serialize
|
test
|
def _serialize(xp_ast):
'''Generate token strings which, when joined together, form a valid
XPath serialization of the AST.'''
if hasattr(xp_ast, '_serialize'):
for tok in xp_ast._serialize():
yield(tok)
elif isinstance(xp_ast, str):
yield(repr(xp_ast))
|
python
|
{
"resource": ""
}
|
q278626
|
change_xml_encoding
|
test
|
def change_xml_encoding(filepath, src_enc, dst_enc='utf-8'):
""" Modify the encoding entry in the XML file.
Parameters
----------
filepath: str
Path to the file to be modified.
src_enc: str
Encoding that is written in the file
dst_enc: str
Encoding to be set in the file.
"""
enc_attr = "encoding='{}'"
replace_file_content(filepath, enc_attr.format(src_enc), enc_attr.format(dst_enc), 1)
|
python
|
{
"resource": ""
}
|
q278627
|
save_into_qrcode
|
test
|
def save_into_qrcode(text, out_filepath, color='', box_size=10, pixel_size=1850):
""" Save `text` in a qrcode svg image file.
Parameters
----------
text: str
The string to be codified in the QR image.
out_filepath: str
Path to the output file
color: str
A RGB color expressed in 6 hexadecimal values.
box_size: scalar
Size of the QR code boxes.
"""
try:
qr = qrcode.QRCode(version=1, error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=box_size, border=0, )
qr.add_data(text)
qr.make(fit=True)
except Exception as exc:
raise Exception('Error trying to generate QR code '
' from `vcard_string`: {}'.format(text)) from exc
else:
img = qr.make_image(image_factory=qrcode.image.svg.SvgPathImage)
_ = _qrcode_to_file(img, out_filepath)
if color:
replace_file_content(out_filepath, 'fill:#000000', 'fill:#{}'.format(color))
|
python
|
{
"resource": ""
}
|
q278628
|
launch
|
test
|
def launch(option):
'''Set the gromacs input data using the supplied input options, run
gromacs and extract and return the required outputs.'''
from melody.inputs import create_input
_ = create_input(option, template_name="input.mdp")
# save the input file in the appropriate place and launch gromacs using
# longbow ...
# determine if the run was successful
success = True
results = None
if success:
# extract the required outputs
results = {"rate": {"value": 35, "units": "ns/day"}, }
return success, results
|
python
|
{
"resource": ""
}
|
q278629
|
call_command
|
test
|
def call_command(cmd_name, args_strings):
"""Call CLI command with arguments and returns its return value.
Parameters
----------
cmd_name: str
Command name or full path to the binary file.
    args_strings: list of str
        Argument strings to pass to the command.
Returns
-------
return_value
Command return value.
"""
if not os.path.isabs(cmd_name):
cmd_fullpath = which(cmd_name)
else:
cmd_fullpath = cmd_name
try:
cmd_line = [cmd_fullpath] + args_strings
log.debug('Calling: `{}`.'.format(' '.join(cmd_line)))
# retval = subprocess.check_call(cmd_line)
retval = subprocess.call(' '.join(cmd_line), shell=True)
except CalledProcessError as ce:
log.exception(
"Error calling command with arguments: "
"{} \n With return code: {}".format(cmd_line, ce.returncode)
)
raise
else:
return retval
|
python
|
{
"resource": ""
}
|
q278630
|
tex2pdf
|
test
|
def tex2pdf(tex_file, output_file=None, output_format='pdf'):
""" Call PDFLatex to convert TeX files to PDF.
Parameters
----------
tex_file: str
        Path to the input LaTeX file.
output_file: str
Path to the output PDF file.
If None, will use the same output directory as the tex_file.
output_format: str
Output file format. Choices: 'pdf' or 'dvi'. Default: 'pdf'
Returns
-------
return_value
PDFLatex command call return value.
"""
if not os.path.exists(tex_file):
raise IOError('Could not find file {}.'.format(tex_file))
if output_format != 'pdf' and output_format != 'dvi':
raise ValueError("Invalid output format given {}. Can only accept 'pdf' or 'dvi'.".format(output_format))
cmd_name = 'pdflatex'
check_command(cmd_name)
args_strings = [cmd_name]
if output_file is not None:
args_strings += ['-output-directory="{}" '.format(os.path.abspath(os.path.dirname(output_file)))]
result_dir = os.path.dirname(output_file) if output_file else os.path.dirname(tex_file)
args_strings += ['-output-format="{}"'.format(output_format)]
args_strings += ['"' + tex_file + '"']
log.debug('Calling command {} with args: {}.'.format(cmd_name, args_strings))
ret = simple_call(args_strings)
result_file = os.path.join(result_dir, remove_ext(os.path.basename(tex_file)) + '.' + output_format)
if os.path.exists(result_file):
shutil.move(result_file, output_file)
else:
raise IOError('Could not find PDFLatex result file.')
log.debug('Cleaning *.aux and *.log files from folder {}.'.format(result_dir))
cleanup(result_dir, 'aux')
cleanup(result_dir, 'log')
return ret
|
python
|
{
"resource": ""
}
|
q278631
|
GOLoopFuse.options
|
test
|
def options(self, my_psy):
'''Returns all potential loop fusion options for the psy object
provided'''
# compute options dynamically here as they may depend on previous
# changes to the psy tree
my_options = []
invokes = my_psy.invokes.invoke_list
#print "there are {0} invokes".format(len(invokes))
if self._dependent_invokes:
raise RuntimeError(
"dependent invokes assumes fusion in one invoke might "
"affect fusion in another invoke. This is not yet "
"implemented")
else:
# treat each invoke separately
for idx, invoke in enumerate(invokes):
print "invoke {0}".format(idx)
# iterate through each outer loop
for loop in invoke.schedule.loops():
if loop.loop_type == "outer":
siblings = loop.parent.children
my_index = siblings.index(loop)
option = []
self._recurse(siblings, my_index, option, my_options,
invoke)
return my_options
|
python
|
{
"resource": ""
}
|
q278632
|
transform
|
test
|
def transform(geom, to_sref):
"""Returns a transformed Geometry.
Arguments:
geom -- any coercible Geometry value or Envelope
to_sref -- SpatialReference or EPSG ID as int
"""
# If we have an envelope, assume it's in the target sref.
try:
geom = getattr(geom, 'polygon', Envelope(geom).polygon)
except (TypeError, ValueError):
pass
else:
geom.AssignSpatialReference(to_sref)
try:
geom_sref = geom.GetSpatialReference()
except AttributeError:
return transform(Geometry(geom), to_sref)
if geom_sref is None:
raise Exception('Cannot transform from unknown spatial reference')
# Reproject geom if necessary
if not geom_sref.IsSame(to_sref):
geom = geom.Clone()
geom.TransformTo(to_sref)
return geom
|
python
|
{
"resource": ""
}
|
q278633
|
Geometry
|
test
|
def Geometry(*args, **kwargs):
"""Returns an ogr.Geometry instance optionally created from a geojson str
or dict. The spatial reference may also be provided.
"""
# Look for geojson as a positional or keyword arg.
arg = kwargs.pop('geojson', None) or len(args) and args[0]
try:
srs = kwargs.pop('srs', None) or arg.srs.wkt
except AttributeError:
srs = SpatialReference(4326)
if hasattr(arg, 'keys'):
geom = ogr.CreateGeometryFromJson(json.dumps(arg))
elif hasattr(arg, 'startswith'):
# WKB as hexadecimal string.
char = arg[0] if arg else ' '
i = char if isinstance(char, int) else ord(char)
if i in (0, 1):
geom = ogr.CreateGeometryFromWkb(arg)
elif arg.startswith('{'):
geom = ogr.CreateGeometryFromJson(arg)
elif arg.startswith('<gml'):
geom = ogr.CreateGeometryFromGML(arg)
else:
raise ValueError('Invalid geometry value: %s' % arg)
elif hasattr(arg, 'wkb'):
geom = ogr.CreateGeometryFromWkb(bytes(arg.wkb))
else:
geom = ogr.Geometry(*args, **kwargs)
if geom:
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
geom.AssignSpatialReference(srs)
return geom
|
python
|
{
"resource": ""
}
|
q278634
|
Envelope.expand
|
test
|
def expand(self, other):
"""Expands this envelope by the given Envelope or tuple.
Arguments:
other -- Envelope, two-tuple, or four-tuple
"""
if len(other) == 2:
other += other
mid = len(other) // 2
self.ll = map(min, self.ll, other[:mid])
self.ur = map(max, self.ur, other[mid:])
|
python
|
{
"resource": ""
}
|
q278635
|
Envelope.intersect
|
test
|
def intersect(self, other):
"""Returns the intersection of this and another Envelope."""
inter = Envelope(tuple(self))
if inter.intersects(other):
mid = len(other) // 2
inter.ll = map(max, inter.ll, other[:mid])
inter.ur = map(min, inter.ur, other[mid:])
else:
inter.ll = (0, 0)
inter.ur = (0, 0)
return inter
|
python
|
{
"resource": ""
}
|
q278636
|
Envelope.intersects
|
test
|
def intersects(self, other):
"""Returns true if this envelope intersects another.
Arguments:
other -- Envelope or tuple of (minX, minY, maxX, maxY)
"""
try:
return (self.min_x <= other.max_x and
self.max_x >= other.min_x and
self.min_y <= other.max_y and
self.max_y >= other.min_y)
except AttributeError:
return self.intersects(Envelope(other))
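The same axis-separation test written against plain (min_x, min_y, max_x, max_y) tuples, as a self-contained sketch:
def boxes_intersect(a, b):
    return a[0] <= b[2] and a[2] >= b[0] and a[1] <= b[3] and a[3] >= b[1]

print(boxes_intersect((0, 0, 2, 2), (1, 1, 3, 3)))  # True: boxes overlap at a corner
print(boxes_intersect((0, 0, 1, 1), (2, 2, 3, 3)))  # False: boxes are disjoint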
|
python
|
{
"resource": ""
}
|
q278637
|
Envelope.polygon
|
test
|
def polygon(self):
"""Returns an OGR Geometry for this envelope."""
ring = ogr.Geometry(ogr.wkbLinearRing)
for coord in self.ll, self.lr, self.ur, self.ul, self.ll:
ring.AddPoint_2D(*coord)
polyg = ogr.Geometry(ogr.wkbPolygon)
polyg.AddGeometryDirectly(ring)
return polyg
|
python
|
{
"resource": ""
}
|
q278638
|
Table.from_ZNM
|
test
|
def from_ZNM(cls, Z, N, M, name=''):
"""
Creates a table from arrays Z, N and M
Example:
________
>>> Z = [82, 82, 83]
>>> N = [126, 127, 130]
>>> M = [-21.34, -18.0, -14.45]
>>> Table.from_ZNM(Z, N, M, name='Custom Table')
Z N
82 126 -21.34
127 -18.00
83 130 -14.45
Name: Custom Table, dtype: float64
"""
df = pd.DataFrame.from_dict({'Z': Z, 'N': N, 'M': M}).set_index(['Z', 'N'])['M']
df.name = name
return cls(df=df, name=name)
|
python
|
{
"resource": ""
}
|
q278639
|
Table.to_file
|
test
|
def to_file(self, path):
"""Export the contents to a file as comma separated values.
Parameters
----------
path : string
File path where the data should be saved to
Example
-------
Export the last ten elements of AME2012 to a new file:
>>> Table('AME2012').tail(10).to_file('last_ten.txt')
"""
with open(path, 'w') as f:
f.write('Z N M\n')
self.df.to_csv(path, sep='\t', mode='a')
|
python
|
{
"resource": ""
}
|
q278640
|
Table.select
|
test
|
def select(self, condition, name=''):
"""
Selects nuclei according to a condition on Z,N or M
Parameters
----------
condition : function,
Can have one of the signatures f(M), f(Z,N) or f(Z, N, M)
must return a boolean value
name: string, optional name for the resulting Table
Example:
--------
Select all nuclei with A > 160:
>>> A_gt_160 = lambda Z,N: Z + N > 160
>>> Table('AME2003').select(A_gt_160)
"""
if condition.func_code.co_argcount == 1:
idx = [(Z, N) for (Z, N), M in self if condition(M)]
if condition.func_code.co_argcount == 2:
idx = [(Z, N) for (Z, N) in self.index if condition(Z, N)]
if condition.func_code.co_argcount == 3:
idx = [(Z, N) for (Z, N), M in self if condition(Z, N, M)]
index = pd.MultiIndex.from_tuples(idx, names=['Z', 'N'])
return Table(df=self.df.ix[index], name=name)
|
python
|
{
"resource": ""
}
|
q278641
|
Table.at
|
test
|
def at(self, nuclei):
"""Return a selection of the Table at positions given by ``nuclei``
Parameters
----------
nuclei: list of tuples
A list where each element is tuple of the form (Z,N)
Example
-------
Return binding energies at magic nuclei:
>>> magic_nuclei = [(20,28), (50,50), (50,82), (82,126)]
>>> Table('AME2012').binding_energy.at(magic_nuclei)
Z N
20 28 416.014215
50 50 825.325172
82 1102.876416
82 126 1636.486450
"""
index = pd.MultiIndex.from_tuples(nuclei, names=['Z', 'N'])
return Table(df=self.df.ix[index], name=self.name)
|
python
|
{
"resource": ""
}
|
q278642
|
Table.intersection
|
test
|
def intersection(self, table):
"""
Select nuclei which also belong to ``table``
Parameters
----------
table: Table, Table object
Example:
----------
Table('AME2003').intersection(Table('AME1995'))
"""
idx = self.df.index & table.df.index
return Table(df=self.df[idx], name=self.name)
|
python
|
{
"resource": ""
}
|
q278643
|
Table.not_in
|
test
|
def not_in(self, table):
"""
Select nuclei not in table
Parameters
----------
table: Table, Table object from where nuclei should be removed
Example:
----------
Find the new nuclei in AME2003 with Z,N >= 8:
>>> Table('AME2003').not_in(Table('AME1995'))[8:,8:].count
389
"""
idx = self.df.index - table.df.index
return Table(df=self.df[idx], name=self.name)
|
python
|
{
"resource": ""
}
|
q278644
|
Table.odd_even
|
test
|
def odd_even(self):
"""
Selects odd-even nuclei from the table
"""
return self.select(lambda Z, N: (Z % 2) and not(N % 2), name=self.name)
|
python
|
{
"resource": ""
}
|
q278645
|
Table.even_odd
|
test
|
def even_odd(self):
"""
Selects even-odd nuclei from the table
"""
return self.select(lambda Z, N: not(Z % 2) and (N % 2), name=self.name)
|
python
|
{
"resource": ""
}
|
q278646
|
Table.even_even
|
test
|
def even_even(self):
"""
Selects even-even nuclei from the table
"""
return self.select(lambda Z, N: not(Z % 2) and not(N % 2), name=self.name)
|
python
|
{
"resource": ""
}
|
q278647
|
Table.error
|
test
|
def error(self, relative_to='AME2003'):
"""
Calculate error difference
Parameters
----------
relative_to : string,
a valid mass table name.
Example:
----------
>>> Table('DUZU').error(relative_to='AME2003')
"""
df = self.df - Table(relative_to).df
return Table(df=df)
|
python
|
{
"resource": ""
}
|
q278648
|
Table.rmse
|
test
|
def rmse(self, relative_to='AME2003'):
"""Calculate root mean squared error
Parameters
----------
relative_to : string,
a valid mass table name.
Example:
----------
>>> template = '{0:10}|{1:^6.2f}|{2:^6.2f}|{3:^6.2f}'
>>> print 'Model ', 'AME95 ', 'AME03 ', 'AME12 ' # Table header
... for name in Table.names:
... print template.format(name, Table(name).rmse(relative_to='AME1995'),
... Table(name).rmse(relative_to='AME2003'),
... Table(name).rmse(relative_to='AME2012'))
Model AME95 AME03 AME12
AME2003 | 0.13 | 0.00 | 0.13
AME2003all| 0.42 | 0.40 | 0.71
AME2012 | 0.16 | 0.13 | 0.00
AME2012all| 0.43 | 0.43 | 0.69
AME1995 | 0.00 | 0.13 | 0.16
AME1995all| 0.00 | 0.17 | 0.21
DUZU | 0.52 | 0.52 | 0.76
FRDM95 | 0.79 | 0.78 | 0.95
KTUY05 | 0.78 | 0.77 | 1.03
ETFSI12 | 0.84 | 0.84 | 1.04
HFB14 | 0.84 | 0.83 | 1.02
"""
error = self.error(relative_to=relative_to)
return math.sqrt((error.df ** 2).mean())
|
python
|
{
"resource": ""
}
|
q278649
|
Table.binding_energy
|
test
|
def binding_energy(self):
"""
Return binding energies instead of mass excesses
"""
    M_P = 938.2723      # MeV
    M_E = 0.5110        # MeV
    M_N = 939.5656      # MeV
    AMU = 931.494028    # MeV
df = self.Z * (M_P + M_E) + (self.A - self.Z) * M_N - (self.df + self.A * AMU)
return Table(df=df, name='BE' + '(' + self.name + ')')
|
python
|
{
"resource": ""
}
|
q278650
|
Table.s2n
|
test
|
def s2n(self):
"""Return 2 neutron separation energy"""
M_N = 8.0713171 # neutron mass excess in MeV
f = lambda parent, daugther: -parent + daugther + 2 * M_N
return self.derived('s2n', (0, -2), f)
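The arithmetic behind the formula, with made-up mass excesses purely for illustration:
M_N = 8.0713171          # neutron mass excess in MeV
parent = -60.0           # M(Z, N), hypothetical value
daughter = -58.0         # M(Z, N - 2), hypothetical value
print(round(-parent + daughter + 2 * M_N, 4))   # 18.1426 MeV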
|
python
|
{
"resource": ""
}
|
q278651
|
Table.s1n
|
test
|
def s1n(self):
"""Return 1 neutron separation energy"""
M_N = 8.0713171 # neutron mass excess in MeV
f = lambda parent, daugther: -parent + daugther + M_N
return self.derived('s1n', (0, -1), f)
|
python
|
{
"resource": ""
}
|
q278652
|
Table.s2p
|
test
|
def s2p(self):
"""Return 2 proton separation energy"""
M_P = 7.28897050 # proton mass excess in MeV
f = lambda parent, daugther: -parent + daugther + 2 * M_P
return self.derived('s2p', (-2, 0), f)
|
python
|
{
"resource": ""
}
|
q278653
|
Table.s1p
|
test
|
def s1p(self):
"""Return 1 proton separation energy"""
M_P = 7.28897050 # proton mass excess in MeV
f = lambda parent, daugther: -parent + daugther + M_P
return self.derived('s1p', (-1, 0), f)
|
python
|
{
"resource": ""
}
|
q278654
|
Table.derived
|
test
|
def derived(self, name, relative_coords, formula):
"""Helper function for derived quantities"""
relZ, relN = relative_coords
daughter_idx = [(x[0] + relZ, x[1] + relN) for x in self.df.index]
values = formula(self.df.values, self.df.loc[daughter_idx].values)
return Table(df=pd.Series(values, index=self.df.index, name=name + '(' + self.name + ')'))
|
python
|
{
"resource": ""
}
|
q278655
|
_uses_db
|
test
|
def _uses_db(func, self, *args, **kwargs):
""" Use as a decorator for operations on the database, to ensure connection setup and
teardown. Can only be used on methods on objects with a `self.session` attribute.
"""
if not self.session:
_logger.debug('Creating new db session')
self._init_db_session()
try:
ret = func(self, *args, **kwargs)
self.session.commit()
except:
self.session.rollback()
tb = traceback.format_exc()
_logger.debug(tb)
raise
finally:
_logger.debug('Closing db session')
self.session.close()
return ret
|
python
|
{
"resource": ""
}
|
q278656
|
Domain.derive_key
|
test
|
def derive_key(self, master_password):
""" Computes the key from the salt and the master password. """
encoder = encoding.Encoder(self.charset)
bytes = ('%s:%s' % (master_password, self.name)).encode('utf8')
start_time = time.clock()
# we fix the scrypt parameters in case the defaults change
digest = scrypt.hash(bytes, self.salt, N=1<<14, r=8, p=1)
key = encoder.encode(digest, self.key_length)
derivation_time_in_s = time.clock() - start_time
_logger.debug('Key derivation took %.2fms', derivation_time_in_s*1000)
return key
|
python
|
{
"resource": ""
}
|
q278657
|
PWM.bootstrap
|
test
|
def bootstrap(self, path_or_uri):
""" Initialize a database.
:param database_path: The absolute path to the database to initialize.
"""
_logger.debug("Bootstrapping new database: %s", path_or_uri)
self.database_uri = _urify_db(path_or_uri)
db = sa.create_engine(self.database_uri)
Base.metadata.create_all(db)
|
python
|
{
"resource": ""
}
|
q278658
|
PWM.search
|
test
|
def search(self, query):
""" Search the database for the given query. Will find partial matches. """
results = self.session.query(Domain).filter(Domain.name.ilike('%%%s%%' % query)).all()
return results
|
python
|
{
"resource": ""
}
|
q278659
|
PWM.modify_domain
|
test
|
def modify_domain(self, domain_name, new_salt=False, username=None):
""" Modify an existing domain.
:param domain_name: The name of the domain to modify.
:param new_salt: Whether to generate a new salt for the domain.
:param username: If given, change domain username to this value.
:returns: The modified :class:`Domain <pwm.core.Domain>` object.
"""
domain = self._get_domain_from_db(domain_name)
if domain is None:
raise NoSuchDomainException
if new_salt:
_logger.info("Generating new salt..")
domain.new_salt()
if username is not None:
domain.username = username
return domain
|
python
|
{
"resource": ""
}
|
q278660
|
PWM.create_domain
|
test
|
def create_domain(self, domain_name, username=None, alphabet=Domain.DEFAULT_ALPHABET,
length=Domain.DEFAULT_KEY_LENGTH):
""" Create a new domain entry in the database.
:param username: The username to associate with this domain.
:param alphabet: A character set restriction to impose on keys generated for this domain.
:param length: The length of the generated key, in case of restrictions on the site.
"""
# Wrap the actual implementation to do some error handling
try:
return self._create_domain(domain_name, username, alphabet, length)
except Exception as ex:
_logger.warn("Inserting new domain failed: %s", ex)
raise DuplicateDomainException
|
python
|
{
"resource": ""
}
|
q278661
|
extract_hbs
|
test
|
def extract_hbs(fileobj, keywords, comment_tags, options):
"""Extract messages from Handlebars templates.
It returns an iterator yielding tuples in the following form ``(lineno,
funcname, message, comments)``.
TODO: Things to improve:
--- Return comments
"""
server = get_pipeserver()
server.sendline(COMMAND+u'PARSE FILE:'+fileobj.name)
server.expect(RESPONSE+'SENDING OUTPUT')
server.expect(RESPONSE+'OUTPUT END')
trans_strings = server.before
for item in json.loads(trans_strings):
messages = [item['content']]
if item['funcname'] == 'ngettext':
messages.append(item['alt_content'])
yield item['line_number'],item['funcname'],tuple(messages),[]
|
python
|
{
"resource": ""
}
|
q278662
|
vsiprefix
|
test
|
def vsiprefix(path):
"""Returns a GDAL virtual filesystem prefixed path.
Arguments:
path -- file path as str
"""
vpath = path.lower()
scheme = VSI_SCHEMES.get(urlparse(vpath).scheme, '')
for ext in VSI_TYPES:
if ext in vpath:
filesys = VSI_TYPES[ext]
break
else:
filesys = ''
if filesys and scheme:
filesys = filesys[:-1]
return ''.join((filesys, scheme, path))
|
python
|
{
"resource": ""
}
|
q278663
|
SpatialReference.srid
|
test
|
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
|
python
|
{
"resource": ""
}
|
q278664
|
main
|
test
|
def main():
""" Main entry point for the CLI. """
args = get_args()
ret_code = args.target(args)
_logger.debug('Exiting with code %d', ret_code)
sys.exit(ret_code)
|
python
|
{
"resource": ""
}
|
q278665
|
_init_logging
|
test
|
def _init_logging(verbose=False):
""" Initialize loggers. """
config = {
'version': 1,
'formatters': {
'console': {
'format': '* %(message)s',
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'DEBUG',
'formatter': 'console',
'stream': 'ext://sys.stdout',
}
},
'loggers': {
'pwm': {
'level': 'DEBUG' if verbose else 'INFO',
'handlers': ['console'],
'propagate': True,
},
'requests.packages.urllib3': {
'level': 'INFO' if verbose else 'WARNING',
'handlers': ['console'],
'propagate': True,
}
}
}
logging.config.dictConfig(config)
HTTPConnection.debuglevel = 1 if verbose else 0
|
python
|
{
"resource": ""
}
|
q278666
|
update_file
|
test
|
def update_file(url, filename):
"""Update the content of a single file."""
resp = urlopen(url)
if resp.code != 200:
raise Exception('GET {} failed.'.format(url))
with open(_get_package_path(filename), 'w') as fp:
for l in resp:
if not l.startswith(b'#'):
fp.write(l.decode('utf8'))
print('Updated {}'.format(filename))
|
python
|
{
"resource": ""
}
|
q278667
|
available_drivers
|
test
|
def available_drivers():
"""Returns a dictionary of enabled GDAL Driver metadata keyed by the
'ShortName' attribute.
"""
drivers = {}
for i in range(gdal.GetDriverCount()):
d = gdal.GetDriver(i)
drivers[d.ShortName] = d.GetMetadata()
return drivers
|
python
|
{
"resource": ""
}
|
q278668
|
driver_for_path
|
test
|
def driver_for_path(path, drivers=None):
"""Returns the gdal.Driver for a path or None based on the file extension.
Arguments:
path -- file path as str with a GDAL supported file extension
"""
ext = (os.path.splitext(path)[1][1:] or path).lower()
drivers = drivers or ImageDriver.registry if ext else {}
for name, meta in drivers.items():
if ext == meta.get('DMD_EXTENSION', '').lower():
return ImageDriver(name)
return None
|
python
|
{
"resource": ""
}
|
q278669
|
geom_to_array
|
test
|
def geom_to_array(geom, size, affine):
"""Converts an OGR polygon to a 2D NumPy array.
Arguments:
geom -- OGR Geometry
size -- array size in pixels as a tuple of (width, height)
affine -- AffineTransform
"""
driver = ImageDriver('MEM')
rast = driver.raster(driver.ShortName, size)
rast.affine = affine
rast.sref = geom.GetSpatialReference()
with MemoryLayer.from_records([(1, geom)]) as ml:
status = gdal.RasterizeLayer(rast.ds, (1,), ml.layer, burn_values=(1,))
arr = rast.array()
rast.close()
return arr
|
python
|
{
"resource": ""
}
|
q278670
|
rasterize
|
test
|
def rasterize(layer, rast):
"""Returns a Raster from layer features.
Arguments:
layer -- Layer to rasterize
rast -- Raster with target affine, size, and sref
"""
driver = ImageDriver('MEM')
r2 = driver.raster(driver.ShortName, rast.size)
r2.affine = rast.affine
sref = rast.sref
if not sref.srid:
sref = SpatialReference(4326)
r2.sref = sref
ml = MemoryLayer(sref, layer.GetGeomType())
ml.load(layer)
status = gdal.RasterizeLayer(
r2.ds, (1,), ml.layer, options=['ATTRIBUTE=%s' % ml.id])
ml.close()
return r2
|
python
|
{
"resource": ""
}
|
q278671
|
open
|
test
|
def open(path, mode=gdalconst.GA_ReadOnly):
"""Returns a Raster instance.
Arguments:
path -- local or remote path as str or file-like object
Keyword args:
mode -- gdal constant representing access mode
"""
path = getattr(path, 'name', path)
try:
return Raster(vsiprefix(path), mode)
except AttributeError:
try:
imgdata = path.read()
except AttributeError:
raise TypeError('Not a file-like object providing read()')
else:
imgio = MemFileIO(delete=False)
gdal.FileFromMemBuffer(imgio.name, imgdata)
return Raster(imgio, mode)
raise ValueError('Failed to open raster from "%r"' % path)
|
python
|
{
"resource": ""
}
|
q278672
|
frombytes
|
test
|
def frombytes(data, size, bandtype=gdal.GDT_Byte):
"""Returns an in-memory raster initialized from a pixel buffer.
Arguments:
data -- byte buffer of raw pixel data
size -- two or three-tuple of (xsize, ysize, bandcount)
bandtype -- band data type
"""
r = ImageDriver('MEM').raster('', size, bandtype)
r.frombytes(data)
return r
|
python
|
{
"resource": ""
}
|
q278673
|
ImageDriver.copy
|
test
|
def copy(self, source, dest):
"""Returns a copied Raster instance.
Arguments:
source -- the source Raster instance or filepath as str
dest -- destination filepath as str
"""
if not self.copyable:
raise IOError('Driver does not support raster copying')
if not isinstance(source, Raster):
source = Raster(source)
should_close = True
else:
should_close = False
if source.name == dest:
raise ValueError(
'Input and output are the same location: %s' % source.name)
settings = driverdict_tolist(self.settings)
ds = self.CreateCopy(dest, source.ds, self.strictmode,
options=settings)
if should_close:
source.close()
return Raster(ds)
|
python
|
{
"resource": ""
}
|
q278674
|
ImageDriver.options
|
test
|
def options(self):
"""Returns a dict of driver specific raster creation options.
See GDAL format docs at http://www.gdal.org/formats_list.html
"""
if self._options is None:
try:
elem = ET.fromstring(
self.info.get('DMD_CREATIONOPTIONLIST', ''))
except ET.ParseError:
elem = []
opts = {}
for child in elem:
choices = [val.text for val in child]
if choices:
child.attrib.update(choices=choices)
opts[child.attrib.pop('name')] = child.attrib
self._options = opts
return self._options
|
python
|
{
"resource": ""
}
|
q278675
|
ImageDriver.raster
|
test
|
def raster(self, path, size, bandtype=gdal.GDT_Byte):
"""Returns a new Raster instance.
gdal.Driver.Create() does not support all formats.
Arguments:
path -- file object or path as str
size -- two or three-tuple of (xsize, ysize, bandcount)
bandtype -- GDAL pixel data type
"""
path = getattr(path, 'name', path)
try:
is_multiband = len(size) > 2
nx, ny, nbands = size if is_multiband else size + (1,)
except (TypeError, ValueError) as exc:
exc.args = ('Size must be 2 or 3-item sequence',)
raise
if nx < 1 or ny < 1:
raise ValueError('Invalid raster size %s' % (size,))
# Do not write to a non-empty file.
if not self._is_empty(path):
raise IOError('%s already exists, open with Raster()' % path)
ds = self.Create(path, nx, ny, nbands, bandtype)
if not ds:
raise ValueError(
'Could not create %s using %s' % (path, str(self)))
return Raster(ds)
|
python
|
{
"resource": ""
}
|
q278676
|
Raster.SetGeoTransform
|
test
|
def SetGeoTransform(self, affine):
"""Sets the affine transformation.
Intercepts the gdal.Dataset call to ensure use as a property setter.
Arguments:
affine -- AffineTransform or six-tuple of geotransformation values
"""
if isinstance(affine, collections.Sequence):
affine = AffineTransform(*affine)
self._affine = affine
self.ds.SetGeoTransform(affine)
|
python
|
{
"resource": ""
}
|
q278677
|
Raster.array
|
test
|
def array(self, envelope=()):
"""Returns an NDArray, optionally subset by spatial envelope.
Keyword args:
envelope -- coordinate extent tuple or Envelope
"""
args = ()
if envelope:
args = self.get_offset(envelope)
return self.ds.ReadAsArray(*args)
|
python
|
{
"resource": ""
}
|
q278678
|
Raster.envelope
|
test
|
def envelope(self):
"""Returns the minimum bounding rectangle as a tuple of min X, min Y,
max X, max Y.
"""
if self._envelope is None:
origin = self.affine.origin
ur_x = origin[0] + self.ds.RasterXSize * self.affine.scale[0]
ll_y = origin[1] + self.ds.RasterYSize * self.affine.scale[1]
self._envelope = Envelope(origin[0], ll_y, ur_x, origin[1])
return self._envelope
|
python
|
{
"resource": ""
}
|
q278679
|
Raster.driver
|
test
|
def driver(self):
"""Returns the underlying ImageDriver instance."""
if self._driver is None:
self._driver = ImageDriver(self.ds.GetDriver())
return self._driver
|
python
|
{
"resource": ""
}
|
q278680
|
Raster.new
|
test
|
def new(self, size=(), affine=None):
"""Derive new Raster instances.
Keyword args:
size -- tuple of image size (width, height)
affine -- AffineTransform or six-tuple of geotransformation values
"""
size = size or self.size + (len(self),)
band = self.ds.GetRasterBand(1)
driver = ImageDriver('MEM')
rcopy = driver.raster(driver.ShortName, size, band.DataType)
rcopy.sref = self.GetProjection()
rcopy.affine = affine or tuple(self.affine)
colors = band.GetColorTable()
for outband in rcopy:
if self.nodata is not None:
outband.SetNoDataValue(self.nodata)
if colors:
outband.SetColorTable(colors)
return rcopy
|
python
|
{
"resource": ""
}
|
q278681
|
Raster.masked_array
|
test
|
def masked_array(self, geometry=None):
"""Returns a MaskedArray using nodata values.
Keyword args:
geometry -- any geometry, envelope, or coordinate extent tuple
"""
if geometry is None:
return self._masked_array()
geom = transform(geometry, self.sref)
env = Envelope.from_geom(geom).intersect(self.envelope)
arr = self._masked_array(env)
if geom.GetGeometryType() != ogr.wkbPoint:
dims = self.get_offset(env)[2:]
affine = AffineTransform(*tuple(self.affine))
affine.origin = env.ul
mask = ~np.ma.make_mask(geom_to_array(geom, dims, affine))
arr.mask = arr.mask | mask
return arr
|
python
|
{
"resource": ""
}
|
q278682
|
Raster.nodata
|
test
|
def nodata(self):
"""Returns read only property for band nodata value, assuming single
band rasters for now.
"""
if self._nodata is None:
self._nodata = self[0].GetNoDataValue()
return self._nodata
|
python
|
{
"resource": ""
}
|
q278683
|
Raster.ReadRaster
|
test
|
def ReadRaster(self, *args, **kwargs):
"""Returns raster data bytes for partial or full extent.
Overrides gdal.Dataset.ReadRaster() with the full raster size by
default.
"""
args = args or (0, 0, self.ds.RasterXSize, self.ds.RasterYSize)
return self.ds.ReadRaster(*args, **kwargs)
|
python
|
{
"resource": ""
}
|
q278684
|
Raster.resample
|
test
|
def resample(self, size, interpolation=gdalconst.GRA_NearestNeighbour):
"""Returns a new instance resampled to provided size.
Arguments:
size -- tuple of x,y image dimensions
"""
# Find the scaling factor for pixel size.
factors = (size[0] / float(self.RasterXSize),
size[1] / float(self.RasterYSize))
affine = AffineTransform(*tuple(self.affine))
affine.scale = (affine.scale[0] / factors[0],
affine.scale[1] / factors[1])
dest = self.new(size, affine)
# Uses self and dest projection when set to None
gdal.ReprojectImage(self.ds, dest.ds, None, None, interpolation)
return dest
|
python
|
{
"resource": ""
}
|
q278685
|
Raster.save
|
test
|
def save(self, to, driver=None):
"""Save this instance to the path and format provided.
Arguments:
to -- output path as str, file, or MemFileIO instance
Keyword args:
driver -- GDAL driver name as string or ImageDriver
"""
path = getattr(to, 'name', to)
if not driver and hasattr(path, 'encode'):
driver = driver_for_path(path, self.driver.filter_copyable())
elif hasattr(driver, 'encode'):
driver = ImageDriver(driver)
if driver is None or not driver.copyable:
raise ValueError('Copy supporting driver not found for %s' % path)
driver.copy(self, path).close()
|
python
|
{
"resource": ""
}
|
q278686
|
Raster.SetProjection
|
test
|
def SetProjection(self, sref):
"""Sets the spatial reference.
Intercepts the gdal.Dataset call to ensure use as a property setter.
Arguments:
sref -- SpatialReference or any format supported by the constructor
"""
if not hasattr(sref, 'ExportToWkt'):
sref = SpatialReference(sref)
self._sref = sref
self.ds.SetProjection(sref.ExportToWkt())
|
python
|
{
"resource": ""
}
|
q278687
|
Raster.warp
|
test
|
def warp(self, to_sref, dest=None, interpolation=gdalconst.GRA_NearestNeighbour):
"""Returns a new reprojected instance.
Arguments:
to_sref -- spatial reference as a proj4 or wkt string, or a
SpatialReference
Keyword args:
dest -- filepath as str
interpolation -- GDAL interpolation type
"""
if not hasattr(to_sref, 'ExportToWkt'):
to_sref = SpatialReference(to_sref)
dest_wkt = to_sref.ExportToWkt()
dtype = self[0].DataType
err_thresh = 0.125
# Determine new values for destination raster dimensions and
# geotransform.
vrt = gdal.AutoCreateWarpedVRT(self.ds, None, dest_wkt,
interpolation, err_thresh)
if vrt is None:
raise ValueError('Could not warp %s to %s' % (self, dest_wkt))
warpsize = (vrt.RasterXSize, vrt.RasterYSize, len(self))
warptrans = vrt.GetGeoTransform()
vrt = None
if dest is None:
imgio = MemFileIO()
rwarp = self.driver.raster(imgio, warpsize, dtype)
imgio.close()
else:
rwarp = self.driver.raster(dest, warpsize, dtype)
rwarp.SetGeoTransform(warptrans)
rwarp.SetProjection(to_sref)
if self.nodata is not None:
for band in rwarp:
band.SetNoDataValue(self.nodata)
band = None
# Uses self and rwarp projection when set to None
gdal.ReprojectImage(self.ds, rwarp.ds, None, None, interpolation)
return rwarp
|
python
|
{
"resource": ""
}
|
q278688
|
calc_chunklen
|
test
|
def calc_chunklen(alph_len):
'''
computes the ideal conversion ratio for the given alphabet.
A ratio is considered ideal when the number of bits in one output
encoding chunk that don't add up to one input encoding chunk is minimal.
'''
binlen, enclen = min([
(i, i*8 / math.log(alph_len, 2))
for i in range(1, 7)
], key=lambda k: k[1] % 1)
return binlen, int(enclen)
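Two quick checks, assuming calc_chunklen above is in scope:
print(calc_chunklen(16))   # (1, 2): one byte maps exactly onto two hex characters
print(calc_chunklen(64))   # (3, 4): the familiar base64 ratio, 3 bytes -> 4 characters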
|
python
|
{
"resource": ""
}
|
q278689
|
lookup_alphabet
|
test
|
def lookup_alphabet(charset):
'''
retrieves a named charset or treats the input as a custom alphabet and use that
'''
if charset in PRESETS:
return PRESETS[charset]
if len(charset) < 16:
_logger.warning('very small alphabet in use, possibly a failed lookup?')
return charset
|
python
|
{
"resource": ""
}
|
q278690
|
Encoder._encode_chunk
|
test
|
def _encode_chunk(self, data, index):
'''
gets a chunk from the input data, converts it to a number and
encodes that number
'''
chunk = self._get_chunk(data, index)
return self._encode_long(self._chunk_to_long(chunk))
|
python
|
{
"resource": ""
}
|
q278691
|
Encoder._chunk_to_long
|
test
|
def _chunk_to_long(self, chunk):
'''
parses a chunk of bytes to integer using big-endian representation
'''
return sum([
256**(self.chunklen[0]-1-i) * ord_byte(chunk[i])
for i in range(self.chunklen[0])
])
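The same big-endian arithmetic spelled out for a 3-byte chunk (chunklen[0] == 3), alongside the Python 3 built-in:
chunk = b'\x01\x02\x03'
value = 256 ** 2 * chunk[0] + 256 ** 1 * chunk[1] + 256 ** 0 * chunk[2]
print(value)                           # 66051
print(int.from_bytes(chunk, 'big'))    # 66051, the built-in equivalent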
|
python
|
{
"resource": ""
}
|
q278692
|
Encoder._get_chunk
|
test
|
def _get_chunk(self, data, index):
'''
partition the data into chunks and retrieve the chunk at the given index
'''
return data[index*self.chunklen[0]:(index+1)*self.chunklen[0]]
|
python
|
{
"resource": ""
}
|
q278693
|
memoize
|
test
|
def memoize(func):
"""Cache result of function call."""
cache = {}
@wraps(func)
def inner(filename):
if filename not in cache:
cache[filename] = func(filename)
return cache[filename]
return inner
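A usage sketch, assuming the memoize decorator above is in scope; fake_loader and the file names are made up:
calls = []

@memoize
def fake_loader(filename):
    calls.append(filename)      # record that the real work ran
    return filename.upper()

fake_loader('a.txt')
fake_loader('a.txt')            # answered from the cache
fake_loader('b.txt')
print(calls)                    # ['a.txt', 'b.txt']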
|
python
|
{
"resource": ""
}
|
q278694
|
_regexp
|
test
|
def _regexp(filename):
"""Get a list of patterns from a file and make a regular expression."""
lines = _get_resource_content(filename).decode('utf-8').splitlines()
return re.compile('|'.join(lines))
|
python
|
{
"resource": ""
}
|
q278695
|
normalize_date_format
|
test
|
def normalize_date_format(date):
'''
Dates can be defined in many ways, but zipline use
aware datetime objects only. Plus, the software work
with utc timezone so we convert it.
'''
if isinstance(date, int):
# This is probably epoch time
date = time.strftime('%Y-%m-%d %H:%M:%S',
time.localtime(date))
# assert isinstance(date, str) or isinstance(date, unicode)
if isinstance(date, str) or isinstance(date, unicode):
date = dateutil.parser.parse(date)
if not date.tzinfo:
local_tz = pytz.timezone(_detect_timezone())
local_dt = local_tz.localize(date, is_dst=None)
# TODO I'm not sure why and when I need to add a date to make it right
date = local_dt.astimezone(pytz.utc) + pd.datetools.day
return date
|
python
|
{
"resource": ""
}
|
q278696
|
_detect_timezone
|
test
|
def _detect_timezone():
'''
Get timezone as set by the system
'''
default_timezone = 'America/New_York'
locale_code = locale.getdefaultlocale()
return default_timezone if not locale_code[0] else \
str(pytz.country_timezones[locale_code[0][-2:]][0])
|
python
|
{
"resource": ""
}
|
q278697
|
FinancialDataStatusDatastatus.to_dict
|
test
|
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
|
python
|
{
"resource": ""
}
|
q278698
|
activate_pdb_hook
|
test
|
def activate_pdb_hook():
''' Catch exceptions with a prompt for post-mortem analyzis'''
def debug_exception(type_exception, value, tb):
import pdb
pdb.post_mortem(tb)
import sys
sys.excepthook = debug_exception
|
python
|
{
"resource": ""
}
|
q278699
|
emphasis
|
test
|
def emphasis(obj, align=True):
''' Clearer data printing '''
if isinstance(obj, dict):
if align:
pretty_msg = os.linesep.join(
["%25s: %s" % (k, obj[k]) for k in sorted(obj.keys())])
else:
pretty_msg = json.dumps(obj, indent=4, sort_keys=True)
else:
return obj
return pretty_msg
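A usage sketch, assuming emphasis above is in scope (the dict contents are made up):
info = {'symbol': 'AAPL', 'price': 101.5, 'volume': 3000}
print(emphasis(info))               # one right-aligned "key: value" line per key
print(emphasis(info, align=False))  # pretty-printed JSON instead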
|
python
|
{
"resource": ""
}
|