Dataset columns:

column            type      lengths / classes
repo              string    7 to 55 chars
path              string    4 to 127 chars
func_name         string    1 to 88 chars
original_string   string    75 to 19.8k chars
language          string    1 class (python)
code              string    75 to 19.8k chars
code_tokens       sequence
docstring         string    3 to 17.3k chars
docstring_tokens  sequence
sha               string    40 chars
url               string    87 to 242 chars
partition         string    1 class (train)

repo: rootpy/rootpy
path: rootpy/plotting/root2matplotlib.py
func_name: imshow
language: python
code:

def imshow(h, axes=None, colorbar=False, **kwargs):
    """
    Draw a matplotlib imshow plot from a 2D ROOT histogram.

    Parameters
    ----------
    h : Hist2D
        A rootpy Hist2D

    axes : matplotlib Axes instance, optional (default=None)
        The axes to plot on. If None then use the global current axes.

    colorbar : Boolean, optional (default=False)
        If True, include a colorbar in the produced plot

    kwargs : additional keyword arguments, optional
        Additional keyword arguments are passed directly to matplotlib's
        imshow function.

    Returns
    -------
    Returns the value from matplotlib's imshow function.
    """
    kwargs.setdefault('aspect', 'auto')
    if axes is None:
        axes = plt.gca()
    z = np.array(h.z()).T
    axis_image = axes.imshow(
        z,
        extent=[
            h.xedges(1), h.xedges(h.nbins(0) + 1),
            h.yedges(1), h.yedges(h.nbins(1) + 1)],
        interpolation='nearest',
        origin='lower',
        **kwargs)
    if colorbar:
        plt.colorbar(axis_image, ax=axes)
    return axis_image

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L743-L785
partition: train
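
A hedged usage sketch for the imshow entry above; the Hist2D binning call and the fill loop are assumptions about the rootpy API, while the imshow call itself follows the signature shown:

import random

import matplotlib.pyplot as plt
from rootpy.plotting import Hist2D
from rootpy.plotting import root2matplotlib as rplt

h = Hist2D(20, -3, 3, 20, -3, 3)     # assumed (nbinsx, xlow, xup, nbinsy, ylow, yup)
for _ in range(10000):
    h.Fill(random.gauss(0, 1), random.gauss(0, 1))

img = rplt.imshow(h, colorbar=True)  # returns matplotlib's AxesImage
plt.show()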

repo: rootpy/rootpy
path: rootpy/plotting/root2matplotlib.py
func_name: contour
language: python
code:

def contour(h, axes=None, zoom=None, label_contour=False, **kwargs):
    """
    Draw a matplotlib contour plot from a 2D ROOT histogram.

    Parameters
    ----------
    h : Hist2D
        A rootpy Hist2D

    axes : matplotlib Axes instance, optional (default=None)
        The axes to plot on. If None then use the global current axes.

    zoom : float or sequence, optional (default=None)
        The zoom factor along the axes. If a float, zoom is the same for
        each axis. If a sequence, zoom should contain one value for each
        axis. The histogram is zoomed using a cubic spline interpolation
        to create smooth contours.

    label_contour : Boolean, optional (default=False)
        If True, labels are printed on the contour lines.

    kwargs : additional keyword arguments, optional
        Additional keyword arguments are passed directly to matplotlib's
        contour function.

    Returns
    -------
    Returns the value from matplotlib's contour function.
    """
    if axes is None:
        axes = plt.gca()
    x = np.array(list(h.x()))
    y = np.array(list(h.y()))
    z = np.array(h.z()).T
    if zoom is not None:
        from scipy import ndimage
        if hasattr(zoom, '__iter__'):
            zoom = list(zoom)
            x = ndimage.zoom(x, zoom[0])
            y = ndimage.zoom(y, zoom[1])
        else:
            x = ndimage.zoom(x, zoom)
            y = ndimage.zoom(y, zoom)
        z = ndimage.zoom(z, zoom)
    return_values = axes.contour(x, y, z, **kwargs)
    if label_contour:
        plt.clabel(return_values)
    return return_values

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L788-L838
partition: train
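
A hedged sketch for the contour entry; `h` is a filled rootpy Hist2D as in the previous sketch, and the zoom option requires scipy:

import matplotlib.pyplot as plt
from rootpy.plotting import root2matplotlib as rplt

# `h` is assumed to be a filled Hist2D (see the imshow sketch above).
cs = rplt.contour(h, zoom=4, label_contour=True)  # cubic-spline zoom smooths the contours
plt.show()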

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree._post_init
language: python
code:

def _post_init(self):
    """
    The standard rootpy _post_init method that is used to initialize both
    new Trees and Trees retrieved from a File.
    """
    if not hasattr(self, '_buffer'):
        # only set _buffer if model was not specified in the __init__
        self._buffer = TreeBuffer()
    self.read_branches_on_demand = False
    self._branch_cache = {}
    self._current_entry = 0
    self._always_read = []
    self.userdata = UserData()
    self._inited = True

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L49-L62
partition: train

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.always_read
language: python
code:

def always_read(self, branches):
    """
    Always read these branches, even when in caching mode. Maybe you have
    caching enabled and there are branches you want to be updated for each
    entry even though you never access them directly. This is useful if
    you are iterating over an input tree and writing to an output tree
    sharing the same TreeBuffer and you want a direct copy of certain
    branches. If you have caching enabled but these branches are not
    specified here and never accessed then they will never be read from
    disk, so the values of branches in memory will remain unchanged.

    Parameters
    ----------
    branches : list, tuple
        these branches will always be read from disk for every GetEntry
    """
    if type(branches) not in (list, tuple):
        raise TypeError("branches must be a list or tuple")
    self._always_read = branches

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L64-L82
partition: train
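
A hedged usage sketch for always_read; `tree` and the branch names are hypothetical, and read_branches_on_demand is the caching switch initialized in _post_init above:

# Assumes `tree` is an existing rootpy Tree read from a file.
tree.read_branches_on_demand = True                # enable on-demand (cached) branch reading
tree.always_read(['run_number', 'event_number'])   # force these to be read for every GetEntry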

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.branch_type
language: python
code:

def branch_type(cls, branch):
    """
    Return the string representation for the type of a branch
    """
    typename = branch.GetClassName()
    if not typename:
        leaf = branch.GetListOfLeaves()[0]
        typename = leaf.GetTypeName()
        # check if leaf has multiple elements
        leaf_count = leaf.GetLeafCount()
        if leaf_count:
            length = leaf_count.GetMaximum()
        else:
            length = leaf.GetLen()
        if length > 1:
            typename = '{0}[{1:d}]'.format(typename, length)
    return typename

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L85-L101
partition: train

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.create_buffer
language: python
code:

def create_buffer(self, ignore_unsupported=False):
    """
    Create this tree's TreeBuffer
    """
    bufferdict = OrderedDict()
    for branch in self.iterbranches():
        # only include activated branches
        if not self.GetBranchStatus(branch.GetName()):
            continue
        if not BaseTree.branch_is_supported(branch):
            log.warning(
                "ignore unsupported branch `{0}`".format(branch.GetName()))
            continue
        bufferdict[branch.GetName()] = Tree.branch_type(branch)
    self.set_buffer(TreeBuffer(
        bufferdict,
        ignore_unsupported=ignore_unsupported))

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L111-L127
partition: train

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.create_branches
language: python
code:

def create_branches(self, branches):
    """
    Create branches from a TreeBuffer or dict mapping names to type names

    Parameters
    ----------
    branches : TreeBuffer or dict
    """
    if not isinstance(branches, TreeBuffer):
        branches = TreeBuffer(branches)
    self.set_buffer(branches, create_branches=True)

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L129-L139
partition: train
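
A hedged sketch of building a new tree with create_branches; the file name, tree name, and branch names are made up, and 'F'/'I' are assumed to be accepted as the ROOT leaf type codes for float and int:

from rootpy.io import root_open
from rootpy.tree import Tree

with root_open('out.root', 'recreate'):              # hypothetical output file
    tree = Tree('events')                            # hypothetical tree name
    tree.create_branches({'x': 'F', 'n_jets': 'I'})
    tree.x = 1.5                                     # buffer values are set as tree attributes
    tree.n_jets = 2
    tree.Fill()
    tree.Write()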

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.update_buffer
language: python
code:

def update_buffer(self, treebuffer, transfer_objects=False):
    """
    Merge items from a TreeBuffer into this Tree's TreeBuffer

    Parameters
    ----------
    buffer : rootpy.tree.buffer.TreeBuffer
        The TreeBuffer to merge into this Tree's buffer

    transfer_objects : bool, optional (default=False)
        If True then all objects and collections on the input buffer will
        be transferred to this Tree's buffer.
    """
    self._buffer.update(treebuffer)
    if transfer_objects:
        self._buffer.set_objects(treebuffer)

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L141-L156
partition: train

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.set_buffer
language: python
code:

def set_buffer(self, treebuffer,
               branches=None,
               ignore_branches=None,
               create_branches=False,
               visible=True,
               ignore_missing=False,
               ignore_duplicates=False,
               transfer_objects=False):
    """
    Set the Tree buffer

    Parameters
    ----------
    treebuffer : rootpy.tree.buffer.TreeBuffer
        a TreeBuffer

    branches : list, optional (default=None)
        only include these branches from the TreeBuffer

    ignore_branches : list, optional (default=None)
        ignore these branches from the TreeBuffer

    create_branches : bool, optional (default=False)
        If True then the branches in the TreeBuffer should be created.
        Use this option if initializing the Tree. A ValueError is raised
        if an attempt is made to create a branch with the same name as one
        that already exists in the Tree. If False the addresses of
        existing branches will be set to point at the addresses in this
        buffer.

    visible : bool, optional (default=True)
        If True then the branches will be added to the buffer and will be
        accessible as attributes of the Tree.

    ignore_missing : bool, optional (default=False)
        If True then any branches in this buffer that do not exist in the
        Tree will be ignored, otherwise a ValueError will be raised. This
        option is only valid when ``create_branches`` is False.

    ignore_duplicates : bool, optional (default=False)
        If False then raise a ValueError if the tree already has a branch
        with the same name as an entry in the buffer. If True then skip
        branches that already exist. This option is only valid when
        ``create_branches`` is True.

    transfer_objects : bool, optional (default=False)
        If True, all tree objects and collections will be transferred from
        the buffer into this Tree's buffer.
    """
    # determine branches to keep while preserving branch order
    if branches is None:
        branches = treebuffer.keys()
    if ignore_branches is not None:
        branches = [b for b in branches if b not in ignore_branches]

    if create_branches:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                if ignore_duplicates:
                    log.warning(
                        "Skipping entry in buffer with the same name "
                        "as an existing branch: `{0}`".format(name))
                    continue
                raise ValueError(
                    "Attempting to create two branches "
                    "with the same name: `{0}`".format(name))
            if isinstance(value, Scalar):
                self.Branch(name, value, '{0}/{1}'.format(
                    name, value.type))
            elif isinstance(value, Array):
                length = value.length_name or len(value)
                self.Branch(name, value, '{0}[{2}]/{1}'.format(
                    name, value.type, length))
            else:
                self.Branch(name, value)
    else:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                self.SetBranchAddress(name, value)
            elif not ignore_missing:
                raise ValueError(
                    "Attempting to set address for "
                    "branch `{0}` which does not exist".format(name))
            else:
                log.warning(
                    "Skipping entry in buffer for which no "
                    "corresponding branch in the "
                    "tree exists: `{0}`".format(name))
    if visible:
        newbuffer = TreeBuffer()
        for branch in branches:
            if branch in treebuffer:
                newbuffer[branch] = treebuffer[branch]
        newbuffer.set_objects(treebuffer)
        self.update_buffer(newbuffer, transfer_objects=transfer_objects)

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L158-L255
partition: train
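
A minimal sketch of the create_branches=True path of set_buffer, attaching an explicit TreeBuffer to a fresh tree; the TreeBuffer import location, file name, and branch names are assumptions:

from rootpy.io import root_open
from rootpy.tree import Tree, TreeBuffer

with root_open('skim.root', 'recreate'):          # hypothetical output file
    buf = TreeBuffer({'x': 'F', 'n_jets': 'I'})   # 'F'/'I': ROOT leaf type codes (assumed)
    tree = Tree('events')
    tree.set_buffer(buf, create_branches=True)    # raises ValueError on a name clash
    tree.x = 3.2                                  # visible=True exposes buffer entries as attributes
    tree.n_jets = 1
    tree.Fill()
    tree.Write()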

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.glob
language: python
code:

def glob(self, patterns, exclude=None):
    """
    Return a list of branch names that match ``pattern``.
    Exclude all matched branch names which also match a pattern in
    ``exclude``. ``exclude`` may be a string or list of strings.

    Parameters
    ----------
    patterns: str or list
        branches are matched against this pattern or list of patterns
        where globbing is performed with '*'.

    exclude : str or list, optional (default=None)
        branches matching this pattern or list of patterns are excluded
        even if they match a pattern in ``patterns``.

    Returns
    -------
    matches : list
        List of matching branch names
    """
    if isinstance(patterns, string_types):
        patterns = [patterns]
    if isinstance(exclude, string_types):
        exclude = [exclude]
    matches = []
    for pattern in patterns:
        matches += fnmatch.filter(self.iterbranchnames(), pattern)
        if exclude is not None:
            for exclude_pattern in exclude:
                matches = [match for match in matches
                           if not fnmatch.fnmatch(match, exclude_pattern)]
    return matches

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L337-L369
partition: train
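
A hedged sketch for glob; `tree` and all branch names are hypothetical:

# Select every jet branch except raw/uncalibrated copies.
jet_branches = tree.glob('jet_*', exclude=['jet_*_raw', 'jet_*_uncalib'])
# Multiple patterns may be passed as a list.
lepton_branches = tree.glob(['el_*', 'mu_*'])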

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.CopyTree
language: python
code:

def CopyTree(self, selection, *args, **kwargs):
    """
    Copy the tree while supporting a rootpy.tree.cut.Cut selection in
    addition to a simple string.
    """
    return super(BaseTree, self).CopyTree(str(selection), *args, **kwargs)

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L656-L661
partition: train
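
A hedged sketch for CopyTree showing a rootpy Cut passed as the selection; `tree` and the variable names are hypothetical:

from rootpy.tree import Cut

selection = Cut('n_jets >= 2') & Cut('met > 50')  # Cut objects compose with & and |
skimmed = tree.CopyTree(selection)                # str(selection) is forwarded to ROOT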

repo: rootpy/rootpy
path: rootpy/tree/tree.py
func_name: BaseTree.to_array
language: python
code:

def to_array(self, *args, **kwargs):
    """
    Convert this tree into a NumPy structured array
    """
    from root_numpy import tree2array
    return tree2array(self, *args, **kwargs)

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L858-L863
partition: train
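
A hedged sketch for to_array; it assumes root_numpy's tree2array keyword arguments (branches, selection), and `tree` and the branch names are hypothetical:

arr = tree.to_array(branches=['x', 'n_jets'], selection='n_jets > 0')
print(arr.dtype.names)   # structured array fields, e.g. ('x', 'n_jets')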

repo: rootpy/rootpy
path: rootpy/roosh.py
func_name: color_key
language: python
code:

def color_key(tkey):
    """
    Function which returns a colorized TKey name given its type
    """
    name = tkey.GetName()
    classname = tkey.GetClassName()
    for class_regex, color in _COLOR_MATCHER:
        if class_regex.match(classname):
            return colored(name, color=color)
    return name

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/roosh.py#L49-L58
partition: train

repo: rootpy/rootpy
path: rootpy/plotting/contrib/plot_corrcoef_matrix.py
func_name: cov
language: python
code:

def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None,
        repeat_weights=0):
    """
    Estimate a covariance matrix, given data.

    Covariance indicates the level to which two variables vary together.
    If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
    then the covariance matrix element :math:`C_{ij}` is the covariance of
    :math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
    of :math:`x_i`.

    Parameters
    ----------
    m : array_like
        A 1-D or 2-D array containing multiple variables and observations.
        Each row of `m` represents a variable, and each column a single
        observation of all those variables. Also see `rowvar` below.
    y : array_like, optional
        An additional set of variables and observations. `y` has the same
        form as that of `m`.
    rowvar : int, optional
        If `rowvar` is non-zero (default), then each row represents a
        variable, with observations in the columns. Otherwise, the
        relationship is transposed: each column represents a variable,
        while the rows contain observations.
    bias : int, optional
        Default normalization is by ``(N - 1)``, where ``N`` is the number
        of observations given (unbiased estimate). If `bias` is 1, then
        normalization is by ``N``. These values can be overridden by using
        the keyword ``ddof`` in numpy versions >= 1.5.
    ddof : int, optional
        .. versionadded:: 1.5
        If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
        the number of observations; this overrides the value implied by
        ``bias``. The default value is ``None``.
    weights : array-like, optional
        A 1-D array of weights with a length equal to the number of
        observations.
    repeat_weights : int, optional
        The default treatment of weights in the weighted covariance is to
        first normalize them to unit sum and use the biased weighted
        covariance equation. If `repeat_weights` is 1 then the weights must
        represent an integer number of occurrences of each observation and
        both a biased and unbiased weighted covariance is defined because
        the total sample size can be determined.

    Returns
    -------
    out : ndarray
        The covariance matrix of the variables.

    See Also
    --------
    corrcoef : Normalized covariance matrix

    Examples
    --------
    Consider two variables, :math:`x_0` and :math:`x_1`, which
    correlate perfectly, but in opposite directions:

    >>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
    >>> x
    array([[0, 1, 2],
           [2, 1, 0]])

    Note how :math:`x_0` increases while :math:`x_1` decreases. The
    covariance matrix shows this clearly:

    >>> np.cov(x)
    array([[ 1., -1.],
           [-1.,  1.]])

    Note that element :math:`C_{0,1}`, which shows the correlation between
    :math:`x_0` and :math:`x_1`, is negative.

    Further, note how `x` and `y` are combined:

    >>> x = [-2.1, -1, 4.3]
    >>> y = [3, 1.1, 0.12]
    >>> X = np.vstack((x,y))
    >>> print np.cov(X)
    [[ 11.71        -4.286     ]
     [ -4.286        2.14413333]]
    >>> print np.cov(x, y)
    [[ 11.71        -4.286     ]
     [ -4.286        2.14413333]]
    >>> print np.cov(x)
    11.71
    """
    import numpy as np
    # Check inputs
    if ddof is not None and ddof != int(ddof):
        raise ValueError(
            "ddof must be integer")

    X = np.array(m, ndmin=2, dtype=float)
    if X.size == 0:
        # handle empty arrays
        return np.array(m)
    if X.shape[0] == 1:
        rowvar = 1
    if rowvar:
        axis = 0
        tup = (slice(None), np.newaxis)
    else:
        axis = 1
        tup = (np.newaxis, slice(None))

    if y is not None:
        y = np.array(y, copy=False, ndmin=2, dtype=float)
        X = np.concatenate((X, y), axis)

    if ddof is None:
        if bias == 0:
            ddof = 1
        else:
            ddof = 0

    if weights is not None:
        weights = np.array(weights, dtype=float)
        weights_sum = weights.sum()
        if weights_sum <= 0:
            raise ValueError(
                "sum of weights is non-positive")
        X -= np.average(X, axis=1-axis, weights=weights)[tup]
        if repeat_weights:
            # each weight represents a number of repetitions of an observation
            # the total sample size can be determined in this case and we have
            # both an unbiased and biased weighted covariance
            fact = weights_sum - ddof
        else:
            # normalize weights so they sum to unity
            weights /= weights_sum
            # unbiased weighted covariance is not defined if the weights are
            # not integral frequencies (repeat-type)
            fact = (1. - np.power(weights, 2).sum())
    else:
        weights = 1
        X -= X.mean(axis=1-axis)[tup]
        if rowvar:
            N = X.shape[1]
        else:
            N = X.shape[0]
        fact = float(N - ddof)

    if not rowvar:
        return (np.dot(weights * X.T, X.conj()) / fact).squeeze()
    else:
        return (np.dot(weights * X, X.T.conj()) / fact).squeeze()

sha: 3926935e1f2100d8ba68070c2ab44055d4800f73
url: https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/plot_corrcoef_matrix.py#L136-L285
partition: train
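
The repeat_weights behaviour documented above can be checked against plain numpy: with integer repetition counts, the unbiased weighted covariance should equal np.cov of the expanded sample. A small sketch (the import path is inferred from this entry's path field):

import numpy as np
from rootpy.plotting.contrib.plot_corrcoef_matrix import cov

x = np.array([1., 2., 3.])
w = np.array([1., 2., 1.])   # each weight = number of occurrences of that observation

print(cov(x, weights=w, repeat_weights=1))   # 0.666...
print(np.cov([1., 2., 2., 3.]))              # 0.666... (same sample written out in full)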

repo: rootpy/rootpy
path: rootpy/plotting/contrib/plot_corrcoef_matrix.py
func_name: corrcoef
language: python
code:

def corrcoef(x, y=None, rowvar=1, bias=0, ddof=None, weights=None,
             repeat_weights=0):
    """
    Return correlation coefficients.

    Please refer to the documentation for `cov` for more detail. The
    relationship between the correlation coefficient matrix, `P`, and the
    covariance matrix, `C`, is

    .. math:: P_{ij} = \\frac{ C_{ij} } { \\sqrt{ C_{ii} * C_{jj} } }

    The values of `P` are between -1 and 1, inclusive.

    Parameters
    ----------
    x : array_like
        A 1-D or 2-D array containing multiple variables and observations.
        Each row of `m` represents a variable, and each column a single
        observation of all those variables. Also see `rowvar` below.
    y : array_like, optional
        An additional set of variables and observations. `y` has the same
        shape as `m`.
    rowvar : int, optional
        If `rowvar` is non-zero (default), then each row represents a
        variable, with observations in the columns. Otherwise, the
        relationship is transposed: each column represents a variable,
        while the rows contain observations.
    bias : int, optional
        Default normalization is by ``(N - 1)``, where ``N`` is the number
        of observations (unbiased estimate). If `bias` is 1, then
        normalization is by ``N``. These values can be overridden by using
        the keyword ``ddof`` in numpy versions >= 1.5.
    ddof : {None, int}, optional
        .. versionadded:: 1.5
        If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
        the number of observations; this overrides the value implied by
        ``bias``. The default value is ``None``.
    weights : array-like, optional
        A 1-D array of weights with a length equal to the number of
        observations.
    repeat_weights : int, optional
        The default treatment of weights in the weighted covariance is to
        first normalize them to unit sum and use the biased weighted
        covariance equation. If `repeat_weights` is 1 then the weights must
        represent an integer number of occurrences of each observation and
        both a biased and unbiased weighted covariance is defined because
        the total sample size can be determined.

    Returns
    -------
    out : ndarray
        The correlation coefficient matrix of the variables.

    See Also
    --------
    cov : Covariance matrix
    """
    import numpy as np
    c = cov(x, y, rowvar, bias, ddof, weights, repeat_weights)
    if c.size == 0:
        # handle empty arrays
        return c
    try:
        d = np.diag(c)
    except ValueError:
        # scalar covariance
        return 1
    return c / np.sqrt(np.multiply.outer(d, d))
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/plot_corrcoef_matrix.py#L288-L355
train
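A minimal usage sketch for the `corrcoef` record above; the import path is inferred from the record's URL (rootpy/plotting/contrib/plot_corrcoef_matrix.py) and ROOT/rootpy plus numpy are assumed to be installed::

    import numpy as np
    from rootpy.plotting.contrib.plot_corrcoef_matrix import corrcoef

    rng = np.random.RandomState(42)
    x = rng.normal(size=(3, 1000))        # three variables, 1000 observations (rowvar=1 layout)
    w = rng.uniform(0.5, 2.0, size=1000)  # one weight per observation

    p = corrcoef(x, weights=w)            # weighted correlation matrix
    print(p.shape)                        # (3, 3), with ones on the diagonal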
rootpy/rootpy
rootpy/tree/cut.py
Cut.safe
def safe(self, parentheses=True): """ Returns a string representation with special characters replaced by safer characters for use in file names. """ if not self: return "" string = str(self) string = string.replace("**", "_pow_") string = string.replace("*", "_mul_") string = string.replace("/", "_div_") string = string.replace("==", "_eq_") string = string.replace("<=", "_leq_") string = string.replace(">=", "_geq_") string = string.replace("<", "_lt_") string = string.replace(">", "_gt_") string = string.replace("&&", "_and_") string = string.replace("||", "_or_") string = string.replace("!", "not_") if parentheses: string = string.replace("(", "L") string = string.replace(")", "R") else: string = string.replace("(", "") string = string.replace(")", "") string = string.replace(" ", "") return string
python
def safe(self, parentheses=True): """ Returns a string representation with special characters replaced by safer characters for use in file names. """ if not self: return "" string = str(self) string = string.replace("**", "_pow_") string = string.replace("*", "_mul_") string = string.replace("/", "_div_") string = string.replace("==", "_eq_") string = string.replace("<=", "_leq_") string = string.replace(">=", "_geq_") string = string.replace("<", "_lt_") string = string.replace(">", "_gt_") string = string.replace("&&", "_and_") string = string.replace("||", "_or_") string = string.replace("!", "not_") if parentheses: string = string.replace("(", "L") string = string.replace(")", "R") else: string = string.replace("(", "") string = string.replace(")", "") string = string.replace(" ", "") return string
[ "def", "safe", "(", "self", ",", "parentheses", "=", "True", ")", ":", "if", "not", "self", ":", "return", "\"\"", "string", "=", "str", "(", "self", ")", "string", "=", "string", ".", "replace", "(", "\"**\"", ",", "\"_pow_\"", ")", "string", "=", "string", ".", "replace", "(", "\"*\"", ",", "\"_mul_\"", ")", "string", "=", "string", ".", "replace", "(", "\"/\"", ",", "\"_div_\"", ")", "string", "=", "string", ".", "replace", "(", "\"==\"", ",", "\"_eq_\"", ")", "string", "=", "string", ".", "replace", "(", "\"<=\"", ",", "\"_leq_\"", ")", "string", "=", "string", ".", "replace", "(", "\">=\"", ",", "\"_geq_\"", ")", "string", "=", "string", ".", "replace", "(", "\"<\"", ",", "\"_lt_\"", ")", "string", "=", "string", ".", "replace", "(", "\">\"", ",", "\"_gt_\"", ")", "string", "=", "string", ".", "replace", "(", "\"&&\"", ",", "\"_and_\"", ")", "string", "=", "string", ".", "replace", "(", "\"||\"", ",", "\"_or_\"", ")", "string", "=", "string", ".", "replace", "(", "\"!\"", ",", "\"not_\"", ")", "if", "parentheses", ":", "string", "=", "string", ".", "replace", "(", "\"(\"", ",", "\"L\"", ")", "string", "=", "string", ".", "replace", "(", "\")\"", ",", "\"R\"", ")", "else", ":", "string", "=", "string", ".", "replace", "(", "\"(\"", ",", "\"\"", ")", "string", "=", "string", ".", "replace", "(", "\")\"", ",", "\"\"", ")", "string", "=", "string", ".", "replace", "(", "\" \"", ",", "\"\"", ")", "return", "string" ]
Returns a string representation with special characters replaced by safer characters for use in file names.
[ "Returns", "a", "string", "representation", "with", "special", "characters", "replaced", "by", "safer", "characters", "for", "use", "in", "file", "names", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L222-L248
train
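A short sketch of `Cut.safe` in use; it assumes ROOT is installed and that `Cut` is importable from `rootpy.tree` (the class lives in rootpy/tree/cut.py per the record's URL)::

    from rootpy.tree import Cut

    cut = Cut('pt > 40 && abs(eta) < 2.5')
    print(cut.safe())                   # operators become _gt_, _and_, ...; parentheses become L/R
    print(cut.safe(parentheses=False))  # parentheses are stripped instead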
rootpy/rootpy
rootpy/tree/cut.py
Cut.latex
def latex(self): """ Returns a string representation for use in LaTeX """ if not self: return "" s = str(self) s = s.replace("==", " = ") s = s.replace("<=", " \leq ") s = s.replace(">=", " \geq ") s = s.replace("&&", r" \text{ and } ") s = s.replace("||", r" \text{ or } ") return s
python
def latex(self): """ Returns a string representation for use in LaTeX """ if not self: return "" s = str(self) s = s.replace("==", " = ") s = s.replace("<=", " \leq ") s = s.replace(">=", " \geq ") s = s.replace("&&", r" \text{ and } ") s = s.replace("||", r" \text{ or } ") return s
[ "def", "latex", "(", "self", ")", ":", "if", "not", "self", ":", "return", "\"\"", "s", "=", "str", "(", "self", ")", "s", "=", "s", ".", "replace", "(", "\"==\"", ",", "\" = \"", ")", "s", "=", "s", ".", "replace", "(", "\"<=\"", ",", "\" \\leq \"", ")", "s", "=", "s", ".", "replace", "(", "\">=\"", ",", "\" \\geq \"", ")", "s", "=", "s", ".", "replace", "(", "\"&&\"", ",", "r\" \\text{ and } \"", ")", "s", "=", "s", ".", "replace", "(", "\"||\"", ",", "r\" \\text{ or } \"", ")", "return", "s" ]
Returns a string representation for use in LaTeX
[ "Returns", "a", "string", "representation", "for", "use", "in", "LaTeX" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L250-L262
train
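A companion sketch for `Cut.latex`, under the same assumptions as the `Cut.safe` example above (ROOT installed, `Cut` importable from `rootpy.tree`)::

    from rootpy.tree import Cut

    sel = Cut('mass >= 100 && mass <= 150')
    print(sel.latex())   # '>=' -> ' \geq ', '<=' -> ' \leq ', '&&' -> ' \text{ and } '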
rootpy/rootpy
rootpy/tree/cut.py
Cut.replace
def replace(self, name, newname): """ Replace all occurrences of name with newname """ if not re.match("[a-zA-Z]\w*", name): return None if not re.match("[a-zA-Z]\w*", newname): return None def _replace(match): return match.group(0).replace(match.group('name'), newname) pattern = re.compile("(\W|^)(?P<name>" + name + ")(\W|$)") cut = re.sub(pattern, _replace, str(self)) return Cut(cut)
python
def replace(self, name, newname): """ Replace all occurrences of name with newname """ if not re.match("[a-zA-Z]\w*", name): return None if not re.match("[a-zA-Z]\w*", newname): return None def _replace(match): return match.group(0).replace(match.group('name'), newname) pattern = re.compile("(\W|^)(?P<name>" + name + ")(\W|$)") cut = re.sub(pattern, _replace, str(self)) return Cut(cut)
[ "def", "replace", "(", "self", ",", "name", ",", "newname", ")", ":", "if", "not", "re", ".", "match", "(", "\"[a-zA-Z]\\w*\"", ",", "name", ")", ":", "return", "None", "if", "not", "re", ".", "match", "(", "\"[a-zA-Z]\\w*\"", ",", "newname", ")", ":", "return", "None", "def", "_replace", "(", "match", ")", ":", "return", "match", ".", "group", "(", "0", ")", ".", "replace", "(", "match", ".", "group", "(", "'name'", ")", ",", "newname", ")", "pattern", "=", "re", ".", "compile", "(", "\"(\\W|^)(?P<name>\"", "+", "name", "+", "\")(\\W|$)\"", ")", "cut", "=", "re", ".", "sub", "(", "pattern", ",", "_replace", ",", "str", "(", "self", ")", ")", "return", "Cut", "(", "cut", ")" ]
Replace all occurrences of name with newname
[ "Replace", "all", "occurrences", "of", "name", "with", "newname" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L273-L287
train
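A sketch of `Cut.replace` renaming a variable inside an existing selection, with the same assumptions as the other `Cut` examples::

    from rootpy.tree import Cut

    cut = Cut('jet_pt > 30 && jet_eta < 2.1')
    renamed = cut.replace('jet_pt', 'jet_pt_corr')   # new Cut with whole-word occurrences renamed
    # replace() returns None (rather than raising) if either name is not a valid identifier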
rootpy/rootpy
docs/sphinxext/ipython_directive.py
EmbeddedSphinxShell.save_image
def save_image(self, image_file): """ Saves the image file to disk. """ self.ensure_pyplot() command = 'plt.gcf().savefig("%s")'%image_file #print 'SAVEFIG', command # dbg self.process_input_line('bookmark ipy_thisdir', store_history=False) self.process_input_line('cd -b ipy_savedir', store_history=False) self.process_input_line(command, store_history=False) self.process_input_line('cd -b ipy_thisdir', store_history=False) self.process_input_line('bookmark -d ipy_thisdir', store_history=False) self.clear_cout()
python
def save_image(self, image_file): """ Saves the image file to disk. """ self.ensure_pyplot() command = 'plt.gcf().savefig("%s")'%image_file #print 'SAVEFIG', command # dbg self.process_input_line('bookmark ipy_thisdir', store_history=False) self.process_input_line('cd -b ipy_savedir', store_history=False) self.process_input_line(command, store_history=False) self.process_input_line('cd -b ipy_thisdir', store_history=False) self.process_input_line('bookmark -d ipy_thisdir', store_history=False) self.clear_cout()
[ "def", "save_image", "(", "self", ",", "image_file", ")", ":", "self", ".", "ensure_pyplot", "(", ")", "command", "=", "'plt.gcf().savefig(\"%s\")'", "%", "image_file", "#print 'SAVEFIG', command # dbg", "self", ".", "process_input_line", "(", "'bookmark ipy_thisdir'", ",", "store_history", "=", "False", ")", "self", ".", "process_input_line", "(", "'cd -b ipy_savedir'", ",", "store_history", "=", "False", ")", "self", ".", "process_input_line", "(", "command", ",", "store_history", "=", "False", ")", "self", ".", "process_input_line", "(", "'cd -b ipy_thisdir'", ",", "store_history", "=", "False", ")", "self", ".", "process_input_line", "(", "'bookmark -d ipy_thisdir'", ",", "store_history", "=", "False", ")", "self", ".", "clear_cout", "(", ")" ]
Saves the image file to disk.
[ "Saves", "the", "image", "file", "to", "disk", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/docs/sphinxext/ipython_directive.py#L393-L405
train
rootpy/rootpy
rootpy/plotting/base.py
Plottable.decorate
def decorate(self, other=None, **kwargs): """ Apply style options to a Plottable object. Returns a reference to self. """ if 'color' in kwargs: incompatible = [] for othercolor in ('linecolor', 'fillcolor', 'markercolor'): if othercolor in kwargs: incompatible.append(othercolor) if incompatible: raise ValueError( "Setting both the `color` and the `{0}` attribute{1} " "is ambiguous. Please set only one.".format( ', '.join(incompatible), 's' if len(incompatible) != 1 else '')) if other is not None: decor = other.decorators if 'color' in kwargs: decor.pop('linecolor', None) decor.pop('fillcolor', None) decor.pop('markercolor', None) decor.update(kwargs) kwargs = decor for key, value in kwargs.items(): if key in Plottable.EXTRA_ATTRS_DEPRECATED: newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key] warnings.warn( "`{0}` is deprecated and will be removed in " "future versions. Use `{1}` instead".format( key, newkey), DeprecationWarning) key = newkey if key in Plottable.EXTRA_ATTRS: setattr(self, key, value) elif key == 'markerstyle': self.SetMarkerStyle(value) elif key == 'markercolor': self.SetMarkerColor(value) elif key == 'markersize': self.SetMarkerSize(value) elif key == 'fillcolor': self.SetFillColor(value) elif key == 'fillstyle': self.SetFillStyle(value) elif key == 'linecolor': self.SetLineColor(value) elif key == 'linestyle': self.SetLineStyle(value) elif key == 'linewidth': self.SetLineWidth(value) elif key == 'color': self.SetColor(value) else: raise AttributeError( "unknown decoration attribute: `{0}`".format(key)) return self
python
def decorate(self, other=None, **kwargs): """ Apply style options to a Plottable object. Returns a reference to self. """ if 'color' in kwargs: incompatible = [] for othercolor in ('linecolor', 'fillcolor', 'markercolor'): if othercolor in kwargs: incompatible.append(othercolor) if incompatible: raise ValueError( "Setting both the `color` and the `{0}` attribute{1} " "is ambiguous. Please set only one.".format( ', '.join(incompatible), 's' if len(incompatible) != 1 else '')) if other is not None: decor = other.decorators if 'color' in kwargs: decor.pop('linecolor', None) decor.pop('fillcolor', None) decor.pop('markercolor', None) decor.update(kwargs) kwargs = decor for key, value in kwargs.items(): if key in Plottable.EXTRA_ATTRS_DEPRECATED: newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key] warnings.warn( "`{0}` is deprecated and will be removed in " "future versions. Use `{1}` instead".format( key, newkey), DeprecationWarning) key = newkey if key in Plottable.EXTRA_ATTRS: setattr(self, key, value) elif key == 'markerstyle': self.SetMarkerStyle(value) elif key == 'markercolor': self.SetMarkerColor(value) elif key == 'markersize': self.SetMarkerSize(value) elif key == 'fillcolor': self.SetFillColor(value) elif key == 'fillstyle': self.SetFillStyle(value) elif key == 'linecolor': self.SetLineColor(value) elif key == 'linestyle': self.SetLineStyle(value) elif key == 'linewidth': self.SetLineWidth(value) elif key == 'color': self.SetColor(value) else: raise AttributeError( "unknown decoration attribute: `{0}`".format(key)) return self
[ "def", "decorate", "(", "self", ",", "other", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "'color'", "in", "kwargs", ":", "incompatible", "=", "[", "]", "for", "othercolor", "in", "(", "'linecolor'", ",", "'fillcolor'", ",", "'markercolor'", ")", ":", "if", "othercolor", "in", "kwargs", ":", "incompatible", ".", "append", "(", "othercolor", ")", "if", "incompatible", ":", "raise", "ValueError", "(", "\"Setting both the `color` and the `{0}` attribute{1} \"", "\"is ambiguous. Please set only one.\"", ".", "format", "(", "', '", ".", "join", "(", "incompatible", ")", ",", "'s'", "if", "len", "(", "incompatible", ")", "!=", "1", "else", "''", ")", ")", "if", "other", "is", "not", "None", ":", "decor", "=", "other", ".", "decorators", "if", "'color'", "in", "kwargs", ":", "decor", ".", "pop", "(", "'linecolor'", ",", "None", ")", "decor", ".", "pop", "(", "'fillcolor'", ",", "None", ")", "decor", ".", "pop", "(", "'markercolor'", ",", "None", ")", "decor", ".", "update", "(", "kwargs", ")", "kwargs", "=", "decor", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "if", "key", "in", "Plottable", ".", "EXTRA_ATTRS_DEPRECATED", ":", "newkey", "=", "Plottable", ".", "EXTRA_ATTRS_DEPRECATED", "[", "key", "]", "warnings", ".", "warn", "(", "\"`{0}` is deprecated and will be removed in \"", "\"future versions. Use `{1}` instead\"", ".", "format", "(", "key", ",", "newkey", ")", ",", "DeprecationWarning", ")", "key", "=", "newkey", "if", "key", "in", "Plottable", ".", "EXTRA_ATTRS", ":", "setattr", "(", "self", ",", "key", ",", "value", ")", "elif", "key", "==", "'markerstyle'", ":", "self", ".", "SetMarkerStyle", "(", "value", ")", "elif", "key", "==", "'markercolor'", ":", "self", ".", "SetMarkerColor", "(", "value", ")", "elif", "key", "==", "'markersize'", ":", "self", ".", "SetMarkerSize", "(", "value", ")", "elif", "key", "==", "'fillcolor'", ":", "self", ".", "SetFillColor", "(", "value", ")", "elif", "key", "==", "'fillstyle'", ":", "self", ".", "SetFillStyle", "(", "value", ")", "elif", "key", "==", "'linecolor'", ":", "self", ".", "SetLineColor", "(", "value", ")", "elif", "key", "==", "'linestyle'", ":", "self", ".", "SetLineStyle", "(", "value", ")", "elif", "key", "==", "'linewidth'", ":", "self", ".", "SetLineWidth", "(", "value", ")", "elif", "key", "==", "'color'", ":", "self", ".", "SetColor", "(", "value", ")", "else", ":", "raise", "AttributeError", "(", "\"unknown decoration attribute: `{0}`\"", ".", "format", "(", "key", ")", ")", "return", "self" ]
Apply style options to a Plottable object. Returns a reference to self.
[ "Apply", "style", "options", "to", "a", "Plottable", "object", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/base.py#L174-L231
train
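A sketch of `decorate` on a rootpy histogram; it assumes ROOT is installed, that `Hist` (the usual rootpy histogram class) mixes in `Plottable`, and that rootpy's color handling accepts matplotlib-style names such as 'green'::

    from rootpy.plotting import Hist

    h = Hist(50, 0, 100)
    h.decorate(linewidth=2, fillcolor='green', markersize=0.8)  # returns self, so calls chain

    # 'color' sets line, fill and marker colors at once; combining it with
    # 'linecolor', 'fillcolor' or 'markercolor' in the same call raises ValueError.
    h2 = Hist(50, 0, 100).decorate(color='red')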
rootpy/rootpy
rootpy/tree/treeobject.py
TreeCollection.getitem
def getitem(self, index): """ direct access without going through self.selection """ if index >= getattr(self.tree, self.size): raise IndexError(index) if self.__cache_objects and index in self.__cache: return self.__cache[index] obj = self.tree_object_cls(self.tree, self.name, self.prefix, index) if self.__cache_objects: self.__cache[index] = obj return obj
python
def getitem(self, index): """ direct access without going through self.selection """ if index >= getattr(self.tree, self.size): raise IndexError(index) if self.__cache_objects and index in self.__cache: return self.__cache[index] obj = self.tree_object_cls(self.tree, self.name, self.prefix, index) if self.__cache_objects: self.__cache[index] = obj return obj
[ "def", "getitem", "(", "self", ",", "index", ")", ":", "if", "index", ">=", "getattr", "(", "self", ".", "tree", ",", "self", ".", "size", ")", ":", "raise", "IndexError", "(", "index", ")", "if", "self", ".", "__cache_objects", "and", "index", "in", "self", ".", "__cache", ":", "return", "self", ".", "__cache", "[", "index", "]", "obj", "=", "self", ".", "tree_object_cls", "(", "self", ".", "tree", ",", "self", ".", "name", ",", "self", ".", "prefix", ",", "index", ")", "if", "self", ".", "__cache_objects", ":", "self", ".", "__cache", "[", "index", "]", "=", "obj", "return", "obj" ]
direct access without going through self.selection
[ "direct", "access", "without", "going", "through", "self", ".", "selection" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treeobject.py#L219-L230
train
rootpy/rootpy
rootpy/defaults.py
configure_defaults
def configure_defaults(): """ This function is executed immediately after ROOT's finalSetup """ log.debug("configure_defaults()") global initialized initialized = True if use_rootpy_handler: # Need to do it again here, since it is overridden by ROOT. set_error_handler(python_logging_error_handler) if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS: ROOT.gROOT.SetBatch(True) log.debug('ROOT is running in batch mode') ROOT.gErrorIgnoreLevel = 0 this_dll = C.CDLL(None) try: EnableAutoDictionary = C.c_int.in_dll( this_dll, "G__EnableAutoDictionary") except ValueError: pass else: # Disable automatic dictionary generation EnableAutoDictionary.value = 0 # TODO(pwaller): idea, `execfile("userdata/initrc.py")` here? # note: that wouldn't allow the user to override the default # canvas size, for example. for init in _initializations: init()
python
def configure_defaults(): """ This function is executed immediately after ROOT's finalSetup """ log.debug("configure_defaults()") global initialized initialized = True if use_rootpy_handler: # Need to do it again here, since it is overridden by ROOT. set_error_handler(python_logging_error_handler) if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS: ROOT.gROOT.SetBatch(True) log.debug('ROOT is running in batch mode') ROOT.gErrorIgnoreLevel = 0 this_dll = C.CDLL(None) try: EnableAutoDictionary = C.c_int.in_dll( this_dll, "G__EnableAutoDictionary") except ValueError: pass else: # Disable automatic dictionary generation EnableAutoDictionary.value = 0 # TODO(pwaller): idea, `execfile("userdata/initrc.py")` here? # note: that wouldn't allow the user to override the default # canvas size, for example. for init in _initializations: init()
[ "def", "configure_defaults", "(", ")", ":", "log", ".", "debug", "(", "\"configure_defaults()\"", ")", "global", "initialized", "initialized", "=", "True", "if", "use_rootpy_handler", ":", "# Need to do it again here, since it is overridden by ROOT.", "set_error_handler", "(", "python_logging_error_handler", ")", "if", "os", ".", "environ", ".", "get", "(", "'ROOTPY_BATCH'", ",", "False", ")", "or", "IN_NOSETESTS", ":", "ROOT", ".", "gROOT", ".", "SetBatch", "(", "True", ")", "log", ".", "debug", "(", "'ROOT is running in batch mode'", ")", "ROOT", ".", "gErrorIgnoreLevel", "=", "0", "this_dll", "=", "C", ".", "CDLL", "(", "None", ")", "try", ":", "EnableAutoDictionary", "=", "C", ".", "c_int", ".", "in_dll", "(", "this_dll", ",", "\"G__EnableAutoDictionary\"", ")", "except", "ValueError", ":", "pass", "else", ":", "# Disable automatic dictionary generation", "EnableAutoDictionary", ".", "value", "=", "0", "# TODO(pwaller): idea, `execfile(\"userdata/initrc.py\")` here?", "# note: that wouldn't allow the user to override the default", "# canvas size, for example.", "for", "init", "in", "_initializations", ":", "init", "(", ")" ]
This function is executed immediately after ROOT's finalSetup
[ "This", "function", "is", "executed", "immediately", "after", "ROOT", "s", "finalSetup" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/defaults.py#L70-L104
train
rootpy/rootpy
rootpy/defaults.py
rp_module_level_in_stack
def rp_module_level_in_stack(): """ Returns true if we're during a rootpy import """ from traceback import extract_stack from rootpy import _ROOTPY_SOURCE_PATH modlevel_files = [filename for filename, _, func, _ in extract_stack() if func == "<module>"] return any(path.startswith(_ROOTPY_SOURCE_PATH) for path in modlevel_files)
python
def rp_module_level_in_stack(): """ Returns true if we're during a rootpy import """ from traceback import extract_stack from rootpy import _ROOTPY_SOURCE_PATH modlevel_files = [filename for filename, _, func, _ in extract_stack() if func == "<module>"] return any(path.startswith(_ROOTPY_SOURCE_PATH) for path in modlevel_files)
[ "def", "rp_module_level_in_stack", "(", ")", ":", "from", "traceback", "import", "extract_stack", "from", "rootpy", "import", "_ROOTPY_SOURCE_PATH", "modlevel_files", "=", "[", "filename", "for", "filename", ",", "_", ",", "func", ",", "_", "in", "extract_stack", "(", ")", "if", "func", "==", "\"<module>\"", "]", "return", "any", "(", "path", ".", "startswith", "(", "_ROOTPY_SOURCE_PATH", ")", "for", "path", "in", "modlevel_files", ")" ]
Return True if the current call stack is inside a module-level rootpy import
[ "Return", "True", "if", "the", "current", "call", "stack", "is", "inside", "a", "module-level", "rootpy", "import" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/defaults.py#L107-L116
train
rootpy/rootpy
rootpy/memory/deletion.py
monitor_deletion
def monitor_deletion(): """ Function for checking for correct deletion of weakref-able objects. Example usage:: monitor, is_alive = monitor_deletion() obj = set() monitor(obj, "obj") assert is_alive("obj") # True because there is a ref to `obj` is_alive del obj assert not is_alive("obj") # True because there `obj` is deleted """ monitors = {} def set_deleted(x): def _(weakref): del monitors[x] return _ def monitor(item, name): monitors[name] = ref(item, set_deleted(name)) def is_alive(name): return monitors.get(name, None) is not None return monitor, is_alive
python
def monitor_deletion(): """ Function for checking for correct deletion of weakref-able objects. Example usage:: monitor, is_alive = monitor_deletion() obj = set() monitor(obj, "obj") assert is_alive("obj") # True because there is a ref to `obj` is_alive del obj assert not is_alive("obj") # True because there `obj` is deleted """ monitors = {} def set_deleted(x): def _(weakref): del monitors[x] return _ def monitor(item, name): monitors[name] = ref(item, set_deleted(name)) def is_alive(name): return monitors.get(name, None) is not None return monitor, is_alive
[ "def", "monitor_deletion", "(", ")", ":", "monitors", "=", "{", "}", "def", "set_deleted", "(", "x", ")", ":", "def", "_", "(", "weakref", ")", ":", "del", "monitors", "[", "x", "]", "return", "_", "def", "monitor", "(", "item", ",", "name", ")", ":", "monitors", "[", "name", "]", "=", "ref", "(", "item", ",", "set_deleted", "(", "name", ")", ")", "def", "is_alive", "(", "name", ")", ":", "return", "monitors", ".", "get", "(", "name", ",", "None", ")", "is", "not", "None", "return", "monitor", ",", "is_alive" ]
Function for checking for correct deletion of weakref-able objects. Example usage:: monitor, is_alive = monitor_deletion() obj = set() monitor(obj, "obj") assert is_alive("obj") # True because there is a ref to `obj` is_alive del obj assert not is_alive("obj") # True because there `obj` is deleted
[ "Function", "for", "checking", "for", "correct", "deletion", "of", "weakref", "-", "able", "objects", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/memory/deletion.py#L25-L52
train
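The record's docstring already contains a usage example; spelled out here as a runnable sketch, with the import path inferred from the record's URL (rootpy/memory/deletion.py)::

    from rootpy.memory.deletion import monitor_deletion

    monitor, is_alive = monitor_deletion()
    obj = set()
    monitor(obj, "obj")
    assert is_alive("obj")      # a strong reference to obj still exists
    del obj
    assert not is_alive("obj")  # the weakref callback removed the monitor entry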
rootpy/rootpy
rootpy/plotting/utils.py
canvases_with
def canvases_with(drawable): """ Return a list of all canvases where `drawable` has been painted. Note: This function is inefficient because it inspects all objects on all canvases, recursively. Avoid calling it if you have a large number of canvases and primitives. """ return [c for c in ROOT.gROOT.GetListOfCanvases() if drawable in find_all_primitives(c)]
python
def canvases_with(drawable): """ Return a list of all canvases where `drawable` has been painted. Note: This function is inefficient because it inspects all objects on all canvases, recursively. Avoid calling it if you have a large number of canvases and primitives. """ return [c for c in ROOT.gROOT.GetListOfCanvases() if drawable in find_all_primitives(c)]
[ "def", "canvases_with", "(", "drawable", ")", ":", "return", "[", "c", "for", "c", "in", "ROOT", ".", "gROOT", ".", "GetListOfCanvases", "(", ")", "if", "drawable", "in", "find_all_primitives", "(", "c", ")", "]" ]
Return a list of all canvases where `drawable` has been painted. Note: This function is inefficient because it inspects all objects on all canvases, recursively. Avoid calling it if you have a large number of canvases and primitives.
[ "Return", "a", "list", "of", "all", "canvases", "where", "drawable", "has", "been", "painted", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/utils.py#L405-L414
train
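A sketch of `canvases_with`; it assumes ROOT is installed and that the import path follows the record's URL (rootpy/plotting/utils.py)::

    from rootpy.plotting import Canvas, Hist
    from rootpy.plotting.utils import canvases_with

    c = Canvas()
    h = Hist(10, 0, 1)
    h.Draw()
    print(canvases_with(h))   # expected to include c once h has been painted on it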
rootpy/rootpy
rootpy/plotting/utils.py
tick_length_pixels
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None): """ Set the axes tick lengths in pixels """ if ylength is None: ylength = xlength xaxis.SetTickLength(xlength / float(pad.height_pixels)) yaxis.SetTickLength(ylength / float(pad.width_pixels))
python
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None): """ Set the axes tick lengths in pixels """ if ylength is None: ylength = xlength xaxis.SetTickLength(xlength / float(pad.height_pixels)) yaxis.SetTickLength(ylength / float(pad.width_pixels))
[ "def", "tick_length_pixels", "(", "pad", ",", "xaxis", ",", "yaxis", ",", "xlength", ",", "ylength", "=", "None", ")", ":", "if", "ylength", "is", "None", ":", "ylength", "=", "xlength", "xaxis", ".", "SetTickLength", "(", "xlength", "/", "float", "(", "pad", ".", "height_pixels", ")", ")", "yaxis", ".", "SetTickLength", "(", "ylength", "/", "float", "(", "pad", ".", "width_pixels", ")", ")" ]
Set the axes tick lengths in pixels
[ "Set", "the", "axes", "tick", "lengths", "in", "pixels" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/utils.py#L436-L443
train
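A sketch of `tick_length_pixels`; it assumes ROOT is installed and that rootpy's `Canvas` and `Hist` expose the `width_pixels`/`height_pixels` and `xaxis`/`yaxis` attributes the function relies on::

    from rootpy.plotting import Canvas, Hist
    from rootpy.plotting.utils import tick_length_pixels

    c = Canvas(width=800, height=600)
    h = Hist(100, 0, 1)
    h.Draw()
    tick_length_pixels(c, h.xaxis, h.yaxis, 10)   # roughly 10-pixel ticks on both axes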
rootpy/rootpy
rootpy/tree/treetypes.py
BaseArray.reset
def reset(self): """Reset the value to the default""" if self.resetable: for i in range(len(self)): self[i] = self.default
python
def reset(self): """Reset the value to the default""" if self.resetable: for i in range(len(self)): self[i] = self.default
[ "def", "reset", "(", "self", ")", ":", "if", "self", ".", "resetable", ":", "for", "i", "in", "range", "(", "len", "(", "self", ")", ")", ":", "self", "[", "i", "]", "=", "self", ".", "default" ]
Reset the value to the default
[ "Reset", "the", "value", "to", "the", "default" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treetypes.py#L210-L214
train
rootpy/rootpy
rootpy/stats/fit.py
minimize
def minimize(func, minimizer_type=None, minimizer_algo=None, strategy=None, retry=0, scan=False, print_level=None): """ Minimize a RooAbsReal function Parameters ---------- func : RooAbsReal The function to minimize minimizer_type : string, optional (default=None) The minimizer type: "Minuit" or "Minuit2". If None (the default) then use the current global default value. minimizer_algo : string, optional (default=None) The minimizer algorithm: "Migrad", etc. If None (the default) then use the current global default value. strategy : int, optional (default=None) Set the MINUIT strategy. Accepted values are 0, 1, and 2 and represent MINUIT strategies for dealing most efficiently with fast FCNs (0), expensive FCNs (2) and 'intermediate' FCNs (1). If None (the default) then use the current global default value. retry : int, optional (default=0) Number of times to retry failed minimizations. The strategy is incremented to a maximum of 2 from its initial value and remains at 2 for additional retries. scan : bool, optional (default=False) If True then run Minuit2's scan algorithm before running the main ``minimizer_algo`` ("Migrad"). print_level : int, optional (default=None) The verbosity level for the minimizer algorithm. If None (the default) then use the global default print level. If negative then all non-fatal messages will be suppressed. Returns ------- minimizer : RooMinimizer The minimizer. Get the RooFitResult with ``minimizer.save()``. """ llog = log['minimize'] min_opts = ROOT.Math.MinimizerOptions if minimizer_type is None: minimizer_type = min_opts.DefaultMinimizerType() if minimizer_algo is None: minimizer_algo = min_opts.DefaultMinimizerAlgo() if strategy is None: strategy = min_opts.DefaultStrategy() if print_level is None: print_level = min_opts.DefaultPrintLevel() if print_level < 0: msg_service = ROOT.RooMsgService.instance() msg_level = msg_service.globalKillBelow() msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL) minim = Minimizer(func) minim.setPrintLevel(print_level) minim.setStrategy(strategy) if scan: llog.info("running scan algorithm ...") minim.minimize('Minuit2', 'Scan') llog.info("minimizing with {0} {1} using strategy {2}".format( minimizer_type, minimizer_algo, strategy)) status = minim.minimize(minimizer_type, minimizer_algo) iretry = 0 while iretry < retry and status not in (0, 1): if strategy < 2: strategy += 1 minim.setStrategy(strategy) llog.warning("minimization failed with status {0:d}".format(status)) llog.info("retrying minimization with strategy {0:d}".format(strategy)) status = minim.minimize(minimizer_type, minimizer_algo) if status in (0, 1): llog.info("found minimum") else: llog.warning("minimization failed with status {0:d}".format(status)) if print_level < 0: msg_service.setGlobalKillBelow(msg_level) return minim
python
def minimize(func, minimizer_type=None, minimizer_algo=None, strategy=None, retry=0, scan=False, print_level=None): """ Minimize a RooAbsReal function Parameters ---------- func : RooAbsReal The function to minimize minimizer_type : string, optional (default=None) The minimizer type: "Minuit" or "Minuit2". If None (the default) then use the current global default value. minimizer_algo : string, optional (default=None) The minimizer algorithm: "Migrad", etc. If None (the default) then use the current global default value. strategy : int, optional (default=None) Set the MINUIT strategy. Accepted values are 0, 1, and 2 and represent MINUIT strategies for dealing most efficiently with fast FCNs (0), expensive FCNs (2) and 'intermediate' FCNs (1). If None (the default) then use the current global default value. retry : int, optional (default=0) Number of times to retry failed minimizations. The strategy is incremented to a maximum of 2 from its initial value and remains at 2 for additional retries. scan : bool, optional (default=False) If True then run Minuit2's scan algorithm before running the main ``minimizer_algo`` ("Migrad"). print_level : int, optional (default=None) The verbosity level for the minimizer algorithm. If None (the default) then use the global default print level. If negative then all non-fatal messages will be suppressed. Returns ------- minimizer : RooMinimizer The minimizer. Get the RooFitResult with ``minimizer.save()``. """ llog = log['minimize'] min_opts = ROOT.Math.MinimizerOptions if minimizer_type is None: minimizer_type = min_opts.DefaultMinimizerType() if minimizer_algo is None: minimizer_algo = min_opts.DefaultMinimizerAlgo() if strategy is None: strategy = min_opts.DefaultStrategy() if print_level is None: print_level = min_opts.DefaultPrintLevel() if print_level < 0: msg_service = ROOT.RooMsgService.instance() msg_level = msg_service.globalKillBelow() msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL) minim = Minimizer(func) minim.setPrintLevel(print_level) minim.setStrategy(strategy) if scan: llog.info("running scan algorithm ...") minim.minimize('Minuit2', 'Scan') llog.info("minimizing with {0} {1} using strategy {2}".format( minimizer_type, minimizer_algo, strategy)) status = minim.minimize(minimizer_type, minimizer_algo) iretry = 0 while iretry < retry and status not in (0, 1): if strategy < 2: strategy += 1 minim.setStrategy(strategy) llog.warning("minimization failed with status {0:d}".format(status)) llog.info("retrying minimization with strategy {0:d}".format(strategy)) status = minim.minimize(minimizer_type, minimizer_algo) if status in (0, 1): llog.info("found minimum") else: llog.warning("minimization failed with status {0:d}".format(status)) if print_level < 0: msg_service.setGlobalKillBelow(msg_level) return minim
[ "def", "minimize", "(", "func", ",", "minimizer_type", "=", "None", ",", "minimizer_algo", "=", "None", ",", "strategy", "=", "None", ",", "retry", "=", "0", ",", "scan", "=", "False", ",", "print_level", "=", "None", ")", ":", "llog", "=", "log", "[", "'minimize'", "]", "min_opts", "=", "ROOT", ".", "Math", ".", "MinimizerOptions", "if", "minimizer_type", "is", "None", ":", "minimizer_type", "=", "min_opts", ".", "DefaultMinimizerType", "(", ")", "if", "minimizer_algo", "is", "None", ":", "minimizer_algo", "=", "min_opts", ".", "DefaultMinimizerAlgo", "(", ")", "if", "strategy", "is", "None", ":", "strategy", "=", "min_opts", ".", "DefaultStrategy", "(", ")", "if", "print_level", "is", "None", ":", "print_level", "=", "min_opts", ".", "DefaultPrintLevel", "(", ")", "if", "print_level", "<", "0", ":", "msg_service", "=", "ROOT", ".", "RooMsgService", ".", "instance", "(", ")", "msg_level", "=", "msg_service", ".", "globalKillBelow", "(", ")", "msg_service", ".", "setGlobalKillBelow", "(", "ROOT", ".", "RooFit", ".", "FATAL", ")", "minim", "=", "Minimizer", "(", "func", ")", "minim", ".", "setPrintLevel", "(", "print_level", ")", "minim", ".", "setStrategy", "(", "strategy", ")", "if", "scan", ":", "llog", ".", "info", "(", "\"running scan algorithm ...\"", ")", "minim", ".", "minimize", "(", "'Minuit2'", ",", "'Scan'", ")", "llog", ".", "info", "(", "\"minimizing with {0} {1} using strategy {2}\"", ".", "format", "(", "minimizer_type", ",", "minimizer_algo", ",", "strategy", ")", ")", "status", "=", "minim", ".", "minimize", "(", "minimizer_type", ",", "minimizer_algo", ")", "iretry", "=", "0", "while", "iretry", "<", "retry", "and", "status", "not", "in", "(", "0", ",", "1", ")", ":", "if", "strategy", "<", "2", ":", "strategy", "+=", "1", "minim", ".", "setStrategy", "(", "strategy", ")", "llog", ".", "warning", "(", "\"minimization failed with status {0:d}\"", ".", "format", "(", "status", ")", ")", "llog", ".", "info", "(", "\"retrying minimization with strategy {0:d}\"", ".", "format", "(", "strategy", ")", ")", "status", "=", "minim", ".", "minimize", "(", "minimizer_type", ",", "minimizer_algo", ")", "if", "status", "in", "(", "0", ",", "1", ")", ":", "llog", ".", "info", "(", "\"found minimum\"", ")", "else", ":", "llog", ".", "warning", "(", "\"minimization failed with status {0:d}\"", ".", "format", "(", "status", ")", ")", "if", "print_level", "<", "0", ":", "msg_service", ".", "setGlobalKillBelow", "(", "msg_level", ")", "return", "minim" ]
Minimize a RooAbsReal function Parameters ---------- func : RooAbsReal The function to minimize minimizer_type : string, optional (default=None) The minimizer type: "Minuit" or "Minuit2". If None (the default) then use the current global default value. minimizer_algo : string, optional (default=None) The minimizer algorithm: "Migrad", etc. If None (the default) then use the current global default value. strategy : int, optional (default=None) Set the MINUIT strategy. Accepted values are 0, 1, and 2 and represent MINUIT strategies for dealing most efficiently with fast FCNs (0), expensive FCNs (2) and 'intermediate' FCNs (1). If None (the default) then use the current global default value. retry : int, optional (default=0) Number of times to retry failed minimizations. The strategy is incremented to a maximum of 2 from its initial value and remains at 2 for additional retries. scan : bool, optional (default=False) If True then run Minuit2's scan algorithm before running the main ``minimizer_algo`` ("Migrad"). print_level : int, optional (default=None) The verbosity level for the minimizer algorithm. If None (the default) then use the global default print level. If negative then all non-fatal messages will be suppressed. Returns ------- minimizer : RooMinimizer The minimizer. Get the RooFitResult with ``minimizer.save()``.
[ "Minimize", "a", "RooAbsReal", "function" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/fit.py#L15-L113
train
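A self-contained toy fit using `minimize`; it assumes ROOT built with RooFit and that the import path follows the record's URL (rootpy/stats/fit.py)::

    import ROOT
    from rootpy.stats.fit import minimize

    x = ROOT.RooRealVar('x', 'x', -5, 5)
    mean = ROOT.RooRealVar('mean', 'mean', 0, -1, 1)
    sigma = ROOT.RooRealVar('sigma', 'sigma', 1, 0.1, 3)
    pdf = ROOT.RooGaussian('gauss', 'gauss', x, mean, sigma)
    data = pdf.generate(ROOT.RooArgSet(x), 1000)

    nll = pdf.createNLL(data)
    minim = minimize(nll, retry=2, print_level=-1)   # retries with a harder strategy on failure
    result = minim.save()                            # RooFitResult, as noted in the docstring
    print(result.status(), result.minNll())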
rootpy/rootpy
rootpy/stl.py
make_string
def make_string(obj): """ If ``obj`` is a string, return that, otherwise attempt to figure out the name of a type. """ if inspect.isclass(obj): if issubclass(obj, Object): return obj._ROOT.__name__ if issubclass(obj, string_types): return 'string' return obj.__name__ if not isinstance(obj, string_types): raise TypeError("expected string or class") return obj
python
def make_string(obj): """ If ``obj`` is a string, return that, otherwise attempt to figure out the name of a type. """ if inspect.isclass(obj): if issubclass(obj, Object): return obj._ROOT.__name__ if issubclass(obj, string_types): return 'string' return obj.__name__ if not isinstance(obj, string_types): raise TypeError("expected string or class") return obj
[ "def", "make_string", "(", "obj", ")", ":", "if", "inspect", ".", "isclass", "(", "obj", ")", ":", "if", "issubclass", "(", "obj", ",", "Object", ")", ":", "return", "obj", ".", "_ROOT", ".", "__name__", "if", "issubclass", "(", "obj", ",", "string_types", ")", ":", "return", "'string'", "return", "obj", ".", "__name__", "if", "not", "isinstance", "(", "obj", ",", "string_types", ")", ":", "raise", "TypeError", "(", "\"expected string or class\"", ")", "return", "obj" ]
If ``obj`` is a string, return that, otherwise attempt to figure out the name of a type.
[ "If", "obj", "is", "a", "string", "return", "that", "otherwise", "attempt", "to", "figure", "out", "the", "name", "of", "a", "type", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L302-L315
train
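A few illustrative calls to `make_string`; the function lives in rootpy/stl.py per the record's URL, and ROOT/rootpy are assumed to be installed::

    from rootpy.stl import make_string

    make_string('vector<int>')   # strings pass through unchanged
    make_string(str)             # built-in string types map to 'string'
    make_string(dict)            # other classes fall back to their __name__ -> 'dict'
    # make_string(3.14) would raise TypeError: neither a string nor a class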
rootpy/rootpy
rootpy/stl.py
generate
def generate(declaration, headers=None, has_iterators=False): """Compile and load the reflection dictionary for a type. If the requested dictionary has already been cached, then load that instead. Parameters ---------- declaration : str A type declaration (for example "vector<int>") headers : str or list of str A header file or list of header files required to compile the dictionary for this type. has_iterators : bool If True, then include iterators in the dictionary generation. """ global NEW_DICTS # FIXME: _rootpy_dictionary_already_exists returns false positives # if a third-party module provides "incomplete" dictionaries. #if compiled._rootpy_dictionary_already_exists(declaration): # log.debug("generate({0}) => already available".format(declaration)) # return log.debug("requesting dictionary for {0}".format(declaration)) if headers: if isinstance(headers, string_types): headers = sorted(headers.split(';')) log.debug("using the headers {0}".format(', '.join(headers))) unique_name = ';'.join([declaration] + headers) else: unique_name = declaration unique_name = unique_name.replace(' ', '') # If the library is already loaded, do nothing if unique_name in LOADED_DICTS: log.debug("dictionary for {0} is already loaded".format(declaration)) return if sys.version_info[0] < 3: libname = hashlib.sha512(unique_name).hexdigest()[:16] else: libname = hashlib.sha512(unique_name.encode('utf-8')).hexdigest()[:16] libnameso = libname + ".so" if ROOT.gROOT.GetVersionInt() < 53403: # check for this class in the global TClass list and remove it # fixes infinite recursion in ROOT < 5.34.03 # (exact ROOT versions where this is required is unknown) cls = ROOT.gROOT.GetClass(declaration) if cls and not cls.IsLoaded(): log.debug("removing {0} from gROOT.GetListOfClasses()".format( declaration)) ROOT.gROOT.GetListOfClasses().Remove(cls) # If a .so already exists for this class, use it. if exists(pjoin(DICTS_PATH, libnameso)): log.debug("loading previously generated dictionary for {0}" .format(declaration)) if (ROOT.gInterpreter.Load(pjoin(DICTS_PATH, libnameso)) not in (0, 1)): raise RuntimeError( "failed to load the library for '{0}' @ {1}".format( declaration, libname)) LOADED_DICTS[unique_name] = None return with lock(pjoin(DICTS_PATH, "lock"), poll_interval=5, max_age=60): # This dict was not previously generated so we must create it now log.info("generating dictionary for {0} ...".format(declaration)) includes = '' if headers is not None: for header in headers: if re.match('^<.+>$', header): includes += '#include {0}\n'.format(header) else: includes += '#include "{0}"\n'.format(header) source = LINKDEF % locals() sourcepath = os.path.join(DICTS_PATH, '{0}.C'.format(libname)) log.debug("source path: {0}".format(sourcepath)) with open(sourcepath, 'w') as sourcefile: sourcefile.write(source) log.debug("include path: {0}".format( ROOT.gSystem.GetIncludePath())) if (ROOT.gSystem.CompileMacro( sourcepath, 'k-', libname, DICTS_PATH) != 1): raise RuntimeError( "failed to compile the library for '{0}'".format( sourcepath)) LOADED_DICTS[unique_name] = None NEW_DICTS = True
python
def generate(declaration, headers=None, has_iterators=False): """Compile and load the reflection dictionary for a type. If the requested dictionary has already been cached, then load that instead. Parameters ---------- declaration : str A type declaration (for example "vector<int>") headers : str or list of str A header file or list of header files required to compile the dictionary for this type. has_iterators : bool If True, then include iterators in the dictionary generation. """ global NEW_DICTS # FIXME: _rootpy_dictionary_already_exists returns false positives # if a third-party module provides "incomplete" dictionaries. #if compiled._rootpy_dictionary_already_exists(declaration): # log.debug("generate({0}) => already available".format(declaration)) # return log.debug("requesting dictionary for {0}".format(declaration)) if headers: if isinstance(headers, string_types): headers = sorted(headers.split(';')) log.debug("using the headers {0}".format(', '.join(headers))) unique_name = ';'.join([declaration] + headers) else: unique_name = declaration unique_name = unique_name.replace(' ', '') # If the library is already loaded, do nothing if unique_name in LOADED_DICTS: log.debug("dictionary for {0} is already loaded".format(declaration)) return if sys.version_info[0] < 3: libname = hashlib.sha512(unique_name).hexdigest()[:16] else: libname = hashlib.sha512(unique_name.encode('utf-8')).hexdigest()[:16] libnameso = libname + ".so" if ROOT.gROOT.GetVersionInt() < 53403: # check for this class in the global TClass list and remove it # fixes infinite recursion in ROOT < 5.34.03 # (exact ROOT versions where this is required is unknown) cls = ROOT.gROOT.GetClass(declaration) if cls and not cls.IsLoaded(): log.debug("removing {0} from gROOT.GetListOfClasses()".format( declaration)) ROOT.gROOT.GetListOfClasses().Remove(cls) # If a .so already exists for this class, use it. if exists(pjoin(DICTS_PATH, libnameso)): log.debug("loading previously generated dictionary for {0}" .format(declaration)) if (ROOT.gInterpreter.Load(pjoin(DICTS_PATH, libnameso)) not in (0, 1)): raise RuntimeError( "failed to load the library for '{0}' @ {1}".format( declaration, libname)) LOADED_DICTS[unique_name] = None return with lock(pjoin(DICTS_PATH, "lock"), poll_interval=5, max_age=60): # This dict was not previously generated so we must create it now log.info("generating dictionary for {0} ...".format(declaration)) includes = '' if headers is not None: for header in headers: if re.match('^<.+>$', header): includes += '#include {0}\n'.format(header) else: includes += '#include "{0}"\n'.format(header) source = LINKDEF % locals() sourcepath = os.path.join(DICTS_PATH, '{0}.C'.format(libname)) log.debug("source path: {0}".format(sourcepath)) with open(sourcepath, 'w') as sourcefile: sourcefile.write(source) log.debug("include path: {0}".format( ROOT.gSystem.GetIncludePath())) if (ROOT.gSystem.CompileMacro( sourcepath, 'k-', libname, DICTS_PATH) != 1): raise RuntimeError( "failed to compile the library for '{0}'".format( sourcepath)) LOADED_DICTS[unique_name] = None NEW_DICTS = True
[ "def", "generate", "(", "declaration", ",", "headers", "=", "None", ",", "has_iterators", "=", "False", ")", ":", "global", "NEW_DICTS", "# FIXME: _rootpy_dictionary_already_exists returns false positives", "# if a third-party module provides \"incomplete\" dictionaries.", "#if compiled._rootpy_dictionary_already_exists(declaration):", "# log.debug(\"generate({0}) => already available\".format(declaration))", "# return", "log", ".", "debug", "(", "\"requesting dictionary for {0}\"", ".", "format", "(", "declaration", ")", ")", "if", "headers", ":", "if", "isinstance", "(", "headers", ",", "string_types", ")", ":", "headers", "=", "sorted", "(", "headers", ".", "split", "(", "';'", ")", ")", "log", ".", "debug", "(", "\"using the headers {0}\"", ".", "format", "(", "', '", ".", "join", "(", "headers", ")", ")", ")", "unique_name", "=", "';'", ".", "join", "(", "[", "declaration", "]", "+", "headers", ")", "else", ":", "unique_name", "=", "declaration", "unique_name", "=", "unique_name", ".", "replace", "(", "' '", ",", "''", ")", "# If the library is already loaded, do nothing", "if", "unique_name", "in", "LOADED_DICTS", ":", "log", ".", "debug", "(", "\"dictionary for {0} is already loaded\"", ".", "format", "(", "declaration", ")", ")", "return", "if", "sys", ".", "version_info", "[", "0", "]", "<", "3", ":", "libname", "=", "hashlib", ".", "sha512", "(", "unique_name", ")", ".", "hexdigest", "(", ")", "[", ":", "16", "]", "else", ":", "libname", "=", "hashlib", ".", "sha512", "(", "unique_name", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", "[", ":", "16", "]", "libnameso", "=", "libname", "+", "\".so\"", "if", "ROOT", ".", "gROOT", ".", "GetVersionInt", "(", ")", "<", "53403", ":", "# check for this class in the global TClass list and remove it", "# fixes infinite recursion in ROOT < 5.34.03", "# (exact ROOT versions where this is required is unknown)", "cls", "=", "ROOT", ".", "gROOT", ".", "GetClass", "(", "declaration", ")", "if", "cls", "and", "not", "cls", ".", "IsLoaded", "(", ")", ":", "log", ".", "debug", "(", "\"removing {0} from gROOT.GetListOfClasses()\"", ".", "format", "(", "declaration", ")", ")", "ROOT", ".", "gROOT", ".", "GetListOfClasses", "(", ")", ".", "Remove", "(", "cls", ")", "# If a .so already exists for this class, use it.", "if", "exists", "(", "pjoin", "(", "DICTS_PATH", ",", "libnameso", ")", ")", ":", "log", ".", "debug", "(", "\"loading previously generated dictionary for {0}\"", ".", "format", "(", "declaration", ")", ")", "if", "(", "ROOT", ".", "gInterpreter", ".", "Load", "(", "pjoin", "(", "DICTS_PATH", ",", "libnameso", ")", ")", "not", "in", "(", "0", ",", "1", ")", ")", ":", "raise", "RuntimeError", "(", "\"failed to load the library for '{0}' @ {1}\"", ".", "format", "(", "declaration", ",", "libname", ")", ")", "LOADED_DICTS", "[", "unique_name", "]", "=", "None", "return", "with", "lock", "(", "pjoin", "(", "DICTS_PATH", ",", "\"lock\"", ")", ",", "poll_interval", "=", "5", ",", "max_age", "=", "60", ")", ":", "# This dict was not previously generated so we must create it now", "log", ".", "info", "(", "\"generating dictionary for {0} ...\"", ".", "format", "(", "declaration", ")", ")", "includes", "=", "''", "if", "headers", "is", "not", "None", ":", "for", "header", "in", "headers", ":", "if", "re", ".", "match", "(", "'^<.+>$'", ",", "header", ")", ":", "includes", "+=", "'#include {0}\\n'", ".", "format", "(", "header", ")", "else", ":", "includes", "+=", "'#include \"{0}\"\\n'", ".", "format", "(", "header", ")", 
"source", "=", "LINKDEF", "%", "locals", "(", ")", "sourcepath", "=", "os", ".", "path", ".", "join", "(", "DICTS_PATH", ",", "'{0}.C'", ".", "format", "(", "libname", ")", ")", "log", ".", "debug", "(", "\"source path: {0}\"", ".", "format", "(", "sourcepath", ")", ")", "with", "open", "(", "sourcepath", ",", "'w'", ")", "as", "sourcefile", ":", "sourcefile", ".", "write", "(", "source", ")", "log", ".", "debug", "(", "\"include path: {0}\"", ".", "format", "(", "ROOT", ".", "gSystem", ".", "GetIncludePath", "(", ")", ")", ")", "if", "(", "ROOT", ".", "gSystem", ".", "CompileMacro", "(", "sourcepath", ",", "'k-'", ",", "libname", ",", "DICTS_PATH", ")", "!=", "1", ")", ":", "raise", "RuntimeError", "(", "\"failed to compile the library for '{0}'\"", ".", "format", "(", "sourcepath", ")", ")", "LOADED_DICTS", "[", "unique_name", "]", "=", "None", "NEW_DICTS", "=", "True" ]
Compile and load the reflection dictionary for a type. If the requested dictionary has already been cached, then load that instead. Parameters ---------- declaration : str A type declaration (for example "vector<int>") headers : str or list of str A header file or list of header files required to compile the dictionary for this type. has_iterators : bool If True, then include iterators in the dictionary generation.
[ "Compile", "and", "load", "the", "reflection", "dictionary", "for", "a", "type", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L318-L406
train
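A sketch of `generate` compiling (or loading from cache) a dictionary for a nested STL type; it assumes ROOT plus a working compiler for ACLiC, with the headers given in the ';'-separated string form described in the docstring::

    import rootpy.stl as stl

    stl.generate('map<string,vector<float> >',
                 '<map>;<string>;<vector>',
                 has_iterators=True)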
rootpy/rootpy
rootpy/stl.py
CPPType.ensure_built
def ensure_built(self, headers=None): """ Make sure that a dictionary exists for this type. """ if not self.params: return else: for child in self.params: child.ensure_built(headers=headers) if headers is None: headers = self.guess_headers generate(str(self), headers, has_iterators=self.name in HAS_ITERATORS)
python
def ensure_built(self, headers=None): """ Make sure that a dictionary exists for this type. """ if not self.params: return else: for child in self.params: child.ensure_built(headers=headers) if headers is None: headers = self.guess_headers generate(str(self), headers, has_iterators=self.name in HAS_ITERATORS)
[ "def", "ensure_built", "(", "self", ",", "headers", "=", "None", ")", ":", "if", "not", "self", ".", "params", ":", "return", "else", ":", "for", "child", "in", "self", ".", "params", ":", "child", ".", "ensure_built", "(", "headers", "=", "headers", ")", "if", "headers", "is", "None", ":", "headers", "=", "self", ".", "guess_headers", "generate", "(", "str", "(", "self", ")", ",", "headers", ",", "has_iterators", "=", "self", ".", "name", "in", "HAS_ITERATORS", ")" ]
Make sure that a dictionary exists for this type.
[ "Make", "sure", "that", "a", "dictionary", "exists", "for", "this", "type", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L202-L214
train
rootpy/rootpy
rootpy/stl.py
CPPType.guess_headers
def guess_headers(self): """ Attempt to guess what headers may be required in order to use this type. Returns `guess_headers` of all children recursively. * If the typename is in the :const:`KNOWN_TYPES` dictionary, use the header specified there * If it's an STL type, include <{type}> * If it exists in the ROOT namespace and begins with T, include <{type}.h> """ name = self.name.replace("*", "") headers = [] if name in KNOWN_TYPES: headers.append(KNOWN_TYPES[name]) elif name in STL: headers.append('<{0}>'.format(name)) elif hasattr(ROOT, name) and name.startswith("T"): headers.append('<{0}.h>'.format(name)) elif '::' in name: headers.append('<{0}.h>'.format(name.replace('::', '/'))) elif name == 'allocator': headers.append('<memory>') else: try: # is this just a basic type? CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True) except ParseException as e: # nope... I don't know what it is log.warning( "unable to guess headers required for {0}".format(name)) if self.params: for child in self.params: headers.extend(child.guess_headers) # remove duplicates return list(set(headers))
python
def guess_headers(self): """ Attempt to guess what headers may be required in order to use this type. Returns `guess_headers` of all children recursively. * If the typename is in the :const:`KNOWN_TYPES` dictionary, use the header specified there * If it's an STL type, include <{type}> * If it exists in the ROOT namespace and begins with T, include <{type}.h> """ name = self.name.replace("*", "") headers = [] if name in KNOWN_TYPES: headers.append(KNOWN_TYPES[name]) elif name in STL: headers.append('<{0}>'.format(name)) elif hasattr(ROOT, name) and name.startswith("T"): headers.append('<{0}.h>'.format(name)) elif '::' in name: headers.append('<{0}.h>'.format(name.replace('::', '/'))) elif name == 'allocator': headers.append('<memory>') else: try: # is this just a basic type? CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True) except ParseException as e: # nope... I don't know what it is log.warning( "unable to guess headers required for {0}".format(name)) if self.params: for child in self.params: headers.extend(child.guess_headers) # remove duplicates return list(set(headers))
[ "def", "guess_headers", "(", "self", ")", ":", "name", "=", "self", ".", "name", ".", "replace", "(", "\"*\"", ",", "\"\"", ")", "headers", "=", "[", "]", "if", "name", "in", "KNOWN_TYPES", ":", "headers", ".", "append", "(", "KNOWN_TYPES", "[", "name", "]", ")", "elif", "name", "in", "STL", ":", "headers", ".", "append", "(", "'<{0}>'", ".", "format", "(", "name", ")", ")", "elif", "hasattr", "(", "ROOT", ",", "name", ")", "and", "name", ".", "startswith", "(", "\"T\"", ")", ":", "headers", ".", "append", "(", "'<{0}.h>'", ".", "format", "(", "name", ")", ")", "elif", "'::'", "in", "name", ":", "headers", ".", "append", "(", "'<{0}.h>'", ".", "format", "(", "name", ".", "replace", "(", "'::'", ",", "'/'", ")", ")", ")", "elif", "name", "==", "'allocator'", ":", "headers", ".", "append", "(", "'<memory>'", ")", "else", ":", "try", ":", "# is this just a basic type?", "CPPGrammar", ".", "BASIC_TYPE", ".", "parseString", "(", "name", ",", "parseAll", "=", "True", ")", "except", "ParseException", "as", "e", ":", "# nope... I don't know what it is", "log", ".", "warning", "(", "\"unable to guess headers required for {0}\"", ".", "format", "(", "name", ")", ")", "if", "self", ".", "params", ":", "for", "child", "in", "self", ".", "params", ":", "headers", ".", "extend", "(", "child", ".", "guess_headers", ")", "# remove duplicates", "return", "list", "(", "set", "(", "headers", ")", ")" ]
Attempt to guess what headers may be required in order to use this type. Returns `guess_headers` of all children recursively. * If the typename is in the :const:`KNOWN_TYPES` dictionary, use the header specified there * If it's an STL type, include <{type}> * If it exists in the ROOT namespace and begins with T, include <{type}.h>
[ "Attempt", "to", "guess", "what", "headers", "may", "be", "required", "in", "order", "to", "use", "this", "type", ".", "Returns", "guess_headers", "of", "all", "children", "recursively", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L217-L252
train
rootpy/rootpy
rootpy/stl.py
CPPType.cls
def cls(self):
        """
        Return the class definition for this type
        """
        # TODO: register the resulting type?
        return SmartTemplate(self.name)(", ".join(map(str, self.params)))
python
def cls(self):
        """
        Return the class definition for this type
        """
        # TODO: register the resulting type?
        return SmartTemplate(self.name)(", ".join(map(str, self.params)))
[ "def", "cls", "(", "self", ")", ":", "# TODO: register the resulting type?", "return", "SmartTemplate", "(", "self", ".", "name", ")", "(", "\", \"", ".", "join", "(", "map", "(", "str", ",", "self", ".", "params", ")", ")", ")" ]
Return the class definition for this type
[ "Return", "the", "class", "definition", "for", "this", "type" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L255-L260
train
rootpy/rootpy
rootpy/stl.py
CPPType.from_string
def from_string(cls, string):
        """
        Parse ``string`` into a CPPType instance
        """
        cls.TYPE.setParseAction(cls.make)
        try:
            return cls.TYPE.parseString(string, parseAll=True)[0]
        except ParseException:
            log.error("Failed to parse '{0}'".format(string))
            raise
python
def from_string(cls, string):
        """
        Parse ``string`` into a CPPType instance
        """
        cls.TYPE.setParseAction(cls.make)
        try:
            return cls.TYPE.parseString(string, parseAll=True)[0]
        except ParseException:
            log.error("Failed to parse '{0}'".format(string))
            raise
[ "def", "from_string", "(", "cls", ",", "string", ")", ":", "cls", ".", "TYPE", ".", "setParseAction", "(", "cls", ".", "make", ")", "try", ":", "return", "cls", ".", "TYPE", ".", "parseString", "(", "string", ",", "parseAll", "=", "True", ")", "[", "0", "]", "except", "ParseException", ":", "log", ".", "error", "(", "\"Failed to parse '{0}'\"", ".", "format", "(", "string", ")", ")", "raise" ]
Parse ``string`` into a CPPType instance
[ "Parse", "string", "into", "a", "CPPType", "instance" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L275-L284
train
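As a usage sketch tying the CPPType members above together (from_string, guess_headers and cls): the template string is purely illustrative, and the assumption that guess_headers and cls are exposed as properties follows only from how the source above accesses them as attributes.

from rootpy.stl import CPPType

# Parse a C++ template typename string into a CPPType parse tree.
t = CPPType.from_string('map<string, vector<int> >')

# Collect the headers that would likely be needed to build a dictionary
# for this type (accessed as an attribute, as in the recursive call above).
print(t.guess_headers)

# Build the actual template class from the parsed name and parameters;
# this triggers dictionary generation, so it needs a working ROOT setup.
Map = t.cls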
rootpy/rootpy
rootpy/utils/cinterface.py
callback
def callback(cfunc):
    """
    Turn a ctypes CFUNCTYPE instance into a value which can be passed
    into PyROOT
    """
    # Note:
    # ROOT wants a c_voidp whose addressof() == the call site of the target
    # function. This hackery is necessary to achieve that.
    return C.c_voidp.from_address(C.cast(cfunc, C.c_voidp).value)
python
def callback(cfunc):
    """
    Turn a ctypes CFUNCTYPE instance into a value which can be passed
    into PyROOT
    """
    # Note:
    # ROOT wants a c_voidp whose addressof() == the call site of the target
    # function. This hackery is necessary to achieve that.
    return C.c_voidp.from_address(C.cast(cfunc, C.c_voidp).value)
[ "def", "callback", "(", "cfunc", ")", ":", "# Note:", "# ROOT wants a c_voidp whose addressof() == the call site of the target", "# function. This hackery is necessary to achieve that.", "return", "C", ".", "c_voidp", ".", "from_address", "(", "C", ".", "cast", "(", "cfunc", ",", "C", ".", "c_voidp", ")", ".", "value", ")" ]
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
[ "Turn", "a", "ctypes", "CFUNCTYPE", "instance", "into", "a", "value", "which", "can", "be", "passed", "into", "PyROOT" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/cinterface.py#L21-L28
train
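A small self-contained sketch of how callback would be used; the void(*)() prototype and the handler name are invented for illustration, and only the ctypes part is exercised here (no ROOT call is made).

import ctypes as C
from rootpy.utils.cinterface import callback

# Prototype for a C function taking no arguments and returning nothing.
PROTO = C.CFUNCTYPE(None)

def py_handler():
    print("called back from C/ROOT")

cfunc = PROTO(py_handler)   # keep a reference so it is not garbage collected
cb = callback(cfunc)        # c_voidp located at the callable's address
print(C.addressof(cb) == C.cast(cfunc, C.c_voidp).value)  # True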
rootpy/rootpy
rootpy/utils/cinterface.py
objectproxy_realaddress
def objectproxy_realaddress(obj):
    """
    Obtain a real address as an integer from an objectproxy.
    """
    voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
    return C.addressof(C.c_char.from_buffer(voidp))
python
def objectproxy_realaddress(obj):
    """
    Obtain a real address as an integer from an objectproxy.
    """
    voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
    return C.addressof(C.c_char.from_buffer(voidp))
[ "def", "objectproxy_realaddress", "(", "obj", ")", ":", "voidp", "=", "QROOT", ".", "TPython", ".", "ObjectProxy_AsVoidPtr", "(", "obj", ")", "return", "C", ".", "addressof", "(", "C", ".", "c_char", ".", "from_buffer", "(", "voidp", ")", ")" ]
Obtain a real address as an integer from an objectproxy.
[ "Obtain", "a", "real", "address", "as", "an", "integer", "from", "an", "objectproxy", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/cinterface.py#L31-L36
train
rootpy/rootpy
rootpy/plotting/style/__init__.py
set_style
def set_style(style, mpl=False, **kwargs): """ If mpl is False accept either style name or a TStyle instance. If mpl is True accept either style name or a matplotlib.rcParams-like dictionary """ if mpl: import matplotlib as mpl style_dictionary = {} if isinstance(style, string_types): style_dictionary = get_style(style, mpl=True, **kwargs) log.info("using matplotlib style '{0}'".format(style)) elif isinstance(style, dict): style_dictionary = style log.info("using user-defined matplotlib style") else: raise TypeError("style must be a matplotlib style name or dict") for k, v in style_dictionary.items(): mpl.rcParams[k] = v else: if isinstance(style, string_types): style = get_style(style, **kwargs) log.info("using ROOT style '{0}'".format(style.GetName())) style.cd()
python
def set_style(style, mpl=False, **kwargs): """ If mpl is False accept either style name or a TStyle instance. If mpl is True accept either style name or a matplotlib.rcParams-like dictionary """ if mpl: import matplotlib as mpl style_dictionary = {} if isinstance(style, string_types): style_dictionary = get_style(style, mpl=True, **kwargs) log.info("using matplotlib style '{0}'".format(style)) elif isinstance(style, dict): style_dictionary = style log.info("using user-defined matplotlib style") else: raise TypeError("style must be a matplotlib style name or dict") for k, v in style_dictionary.items(): mpl.rcParams[k] = v else: if isinstance(style, string_types): style = get_style(style, **kwargs) log.info("using ROOT style '{0}'".format(style.GetName())) style.cd()
[ "def", "set_style", "(", "style", ",", "mpl", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "mpl", ":", "import", "matplotlib", "as", "mpl", "style_dictionary", "=", "{", "}", "if", "isinstance", "(", "style", ",", "string_types", ")", ":", "style_dictionary", "=", "get_style", "(", "style", ",", "mpl", "=", "True", ",", "*", "*", "kwargs", ")", "log", ".", "info", "(", "\"using matplotlib style '{0}'\"", ".", "format", "(", "style", ")", ")", "elif", "isinstance", "(", "style", ",", "dict", ")", ":", "style_dictionary", "=", "style", "log", ".", "info", "(", "\"using user-defined matplotlib style\"", ")", "else", ":", "raise", "TypeError", "(", "\"style must be a matplotlib style name or dict\"", ")", "for", "k", ",", "v", "in", "style_dictionary", ".", "items", "(", ")", ":", "mpl", ".", "rcParams", "[", "k", "]", "=", "v", "else", ":", "if", "isinstance", "(", "style", ",", "string_types", ")", ":", "style", "=", "get_style", "(", "style", ",", "*", "*", "kwargs", ")", "log", ".", "info", "(", "\"using ROOT style '{0}'\"", ".", "format", "(", "style", ".", "GetName", "(", ")", ")", ")", "style", ".", "cd", "(", ")" ]
If mpl is False accept either style name or a TStyle instance. If mpl is True accept either style name or a matplotlib.rcParams-like dictionary
[ "If", "mpl", "is", "False", "accept", "either", "style", "name", "or", "a", "TStyle", "instance", ".", "If", "mpl", "is", "True", "accept", "either", "style", "name", "or", "a", "matplotlib", ".", "rcParams", "-", "like", "dictionary" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/style/__init__.py#L56-L80
train
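For illustration, two hedged ways of calling set_style based on the signature above; the 'ATLAS' style name and the rcParams keys are assumptions and may not be registered in every installation.

from rootpy.plotting.style import set_style

# Apply a named ROOT style (assumes a style registered under this name).
set_style('ATLAS')

# Apply a matplotlib rcParams-like dictionary instead.
set_style({'font.size': 14, 'figure.figsize': (6, 4)}, mpl=True)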
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.cd_previous
def cd_previous(self):
        """
        cd to the gDirectory before this file was opened.
        """
        if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
            return False
        if isinstance(self._prev_dir, ROOT.TFile):
            if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
                self._prev_dir.cd()
                return True
            return False
        if not self._prev_dir.IsWritable():
            # avoid warning from ROOT stating file is not writable
            return False
        prev_file = self._prev_dir.GetFile()
        if prev_file and prev_file.IsOpen():
            self._prev_dir.cd()
            return True
        return False
python
def cd_previous(self):
        """
        cd to the gDirectory before this file was opened.
        """
        if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
            return False
        if isinstance(self._prev_dir, ROOT.TFile):
            if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
                self._prev_dir.cd()
                return True
            return False
        if not self._prev_dir.IsWritable():
            # avoid warning from ROOT stating file is not writable
            return False
        prev_file = self._prev_dir.GetFile()
        if prev_file and prev_file.IsOpen():
            self._prev_dir.cd()
            return True
        return False
[ "def", "cd_previous", "(", "self", ")", ":", "if", "self", ".", "_prev_dir", "is", "None", "or", "isinstance", "(", "self", ".", "_prev_dir", ",", "ROOT", ".", "TROOT", ")", ":", "return", "False", "if", "isinstance", "(", "self", ".", "_prev_dir", ",", "ROOT", ".", "TFile", ")", ":", "if", "self", ".", "_prev_dir", ".", "IsOpen", "(", ")", "and", "self", ".", "_prev_dir", ".", "IsWritable", "(", ")", ":", "self", ".", "_prev_dir", ".", "cd", "(", ")", "return", "True", "return", "False", "if", "not", "self", ".", "_prev_dir", ".", "IsWritable", "(", ")", ":", "# avoid warning from ROOT stating file is not writable", "return", "False", "prev_file", "=", "self", ".", "_prev_dir", ".", "GetFile", "(", ")", "if", "prev_file", "and", "prev_file", ".", "IsOpen", "(", ")", ":", "self", ".", "_prev_dir", ".", "cd", "(", ")", "return", "True", "return", "False" ]
cd to the gDirectory before this file was opened.
[ "cd", "to", "the", "gDirectory", "before", "this", "file", "was", "open", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L229-L247
train
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.Close
def Close(self, *args):
        """
        Like ROOT's Close but reverts to the gDirectory before this file was
        opened.
        """
        super(_DirectoryBase, self).Close(*args)
        return self.cd_previous()
python
def Close(self, *args):
        """
        Like ROOT's Close but reverts to the gDirectory before this file was
        opened.
        """
        super(_DirectoryBase, self).Close(*args)
        return self.cd_previous()
[ "def", "Close", "(", "self", ",", "*", "args", ")", ":", "super", "(", "_DirectoryBase", ",", "self", ")", ".", "Close", "(", "*", "args", ")", "return", "self", ".", "cd_previous", "(", ")" ]
Like ROOT's Close but reverts to the gDirectory before this file was opened.
[ "Like", "ROOT", "s", "Close", "but", "reverts", "to", "the", "gDirectory", "before", "this", "file", "was", "opened", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L249-L255
train
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.keys
def keys(self, latest=False): """ Return a list of the keys in this directory. Parameters ---------- latest : bool, optional (default=False) If True then return a list of keys with unique names where only the key with the highest cycle number is included where multiple keys exist with the same name. Returns ------- keys : list List of keys """ if latest: keys = {} for key in self.keys(): name = key.GetName() if name in keys: if key.GetCycle() > keys[name].GetCycle(): keys[name] = key else: keys[name] = key return keys.values() return [asrootpy(key) for key in self.GetListOfKeys()]
python
def keys(self, latest=False): """ Return a list of the keys in this directory. Parameters ---------- latest : bool, optional (default=False) If True then return a list of keys with unique names where only the key with the highest cycle number is included where multiple keys exist with the same name. Returns ------- keys : list List of keys """ if latest: keys = {} for key in self.keys(): name = key.GetName() if name in keys: if key.GetCycle() > keys[name].GetCycle(): keys[name] = key else: keys[name] = key return keys.values() return [asrootpy(key) for key in self.GetListOfKeys()]
[ "def", "keys", "(", "self", ",", "latest", "=", "False", ")", ":", "if", "latest", ":", "keys", "=", "{", "}", "for", "key", "in", "self", ".", "keys", "(", ")", ":", "name", "=", "key", ".", "GetName", "(", ")", "if", "name", "in", "keys", ":", "if", "key", ".", "GetCycle", "(", ")", ">", "keys", "[", "name", "]", ".", "GetCycle", "(", ")", ":", "keys", "[", "name", "]", "=", "key", "else", ":", "keys", "[", "name", "]", "=", "key", "return", "keys", ".", "values", "(", ")", "return", "[", "asrootpy", "(", "key", ")", "for", "key", "in", "self", ".", "GetListOfKeys", "(", ")", "]" ]
Return a list of the keys in this directory. Parameters ---------- latest : bool, optional (default=False) If True then return a list of keys with unique names where only the key with the highest cycle number is included where multiple keys exist with the same name. Returns ------- keys : list List of keys
[ "Return", "a", "list", "of", "the", "keys", "in", "this", "directory", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L289-L318
train
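A sketch of keys(latest=True) in use; the file name is hypothetical and root_open is assumed to be importable from rootpy.io.

from rootpy.io import root_open

with root_open('data.root') as f:      # hypothetical input file
    # With latest=True, only the highest-cycle key is kept per name,
    # which is usually what you want when a file contains backup cycles.
    for key in f.keys(latest=True):
        print(key.GetName(), key.GetCycle(), key.GetClassName())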
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.Get
def Get(self, path, rootpy=True, **kwargs): """ Return the requested object cast as its corresponding subclass in rootpy if one exists and ``rootpy=True``, otherwise return the unadulterated TObject. """ thing = super(_DirectoryBase, self).Get(path) if not thing: raise DoesNotExist # Ensure that the file we took the object from is alive at least as # long as the object being taken from it. # Note, Python does *not* own `thing`, it is ROOT's responsibility to # delete it in the C++ sense. (SetOwnership is False). However, ROOT # will delete the object when the TFile's destructor is run. # Therefore, when `thing` goes out of scope and the file referred to # by `this` has no references left, the file is destructed and calls # `thing`'s delete. # (this is thanks to the fact that weak referents (used by keepalive) # are notified when they are dead). keepalive(thing, self) if rootpy: return asrootpy(thing, **kwargs) return thing
python
def Get(self, path, rootpy=True, **kwargs): """ Return the requested object cast as its corresponding subclass in rootpy if one exists and ``rootpy=True``, otherwise return the unadulterated TObject. """ thing = super(_DirectoryBase, self).Get(path) if not thing: raise DoesNotExist # Ensure that the file we took the object from is alive at least as # long as the object being taken from it. # Note, Python does *not* own `thing`, it is ROOT's responsibility to # delete it in the C++ sense. (SetOwnership is False). However, ROOT # will delete the object when the TFile's destructor is run. # Therefore, when `thing` goes out of scope and the file referred to # by `this` has no references left, the file is destructed and calls # `thing`'s delete. # (this is thanks to the fact that weak referents (used by keepalive) # are notified when they are dead). keepalive(thing, self) if rootpy: return asrootpy(thing, **kwargs) return thing
[ "def", "Get", "(", "self", ",", "path", ",", "rootpy", "=", "True", ",", "*", "*", "kwargs", ")", ":", "thing", "=", "super", "(", "_DirectoryBase", ",", "self", ")", ".", "Get", "(", "path", ")", "if", "not", "thing", ":", "raise", "DoesNotExist", "# Ensure that the file we took the object from is alive at least as", "# long as the object being taken from it.", "# Note, Python does *not* own `thing`, it is ROOT's responsibility to", "# delete it in the C++ sense. (SetOwnership is False). However, ROOT", "# will delete the object when the TFile's destructor is run.", "# Therefore, when `thing` goes out of scope and the file referred to", "# by `this` has no references left, the file is destructed and calls", "# `thing`'s delete.", "# (this is thanks to the fact that weak referents (used by keepalive)", "# are notified when they are dead).", "keepalive", "(", "thing", ",", "self", ")", "if", "rootpy", ":", "return", "asrootpy", "(", "thing", ",", "*", "*", "kwargs", ")", "return", "thing" ]
Return the requested object cast as its corresponding subclass in rootpy if one exists and ``rootpy=True``, otherwise return the unadulterated TObject.
[ "Return", "the", "requested", "object", "cast", "as", "its", "corresponding", "subclass", "in", "rootpy", "if", "one", "exists", "and", "rootpy", "=", "True", "otherwise", "return", "the", "unadulterated", "TObject", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L321-L348
train
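A sketch of Get together with the DoesNotExist behaviour described above; the file and object paths are illustrative, and the assumption is that DoesNotExist can be imported from rootpy.io alongside root_open.

from rootpy.io import root_open, DoesNotExist

with root_open('data.root') as f:          # hypothetical input file
    try:
        h = f.Get('subdir/hist', rootpy=True)   # returned as a rootpy object
    except DoesNotExist:
        print("no such object in the file")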
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.GetKey
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
        """
        Override TDirectory's GetKey and also handle accessing keys nested
        arbitrarily deep in subdirectories.
        """
        key = super(_DirectoryBase, self).GetKey(path, cycle)
        if not key:
            raise DoesNotExist
        if rootpy:
            return asrootpy(key, **kwargs)
        return key
python
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
        """
        Override TDirectory's GetKey and also handle accessing keys nested
        arbitrarily deep in subdirectories.
        """
        key = super(_DirectoryBase, self).GetKey(path, cycle)
        if not key:
            raise DoesNotExist
        if rootpy:
            return asrootpy(key, **kwargs)
        return key
[ "def", "GetKey", "(", "self", ",", "path", ",", "cycle", "=", "9999", ",", "rootpy", "=", "True", ",", "*", "*", "kwargs", ")", ":", "key", "=", "super", "(", "_DirectoryBase", ",", "self", ")", ".", "GetKey", "(", "path", ",", "cycle", ")", "if", "not", "key", ":", "raise", "DoesNotExist", "if", "rootpy", ":", "return", "asrootpy", "(", "key", ",", "*", "*", "kwargs", ")", "return", "key" ]
Override TDirectory's GetKey and also handle accessing keys nested arbitrarily deep in subdirectories.
[ "Override", "TDirectory", "s", "GetKey", "and", "also", "handle", "accessing", "keys", "nested", "arbitrarily", "deep", "in", "subdirectories", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L360-L370
train
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.mkdir
def mkdir(self, path, title="", recurse=False): """ Make a new directory. If recurse is True, create parent directories as required. Return the newly created TDirectory. """ head, tail = os.path.split(os.path.normpath(path)) if tail == "": raise ValueError("invalid directory name: {0}".format(path)) with preserve_current_directory(): dest = self if recurse: parent_dirs = head.split(os.path.sep) for parent_dir in parent_dirs: try: newdest = dest.GetDirectory(parent_dir) dest = newdest except DoesNotExist: dest = dest.mkdir(parent_dir) elif head != "": dest = dest.GetDirectory(head) if tail in dest: raise ValueError("{0} already exists".format(path)) newdir = asrootpy(super(_DirectoryBase, dest).mkdir(tail, title)) return newdir
python
def mkdir(self, path, title="", recurse=False): """ Make a new directory. If recurse is True, create parent directories as required. Return the newly created TDirectory. """ head, tail = os.path.split(os.path.normpath(path)) if tail == "": raise ValueError("invalid directory name: {0}".format(path)) with preserve_current_directory(): dest = self if recurse: parent_dirs = head.split(os.path.sep) for parent_dir in parent_dirs: try: newdest = dest.GetDirectory(parent_dir) dest = newdest except DoesNotExist: dest = dest.mkdir(parent_dir) elif head != "": dest = dest.GetDirectory(head) if tail in dest: raise ValueError("{0} already exists".format(path)) newdir = asrootpy(super(_DirectoryBase, dest).mkdir(tail, title)) return newdir
[ "def", "mkdir", "(", "self", ",", "path", ",", "title", "=", "\"\"", ",", "recurse", "=", "False", ")", ":", "head", ",", "tail", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "normpath", "(", "path", ")", ")", "if", "tail", "==", "\"\"", ":", "raise", "ValueError", "(", "\"invalid directory name: {0}\"", ".", "format", "(", "path", ")", ")", "with", "preserve_current_directory", "(", ")", ":", "dest", "=", "self", "if", "recurse", ":", "parent_dirs", "=", "head", ".", "split", "(", "os", ".", "path", ".", "sep", ")", "for", "parent_dir", "in", "parent_dirs", ":", "try", ":", "newdest", "=", "dest", ".", "GetDirectory", "(", "parent_dir", ")", "dest", "=", "newdest", "except", "DoesNotExist", ":", "dest", "=", "dest", ".", "mkdir", "(", "parent_dir", ")", "elif", "head", "!=", "\"\"", ":", "dest", "=", "dest", ".", "GetDirectory", "(", "head", ")", "if", "tail", "in", "dest", ":", "raise", "ValueError", "(", "\"{0} already exists\"", ".", "format", "(", "path", ")", ")", "newdir", "=", "asrootpy", "(", "super", "(", "_DirectoryBase", ",", "dest", ")", ".", "mkdir", "(", "tail", ",", "title", ")", ")", "return", "newdir" ]
Make a new directory. If recurse is True, create parent directories as required. Return the newly created TDirectory.
[ "Make", "a", "new", "directory", ".", "If", "recurse", "is", "True", "create", "parent", "directories", "as", "required", ".", "Return", "the", "newly", "created", "TDirectory", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L385-L408
train
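A short sketch of mkdir with recurse=True; the output file and the directory names are invented.

from rootpy.io import root_open

with root_open('out.root', 'recreate') as f:    # hypothetical output file
    # Create nested directories in one call; the intermediate levels
    # ('a' and 'a/b') are created as needed because recurse=True.
    d = f.mkdir('a/b/c', title='nested', recurse=True)
    print(d.GetName())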
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.rm
def rm(self, path, cycle=';*'):
        """
        Delete an object at `path` relative to this directory
        """
        rdir = self
        with preserve_current_directory():
            dirname, objname = os.path.split(os.path.normpath(path))
            if dirname:
                rdir = rdir.Get(dirname)
            rdir.Delete(objname + cycle)
python
def rm(self, path, cycle=';*'):
        """
        Delete an object at `path` relative to this directory
        """
        rdir = self
        with preserve_current_directory():
            dirname, objname = os.path.split(os.path.normpath(path))
            if dirname:
                rdir = rdir.Get(dirname)
            rdir.Delete(objname + cycle)
[ "def", "rm", "(", "self", ",", "path", ",", "cycle", "=", "';*'", ")", ":", "rdir", "=", "self", "with", "preserve_current_directory", "(", ")", ":", "dirname", ",", "objname", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "normpath", "(", "path", ")", ")", "if", "dirname", ":", "rdir", "=", "rdir", ".", "Get", "(", "dirname", ")", "rdir", ".", "Delete", "(", "objname", "+", "cycle", ")" ]
Delete an object at `path` relative to this directory
[ "Delete", "an", "object", "at", "path", "relative", "to", "this", "directory" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L410-L419
train
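And a matching sketch for rm, deleting all cycles of an object nested one directory down; the file and object paths are illustrative.

from rootpy.io import root_open

with root_open('out.root', 'update') as f:      # hypothetical file
    # Remove every cycle of 'a/hist'; pass e.g. cycle=';1' to drop one cycle.
    f.rm('a/hist')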
rootpy/rootpy
rootpy/io/file.py
_DirectoryBase.copytree
def copytree(self, dest_dir, src=None, newname=None, exclude=None, overwrite=False): """ Copy this directory or just one contained object into another directory. Parameters ---------- dest_dir : string or Directory The destination directory. src : string, optional (default=None) If ``src`` is None then this entire directory is copied recursively otherwise if ``src`` is a string path to an object relative to this directory, only that object will be copied. The copied object can optionally be given a ``newname``. newname : string, optional (default=None) An optional new name for the copied object. exclude : callable, optional (default=None) ``exclude`` can optionally be a function which takes ``(path, object_name)`` and if returns True excludes objects from being copied if the entire directory is being copied recursively. overwrite : bool, optional (default=False) If True, then overwrite existing objects with the same name. """ def copy_object(obj, dest, name=None): if name is None: name = obj.GetName() if not overwrite and name in dest: raise ValueError( "{0} already exists in {1} and `overwrite=False`".format( name, dest._path)) dest.cd() if isinstance(obj, ROOT.R.TTree): new_obj = obj.CloneTree(-1, "fast") new_obj.Write(name, ROOT.R.TObject.kOverwrite) else: obj.Write(name, ROOT.R.TObject.kOverwrite) with preserve_current_directory(): if isinstance(src, string_types): src = asrootpy(self.Get(src)) else: src = self if isinstance(dest_dir, string_types): try: dest_dir = asrootpy(self.GetDirectory(dest_dir)) except DoesNotExist: dest_dir = self.mkdir(dest_dir) if isinstance(src, ROOT.R.TDirectory): # Copy a directory cp_name = newname if newname is not None else src.GetName() # See if the directory already exists if cp_name not in dest_dir: # Destination directory doesn't exist, so make a new one new_dir = dest_dir.mkdir(cp_name) else: new_dir = dest_dir.get(cp_name) # Copy everything in the src directory to the destination for (path, dirnames, objects) in src.walk(maxdepth=0): # Copy all the objects for object_name in objects: if exclude and exclude(path, object_name): continue thing = src.Get(object_name) copy_object(thing, new_dir) for dirname in dirnames: if exclude and exclude(path, dirname): continue rdir = src.GetDirectory(dirname) # Recursively copy objects in subdirectories rdir.copytree( new_dir, exclude=exclude, overwrite=overwrite) else: # Copy an object copy_object(src, dest_dir, name=newname)
python
def copytree(self, dest_dir, src=None, newname=None, exclude=None, overwrite=False): """ Copy this directory or just one contained object into another directory. Parameters ---------- dest_dir : string or Directory The destination directory. src : string, optional (default=None) If ``src`` is None then this entire directory is copied recursively otherwise if ``src`` is a string path to an object relative to this directory, only that object will be copied. The copied object can optionally be given a ``newname``. newname : string, optional (default=None) An optional new name for the copied object. exclude : callable, optional (default=None) ``exclude`` can optionally be a function which takes ``(path, object_name)`` and if returns True excludes objects from being copied if the entire directory is being copied recursively. overwrite : bool, optional (default=False) If True, then overwrite existing objects with the same name. """ def copy_object(obj, dest, name=None): if name is None: name = obj.GetName() if not overwrite and name in dest: raise ValueError( "{0} already exists in {1} and `overwrite=False`".format( name, dest._path)) dest.cd() if isinstance(obj, ROOT.R.TTree): new_obj = obj.CloneTree(-1, "fast") new_obj.Write(name, ROOT.R.TObject.kOverwrite) else: obj.Write(name, ROOT.R.TObject.kOverwrite) with preserve_current_directory(): if isinstance(src, string_types): src = asrootpy(self.Get(src)) else: src = self if isinstance(dest_dir, string_types): try: dest_dir = asrootpy(self.GetDirectory(dest_dir)) except DoesNotExist: dest_dir = self.mkdir(dest_dir) if isinstance(src, ROOT.R.TDirectory): # Copy a directory cp_name = newname if newname is not None else src.GetName() # See if the directory already exists if cp_name not in dest_dir: # Destination directory doesn't exist, so make a new one new_dir = dest_dir.mkdir(cp_name) else: new_dir = dest_dir.get(cp_name) # Copy everything in the src directory to the destination for (path, dirnames, objects) in src.walk(maxdepth=0): # Copy all the objects for object_name in objects: if exclude and exclude(path, object_name): continue thing = src.Get(object_name) copy_object(thing, new_dir) for dirname in dirnames: if exclude and exclude(path, dirname): continue rdir = src.GetDirectory(dirname) # Recursively copy objects in subdirectories rdir.copytree( new_dir, exclude=exclude, overwrite=overwrite) else: # Copy an object copy_object(src, dest_dir, name=newname)
[ "def", "copytree", "(", "self", ",", "dest_dir", ",", "src", "=", "None", ",", "newname", "=", "None", ",", "exclude", "=", "None", ",", "overwrite", "=", "False", ")", ":", "def", "copy_object", "(", "obj", ",", "dest", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "name", "=", "obj", ".", "GetName", "(", ")", "if", "not", "overwrite", "and", "name", "in", "dest", ":", "raise", "ValueError", "(", "\"{0} already exists in {1} and `overwrite=False`\"", ".", "format", "(", "name", ",", "dest", ".", "_path", ")", ")", "dest", ".", "cd", "(", ")", "if", "isinstance", "(", "obj", ",", "ROOT", ".", "R", ".", "TTree", ")", ":", "new_obj", "=", "obj", ".", "CloneTree", "(", "-", "1", ",", "\"fast\"", ")", "new_obj", ".", "Write", "(", "name", ",", "ROOT", ".", "R", ".", "TObject", ".", "kOverwrite", ")", "else", ":", "obj", ".", "Write", "(", "name", ",", "ROOT", ".", "R", ".", "TObject", ".", "kOverwrite", ")", "with", "preserve_current_directory", "(", ")", ":", "if", "isinstance", "(", "src", ",", "string_types", ")", ":", "src", "=", "asrootpy", "(", "self", ".", "Get", "(", "src", ")", ")", "else", ":", "src", "=", "self", "if", "isinstance", "(", "dest_dir", ",", "string_types", ")", ":", "try", ":", "dest_dir", "=", "asrootpy", "(", "self", ".", "GetDirectory", "(", "dest_dir", ")", ")", "except", "DoesNotExist", ":", "dest_dir", "=", "self", ".", "mkdir", "(", "dest_dir", ")", "if", "isinstance", "(", "src", ",", "ROOT", ".", "R", ".", "TDirectory", ")", ":", "# Copy a directory", "cp_name", "=", "newname", "if", "newname", "is", "not", "None", "else", "src", ".", "GetName", "(", ")", "# See if the directory already exists", "if", "cp_name", "not", "in", "dest_dir", ":", "# Destination directory doesn't exist, so make a new one", "new_dir", "=", "dest_dir", ".", "mkdir", "(", "cp_name", ")", "else", ":", "new_dir", "=", "dest_dir", ".", "get", "(", "cp_name", ")", "# Copy everything in the src directory to the destination", "for", "(", "path", ",", "dirnames", ",", "objects", ")", "in", "src", ".", "walk", "(", "maxdepth", "=", "0", ")", ":", "# Copy all the objects", "for", "object_name", "in", "objects", ":", "if", "exclude", "and", "exclude", "(", "path", ",", "object_name", ")", ":", "continue", "thing", "=", "src", ".", "Get", "(", "object_name", ")", "copy_object", "(", "thing", ",", "new_dir", ")", "for", "dirname", "in", "dirnames", ":", "if", "exclude", "and", "exclude", "(", "path", ",", "dirname", ")", ":", "continue", "rdir", "=", "src", ".", "GetDirectory", "(", "dirname", ")", "# Recursively copy objects in subdirectories", "rdir", ".", "copytree", "(", "new_dir", ",", "exclude", "=", "exclude", ",", "overwrite", "=", "overwrite", ")", "else", ":", "# Copy an object", "copy_object", "(", "src", ",", "dest_dir", ",", "name", "=", "newname", ")" ]
Copy this directory or just one contained object into another directory. Parameters ---------- dest_dir : string or Directory The destination directory. src : string, optional (default=None) If ``src`` is None then this entire directory is copied recursively otherwise if ``src`` is a string path to an object relative to this directory, only that object will be copied. The copied object can optionally be given a ``newname``. newname : string, optional (default=None) An optional new name for the copied object. exclude : callable, optional (default=None) ``exclude`` can optionally be a function which takes ``(path, object_name)`` and if returns True excludes objects from being copied if the entire directory is being copied recursively. overwrite : bool, optional (default=False) If True, then overwrite existing objects with the same name.
[ "Copy", "this", "directory", "or", "just", "one", "contained", "object", "into", "another", "directory", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L424-L506
train
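A hedged example of copytree based on the parameters documented above; the file names and the exclude rule are invented for illustration.

from rootpy.io import root_open

with root_open('src.root') as src, root_open('dst.root', 'recreate') as dst:
    # Recursively copy everything except objects whose name starts with
    # 'tmp_', overwriting same-named objects in the destination.
    src.copytree(
        dst,
        exclude=lambda path, name: name.startswith('tmp_'),
        overwrite=True)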
rootpy/rootpy
rootpy/io/file.py
_FileBase.find
def find(self, regexp, negate_regexp=False, class_pattern=None, find_fnc=re.search, refresh_cache=False): """ yield the full path of the matching regular expression and the match itself """ if refresh_cache or not hasattr(self, 'cache'): self._populate_cache() b = self.cache split_regexp = regexp.split('/') # traverse as deep as possible in the cache # special case if the first character is not the root, i.e. not "" if split_regexp[0] == '': for d in split_regexp: if d in b: b = b[d] else: break else: b = b[''] # perform the search for path, (obj, classname) in b['obj']: if class_pattern: if not fnmatch(classname, class_pattern): continue joined_path = os.path.join(*['/', path, obj]) result = find_fnc(regexp, joined_path) if (result is not None) ^ negate_regexp: yield joined_path, result
python
def find(self, regexp, negate_regexp=False, class_pattern=None, find_fnc=re.search, refresh_cache=False): """ yield the full path of the matching regular expression and the match itself """ if refresh_cache or not hasattr(self, 'cache'): self._populate_cache() b = self.cache split_regexp = regexp.split('/') # traverse as deep as possible in the cache # special case if the first character is not the root, i.e. not "" if split_regexp[0] == '': for d in split_regexp: if d in b: b = b[d] else: break else: b = b[''] # perform the search for path, (obj, classname) in b['obj']: if class_pattern: if not fnmatch(classname, class_pattern): continue joined_path = os.path.join(*['/', path, obj]) result = find_fnc(regexp, joined_path) if (result is not None) ^ negate_regexp: yield joined_path, result
[ "def", "find", "(", "self", ",", "regexp", ",", "negate_regexp", "=", "False", ",", "class_pattern", "=", "None", ",", "find_fnc", "=", "re", ".", "search", ",", "refresh_cache", "=", "False", ")", ":", "if", "refresh_cache", "or", "not", "hasattr", "(", "self", ",", "'cache'", ")", ":", "self", ".", "_populate_cache", "(", ")", "b", "=", "self", ".", "cache", "split_regexp", "=", "regexp", ".", "split", "(", "'/'", ")", "# traverse as deep as possible in the cache", "# special case if the first character is not the root, i.e. not \"\"", "if", "split_regexp", "[", "0", "]", "==", "''", ":", "for", "d", "in", "split_regexp", ":", "if", "d", "in", "b", ":", "b", "=", "b", "[", "d", "]", "else", ":", "break", "else", ":", "b", "=", "b", "[", "''", "]", "# perform the search", "for", "path", ",", "(", "obj", ",", "classname", ")", "in", "b", "[", "'obj'", "]", ":", "if", "class_pattern", ":", "if", "not", "fnmatch", "(", "classname", ",", "class_pattern", ")", ":", "continue", "joined_path", "=", "os", ".", "path", ".", "join", "(", "*", "[", "'/'", ",", "path", ",", "obj", "]", ")", "result", "=", "find_fnc", "(", "regexp", ",", "joined_path", ")", "if", "(", "result", "is", "not", "None", ")", "^", "negate_regexp", ":", "yield", "joined_path", ",", "result" ]
yield the full path of the matching regular expression and the match itself
[ "yield", "the", "full", "path", "of", "the", "matching", "regular", "expression", "and", "the", "match", "itself" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L696-L730
train
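A sketch of find; the regular expression, the class pattern and the file name are illustrative.

import re
from rootpy.io import root_open

with root_open('data.root') as f:               # hypothetical input file
    # Yield (full_path, match) pairs for every TH1-derived object whose
    # path contains 'electron'.
    for path, match in f.find('electron', class_pattern='TH1*',
                              find_fnc=re.search):
        print(path)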
rootpy/rootpy
rootpy/interactive/rootwait.py
start_new_gui_thread
def start_new_gui_thread():
    """
    Attempt to start a new GUI thread, if possible.

    It is only possible to start one if there was one running on module
    import.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is not None:
        assert not PyGUIThread.isAlive(), "GUI thread already running!"
    assert _processRootEvents, (
        "GUI thread wasn't started when rootwait was imported, "
        "so it can't be restarted")
    ROOT.keeppolling = 1
    ROOT.PyGUIThread = threading.Thread(
        None, _processRootEvents, None, (ROOT,))
    ROOT.PyGUIThread.finishSchedule = _finishSchedule
    ROOT.PyGUIThread.setDaemon(1)
    ROOT.PyGUIThread.start()
    log.debug("successfully started a new GUI thread")
python
def start_new_gui_thread():
    """
    Attempt to start a new GUI thread, if possible.

    It is only possible to start one if there was one running on module
    import.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is not None:
        assert not PyGUIThread.isAlive(), "GUI thread already running!"
    assert _processRootEvents, (
        "GUI thread wasn't started when rootwait was imported, "
        "so it can't be restarted")
    ROOT.keeppolling = 1
    ROOT.PyGUIThread = threading.Thread(
        None, _processRootEvents, None, (ROOT,))
    ROOT.PyGUIThread.finishSchedule = _finishSchedule
    ROOT.PyGUIThread.setDaemon(1)
    ROOT.PyGUIThread.start()
    log.debug("successfully started a new GUI thread")
[ "def", "start_new_gui_thread", "(", ")", ":", "PyGUIThread", "=", "getattr", "(", "ROOT", ",", "'PyGUIThread'", ",", "None", ")", "if", "PyGUIThread", "is", "not", "None", ":", "assert", "not", "PyGUIThread", ".", "isAlive", "(", ")", ",", "\"GUI thread already running!\"", "assert", "_processRootEvents", ",", "(", "\"GUI thread wasn't started when rootwait was imported, \"", "\"so it can't be restarted\"", ")", "ROOT", ".", "keeppolling", "=", "1", "ROOT", ".", "PyGUIThread", "=", "threading", ".", "Thread", "(", "None", ",", "_processRootEvents", ",", "None", ",", "(", "ROOT", ",", ")", ")", "ROOT", ".", "PyGUIThread", ".", "finishSchedule", "=", "_finishSchedule", "ROOT", ".", "PyGUIThread", ".", "setDaemon", "(", "1", ")", "ROOT", ".", "PyGUIThread", ".", "start", "(", ")", "log", ".", "debug", "(", "\"successfully started a new GUI thread\"", ")" ]
Attempt to start a new GUI thread, if possible. It is only possible to start one if there was one running on module import.
[ "Attempt", "to", "start", "a", "new", "GUI", "thread", "if", "possible", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L85-L107
train
rootpy/rootpy
rootpy/interactive/rootwait.py
stop_gui_thread
def stop_gui_thread():
    """
    Try to stop the GUI thread. If it was running returns True,
    otherwise False.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is None or not PyGUIThread.isAlive():
        log.debug("no existing GUI thread is running")
        return False
    ROOT.keeppolling = 0
    try:
        PyGUIThread.finishSchedule()
    except AttributeError:
        log.debug("unable to call finishSchedule() on PyGUIThread")
        pass
    PyGUIThread.join()
    log.debug("successfully stopped the existing GUI thread")
    return True
python
def stop_gui_thread():
    """
    Try to stop the GUI thread. If it was running returns True,
    otherwise False.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is None or not PyGUIThread.isAlive():
        log.debug("no existing GUI thread is running")
        return False
    ROOT.keeppolling = 0
    try:
        PyGUIThread.finishSchedule()
    except AttributeError:
        log.debug("unable to call finishSchedule() on PyGUIThread")
        pass
    PyGUIThread.join()
    log.debug("successfully stopped the existing GUI thread")
    return True
[ "def", "stop_gui_thread", "(", ")", ":", "PyGUIThread", "=", "getattr", "(", "ROOT", ",", "'PyGUIThread'", ",", "None", ")", "if", "PyGUIThread", "is", "None", "or", "not", "PyGUIThread", ".", "isAlive", "(", ")", ":", "log", ".", "debug", "(", "\"no existing GUI thread is runnng\"", ")", "return", "False", "ROOT", ".", "keeppolling", "=", "0", "try", ":", "PyGUIThread", ".", "finishSchedule", "(", ")", "except", "AttributeError", ":", "log", ".", "debug", "(", "\"unable to call finishSchedule() on PyGUIThread\"", ")", "pass", "PyGUIThread", ".", "join", "(", ")", "log", ".", "debug", "(", "\"successfully stopped the existing GUI thread\"", ")", "return", "True" ]
Try to stop the GUI thread. If it was running returns True, otherwise False.
[ "Try", "to", "stop", "the", "GUI", "thread", ".", "If", "it", "was", "running", "returns", "True", "otherwise", "False", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L110-L129
train
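The two helpers above are typically used as a pair, pausing ROOT's event-polling thread around work that must not race with it; this is a usage sketch only.

from rootpy.interactive.rootwait import start_new_gui_thread, stop_gui_thread

was_running = stop_gui_thread()   # False if no GUI thread was polling
try:
    pass  # ... do work that must not race with the GUI thread ...
finally:
    if was_running:
        start_new_gui_thread()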
rootpy/rootpy
rootpy/interactive/rootwait.py
wait_for_zero_canvases
def wait_for_zero_canvases(middle_mouse_close=False): """ Wait for all canvases to be closed, or CTRL-c. If `middle_mouse_close`, middle click will shut the canvas. incpy.ignore """ if not __ACTIVE: wait_failover(wait_for_zero_canvases) return @dispatcher def count_canvases(): """ Count the number of active canvases and finish gApplication.Run() if there are none remaining. incpy.ignore """ if not get_visible_canvases(): try: ROOT.gSystem.ExitLoop() except AttributeError: # We might be exiting and ROOT.gROOT will raise an AttributeError pass @dispatcher def exit_application_loop(): """ Signal handler for CTRL-c to cause gApplication.Run() to finish. incpy.ignore """ ROOT.gSystem.ExitLoop() # Handle CTRL-c sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True) sh.Add() sh.Connect("Notified()", "TPyDispatcher", exit_application_loop, "Dispatch()") visible_canvases = get_visible_canvases() for canvas in visible_canvases: log.debug("waiting for canvas {0} to close".format(canvas.GetName())) canvas.Update() if middle_mouse_close: attach_event_handler(canvas) if not getattr(canvas, "_py_close_dispatcher_attached", False): # Attach a handler only once to each canvas canvas._py_close_dispatcher_attached = True canvas.Connect("Closed()", "TPyDispatcher", count_canvases, "Dispatch()") keepalive(canvas, count_canvases) if visible_canvases and not ROOT.gROOT.IsBatch(): run_application_until_done() # Disconnect from canvases for canvas in visible_canvases: if getattr(canvas, "_py_close_dispatcher_attached", False): canvas._py_close_dispatcher_attached = False canvas.Disconnect("Closed()", count_canvases, "Dispatch()")
python
def wait_for_zero_canvases(middle_mouse_close=False): """ Wait for all canvases to be closed, or CTRL-c. If `middle_mouse_close`, middle click will shut the canvas. incpy.ignore """ if not __ACTIVE: wait_failover(wait_for_zero_canvases) return @dispatcher def count_canvases(): """ Count the number of active canvases and finish gApplication.Run() if there are none remaining. incpy.ignore """ if not get_visible_canvases(): try: ROOT.gSystem.ExitLoop() except AttributeError: # We might be exiting and ROOT.gROOT will raise an AttributeError pass @dispatcher def exit_application_loop(): """ Signal handler for CTRL-c to cause gApplication.Run() to finish. incpy.ignore """ ROOT.gSystem.ExitLoop() # Handle CTRL-c sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True) sh.Add() sh.Connect("Notified()", "TPyDispatcher", exit_application_loop, "Dispatch()") visible_canvases = get_visible_canvases() for canvas in visible_canvases: log.debug("waiting for canvas {0} to close".format(canvas.GetName())) canvas.Update() if middle_mouse_close: attach_event_handler(canvas) if not getattr(canvas, "_py_close_dispatcher_attached", False): # Attach a handler only once to each canvas canvas._py_close_dispatcher_attached = True canvas.Connect("Closed()", "TPyDispatcher", count_canvases, "Dispatch()") keepalive(canvas, count_canvases) if visible_canvases and not ROOT.gROOT.IsBatch(): run_application_until_done() # Disconnect from canvases for canvas in visible_canvases: if getattr(canvas, "_py_close_dispatcher_attached", False): canvas._py_close_dispatcher_attached = False canvas.Disconnect("Closed()", count_canvases, "Dispatch()")
[ "def", "wait_for_zero_canvases", "(", "middle_mouse_close", "=", "False", ")", ":", "if", "not", "__ACTIVE", ":", "wait_failover", "(", "wait_for_zero_canvases", ")", "return", "@", "dispatcher", "def", "count_canvases", "(", ")", ":", "\"\"\"\n Count the number of active canvases and finish gApplication.Run()\n if there are none remaining.\n\n incpy.ignore\n \"\"\"", "if", "not", "get_visible_canvases", "(", ")", ":", "try", ":", "ROOT", ".", "gSystem", ".", "ExitLoop", "(", ")", "except", "AttributeError", ":", "# We might be exiting and ROOT.gROOT will raise an AttributeError", "pass", "@", "dispatcher", "def", "exit_application_loop", "(", ")", ":", "\"\"\"\n Signal handler for CTRL-c to cause gApplication.Run() to finish.\n\n incpy.ignore\n \"\"\"", "ROOT", ".", "gSystem", ".", "ExitLoop", "(", ")", "# Handle CTRL-c", "sh", "=", "ROOT", ".", "TSignalHandler", "(", "ROOT", ".", "kSigInterrupt", ",", "True", ")", "sh", ".", "Add", "(", ")", "sh", ".", "Connect", "(", "\"Notified()\"", ",", "\"TPyDispatcher\"", ",", "exit_application_loop", ",", "\"Dispatch()\"", ")", "visible_canvases", "=", "get_visible_canvases", "(", ")", "for", "canvas", "in", "visible_canvases", ":", "log", ".", "debug", "(", "\"waiting for canvas {0} to close\"", ".", "format", "(", "canvas", ".", "GetName", "(", ")", ")", ")", "canvas", ".", "Update", "(", ")", "if", "middle_mouse_close", ":", "attach_event_handler", "(", "canvas", ")", "if", "not", "getattr", "(", "canvas", ",", "\"_py_close_dispatcher_attached\"", ",", "False", ")", ":", "# Attach a handler only once to each canvas", "canvas", ".", "_py_close_dispatcher_attached", "=", "True", "canvas", ".", "Connect", "(", "\"Closed()\"", ",", "\"TPyDispatcher\"", ",", "count_canvases", ",", "\"Dispatch()\"", ")", "keepalive", "(", "canvas", ",", "count_canvases", ")", "if", "visible_canvases", "and", "not", "ROOT", ".", "gROOT", ".", "IsBatch", "(", ")", ":", "run_application_until_done", "(", ")", "# Disconnect from canvases", "for", "canvas", "in", "visible_canvases", ":", "if", "getattr", "(", "canvas", ",", "\"_py_close_dispatcher_attached\"", ",", "False", ")", ":", "canvas", ".", "_py_close_dispatcher_attached", "=", "False", "canvas", ".", "Disconnect", "(", "\"Closed()\"", ",", "count_canvases", ",", "\"Dispatch()\"", ")" ]
Wait for all canvases to be closed, or CTRL-c. If `middle_mouse_close`, middle click will shut the canvas. incpy.ignore
[ "Wait", "for", "all", "canvases", "to", "be", "closed", "or", "CTRL", "-", "c", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L161-L226
train
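A sketch of the typical interactive pattern for wait_for_zero_canvases; Canvas and Hist are assumed to come from rootpy.plotting and the histogram contents are arbitrary.

from rootpy.plotting import Canvas, Hist
from rootpy.interactive.rootwait import wait_for_zero_canvases

c = Canvas()
h = Hist(100, -3, 3)
h.FillRandom('gaus')
h.Draw()
# Block until every visible canvas is closed (or CTRL-c); with
# middle_mouse_close=True a middle click also closes a canvas.
wait_for_zero_canvases(middle_mouse_close=True)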
rootpy/rootpy
rootpy/interactive/rootwait.py
wait_for_frame
def wait_for_frame(frame): """ wait until a TGMainFrame is closed or ctrl-c """ if not frame: # It's already closed or maybe we're in batch mode return @dispatcher def close(): ROOT.gSystem.ExitLoop() if not getattr(frame, "_py_close_dispatcher_attached", False): frame._py_close_dispatcher_attached = True frame.Connect("CloseWindow()", "TPyDispatcher", close, "Dispatch()") @dispatcher def exit_application_loop(): """ Signal handler for CTRL-c to cause gApplication.Run() to finish. incpy.ignore """ ROOT.gSystem.ExitLoop() # Handle CTRL-c sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True) sh.Add() sh.Connect("Notified()", "TPyDispatcher", exit_application_loop, "Dispatch()") if not ROOT.gROOT.IsBatch(): run_application_until_done() # Need to disconnect to prevent close handler from running when python # teardown has already commenced. frame.Disconnect("CloseWindow()", close, "Dispatch()")
python
def wait_for_frame(frame): """ wait until a TGMainFrame is closed or ctrl-c """ if not frame: # It's already closed or maybe we're in batch mode return @dispatcher def close(): ROOT.gSystem.ExitLoop() if not getattr(frame, "_py_close_dispatcher_attached", False): frame._py_close_dispatcher_attached = True frame.Connect("CloseWindow()", "TPyDispatcher", close, "Dispatch()") @dispatcher def exit_application_loop(): """ Signal handler for CTRL-c to cause gApplication.Run() to finish. incpy.ignore """ ROOT.gSystem.ExitLoop() # Handle CTRL-c sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True) sh.Add() sh.Connect("Notified()", "TPyDispatcher", exit_application_loop, "Dispatch()") if not ROOT.gROOT.IsBatch(): run_application_until_done() # Need to disconnect to prevent close handler from running when python # teardown has already commenced. frame.Disconnect("CloseWindow()", close, "Dispatch()")
[ "def", "wait_for_frame", "(", "frame", ")", ":", "if", "not", "frame", ":", "# It's already closed or maybe we're in batch mode", "return", "@", "dispatcher", "def", "close", "(", ")", ":", "ROOT", ".", "gSystem", ".", "ExitLoop", "(", ")", "if", "not", "getattr", "(", "frame", ",", "\"_py_close_dispatcher_attached\"", ",", "False", ")", ":", "frame", ".", "_py_close_dispatcher_attached", "=", "True", "frame", ".", "Connect", "(", "\"CloseWindow()\"", ",", "\"TPyDispatcher\"", ",", "close", ",", "\"Dispatch()\"", ")", "@", "dispatcher", "def", "exit_application_loop", "(", ")", ":", "\"\"\"\n Signal handler for CTRL-c to cause gApplication.Run() to finish.\n\n incpy.ignore\n \"\"\"", "ROOT", ".", "gSystem", ".", "ExitLoop", "(", ")", "# Handle CTRL-c", "sh", "=", "ROOT", ".", "TSignalHandler", "(", "ROOT", ".", "kSigInterrupt", ",", "True", ")", "sh", ".", "Add", "(", ")", "sh", ".", "Connect", "(", "\"Notified()\"", ",", "\"TPyDispatcher\"", ",", "exit_application_loop", ",", "\"Dispatch()\"", ")", "if", "not", "ROOT", ".", "gROOT", ".", "IsBatch", "(", ")", ":", "run_application_until_done", "(", ")", "# Need to disconnect to prevent close handler from running when python", "# teardown has already commenced.", "frame", ".", "Disconnect", "(", "\"CloseWindow()\"", ",", "close", ",", "\"Dispatch()\"", ")" ]
wait until a TGMainFrame is closed or ctrl-c
[ "wait", "until", "a", "TGMainFrame", "is", "closed", "or", "ctrl", "-", "c" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L231-L266
train
rootpy/rootpy
rootpy/interactive/rootwait.py
wait_for_browser_close
def wait_for_browser_close(b):
    """
    Can be used to wait until a TBrowser is closed
    """
    if b:
        if not __ACTIVE:
            wait_failover(wait_for_browser_close)
            return
        wait_for_frame(b.GetBrowserImp().GetMainFrame())
python
def wait_for_browser_close(b):
    """
    Can be used to wait until a TBrowser is closed
    """
    if b:
        if not __ACTIVE:
            wait_failover(wait_for_browser_close)
            return
        wait_for_frame(b.GetBrowserImp().GetMainFrame())
[ "def", "wait_for_browser_close", "(", "b", ")", ":", "if", "b", ":", "if", "not", "__ACTIVE", ":", "wait_failover", "(", "wait_for_browser_close", ")", "return", "wait_for_frame", "(", "b", ".", "GetBrowserImp", "(", ")", ".", "GetMainFrame", "(", ")", ")" ]
Can be used to wait until a TBrowser is closed
[ "Can", "be", "used", "to", "wait", "until", "a", "TBrowser", "is", "closed" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L269-L277
train
rootpy/rootpy
rootpy/logger/__init__.py
log_trace
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True): """ log a statement on function entry and exit """ def wrap(function): l = logger.getChild(function.__name__).log @wraps(function) def thunk(*args, **kwargs): global trace_depth trace_depth.value += 1 try: start = time() if show_enter: l(level, "{0}> {1} {2}".format(" "*trace_depth.value, args, kwargs)) try: result = function(*args, **kwargs) except: _, result, _ = sys.exc_info() raise finally: if show_exit: l(level, "{0}< return {1} [{2:.2f} sec]".format( " "*trace_depth.value, result, time() - start)) finally: trace_depth.value -= 1 return result return thunk return wrap
python
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True): """ log a statement on function entry and exit """ def wrap(function): l = logger.getChild(function.__name__).log @wraps(function) def thunk(*args, **kwargs): global trace_depth trace_depth.value += 1 try: start = time() if show_enter: l(level, "{0}> {1} {2}".format(" "*trace_depth.value, args, kwargs)) try: result = function(*args, **kwargs) except: _, result, _ = sys.exc_info() raise finally: if show_exit: l(level, "{0}< return {1} [{2:.2f} sec]".format( " "*trace_depth.value, result, time() - start)) finally: trace_depth.value -= 1 return result return thunk return wrap
[ "def", "log_trace", "(", "logger", ",", "level", "=", "logging", ".", "DEBUG", ",", "show_enter", "=", "True", ",", "show_exit", "=", "True", ")", ":", "def", "wrap", "(", "function", ")", ":", "l", "=", "logger", ".", "getChild", "(", "function", ".", "__name__", ")", ".", "log", "@", "wraps", "(", "function", ")", "def", "thunk", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "global", "trace_depth", "trace_depth", ".", "value", "+=", "1", "try", ":", "start", "=", "time", "(", ")", "if", "show_enter", ":", "l", "(", "level", ",", "\"{0}> {1} {2}\"", ".", "format", "(", "\" \"", "*", "trace_depth", ".", "value", ",", "args", ",", "kwargs", ")", ")", "try", ":", "result", "=", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", ":", "_", ",", "result", ",", "_", "=", "sys", ".", "exc_info", "(", ")", "raise", "finally", ":", "if", "show_exit", ":", "l", "(", "level", ",", "\"{0}< return {1} [{2:.2f} sec]\"", ".", "format", "(", "\" \"", "*", "trace_depth", ".", "value", ",", "result", ",", "time", "(", ")", "-", "start", ")", ")", "finally", ":", "trace_depth", ".", "value", "-=", "1", "return", "result", "return", "thunk", "return", "wrap" ]
log a statement on function entry and exit
[ "log", "a", "statement", "on", "function", "entry", "and", "exit" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/__init__.py#L140-L168
train
rootpy/rootpy
rootpy/logger/extended_logger.py
log_stack
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
    """
    Display the current stack on ``logger``.

    This function is designed to be used during emission of log messages,
    so it won't call itself.
    """
    if showing_stack.inside:
        return
    showing_stack.inside = True
    try:
        if frame is None:
            frame = sys._getframe(1)
        stack = "".join(traceback.format_stack(frame, limit))
        for line in (l[2:] for l in stack.split("\n") if l.strip()):
            logger.log(level, line)
    finally:
        showing_stack.inside = False
python
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
    """
    Display the current stack on ``logger``.

    This function is designed to be used during emission of log messages,
    so it won't call itself.
    """
    if showing_stack.inside:
        return
    showing_stack.inside = True
    try:
        if frame is None:
            frame = sys._getframe(1)
        stack = "".join(traceback.format_stack(frame, limit))
        for line in (l[2:] for l in stack.split("\n") if l.strip()):
            logger.log(level, line)
    finally:
        showing_stack.inside = False
[ "def", "log_stack", "(", "logger", ",", "level", "=", "logging", ".", "INFO", ",", "limit", "=", "None", ",", "frame", "=", "None", ")", ":", "if", "showing_stack", ".", "inside", ":", "return", "showing_stack", ".", "inside", "=", "True", "try", ":", "if", "frame", "is", "None", ":", "frame", "=", "sys", ".", "_getframe", "(", "1", ")", "stack", "=", "\"\"", ".", "join", "(", "traceback", ".", "format_stack", "(", "frame", ",", "limit", ")", ")", "for", "line", "in", "(", "l", "[", "2", ":", "]", "for", "l", "in", "stack", ".", "split", "(", "\"\\n\"", ")", "if", "l", ".", "strip", "(", ")", ")", ":", "logger", ".", "log", "(", "level", ",", "line", ")", "finally", ":", "showing_stack", ".", "inside", "=", "False" ]
Display the current stack on ``logger``. This function is designed to be used during emission of log messages, so it won't call itself.
[ "Display", "the", "current", "stack", "on", "logger", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L25-L42
train
rootpy/rootpy
rootpy/logger/extended_logger.py
ExtendedLogger.showdeletion
def showdeletion(self, *objects):
        """
        Record a stack trace at the point when an ROOT TObject is deleted
        """
        from ..memory import showdeletion as S
        for o in objects:
            S.monitor_object_cleanup(o)
python
def showdeletion(self, *objects):
        """
        Record a stack trace at the point when an ROOT TObject is deleted
        """
        from ..memory import showdeletion as S
        for o in objects:
            S.monitor_object_cleanup(o)
[ "def", "showdeletion", "(", "self", ",", "*", "objects", ")", ":", "from", ".", ".", "memory", "import", "showdeletion", "as", "S", "for", "o", "in", "objects", ":", "S", ".", "monitor_object_cleanup", "(", "o", ")" ]
Record a stack trace at the point when an ROOT TObject is deleted
[ "Record", "a", "stack", "trace", "at", "the", "point", "when", "an", "ROOT", "TObject", "is", "deleted" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L67-L73
train
rootpy/rootpy
rootpy/logger/extended_logger.py
ExtendedLogger.trace
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True): """ Functions decorated with this function show function entry and exit with values, defaults to debug log level. :param level: log severity to use for function tracing :param show_enter: log function entry :param show_exit: log function exit Example use: .. sourcecode:: python log = rootpy.log["/myapp"] @log.trace() def salut(): return @log.trace() def hello(what): salut() return "42" hello("world") # Result: # DEBUG:myapp.trace.hello] > ('world',) {} # DEBUG:myapp.trace.salut] > () {} # DEBUG:myapp.trace.salut] < return None [0.00 sec] # DEBUG:myapp.trace.hello] < return 42 [0.00 sec] Output: .. sourcecode:: none """ from . import log_trace return log_trace(self, level, show_enter, show_exit)
python
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True): """ Functions decorated with this function show function entry and exit with values, defaults to debug log level. :param level: log severity to use for function tracing :param show_enter: log function entry :param show_exit: log function exit Example use: .. sourcecode:: python log = rootpy.log["/myapp"] @log.trace() def salut(): return @log.trace() def hello(what): salut() return "42" hello("world") # Result: # DEBUG:myapp.trace.hello] > ('world',) {} # DEBUG:myapp.trace.salut] > () {} # DEBUG:myapp.trace.salut] < return None [0.00 sec] # DEBUG:myapp.trace.hello] < return 42 [0.00 sec] Output: .. sourcecode:: none """ from . import log_trace return log_trace(self, level, show_enter, show_exit)
[ "def", "trace", "(", "self", ",", "level", "=", "logging", ".", "DEBUG", ",", "show_enter", "=", "True", ",", "show_exit", "=", "True", ")", ":", "from", ".", "import", "log_trace", "return", "log_trace", "(", "self", ",", "level", ",", "show_enter", ",", "show_exit", ")" ]
Functions decorated with this function show function entry and exit with values, defaults to debug log level. :param level: log severity to use for function tracing :param show_enter: log function entry :param show_exit: log function exit Example use: .. sourcecode:: python log = rootpy.log["/myapp"] @log.trace() def salut(): return @log.trace() def hello(what): salut() return "42" hello("world") # Result: # DEBUG:myapp.trace.hello] > ('world',) {} # DEBUG:myapp.trace.salut] > () {} # DEBUG:myapp.trace.salut] < return None [0.00 sec] # DEBUG:myapp.trace.hello] < return 42 [0.00 sec] Output: .. sourcecode:: none
[ "Functions", "decorated", "with", "this", "function", "show", "function", "entry", "and", "exit", "with", "values", "defaults", "to", "debug", "log", "level", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L91-L127
train
rootpy/rootpy
rootpy/logger/extended_logger.py
ExtendedLogger.frame_unique
def frame_unique(f): """ A tuple representing a value which is unique to a given frame's line of execution """ return f.f_code.co_filename, f.f_code.co_name, f.f_lineno
python
def frame_unique(f): """ A tuple representing a value which is unique to a given frame's line of execution """ return f.f_code.co_filename, f.f_code.co_name, f.f_lineno
[ "def", "frame_unique", "(", "f", ")", ":", "return", "f", ".", "f_code", ".", "co_filename", ",", "f", ".", "f_code", ".", "co_name", ",", "f", ".", "f_lineno" ]
A tuple representing a value which is unique to a given frame's line of execution
[ "A", "tuple", "representing", "a", "value", "which", "is", "unique", "to", "a", "given", "frame", "s", "line", "of", "execution" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L157-L162
train
rootpy/rootpy
rootpy/logger/extended_logger.py
ExtendedLogger.show_stack_depth
def show_stack_depth(self, record, frame): """ Compute the maximum stack depth to show requested by any hooks, returning -1 if there are none matching, or if we've already emitted one for the line of code referred to. """ logger = self depths = [-1] msg = record.getMessage() # For each logger in the hierarchy while logger: to_match = getattr(logger, "show_stack_regexes", ()) for regex, depth, once, min_level in to_match: if record.levelno < min_level: continue if not regex.match(record.msg): continue # Only for a given regex, line number and logger unique = regex, self.frame_unique(frame), record.name if once: if unique in logger.shown_stack_frames: # We've shown this one already. continue # Prevent this stack frame from being shown again logger.shown_stack_frames.add(unique) depths.append(depth) logger = logger.parent return max(depths)
python
def show_stack_depth(self, record, frame): """ Compute the maximum stack depth to show requested by any hooks, returning -1 if there are none matching, or if we've already emitted one for the line of code referred to. """ logger = self depths = [-1] msg = record.getMessage() # For each logger in the hierarchy while logger: to_match = getattr(logger, "show_stack_regexes", ()) for regex, depth, once, min_level in to_match: if record.levelno < min_level: continue if not regex.match(record.msg): continue # Only for a given regex, line number and logger unique = regex, self.frame_unique(frame), record.name if once: if unique in logger.shown_stack_frames: # We've shown this one already. continue # Prevent this stack frame from being shown again logger.shown_stack_frames.add(unique) depths.append(depth) logger = logger.parent return max(depths)
[ "def", "show_stack_depth", "(", "self", ",", "record", ",", "frame", ")", ":", "logger", "=", "self", "depths", "=", "[", "-", "1", "]", "msg", "=", "record", ".", "getMessage", "(", ")", "# For each logger in the hierarchy", "while", "logger", ":", "to_match", "=", "getattr", "(", "logger", ",", "\"show_stack_regexes\"", ",", "(", ")", ")", "for", "regex", ",", "depth", ",", "once", ",", "min_level", "in", "to_match", ":", "if", "record", ".", "levelno", "<", "min_level", ":", "continue", "if", "not", "regex", ".", "match", "(", "record", ".", "msg", ")", ":", "continue", "# Only for a given regex, line number and logger", "unique", "=", "regex", ",", "self", ".", "frame_unique", "(", "frame", ")", ",", "record", ".", "name", "if", "once", ":", "if", "unique", "in", "logger", ".", "shown_stack_frames", ":", "# We've shown this one already.", "continue", "# Prevent this stack frame from being shown again", "logger", ".", "shown_stack_frames", ".", "add", "(", "unique", ")", "depths", ".", "append", "(", "depth", ")", "logger", "=", "logger", ".", "parent", "return", "max", "(", "depths", ")" ]
Compute the maximum stack depth to show requested by any hooks, returning -1 if there are none matching, or if we've already emitted one for the line of code referred to.
[ "Compute", "the", "maximum", "stack", "depth", "to", "show", "requested", "by", "any", "hooks", "returning", "-", "1", "if", "there", "are", "none", "matching", "or", "if", "we", "ve", "already", "emitted", "one", "for", "the", "line", "of", "code", "referred", "to", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L164-L193
train
rootpy/rootpy
rootpy/logger/extended_logger.py
ExtendedLogger.getChild
def getChild(self, suffix): """ Taken from CPython 2.7, modified to remove duplicate prefix and suffixes """ if suffix is None: return self if self.root is not self: if suffix.startswith(self.name + "."): # Remove duplicate prefix suffix = suffix[len(self.name + "."):] suf_parts = suffix.split(".") if len(suf_parts) > 1 and suf_parts[-1] == suf_parts[-2]: # If we have a submodule's name equal to the parent's name, # omit it. suffix = ".".join(suf_parts[:-1]) suffix = '.'.join((self.name, suffix)) return self.manager.getLogger(suffix)
python
def getChild(self, suffix): """ Taken from CPython 2.7, modified to remove duplicate prefix and suffixes """ if suffix is None: return self if self.root is not self: if suffix.startswith(self.name + "."): # Remove duplicate prefix suffix = suffix[len(self.name + "."):] suf_parts = suffix.split(".") if len(suf_parts) > 1 and suf_parts[-1] == suf_parts[-2]: # If we have a submodule's name equal to the parent's name, # omit it. suffix = ".".join(suf_parts[:-1]) suffix = '.'.join((self.name, suffix)) return self.manager.getLogger(suffix)
[ "def", "getChild", "(", "self", ",", "suffix", ")", ":", "if", "suffix", "is", "None", ":", "return", "self", "if", "self", ".", "root", "is", "not", "self", ":", "if", "suffix", ".", "startswith", "(", "self", ".", "name", "+", "\".\"", ")", ":", "# Remove duplicate prefix", "suffix", "=", "suffix", "[", "len", "(", "self", ".", "name", "+", "\".\"", ")", ":", "]", "suf_parts", "=", "suffix", ".", "split", "(", "\".\"", ")", "if", "len", "(", "suf_parts", ")", ">", "1", "and", "suf_parts", "[", "-", "1", "]", "==", "suf_parts", "[", "-", "2", "]", ":", "# If we have a submodule's name equal to the parent's name,", "# omit it.", "suffix", "=", "\".\"", ".", "join", "(", "suf_parts", "[", ":", "-", "1", "]", ")", "suffix", "=", "'.'", ".", "join", "(", "(", "self", ".", "name", ",", "suffix", ")", ")", "return", "self", ".", "manager", ".", "getLogger", "(", "suffix", ")" ]
Taken from CPython 2.7, modified to remove duplicate prefix and suffixes
[ "Taken", "from", "CPython", "2", ".", "7", "modified", "to", "remove", "duplicate", "prefix", "and", "suffixes" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L225-L241
train
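The duplicate-prefix handling can be seen with a short sketch; it assumes rootpy.log returns these extended loggers (as the trace() example earlier in this file suggests), and the child name is hypothetical.

.. sourcecode:: python

    import rootpy

    log = rootpy.log["/myapp"]             # an extended logger named "myapp"

    # a suffix that already repeats the parent's name is not doubled up
    child = log.getChild("myapp.worker")   # hypothetical child name
    print(child.name)                      # expected: "myapp.worker", not "myapp.myapp.worker"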
rootpy/rootpy
rootpy/decorators.py
method_file_check
def method_file_check(f): """ A decorator to check that a TFile has been created before f is called. This function can decorate methods. This requires special treatment since in Python 3 unbound methods are just functions: http://stackoverflow.com/a/3589335/1002176 but to get consistent access to the class in both 2.x and 3.x, we need self. """ @wraps(f) def wrapper(self, *args, **kwargs): curr_dir = ROOT.gDirectory if isinstance(curr_dir, ROOT.TROOT) or not curr_dir: raise RuntimeError( "You must first create a File before calling {0}.{1}".format( self.__class__.__name__, _get_qualified_name(f))) if not curr_dir.IsWritable(): raise RuntimeError( "Calling {0}.{1} requires that the " "current File is writable".format( self.__class__.__name__, _get_qualified_name(f))) return f(self, *args, **kwargs) return wrapper
python
def method_file_check(f): """ A decorator to check that a TFile has been created before f is called. This function can decorate methods. This requires special treatment since in Python 3 unbound methods are just functions: http://stackoverflow.com/a/3589335/1002176 but to get consistent access to the class in both 2.x and 3.x, we need self. """ @wraps(f) def wrapper(self, *args, **kwargs): curr_dir = ROOT.gDirectory if isinstance(curr_dir, ROOT.TROOT) or not curr_dir: raise RuntimeError( "You must first create a File before calling {0}.{1}".format( self.__class__.__name__, _get_qualified_name(f))) if not curr_dir.IsWritable(): raise RuntimeError( "Calling {0}.{1} requires that the " "current File is writable".format( self.__class__.__name__, _get_qualified_name(f))) return f(self, *args, **kwargs) return wrapper
[ "def", "method_file_check", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "curr_dir", "=", "ROOT", ".", "gDirectory", "if", "isinstance", "(", "curr_dir", ",", "ROOT", ".", "TROOT", ")", "or", "not", "curr_dir", ":", "raise", "RuntimeError", "(", "\"You must first create a File before calling {0}.{1}\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "_get_qualified_name", "(", "f", ")", ")", ")", "if", "not", "curr_dir", ".", "IsWritable", "(", ")", ":", "raise", "RuntimeError", "(", "\"Calling {0}.{1} requires that the \"", "\"current File is writable\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "_get_qualified_name", "(", "f", ")", ")", ")", "return", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
A decorator to check that a TFile has been created before f is called. This function can decorate methods. This requires special treatment since in Python 3 unbound methods are just functions: http://stackoverflow.com/a/3589335/1002176 but to get consistent access to the class in both 2.x and 3.x, we need self.
[ "A", "decorator", "to", "check", "that", "a", "TFile", "has", "been", "created", "before", "f", "is", "called", ".", "This", "function", "can", "decorate", "methods", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/decorators.py#L64-L86
train
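A sketch of the decorator in use; the Writeable class and its write method are invented for illustration, and root_open is assumed to be the usual rootpy file opener (it also appears in the root2hdf5 record later in this file).

.. sourcecode:: python

    from rootpy.io import root_open                    # assumed rootpy file opener
    from rootpy.decorators import method_file_check    # per the path field above

    class Writeable(object):                           # hypothetical user class
        @method_file_check
        def write(self, name):
            # only reached when a writable TFile is the current directory
            print("writing", name)

    obj = Writeable()
    # obj.write("h1")    # would raise RuntimeError: no File has been created yet
    with root_open("out.root", "recreate"):
        obj.write("h1")  # OK: the current directory is a writable File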
rootpy/rootpy
rootpy/decorators.py
chainable
def chainable(f): """ Decorator which causes a 'void' function to return self Allows chaining of multiple modifier class methods. """ @wraps(f) def wrapper(self, *args, **kwargs): # perform action f(self, *args, **kwargs) # return reference to class. return self return wrapper
python
def chainable(f): """ Decorator which causes a 'void' function to return self Allows chaining of multiple modifier class methods. """ @wraps(f) def wrapper(self, *args, **kwargs): # perform action f(self, *args, **kwargs) # return reference to class. return self return wrapper
[ "def", "chainable", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# perform action", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# return reference to class.", "return", "self", "return", "wrapper" ]
Decorator which causes a 'void' function to return self Allows chaining of multiple modifier class methods.
[ "Decorator", "which", "causes", "a", "void", "function", "to", "return", "self" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/decorators.py#L103-L115
train
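A self-contained sketch of the chaining pattern this decorator enables; the Style class and its setters are invented for illustration.

.. sourcecode:: python

    from rootpy.decorators import chainable   # per the path field above

    class Style(object):                       # hypothetical class
        @chainable
        def set_color(self, color):
            self.color = color

        @chainable
        def set_width(self, width):
            self.width = width

    # each setter returns self, so modifiers can be chained in one expression
    s = Style().set_color("red").set_width(2)
    print(s.color, s.width)                    # red 2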
rootpy/rootpy
rootpy/decorators.py
snake_case_methods
def snake_case_methods(cls, debug=False): """ A class decorator adding snake_case methods that alias capitalized ROOT methods. cls must subclass a ROOT class and define the _ROOT class variable. """ if not CONVERT_SNAKE_CASE: return cls # get the ROOT base class root_base = cls._ROOT members = inspect.getmembers(root_base) # filter out any methods that already exist in lower and uppercase forms # i.e. TDirectory::cd and Cd... names = {} for name, member in members: lower_name = name.lower() if lower_name in names: del names[lower_name] else: names[lower_name] = None for name, member in members: if name.lower() not in names: continue # Don't touch special methods or methods without cap letters if name[0] == '_' or name.islower(): continue # Is this a method of the ROOT base class? if not inspect.ismethod(member) and not inspect.isfunction(member): continue # convert CamelCase to snake_case new_name = camel_to_snake(name) # Use a __dict__ lookup rather than getattr because we _want_ to # obtain the _descriptor_, and not what the descriptor gives us # when it is `getattr`'d. value = None skip = False for c in cls.mro(): # skip methods that are already overridden if new_name in c.__dict__: skip = True break if name in c.__dict__: value = c.__dict__[name] break # <neo>Woah, a use for for-else</neo> else: # Weird. Maybe the item lives somewhere else, such as on the # metaclass? value = getattr(cls, name) if skip: continue setattr(cls, new_name, value) return cls
python
def snake_case_methods(cls, debug=False): """ A class decorator adding snake_case methods that alias capitalized ROOT methods. cls must subclass a ROOT class and define the _ROOT class variable. """ if not CONVERT_SNAKE_CASE: return cls # get the ROOT base class root_base = cls._ROOT members = inspect.getmembers(root_base) # filter out any methods that already exist in lower and uppercase forms # i.e. TDirectory::cd and Cd... names = {} for name, member in members: lower_name = name.lower() if lower_name in names: del names[lower_name] else: names[lower_name] = None for name, member in members: if name.lower() not in names: continue # Don't touch special methods or methods without cap letters if name[0] == '_' or name.islower(): continue # Is this a method of the ROOT base class? if not inspect.ismethod(member) and not inspect.isfunction(member): continue # convert CamelCase to snake_case new_name = camel_to_snake(name) # Use a __dict__ lookup rather than getattr because we _want_ to # obtain the _descriptor_, and not what the descriptor gives us # when it is `getattr`'d. value = None skip = False for c in cls.mro(): # skip methods that are already overridden if new_name in c.__dict__: skip = True break if name in c.__dict__: value = c.__dict__[name] break # <neo>Woah, a use for for-else</neo> else: # Weird. Maybe the item lives somewhere else, such as on the # metaclass? value = getattr(cls, name) if skip: continue setattr(cls, new_name, value) return cls
[ "def", "snake_case_methods", "(", "cls", ",", "debug", "=", "False", ")", ":", "if", "not", "CONVERT_SNAKE_CASE", ":", "return", "cls", "# get the ROOT base class", "root_base", "=", "cls", ".", "_ROOT", "members", "=", "inspect", ".", "getmembers", "(", "root_base", ")", "# filter out any methods that already exist in lower and uppercase forms", "# i.e. TDirectory::cd and Cd...", "names", "=", "{", "}", "for", "name", ",", "member", "in", "members", ":", "lower_name", "=", "name", ".", "lower", "(", ")", "if", "lower_name", "in", "names", ":", "del", "names", "[", "lower_name", "]", "else", ":", "names", "[", "lower_name", "]", "=", "None", "for", "name", ",", "member", "in", "members", ":", "if", "name", ".", "lower", "(", ")", "not", "in", "names", ":", "continue", "# Don't touch special methods or methods without cap letters", "if", "name", "[", "0", "]", "==", "'_'", "or", "name", ".", "islower", "(", ")", ":", "continue", "# Is this a method of the ROOT base class?", "if", "not", "inspect", ".", "ismethod", "(", "member", ")", "and", "not", "inspect", ".", "isfunction", "(", "member", ")", ":", "continue", "# convert CamelCase to snake_case", "new_name", "=", "camel_to_snake", "(", "name", ")", "# Use a __dict__ lookup rather than getattr because we _want_ to", "# obtain the _descriptor_, and not what the descriptor gives us", "# when it is `getattr`'d.", "value", "=", "None", "skip", "=", "False", "for", "c", "in", "cls", ".", "mro", "(", ")", ":", "# skip methods that are already overridden", "if", "new_name", "in", "c", ".", "__dict__", ":", "skip", "=", "True", "break", "if", "name", "in", "c", ".", "__dict__", ":", "value", "=", "c", ".", "__dict__", "[", "name", "]", "break", "# <neo>Woah, a use for for-else</neo>", "else", ":", "# Weird. Maybe the item lives somewhere else, such as on the", "# metaclass?", "value", "=", "getattr", "(", "cls", ",", "name", ")", "if", "skip", ":", "continue", "setattr", "(", "cls", ",", "new_name", ",", "value", ")", "return", "cls" ]
A class decorator adding snake_case methods that alias capitalized ROOT methods. cls must subclass a ROOT class and define the _ROOT class variable.
[ "A", "class", "decorator", "adding", "snake_case", "methods", "that", "alias", "capitalized", "ROOT", "methods", ".", "cls", "must", "subclass", "a", "ROOT", "class", "and", "define", "the", "_ROOT", "class", "variable", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/decorators.py#L131-L184
train
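The decorator targets rootpy wrapper classes that define _ROOT; the sketch below shows the intended pattern, but the TNamed example is an assumption and whether PyROOT exposes its methods in a form the ismethod/isfunction check accepts depends on the ROOT build and on the snake_case conversion being enabled.

.. sourcecode:: python

    import ROOT
    from rootpy.decorators import snake_case_methods   # per the path field above

    @snake_case_methods
    class Named(ROOT.TNamed, object):      # hypothetical wrapper class
        _ROOT = ROOT.TNamed                # the ROOT base whose methods get aliased

    n = Named("histo", "my title")
    print(n.GetTitle())                    # "my title"
    print(n.get_title())                   # same method through the generated snake_case alias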
rootpy/rootpy
rootpy/decorators.py
sync
def sync(lock): """ A synchronization decorator """ def sync(f): @wraps(f) def new_function(*args, **kwargs): lock.acquire() try: return f(*args, **kwargs) finally: lock.release() return new_function return sync
python
def sync(lock): """ A synchronization decorator """ def sync(f): @wraps(f) def new_function(*args, **kwargs): lock.acquire() try: return f(*args, **kwargs) finally: lock.release() return new_function return sync
[ "def", "sync", "(", "lock", ")", ":", "def", "sync", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "new_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "lock", ".", "acquire", "(", ")", "try", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "lock", ".", "release", "(", ")", "return", "new_function", "return", "sync" ]
A synchronization decorator
[ "A", "synchronization", "decorator" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/decorators.py#L187-L200
train
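A self-contained sketch guarding a shared counter with the decorator; apart from the decorator itself only the standard library is used.

.. sourcecode:: python

    import threading
    from rootpy.decorators import sync      # per the path field above

    lock = threading.Lock()
    counter = {"n": 0}

    @sync(lock)
    def bump():
        # only one thread at a time executes this body
        counter["n"] += 1

    threads = [threading.Thread(target=bump) for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(counter["n"])                     # 10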
rootpy/rootpy
rootpy/stats/correlated_values.py
as_ufloat
def as_ufloat(roorealvar): """ Cast a `RooRealVar` to an `uncertainties.ufloat` """ if isinstance(roorealvar, (U.AffineScalarFunc, U.Variable)): return roorealvar return U.ufloat((roorealvar.getVal(), roorealvar.getError()))
python
def as_ufloat(roorealvar): """ Cast a `RooRealVar` to an `uncertainties.ufloat` """ if isinstance(roorealvar, (U.AffineScalarFunc, U.Variable)): return roorealvar return U.ufloat((roorealvar.getVal(), roorealvar.getError()))
[ "def", "as_ufloat", "(", "roorealvar", ")", ":", "if", "isinstance", "(", "roorealvar", ",", "(", "U", ".", "AffineScalarFunc", ",", "U", ".", "Variable", ")", ")", ":", "return", "roorealvar", "return", "U", ".", "ufloat", "(", "(", "roorealvar", ".", "getVal", "(", ")", ",", "roorealvar", ".", "getError", "(", ")", ")", ")" ]
Cast a `RooRealVar` to an `uncertainties.ufloat`
[ "Cast", "a", "RooRealVar", "to", "an", "uncertainties", ".", "ufloat" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/correlated_values.py#L13-L19
train
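A sketch assuming ROOT with RooFit and the uncertainties package are installed; note that the two-argument tuple form of ufloat used in the function body above belongs to older releases of the uncertainties API, so a matching version is assumed.

.. sourcecode:: python

    import ROOT
    from rootpy.stats.correlated_values import as_ufloat   # per the path field above

    x = ROOT.RooRealVar("x", "x", 5.0)
    x.setError(0.3)

    ux = as_ufloat(x)
    print(ux)                    # roughly: 5.0 +/- 0.3, as an uncertainties value
    print(as_ufloat(ux) is ux)   # True: already a ufloat, returned unchanged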
rootpy/rootpy
rootpy/stats/correlated_values.py
correlated_values
def correlated_values(param_names, roofitresult): """ Return symbolic values from a `RooFitResult` taking into account covariance This is useful for numerically computing the uncertainties for expressions using correlated values arising from a fit. Parameters ---------- param_names: list of strings A list of parameters to extract from the result. The order of the names is the order of the return value. roofitresult : RooFitResult A RooFitResult from a fit. Returns ------- list of correlated values from the uncertainties package. Examples -------- .. sourcecode:: python # Fit a pdf to a histogram pdf = some_roofit_pdf_with_variables("f(x, a, b, c)") fitresult = pdf.fitTo(histogram, ROOT.RooFit.Save()) a, b, c = correlated_values(["a", "b", "c"], fitresult) # Arbitrary math expression according to what the `uncertainties` # package supports, automatically computes correct error propagation sum_value = a + b + c value, error = sum_value.nominal_value, sum_value.std_dev() """ pars = roofitresult.floatParsFinal() #pars.Print() pars = [pars[i] for i in range(pars.getSize())] parnames = [p.GetName() for p in pars] values = [(p.getVal(), p.getError()) for p in pars] #values = [as_ufloat(p) for p in pars] matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy() uvalues = U.correlated_values_norm(values, matrix.tolist()) uvalues = dict((n, v) for n, v in zip(parnames, uvalues)) assert all(n in uvalues for n in parnames), ( "name {0} isn't in parameter list {1}".format(n, parnames)) # Return a tuple in the order it was asked for return tuple(uvalues[n] for n in param_names)
python
def correlated_values(param_names, roofitresult): """ Return symbolic values from a `RooFitResult` taking into account covariance This is useful for numerically computing the uncertainties for expressions using correlated values arising from a fit. Parameters ---------- param_names: list of strings A list of parameters to extract from the result. The order of the names is the order of the return value. roofitresult : RooFitResult A RooFitResult from a fit. Returns ------- list of correlated values from the uncertainties package. Examples -------- .. sourcecode:: python # Fit a pdf to a histogram pdf = some_roofit_pdf_with_variables("f(x, a, b, c)") fitresult = pdf.fitTo(histogram, ROOT.RooFit.Save()) a, b, c = correlated_values(["a", "b", "c"], fitresult) # Arbitrary math expression according to what the `uncertainties` # package supports, automatically computes correct error propagation sum_value = a + b + c value, error = sum_value.nominal_value, sum_value.std_dev() """ pars = roofitresult.floatParsFinal() #pars.Print() pars = [pars[i] for i in range(pars.getSize())] parnames = [p.GetName() for p in pars] values = [(p.getVal(), p.getError()) for p in pars] #values = [as_ufloat(p) for p in pars] matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy() uvalues = U.correlated_values_norm(values, matrix.tolist()) uvalues = dict((n, v) for n, v in zip(parnames, uvalues)) assert all(n in uvalues for n in parnames), ( "name {0} isn't in parameter list {1}".format(n, parnames)) # Return a tuple in the order it was asked for return tuple(uvalues[n] for n in param_names)
[ "def", "correlated_values", "(", "param_names", ",", "roofitresult", ")", ":", "pars", "=", "roofitresult", ".", "floatParsFinal", "(", ")", "#pars.Print()", "pars", "=", "[", "pars", "[", "i", "]", "for", "i", "in", "range", "(", "pars", ".", "getSize", "(", ")", ")", "]", "parnames", "=", "[", "p", ".", "GetName", "(", ")", "for", "p", "in", "pars", "]", "values", "=", "[", "(", "p", ".", "getVal", "(", ")", ",", "p", ".", "getError", "(", ")", ")", "for", "p", "in", "pars", "]", "#values = [as_ufloat(p) for p in pars]", "matrix", "=", "asrootpy", "(", "roofitresult", ".", "correlationMatrix", "(", ")", ")", ".", "to_numpy", "(", ")", "uvalues", "=", "U", ".", "correlated_values_norm", "(", "values", ",", "matrix", ".", "tolist", "(", ")", ")", "uvalues", "=", "dict", "(", "(", "n", ",", "v", ")", "for", "n", ",", "v", "in", "zip", "(", "parnames", ",", "uvalues", ")", ")", "assert", "all", "(", "n", "in", "uvalues", "for", "n", "in", "parnames", ")", ",", "(", "\"name {0} isn't in parameter list {1}\"", ".", "format", "(", "n", ",", "parnames", ")", ")", "# Return a tuple in the order it was asked for", "return", "tuple", "(", "uvalues", "[", "n", "]", "for", "n", "in", "param_names", ")" ]
Return symbolic values from a `RooFitResult` taking into account covariance This is useful for numerically computing the uncertainties for expressions using correlated values arising from a fit. Parameters ---------- param_names: list of strings A list of parameters to extract from the result. The order of the names is the order of the return value. roofitresult : RooFitResult A RooFitResult from a fit. Returns ------- list of correlated values from the uncertainties package. Examples -------- .. sourcecode:: python # Fit a pdf to a histogram pdf = some_roofit_pdf_with_variables("f(x, a, b, c)") fitresult = pdf.fitTo(histogram, ROOT.RooFit.Save()) a, b, c = correlated_values(["a", "b", "c"], fitresult) # Arbitrary math expression according to what the `uncertainties` # package supports, automatically computes correct error propagation sum_value = a + b + c value, error = sum_value.nominal_value, sum_value.std_dev()
[ "Return", "symbolic", "values", "from", "a", "RooFitResult", "taking", "into", "account", "covariance" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/correlated_values.py#L22-L75
train
rootpy/rootpy
rootpy/tree/treemodel.py
TreeModelMeta.checkattr
def checkattr(metacls, attr, value): """ Only allow class attributes that are instances of rootpy.types.Column, ROOT.TObject, or ROOT.ObjectProxy """ if not isinstance(value, ( types.MethodType, types.FunctionType, classmethod, staticmethod, property)): if attr in dir(type('dummy', (object,), {})) + \ ['__metaclass__', '__qualname__']: return if attr.startswith('_'): raise SyntaxError( "TreeModel attribute `{0}` " "must not start with `_`".format(attr)) if not inspect.isclass(value): if not isinstance(value, Column): raise TypeError( "TreeModel attribute `{0}` " "must be an instance of " "`rootpy.tree.treetypes.Column`".format(attr)) return if not issubclass(value, (ROOT.TObject, ROOT.ObjectProxy)): raise TypeError( "TreeModel attribute `{0}` must inherit " "from `ROOT.TObject` or `ROOT.ObjectProxy`".format( attr))
python
def checkattr(metacls, attr, value): """ Only allow class attributes that are instances of rootpy.types.Column, ROOT.TObject, or ROOT.ObjectProxy """ if not isinstance(value, ( types.MethodType, types.FunctionType, classmethod, staticmethod, property)): if attr in dir(type('dummy', (object,), {})) + \ ['__metaclass__', '__qualname__']: return if attr.startswith('_'): raise SyntaxError( "TreeModel attribute `{0}` " "must not start with `_`".format(attr)) if not inspect.isclass(value): if not isinstance(value, Column): raise TypeError( "TreeModel attribute `{0}` " "must be an instance of " "`rootpy.tree.treetypes.Column`".format(attr)) return if not issubclass(value, (ROOT.TObject, ROOT.ObjectProxy)): raise TypeError( "TreeModel attribute `{0}` must inherit " "from `ROOT.TObject` or `ROOT.ObjectProxy`".format( attr))
[ "def", "checkattr", "(", "metacls", ",", "attr", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "(", "types", ".", "MethodType", ",", "types", ".", "FunctionType", ",", "classmethod", ",", "staticmethod", ",", "property", ")", ")", ":", "if", "attr", "in", "dir", "(", "type", "(", "'dummy'", ",", "(", "object", ",", ")", ",", "{", "}", ")", ")", "+", "[", "'__metaclass__'", ",", "'__qualname__'", "]", ":", "return", "if", "attr", ".", "startswith", "(", "'_'", ")", ":", "raise", "SyntaxError", "(", "\"TreeModel attribute `{0}` \"", "\"must not start with `_`\"", ".", "format", "(", "attr", ")", ")", "if", "not", "inspect", ".", "isclass", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "Column", ")", ":", "raise", "TypeError", "(", "\"TreeModel attribute `{0}` \"", "\"must be an instance of \"", "\"`rootpy.tree.treetypes.Column`\"", ".", "format", "(", "attr", ")", ")", "return", "if", "not", "issubclass", "(", "value", ",", "(", "ROOT", ".", "TObject", ",", "ROOT", ".", "ObjectProxy", ")", ")", ":", "raise", "TypeError", "(", "\"TreeModel attribute `{0}` must inherit \"", "\"from `ROOT.TObject` or `ROOT.ObjectProxy`\"", ".", "format", "(", "attr", ")", ")" ]
Only allow class attributes that are instances of rootpy.types.Column, ROOT.TObject, or ROOT.ObjectProxy
[ "Only", "allow", "class", "attributes", "that", "are", "instances", "of", "rootpy", ".", "types", ".", "Column", "ROOT", ".", "TObject", "or", "ROOT", ".", "ObjectProxy" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treemodel.py#L53-L82
train
rootpy/rootpy
rootpy/tree/treemodel.py
TreeModelMeta.prefix
def prefix(cls, name): """ Create a new TreeModel where class attribute names are prefixed with ``name`` """ attrs = dict([(name + attr, value) for attr, value in cls.get_attrs()]) return TreeModelMeta( '_'.join([name, cls.__name__]), (TreeModel,), attrs)
python
def prefix(cls, name): """ Create a new TreeModel where class attribute names are prefixed with ``name`` """ attrs = dict([(name + attr, value) for attr, value in cls.get_attrs()]) return TreeModelMeta( '_'.join([name, cls.__name__]), (TreeModel,), attrs)
[ "def", "prefix", "(", "cls", ",", "name", ")", ":", "attrs", "=", "dict", "(", "[", "(", "name", "+", "attr", ",", "value", ")", "for", "attr", ",", "value", "in", "cls", ".", "get_attrs", "(", ")", "]", ")", "return", "TreeModelMeta", "(", "'_'", ".", "join", "(", "[", "name", ",", "cls", ".", "__name__", "]", ")", ",", "(", "TreeModel", ",", ")", ",", "attrs", ")" ]
Create a new TreeModel where class attribute names are prefixed with ``name``
[ "Create", "a", "new", "TreeModel", "where", "class", "attribute", "names", "are", "prefixed", "with", "name" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treemodel.py#L84-L92
train
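A sketch of the prefix helper on a small model; the TreeModel and FloatCol imports follow the usual rootpy tree API, which is assumed rather than shown in this record.

.. sourcecode:: python

    from rootpy.tree import TreeModel, FloatCol   # assumed rootpy imports

    class Particle(TreeModel):        # hypothetical model
        pt = FloatCol()
        eta = FloatCol()

    # a new TreeModel whose attribute names carry the given prefix
    Muon = Particle.prefix("muon_")
    print([name for name, _ in Muon.get_attrs()])
    # e.g. ['muon_pt', 'muon_eta'] -- prefixed copies of the original columns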
rootpy/rootpy
rootpy/tree/treemodel.py
TreeModelMeta.get_attrs
def get_attrs(cls): """ Get all class attributes ordered by definition """ ignore = dir(type('dummy', (object,), {})) + ['__metaclass__'] attrs = [ item for item in inspect.getmembers(cls) if item[0] not in ignore and not isinstance( item[1], ( types.FunctionType, types.MethodType, classmethod, staticmethod, property))] # sort by idx and use attribute name to break ties attrs.sort(key=lambda attr: (getattr(attr[1], 'idx', -1), attr[0])) return attrs
python
def get_attrs(cls): """ Get all class attributes ordered by definition """ ignore = dir(type('dummy', (object,), {})) + ['__metaclass__'] attrs = [ item for item in inspect.getmembers(cls) if item[0] not in ignore and not isinstance( item[1], ( types.FunctionType, types.MethodType, classmethod, staticmethod, property))] # sort by idx and use attribute name to break ties attrs.sort(key=lambda attr: (getattr(attr[1], 'idx', -1), attr[0])) return attrs
[ "def", "get_attrs", "(", "cls", ")", ":", "ignore", "=", "dir", "(", "type", "(", "'dummy'", ",", "(", "object", ",", ")", ",", "{", "}", ")", ")", "+", "[", "'__metaclass__'", "]", "attrs", "=", "[", "item", "for", "item", "in", "inspect", ".", "getmembers", "(", "cls", ")", "if", "item", "[", "0", "]", "not", "in", "ignore", "and", "not", "isinstance", "(", "item", "[", "1", "]", ",", "(", "types", ".", "FunctionType", ",", "types", ".", "MethodType", ",", "classmethod", ",", "staticmethod", ",", "property", ")", ")", "]", "# sort by idx and use attribute name to break ties", "attrs", ".", "sort", "(", "key", "=", "lambda", "attr", ":", "(", "getattr", "(", "attr", "[", "1", "]", ",", "'idx'", ",", "-", "1", ")", ",", "attr", "[", "0", "]", ")", ")", "return", "attrs" ]
Get all class attributes ordered by definition
[ "Get", "all", "class", "attributes", "ordered", "by", "definition" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treemodel.py#L104-L120
train
rootpy/rootpy
rootpy/tree/treemodel.py
TreeModelMeta.to_struct
def to_struct(cls, name=None): """ Convert the TreeModel into a compiled C struct """ if name is None: name = cls.__name__ basic_attrs = dict([(attr_name, value) for attr_name, value in cls.get_attrs() if isinstance(value, Column)]) if not basic_attrs: return None src = 'struct {0} {{'.format(name) for attr_name, value in basic_attrs.items(): src += '{0} {1};'.format(value.type.typename, attr_name) src += '};' if ROOT.gROOT.ProcessLine(src) != 0: return None return getattr(ROOT, name, None)
python
def to_struct(cls, name=None): """ Convert the TreeModel into a compiled C struct """ if name is None: name = cls.__name__ basic_attrs = dict([(attr_name, value) for attr_name, value in cls.get_attrs() if isinstance(value, Column)]) if not basic_attrs: return None src = 'struct {0} {{'.format(name) for attr_name, value in basic_attrs.items(): src += '{0} {1};'.format(value.type.typename, attr_name) src += '};' if ROOT.gROOT.ProcessLine(src) != 0: return None return getattr(ROOT, name, None)
[ "def", "to_struct", "(", "cls", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "name", "=", "cls", ".", "__name__", "basic_attrs", "=", "dict", "(", "[", "(", "attr_name", ",", "value", ")", "for", "attr_name", ",", "value", "in", "cls", ".", "get_attrs", "(", ")", "if", "isinstance", "(", "value", ",", "Column", ")", "]", ")", "if", "not", "basic_attrs", ":", "return", "None", "src", "=", "'struct {0} {{'", ".", "format", "(", "name", ")", "for", "attr_name", ",", "value", "in", "basic_attrs", ".", "items", "(", ")", ":", "src", "+=", "'{0} {1};'", ".", "format", "(", "value", ".", "type", ".", "typename", ",", "attr_name", ")", "src", "+=", "'};'", "if", "ROOT", ".", "gROOT", ".", "ProcessLine", "(", "src", ")", "!=", "0", ":", "return", "None", "return", "getattr", "(", "ROOT", ",", "name", ",", "None", ")" ]
Convert the TreeModel into a compiled C struct
[ "Convert", "the", "TreeModel", "into", "a", "compiled", "C", "struct" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treemodel.py#L122-L139
train
rootpy/rootpy
rootpy/extern/hep/pdg.py
id_to_name
def id_to_name(id): """ Convert a PDG ID to a printable string. """ name = pdgid_names.get(id) if not name: name = repr(id) return name
python
def id_to_name(id): """ Convert a PDG ID to a printable string. """ name = pdgid_names.get(id) if not name: name = repr(id) return name
[ "def", "id_to_name", "(", "id", ")", ":", "name", "=", "pdgid_names", ".", "get", "(", "id", ")", "if", "not", "name", ":", "name", "=", "repr", "(", "id", ")", "return", "name" ]
Convert a PDG ID to a printable string.
[ "Convert", "a", "PDG", "ID", "to", "a", "printable", "string", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/hep/pdg.py#L36-L43
train
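A short sketch; the import path follows the path field above, PDG code 13 is the muon, and 999999 stands in for an arbitrary unknown ID.

.. sourcecode:: python

    from rootpy.extern.hep.pdg import id_to_name   # per the path field above

    print(id_to_name(13))        # printable name of the muon from the pdgid_names table
    print(id_to_name(999999))    # unknown IDs fall back to repr(): '999999'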
rootpy/rootpy
rootpy/extern/hep/pdg.py
id_to_root_name
def id_to_root_name(id): """ Convert a PDG ID to a string with root markup. """ name = root_names.get(id) if not name: name = repr(id) return name
python
def id_to_root_name(id): """ Convert a PDG ID to a string with root markup. """ name = root_names.get(id) if not name: name = repr(id) return name
[ "def", "id_to_root_name", "(", "id", ")", ":", "name", "=", "root_names", ".", "get", "(", "id", ")", "if", "not", "name", ":", "name", "=", "repr", "(", "id", ")", "return", "name" ]
Convert a PDG ID to a string with root markup.
[ "Convert", "a", "PDG", "ID", "to", "a", "string", "with", "root", "markup", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/hep/pdg.py#L46-L53
train
rootpy/rootpy
rootpy/utils/inject_closure.py
new_closure
def new_closure(vals): """ Build a new closure """ args = ','.join('x%i' % i for i in range(len(vals))) f = eval("lambda %s:lambda:(%s)" % (args, args)) if sys.version_info[0] >= 3: return f(*vals).__closure__ return f(*vals).func_closure
python
def new_closure(vals): """ Build a new closure """ args = ','.join('x%i' % i for i in range(len(vals))) f = eval("lambda %s:lambda:(%s)" % (args, args)) if sys.version_info[0] >= 3: return f(*vals).__closure__ return f(*vals).func_closure
[ "def", "new_closure", "(", "vals", ")", ":", "args", "=", "','", ".", "join", "(", "'x%i'", "%", "i", "for", "i", "in", "range", "(", "len", "(", "vals", ")", ")", ")", "f", "=", "eval", "(", "\"lambda %s:lambda:(%s)\"", "%", "(", "args", ",", "args", ")", ")", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "return", "f", "(", "*", "vals", ")", ".", "__closure__", "return", "f", "(", "*", "vals", ")", ".", "func_closure" ]
Build a new closure
[ "Build", "a", "new", "closure" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/inject_closure.py#L19-L27
train
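The closure builder is plain Python, so it can be exercised on its own; this sketch just checks that the returned cells hold the given values.

.. sourcecode:: python

    from rootpy.utils.inject_closure import new_closure   # per the path field above

    cells = new_closure([1, "two", 3.0])
    print(len(cells))                          # 3
    print([c.cell_contents for c in cells])    # [1, 'two', 3.0]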
rootpy/rootpy
rootpy/utils/inject_closure.py
_inject_closure_values_fix_closures
def _inject_closure_values_fix_closures(c, injected, **kwargs): """ Recursively fix closures Python bytecode for a closure looks like:: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> MAKE_CLOSURE or this in 3.6 (MAKE_CLOSURE is no longer an opcode):: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> LOAD_CONST <locals> MAKE_FUNCTION This function finds closures and adds the injected closed variables in the right place. """ code = c.code orig_len = len(code) for iback, (opcode, value) in enumerate(reversed(code)): i = orig_len - iback - 1 if opcode != MAKE_CLOSURE: continue codeobj = code[i-1-OPCODE_OFFSET] assert codeobj[0] == byteplay.LOAD_CONST build_tuple = code[i-2-OPCODE_OFFSET] assert build_tuple[0] == byteplay.BUILD_TUPLE n_closed = build_tuple[1] load_closures = code[i-2-OPCODE_OFFSET-n_closed:i-2-OPCODE_OFFSET] assert all(o == byteplay.LOAD_CLOSURE for o, _ in load_closures) newlcs = [(byteplay.LOAD_CLOSURE, inj) for inj in injected] code[i-2-OPCODE_OFFSET] = byteplay.BUILD_TUPLE, n_closed + len(injected) code[i-2-OPCODE_OFFSET:i-2-OPCODE_OFFSET] = newlcs _inject_closure_values_fix_code(codeobj[1], injected, **kwargs)
python
def _inject_closure_values_fix_closures(c, injected, **kwargs): """ Recursively fix closures Python bytecode for a closure looks like:: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> MAKE_CLOSURE or this in 3.6 (MAKE_CLOSURE is no longer an opcode):: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> LOAD_CONST <locals> MAKE_FUNCTION This function finds closures and adds the injected closed variables in the right place. """ code = c.code orig_len = len(code) for iback, (opcode, value) in enumerate(reversed(code)): i = orig_len - iback - 1 if opcode != MAKE_CLOSURE: continue codeobj = code[i-1-OPCODE_OFFSET] assert codeobj[0] == byteplay.LOAD_CONST build_tuple = code[i-2-OPCODE_OFFSET] assert build_tuple[0] == byteplay.BUILD_TUPLE n_closed = build_tuple[1] load_closures = code[i-2-OPCODE_OFFSET-n_closed:i-2-OPCODE_OFFSET] assert all(o == byteplay.LOAD_CLOSURE for o, _ in load_closures) newlcs = [(byteplay.LOAD_CLOSURE, inj) for inj in injected] code[i-2-OPCODE_OFFSET] = byteplay.BUILD_TUPLE, n_closed + len(injected) code[i-2-OPCODE_OFFSET:i-2-OPCODE_OFFSET] = newlcs _inject_closure_values_fix_code(codeobj[1], injected, **kwargs)
[ "def", "_inject_closure_values_fix_closures", "(", "c", ",", "injected", ",", "*", "*", "kwargs", ")", ":", "code", "=", "c", ".", "code", "orig_len", "=", "len", "(", "code", ")", "for", "iback", ",", "(", "opcode", ",", "value", ")", "in", "enumerate", "(", "reversed", "(", "code", ")", ")", ":", "i", "=", "orig_len", "-", "iback", "-", "1", "if", "opcode", "!=", "MAKE_CLOSURE", ":", "continue", "codeobj", "=", "code", "[", "i", "-", "1", "-", "OPCODE_OFFSET", "]", "assert", "codeobj", "[", "0", "]", "==", "byteplay", ".", "LOAD_CONST", "build_tuple", "=", "code", "[", "i", "-", "2", "-", "OPCODE_OFFSET", "]", "assert", "build_tuple", "[", "0", "]", "==", "byteplay", ".", "BUILD_TUPLE", "n_closed", "=", "build_tuple", "[", "1", "]", "load_closures", "=", "code", "[", "i", "-", "2", "-", "OPCODE_OFFSET", "-", "n_closed", ":", "i", "-", "2", "-", "OPCODE_OFFSET", "]", "assert", "all", "(", "o", "==", "byteplay", ".", "LOAD_CLOSURE", "for", "o", ",", "_", "in", "load_closures", ")", "newlcs", "=", "[", "(", "byteplay", ".", "LOAD_CLOSURE", ",", "inj", ")", "for", "inj", "in", "injected", "]", "code", "[", "i", "-", "2", "-", "OPCODE_OFFSET", "]", "=", "byteplay", ".", "BUILD_TUPLE", ",", "n_closed", "+", "len", "(", "injected", ")", "code", "[", "i", "-", "2", "-", "OPCODE_OFFSET", ":", "i", "-", "2", "-", "OPCODE_OFFSET", "]", "=", "newlcs", "_inject_closure_values_fix_code", "(", "codeobj", "[", "1", "]", ",", "injected", ",", "*", "*", "kwargs", ")" ]
Recursively fix closures Python bytecode for a closure looks like:: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> MAKE_CLOSURE or this in 3.6 (MAKE_CLOSURE is no longer an opcode):: LOAD_CLOSURE var1 BUILD_TUPLE <n_of_vars_closed_over> LOAD_CONST <code_object_containing_closure> LOAD_CONST <locals> MAKE_FUNCTION This function finds closures and adds the injected closed variables in the right place.
[ "Recursively", "fix", "closures" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/inject_closure.py#L30-L75
train
rootpy/rootpy
rootpy/utils/inject_closure.py
_inject_closure_values_fix_code
def _inject_closure_values_fix_code(c, injected, **kwargs): """ Fix code objects, recursively fixing any closures """ # Add more closure variables c.freevars += injected # Replace LOAD_GLOBAL with LOAD_DEREF (fetch from closure cells) # for named variables for i, (opcode, value) in enumerate(c.code): if opcode == byteplay.LOAD_GLOBAL and value in kwargs: c.code[i] = byteplay.LOAD_DEREF, value _inject_closure_values_fix_closures(c, injected, **kwargs) return c
python
def _inject_closure_values_fix_code(c, injected, **kwargs): """ Fix code objects, recursively fixing any closures """ # Add more closure variables c.freevars += injected # Replace LOAD_GLOBAL with LOAD_DEREF (fetch from closure cells) # for named variables for i, (opcode, value) in enumerate(c.code): if opcode == byteplay.LOAD_GLOBAL and value in kwargs: c.code[i] = byteplay.LOAD_DEREF, value _inject_closure_values_fix_closures(c, injected, **kwargs) return c
[ "def", "_inject_closure_values_fix_code", "(", "c", ",", "injected", ",", "*", "*", "kwargs", ")", ":", "# Add more closure variables", "c", ".", "freevars", "+=", "injected", "# Replace LOAD_GLOBAL with LOAD_DEREF (fetch from closure cells)", "# for named variables", "for", "i", ",", "(", "opcode", ",", "value", ")", "in", "enumerate", "(", "c", ".", "code", ")", ":", "if", "opcode", "==", "byteplay", ".", "LOAD_GLOBAL", "and", "value", "in", "kwargs", ":", "c", ".", "code", "[", "i", "]", "=", "byteplay", ".", "LOAD_DEREF", ",", "value", "_inject_closure_values_fix_closures", "(", "c", ",", "injected", ",", "*", "*", "kwargs", ")", "return", "c" ]
Fix code objects, recursively fixing any closures
[ "Fix", "code", "objects", "recursively", "fixing", "any", "closures" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/inject_closure.py#L78-L93
train
rootpy/rootpy
rootpy/utils/inject_closure.py
inject_closure_values
def inject_closure_values(func, **kwargs): """ Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. This can be useful for mocking and other nefarious activities. """ wrapped_by = None if isinstance(func, property): fget, fset, fdel = func.fget, func.fset, func.fdel if fget: fget = fix_func(fget, **kwargs) if fset: fset = fix_func(fset, **kwargs) if fdel: fdel = fix_func(fdel, **kwargs) wrapped_by = type(func) return wrapped_by(fget, fset, fdel) elif isinstance(func, (staticmethod, classmethod)): func = func.__func__ wrapped_by = type(func) newfunc = _inject_closure_values(func, **kwargs) if wrapped_by: newfunc = wrapped_by(newfunc) return newfunc
python
def inject_closure_values(func, **kwargs): """ Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. This can be useful for mocking and other nefarious activities. """ wrapped_by = None if isinstance(func, property): fget, fset, fdel = func.fget, func.fset, func.fdel if fget: fget = fix_func(fget, **kwargs) if fset: fset = fix_func(fset, **kwargs) if fdel: fdel = fix_func(fdel, **kwargs) wrapped_by = type(func) return wrapped_by(fget, fset, fdel) elif isinstance(func, (staticmethod, classmethod)): func = func.__func__ wrapped_by = type(func) newfunc = _inject_closure_values(func, **kwargs) if wrapped_by: newfunc = wrapped_by(newfunc) return newfunc
[ "def", "inject_closure_values", "(", "func", ",", "*", "*", "kwargs", ")", ":", "wrapped_by", "=", "None", "if", "isinstance", "(", "func", ",", "property", ")", ":", "fget", ",", "fset", ",", "fdel", "=", "func", ".", "fget", ",", "func", ".", "fset", ",", "func", ".", "fdel", "if", "fget", ":", "fget", "=", "fix_func", "(", "fget", ",", "*", "*", "kwargs", ")", "if", "fset", ":", "fset", "=", "fix_func", "(", "fset", ",", "*", "*", "kwargs", ")", "if", "fdel", ":", "fdel", "=", "fix_func", "(", "fdel", ",", "*", "*", "kwargs", ")", "wrapped_by", "=", "type", "(", "func", ")", "return", "wrapped_by", "(", "fget", ",", "fset", ",", "fdel", ")", "elif", "isinstance", "(", "func", ",", "(", "staticmethod", ",", "classmethod", ")", ")", ":", "func", "=", "func", ".", "__func__", "wrapped_by", "=", "type", "(", "func", ")", "newfunc", "=", "_inject_closure_values", "(", "func", ",", "*", "*", "kwargs", ")", "if", "wrapped_by", ":", "newfunc", "=", "wrapped_by", "(", "newfunc", ")", "return", "newfunc" ]
Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. This can be useful for mocking and other nefarious activities.
[ "Returns", "a", "new", "function", "identical", "to", "the", "previous", "one", "except", "that", "it", "acts", "as", "though", "global", "variables", "named", "in", "kwargs", "have", "been", "closed", "over", "with", "the", "values", "specified", "in", "the", "kwargs", "dictionary", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/inject_closure.py#L137-L165
train
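A sketch of injecting a value over a global name, following the behaviour described in the docstring above; whether it works on a given interpreter depends on the byteplay-based machinery in this module, so treat it as illustrative.

.. sourcecode:: python

    from rootpy.utils.inject_closure import inject_closure_values   # per the path field above

    GREETING = "hello"

    def greet(who):
        return "{0}, {1}".format(GREETING, who)

    # a copy of greet() that acts as if GREETING were closed over with "bonjour"
    french_greet = inject_closure_values(greet, GREETING="bonjour")
    print(greet("world"))           # hello, world
    print(french_greet("world"))    # bonjour, world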
rootpy/rootpy
rootpy/plotting/canvas.py
_PadBase.axes
def axes(self, ndim=1, xlimits=None, ylimits=None, zlimits=None, xbins=1, ybins=1, zbins=1): """ Create and return axes on this pad """ if xlimits is None: xlimits = (0, 1) if ylimits is None: ylimits = (0, 1) if zlimits is None: zlimits = (0, 1) if ndim == 1: from .hist import Hist hist = Hist(1, xlimits[0], xlimits[1]) elif ndim == 2: from .hist import Hist2D hist = Hist2D(1, xlimits[0], xlimits[1], 1, ylimits[0], ylimits[1]) elif ndim == 3: from .hist import Hist3D hist = Hist3D(1, xlimits[0], xlimits[1], 1, ylimits[0], ylimits[1], 1, zlimits[0], zlimits[1]) else: raise ValueError("ndim must be 1, 2, or 3") with self: hist.Draw('AXIS') xaxis = hist.xaxis yaxis = hist.yaxis if isinstance(xbins, (list, tuple)): xbins = array('d', xbins) if hasattr(xbins, '__iter__'): xaxis.Set(len(xbins) - 1, xbins) else: xaxis.Set(xbins, *xlimits) if ndim > 1: if isinstance(ybins, (list, tuple)): ybins = array('d', ybins) if hasattr(ybins, '__iter__'): yaxis.Set(len(ybins) - 1, ybins) else: yaxis.Set(ybins, *ylimits) else: yaxis.limits = ylimits yaxis.range_user = ylimits if ndim > 1: zaxis = hist.zaxis if ndim == 3: if isinstance(zbins, (list, tuple)): zbins = array('d', zbins) if hasattr(zbins, '__iter__'): zaxis.Set(len(zbins) - 1, zbins) else: zaxis.Set(zbins, *zlimits) else: zaxis.limits = zlimits zaxis.range_user = zlimits return xaxis, yaxis, zaxis return xaxis, yaxis
python
def axes(self, ndim=1, xlimits=None, ylimits=None, zlimits=None, xbins=1, ybins=1, zbins=1): """ Create and return axes on this pad """ if xlimits is None: xlimits = (0, 1) if ylimits is None: ylimits = (0, 1) if zlimits is None: zlimits = (0, 1) if ndim == 1: from .hist import Hist hist = Hist(1, xlimits[0], xlimits[1]) elif ndim == 2: from .hist import Hist2D hist = Hist2D(1, xlimits[0], xlimits[1], 1, ylimits[0], ylimits[1]) elif ndim == 3: from .hist import Hist3D hist = Hist3D(1, xlimits[0], xlimits[1], 1, ylimits[0], ylimits[1], 1, zlimits[0], zlimits[1]) else: raise ValueError("ndim must be 1, 2, or 3") with self: hist.Draw('AXIS') xaxis = hist.xaxis yaxis = hist.yaxis if isinstance(xbins, (list, tuple)): xbins = array('d', xbins) if hasattr(xbins, '__iter__'): xaxis.Set(len(xbins) - 1, xbins) else: xaxis.Set(xbins, *xlimits) if ndim > 1: if isinstance(ybins, (list, tuple)): ybins = array('d', ybins) if hasattr(ybins, '__iter__'): yaxis.Set(len(ybins) - 1, ybins) else: yaxis.Set(ybins, *ylimits) else: yaxis.limits = ylimits yaxis.range_user = ylimits if ndim > 1: zaxis = hist.zaxis if ndim == 3: if isinstance(zbins, (list, tuple)): zbins = array('d', zbins) if hasattr(zbins, '__iter__'): zaxis.Set(len(zbins) - 1, zbins) else: zaxis.Set(zbins, *zlimits) else: zaxis.limits = zlimits zaxis.range_user = zlimits return xaxis, yaxis, zaxis return xaxis, yaxis
[ "def", "axes", "(", "self", ",", "ndim", "=", "1", ",", "xlimits", "=", "None", ",", "ylimits", "=", "None", ",", "zlimits", "=", "None", ",", "xbins", "=", "1", ",", "ybins", "=", "1", ",", "zbins", "=", "1", ")", ":", "if", "xlimits", "is", "None", ":", "xlimits", "=", "(", "0", ",", "1", ")", "if", "ylimits", "is", "None", ":", "ylimits", "=", "(", "0", ",", "1", ")", "if", "zlimits", "is", "None", ":", "zlimits", "=", "(", "0", ",", "1", ")", "if", "ndim", "==", "1", ":", "from", ".", "hist", "import", "Hist", "hist", "=", "Hist", "(", "1", ",", "xlimits", "[", "0", "]", ",", "xlimits", "[", "1", "]", ")", "elif", "ndim", "==", "2", ":", "from", ".", "hist", "import", "Hist2D", "hist", "=", "Hist2D", "(", "1", ",", "xlimits", "[", "0", "]", ",", "xlimits", "[", "1", "]", ",", "1", ",", "ylimits", "[", "0", "]", ",", "ylimits", "[", "1", "]", ")", "elif", "ndim", "==", "3", ":", "from", ".", "hist", "import", "Hist3D", "hist", "=", "Hist3D", "(", "1", ",", "xlimits", "[", "0", "]", ",", "xlimits", "[", "1", "]", ",", "1", ",", "ylimits", "[", "0", "]", ",", "ylimits", "[", "1", "]", ",", "1", ",", "zlimits", "[", "0", "]", ",", "zlimits", "[", "1", "]", ")", "else", ":", "raise", "ValueError", "(", "\"ndim must be 1, 2, or 3\"", ")", "with", "self", ":", "hist", ".", "Draw", "(", "'AXIS'", ")", "xaxis", "=", "hist", ".", "xaxis", "yaxis", "=", "hist", ".", "yaxis", "if", "isinstance", "(", "xbins", ",", "(", "list", ",", "tuple", ")", ")", ":", "xbins", "=", "array", "(", "'d'", ",", "xbins", ")", "if", "hasattr", "(", "xbins", ",", "'__iter__'", ")", ":", "xaxis", ".", "Set", "(", "len", "(", "xbins", ")", "-", "1", ",", "xbins", ")", "else", ":", "xaxis", ".", "Set", "(", "xbins", ",", "*", "xlimits", ")", "if", "ndim", ">", "1", ":", "if", "isinstance", "(", "ybins", ",", "(", "list", ",", "tuple", ")", ")", ":", "ybins", "=", "array", "(", "'d'", ",", "ybins", ")", "if", "hasattr", "(", "ybins", ",", "'__iter__'", ")", ":", "yaxis", ".", "Set", "(", "len", "(", "ybins", ")", "-", "1", ",", "ybins", ")", "else", ":", "yaxis", ".", "Set", "(", "ybins", ",", "*", "ylimits", ")", "else", ":", "yaxis", ".", "limits", "=", "ylimits", "yaxis", ".", "range_user", "=", "ylimits", "if", "ndim", ">", "1", ":", "zaxis", "=", "hist", ".", "zaxis", "if", "ndim", "==", "3", ":", "if", "isinstance", "(", "zbins", ",", "(", "list", ",", "tuple", ")", ")", ":", "zbins", "=", "array", "(", "'d'", ",", "zbins", ")", "if", "hasattr", "(", "zbins", ",", "'__iter__'", ")", ":", "zaxis", ".", "Set", "(", "len", "(", "zbins", ")", "-", "1", ",", "zbins", ")", "else", ":", "zaxis", ".", "Set", "(", "zbins", ",", "*", "zlimits", ")", "else", ":", "zaxis", ".", "limits", "=", "zlimits", "zaxis", ".", "range_user", "=", "zlimits", "return", "xaxis", ",", "yaxis", ",", "zaxis", "return", "xaxis", ",", "yaxis" ]
Create and return axes on this pad
[ "Create", "and", "return", "axes", "on", "this", "pad" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/canvas.py#L34-L93
train
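A sketch of requesting axes on a canvas; Canvas is assumed to come from rootpy.plotting and to inherit this _PadBase.axes method, and the ranges and titles are arbitrary.

.. sourcecode:: python

    from rootpy.plotting import Canvas      # assumed rootpy import

    c = Canvas()
    # a one-dimensional frame with custom x and y ranges
    xaxis, yaxis = c.axes(ndim=1, xlimits=(0, 100), ylimits=(0, 1.5))
    xaxis.SetTitle("mass [GeV]")
    yaxis.SetTitle("events")
    c.SaveAs("frame.png")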
rootpy/rootpy
rootpy/root2hdf5.py
root2hdf5
def root2hdf5(rfile, hfile, rpath='', entries=-1, userfunc=None, show_progress=False, ignore_exception=False, **kwargs): """ Convert all trees in a ROOT file into tables in an HDF5 file. Parameters ---------- rfile : string or asrootpy'd ROOT File A ROOT File handle or string path to an existing ROOT file. hfile : string or PyTables HDF5 File A PyTables HDF5 File handle or string path to an existing HDF5 file. rpath : string, optional (default='') Top level path to begin traversal through the ROOT file. By default convert everything in and below the root directory. entries : int, optional (default=-1) The number of entries to read at once while converting a ROOT TTree into an HDF5 table. By default read the entire TTree into memory (this may not be desired if your TTrees are large). userfunc : callable, optional (default=None) A function that will be called on every tree and that must return a tree or list of trees that will be converted instead of the original tree. show_progress : bool, optional (default=False) If True, then display and update a progress bar on stdout as each tree is converted. ignore_exception : bool, optional (default=False) If True, then ignore exceptions raised in converting trees and instead skip such trees. kwargs : dict, optional Additional keyword arguments for the tree2array function. """ own_rootfile = False if isinstance(rfile, string_types): rfile = root_open(rfile) own_rootfile = True own_h5file = False if isinstance(hfile, string_types): hfile = tables_open(filename=hfile, mode="w", title="Data") own_h5file = True for dirpath, dirnames, treenames in rfile.walk( rpath, class_ref=QROOT.TTree): # skip directories w/o trees if not treenames: continue treenames.sort() group_where = '/' + os.path.dirname(dirpath) group_name = os.path.basename(dirpath) if not group_name: group = hfile.root elif TABLES_NEW_API: group = hfile.create_group(group_where, group_name, createparents=True) else: group = hfile.createGroup(group_where, group_name) ntrees = len(treenames) log.info( "Will convert {0:d} tree{1} in {2}".format( ntrees, 's' if ntrees != 1 else '', os.path.join(group_where, group_name))) for treename in treenames: input_tree = rfile.Get(os.path.join(dirpath, treename)) if userfunc is not None: tmp_file = TemporaryFile() # call user-defined function on tree and get output trees log.info("Calling user function on tree '{0}'".format( input_tree.GetName())) trees = userfunc(input_tree) if not isinstance(trees, list): trees = [trees] else: trees = [input_tree] tmp_file = None for tree in trees: try: tree2hdf5(tree, hfile, group=group, entries=entries, show_progress=show_progress, **kwargs) except Exception as e: if ignore_exception: log.error("Failed to convert tree '{0}': {1}".format( tree.GetName(), str(e))) else: raise input_tree.Delete() if userfunc is not None: for tree in trees: tree.Delete() tmp_file.Close() if own_h5file: hfile.close() if own_rootfile: rfile.Close()
python
def root2hdf5(rfile, hfile, rpath='', entries=-1, userfunc=None, show_progress=False, ignore_exception=False, **kwargs): """ Convert all trees in a ROOT file into tables in an HDF5 file. Parameters ---------- rfile : string or asrootpy'd ROOT File A ROOT File handle or string path to an existing ROOT file. hfile : string or PyTables HDF5 File A PyTables HDF5 File handle or string path to an existing HDF5 file. rpath : string, optional (default='') Top level path to begin traversal through the ROOT file. By default convert everything in and below the root directory. entries : int, optional (default=-1) The number of entries to read at once while converting a ROOT TTree into an HDF5 table. By default read the entire TTree into memory (this may not be desired if your TTrees are large). userfunc : callable, optional (default=None) A function that will be called on every tree and that must return a tree or list of trees that will be converted instead of the original tree. show_progress : bool, optional (default=False) If True, then display and update a progress bar on stdout as each tree is converted. ignore_exception : bool, optional (default=False) If True, then ignore exceptions raised in converting trees and instead skip such trees. kwargs : dict, optional Additional keyword arguments for the tree2array function. """ own_rootfile = False if isinstance(rfile, string_types): rfile = root_open(rfile) own_rootfile = True own_h5file = False if isinstance(hfile, string_types): hfile = tables_open(filename=hfile, mode="w", title="Data") own_h5file = True for dirpath, dirnames, treenames in rfile.walk( rpath, class_ref=QROOT.TTree): # skip directories w/o trees if not treenames: continue treenames.sort() group_where = '/' + os.path.dirname(dirpath) group_name = os.path.basename(dirpath) if not group_name: group = hfile.root elif TABLES_NEW_API: group = hfile.create_group(group_where, group_name, createparents=True) else: group = hfile.createGroup(group_where, group_name) ntrees = len(treenames) log.info( "Will convert {0:d} tree{1} in {2}".format( ntrees, 's' if ntrees != 1 else '', os.path.join(group_where, group_name))) for treename in treenames: input_tree = rfile.Get(os.path.join(dirpath, treename)) if userfunc is not None: tmp_file = TemporaryFile() # call user-defined function on tree and get output trees log.info("Calling user function on tree '{0}'".format( input_tree.GetName())) trees = userfunc(input_tree) if not isinstance(trees, list): trees = [trees] else: trees = [input_tree] tmp_file = None for tree in trees: try: tree2hdf5(tree, hfile, group=group, entries=entries, show_progress=show_progress, **kwargs) except Exception as e: if ignore_exception: log.error("Failed to convert tree '{0}': {1}".format( tree.GetName(), str(e))) else: raise input_tree.Delete() if userfunc is not None: for tree in trees: tree.Delete() tmp_file.Close() if own_h5file: hfile.close() if own_rootfile: rfile.Close()
[ "def", "root2hdf5", "(", "rfile", ",", "hfile", ",", "rpath", "=", "''", ",", "entries", "=", "-", "1", ",", "userfunc", "=", "None", ",", "show_progress", "=", "False", ",", "ignore_exception", "=", "False", ",", "*", "*", "kwargs", ")", ":", "own_rootfile", "=", "False", "if", "isinstance", "(", "rfile", ",", "string_types", ")", ":", "rfile", "=", "root_open", "(", "rfile", ")", "own_rootfile", "=", "True", "own_h5file", "=", "False", "if", "isinstance", "(", "hfile", ",", "string_types", ")", ":", "hfile", "=", "tables_open", "(", "filename", "=", "hfile", ",", "mode", "=", "\"w\"", ",", "title", "=", "\"Data\"", ")", "own_h5file", "=", "True", "for", "dirpath", ",", "dirnames", ",", "treenames", "in", "rfile", ".", "walk", "(", "rpath", ",", "class_ref", "=", "QROOT", ".", "TTree", ")", ":", "# skip directories w/o trees", "if", "not", "treenames", ":", "continue", "treenames", ".", "sort", "(", ")", "group_where", "=", "'/'", "+", "os", ".", "path", ".", "dirname", "(", "dirpath", ")", "group_name", "=", "os", ".", "path", ".", "basename", "(", "dirpath", ")", "if", "not", "group_name", ":", "group", "=", "hfile", ".", "root", "elif", "TABLES_NEW_API", ":", "group", "=", "hfile", ".", "create_group", "(", "group_where", ",", "group_name", ",", "createparents", "=", "True", ")", "else", ":", "group", "=", "hfile", ".", "createGroup", "(", "group_where", ",", "group_name", ")", "ntrees", "=", "len", "(", "treenames", ")", "log", ".", "info", "(", "\"Will convert {0:d} tree{1} in {2}\"", ".", "format", "(", "ntrees", ",", "'s'", "if", "ntrees", "!=", "1", "else", "''", ",", "os", ".", "path", ".", "join", "(", "group_where", ",", "group_name", ")", ")", ")", "for", "treename", "in", "treenames", ":", "input_tree", "=", "rfile", ".", "Get", "(", "os", ".", "path", ".", "join", "(", "dirpath", ",", "treename", ")", ")", "if", "userfunc", "is", "not", "None", ":", "tmp_file", "=", "TemporaryFile", "(", ")", "# call user-defined function on tree and get output trees", "log", ".", "info", "(", "\"Calling user function on tree '{0}'\"", ".", "format", "(", "input_tree", ".", "GetName", "(", ")", ")", ")", "trees", "=", "userfunc", "(", "input_tree", ")", "if", "not", "isinstance", "(", "trees", ",", "list", ")", ":", "trees", "=", "[", "trees", "]", "else", ":", "trees", "=", "[", "input_tree", "]", "tmp_file", "=", "None", "for", "tree", "in", "trees", ":", "try", ":", "tree2hdf5", "(", "tree", ",", "hfile", ",", "group", "=", "group", ",", "entries", "=", "entries", ",", "show_progress", "=", "show_progress", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "ignore_exception", ":", "log", ".", "error", "(", "\"Failed to convert tree '{0}': {1}\"", ".", "format", "(", "tree", ".", "GetName", "(", ")", ",", "str", "(", "e", ")", ")", ")", "else", ":", "raise", "input_tree", ".", "Delete", "(", ")", "if", "userfunc", "is", "not", "None", ":", "for", "tree", "in", "trees", ":", "tree", ".", "Delete", "(", ")", "tmp_file", ".", "Close", "(", ")", "if", "own_h5file", ":", "hfile", ".", "close", "(", ")", "if", "own_rootfile", ":", "rfile", ".", "Close", "(", ")" ]
Convert all trees in a ROOT file into tables in an HDF5 file. Parameters ---------- rfile : string or asrootpy'd ROOT File A ROOT File handle or string path to an existing ROOT file. hfile : string or PyTables HDF5 File A PyTables HDF5 File handle or string path to an existing HDF5 file. rpath : string, optional (default='') Top level path to begin traversal through the ROOT file. By default convert everything in and below the root directory. entries : int, optional (default=-1) The number of entries to read at once while converting a ROOT TTree into an HDF5 table. By default read the entire TTree into memory (this may not be desired if your TTrees are large). userfunc : callable, optional (default=None) A function that will be called on every tree and that must return a tree or list of trees that will be converted instead of the original tree. show_progress : bool, optional (default=False) If True, then display and update a progress bar on stdout as each tree is converted. ignore_exception : bool, optional (default=False) If True, then ignore exceptions raised in converting trees and instead skip such trees. kwargs : dict, optional Additional keyword arguments for the tree2array function.
[ "Convert", "all", "trees", "in", "a", "ROOT", "file", "into", "tables", "in", "an", "HDF5", "file", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/root2hdf5.py#L190-L310
train
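A usage sketch for root2hdf5 as documented above; it assumes rootpy's optional HDF5 dependencies (PyTables and root_numpy) are installed, and 'input.root'/'output.h5' are placeholder file names.

from rootpy.root2hdf5 import root2hdf5

root2hdf5(
    'input.root',           # existing ROOT file (path or already-open File)
    'output.h5',            # HDF5 output file to create
    entries=100000,         # read 100k tree entries per chunk instead of all at once
    show_progress=True,     # progress bar per converted tree
    ignore_exception=True)  # log and skip trees that fail to convert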
rootpy/rootpy
rootpy/plotting/graph.py
_Graph1DBase.Reverse
def Reverse(self, copy=False): """ Reverse the order of the points """ numPoints = self.GetN() if copy: revGraph = self.Clone() else: revGraph = self X = self.GetX() EXlow = self.GetEXlow() EXhigh = self.GetEXhigh() Y = self.GetY() EYlow = self.GetEYlow() EYhigh = self.GetEYhigh() for i in range(numPoints): index = numPoints - 1 - i revGraph.SetPoint(i, X[index], Y[index]) revGraph.SetPointError( i, EXlow[index], EXhigh[index], EYlow[index], EYhigh[index]) return revGraph
python
def Reverse(self, copy=False): """ Reverse the order of the points """ numPoints = self.GetN() if copy: revGraph = self.Clone() else: revGraph = self X = self.GetX() EXlow = self.GetEXlow() EXhigh = self.GetEXhigh() Y = self.GetY() EYlow = self.GetEYlow() EYhigh = self.GetEYhigh() for i in range(numPoints): index = numPoints - 1 - i revGraph.SetPoint(i, X[index], Y[index]) revGraph.SetPointError( i, EXlow[index], EXhigh[index], EYlow[index], EYhigh[index]) return revGraph
[ "def", "Reverse", "(", "self", ",", "copy", "=", "False", ")", ":", "numPoints", "=", "self", ".", "GetN", "(", ")", "if", "copy", ":", "revGraph", "=", "self", ".", "Clone", "(", ")", "else", ":", "revGraph", "=", "self", "X", "=", "self", ".", "GetX", "(", ")", "EXlow", "=", "self", ".", "GetEXlow", "(", ")", "EXhigh", "=", "self", ".", "GetEXhigh", "(", ")", "Y", "=", "self", ".", "GetY", "(", ")", "EYlow", "=", "self", ".", "GetEYlow", "(", ")", "EYhigh", "=", "self", ".", "GetEYhigh", "(", ")", "for", "i", "in", "range", "(", "numPoints", ")", ":", "index", "=", "numPoints", "-", "1", "-", "i", "revGraph", ".", "SetPoint", "(", "i", ",", "X", "[", "index", "]", ",", "Y", "[", "index", "]", ")", "revGraph", ".", "SetPointError", "(", "i", ",", "EXlow", "[", "index", "]", ",", "EXhigh", "[", "index", "]", ",", "EYlow", "[", "index", "]", ",", "EYhigh", "[", "index", "]", ")", "return", "revGraph" ]
Reverse the order of the points
[ "Reverse", "the", "order", "of", "the", "points" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/graph.py#L493-L515
train
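A short sketch of Reverse with copy=True so the original graph is left untouched. It assumes Graph(n) pre-allocates n points, as in the underlying ROOT TGraph constructors.

from rootpy.plotting import Graph

g = Graph(3)
for i, (x, y) in enumerate([(0., 1.), (1., 4.), (2., 9.)]):
    g.SetPoint(i, x, y)

rev = g.Reverse(copy=True)  # new graph; point order is reversed
print([(p.x.value, p.y.value) for p in rev])  # [(2.0, 9.0), (1.0, 4.0), (0.0, 1.0)]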
rootpy/rootpy
rootpy/plotting/graph.py
_Graph1DBase.Shift
def Shift(self, value, copy=False): """ Shift the graph left or right by value """ numPoints = self.GetN() if copy: shiftGraph = self.Clone() else: shiftGraph = self X = self.GetX() EXlow = self.GetEXlow() EXhigh = self.GetEXhigh() Y = self.GetY() EYlow = self.GetEYlow() EYhigh = self.GetEYhigh() for i in range(numPoints): shiftGraph.SetPoint(i, X[i] + value, Y[i]) shiftGraph.SetPointError( i, EXlow[i], EXhigh[i], EYlow[i], EYhigh[i]) return shiftGraph
python
def Shift(self, value, copy=False): """ Shift the graph left or right by value """ numPoints = self.GetN() if copy: shiftGraph = self.Clone() else: shiftGraph = self X = self.GetX() EXlow = self.GetEXlow() EXhigh = self.GetEXhigh() Y = self.GetY() EYlow = self.GetEYlow() EYhigh = self.GetEYhigh() for i in range(numPoints): shiftGraph.SetPoint(i, X[i] + value, Y[i]) shiftGraph.SetPointError( i, EXlow[i], EXhigh[i], EYlow[i], EYhigh[i]) return shiftGraph
[ "def", "Shift", "(", "self", ",", "value", ",", "copy", "=", "False", ")", ":", "numPoints", "=", "self", ".", "GetN", "(", ")", "if", "copy", ":", "shiftGraph", "=", "self", ".", "Clone", "(", ")", "else", ":", "shiftGraph", "=", "self", "X", "=", "self", ".", "GetX", "(", ")", "EXlow", "=", "self", ".", "GetEXlow", "(", ")", "EXhigh", "=", "self", ".", "GetEXhigh", "(", ")", "Y", "=", "self", ".", "GetY", "(", ")", "EYlow", "=", "self", ".", "GetEYlow", "(", ")", "EYhigh", "=", "self", ".", "GetEYhigh", "(", ")", "for", "i", "in", "range", "(", "numPoints", ")", ":", "shiftGraph", ".", "SetPoint", "(", "i", ",", "X", "[", "i", "]", "+", "value", ",", "Y", "[", "i", "]", ")", "shiftGraph", ".", "SetPointError", "(", "i", ",", "EXlow", "[", "i", "]", ",", "EXhigh", "[", "i", "]", ",", "EYlow", "[", "i", "]", ",", "EYhigh", "[", "i", "]", ")", "return", "shiftGraph" ]
Shift the graph left or right by value
[ "Shift", "the", "graph", "left", "or", "right", "by", "value" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/graph.py#L586-L607
train
rootpy/rootpy
rootpy/plotting/graph.py
_Graph1DBase.Integrate
def Integrate(self): """ Integrate using the trapazoidal method """ area = 0. X = self.GetX() Y = self.GetY() for i in range(self.GetN() - 1): area += (X[i + 1] - X[i]) * (Y[i] + Y[i + 1]) / 2. return area
python
def Integrate(self): """ Integrate using the trapazoidal method """ area = 0. X = self.GetX() Y = self.GetY() for i in range(self.GetN() - 1): area += (X[i + 1] - X[i]) * (Y[i] + Y[i + 1]) / 2. return area
[ "def", "Integrate", "(", "self", ")", ":", "area", "=", "0.", "X", "=", "self", ".", "GetX", "(", ")", "Y", "=", "self", ".", "GetY", "(", ")", "for", "i", "in", "range", "(", "self", ".", "GetN", "(", ")", "-", "1", ")", ":", "area", "+=", "(", "X", "[", "i", "+", "1", "]", "-", "X", "[", "i", "]", ")", "*", "(", "Y", "[", "i", "]", "+", "Y", "[", "i", "+", "1", "]", ")", "/", "2.", "return", "area" ]
Integrate using the trapezoidal method
[ "Integrate", "using", "the", "trapazoidal", "method" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/graph.py#L609-L618
train
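A worked check of the trapezoidal Integrate above on a straight line, where the exact area is easy to compute by hand; Graph(n) pre-allocating n points is assumed as before.

from rootpy.plotting import Graph

g = Graph(3)
for i, (x, y) in enumerate([(0., 1.), (1., 3.), (2., 5.)]):
    g.SetPoint(i, x, y)

# trapezoid rule: (1-0)*(1+3)/2 + (2-1)*(3+5)/2 = 2 + 4 = 6
print(g.Integrate())  # 6.0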
rootpy/rootpy
rootpy/plotting/graph.py
_Graph1DBase.Append
def Append(self, other): """ Append points from another graph """ orig_len = len(self) self.Set(orig_len + len(other)) ipoint = orig_len if hasattr(self, 'SetPointError'): for point in other: self.SetPoint(ipoint, point.x.value, point.y.value) self.SetPointError( ipoint, point.x.error_low, point.x.error_hi, point.y.error_low, point.y.error_hi) ipoint += 1 else: for point in other: self.SetPoint(ipoint, point.x.value, point.y.value) ipoint += 1
python
def Append(self, other): """ Append points from another graph """ orig_len = len(self) self.Set(orig_len + len(other)) ipoint = orig_len if hasattr(self, 'SetPointError'): for point in other: self.SetPoint(ipoint, point.x.value, point.y.value) self.SetPointError( ipoint, point.x.error_low, point.x.error_hi, point.y.error_low, point.y.error_hi) ipoint += 1 else: for point in other: self.SetPoint(ipoint, point.x.value, point.y.value) ipoint += 1
[ "def", "Append", "(", "self", ",", "other", ")", ":", "orig_len", "=", "len", "(", "self", ")", "self", ".", "Set", "(", "orig_len", "+", "len", "(", "other", ")", ")", "ipoint", "=", "orig_len", "if", "hasattr", "(", "self", ",", "'SetPointError'", ")", ":", "for", "point", "in", "other", ":", "self", ".", "SetPoint", "(", "ipoint", ",", "point", ".", "x", ".", "value", ",", "point", ".", "y", ".", "value", ")", "self", ".", "SetPointError", "(", "ipoint", ",", "point", ".", "x", ".", "error_low", ",", "point", ".", "x", ".", "error_hi", ",", "point", ".", "y", ".", "error_low", ",", "point", ".", "y", ".", "error_hi", ")", "ipoint", "+=", "1", "else", ":", "for", "point", "in", "other", ":", "self", ".", "SetPoint", "(", "ipoint", ",", "point", ".", "x", ".", "value", ",", "point", ".", "y", ".", "value", ")", "ipoint", "+=", "1" ]
Append points from another graph
[ "Append", "points", "from", "another", "graph" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/graph.py#L620-L638
train
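A sketch of Append concatenating the points of one graph onto another in place; same Graph(n) assumption as in the previous examples.

from rootpy.plotting import Graph

g1 = Graph(2)
g1.SetPoint(0, 0., 0.)
g1.SetPoint(1, 1., 1.)

g2 = Graph(2)
g2.SetPoint(0, 2., 4.)
g2.SetPoint(1, 3., 9.)

g1.Append(g2)   # g1 now holds all four points
print(len(g1))  # 4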
rootpy/rootpy
rootpy/memory/keepalive.py
keepalive
def keepalive(nurse, *patients): """ Keep ``patients`` alive at least as long as ``nurse`` is around using a ``WeakKeyDictionary``. """ if DISABLED: return if hashable(nurse): hashable_patients = [] for p in patients: if hashable(p): log.debug("Keeping {0} alive for lifetime of {1}".format(p, nurse)) hashable_patients.append(p) else: log.warning("Unable to keep unhashable object {0} " "alive for lifetime of {1}".format(p, nurse)) KEEPALIVE.setdefault(nurse, set()).update(hashable_patients) else: log.warning("Unable to keep objects alive for lifetime of " "unhashable object {0}".format(nurse))
python
def keepalive(nurse, *patients): """ Keep ``patients`` alive at least as long as ``nurse`` is around using a ``WeakKeyDictionary``. """ if DISABLED: return if hashable(nurse): hashable_patients = [] for p in patients: if hashable(p): log.debug("Keeping {0} alive for lifetime of {1}".format(p, nurse)) hashable_patients.append(p) else: log.warning("Unable to keep unhashable object {0} " "alive for lifetime of {1}".format(p, nurse)) KEEPALIVE.setdefault(nurse, set()).update(hashable_patients) else: log.warning("Unable to keep objects alive for lifetime of " "unhashable object {0}".format(nurse))
[ "def", "keepalive", "(", "nurse", ",", "*", "patients", ")", ":", "if", "DISABLED", ":", "return", "if", "hashable", "(", "nurse", ")", ":", "hashable_patients", "=", "[", "]", "for", "p", "in", "patients", ":", "if", "hashable", "(", "p", ")", ":", "log", ".", "debug", "(", "\"Keeping {0} alive for lifetime of {1}\"", ".", "format", "(", "p", ",", "nurse", ")", ")", "hashable_patients", ".", "append", "(", "p", ")", "else", ":", "log", ".", "warning", "(", "\"Unable to keep unhashable object {0} \"", "\"alive for lifetime of {1}\"", ".", "format", "(", "p", ",", "nurse", ")", ")", "KEEPALIVE", ".", "setdefault", "(", "nurse", ",", "set", "(", ")", ")", ".", "update", "(", "hashable_patients", ")", "else", ":", "log", ".", "warning", "(", "\"Unable to keep objects alive for lifetime of \"", "\"unhashable object {0}\"", ".", "format", "(", "nurse", ")", ")" ]
Keep ``patients`` alive at least as long as ``nurse`` is around using a ``WeakKeyDictionary``.
[ "Keep", "patients", "alive", "at", "least", "as", "long", "as", "nurse", "is", "around", "using", "a", "WeakKeyDictionary", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/memory/keepalive.py#L26-L45
train
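A sketch of keepalive tying the lifetime of a drawn histogram to its canvas, so the histogram is not garbage collected while the canvas still needs it; rootpy often does this internally, and the explicit call here is purely illustrative.

from rootpy.memory.keepalive import keepalive
from rootpy.plotting import Canvas, Hist

canvas = Canvas()
hist = Hist(10, 0, 1)
hist.Draw()
# keep `hist` alive at least as long as `canvas`
keepalive(canvas, hist)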
rootpy/rootpy
rootpy/plotting/hist.py
canonify_slice
def canonify_slice(s, n): """ Convert a slice object into a canonical form to simplify treatment in histogram bin content and edge slicing. """ if isinstance(s, (int, long)): return canonify_slice(slice(s, s + 1, None), n) start = s.start % n if s.start is not None else 0 stop = s.stop % n if s.stop is not None else n step = s.step if s.step is not None else 1 return slice(start, stop, step)
python
def canonify_slice(s, n): """ Convert a slice object into a canonical form to simplify treatment in histogram bin content and edge slicing. """ if isinstance(s, (int, long)): return canonify_slice(slice(s, s + 1, None), n) start = s.start % n if s.start is not None else 0 stop = s.stop % n if s.stop is not None else n step = s.step if s.step is not None else 1 return slice(start, stop, step)
[ "def", "canonify_slice", "(", "s", ",", "n", ")", ":", "if", "isinstance", "(", "s", ",", "(", "int", ",", "long", ")", ")", ":", "return", "canonify_slice", "(", "slice", "(", "s", ",", "s", "+", "1", ",", "None", ")", ",", "n", ")", "start", "=", "s", ".", "start", "%", "n", "if", "s", ".", "start", "is", "not", "None", "else", "0", "stop", "=", "s", ".", "stop", "%", "n", "if", "s", ".", "stop", "is", "not", "None", "else", "n", "step", "=", "s", ".", "step", "if", "s", ".", "step", "is", "not", "None", "else", "1", "return", "slice", "(", "start", ",", "stop", ",", "step", ")" ]
Convert a slice object into a canonical form to simplify treatment in histogram bin content and edge slicing.
[ "Convert", "a", "slice", "object", "into", "a", "canonical", "form", "to", "simplify", "treatment", "in", "histogram", "bin", "content", "and", "edge", "slicing", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L37-L48
train
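A worked example of canonify_slice resolving negative and open-ended slices into explicit (start, stop, step) values for n bins. Importing this module-level helper directly is an assumption; it is primarily used internally for histogram slicing.

from rootpy.plotting.hist import canonify_slice

print(canonify_slice(slice(-3, None), 10))    # slice(7, 10, 1)
print(canonify_slice(slice(None, None), 10))  # slice(0, 10, 1)
print(canonify_slice(2, 10))                  # slice(2, 3, 1): an int becomes a one-bin slice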
rootpy/rootpy
rootpy/plotting/hist.py
bin_to_edge_slice
def bin_to_edge_slice(s, n): """ Convert a bin slice into a bin edge slice. """ s = canonify_slice(s, n) start = s.start stop = s.stop if start > stop: _stop = start + 1 start = stop + 1 stop = _stop start = max(start - 1, 0) step = abs(s.step) if stop <= 1 or start >= n - 1 or stop == start + 1: return slice(0, None, min(step, n - 2)) s = slice(start, stop, abs(s.step)) if len(range(*s.indices(n - 1))) < 2: return slice(start, stop, stop - start - 1) return s
python
def bin_to_edge_slice(s, n): """ Convert a bin slice into a bin edge slice. """ s = canonify_slice(s, n) start = s.start stop = s.stop if start > stop: _stop = start + 1 start = stop + 1 stop = _stop start = max(start - 1, 0) step = abs(s.step) if stop <= 1 or start >= n - 1 or stop == start + 1: return slice(0, None, min(step, n - 2)) s = slice(start, stop, abs(s.step)) if len(range(*s.indices(n - 1))) < 2: return slice(start, stop, stop - start - 1) return s
[ "def", "bin_to_edge_slice", "(", "s", ",", "n", ")", ":", "s", "=", "canonify_slice", "(", "s", ",", "n", ")", "start", "=", "s", ".", "start", "stop", "=", "s", ".", "stop", "if", "start", ">", "stop", ":", "_stop", "=", "start", "+", "1", "start", "=", "stop", "+", "1", "stop", "=", "_stop", "start", "=", "max", "(", "start", "-", "1", ",", "0", ")", "step", "=", "abs", "(", "s", ".", "step", ")", "if", "stop", "<=", "1", "or", "start", ">=", "n", "-", "1", "or", "stop", "==", "start", "+", "1", ":", "return", "slice", "(", "0", ",", "None", ",", "min", "(", "step", ",", "n", "-", "2", ")", ")", "s", "=", "slice", "(", "start", ",", "stop", ",", "abs", "(", "s", ".", "step", ")", ")", "if", "len", "(", "range", "(", "*", "s", ".", "indices", "(", "n", "-", "1", ")", ")", ")", "<", "2", ":", "return", "slice", "(", "start", ",", "stop", ",", "stop", "-", "start", "-", "1", ")", "return", "s" ]
Convert a bin slice into a bin edge slice.
[ "Convert", "a", "bin", "slice", "into", "a", "bin", "edge", "slice", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L51-L69
train
rootpy/rootpy
rootpy/plotting/hist.py
histogram
def histogram(data, *args, **kwargs): """ Create and fill a one-dimensional histogram. The same arguments as the ``Hist`` class are expected. If the number of bins and the ranges are not specified they are automatically deduced with the ``autobinning`` function using the method specified by the ``binning`` argument. Only one-dimensional histogramming is supported. """ from .autobinning import autobinning dim = kwargs.pop('dim', 1) if dim != 1: raise NotImplementedError if 'binning' in kwargs: args = autobinning(data, kwargs['binning']) del kwargs['binning'] histo = Hist(*args, **kwargs) for d in data: histo.Fill(d) return list(histo.xedgesl()), histo
python
def histogram(data, *args, **kwargs): """ Create and fill a one-dimensional histogram. The same arguments as the ``Hist`` class are expected. If the number of bins and the ranges are not specified they are automatically deduced with the ``autobinning`` function using the method specified by the ``binning`` argument. Only one-dimensional histogramming is supported. """ from .autobinning import autobinning dim = kwargs.pop('dim', 1) if dim != 1: raise NotImplementedError if 'binning' in kwargs: args = autobinning(data, kwargs['binning']) del kwargs['binning'] histo = Hist(*args, **kwargs) for d in data: histo.Fill(d) return list(histo.xedgesl()), histo
[ "def", "histogram", "(", "data", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", "autobinning", "import", "autobinning", "dim", "=", "kwargs", ".", "pop", "(", "'dim'", ",", "1", ")", "if", "dim", "!=", "1", ":", "raise", "NotImplementedError", "if", "'binning'", "in", "kwargs", ":", "args", "=", "autobinning", "(", "data", ",", "kwargs", "[", "'binning'", "]", ")", "del", "kwargs", "[", "'binning'", "]", "histo", "=", "Hist", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "d", "in", "data", ":", "histo", ".", "Fill", "(", "d", ")", "return", "list", "(", "histo", ".", "xedgesl", "(", ")", ")", ",", "histo" ]
Create and fill a one-dimensional histogram. The same arguments as the ``Hist`` class are expected. If the number of bins and the ranges are not specified they are automatically deduced with the ``autobinning`` function using the method specified by the ``binning`` argument. Only one-dimensional histogramming is supported.
[ "Create", "and", "fill", "a", "one", "-", "dimensional", "histogram", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L2649-L2669
train
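A sketch of the histogram convenience function with explicit binning; the automatic 'binning' keyword described above is not exercised here since its valid method names are not shown in this record.

import random
from rootpy.plotting.hist import histogram

data = [random.gauss(0, 1) for _ in range(1000)]
edges, hist = histogram(data, 20, -4, 4, name='gauss')
print(edges[:3])          # lower bin edges, approximately [-4.0, -3.6, -3.2]
print(hist.GetEntries())  # 1000.0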
rootpy/rootpy
rootpy/plotting/hist.py
BinProxy.overflow
def overflow(self): """ Returns true if this BinProxy is for an overflow bin """ indices = self.hist.xyz(self.idx) for i in range(self.hist.GetDimension()): if indices[i] == 0 or indices[i] == self.hist.nbins(i) + 1: return True return False
python
def overflow(self): """ Returns true if this BinProxy is for an overflow bin """ indices = self.hist.xyz(self.idx) for i in range(self.hist.GetDimension()): if indices[i] == 0 or indices[i] == self.hist.nbins(i) + 1: return True return False
[ "def", "overflow", "(", "self", ")", ":", "indices", "=", "self", ".", "hist", ".", "xyz", "(", "self", ".", "idx", ")", "for", "i", "in", "range", "(", "self", ".", "hist", ".", "GetDimension", "(", ")", ")", ":", "if", "indices", "[", "i", "]", "==", "0", "or", "indices", "[", "i", "]", "==", "self", ".", "hist", ".", "nbins", "(", "i", ")", "+", "1", ":", "return", "True", "return", "False" ]
Returns true if this BinProxy is for an overflow bin
[ "Returns", "true", "if", "this", "BinProxy", "is", "for", "an", "overflow", "bin" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L217-L225
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.xyz
def xyz(self, idx): """ return binx, biny, binz corresponding to the global bin number """ # Not implemented for Python 3: # GetBinXYZ(i, x, y, z) nx = self.GetNbinsX() + 2 ny = self.GetNbinsY() + 2 ndim = self.GetDimension() if ndim < 2: binx = idx % nx biny = 0 binz = 0 elif ndim < 3: binx = idx % nx biny = ((idx - binx) // nx) % ny binz = 0 elif ndim < 4: binx = idx % nx biny = ((idx - binx) // nx) % ny binz = ((idx - binx) // nx - biny) // ny else: raise NotImplementedError return binx, biny, binz
python
def xyz(self, idx): """ return binx, biny, binz corresponding to the global bin number """ # Not implemented for Python 3: # GetBinXYZ(i, x, y, z) nx = self.GetNbinsX() + 2 ny = self.GetNbinsY() + 2 ndim = self.GetDimension() if ndim < 2: binx = idx % nx biny = 0 binz = 0 elif ndim < 3: binx = idx % nx biny = ((idx - binx) // nx) % ny binz = 0 elif ndim < 4: binx = idx % nx biny = ((idx - binx) // nx) % ny binz = ((idx - binx) // nx - biny) // ny else: raise NotImplementedError return binx, biny, binz
[ "def", "xyz", "(", "self", ",", "idx", ")", ":", "# Not implemented for Python 3:", "# GetBinXYZ(i, x, y, z)", "nx", "=", "self", ".", "GetNbinsX", "(", ")", "+", "2", "ny", "=", "self", ".", "GetNbinsY", "(", ")", "+", "2", "ndim", "=", "self", ".", "GetDimension", "(", ")", "if", "ndim", "<", "2", ":", "binx", "=", "idx", "%", "nx", "biny", "=", "0", "binz", "=", "0", "elif", "ndim", "<", "3", ":", "binx", "=", "idx", "%", "nx", "biny", "=", "(", "(", "idx", "-", "binx", ")", "//", "nx", ")", "%", "ny", "binz", "=", "0", "elif", "ndim", "<", "4", ":", "binx", "=", "idx", "%", "nx", "biny", "=", "(", "(", "idx", "-", "binx", ")", "//", "nx", ")", "%", "ny", "binz", "=", "(", "(", "idx", "-", "binx", ")", "//", "nx", "-", "biny", ")", "//", "ny", "else", ":", "raise", "NotImplementedError", "return", "binx", ",", "biny", ",", "binz" ]
return binx, biny, binz corresponding to the global bin number
[ "return", "binx", "biny", "binz", "corresponding", "to", "the", "global", "bin", "number" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L377-L400
train
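A worked example of xyz decoding a global bin index: for a 3x2 Hist2D the x axis has 3+2 slots including under/overflow, so global index 7 corresponds to binx=2, biny=1.

from rootpy.plotting import Hist2D

h = Hist2D(3, 0, 1, 2, 0, 1)
print(h.xyz(7))  # (2, 1, 0)
print(h.xyz(0))  # (0, 0, 0), the underflow corner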
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.nbins
def nbins(self, axis=0, overflow=False): """ Get the number of bins along an axis """ if axis == 0: nbins = self.GetNbinsX() elif axis == 1: nbins = self.GetNbinsY() elif axis == 2: nbins = self.GetNbinsZ() else: raise ValueError("axis must be 0, 1, or 2") if overflow: nbins += 2 return nbins
python
def nbins(self, axis=0, overflow=False): """ Get the number of bins along an axis """ if axis == 0: nbins = self.GetNbinsX() elif axis == 1: nbins = self.GetNbinsY() elif axis == 2: nbins = self.GetNbinsZ() else: raise ValueError("axis must be 0, 1, or 2") if overflow: nbins += 2 return nbins
[ "def", "nbins", "(", "self", ",", "axis", "=", "0", ",", "overflow", "=", "False", ")", ":", "if", "axis", "==", "0", ":", "nbins", "=", "self", ".", "GetNbinsX", "(", ")", "elif", "axis", "==", "1", ":", "nbins", "=", "self", ".", "GetNbinsY", "(", ")", "elif", "axis", "==", "2", ":", "nbins", "=", "self", ".", "GetNbinsZ", "(", ")", "else", ":", "raise", "ValueError", "(", "\"axis must be 0, 1, or 2\"", ")", "if", "overflow", ":", "nbins", "+=", "2", "return", "nbins" ]
Get the number of bins along an axis
[ "Get", "the", "number", "of", "bins", "along", "an", "axis" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L463-L477
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.bins_range
def bins_range(self, axis=0, overflow=False): """ Return a range of bin indices for iterating along an axis Parameters ---------- axis : int, optional (default=0) The axis (0, 1 or 2). overflow : bool, optional (default=False) If True then include the underflow and overflow bins otherwise only include the visible bins. Returns ------- a range object of bin indices """ nbins = self.nbins(axis=axis, overflow=False) if overflow: start = 0 end_offset = 2 else: start = 1 end_offset = 1 return range(start, nbins + end_offset)
python
def bins_range(self, axis=0, overflow=False): """ Return a range of bin indices for iterating along an axis Parameters ---------- axis : int, optional (default=0) The axis (0, 1 or 2). overflow : bool, optional (default=False) If True then include the underflow and overflow bins otherwise only include the visible bins. Returns ------- a range object of bin indices """ nbins = self.nbins(axis=axis, overflow=False) if overflow: start = 0 end_offset = 2 else: start = 1 end_offset = 1 return range(start, nbins + end_offset)
[ "def", "bins_range", "(", "self", ",", "axis", "=", "0", ",", "overflow", "=", "False", ")", ":", "nbins", "=", "self", ".", "nbins", "(", "axis", "=", "axis", ",", "overflow", "=", "False", ")", "if", "overflow", ":", "start", "=", "0", "end_offset", "=", "2", "else", ":", "start", "=", "1", "end_offset", "=", "1", "return", "range", "(", "start", ",", "nbins", "+", "end_offset", ")" ]
Return a range of bin indices for iterating along an axis Parameters ---------- axis : int, optional (default=0) The axis (0, 1 or 2). overflow : bool, optional (default=False) If True then include the underflow and overflow bins otherwise only include the visible bins. Returns ------- a range object of bin indices
[ "Return", "a", "range", "of", "bin", "indices", "for", "iterating", "along", "an", "axis" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L479-L506
train
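A sketch combining nbins and bins_range from the two records above: counting bins along an axis and iterating over them with or without the under/overflow slots.

from rootpy.plotting import Hist2D

h = Hist2D(5, 0, 1, 3, 0, 1)
print(h.nbins(0), h.nbins(1))                     # 5 3
print(h.nbins(0, overflow=True))                  # 7 (adds underflow and overflow)
print(list(h.bins_range(axis=1)))                 # [1, 2, 3]
print(list(h.bins_range(axis=1, overflow=True)))  # [0, 1, 2, 3, 4]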
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.uniform_binned
def uniform_binned(self, name=None): """ Return a new histogram with constant width bins along all axes by using the bin indices as the bin edges of the new histogram. """ if self.GetDimension() == 1: new_hist = Hist( self.GetNbinsX(), 0, self.GetNbinsX(), name=name, type=self.TYPE) elif self.GetDimension() == 2: new_hist = Hist2D( self.GetNbinsX(), 0, self.GetNbinsX(), self.GetNbinsY(), 0, self.GetNbinsY(), name=name, type=self.TYPE) else: new_hist = Hist3D( self.GetNbinsX(), 0, self.GetNbinsX(), self.GetNbinsY(), 0, self.GetNbinsY(), self.GetNbinsZ(), 0, self.GetNbinsZ(), name=name, type=self.TYPE) # copy over the bin contents and errors for outbin, inbin in zip(new_hist.bins(), self.bins()): outbin.value = inbin.value outbin.error = inbin.error new_hist.decorate(self) new_hist.entries = self.entries return new_hist
python
def uniform_binned(self, name=None): """ Return a new histogram with constant width bins along all axes by using the bin indices as the bin edges of the new histogram. """ if self.GetDimension() == 1: new_hist = Hist( self.GetNbinsX(), 0, self.GetNbinsX(), name=name, type=self.TYPE) elif self.GetDimension() == 2: new_hist = Hist2D( self.GetNbinsX(), 0, self.GetNbinsX(), self.GetNbinsY(), 0, self.GetNbinsY(), name=name, type=self.TYPE) else: new_hist = Hist3D( self.GetNbinsX(), 0, self.GetNbinsX(), self.GetNbinsY(), 0, self.GetNbinsY(), self.GetNbinsZ(), 0, self.GetNbinsZ(), name=name, type=self.TYPE) # copy over the bin contents and errors for outbin, inbin in zip(new_hist.bins(), self.bins()): outbin.value = inbin.value outbin.error = inbin.error new_hist.decorate(self) new_hist.entries = self.entries return new_hist
[ "def", "uniform_binned", "(", "self", ",", "name", "=", "None", ")", ":", "if", "self", ".", "GetDimension", "(", ")", "==", "1", ":", "new_hist", "=", "Hist", "(", "self", ".", "GetNbinsX", "(", ")", ",", "0", ",", "self", ".", "GetNbinsX", "(", ")", ",", "name", "=", "name", ",", "type", "=", "self", ".", "TYPE", ")", "elif", "self", ".", "GetDimension", "(", ")", "==", "2", ":", "new_hist", "=", "Hist2D", "(", "self", ".", "GetNbinsX", "(", ")", ",", "0", ",", "self", ".", "GetNbinsX", "(", ")", ",", "self", ".", "GetNbinsY", "(", ")", ",", "0", ",", "self", ".", "GetNbinsY", "(", ")", ",", "name", "=", "name", ",", "type", "=", "self", ".", "TYPE", ")", "else", ":", "new_hist", "=", "Hist3D", "(", "self", ".", "GetNbinsX", "(", ")", ",", "0", ",", "self", ".", "GetNbinsX", "(", ")", ",", "self", ".", "GetNbinsY", "(", ")", ",", "0", ",", "self", ".", "GetNbinsY", "(", ")", ",", "self", ".", "GetNbinsZ", "(", ")", ",", "0", ",", "self", ".", "GetNbinsZ", "(", ")", ",", "name", "=", "name", ",", "type", "=", "self", ".", "TYPE", ")", "# copy over the bin contents and errors", "for", "outbin", ",", "inbin", "in", "zip", "(", "new_hist", ".", "bins", "(", ")", ",", "self", ".", "bins", "(", ")", ")", ":", "outbin", ".", "value", "=", "inbin", ".", "value", "outbin", ".", "error", "=", "inbin", ".", "error", "new_hist", ".", "decorate", "(", "self", ")", "new_hist", ".", "entries", "=", "self", ".", "entries", "return", "new_hist" ]
Return a new histogram with constant width bins along all axes by using the bin indices as the bin edges of the new histogram.
[ "Return", "a", "new", "histogram", "with", "constant", "width", "bins", "along", "all", "axes", "by", "using", "the", "bin", "indices", "as", "the", "bin", "edges", "of", "the", "new", "histogram", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L749-L775
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.underflow
def underflow(self, axis=0): """ Return the underflow for the given axis. Depending on the dimension of the histogram, may return an array. """ if axis not in range(3): raise ValueError("axis must be 0, 1, or 2") if self.DIM == 1: return self.GetBinContent(0) elif self.DIM == 2: def idx(i): arg = [i] arg.insert(axis, 0) return arg return [ self.GetBinContent(*idx(i)) for i in self.bins_range(axis=(axis + 1) % 2, overflow=True)] elif self.DIM == 3: axes = [0, 1, 2] axes.remove(axis) axis2, axis3 = axes def idx(i, j): arg = [i, j] arg.insert(axis, 0) return arg return [[ self.GetBinContent(*idx(i, j)) for i in self.bins_range(axis=axis2, overflow=True)] for j in self.bins_range(axis=axis3, overflow=True)]
python
def underflow(self, axis=0): """ Return the underflow for the given axis. Depending on the dimension of the histogram, may return an array. """ if axis not in range(3): raise ValueError("axis must be 0, 1, or 2") if self.DIM == 1: return self.GetBinContent(0) elif self.DIM == 2: def idx(i): arg = [i] arg.insert(axis, 0) return arg return [ self.GetBinContent(*idx(i)) for i in self.bins_range(axis=(axis + 1) % 2, overflow=True)] elif self.DIM == 3: axes = [0, 1, 2] axes.remove(axis) axis2, axis3 = axes def idx(i, j): arg = [i, j] arg.insert(axis, 0) return arg return [[ self.GetBinContent(*idx(i, j)) for i in self.bins_range(axis=axis2, overflow=True)] for j in self.bins_range(axis=axis3, overflow=True)]
[ "def", "underflow", "(", "self", ",", "axis", "=", "0", ")", ":", "if", "axis", "not", "in", "range", "(", "3", ")", ":", "raise", "ValueError", "(", "\"axis must be 0, 1, or 2\"", ")", "if", "self", ".", "DIM", "==", "1", ":", "return", "self", ".", "GetBinContent", "(", "0", ")", "elif", "self", ".", "DIM", "==", "2", ":", "def", "idx", "(", "i", ")", ":", "arg", "=", "[", "i", "]", "arg", ".", "insert", "(", "axis", ",", "0", ")", "return", "arg", "return", "[", "self", ".", "GetBinContent", "(", "*", "idx", "(", "i", ")", ")", "for", "i", "in", "self", ".", "bins_range", "(", "axis", "=", "(", "axis", "+", "1", ")", "%", "2", ",", "overflow", "=", "True", ")", "]", "elif", "self", ".", "DIM", "==", "3", ":", "axes", "=", "[", "0", ",", "1", ",", "2", "]", "axes", ".", "remove", "(", "axis", ")", "axis2", ",", "axis3", "=", "axes", "def", "idx", "(", "i", ",", "j", ")", ":", "arg", "=", "[", "i", ",", "j", "]", "arg", ".", "insert", "(", "axis", ",", "0", ")", "return", "arg", "return", "[", "[", "self", ".", "GetBinContent", "(", "*", "idx", "(", "i", ",", "j", ")", ")", "for", "i", "in", "self", ".", "bins_range", "(", "axis", "=", "axis2", ",", "overflow", "=", "True", ")", "]", "for", "j", "in", "self", ".", "bins_range", "(", "axis", "=", "axis3", ",", "overflow", "=", "True", ")", "]" ]
Return the underflow for the given axis. Depending on the dimension of the histogram, may return an array.
[ "Return", "the", "underflow", "for", "the", "given", "axis", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L777-L806
train
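A sketch of underflow for the simplest (1D) case, where a single number is returned; for 2D and 3D histograms a (nested) list over the other axes is returned instead, as the docstring above notes.

from rootpy.plotting import Hist

h = Hist(4, 0, 1)
h.Fill(-5)            # below the axis range, so it lands in the underflow bin
print(h.underflow())  # 1.0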
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.lowerbound
def lowerbound(self, axis=0): """ Get the lower bound of the binning along an axis """ if not 0 <= axis < self.GetDimension(): raise ValueError( "axis must be a non-negative integer less than " "the dimensionality of the histogram") if axis == 0: return self.xedges(1) if axis == 1: return self.yedges(1) if axis == 2: return self.zedges(1) raise TypeError("axis must be an integer")
python
def lowerbound(self, axis=0): """ Get the lower bound of the binning along an axis """ if not 0 <= axis < self.GetDimension(): raise ValueError( "axis must be a non-negative integer less than " "the dimensionality of the histogram") if axis == 0: return self.xedges(1) if axis == 1: return self.yedges(1) if axis == 2: return self.zedges(1) raise TypeError("axis must be an integer")
[ "def", "lowerbound", "(", "self", ",", "axis", "=", "0", ")", ":", "if", "not", "0", "<=", "axis", "<", "self", ".", "GetDimension", "(", ")", ":", "raise", "ValueError", "(", "\"axis must be a non-negative integer less than \"", "\"the dimensionality of the histogram\"", ")", "if", "axis", "==", "0", ":", "return", "self", ".", "xedges", "(", "1", ")", "if", "axis", "==", "1", ":", "return", "self", ".", "yedges", "(", "1", ")", "if", "axis", "==", "2", ":", "return", "self", ".", "zedges", "(", "1", ")", "raise", "TypeError", "(", "\"axis must be an integer\"", ")" ]
Get the lower bound of the binning along an axis
[ "Get", "the", "lower", "bound", "of", "the", "binning", "along", "an", "axis" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L844-L858
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.bounds
def bounds(self, axis=0): """ Get the lower and upper bounds of the binning along an axis """ if not 0 <= axis < self.GetDimension(): raise ValueError( "axis must be a non-negative integer less than " "the dimensionality of the histogram") if axis == 0: return self.xedges(1), self.xedges(-2) if axis == 1: return self.yedges(1), self.yedges(-2) if axis == 2: return self.zedges(1), self.zedges(-2) raise TypeError("axis must be an integer")
python
def bounds(self, axis=0): """ Get the lower and upper bounds of the binning along an axis """ if not 0 <= axis < self.GetDimension(): raise ValueError( "axis must be a non-negative integer less than " "the dimensionality of the histogram") if axis == 0: return self.xedges(1), self.xedges(-2) if axis == 1: return self.yedges(1), self.yedges(-2) if axis == 2: return self.zedges(1), self.zedges(-2) raise TypeError("axis must be an integer")
[ "def", "bounds", "(", "self", ",", "axis", "=", "0", ")", ":", "if", "not", "0", "<=", "axis", "<", "self", ".", "GetDimension", "(", ")", ":", "raise", "ValueError", "(", "\"axis must be a non-negative integer less than \"", "\"the dimensionality of the histogram\"", ")", "if", "axis", "==", "0", ":", "return", "self", ".", "xedges", "(", "1", ")", ",", "self", ".", "xedges", "(", "-", "2", ")", "if", "axis", "==", "1", ":", "return", "self", ".", "yedges", "(", "1", ")", ",", "self", ".", "yedges", "(", "-", "2", ")", "if", "axis", "==", "2", ":", "return", "self", ".", "zedges", "(", "1", ")", ",", "self", ".", "zedges", "(", "-", "2", ")", "raise", "TypeError", "(", "\"axis must be an integer\"", ")" ]
Get the lower and upper bounds of the binning along an axis
[ "Get", "the", "lower", "and", "upper", "bounds", "of", "the", "binning", "along", "an", "axis" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L876-L890
train
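A sketch of bounds (and the related lowerbound from the previous record) querying the binning range of each axis of a 2D histogram.

from rootpy.plotting import Hist2D

h = Hist2D(10, -2.5, 2.5, 4, 0, 100)
print(h.bounds(axis=0))      # (-2.5, 2.5)
print(h.bounds(axis=1))      # (0.0, 100.0)
print(h.lowerbound(axis=1))  # 0.0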
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.check_compatibility
def check_compatibility(self, other, check_edges=False, precision=1E-7): """ Test whether two histograms are considered compatible by the number of dimensions, number of bins along each axis, and optionally the bin edges. Parameters ---------- other : histogram A rootpy histogram check_edges : bool, optional (default=False) If True then also check that the bin edges are equal within the specified precision. precision : float, optional (default=1E-7) The value below which differences between floats are treated as nil when comparing bin edges. Raises ------ TypeError If the histogram dimensionalities do not match ValueError If the histogram sizes, number of bins along an axis, or optionally the bin edges do not match """ if self.GetDimension() != other.GetDimension(): raise TypeError("histogram dimensionalities do not match") if len(self) != len(other): raise ValueError("histogram sizes do not match") for axis in range(self.GetDimension()): if self.nbins(axis=axis) != other.nbins(axis=axis): raise ValueError( "numbers of bins along axis {0:d} do not match".format( axis)) if check_edges: for axis in range(self.GetDimension()): if not all([abs(l - r) < precision for l, r in zip(self._edges(axis), other._edges(axis))]): raise ValueError( "edges do not match along axis {0:d}".format(axis))
python
def check_compatibility(self, other, check_edges=False, precision=1E-7): """ Test whether two histograms are considered compatible by the number of dimensions, number of bins along each axis, and optionally the bin edges. Parameters ---------- other : histogram A rootpy histogram check_edges : bool, optional (default=False) If True then also check that the bin edges are equal within the specified precision. precision : float, optional (default=1E-7) The value below which differences between floats are treated as nil when comparing bin edges. Raises ------ TypeError If the histogram dimensionalities do not match ValueError If the histogram sizes, number of bins along an axis, or optionally the bin edges do not match """ if self.GetDimension() != other.GetDimension(): raise TypeError("histogram dimensionalities do not match") if len(self) != len(other): raise ValueError("histogram sizes do not match") for axis in range(self.GetDimension()): if self.nbins(axis=axis) != other.nbins(axis=axis): raise ValueError( "numbers of bins along axis {0:d} do not match".format( axis)) if check_edges: for axis in range(self.GetDimension()): if not all([abs(l - r) < precision for l, r in zip(self._edges(axis), other._edges(axis))]): raise ValueError( "edges do not match along axis {0:d}".format(axis))
[ "def", "check_compatibility", "(", "self", ",", "other", ",", "check_edges", "=", "False", ",", "precision", "=", "1E-7", ")", ":", "if", "self", ".", "GetDimension", "(", ")", "!=", "other", ".", "GetDimension", "(", ")", ":", "raise", "TypeError", "(", "\"histogram dimensionalities do not match\"", ")", "if", "len", "(", "self", ")", "!=", "len", "(", "other", ")", ":", "raise", "ValueError", "(", "\"histogram sizes do not match\"", ")", "for", "axis", "in", "range", "(", "self", ".", "GetDimension", "(", ")", ")", ":", "if", "self", ".", "nbins", "(", "axis", "=", "axis", ")", "!=", "other", ".", "nbins", "(", "axis", "=", "axis", ")", ":", "raise", "ValueError", "(", "\"numbers of bins along axis {0:d} do not match\"", ".", "format", "(", "axis", ")", ")", "if", "check_edges", ":", "for", "axis", "in", "range", "(", "self", ".", "GetDimension", "(", ")", ")", ":", "if", "not", "all", "(", "[", "abs", "(", "l", "-", "r", ")", "<", "precision", "for", "l", ",", "r", "in", "zip", "(", "self", ".", "_edges", "(", "axis", ")", ",", "other", ".", "_edges", "(", "axis", ")", ")", "]", ")", ":", "raise", "ValueError", "(", "\"edges do not match along axis {0:d}\"", ".", "format", "(", "axis", ")", ")" ]
Test whether two histograms are considered compatible by the number of dimensions, number of bins along each axis, and optionally the bin edges. Parameters ---------- other : histogram A rootpy histogram check_edges : bool, optional (default=False) If True then also check that the bin edges are equal within the specified precision. precision : float, optional (default=1E-7) The value below which differences between floats are treated as nil when comparing bin edges. Raises ------ TypeError If the histogram dimensionalities do not match ValueError If the histogram sizes, number of bins along an axis, or optionally the bin edges do not match
[ "Test", "whether", "two", "histograms", "are", "considered", "compatible", "by", "the", "number", "of", "dimensions", "number", "of", "bins", "along", "each", "axis", "and", "optionally", "the", "bin", "edges", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1018-L1063
train
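A sketch of check_compatibility used as a guard before combining histograms; a binning mismatch raises rather than silently producing nonsense.

from rootpy.plotting import Hist

h1 = Hist(10, 0, 1)
h2 = Hist(10, 0, 1)
h3 = Hist(20, 0, 1)

h1.check_compatibility(h2, check_edges=True)  # passes silently
try:
    h1.check_compatibility(h3)
except ValueError as err:
    print(err)  # histogram sizes do not match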
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.fill_array
def fill_array(self, array, weights=None): """ Fill this histogram with a NumPy array """ try: try: from root_numpy import fill_hist as fill_func except ImportError: from root_numpy import fill_array as fill_func except ImportError: log.critical( "root_numpy is needed for Hist*.fill_array. " "Is it installed and importable?") raise fill_func(self, array, weights=weights)
python
def fill_array(self, array, weights=None): """ Fill this histogram with a NumPy array """ try: try: from root_numpy import fill_hist as fill_func except ImportError: from root_numpy import fill_array as fill_func except ImportError: log.critical( "root_numpy is needed for Hist*.fill_array. " "Is it installed and importable?") raise fill_func(self, array, weights=weights)
[ "def", "fill_array", "(", "self", ",", "array", ",", "weights", "=", "None", ")", ":", "try", ":", "try", ":", "from", "root_numpy", "import", "fill_hist", "as", "fill_func", "except", "ImportError", ":", "from", "root_numpy", "import", "fill_array", "as", "fill_func", "except", "ImportError", ":", "log", ".", "critical", "(", "\"root_numpy is needed for Hist*.fill_array. \"", "\"Is it installed and importable?\"", ")", "raise", "fill_func", "(", "self", ",", "array", ",", "weights", "=", "weights", ")" ]
Fill this histogram with a NumPy array
[ "Fill", "this", "histogram", "with", "a", "NumPy", "array" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1192-L1206
train
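A sketch of fill_array bulk-filling a histogram from a NumPy array with per-entry weights; the optional root_numpy dependency mentioned in the docstring above is assumed to be installed.

import numpy as np
from rootpy.plotting import Hist

h = Hist(50, -4, 4)
values = np.random.normal(0, 1, size=10000)
h.fill_array(values, weights=np.full_like(values, 0.5))
print(h.GetSumOfWeights())  # roughly 5000 (10000 entries times weight 0.5)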
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.fill_view
def fill_view(self, view): """ Fill this histogram from a view of another histogram """ other = view.hist _other_x_center = other.axis(0).GetBinCenter _other_y_center = other.axis(1).GetBinCenter _other_z_center = other.axis(2).GetBinCenter _other_get = other.GetBinContent _other_get_bin = super(_HistBase, other).GetBin other_sum_w2 = other.GetSumw2() _other_sum_w2_at = other_sum_w2.At _find = self.FindBin sum_w2 = self.GetSumw2() _sum_w2_at = sum_w2.At _sum_w2_setat = sum_w2.SetAt _set = self.SetBinContent _get = self.GetBinContent for x, y, z in view.points: idx = _find( _other_x_center(x), _other_y_center(y), _other_z_center(z)) other_idx = _other_get_bin(x, y, z) _set(idx, _get(idx) + _other_get(other_idx)) _sum_w2_setat( _sum_w2_at(idx) + _other_sum_w2_at(other_idx), idx)
python
def fill_view(self, view): """ Fill this histogram from a view of another histogram """ other = view.hist _other_x_center = other.axis(0).GetBinCenter _other_y_center = other.axis(1).GetBinCenter _other_z_center = other.axis(2).GetBinCenter _other_get = other.GetBinContent _other_get_bin = super(_HistBase, other).GetBin other_sum_w2 = other.GetSumw2() _other_sum_w2_at = other_sum_w2.At _find = self.FindBin sum_w2 = self.GetSumw2() _sum_w2_at = sum_w2.At _sum_w2_setat = sum_w2.SetAt _set = self.SetBinContent _get = self.GetBinContent for x, y, z in view.points: idx = _find( _other_x_center(x), _other_y_center(y), _other_z_center(z)) other_idx = _other_get_bin(x, y, z) _set(idx, _get(idx) + _other_get(other_idx)) _sum_w2_setat( _sum_w2_at(idx) + _other_sum_w2_at(other_idx), idx)
[ "def", "fill_view", "(", "self", ",", "view", ")", ":", "other", "=", "view", ".", "hist", "_other_x_center", "=", "other", ".", "axis", "(", "0", ")", ".", "GetBinCenter", "_other_y_center", "=", "other", ".", "axis", "(", "1", ")", ".", "GetBinCenter", "_other_z_center", "=", "other", ".", "axis", "(", "2", ")", ".", "GetBinCenter", "_other_get", "=", "other", ".", "GetBinContent", "_other_get_bin", "=", "super", "(", "_HistBase", ",", "other", ")", ".", "GetBin", "other_sum_w2", "=", "other", ".", "GetSumw2", "(", ")", "_other_sum_w2_at", "=", "other_sum_w2", ".", "At", "_find", "=", "self", ".", "FindBin", "sum_w2", "=", "self", ".", "GetSumw2", "(", ")", "_sum_w2_at", "=", "sum_w2", ".", "At", "_sum_w2_setat", "=", "sum_w2", ".", "SetAt", "_set", "=", "self", ".", "SetBinContent", "_get", "=", "self", ".", "GetBinContent", "for", "x", ",", "y", ",", "z", "in", "view", ".", "points", ":", "idx", "=", "_find", "(", "_other_x_center", "(", "x", ")", ",", "_other_y_center", "(", "y", ")", ",", "_other_z_center", "(", "z", ")", ")", "other_idx", "=", "_other_get_bin", "(", "x", ",", "y", ",", "z", ")", "_set", "(", "idx", ",", "_get", "(", "idx", ")", "+", "_other_get", "(", "other_idx", ")", ")", "_sum_w2_setat", "(", "_sum_w2_at", "(", "idx", ")", "+", "_other_sum_w2_at", "(", "other_idx", ")", ",", "idx", ")" ]
Fill this histogram from a view of another histogram
[ "Fill", "this", "histogram", "from", "a", "view", "of", "another", "histogram" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1208-L1237
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.get_sum_w2
def get_sum_w2(self, ix, iy=0, iz=0): """ Obtain the true number of entries in the bin weighted by w^2 """ if self.GetSumw2N() == 0: raise RuntimeError( "Attempting to access Sumw2 in histogram " "where weights were not stored") xl = self.nbins(axis=0, overflow=True) yl = self.nbins(axis=1, overflow=True) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self.GetSumw2N(): raise IndexError("bin index out of range") return self.GetSumw2().At(idx)
python
def get_sum_w2(self, ix, iy=0, iz=0): """ Obtain the true number of entries in the bin weighted by w^2 """ if self.GetSumw2N() == 0: raise RuntimeError( "Attempting to access Sumw2 in histogram " "where weights were not stored") xl = self.nbins(axis=0, overflow=True) yl = self.nbins(axis=1, overflow=True) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self.GetSumw2N(): raise IndexError("bin index out of range") return self.GetSumw2().At(idx)
[ "def", "get_sum_w2", "(", "self", ",", "ix", ",", "iy", "=", "0", ",", "iz", "=", "0", ")", ":", "if", "self", ".", "GetSumw2N", "(", ")", "==", "0", ":", "raise", "RuntimeError", "(", "\"Attempting to access Sumw2 in histogram \"", "\"where weights were not stored\"", ")", "xl", "=", "self", ".", "nbins", "(", "axis", "=", "0", ",", "overflow", "=", "True", ")", "yl", "=", "self", ".", "nbins", "(", "axis", "=", "1", ",", "overflow", "=", "True", ")", "idx", "=", "xl", "*", "yl", "*", "iz", "+", "xl", "*", "iy", "+", "ix", "if", "not", "0", "<=", "idx", "<", "self", ".", "GetSumw2N", "(", ")", ":", "raise", "IndexError", "(", "\"bin index out of range\"", ")", "return", "self", ".", "GetSumw2", "(", ")", ".", "At", "(", "idx", ")" ]
Obtain the true number of entries in the bin weighted by w^2
[ "Obtain", "the", "true", "number", "of", "entries", "in", "the", "bin", "weighted", "by", "w^2" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1245-L1258
train
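A sketch of get_sum_w2 reading the per-bin sum of squared weights; Sumw2 storage is enabled explicitly first, since the method raises if weights were not stored.

from rootpy.plotting import Hist

h = Hist(3, 0, 1)
h.Sumw2()               # make sure the sum-of-w^2 array exists
h.Fill(0.5, 2.0)        # weight 2
h.Fill(0.5, 3.0)        # weight 3
print(h.get_sum_w2(2))  # 13.0 == 2**2 + 3**2 for the bin containing 0.5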
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.set_sum_w2
def set_sum_w2(self, w, ix, iy=0, iz=0): """ Sets the true number of entries in the bin weighted by w^2 """ if self.GetSumw2N() == 0: raise RuntimeError( "Attempting to access Sumw2 in histogram " "where weights were not stored") xl = self.nbins(axis=0, overflow=True) yl = self.nbins(axis=1, overflow=True) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self.GetSumw2N(): raise IndexError("bin index out of range") self.GetSumw2().SetAt(w, idx)
python
def set_sum_w2(self, w, ix, iy=0, iz=0): """ Sets the true number of entries in the bin weighted by w^2 """ if self.GetSumw2N() == 0: raise RuntimeError( "Attempting to access Sumw2 in histogram " "where weights were not stored") xl = self.nbins(axis=0, overflow=True) yl = self.nbins(axis=1, overflow=True) idx = xl * yl * iz + xl * iy + ix if not 0 <= idx < self.GetSumw2N(): raise IndexError("bin index out of range") self.GetSumw2().SetAt(w, idx)
[ "def", "set_sum_w2", "(", "self", ",", "w", ",", "ix", ",", "iy", "=", "0", ",", "iz", "=", "0", ")", ":", "if", "self", ".", "GetSumw2N", "(", ")", "==", "0", ":", "raise", "RuntimeError", "(", "\"Attempting to access Sumw2 in histogram \"", "\"where weights were not stored\"", ")", "xl", "=", "self", ".", "nbins", "(", "axis", "=", "0", ",", "overflow", "=", "True", ")", "yl", "=", "self", ".", "nbins", "(", "axis", "=", "1", ",", "overflow", "=", "True", ")", "idx", "=", "xl", "*", "yl", "*", "iz", "+", "xl", "*", "iy", "+", "ix", "if", "not", "0", "<=", "idx", "<", "self", ".", "GetSumw2N", "(", ")", ":", "raise", "IndexError", "(", "\"bin index out of range\"", ")", "self", ".", "GetSumw2", "(", ")", ".", "SetAt", "(", "w", ",", "idx", ")" ]
Sets the true number of entries in the bin weighted by w^2
[ "Sets", "the", "true", "number", "of", "entries", "in", "the", "bin", "weighted", "by", "w^2" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1260-L1273
train
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.rebinned
def rebinned(self, bins, axis=0): """ Return a new rebinned histogram Parameters ---------- bins : int, tuple, or iterable If ``bins`` is an int, then return a histogram that is rebinned by grouping N=``bins`` bins together along the axis ``axis``. If ``bins`` is a tuple, then it must contain the same number of elements as there are dimensions of this histogram and each element will be used to rebin along the associated axis. If ``bins`` is another iterable, then it will define the bin edges along the axis ``axis`` in the new rebinned histogram. axis : int, optional (default=0) The axis to rebin along. Returns ------- The rebinned histogram """ ndim = self.GetDimension() if axis >= ndim: raise ValueError( "axis must be less than the dimensionality of the histogram") if isinstance(bins, int): _bins = [1] * ndim try: _bins[axis] = bins except IndexError: raise ValueError("axis must be 0, 1, or 2") bins = tuple(_bins) if isinstance(bins, tuple): if len(bins) != ndim: raise ValueError( "bins must be a tuple with the same " "number of elements as histogram axes") newname = '{0}_{1}'.format(self.__class__.__name__, uuid()) if ndim == 1: hist = self.Rebin(bins[0], newname) elif ndim == 2: hist = self.Rebin2D(bins[0], bins[1], newname) else: hist = self.Rebin3D(bins[0], bins[1], bins[2], newname) hist = asrootpy(hist) elif hasattr(bins, '__iter__'): hist = self.empty_clone(bins, axis=axis) nbinsx = self.nbins(0) nbinsy = self.nbins(1) nbinsz = self.nbins(2) xaxis = self.xaxis yaxis = self.yaxis zaxis = self.zaxis sum_w2 = self.GetSumw2() _sum_w2_at = sum_w2.At new_sum_w2 = hist.GetSumw2() _new_sum_w2_at = new_sum_w2.At _new_sum_w2_setat = new_sum_w2.SetAt _x_center = xaxis.GetBinCenter _y_center = yaxis.GetBinCenter _z_center = zaxis.GetBinCenter _find = hist.FindBin _set = hist.SetBinContent _get = hist.GetBinContent _this_get = self.GetBinContent _get_bin = super(_HistBase, self).GetBin for z in range(1, nbinsz + 1): for y in range(1, nbinsy + 1): for x in range(1, nbinsx + 1): newbin = _find( _x_center(x), _y_center(y), _z_center(z)) idx = _get_bin(x, y, z) _set(newbin, _get(newbin) + _this_get(idx)) _new_sum_w2_setat( _new_sum_w2_at(newbin) + _sum_w2_at(idx), newbin) hist.SetEntries(self.GetEntries()) else: raise TypeError( "bins must either be an integer, a tuple, or an iterable") return hist
python
def rebinned(self, bins, axis=0):
    """
    Return a new rebinned histogram

    Parameters
    ----------

    bins : int, tuple, or iterable
        If ``bins`` is an int, then return a histogram that is rebinned
        by grouping N=``bins`` bins together along the axis ``axis``.
        If ``bins`` is a tuple, then it must contain the same number of
        elements as there are dimensions of this histogram and each
        element will be used to rebin along the associated axis.
        If ``bins`` is another iterable, then it will define the bin
        edges along the axis ``axis`` in the new rebinned histogram.

    axis : int, optional (default=0)
        The axis to rebin along.

    Returns
    -------

    The rebinned histogram

    """
    ndim = self.GetDimension()
    if axis >= ndim:
        raise ValueError(
            "axis must be less than the dimensionality of the histogram")
    if isinstance(bins, int):
        _bins = [1] * ndim
        try:
            _bins[axis] = bins
        except IndexError:
            raise ValueError("axis must be 0, 1, or 2")
        bins = tuple(_bins)
    if isinstance(bins, tuple):
        if len(bins) != ndim:
            raise ValueError(
                "bins must be a tuple with the same "
                "number of elements as histogram axes")
        newname = '{0}_{1}'.format(self.__class__.__name__, uuid())
        if ndim == 1:
            hist = self.Rebin(bins[0], newname)
        elif ndim == 2:
            hist = self.Rebin2D(bins[0], bins[1], newname)
        else:
            hist = self.Rebin3D(bins[0], bins[1], bins[2], newname)
        hist = asrootpy(hist)
    elif hasattr(bins, '__iter__'):
        hist = self.empty_clone(bins, axis=axis)
        nbinsx = self.nbins(0)
        nbinsy = self.nbins(1)
        nbinsz = self.nbins(2)
        xaxis = self.xaxis
        yaxis = self.yaxis
        zaxis = self.zaxis
        sum_w2 = self.GetSumw2()
        _sum_w2_at = sum_w2.At
        new_sum_w2 = hist.GetSumw2()
        _new_sum_w2_at = new_sum_w2.At
        _new_sum_w2_setat = new_sum_w2.SetAt
        _x_center = xaxis.GetBinCenter
        _y_center = yaxis.GetBinCenter
        _z_center = zaxis.GetBinCenter
        _find = hist.FindBin
        _set = hist.SetBinContent
        _get = hist.GetBinContent
        _this_get = self.GetBinContent
        _get_bin = super(_HistBase, self).GetBin
        for z in range(1, nbinsz + 1):
            for y in range(1, nbinsy + 1):
                for x in range(1, nbinsx + 1):
                    newbin = _find(
                        _x_center(x), _y_center(y), _z_center(z))
                    idx = _get_bin(x, y, z)
                    _set(newbin, _get(newbin) + _this_get(idx))
                    _new_sum_w2_setat(
                        _new_sum_w2_at(newbin) + _sum_w2_at(idx),
                        newbin)
        hist.SetEntries(self.GetEntries())
    else:
        raise TypeError(
            "bins must either be an integer, a tuple, or an iterable")
    return hist
[ "def", "rebinned", "(", "self", ",", "bins", ",", "axis", "=", "0", ")", ":", "ndim", "=", "self", ".", "GetDimension", "(", ")", "if", "axis", ">=", "ndim", ":", "raise", "ValueError", "(", "\"axis must be less than the dimensionality of the histogram\"", ")", "if", "isinstance", "(", "bins", ",", "int", ")", ":", "_bins", "=", "[", "1", "]", "*", "ndim", "try", ":", "_bins", "[", "axis", "]", "=", "bins", "except", "IndexError", ":", "raise", "ValueError", "(", "\"axis must be 0, 1, or 2\"", ")", "bins", "=", "tuple", "(", "_bins", ")", "if", "isinstance", "(", "bins", ",", "tuple", ")", ":", "if", "len", "(", "bins", ")", "!=", "ndim", ":", "raise", "ValueError", "(", "\"bins must be a tuple with the same \"", "\"number of elements as histogram axes\"", ")", "newname", "=", "'{0}_{1}'", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "uuid", "(", ")", ")", "if", "ndim", "==", "1", ":", "hist", "=", "self", ".", "Rebin", "(", "bins", "[", "0", "]", ",", "newname", ")", "elif", "ndim", "==", "2", ":", "hist", "=", "self", ".", "Rebin2D", "(", "bins", "[", "0", "]", ",", "bins", "[", "1", "]", ",", "newname", ")", "else", ":", "hist", "=", "self", ".", "Rebin3D", "(", "bins", "[", "0", "]", ",", "bins", "[", "1", "]", ",", "bins", "[", "2", "]", ",", "newname", ")", "hist", "=", "asrootpy", "(", "hist", ")", "elif", "hasattr", "(", "bins", ",", "'__iter__'", ")", ":", "hist", "=", "self", ".", "empty_clone", "(", "bins", ",", "axis", "=", "axis", ")", "nbinsx", "=", "self", ".", "nbins", "(", "0", ")", "nbinsy", "=", "self", ".", "nbins", "(", "1", ")", "nbinsz", "=", "self", ".", "nbins", "(", "2", ")", "xaxis", "=", "self", ".", "xaxis", "yaxis", "=", "self", ".", "yaxis", "zaxis", "=", "self", ".", "zaxis", "sum_w2", "=", "self", ".", "GetSumw2", "(", ")", "_sum_w2_at", "=", "sum_w2", ".", "At", "new_sum_w2", "=", "hist", ".", "GetSumw2", "(", ")", "_new_sum_w2_at", "=", "new_sum_w2", ".", "At", "_new_sum_w2_setat", "=", "new_sum_w2", ".", "SetAt", "_x_center", "=", "xaxis", ".", "GetBinCenter", "_y_center", "=", "yaxis", ".", "GetBinCenter", "_z_center", "=", "zaxis", ".", "GetBinCenter", "_find", "=", "hist", ".", "FindBin", "_set", "=", "hist", ".", "SetBinContent", "_get", "=", "hist", ".", "GetBinContent", "_this_get", "=", "self", ".", "GetBinContent", "_get_bin", "=", "super", "(", "_HistBase", ",", "self", ")", ".", "GetBin", "for", "z", "in", "range", "(", "1", ",", "nbinsz", "+", "1", ")", ":", "for", "y", "in", "range", "(", "1", ",", "nbinsy", "+", "1", ")", ":", "for", "x", "in", "range", "(", "1", ",", "nbinsx", "+", "1", ")", ":", "newbin", "=", "_find", "(", "_x_center", "(", "x", ")", ",", "_y_center", "(", "y", ")", ",", "_z_center", "(", "z", ")", ")", "idx", "=", "_get_bin", "(", "x", ",", "y", ",", "z", ")", "_set", "(", "newbin", ",", "_get", "(", "newbin", ")", "+", "_this_get", "(", "idx", ")", ")", "_new_sum_w2_setat", "(", "_new_sum_w2_at", "(", "newbin", ")", "+", "_sum_w2_at", "(", "idx", ")", ",", "newbin", ")", "hist", ".", "SetEntries", "(", "self", ".", "GetEntries", "(", ")", ")", "else", ":", "raise", "TypeError", "(", "\"bins must either be an integer, a tuple, or an iterable\"", ")", "return", "hist" ]
Return a new rebinned histogram

Parameters
----------

bins : int, tuple, or iterable
    If ``bins`` is an int, then return a histogram that is rebinned
    by grouping N=``bins`` bins together along the axis ``axis``.
    If ``bins`` is a tuple, then it must contain the same number of
    elements as there are dimensions of this histogram and each
    element will be used to rebin along the associated axis.
    If ``bins`` is another iterable, then it will define the bin
    edges along the axis ``axis`` in the new rebinned histogram.

axis : int, optional (default=0)
    The axis to rebin along.

Returns
-------

The rebinned histogram
[ "Return", "a", "new", "rebinned", "histogram" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1405-L1491
train
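The three accepted forms of the ``bins`` argument to ``rebinned`` can be exercised as follows (a sketch only; the histogram definitions and the edge list are made-up values, not taken from the rootpy sources):

from rootpy.plotting import Hist, Hist2D

h1 = Hist(100, 0, 10)
h2 = Hist2D(100, 0, 10, 50, -5, 5)

# int: merge groups of 5 consecutive bins along axis 0
h1_coarse = h1.rebinned(5)

# tuple: one grouping factor per axis of the histogram
h2_coarse = h2.rebinned((2, 5))

# iterable of edges: variable-width binning along the chosen axis
h1_var = h1.rebinned([0, 1, 2, 5, 10])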
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.smoothed
def smoothed(self, iterations=1):
    """
    Return a smoothed copy of this histogram

    Parameters
    ----------

    iterations : int, optional (default=1)
        The number of smoothing iterations

    Returns
    -------

    hist : asrootpy'd histogram
        The smoothed histogram

    """
    copy = self.Clone(shallow=True)
    copy.Smooth(iterations)
    return copy
python
def smoothed(self, iterations=1):
    """
    Return a smoothed copy of this histogram

    Parameters
    ----------

    iterations : int, optional (default=1)
        The number of smoothing iterations

    Returns
    -------

    hist : asrootpy'd histogram
        The smoothed histogram

    """
    copy = self.Clone(shallow=True)
    copy.Smooth(iterations)
    return copy
[ "def", "smoothed", "(", "self", ",", "iterations", "=", "1", ")", ":", "copy", "=", "self", ".", "Clone", "(", "shallow", "=", "True", ")", "copy", ".", "Smooth", "(", "iterations", ")", "return", "copy" ]
Return a smoothed copy of this histogram

Parameters
----------

iterations : int, optional (default=1)
    The number of smoothing iterations

Returns
-------

hist : asrootpy'd histogram
    The smoothed histogram
[ "Return", "a", "smoothed", "copy", "of", "this", "histogram" ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1493-L1512
train
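Since ``smoothed`` works on a clone, the original histogram is left untouched. A minimal sketch (the binning and the filled values are illustrative):

from rootpy.plotting import Hist

h = Hist(50, -4, 4)
for x in (-1.2, -0.3, 0.0, 0.4, 0.5, 1.1):
    h.Fill(x)              # toy content so the smoothing has something to act on

h_smooth = h.smoothed(iterations=2)   # h itself is not modified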
rootpy/rootpy
rootpy/plotting/hist.py
_HistBase.empty_clone
def empty_clone(self, binning=None, axis=0, type=None, **kwargs):
    """
    Return a new empty histogram. The binning may be modified along
    one axis by specifying the binning and axis arguments. If binning
    is False, then the corresponding axis is dropped from the
    returned histogram.
    """
    ndim = self.GetDimension()
    if binning is False and ndim == 1:
        raise ValueError(
            "cannot remove the x-axis of a 1D histogram")
    args = []
    for iaxis in range(ndim):
        if iaxis == axis:
            if binning is False:
                # skip this axis
                continue
            elif binning is not None:
                if hasattr(binning, '__iter__'):
                    binning = (binning,)
                args.extend(binning)
                continue
        args.append(list(self._edges(axis=iaxis)))
    if type is None:
        type = self.TYPE
    if binning is False:
        ndim -= 1
    cls = [Hist, Hist2D, Hist3D][ndim - 1]
    return cls(*args, type=type, **kwargs)
python
def empty_clone(self, binning=None, axis=0, type=None, **kwargs):
    """
    Return a new empty histogram. The binning may be modified along
    one axis by specifying the binning and axis arguments. If binning
    is False, then the corresponding axis is dropped from the
    returned histogram.
    """
    ndim = self.GetDimension()
    if binning is False and ndim == 1:
        raise ValueError(
            "cannot remove the x-axis of a 1D histogram")
    args = []
    for iaxis in range(ndim):
        if iaxis == axis:
            if binning is False:
                # skip this axis
                continue
            elif binning is not None:
                if hasattr(binning, '__iter__'):
                    binning = (binning,)
                args.extend(binning)
                continue
        args.append(list(self._edges(axis=iaxis)))
    if type is None:
        type = self.TYPE
    if binning is False:
        ndim -= 1
    cls = [Hist, Hist2D, Hist3D][ndim - 1]
    return cls(*args, type=type, **kwargs)
[ "def", "empty_clone", "(", "self", ",", "binning", "=", "None", ",", "axis", "=", "0", ",", "type", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ndim", "=", "self", ".", "GetDimension", "(", ")", "if", "binning", "is", "False", "and", "ndim", "==", "1", ":", "raise", "ValueError", "(", "\"cannot remove the x-axis of a 1D histogram\"", ")", "args", "=", "[", "]", "for", "iaxis", "in", "range", "(", "ndim", ")", ":", "if", "iaxis", "==", "axis", ":", "if", "binning", "is", "False", ":", "# skip this axis", "continue", "elif", "binning", "is", "not", "None", ":", "if", "hasattr", "(", "binning", ",", "'__iter__'", ")", ":", "binning", "=", "(", "binning", ",", ")", "args", ".", "extend", "(", "binning", ")", "continue", "args", ".", "append", "(", "list", "(", "self", ".", "_edges", "(", "axis", "=", "iaxis", ")", ")", ")", "if", "type", "is", "None", ":", "type", "=", "self", ".", "TYPE", "if", "binning", "is", "False", ":", "ndim", "-=", "1", "cls", "=", "[", "Hist", ",", "Hist2D", ",", "Hist3D", "]", "[", "ndim", "-", "1", "]", "return", "cls", "(", "*", "args", ",", "type", "=", "type", ",", "*", "*", "kwargs", ")" ]
Return a new empty histogram. The binning may be modified along one axis by specifying the binning and axis arguments. If binning is False, then the corresponding axis is dropped from the returned histogram.
[ "Return", "a", "new", "empty", "histogram", ".", "The", "binning", "may", "be", "modified", "along", "one", "axis", "by", "specifying", "the", "binning", "and", "axis", "arguments", ".", "If", "binning", "is", "False", "then", "the", "corresponding", "axis", "is", "dropped", "from", "the", "returned", "histogram", "." ]
3926935e1f2100d8ba68070c2ab44055d4800f73
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/hist.py#L1514-L1542
train
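A sketch of the three ways ``empty_clone`` can be called, following the signature shown above (the concrete binning values and edge list are illustrative):

from rootpy.plotting import Hist2D

h = Hist2D(100, 0, 10, 50, -5, 5)

# same binning as h, but with no contents
h_empty = h.empty_clone()

# replace the y-axis (axis=1) binning with explicit edges; x binning is kept
h_newy = h.empty_clone([-5, -1, 0, 1, 5], axis=1)

# drop the y-axis entirely, leaving an empty 1D histogram with h's x binning
h_xonly = h.empty_clone(False, axis=1)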