function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
sequence |
---|---|---|
def reset_timer(self, *args):
    """ Reset the timer.

    Clears the tracked timing state, records a new wall-clock start time,
    zeroes the elapsed time and refreshes the displayed time.
    """
    self.timing_tracker.reset(self.current_time())
    self.restart_time = time.time()
    self.elapsed_time = 0
    # Restart the autoplay loop so it is synchronised with the reset clock.
    if self.autoplay.is_looping():
        self.autoplay.start_looping()
    self.update_time()
820,
75,
820,
22,
1442468502
] |
def __init__(self):
    """ Initialisation is important.

    Initialises the GObject base class so GObject signals/properties work.
    """
    GObject.Object.__init__(self)
139,
35,
139,
49,
1502096081
] |
def __init__(self, uuid):
    """Budgie panel applet: per-workspace stopwatch.

    :param uuid: this applet instance's unique id, used by watchout()
        to detect whether the applet is still on the panel.
    """
    Budgie.Applet.__init__(self)
    # for exit-from-panel procedure
    self.uuid = uuid
    self.currpanelsubject_settings = None
    self.wstopwatch_onpanel = True
    GLib.timeout_add_seconds(1, self.watchout)
    # setup css
    timer_css = """
    .label {
        padding-bottom: 7px;
        padding-top: 0px;
        font-weight: bold;
    }
    .button {
        margin-top: 10px;
        margin-left: 30px;
    }
    """
    self.provider = Gtk.CssProvider.new()
    self.provider.load_from_data(timer_css.encode())
    # setup general stuff
    self.scr = Wnck.Screen.get_default()
    # self.scr.force_update()
    self.scr.connect("active-workspace-changed", self.act_on_change)
    self.logfile = os.path.join(os.environ["HOME"], ".workspace_log")
    self.load_data()
    currws = self.scr.get_active_workspace()
    self.starttime = time.time()
    self.last_logged = self.starttime
    # Attribute the initial interval to the workspace active at startup.
    self.act_on_change(self.scr, currws)
    GLib.timeout_add_seconds(30, self.update_log)
    self.maingrid = Gtk.Grid()
    # panel
    self.seticon = Gtk.Image.new_from_icon_name(
        "budgie-wstopwatch-symbolic", Gtk.IconSize.MENU
    )
    self.box = Gtk.EventBox()
    self.box.add(self.seticon)
    self.add(self.box)
    self.popover = Budgie.Popover.new(self.box)
    self.popover.add(self.maingrid)
    self.popover.get_child().show_all()
    self.box.show_all()
    self.show_all()
    self.box.connect("button-press-event", self.on_press)
139,
35,
139,
49,
1502096081
] |
def check_ifonpanel(self, *args):
    """Refresh the flag telling whether this applet is still on the panel."""
    current_applets = self.currpanelsubject_settings.get_strv("applets")
    self.wstopwatch_onpanel = self.uuid in current_applets
139,
35,
139,
49,
1502096081
] |
def load_data(self):
    """Load persisted per-workspace timing data from the logfile.

    Falls back to an empty dict when the file is missing or unparsable.
    """
    try:
        # The file is written by act_on_change() as a Python dict literal.
        # Use a context manager: the original leaked the file handle.
        with open(self.logfile) as f:
            self.workspace_data = ast.literal_eval(f.read())
    except (FileNotFoundError, SyntaxError, ValueError):
        # ValueError added: ast.literal_eval raises it for well-formed
        # but non-literal content.
        self.workspace_data = {}
139,
35,
139,
49,
1502096081
] |
def act_on_change(self, screen, workspace):
    """Accumulate elapsed time and persist the workspace data.

    Called when the active workspace changes (and periodically).

    :param screen: Wnck.Screen-like object providing get_workspaces().
    :param workspace: the workspace that just became active.
    """
    self.workspaces = screen.get_workspaces()
    key = self.workspaces.index(workspace)
    currtime = time.time()
    span = currtime - self.starttime
    # try get current time for key, add key if needed
    try:
        curr_spent = self.workspace_data[key]["time"]
    except KeyError:
        curr_spent = 0
        self.workspace_data[key] = {
            "time": curr_spent,
            "custom_name": "workspace: " + str(key + 1)
        }
    self.workspace_data[key]["time"] = curr_spent + span
    self.starttime = currtime
    # Persist immediately; context manager closes the handle
    # (the original left the file object dangling).
    with open(self.logfile, "wt") as f:
        f.write(str(self.workspace_data))
139,
35,
139,
49,
1502096081
] |
def do_update_popovers(self, manager):
    """Register our popover widget with Budgie's popover manager."""
    self.manager = manager
    manager.register_popover(self.box, self.popover)
139,
35,
139,
49,
1502096081
] |
def show_result(self, *args):
    """Rebuild the popover grid listing per-workspace names and times."""
    # Rebuilding the grid from scratch is simpler than editing rows in place.
    self.maingrid.destroy()
    self.maingrid = Gtk.Grid()
    self.popover.add(self.maingrid)
    # update to latest
    currws = self.scr.get_active_workspace()
    self.act_on_change(self.scr, currws)
    # Corner spacer labels force padding around the content.
    topleft = Gtk.Label()
    topleft.set_text("\t")
    self.maingrid.attach(topleft, 0, 0, 1, 1)
    bottomright = Gtk.Label()
    bottomright.set_text("\t")
    self.maingrid.attach(bottomright, 100, 100, 1, 1)
    workspace_header = Gtk.Label()
    workspace_header.set_text("Workspace")
    self.maingrid.attach(workspace_header, 2, 1, 1, 1)
    workspace_header.set_xalign(0)
    time_header = Gtk.Label()
    time_header.set_text("Time")
    self.maingrid.attach(time_header, 4, 1, 1, 1)
    time_header.set_xalign(0)
    for label in [workspace_header, time_header]:
        self.set_widgetstyle(label, "label")
    # Rows start at grid row 2; n - 2 is therefore the workspace index.
    n = 2
    for k in sorted(self.workspace_data.keys()):
        # Mark the currently active workspace with an arrow bullet.
        if n - 2 == self.workspaces.index(currws):
            bullet = Gtk.Label()
            bullet.set_text("⮕ ")
            self.maingrid.attach(bullet, 1, n, 1, 1)
        entry = Gtk.Entry()
        entry.set_text(self.workspace_data[k]["custom_name"])
        entry.connect("changed", self.update_customname, k)
        self.maingrid.attach(entry, 2, n, 1, 1)
        spacer = Gtk.Label()
        spacer.set_text("\t")
        self.maingrid.attach(spacer, 3, n, 1, 1)
        timelabel = Gtk.Label()
        timelabel.set_text(
            str(self.time_format(int(self.workspace_data[k]["time"])))
        )
        timelabel.set_xalign(0)
        self.maingrid.attach(timelabel, 4, n, 1, 1)
        n = n + 1
    resetbutton = Gtk.Button.new_with_label("Reset")
    resetbutton.grab_focus()
    resetbutton.connect("clicked", self.reset_data)
    self.set_widgetstyle(resetbutton, "button")
    self.maingrid.attach(
        resetbutton, 4, 99, 1, 1
    )
    resetbutton.grab_focus()
    self.maingrid.show_all()
    self.popover.show_all()
139,
35,
139,
49,
1502096081
] |
def __init__(self, max_core_distance, min_energy, max_energy, *args,
             **kwargs):
    """Simulation initialization

    :param max_core_distance: maximum distance of shower core to
        center of cluster (in meters).
    :param min_energy,max_energy: Minimum and maximum energy of the
        shower (in eV).
    """
    super(BaseLdfSimulation, self).__init__(*args, **kwargs)
    self.ldf = BaseLdf()
    self.max_core_distance = max_core_distance
    self.min_energy = min_energy
    self.max_energy = max_energy
    # The cluster is not moved, so detector positions can be stored.
    for station in self.cluster.stations:
        for detector in station.detectors:
            detector.xy_coordinates = detector.get_xy_coordinates()
8,
10,
8,
17,
1353340410
] |
def simulate_detector_response(self, detector, shower_parameters):
    """Simulate the response of a single detector to a shower.

    Queries the LDF for the particle count in the detector and converts
    it into a MIP signal.

    :param detector: :class:`~sapphire.clusters.Detector` for which
        the observables will be determined.
    :param shower_parameters: dictionary with the shower parameters.
    :return: dict with key ``'n'`` holding the detector signal in MIPs.
    """
    n_detected = self.get_num_particles_in_detector(detector,
                                                    shower_parameters)
    theta = shower_parameters['zenith']
    if not n_detected:
        return {'n': 0.}
    return {'n': self.simulate_detector_mips(n_detected, theta)}
8,
10,
8,
17,
1353340410
] |
def simulate_particles_for_density(p):
    """Draw a detected-particle count for a given particle density.

    :param p: particle density in number per detector area.
    :return: Poisson-distributed random count with mean *p*.
    """
    return random.poisson(lam=p)
8,
10,
8,
17,
1353340410
] |
def simulate_particles_for_density(p):
    """Return the particle count as the exact (non-random) density value."""
    return p
8,
10,
8,
17,
1353340410
] |
def __init__(self, *args, **kwargs):
    """Initialise the simulation and select the NKG lateral distribution."""
    super(NkgLdfSimulation, self).__init__(*args, **kwargs)
    self.ldf = NkgLdf()
8,
10,
8,
17,
1353340410
] |
def __init__(self, *args, **kwargs):
    """Initialise the simulation and select the KASCADE lateral distribution."""
    super(KascadeLdfSimulation, self).__init__(*args, **kwargs)
    self.ldf = KascadeLdf()
8,
10,
8,
17,
1353340410
] |
def __init__(self, *args, **kwargs):
    """Initialise the simulation and select the elliptical lateral distribution."""
    super(EllipsLdfSimulation, self).__init__(*args, **kwargs)
    self.ldf = EllipsLdf()
8,
10,
8,
17,
1353340410
] |
def get_num_particles_in_detector(self, detector, shower_parameters):
    """Get the number of particles in a detector

    Evaluates the LDF at the detector position and samples a particle
    count for the resulting density over the detector area.

    :param detector: :class:`~sapphire.clusters.Detector` for which
        the number of particles will be determined.
    :param shower_parameters: dictionary with the shower parameters.
    :return: number of particles in the detector.
    """
    x, y = detector.xy_coordinates
    core_x, core_y = shower_parameters['core_pos']
    r, phi = self.ldf.calculate_core_distance_and_angle(x, y, core_x,
                                                        core_y)
    density = self.ldf.calculate_ldf_value(
        r, phi, shower_parameters['size'], shower_parameters['zenith'],
        shower_parameters['azimuth'])
    return self.simulate_particles_for_density(density * detector.get_area())
8,
10,
8,
17,
1353340410
] |
def calculate_ldf_value(self, r, n_electrons=None, s=None):
    """Null LDF: always report zero particle density (in m ** -2)."""
    return 0.
8,
10,
8,
17,
1353340410
] |
def __init__(self, n_electrons=None, s=None):
    """NKG LDF setup

    :param n_electrons: Shower size (number of electrons).
    :param s: Shower age parameter.
    """
    # Only override the class-level defaults when explicitly given.
    if n_electrons is not None:
        self._n_electrons = n_electrons
    if s is not None:
        self._s = s
    # Pre-compute the normalisation constant for the current shower age.
    self._cache_c_s_value()
8,
10,
8,
17,
1353340410
] |
def calculate_ldf_value(self, r, n_electrons=None, s=None):
    """Calculate the LDF value

    :param r: core distance in m.
    :param n_electrons: number of electrons in the shower.
    :param s: shower age parameter.
    :return: particle density in m ** -2.
    """
    size = self._n_electrons if n_electrons is None else n_electrons
    age = self._s if s is None else s
    return self.ldf_value(r, size, age)
8,
10,
8,
17,
1353340410
] |
def _c(self, s):
    """Normalisation part of the NKG LDF (Fokkema2012 eq 7.3).

    :param s: shower age parameter.
    :return: c(s)
    """
    r0 = self._r0
    denominator = 2 * pi * r0 ** 2 * gamma(s) * gamma(4.5 - 2 * s)
    return gamma(4.5 - s) / denominator
8,
10,
8,
17,
1353340410
] |
def ldf_value(self, r, n_electrons, s):
    """Calculate the LDF value

    Given a core distance, shower size, and shower age.
    As given in Fokkema2012 eq 7.4.

    :param r: core distance in m.
    :param n_electrons: number of electrons in the shower.
    :param s: shower shape parameter.
    :return: particle density in m ** -2.
    """
    # Reuse the cached normalisation when the age matches the cached one.
    c_s = self._c_s if s == self._s else self._c(s)
    x = r / self._r0
    return (n_electrons * c_s * x ** (s - self._alpha) *
            (1 + x) ** (s - self._beta))
8,
10,
8,
17,
1353340410
] |
def __init__(self, n_electrons=None, zenith=None, azimuth=None, s1=None,
             s2=None):
    """Elliptical LDF setup.

    :param n_electrons: shower size (number of electrons).
    :param zenith: shower zenith angle in rad.
    :param azimuth: shower azimuth angle in rad.
    :param s1,s2: shower shape parameters.
    """
    # Only override the class-level defaults when explicitly given.
    if n_electrons is not None:
        self._n_electrons = n_electrons
    if zenith is not None:
        self._zenith = zenith
    if azimuth is not None:
        self._azimuth = azimuth
    if s1 is not None:
        self._s1 = s1
    if s2 is not None:
        self._s2 = s2
    # Pre-compute the normalisation constant for the current parameters.
    self._cache_c_s_value()
8,
10,
8,
17,
1353340410
] |
def calculate_ldf_value(self, r, phi, n_electrons=None, zenith=None,
                        azimuth=None):
    """Calculate the LDF value for a given core distance and polar angle

    :param r: core distance in m.
    :param phi: polar angle in rad.
    :param n_electrons: number of electrons in the shower.
    :param zenith: shower zenith angle in rad (cached value when None).
    :param azimuth: shower azimuth angle in rad (cached value when None).
    :return: particle density in m ** -2.
    """
    size = self._n_electrons if n_electrons is None else n_electrons
    theta = self._zenith if zenith is None else zenith
    az = self._azimuth if azimuth is None else azimuth
    return self.ldf_value(r, phi, size, theta, az, self._s1, self._s2)
8,
10,
8,
17,
1353340410
] |
def _c(self, s1, s2):
    """Normalization of the LDF

    As given in Montanus, paper to follow.

    :param s1: shower shape parameter.
    :param s2: shower shape parameter.
    :return: c(s1,s2)
    """
    r0 = self._r0
    denominator = 2 * pi * r0 ** 2 * gamma(s1 + 2) * gamma(-s1 - s2 - 2)
    return gamma(-s2) / denominator
8,
10,
8,
17,
1353340410
] |
def setUpClass(cls):
    """Start a filetracker server in a subprocess and build a client.

    Creates throw-away cache/server/temp directories; tearDownClass
    removes them and stops the server.
    """
    cls.cache_dir = tempfile.mkdtemp()
    cls.server_dir = tempfile.mkdtemp()
    cls.temp_dir = tempfile.mkdtemp()
    cls.server_process = Process(target=_start_server, args=(cls.server_dir,))
    cls.server_process.start()
    time.sleep(2)  # give server some time to start
    cls.client = Client(
        cache_dir=cls.cache_dir,
        remote_url='http://127.0.0.1:{}'.format(_TEST_PORT_NUMBER),
    )
7,
12,
7,
1,
1346586491
] |
def tearDownClass(cls):
    """Stop the server subprocess and delete every temporary directory."""
    cls.server_process.terminate()
    for directory in (cls.cache_dir, cls.server_dir, cls.temp_dir):
        shutil.rmtree(directory)
7,
12,
7,
1,
1346586491
] |
def test_put_file_should_save_file_both_locally_and_remotely(self):
    """put_file should populate both the local cache and the remote store."""
    temp_file = os.path.join(self.temp_dir, 'put.txt')
    with open(temp_file, 'w') as tf:
        tf.write('hello')
    self.client.put_file('/put.txt', temp_file)
    # Cache layout: <cache_dir>/files/...; server layout: <server_dir>/links/...
    cache_path = os.path.join(self.cache_dir, 'files', 'put.txt')
    remote_path = os.path.join(self.server_dir, 'links', 'put.txt')
    self.assertTrue(os.path.exists(cache_path))
    self.assertTrue(os.path.exists(remote_path))
    with open(cache_path, 'r') as cf:
        self.assertEqual(cf.read(), 'hello')
    # Streams are binary, hence the bytes comparison.
    rf, _ = self.client.get_stream('/put.txt')
    self.assertEqual(rf.read(), b'hello')
7,
12,
7,
1,
1346586491
] |
def test_get_file_should_save_file_contents_to_destination(self):
    """get_file should write the stored contents to the destination path."""
    src_file = os.path.join(self.temp_dir, 'get_src.txt')
    dest_file = os.path.join(self.temp_dir, 'get_dest.txt')
    with open(src_file, 'w') as sf:
        sf.write('hello')
    self.client.put_file('/get.txt', src_file)
    self.client.get_file('/get.txt', dest_file)
    with open(dest_file, 'r') as df:
        self.assertEqual(df.read(), 'hello')
7,
12,
7,
1,
1346586491
] |
def test_big_files_should_be_handled_correctly(self):
    """Round-trip a ~2MB file through the tracker."""
    # To be more precise, Content-Length header should be
    # set to the actual size of the file.
    src_file = os.path.join(self.temp_dir, 'big.txt')
    with open(src_file, 'wb') as sf:
        sf.write(b'r')
        for _ in range(1024 * 1024):
            sf.write(b'ee')
    self.client.put_file('/big.txt', src_file)
    f, _ = self.client.get_stream('/big.txt')
    with open(src_file, 'rb') as sf:
        self.assertEqual(sf.read(), f.read())
7,
12,
7,
1,
1346586491
] |
def test_file_size_should_return_decompressed_size_without_cache(self):
    """file_size must report the original size even when not cached locally."""
    src_file = os.path.join(self.temp_dir, 'size.txt')
    with open(src_file, 'wb') as sf:
        sf.write(b'hello size')  # size = 10
    # to_local_store=False skips the cache, forcing a remote size lookup.
    self.client.put_file('/size.txt', src_file, to_local_store=False)
    self.assertEqual(self.client.file_size('/size.txt'), len(b'hello size'))
7,
12,
7,
1,
1346586491
] |
def test_put_older_should_fail(self):
    """This test assumes file version is stored in mtime."""
    src_file = os.path.join(self.temp_dir, 'older.txt')
    with open(src_file, 'wb') as sf:
        sf.write(b'version 1')
    self.client.put_file('/older.txt@1', src_file)
    with open(src_file, 'wb') as sf:
        sf.write(b'version 2')
    self.client.put_file('/older.txt@2', src_file)
    with open(src_file, 'wb') as sf:
        sf.write(b'version 3 (1)')
    # Re-putting at the older version 1 must be ignored by the store.
    self.client.put_file('/older.txt@1', src_file)
    f, _ = self.client.get_stream('/older.txt')
    self.assertEqual(f.read(), b'version 2')
    # The stale version should no longer be retrievable.
    with self.assertRaises(FiletrackerError):
        self.client.get_stream('/older.txt@1')
7,
12,
7,
1,
1346586491
] |
def test_delete_nonexistent_should_404(self):
    """Deleting a missing remote file should surface the server's 404."""
    # assertRaisesRegexp is a deprecated alias removed in Python 3.12;
    # assertRaisesRegex is the supported spelling.
    with self.assertRaisesRegex(FiletrackerError, "404"):
        self.client.delete_file('/nonexistent.txt')
7,
12,
7,
1,
1346586491
] |
def __init__(self, dbname, host, port, user, password):
    """Open a connection to a PostgreSQL database.

    On failure ``self.conn`` is left as ``None`` and a message is printed
    (preserving the original best-effort behaviour).

    :param dbname: database name.
    :param host: server host name or address.
    :param port: server port.
    :param user: login role.
    :param password: login password.
    """
    self.dbname = dbname
    self.host = host
    self.port = port
    self.user = user
    self.password = password
    self.conn = None  # stays None when the connection attempt fails
    try:
        # Keyword arguments replace the whitespace-mangled %-formatted DSN
        # the original built across a line continuation.
        self.conn = psycopg2.connect(
            dbname=dbname, host=host, port=port,
            user=user, password=password)
        print("Opened database successfully")
    except psycopg2.Error:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print("I'm unable to connect to the database. Exiting function.")
3,
1,
3,
7,
1498038705
] |
def getQuery(self, query):
    """DB query where the results need to return (e.g. SELECT)

    Parameters
    ----------
    query : str
        SQL query

    Returns
    -------
    psycopg2 resultset
    """
    # The connection context manager commits/rolls back the transaction;
    # the cursor context manager closes the cursor.
    with self.conn, self.conn.cursor() as cur:
        cur.execute(query)
        return cur.fetchall()
3,
1,
3,
7,
1498038705
] |
def except_hook(exc_type, exc_value, exc_tb):
    """sys.excepthook replacement: log uncaught exceptions' tracebacks."""
    formatted = traceback.format_exception(exc_type, exc_value, exc_tb)
    root_logger.error("".join(formatted))
8,
2,
8,
1,
1466951811
] |
def default(self, obj):
    """JSON-encode ``bytes`` as a tagged dict; defer everything else.

    :raises TypeError: via the base class for unsupported types.
    """
    if not isinstance(obj, bytes):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)
    return {'__bytes__': self.ENCODING, 'content': obj.decode(self.ENCODING)}
8,
2,
8,
1,
1466951811
] |
def __init__(self):
    """Load the application configuration from PATH_TO_CFG_FILE.

    Exits the process with -1 when no 'instruments' section is present,
    since the application cannot run without it.
    """
    self.__logger = logging.getLogger('CFG')
    with open(PATH_TO_CFG_FILE) as file:
        self.__logger.debug('Reading configuration.')
        # as_bytes reverses the custom bytes-tagging done by the encoder.
        cfg = json.load(file, object_hook=as_bytes)
    if 'instruments' not in cfg.keys():
        self.__logger.critical('Unable to load instruments from configuration file.')
        sys.exit(-1)
    self.instruments = cfg['instruments']
8,
2,
8,
1,
1466951811
] |
def __init__(self, _length, _dtype=None):
    """Create a buffer of ``_length`` slots pre-filled with NaN.

    :param _length: size of the buffer.
    :param _dtype: optional numpy dtype; must be able to represent NaN
        (e.g. a float or object dtype).
    """
    # initialize buffer with NaN values
    if _dtype is None:
        self.data = np.empty(_length)  # np.dtype = float64
        # np.nan: the np.NAN alias was removed in NumPy 2.0.
        self.data[:] = np.nan
    else:
        # type needs to be compatible with NaN assignment
        self.data = np.empty(_length, dtype=_dtype)
        self.data[:] = None
8,
2,
8,
1,
1466951811
] |
def get(self, _n=1):
    """Return the most recent ``_n`` element(s) of the buffer, oldest first."""
    return self.data[-_n:]
8,
2,
8,
1,
1466951811
] |
def forwards(self, orm):
    """South schema migration: create Party, Candidate and the
    Candidate.voters M2M table.

    NOTE(review): 'wikpedia_*' and 'pesonal_site' are misspelled, but they
    are live column names — renaming would require a follow-up migration.
    """
    # Adding model 'Party'
    db.create_table(u'votes_party', (
        (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ('official_site', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('facebook_page', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('wikpedia_article', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('wikpedia_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('open_k_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('logo_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
    ))
    db.send_create_signal(u'votes', ['Party'])
    # Adding model 'Candidate'
    db.create_table(u'votes_candidate', (
        (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('party', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['votes.Party'], null=True, blank=True)),
        ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ('number_of_votes', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('is_knesset_member', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('pesonal_site', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('facebook_page', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('wikpedia_article', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('wikpedia_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('open_k_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
        ('image_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
    ))
    db.send_create_signal(u'votes', ['Candidate'])
    # Adding M2M table for field voters on 'Candidate'
    m2m_table_name = db.shorten_name(u'votes_candidate_voters')
    db.create_table(m2m_table_name, (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('candidate', models.ForeignKey(orm[u'votes.candidate'], null=False)),
        ('facebookcustomuser', models.ForeignKey(orm[u'django_facebook.facebookcustomuser'], null=False))
    ))
    db.create_unique(m2m_table_name, ['candidate_id', 'facebookcustomuser_id'])
2,
2,
2,
13,
1386866227
] |
def onIncomingCall(self, number):
    """Handle an incoming call; the default is to ignore it (do not answer).

    :param number: caller identifier — semantics depend on the telephony
        backend; unused here.
    """
    # Uncomment to accept all incoming calls
    # self.accept()
    pass
33,
5,
33,
2,
1369977456
] |
def get_output_layers(net):
    """Return the names of the network's unconnected (output) layers.

    Handles both old OpenCV builds, where getUnconnectedOutLayers()
    returns indices shaped (N, 1), and OpenCV >= 4.5.4, where it returns
    a flat (N,) array — the original `i[0] - 1` crashed on the latter.

    :param net: cv2.dnn network.
    :return: list of output layer names.
    """
    layer_names = net.getLayerNames()
    out_idx = net.getUnconnectedOutLayers()
    try:
        out_idx = out_idx.flatten()
    except AttributeError:
        # Plain list of 1-element sequences (very old OpenCV builds).
        out_idx = [i[0] for i in out_idx]
    # Layer indices are 1-based.
    return [layer_names[i - 1] for i in out_idx]
33,
5,
33,
2,
1369977456
] |
def letterbox_image(image, size):
    '''resize image with unchanged aspect ratio using padding'''
    import cv2
    import numpy as np
    # shape is (h, w, ...); reverse to get (w, h).
    iw, ih = image.shape[0:2][::-1]
    w, h = size
    scale = min(w / iw, h / ih)
    nw, nh = int(iw * scale), int(ih * scale)
    resized = cv2.resize(image, (nw, nh), interpolation=cv2.INTER_CUBIC)
    # Grey canvas, then paste the resized frame centered on it.
    new_image = np.full((size[1], size[0], 3), 128, np.uint8)
    dx = (w - nw) // 2
    dy = (h - nh) // 2
    new_image[dy:dy + nh, dx:dx + nw, :] = resized
    return new_image
33,
5,
33,
2,
1369977456
] |
def recognize_tflite(i, r):
    """Prepare an encoded image and run it through EfficientDet-Lite.

    :param i: encoded image bytes (JPEG/PNG).
    :param r: unused in the visible code — presumably consumed by the
        caller or later (truncated?) part of this function; TODO confirm.
    """
    import tflite_runtime.interpreter as tflite
    import cv2
    import PIL.Image
    import PIL.ImageOps
    import PIL.ImageFilter
    invoke_time = time.time()
    i = PIL.Image.open(io.BytesIO(i))
    pilimg = i
    # Denoise, then stretch contrast before detection.
    i = i.filter(PIL.ImageFilter.GaussianBlur(1))
    # NOTE(review): cutoff is a 3-tuple; PIL only reads the first two
    # values (0.1, 0) — probably meant (0.1, 0.25). Confirm before changing.
    i = PIL.ImageOps.autocontrast(i, cutoff=(0.1, 0, 25))
    # Lazily create the shared interpreter/labels on first use.
    if not objectDetector[0]:
        objectDetector[0] = tflite.Interpreter(num_threads=4, model_path=os.path.join(path, "efficientdet/efficientdet-lite0-f32.tflite"))
        objectDetector[0].allocate_tensors()
        # NOTE(review): delimiter="/n" looks like a typo for "\n" — with
        # loadtxt's default any-whitespace splitting it may still work;
        # verify against the labelmap format.
        objectDetector[1] = numpy.loadtxt(os.path.join(path, "labelmap.txt"), dtype=str, delimiter="/n")
    interpreter = objectDetector[0]
    labels = objectDetector[1]
    original_image = toImgOpenCV(i)
    # Get input and output tensors.
    input_details = interpreter.get_input_details()
    output_details = interpreter.get_output_details()
    tensor_w = input_details[0]['shape'][1]
    tensor_h = input_details[0]['shape'][2]
    # Letterbox to the model's input size, convert BGR->RGB, add batch dim.
    image = letterbox_image(original_image, (tensor_w, tensor_h))
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    input_image = numpy.expand_dims(image, 0)
33,
5,
33,
2,
1369977456
] |
def on_service_state_change(zeroconf, service_type, name, state_change):
    """Track zeroconf HTTP services in the shared `httpservices` list.

    Appends on ServiceStateChange.Added (list bounded at 2048 entries),
    removes the matching entry otherwise; a failed removal is logged,
    not fatal.
    """
    with httplock:
        info = zeroconf.get_service_info(service_type, name)
        if not info:
            return
        # Build the entry once; the original duplicated this expression.
        entry = (tuple(sorted(
            [socket.inet_ntoa(i) for i in info.addresses])), service_type, name, info.port)
        if state_change is ServiceStateChange.Added:
            httpservices.append(entry)
            if len(httpservices) > 2048:
                httpservices.pop(0)
        else:
            try:
                httpservices.remove(entry)
            except Exception:
                # Narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                logging.exception("???")
33,
5,
33,
2,
1369977456
] |
def onMotionBegin(self, *a, **k):
    """Forward a motion-start event to the registered motion callback."""
    self.mcb(True)
33,
5,
33,
2,
1369977456
] |
def onPresenceValue(self, v):
    """Forward a raw presence value to the registered callback."""
    self.presenceval(v)
33,
5,
33,
2,
1369977456
] |
def onBarcode(self, *a, **k):
    """Forward a barcode-detected event to the registered callback."""
    self.bcb(*a, **k)
33,
5,
33,
2,
1369977456
] |
def getGstreamerSourceData(self, s, cfg, un, pw):
    """Build the source end of the GStreamer pipeline for source string *s*.

    Dispatches on the source URI scheme/keyword (HLS, file://, empty/test
    pattern, webcam, rtsp://, screen). Sets ``self.h264source`` to the tee
    carrying H.264 video and ``self.mp3src`` to the AAC audio queue when
    the source provides audio.

    :param s: source specification string.
    :param cfg: device configuration mapping.
    :param un: username for RTSP sources (may be empty).
    :param pw: password for RTSP sources (may be empty).
    :return: the source string *s*, unchanged.
    """
    self.config = cfg
    self.h264source = self.mp3src = False
    self.syncFile = False
    # The source is an HLS stream
    if s.endswith(".m3u8") and s.startswith("http"):
        self.addElement("souphttpsrc", location=s)
        self.addElement("hlsdemux")
        self.addElement("tsdemux")
        self.addElement("parsebin")
        self.h264source = self.addElement("tee")
    elif s.startswith("file://"):
        if not os.path.exists(s[len("file://"):]):
            raise RuntimeError("Bad file: " + s)
        self.addElement(
            "multifilesrc", location=s[len("file://"):], loop=True)
        if s.endswith(".mkv"):
            dm = self.addElement("matroskademux")
        else:
            dm = self.addElement("qtdemux")
        self.addElement(
            "h264parse", connectWhenAvailable="video/x-h264")
        #self.addElement('identity', sync=True)
        # File playback must be rate-limited to real time downstream.
        self.syncFile = True
        self.addElement('queue', max_size_time=10000000)
        self.h264source = self.addElement("tee")
        # Audio branch: decode, convert and re-encode to AAC.
        self.addElement("decodebin3", connectToOutput=dm,
                        connectWhenAvailable="audio")
        self.addElement("audioconvert", connectWhenAvailable="audio")
        self.addElement("audiorate")
        self.addElement("queue", max_size_time=10000000)
        self.addElement("voaacenc")
        self.addElement("aacparse")
        self.mp3src = self.addElement("queue", max_size_time=10000000)
    # Make a video test src just for this purpose
    elif not s:
        self.addElement("videotestsrc", is_live=True)
        self.addElement("videorate")
        self.addElement("capsfilter", caps="video/x-raw,framerate=" +
                        (self.config.get('device.fps', '4') or '4') + "/1")
        self.addElement(
            "capsfilter", caps="video/x-raw, format=I420, width=320, height=240")
        self.addElement("videoconvert")
        self.addElement("x264enc", tune="zerolatency",
                        byte_stream=True, rc_lookahead=0)
        self.addElement("h264parse")
        self.h264source = self.addElement("tee")
    # Make a video test src just for this purpose
    elif s == "test":
        self.addElement("videotestsrc", is_live=True)
        self.addElement("capsfilter", caps="video/x-raw,framerate=" +
                        (self.config.get('device.fps', '4') or '4') + "/1")
        self.addElement(
            "capsfilter", caps="video/x-raw, format=I420, width=320, height=240")
        self.addElement("videoconvert")
        self.addElement("x264enc", tune="zerolatency", key_int_max=int(
            (self.config.get('device.fps', '4') or '4')) * 2)
        self.addElement("h264parse")
        self.h264source = self.addElement("tee")
    elif s == "webcam" or s == "webcam_audio":
        self.addElement("v4l2src")
        self.addElement("videorate", drop_only=True)
        self.addElement("capsfilter", caps="video/x-raw,framerate=" +
                        (self.config.get('device.fps', '4') or '4') + "/1")
        self.addElement("videoconvert")
        self.addElement("queue", max_size_time=10000000)
        # Prefer the hardware encoder; fall back to software x264.
        try:
            self.addElement("omxh264enc", interval_intraframes=int(
                (self.config.get('device.fps', '4') or '4')) * 2)
        except Exception:
            self.addElement("x264enc", tune="zerolatency",
                            rc_lookahead=0, bitrate=int(self.dev.config['device.bitrate']), key_int_max=int((self.config.get('device.fps', '4') or '4')) * 2)
        self.addElement(
            "capsfilter", caps="video/x-h264, profile=main")
        self.addElement("h264parse", config_interval=1)
        self.h264source = self.addElement("tee")
        # Separate ALSA audio branch encoded to AAC.
        self.addElement("alsasrc", connectToOutput=False)
        self.addElement("queue")
        self.addElement("audioconvert")
        self.addElement("voaacenc")
        self.addElement("aacparse")
        self.mp3src = self.addElement("queue", max_size_time=10000000)
    elif s.startswith("rtsp://"):
        rtsp = self.addElement(
            "rtspsrc", location=s, latency=100, async_handling=True, user_id=un or None, user_pw=pw or None)
        self.addElement("rtph264depay", connectWhenAvailable="video")
        self.addElement("h264parse", config_interval=1)
        self.h264source = self.addElement("tee")
        self.addElement("decodebin", connectToOutput=rtsp,
                        connectWhenAvailable="audio", async_handling=True)
        self.addElement("audioconvert")
        self.addElement("audiorate")
        self.addElement("voaacenc")
        self.addElement("aacparse")
        self.mp3src = self.addElement("queue", max_size_time=10000000)
    elif s == "screen":
        self.addElement("ximagesrc")
        self.addElement("capsfilter", caps="video/x-raw,framerate=" +
                        (self.config.get('device.fps', '4') or '4') + "/1")
        self.addElement("videoconvert")
        self.addElement("queue", max_size_time=10000000)
        try:
            self.addElement("omxh264enc", interval_intraframes=int(
                (self.config.get('device.fps', '4') or '4')))
        except Exception:
            self.addElement("x264enc", tune="zerolatency",
                            rc_lookahead=0, bitrate=int(self.dev.config['device.bitrate']), key_int_max=int((self.config.get('device.fps', '4') or '4')) * 2)
        self.addElement(
            "capsfilter", caps="video/x-h264, profile=main")
        self.addElement("h264parse")
        self.h264source = self.addElement("tee")
    # Tested
    # rtspsrc location=rtsp://192.168.1.6:8080/h264_pcm.sdp latency=100 ! queue ! rtph264depay ! h264parse
    return s
33,
5,
33,
2,
1369977456
] |
def putTrashInBuffer(self):
    "Force a wake up of a thread sitting around waiting for the pipe"
    if not os.path.exists(self.rawFeedPipe):
        return
    import select
    fd = None
    try:
        # os.open returns a raw fd (an int): the original then called
        # f.write(...), which always raised AttributeError, and it also
        # opened the FIFO read-only (no O_WRONLY) and never closed it.
        fd = os.open(self.rawFeedPipe,
                     flags=os.O_WRONLY | os.O_NONBLOCK | os.O_APPEND)
        stalls = 0
        for _ in range(188 * 42):
            r, w, x = select.select([], [fd], [], 0.2)
            if w:
                os.write(fd, b'b')
            else:
                stalls += 1
                if stalls > 15:
                    return
    except Exception:
        # Best-effort wake-up: report but never propagate.
        print(traceback.format_exc())
    finally:
        if fd is not None:
            os.close(fd)
33,
5,
33,
2,
1369977456
] |
def close(self):
    """Shut the device down.

    Stops the pipeline, wakes and waits for the widget thread, removes
    the raw feed FIFO and the shared-memory segment buffer, and
    unregisters the health checker. Every step is best-effort.
    """
    self.closed = True
    try:
        self.process.stop()
    except Exception:
        print(traceback.format_exc())
    self.runWidgetThread = False
    try:
        # Wake any reader blocked on the FIFO so the thread can exit.
        self.putTrashInBuffer()
    except Exception:
        print(traceback.format_exc())
    try:
        os.remove(self.rawFeedPipe)
    except Exception:
        print(traceback.format_exc())
    # Give the widget thread up to ~1s to exit.
    s = 10
    while s:
        s -= 1
        if self.threadExited:
            break
        time.sleep(0.1)
    devices.Device.close(self)
    try:
        shutil.rmtree("/dev/shm/knvr_buffer/" + self.name)
    except Exception:
        pass
    try:
        self.checker.unregister()
    except Exception:
        logger.exception("Unregistering")
33,
5,
33,
2,
1369977456
] |
def onRawTSData(self, data):
    """Hook for raw MPEG-TS data; the default implementation discards it.

    :param data: raw transport-stream bytes from the pipeline.
    """
    pass
33,
5,
33,
2,
1369977456
] |
def connect(self, config):
    """(Re)start the GStreamer capture pipeline for this camera.

    Tears down any running pipeline, rebuilds the shared-memory segment
    buffer, the raw TS FIFO, the motion/barcode analysis branch and the
    HLS sink, then starts the pipeline and the data-push thread.
    Rate-limited: silently returns if started less than 15s ago.

    :param config: device configuration mapping.
    """
    if self.closed:
        return
    self.config = config
    if time.monotonic() - self.lastStart < 15:
        return
    # When we reconnect we stop the recording and motion
    self.set_data_point("record", False, None, automated_record_uuid)
    self.set_data_point("raw_motion_value", 0)
    self.set_data_point("motion_detected", 0)
    self.activeSegmentDir = self.segmentDir = None
    self.lastStart = time.monotonic()
    if self.process:
        try:
            self.process.stop()
        except Exception:
            print(traceback.format_exc())
    # Used to check that things are actually still working.
    # Set them to prevent a loop.
    self.lastSegment = time.monotonic()
    self.lastPushedWSData = time.monotonic()
    # Can't stop as soon as they push stop, still need to capture
    # the currently being recorded segment
    self.stoprecordingafternextsegment = 0
    try:
        shutil.rmtree("/dev/shm/knvr_buffer/" + self.name)
    except Exception:
        pass
    os.makedirs("/dev/shm/knvr_buffer/" + self.name)
    try:
        # Make it so nobody else can read the files
        os.chmod("/dev/shm/knvr_buffer/" + self.name, 0o700)
    except Exception:
        pass
    # Close the old thread
    self.runWidgetThread = time.monotonic()
    self.putTrashInBuffer()
    # Give the old widget thread up to ~1s to exit.
    s = 10
    while s:
        s -= 1
        if self.threadExited:
            break
        time.sleep(0.1)
    # Exec is needed so we can kill it
    # self.process = reap.Popen("exec gst-launch-1.0 -q "+getGstreamerSourceData(self.data.get('device.source','')) +"! ",shell=True)
    self.process = Pipeline()
    self.process.dev = self
    self.process.getGstreamerSourceData(
        self.config.get('device.source', ''), self.config, self.config.get('device.username', ''), self.config.get('device.password', ''))
    x = self.process.addElement(
        "queue", connectToOutput=self.process.h264source, max_size_time=10000000)
    self.process.addElement("mpegtsmux", connectToOutput=(
        x, self.process.mp3src))
    self.mpegtssrc = self.process.addElement("tee")
    # Path to be created
    path = self.rawFeedPipe
    # Get rid of the old one, it could be clogged
    try:
        os.remove(path)
    except OSError:
        pass
    try:
        os.mkfifo(path)
    except OSError:
        print("Failed to create FIFO")
    os.chmod(path, 0o700)
    self.process.addElement("queue", max_size_time=10000000)
    self.process.addElement("filesink", location=path,
                            buffer_mode=2, sync=self.process.syncFile)
    # # Motion detection part of the graph
    # # This flag discards every unit that cannot be handled individually
    self.process.addElement(
        "identity", drop_buffer_flags=8192, connectToOutput=self.process.h264source)
    self.process.addElement("queue", max_size_time=20000000,
                            leaky=2)
    self.process.addElement("capsfilter", caps="video/x-h264")
    # Prefer hardware decode; fall back to software.
    # NOTE(review): bare except — consider narrowing to Exception.
    try:
        self.process.addElement("omxh264dec")
    except:
        self.process.addElement("avdec_h264")
    # self.process.addElement("videorate",drop_only=True)
    # self.process.addElement("capsfilter", caps="video/x-raw,framerate=1/1")
    rawtee = self.process.addElement("tee")
    self.process.addElement("queue", max_size_buffers=1, leaky=2)
    self.snapshotter = self.process.addPILCapture()
    self.process.addElement("videoanalyse", connectToOutput=rawtee)
    if self.config.get('device.barcodes', '').lower() in ("yes", "true", "detect", "enable", "on"):
        self.process.addElement("zbar")
        self.print("Barcode detection enabled")
    #self.process.addElement("videoconvert", chroma_resampler=0)
    # self.process.addElement(
    #     "motioncells", sensitivity=float(self.config.get('device.motion_sensitivity', '0.75')), gap=1, display=False)
    # self.process.addElement("fakesink")
    # Not a real GST element. The iceflow backend hardcodes this motion/presense detection
    self.process.addPresenceDetector((640, 480))
    self.process.mcb = self.motion
    self.process.bcb = self.barcode
    self.process.acb = self.analysis
    self.process.presenceval = self.presencevalue
    self.process.addElement("hlssink", connectToOutput=self.mpegtssrc, message_forward=True, async_handling=True, max_files=0,
                            location=os.path.join(
                                "/dev/shm/knvr_buffer/", self.name, r"segment%08d.ts"),
                            playlist_root=os.path.join(
                                "/dev/shm/knvr_buffer/", self.name),
                            playlist_location=os.path.join(
                                "/dev/shm/knvr_buffer/", self.name, "playlist.m3u8"),
                            target_duration=5)
    self.datapusher = threading.Thread(
        target=self.thread, daemon=True, name="NVR")
    self.datapusher.start()
    self.process.start()
    # Used to check that things are actually still working.
    # Set them to prevent a loop.
    self.lastSegment = time.monotonic()
    self.lastPushedWSData = time.monotonic()
33,
5,
33,
2,
1369977456
] |
def setsegmentDir(self, manual=False):
    """Create and activate a fresh timestamped recording directory.

    Layout: <storageDir>/<name>/recordings/<UTC day>/<UTC timestamp>/.
    Writes the HLS VOD playlist header (segments get appended later) and a
    small autocontrasted JPEG thumbnail from the current camera snapshot.

    NOTE(review): *manual* is currently unused; the comment below suggests
    manually triggered recordings were meant to use a different folder.
    """
    with self.recordlock:
        # Manually triggered recordings should go in a different folder
        my_date = datetime.utcnow()
        date = my_date.replace(
            hour=0, minute=0, second=0, microsecond=0).isoformat() + "+00:00"
        t = my_date.isoformat() + "+00:00"
        d = os.path.join(self.storageDir, self.name, "recordings", date, t)
        os.makedirs(d)
        self.segmentDir = d

        # HLS playlist header; moveSegments appends the EXTINF entries.
        with open(os.path.join(self.segmentDir, "playlist.m3u8"), "w") as f:
            f.write("#EXTM3U\r\n")
            f.write("#EXT-X-START: TIME-OFFSET=0\r\n")
            f.write("#EXT-X-PLAYLIST-TYPE: VOD\r\n")
            f.write("#EXT-X-VERSION:3\r\n")
            f.write("#EXT-X-ALLOW-CACHE:NO\r\n")
            f.write("#EXT-X-TARGETDURATION:5\r\n")

        # Capture a tiny preview snapshot.
        # Fix: "import PIL" alone does not load the Image/ImageOps
        # submodules, so PIL.Image / PIL.ImageOps could raise AttributeError.
        from PIL import Image, ImageOps
        x = Image.open(io.BytesIO(self.request_data_point("bmp_snapshot")))
        x.thumbnail((320, 240))
        # Fix: autocontrast's cutoff must be a number or a (low, high) pair.
        # The original cutoff=(0.1, 0, 25) 3-tuple raises ValueError; the
        # intent was evidently (0.1, 0.25).
        x = ImageOps.autocontrast(x, cutoff=(0.1, 0.25))
        with open(os.path.join(self.segmentDir, "thumbnail.jpg"), 'wb') as f:
            x.save(f, 'jpeg')
33,
5,
33,
2,
1369977456
] |
def onMultiFileSink(self, fn, *a, **k):
    """hlssink callback fired when a new segment file appears in the buffer.

    Moves finished segments into the recording directory, then computes how
    many loop-buffer segments to keep (loop_record_length seconds divided
    into 5s segments, minimum 1).

    NOTE(review): this block appears truncated here — `n` is computed but
    never used in the visible lines.
    """
    with self.recordlock:
        self.moveSegments()
        d = os.path.join("/dev/shm/knvr_buffer/", self.name)
        ls = os.listdir(d)
        # Only finished MPEG-TS segments, in name (i.e. chronological) order.
        ls = list(sorted([i for i in ls if i.endswith(".ts")]))
        n = max(1,int((float(self.config.get('device.loop_record_length', 5))+2.5)/5))
33,
5,
33,
2,
1369977456
] |
def moveSegments(self):
    """Move finished HLS segments from /dev/shm into the recording directory.

    For every finished .ts segment (the newest is skipped — hlssink may
    still be writing it), look up its duration in hlssink's playlist,
    move it into the active recording directory, and append a matching
    EXTINF/PROGRAM-DATE-TIME entry to that directory's VOD playlist.

    When stoprecordingafternextsegment is set, the playlist is finalized
    with EXT-X-ENDLIST and recording state is cleared; otherwise recordings
    roll over to a new directory after roughly an hour of segments.
    """
    with self.recordlock:
        d = os.path.join("/dev/shm/knvr_buffer/", self.name)
        ls = os.listdir(d)
        ls = list(sorted([i for i in ls if i.endswith(".ts")]))

        if self.activeSegmentDir or self.segmentDir:
            # Ignore latest, that could still be recording
            for i in ls[:-1]:
                self.lastSegment = time.monotonic()
                self.set_data_point('running', 1)

                # Someone could delete a segment dir while it is being
                # written to.  Prevent that from locking everything up.
                if os.path.exists(self.activeSegmentDir or self.segmentDir):
                    # Find the duration of the segment from the hlssink playlist file.
                    # Close the handle before the file moves below.
                    with open(os.path.join(d, "playlist.m3u8")) as f:
                        x = f.read()

                    # hlssink has not listed this segment yet; retry later.
                    if i not in x:
                        return
                    x = x.split(i)[0]
                    x = float(re.findall(r"EXTINF:\s*([\d\.]*)", x)[-1])

                    # Assume the start time is mod time minus length
                    my_date = datetime.utcfromtimestamp(
                        os.stat(os.path.join(d, i)).st_mtime - x)
                    t = my_date.isoformat() + "+00:00"

                    shutil.move(os.path.join(d, i),
                                self.activeSegmentDir or self.segmentDir)
                    with open(os.path.join(self.activeSegmentDir or self.segmentDir, "playlist.m3u8"), "a+") as f:
                        f.write("\r\n")
                        f.write("#EXTINF:" + str(x) + ",\r\n")
                        f.write("#EXT-X-PROGRAM-DATE-TIME:" + t + "\r\n")
                        f.write(i + "\r\n")

                    self.directorySegments += 1

                    if self.stoprecordingafternextsegment:
                        # Finalize the VOD playlist and clear recording state.
                        x = self.segmentDir
                        self.segmentDir = None
                        self.activeSegmentDir = None
                        with open(os.path.join(x, "playlist.m3u8"), "a+") as f:
                            f.write("\r\n#EXT-X-ENDLIST\r\n")
                        break
                    else:
                        # Don't make single directories with more than an hour of video.
                        if self.directorySegments > (3600 / 5):
                            self.setsegmentDir()
                        # Now we can transition to the new one!
                        self.activeSegmentDir = self.segmentDir
                        self.directorySegments = 0
33,
5,
33,
2,
1369977456
] |
def commandState(self, v, t, a):
    """Handler for the 'switch' datapoint.

    Truthy v re-runs the health check (which brings the pipeline up);
    falsy v tears the pipeline down and waits briefly for the widget
    thread to exit.
    """
    with self.streamLock:
        if v:
            # Switched on: the periodic check (re)starts everything.
            self.check()
            return

        # Switched off: stop the media process and the widget thread.
        if self.process:
            self.process.stop()
        self.runWidgetThread = False
        try:
            self.putTrashInBuffer()
        except Exception:
            print(traceback.format_exc())

        # Give the widget thread up to ~1 second to notice and exit.
        for _ in range(10):
            if self.threadExited:
                break
            time.sleep(0.1)
33,
5,
33,
2,
1369977456
] |
def motion(self, v):
    """Apply a motion-detector state change.

    Recording control runs before the datapoint is published, so a motion
    edge can start/stop capture before listeners observe the new state.
    """
    self.doMotionRecordControl(v)
    self.set_data_point("motion_detected", v)
33,
5,
33,
2,
1369977456
] |
def presencevalue(self, v):
    """Publish the raw presence value and threshold it into a motion bool.

    The GStreamer pipeline only gives us a raw presence number, so motion
    detection is done here by comparing against device.motion_threshold.
    """
    self.set_data_point("raw_motion_value", v)
    threshold = float(self.config.get('device.motion_threshold', 0.08))
    self.motion(v > threshold)
33,
5,
33,
2,
1369977456
] |
def f():
    """Run one object-detection pass against the current snapshot.

    NOTE(review): this block appears truncated — the try below has no
    visible except/finally (presumably releasing object_detection_lock),
    and obj_rec_wait / recognize_tflite / object_detection_lock are closure
    or module names defined outside this view.
    """
    # Wait longer if not already recording so that things that don't need to detect as much give up faster.
    # prioritize reliable start of record!
    # Cannot wait too long though because we need to quickly fail back to motion only.
    # This calculates our length in terms of how much loop recorded footage we have.
    # We have to detect within this window or it will disappear before we capture it.
    # Note
    n = max(1, int((float(self.config.get('device.loop_record_length', 5))+2.5)/5))*5
    # If we have not seen any objects lately, better check more often because
    # We might be about to stop the recording even if there is still motion, so it must be accurate.
    if self.lastObjectDetectionHit > (time.monotonic()-15):
        t = 3 if self.datapoints['record'] else (n*0.75)
    else:
        t = n*0.75
    if object_detection_lock.acquire(True, t+(random.random()*0.1)):
        try:
            # We have to make sure an older detection does not wait on a newer detection.
            # Only the latest should get through, or we would queue up a problem.
            if self.obj_rec_wait_timestamp > obj_rec_wait:
                return
            o=recognize_tflite(self.request_data_point("bmp_snapshot"), self)
            self.lastDidObjectRecognition=time.monotonic()
            self.lastObjectSet=o
33,
5,
33,
2,
1369977456
] |
def analysis(self, v):
    """Forward GStreamer videoanalyse statistics into device datapoints."""
    # Map the element's hyphenated keys onto our datapoint names.
    for point, key in (("luma_average", "luma-average"),
                       ("luma_variance", "luma-variance")):
        self.set_data_point(point, v[key])
33,
5,
33,
2,
1369977456
] |
def __init__(self, name, data):
    """Create an NVR camera-channel device.

    Sets up recording state, the storage directory, all datapoints
    (stream, snapshot, switch, record, motion/luma telemetry), alarms and
    configuration defaults, then connects to the source and schedules the
    periodic health check.

    name -- device name, also used for the /dev/shm buffer paths
    data -- device configuration passed through to devices.Device
    """
    devices.Device.__init__(self, name, data)
    try:
        self.runWidgetThread = True
        self.threadExited = True
        self.closed = False
        self.set_config_default("device.storage_dir", '~/NVR')
        self.set_config_default("device.loop_record_length", '5')
        self.process = None
        self.lastInferenceTime = 1
        self.lastDidObjectRecognition = 0
        # So we can tell if there is new object recognition data since we last checked.
        self.lastDidMotionRecordControl = 0
        # Used to detect motion by looking at changes in the number of relevant objects.
        # Response time may be very low.
        self.oldRelevantObjectCount = -1
        # The most recent set of object detection results.
        self.lastObjectSet = None
        # We don't want to stop till a few seconds after an event that would cause motion
        self.lastRecordTrigger = 0
        # We also DO want to stop if we are in object record mode and have
        # not seen the object in a long time.
        self.lastObjectDetectionHit = 0
        # If this is true, record when there is motion
        self.set_config_default("device.motion_recording", 'no')

        self.storageDir = os.path.expanduser(
            self.config['device.storage_dir'] or '~/NVR')
        self.segmentDir = None
        # When changing segment dir, we can't do it instantly; we wait to be
        # done with the current file.
        self.activeSegmentDir = None
        # How many segments in this dir. Must track so we can switch to a
        # new directory if we need to.
        self.directorySegments = 0
        self.lastshm = None
        self.canAutoStopRecord = False

        # Fix: exist_ok avoids the old check-then-create race, and the
        # permissions are re-tightened on every startup instead of only
        # when the directory is first created.
        os.makedirs(self.storageDir, exist_ok=True)
        # Secure it!
        os.chmod(self.storageDir, 0o700)

        self.tsQueue = b''
        self.recordlock = threading.RLock()

        self.rawFeedPipe = "/dev/shm/knvr_buffer/" + self.name + \
            "." + str(time.monotonic()) + ".raw_feed.tspipe"

        self.bytestream_data_point("raw_feed",
                                   subtype='mpegts',
                                   writable=False)

        # Give this a little bit of caching
        self.bytestream_data_point("bmp_snapshot",
                                   subtype='bmp',
                                   writable=False,
                                   interval=0.3)
        self.set_data_point_getter('bmp_snapshot', self.getSnapshot)

        self.numeric_data_point("switch",
                                min=0,
                                max=1,
                                subtype='bool',
                                default=1,
                                handler=self.commandState)

        self.numeric_data_point("record",
                                min=0,
                                max=1,
                                subtype='bool',
                                default=0,
                                handler=self.onRecordingChange)

        self.numeric_data_point("running",
                                min=0,
                                max=1,
                                subtype='bool',
                                writable=False)

        self.numeric_data_point("motion_detected",
                                min=0,
                                max=1,
                                subtype='bool',
                                writable=False)

        self.numeric_data_point("raw_motion_value",
                                min=0,
                                max=250,
                                writable=False)

        self.numeric_data_point("luma_average",
                                min=0,
                                max=1,
                                writable=False)

        self.numeric_data_point("luma_variance",
                                min=0,
                                max=1,
                                writable=False)

        self.set_alarm("Camera dark", "luma_average",
                       "value < 0.095", trip_delay=3, auto_ack=True)
        self.set_alarm("Camera low varience", "luma_variance",
                       "value < 0.008", trip_delay=3, auto_ack=True)
        self.set_alarm("Long_recording", "record",
                       "value > 0.5", trip_delay=800, auto_ack=True, priority='debug')
        self.set_alarm("Not Running", "running",
                       "value < 0.5", trip_delay=5, auto_ack=False, priority='warning')

        self.set_config_default("device.source", '')
        self.set_config_default("device.username", '')
        self.set_config_default("device.password", '')
        self.set_config_default("device.fps", '4')
        self.set_config_default("device.barcodes", 'no')
        self.set_config_default("device.object_detection", 'no')
        self.set_config_default("device.object_record", 'person, dog, cat, horse, sheep, cow, handbag, frisbee, bird, backpack, suitcase, sports ball')
        self.set_config_default("device.motion_threshold", '0.08')
        self.set_config_default("device.bitrate", '386')
        self.set_config_default("device.retain_days", '90')
        # Obsolete key; drop it if present.
        self.config.pop("device.motion_sensitivity", 0)

        self.retainDays = int(self.config['device.retain_days'])

        if self.config['device.barcodes'].lower() in ('yes', 'true', 'enable', 'enabled'):
            self.object_data_point("barcode",
                                   writable=False)

        if self.config['device.object_detection'].lower() in ('yes', 'true', 'enable', 'enabled'):
            self.object_data_point("detected_objects",
                                   writable=False)

        self.config_properties['device.loop_record_length'] = {
            'description': 'How many seconds to buffer at all times to allow recording things before motion events actually happen.'
        }
        self.config_properties['device.barcodes'] = {
            'type': 'bool'
        }
        self.config_properties['device.object_detection'] = {
            'type': 'bool',
            'description': "Enable object detection. See kaithem readme for where to put model files. "
        }
        self.config_properties['device.object_record'] = {
            'description': "Does nothing without object detection. Only record if there is both motion, and a recognized object on the list in the frame. If empty, always record. Can use any COCO item."
        }
        self.config_properties['device.source'] = {
            'secret': True
        }
        self.config_properties['device.motion_recording'] = {
            'type': 'bool'
        }
        self.config_properties['device.storage_dir'] = {
            'type': 'local_fs_dir'
        }

        self.streamLock = threading.RLock()
        self.lastStart = 0

        # Register for media serving and bring the pipeline up.
        mediaFolders[name] = self
        self.connect(self.config)
        self.set_data_point('switch', 1)

        # Used to check that things are actually still working.
        # Set them to prevent a loop.
        self.lastSegment = time.monotonic()
        self.lastPushedWSData = time.monotonic()

        self.check()
        from src import scheduling
        self.checker = scheduling.scheduler.every(self.check, 3)
    except Exception:
        self.handleException()
33,
5,
33,
2,
1369977456
] |
def help_handler(t, s, p):
    """Bot command handler for ".help".

    t -- message target (chat/room)
    s -- session object (provides .jid and .lmsg for localized replies)
    p -- raw argument string, e.g. "", "category", "-en topic"

    With no argument, lists all categories with topic counts.  With a
    category name, lists its topics.  With a topic name, shows the topic
    in the requested (or session-default) language, or lists the languages
    the topic is available in.
    """
    p = p.strip()
    # Optional "-xx " language prefix, then an optional leading dot.
    # Fix: raw string — the original non-raw '\-' / '\ ' escapes trigger
    # SyntaxWarning on modern Python (regex is unchanged).
    q = re.search(r'^(-.. )?\.?(.+)$', p)
    if q:
        rlang = q.groups()[0]
        if rlang:
            rlang = rlang[1:3]
        else:
            rlang = lang.getLang(s.jid)
        p = q.groups()[1]
        if p.startswith('.'):
            p = p[1:]
    else:
        rlang = lang.getLang(s.jid)
        p = ''
    if p:
        if p.startswith('.'):
            p = p[1:]
        if p in HELP_CATEGORIES:
            # Fix: sorted() instead of list.sort() — don't mutate the shared
            # HELP_CATEGORIES entry as a side effect of displaying it.
            answer = ', '.join(sorted(HELP_CATEGORIES[p]))
            s.lmsg(t, 'help_category', answer)
        elif p in HELP_LANGS:
            q = HELP_LANGS[p]
            if rlang in q:
                content = load_help_content(p, rlang)
                categories = ', '.join(
                    [w for w in HELP_CATEGORIES.keys() if p in HELP_CATEGORIES[w]])
                s.lmsg(t, 'help_show', categories, content)
            else:
                languages = ["'.help -%s %s'" % (w, p) for w in HELP_LANGS[p]]
                s.lmsg(t, 'help_other_languages', p, rlang, ', '.join(languages))
        else:
            s.lmsg(t, 'help_not_found', p)
    else:
        ans = ['%s(%s)' % (w, len(HELP_CATEGORIES[w])) for w in HELP_CATEGORIES.keys()]
        ans.sort()
        categories = ', '.join(ans)
        s.lmsg(t, 'help_categories', categories)
7,
3,
7,
40,
1303910122
] |
def poll(cls, context):
    """Operator availability check: unconditionally enabled."""
    return True
827,
128,
827,
118,
1457295769
] |
def poll(cls, context):
    """This operator can always run; no context requirements."""
    return True
827,
128,
827,
118,
1457295769
] |
def invoke(self,context,event):
    """Open this operator as a popup properties dialog."""
    return context.window_manager.invoke_props_dialog(self)
827,
128,
827,
118,
1457295769
] |
def execute(self, context):
    """Ensure the active object's mesh has a Basis shapekey.

    NOTE(review): this block appears truncated — the usual operator return
    (e.g. {"FINISHED"}) is not visible here.
    """
    obj = context.active_object
    if obj.data.shape_keys == None:
        obj.shape_key_add(name="Basis",from_mix=False)
827,
128,
827,
118,
1457295769
] |
def poll(cls, context):
    """Always available, regardless of the current context."""
    return True
827,
128,
827,
118,
1457295769
] |
def invoke(self,context,event):
    """Ask the user to confirm before execute() runs."""
    return context.window_manager.invoke_confirm(self, event)
827,
128,
827,
118,
1457295769
] |
def execute(self, context):
    """Operator body.

    NOTE(review): appears truncated — only the active-object lookup is
    visible in this view.
    """
    obj = context.active_object
827,
128,
827,
118,
1457295769
] |
def poll(cls, context):
    """No preconditions: report the operator as always usable."""
    return True
827,
128,
827,
118,
1457295769
] |
def invoke(self,context,event):
    """Look up the shapekey currently selected in the UI list.

    NOTE(review): appears truncated — `shape` is unused in the visible
    lines and no return value is visible.
    """
    obj = context.active_object
    idx = int(obj.coa_selected_shapekey)
    shape = obj.data.shape_keys.key_blocks[idx]
827,
128,
827,
118,
1457295769
] |
def execute(self, context):
    """Operate on the shapekey currently selected in the UI list.

    NOTE(review): appears truncated — `shape` is unused in the visible
    lines and no return value is visible.
    """
    obj = context.active_object
    idx = int(obj.coa_selected_shapekey)
    shape = obj.data.shape_keys.key_blocks[idx]
827,
128,
827,
118,
1457295769
] |
def get_shapekeys(self,context):
    """EnumProperty items callback: a "New Shapekey" entry plus every
    existing shapekey of the active mesh (index 0, the Basis key, is
    skipped).

    NOTE(review): appears truncated — the return of SHAPEKEYS is not
    visible in this view.
    """
    SHAPEKEYS = []
    SHAPEKEYS.append(("NEW_KEY","New Shapekey","New Shapekey","NEW",0))
    obj = context.active_object
    if obj.type == "MESH" and obj.data.shape_keys != None:
        i = 0
        for i,shape in enumerate(obj.data.shape_keys.key_blocks):
            if i > 0:
                SHAPEKEYS.append((shape.name,shape.name,shape.name,"SHAPEKEY_DATA",i+1))
827,
128,
827,
118,
1457295769
] |
def poll(cls, context):
    """Unconditional: nothing in the context can disable this operator."""
    return True
827,
128,
827,
118,
1457295769
] |
def check(self,context):
    """Dialog re-draw hook: nothing to validate, so always request redraw."""
    return True
827,
128,
827,
118,
1457295769
] |
def draw(self,context):
    """Dialog layout: the shapekey selector, plus a name field when the
    user chose to create a new key."""
    column = self.layout.column()
    column.prop(self, "shapekeys")
    if self.shapekeys == "NEW_KEY":
        column.prop(self, "shapekey_name")
827,
128,
827,
118,
1457295769
] |
def invoke(self,context,event):
    """NOTE(review): appears truncated — only the active-object lookup is
    visible in this view."""
    obj = context.active_object
827,
128,
827,
118,
1457295769
] |
def set_most_driven_shapekey(self,obj):
    """Activate the shapekey with the highest (strictly positive) value.

    Leaves the active index untouched when obj is None, has no shapekeys,
    or every key's value is 0.0.  Ties keep the first (lowest-index) key.
    """
    best_index, best_value = None, 0.0
    if obj is not None and obj.data.shape_keys is not None:
        for key_index, key_block in enumerate(obj.data.shape_keys.key_blocks):
            if key_block.value > best_value:
                best_index, best_value = key_index, key_block.value
        if best_index is not None:
            obj.active_shape_key_index = best_index
827,
128,
827,
118,
1457295769
] |
def execute(self, context):
    """Enter shapekey edit mode; fails early when no mesh sprite is active.

    NOTE(review): appears truncated after the object lookup.
    """
    self.objs = []
    if context.active_object == None or context.active_object.type != "MESH":
        self.report({"ERROR"},"Sprite is not selected. Cannot go in Edit Mode.")
        return{"CANCELLED"}
    obj = bpy.data.objects[context.active_object.name] if context.active_object.name in bpy.data.objects else None
827,
128,
827,
118,
1457295769
] |
def exit_edit_mode(self,context,event,obj):
    """Leave shapekey edit mode: remove the draw handler, restore object
    visibility/selection, reset edit-mode flags and re-enable armature
    posing.  Always returns {"FINISHED"}."""
    ### remove draw handler on exiting modal mode
    bpy.types.SpaceView3D.draw_handler_remove(self.draw_handler, "WINDOW")
    for obj in context.selected_objects:
        obj.select = False
    self.sprite_object.coa_edit_shapekey = False
    self.sprite_object.coa_edit_mode = "OBJECT"
    for obj_name in self.objs:
        obj = bpy.context.scene.objects[obj_name]
        obj.hide = False
        # NOTE(review): the None check is dead — obj.type would already have
        # raised if obj were None (short-circuit order looks reversed).
        if obj.type == "MESH" and obj != None:
            context.scene.objects.active = obj
            bpy.ops.object.mode_set(mode="OBJECT")
            obj.show_only_shape_key = False
        context.scene.objects.active = obj
        obj.select = True
    if self.armature != None and self.armature.data != None:
        self.armature.data.pose_position = "POSE"
    return {"FINISHED"}
827,
128,
827,
118,
1457295769
] |
def modal(self, context, event):
    """Modal handler for shapekey edit mode.

    Refreshes the cached sprite/armature references each tick and, when the
    active object changed since the last tick, pre-selects its most driven
    shapekey.

    NOTE(review): appears truncated — the try below has no visible
    except/finally and no return value is visible.
    """
    obj = None
    obj_name = context.active_object.name if context.active_object != None else None
    obj = context.scene.objects[obj_name] if obj_name != None else None
    self.sprite_object = bpy.data.objects[self.sprite_object_name]
    self.armature = bpy.data.objects[self.armature_name]
    try:
        # used for debugging
        # if event.ctrl and event.type == "Z" and len(context.selected_objects) == 2:
        # pdb.set_trace()
        if obj != None:
            if obj_name != self.last_obj_name:
                if obj.type == "MESH":
                    self.set_most_driven_shapekey(obj)
827,
128,
827,
118,
1457295769
] |
def draw_callback_px(self):
    """Viewport draw handler: render the edit-mode overlay (offset 2)."""
    draw_edit_mode(self,bpy.context,offset=2)
827,
128,
827,
118,
1457295769
] |
def __init__(self, dockarea):
    """Initialise the converter widget with its validation and app-config
    helpers, keeping a reference to the hosting dock area."""
    super(convertSub, self).__init__()
    self.obj_validation = Validation()
    self.obj_appconfig = Appconfig()
    self.obj_dockarea = dockarea
60,
60,
60,
27,
1426227114
] |
def logErr(failure):
    """Twisted-style errback: dump the failure's traceback to the log."""
    failure.printTraceback()
1,
1,
1,
3,
1449794901
] |
def __init__(self):
    """Start with an empty listener registry (a plain dict)."""
    self._reg = {}
1,
1,
1,
3,
1449794901
] |
def remove(self, listener):
    """Deregister a listener; raises KeyError when it was never registered."""
    self._reg.pop(id(listener))
1,
1,
1,
3,
1449794901
] |
def iterListeners(self):
    """Yield every registered listener (arbitrary order)."""
    # Fix: .values() instead of the Python-2-only .itervalues(), which
    # raises AttributeError on Python 3.  Behavior is identical.
    for listener in self._reg.values():
        yield listener
1,
1,
1,
3,
1449794901
] |
def __init__(self):
    """Kick off the first garbage-collection pass on construction.

    # NOTE(review): _garbageCollect is defined outside this view;
    # presumably it reschedules itself — verify.
    """
    self._garbageCollect()
1,
1,
1,
3,
1449794901
] |
def feed(self, track, clear=False):
    """Queue *track*'s audio chunks into the playback buffer.

    When *clear* is set, the existing buffer is dropped first.  Re-raises
    IOError (after logging) when the track cannot be read.
    """
    if clear:
        self.data.clear()
    try:
        chunks = track.dataChunks(ITER_TIME)
    except IOError:
        log.err('{0} can not be read'.format(repr(track)))
        #self.onTrackFinished()
        raise
    # Success path: remember the total size for progress math, then queue.
    self.currentSize = sum(len(chunk) for chunk in chunks)
    self.data.extend(chunks)
1,
1,
1,
3,
1449794901
] |
def _timerUpdate(self):
    """Recompute playback progress and push the percentage to onTimerUpdate."""
    total = self.currentSize
    if total == 0:
        self.onTimerUpdate(0)
        return
    remainingSize = sum(len(chunk) for chunk in self.data)
    # update timer
    self.onTimerUpdate(int((total - remainingSize) * 100.0 / total))
1,
1,
1,
3,
1449794901
] |
def stop(self):
    """Drop all buffered and history data, clear state flags, notify onStop."""
    self.data, self.history = deque(), deque()
    self.playing = self.paused = False
    self.onStop()
1,
1,
1,
3,
1449794901
] |
def resume(self):
    """Resume after a pause: clear the flag, restart playback, then notify
    listeners that the paused state is now False."""
    self.paused = False
    self.play()
    self.onPaused(False)
1,
1,
1,
3,
1449794901
] |
def onStart(self):
    """Default playback-started callback: just log that no player is attached."""
    log.err('Player not attached')
1,
1,
1,
3,
1449794901
] |
def onStop(self):
    """Default playback-stopped callback: just log that no player is attached."""
    log.err('Player not attached')
1,
1,
1,
3,
1449794901
] |
def onPaused(self, paused=None):
    """Default pause-state callback: log that no player is attached.

    Fix: callers invoke this with the new pause flag (e.g.
    Player.resume does self.onPaused(False)), which made the old
    zero-argument signature raise TypeError.  The None default keeps any
    existing no-argument calls working.
    """
    log.err('Player not attached')
1,
1,
1,
3,
1449794901
] |
def iterTrackUid(self):
    """Yield track uids in display order (ascending position key)."""
    order = self._order
    for position in sorted(order):
        yield order[position]
1,
1,
1,
3,
1449794901
] |
def playlistData(self):
    """Return the metadata of every track that has metadata, in order."""
    return [track.meta
            for track in self.iterTrack()
            if track.meta is not None]
1,
1,
1,
3,
1449794901
] |
def _paths(self):
    """Filesystem path of each track, in playlist order."""
    paths = []
    for track in self.iterTrack():
        paths.append(track._path)
    return paths
1,
1,
1,
3,
1449794901
] |
def mark(self):
    """Snapshot the current playlist state onto the undo stack.

    Shallow-copies both the track registry and the ordering so later
    mutations don't alter the snapshot, and invalidates any redo history.
    """
    snapshot = (dict(self._reg), dict(self._order))
    self._undos.append(snapshot)
    self._redos.clear()
1,
1,
1,
3,
1449794901
] |
Subsets and Splits