| prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
|---|---|
<|file_name|>tool_ssp_simulation.py<|end_file_name|><|fim▁begin|>#
# =================================================================
# =================================================================
# def _enum(**enums):
# return type('Enum', (), enums)
import eventlet
from eventlet import greenthread
import paxes_cinder.k2aclient.k2asample as k2asample
from paxes_cinder.k2aclient.v1 import k2uom
from paxes_cinder.k2aclient.k2asample import dump_k2resp
from paxes_cinder.k2aclient import client
from paxes_cinder.k2aclient.openstack.common import lockutils
from paxes_cinder.k2aclient import exceptions as k2exceptions
from paxes_cinder.k2aclient.k2asample.k2_ssp_cluster_vios_snap \
import cluster_vios_snap
from itertools import repeat
from collections import deque
import time
import pickle
import logging
from os.path import expanduser
from random import randrange
import json
import random
import datetime
import paxes_cinder.k2aclient.v1.cluster_manager as cluster_manager
# import numpy as np
MOCK = False
VIOS_DUMP_ACTIVATED = False
synchronized = lockutils.synchronized_with_prefix('k2a-')
class MockLu(object):
ugenid = 0
def __init__(self):
self.unique_device_id = MockLu.ugenid
MockLu.ugenid += 1
def _timer(prev_time):
"""Simple timer"""
return time.time() - prev_time
def _chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i + n]
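# Illustrative example: list(_chunks([1, 2, 3, 4, 5], 2)) yields
# [[1, 2], [3, 4], [5]] -- the trailing chunk may be shorter than n.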
_last_dump = None
@synchronized('simulation')
def _process_k2_exception(simulation, e):
if not VIOS_DUMP_ACTIVATED:
msg = ("Exception:"
" msg: >%s<,"
" VIOS dump is not activated,"
" continuing ...")
print (msg % (e,))
return
time_between_dumps = 300
global _last_dump
if _last_dump is not None:
delta = time.time() - _last_dump
if delta < time_between_dumps:
msg = ("exception: >%s<,"
" recent dump,"
" take a break ...")
print (msg % (e,))
greenthread.sleep(100)
return
dump = False
diagfspec = None
if isinstance(e, k2exceptions.K2aCrudException):
dump = True
diagfspec = e.diagfspec
elif isinstance(e, k2exceptions.K2aK2Error):
dump = True
diagfspec = e.diagfspec
elif isinstance(e, k2exceptions.K2JobFailure):
dump = True
diagfspec = e.diagfspec
if dump and diagfspec is not None:
msg = ("exception: >%s<, "
" take a dump corresponding "
" to e.diagfspec: >%s<, "
" and then take a break ...")
print (msg % (e, diagfspec,))
if simulation.vios_password is not None:
cluster_vios_snap(simulation.image_pool.vios_ips,
diagfspec + ".vios",
password=simulation.vios_password)
greenthread.sleep(100)
_last_dump = time.time()
else:
msg = ("exception: >%s<,"
" but no dump ...")
print (msg % (e,))
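# Taken together, the lockutils-synchronized decorator and the module-level
# _last_dump timestamp throttle VIOS snap dumps to at most one per
# time_between_dumps (300s) across all green threads.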
def _enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
reverse = dict((value, key) for key, value in enums.iteritems())
enums['reverse_mapping'] = reverse
return type('Enum', (), enums)
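# Illustrative example (hypothetical names): Op = _enum("A", "B", C=9) gives
# Op.A == 0, Op.B == 1, Op.C == 9, and Op.reverse_mapping[0] == "A" for
# turning values back into readable names.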
# def _enum(*sequential):
# enums = dict(zip(sequential,sequential))
# return type('Enum', (), enums)
OperationType = _enum("DEPLOY_FROM_IMAGE",
"DEPLOY_FROM_SNAPSHOT",
"SNAPSHOT_A_DEPLOY",
"DELETE_A_SNAPSHOT",
"DELETE_A_DEPLOY")
DeployState = _enum("INCREASING",
"DECREASING")
SnapshotState = _enum("INCREASING",
"DECREASING")
def _record(simulation, operation, e, duration):
# mu, sigma = 1500, 150
# # # x = mu + sigma * np.random.randn(10000)
# #
# # duration = time.time()-start
# # duration = mu + sigma * np.random.randn()
#
# for i, d in enumerate(duration):
# duration[i] = mu + sigma * np.random.randn()
estr = None
if e is not None:
estr = str(e)
if MOCK:
t = time.time() * 10000.0
else:
t = time.time()
simulation.operations.append((operation.type, estr, duration, t))
# track number of snapshots and number of deploys
if len(simulation.deploys_at_oper) == 0:
prev_deploys = 0
else:
prev_deploys = simulation.deploys_at_oper[-1]
if len(simulation.snapshots_at_oper) == 0:
prev_snapshots = 0
else:
prev_snapshots = simulation.snapshots_at_oper[-1]
if operation.type == OperationType.DEPLOY_FROM_IMAGE:
simulation.deploys_at_oper.append(prev_deploys + 1)
simulation.snapshots_at_oper.append(prev_snapshots)
elif operation.type == OperationType.DEPLOY_FROM_SNAPSHOT:
simulation.deploys_at_oper.append(prev_deploys + 1)
simulation.snapshots_at_oper.append(prev_snapshots)
elif operation.type == OperationType.SNAPSHOT_A_DEPLOY:
simulation.deploys_at_oper.append(prev_deploys)
simulation.snapshots_at_oper.append(prev_snapshots + 1)
elif operation.type == OperationType.DELETE_A_SNAPSHOT:
simulation.deploys_at_oper.append(prev_deploys)
simulation.snapshots_at_oper.append(prev_snapshots - 1)
elif operation.type == OperationType.DELETE_A_DEPLOY:
simulation.deploys_at_oper.append(prev_deploys - 1)
simulation.snapshots_at_oper.append(prev_snapshots)
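# _record keeps two parallel ledgers per operation: the raw (type, error,
# timings, wall-clock) tuples in simulation.operations, and running
# deploy/snapshot counts in the *_at_oper lists for later plotting.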
def _parse_vios(node_vios):
node_parts = node_vios.split('/')
ms_id = node_parts[-3]
vios_id = node_parts[-1]
return ms_id, vios_id
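# Illustrative example (path shape assumed from usage): for a node URI like
# ".../ManagedSystem/<ms_id>/VirtualIOServer/<vios_id>", _parse_vios returns
# ("<ms_id>", "<vios_id>") by position from the right.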
class ImagePool(object):
def __init__(self, cs, cluster_id, existing=None, fake=None):
if MOCK:
self._cs = cs
self._cluster = None
self._ssp_id = None
self._ssp = None
self._fake = True # MOCK is always fake
self._next = 0
if fake is not None:
prefix, num_images, image_size, thin, lut = fake
self._images = num_images * [None]
else:
self._images = len(existing) * [None]
return
self._cs = cs
self._cluster = self._cs.cluster.get(cluster_id)
self._ssp_id = self._cluster.sharedstoragepool_id()
self._ssp = self._cs.sharedstoragepool.get(self._ssp_id)
self._images = []
self._next = 0
# vios
self._vios_ips = []
for node in self._cluster.node.node:
if not node.virtual_io_server:
print(_("Node: >%s<,"
" has no virtual_io_server,"
" continuing ...") % node.partition_name)
ms_id, vios_id = _parse_vios(node.virtual_io_server)
try:
vios = cs.\
virtualioserver.get(ms_id,
vios_id,
xag=["None"])
except Exception as e:
msg = _("Failed to retrieve"
" node: >%s<,"
" msg: >%s<,"
" continuing ...")
raise Exception(msg % (node.partition_name, e))
self._vios_ips.append(vios.resource_monitoring_ip_address)
# if fake is not None then (mock) image LUs will be created
self._fake = False
if existing is None and fake is None:
raise ValueError("must specify existing or fake")
if existing is not None and fake is not None:
x = "must specify either existing or fake, but not both"
raise ValueError(x)
if fake is not None:
self._fake = True
prefix, num_images, image_size, thin, lut = fake
n = cs.sharedstoragepool.create_unique_name
images = [(n("%s%07d" % (prefix, i,)),
image_size, thin, lut) for i in range(num_images)]
(image_lu_pool, self._ssp) = self._ssp.update_append_lus(images)
# self._images = [lu.unique_device_id for lu in image_lu_pool]
self._images = image_lu_pool
elif existing is not None:
self._images = []
for lu in self._ssp.logical_units.logical_unit:
if lu.unit_name in existing:
self._images.append(lu)
# self._images.append(lu.unique_device_id)
if len(self._images) == 0:
raise Exception("Empty Image Pool")
@property
def cs(self):
return self._cs
@property
def cluster(self):
return self._cluster
@property
def ssp_id(self):
return self._ssp_id
@property
def size(self):
return len(self._images)
@property
def ssp(self):
return self._ssp
@ssp.setter
def ssp(self, value):
self._ssp = value
def next(self):
if len(self._images) == 0:
return None
if self._next > len(self._images) - 1:
self._next = 0
self._next += 1
return self._images[self._next - 1]
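    # next() hands out image LUs round-robin: the cursor wraps back to the
    # first image after the last one, so long runs reuse the pool evenly.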
def destroy(self):
if not self._fake:
print "too dangerous, wont destroy existing images"
self._images = []
return
if not MOCK:
# chunk to work around k2 timeouts
chunksize = 10
for chunk in _chunks(self._images, chunksize):
ssp = self._cs.sharedstoragepool.get(self._ssp_id)
lu_udids = [lu.unique_device_id for lu in chunk]
ssp.update_del_lus(lu_udids)
# ssp.update_del_lus(chunk)
self._images = []
class Operation(object):
def __init__(self, otype):
self._otype = otype
self._op_number = None
# self._duration = None
#
# @property
# def duration(self):
# return self._duration
# @duration.setter
# def duration(self, value):
# self._duration = value
@property
def op_number(self):
return self._op_number
@op_number.setter
def op_number(self, value):
self._op_number = value
@property
def type(self):
return self._otype
@property
def type_as_str(self):
return OperationType.reverse_mapping[self._otype]
class K2_XA(object):
def __init__(self, tag):
self._tag = tag
self._i = 0
self._ct = time.localtime()
def _fmt(self):
return ("%(tag)s %(ts)s %(i)d" %
{"tag": self._tag,
"ts": time.strftime("%Y-%m-%d %H:%M:%S", self._ct),
"i": self._i})
def r(self):
return self._fmt()
def ri(self):
x = self._fmt()
self._i += 1
return x
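    # Illustrative example: K2_XA("sim").ri() yields tags like
    # "sim 2014-01-01 12:00:00 0"; the timestamp is frozen at construction
    # and only the trailing counter advances on each ri() call.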
# basic clone operation
def _clone(
simulation,
operation,
source_lu,
dest_lu_unit_name,
dest_lu_lut):
n = simulation.image_pool.cs.sharedstoragepool.create_unique_name
dest_lu_unit_name = n(dest_lu_unit_name)
# print ("start clone: >%s<" % (dest_lu_unit_name,))
cluster = simulation.image_pool.cluster
e = None
dest_lu_udid = None
times = []
if MOCK:
# print "Clone: unique_device_id: >%s<" % (dest_lu.unique_device_id, )
if (simulation.tick_count % 90) == 0:
e = Exception("Mock Exception")
times.append(random.uniform(0, 4))
times.append(random.uniform(0, 4))
else:
status = "COMPLETED_OK"
dest_lu_udid = MockLu().unique_device_id
times.append(random.uniform(3, 5))
times.append(random.uniform(25, 30))
return e, dest_lu_udid, times
start = time.time() # only for exception
try:
(status, dest_lu_udid, job_id) = \
cluster.api.lu_linked_clone_of_lu_bj(
cluster,
source_lu,
dest_lu_unit_name,
dest_lu_logical_unit_type=dest_lu_lut,
xa=simulation.xa(operation))
print "clone: name: >%s<, lu_udid: >%s<" % \
(dest_lu_unit_name, dest_lu_udid)
times.append(time.time() - start)
times.append(0.0)
# print " time: >%f<, >%f<" % (times[0], times[1]) #DEBUGA
if status != "COMPLETED_OK":
msg = "issue for clone: >%s<, job_id: >%s<, status: >%s<"
x = msg % (dest_lu_unit_name, job_id, status,)
print (x)
except Exception as e:
msg = "exception for clone: >%s<"
x = msg % (dest_lu_unit_name,)
print (x)
_process_k2_exception(simulation, e)
times = []
times.append(time.time() - start)
times.append(0.0)
simulation.total_number_of_deploy_exceptions += 1
return e, dest_lu_udid, times
# basic clone operation
def _clone_ds(
simulation,
operation,
source_lu,
dest_lu_unit_name,
dest_lu_lut):
n = simulation.image_pool.cs.sharedstoragepool.create_unique_name
dest_lu_unit_name = n(dest_lu_unit_name)
# print ("start clone: >%s<" % (dest_lu_unit_name,))
cluster = simulation.image_pool.cluster
e = None
dest_lu = None
times = []
if MOCK:
# print "Clone: unique_device_id: >%s<" % (dest_lu.unique_device_id, )
if (simulation.tick_count % 90) == 0:
e = Exception("Mock Exception")
times.append(random.uniform(0, 4))
times.append(random.uniform(0, 4))
else:
status = "COMPLETED_OK"
dest_lu = MockLu()
times.append(random.uniform(3, 5))
times.append(random.uniform(25, 30))
return e, dest_lu, times
start = time.time()
try:
# CREATE TARGET
(status, target_udid, job_id) = \
cluster.lu_create(dest_lu_unit_name,
source_lu.unit_capacity,
xa=simulation.xa(operation))
if status != "COMPLETED_OK":
msg = "issue for job >%s< create: >%s<, status: >%s<"
x = msg % (job_id, dest_lu_unit_name, status,)
print (x)
times.append(time.time() - start)
except Exception as e:
msg = "exception for create: >%s<"
x = msg % (dest_lu_unit_name,)
print (x)
_process_k2_exception(simulation, e)
times.append(time.time() - start)
times.append(0.0)
return e, dest_lu, times
start = time.time()
try:
# CLONE
status, job_id = cluster.lu_linked_clone(
source_lu.unique_device_id,
target_udid,
xa=simulation.xa(operation))
times.append(time.time() - start)
if status != "COMPLETED_OK":
msg = "issue for clone: >%s<, job_id: >%s<, status: >%s<"
x = msg % (dest_lu_unit_name, job_id, status,)
print (x)
except Exception as e:
msg = "exception for clone: >%s<"
x = msg % (dest_lu_unit_name,)
print (x)
_process_k2_exception(simulation, e)
times.append(time.time() - start)
return e, dest_lu, times
# print "Create: unique_device_id: >%s<" % \
# (dest_lu.unique_device_id) #DEBUGA
    if e is not None:
        simulation.total_number_of_deploy_exceptions += 1
return e, target_udid, times
def _delete(simulation, del_lu_name, del_lu_udid, operation):
# print ("Delete: unique_device_id: >%s<" % (del_lu_udid,)) #DEBUGA
e = None
times = []
if MOCK:
# print "Delete: unique_device_id: >%s<" % (del_lu_udid, )
times.append(random.uniform(3, 5))
else:
start = time.time()
print "delete: name: >%s<, udid: >%s<" % (del_lu_name, del_lu_udid)
try:
ssp = simulation.image_pool.ssp.update_del_lus(
[del_lu_udid],
xa=simulation.xa(operation))
simulation.image_pool.ssp = ssp
except Exception as e:
msg = "exception for delete: >%s<"
x = msg % (del_lu_udid,)
print (x)
_process_k2_exception(simulation, e)
times.append(time.time() - start)
# print " time: >%f<" % (times[0]) #DEBUGA
if e is not None:
simulation.total_number_of_delete_exceptions += 1
return e, times
def simulate(simulation,
prefix):
def perform_operation(simulation, operation):
####################
# DEPLOY_FROM_IMAGE
if operation.type == OperationType.DEPLOY_FROM_IMAGE:
# print "Operation: DEPLOY_FROM_IMAGE" #DEBUGA
# simulation.deploys.append("dfi")
# source_lu = simulation.image_pool.next()
source_lu = simulation.image_pool.next()
x = "P2Z-DEPLOY_FROM_IMAGE-%07d"
dest_lu_unit_name = x % (simulation.next_clone_number(),)
times = []
e, new_lu_udid, times = _clone(
simulation,
operation,
source_lu,
dest_lu_unit_name,
"VirtualIO_Disk")
_record(simulation, operation, e, times)
simulation._tick()
simulation.check_for_termination()
if e is not None:
raise e
simulation.deploys[new_lu_udid] = (
dest_lu_unit_name,
source_lu.unit_capacity,
source_lu.thin_device)
simulation.total_number_of_image_deploys += 1
####################
# DEPLOY_FROM_SNAPSHOT
elif operation.type == OperationType.DEPLOY_FROM_SNAPSHOT:
if len(simulation.snapshots) < 1:
# no snapshot to deploy
return
# print "Operation: DEPLOY_FROM_SNAPSHOT" #DEBUGA
# simulation.deploys.append("dfs")
ilu = randrange(len(simulation.snapshots))
source_lu = simulation.snapshots[ilu]
x = "P2Z-DEPLOY_FROM_SNAPSHOT-%07d"
dest_lu_unit_name = x % (simulation.next_clone_number(),)
e, new_lu_udid, times = _clone(
simulation,
operation,
source_lu,
dest_lu_unit_name,
"VirtualIO_Disk")
_record(simulation, operation, e, times)
simulation._tick()
simulation.check_for_termination()
if e is not None:
raise e
simulation.deploys[new_lu_udid] = (
dest_lu_unit_name,
source_lu.unit_capacity,
source_lu.thin_device)
simulation.total_number_of_snapshot_deploys += 1
####################
# SNAPSHOT_A_DEPLOY
elif operation.type == OperationType.SNAPSHOT_A_DEPLOY:
if len(simulation.deploys) < 1:
# nothing to snapshot
return
# print "Operation: SNAPSHOT_A_DEPLOY" #DEBUGA
# simulation.snapshots.append("sd")
keys = simulation.deploys.keys()
ilu = randrange(len(keys))
source_props = simulation.deploys[keys[ilu]]
source_lu = k2uom.LogicalUnit()
source_lu.unit_name = source_props[0]
source_lu.unit_capacity = source_props[1]
source_lu.thin_device = source_props[2]
x = "P2Z-SNAPSHOT_A_DEPLOY-%07d"
dest_lu_unit_name = x % (simulation.next_clone_number(),)
times = []
e, new_lu_udid, times = _clone(
simulation,
operation,
source_lu,
dest_lu_unit_name,
"VirtualIO_Image")
_record(simulation, operation, e, times)
simulation._tick()
simulation.check_for_termination()
if e is not None:
raise e
simulation.snapshots.append(new_lu_udid)
simulation.total_number_of_snapshots += 1
####################
# DELETE_A_SNAPSHOT
elif operation.type == OperationType.DELETE_A_SNAPSHOT:
if len(simulation.snapshots) < 1:
# no snapshot to delete
return
# print "Operation: DELETE_A_SNAPSHOT" #DEBUGA
ilu = randrange(len(simulation.snapshots))
del_lu_name = simulation.snapshots[ilu].unit_name
del_lu_udid = simulation.snapshots[ilu].unique_device_id
# simulation.image_pool.ssp.update_del_lus([del_lu_udid])
e, times = _delete(simulation, del_lu_name, del_lu_udid, operation)
_record(simulation, operation, e, times)
if e is not None:
raise e
del simulation.snapshots[ilu]
####################
# DELETE_A_DEPLOY
elif operation.type == OperationType.DELETE_A_DEPLOY:
if len(simulation.deploys) < 1:
# no deploy to delete
return
# print "Operation: DELETE_A_DEPLOY" #DEBUGA
keys = simulation.deploys.keys()
ilu = randrange(len(keys))
del_lu_udid = keys[ilu]
(del_lu_name, x, x) = simulation.deploys.pop(del_lu_udid, None)
# del_lu_udid = simulation.deploys[keys[ilu]].unique_device_id
# simulation.image_pool.ssp.update_del_lus([del_lu_udid])
e, times = _delete(simulation, del_lu_name, del_lu_udid, operation)
_record(simulation, operation, e, times)
if e is not None:
raise e
# del simulation.deploys[ilu]
####################
# ERRORS
else:
raise Exception("programming error")
# cluster = cs.cluster.get(cluster_id)
# ssp_id = cluster.sharedstoragepool_id()
# ssp = cs.sharedstoragepool.get(ssp_id)
simulation.start_time = time.time()
pool = eventlet.GreenPool(simulation.num_threads)
op_number = 0
while True:
if simulation.terminate:
print "TERMINATE"
break
if len(simulation.opq) < 1:
simulation.schedule()
operation = simulation.opq.popleft()
operation.op_number = op_number
op_number += 1
try:
pool.spawn_n(perform_operation, simulation, operation)
except (SystemExit, KeyboardInterrupt): # TODO are these correct?
break
pool.waitall()
# done
simulation.checkpoint()
class Simulation(object):
SNAPSHOT_ON = False
DEPLOY_TO_SNAPSHOT_RATIO = 5
SNAPSHOT_STEP_FORWARD = 4 # must be greater than 1
DEPLOY_IMAGE_PER_CYCLE = 2
DEPLOY_SNAPSHOT_PER_CYCLE = 1
DEPLOY_STEP_FORWARD = 10
SECOND_ORDER = True
def __init__(self, result_file,
title,
vios_password,
target_number_of_deploys,
min_deploys, max_deploys, min_snapshots,
max_snapshots, image_pool, num_threads):
self.result_file = result_file
self.title = title
self.vios_password = vios_password
self.image_pool = image_pool
self.num_threads = num_threads
self.start_time = -1
self.checkpoint_time = -1
self.target_number_of_deploys = target_number_of_deploys
assert max_deploys > min_deploys
self.min_deploys = min_deploys
self.max_deploys = max_deploys
assert max_snapshots > min_snapshots
self.min_snapshots = min_snapshots
self.max_snapshots = max_snapshots
self.current_dtsr = 0
self._current_clone_number = 0
# statistics
self.image_pool_size = image_pool.size
self.total_number_of_image_deploys = 0
self.total_number_of_snapshot_deploys = 0
self.total_number_of_snapshots = 0
self.current_deploystate = DeployState.INCREASING
self.current_snapshotstate = SnapshotState.INCREASING
self.opq = deque([])
self.deploys = {}
self.snapshots = []
self.operations = []
self.deploys_at_oper = []
self.snapshots_at_oper = []
self.snapshot_inflections = []
self.deploy_inflections = []
self.exceptions = []
self.total_number_of_deploy_exceptions = 0
self.total_number_of_delete_exceptions = 0
self.tick_count = 0
self.terminate = False
@property
def total_deploys(self):
return self.total_number_of_image_deploys + \
self.total_number_of_snapshot_deploys
def check_for_termination(self):
if self.total_deploys > self.target_number_of_deploys:
self.terminate = True
def schedule(self):
issnapshot = (self.current_dtsr % self.DEPLOY_TO_SNAPSHOT_RATIO) == 0
self.current_dtsr += 1
if self.SNAPSHOT_ON and issnapshot:
while True:
if self.current_snapshotstate is SnapshotState.INCREASING:
if len(self.snapshots) > self.max_snapshots:
opnum = len(self.operations)
x = "Snapshot: INCREASING -> DECREASING at op # >%d<"
print x % (opnum,)
self.snapshot_inflections.append(("I2D", opnum))
self.current_snapshotstate = SnapshotState.DECREASING
continue
# 1st order
op = OperationType.SNAPSHOT_A_DEPLOY
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE * \
self.SNAPSHOT_STEP_FORWARD
self.opq.extend(repeat(Operation(op), ct))
# 2nd order
if self.SECOND_ORDER:
op = OperationType.DELETE_A_SNAPSHOT
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE
self.opq.extend(repeat(Operation(op), ct))
return
elif self.current_snapshotstate is SnapshotState.DECREASING:
if len(self.snapshots) < self.min_snapshots:
opnum = len(self.operations)
x = "Snapshot: DECREASING -> INCREASING at op # >%d<"
print x % (opnum,)
self.snapshot_inflections.append(("D2I", opnum))
self.current_snapshotstate = SnapshotState.INCREASING
continue
# 1st order
op = OperationType.DELETE_A_SNAPSHOT
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE * \
self.SNAPSHOT_STEP_FORWARD
self.opq.extend(repeat(Operation(op), ct))
# 2nd order
if self.SECOND_ORDER:
op = OperationType.SNAPSHOT_A_DEPLOY
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE
self.opq.extend(repeat(Operation(op), ct))
return
else:
raise Exception("Programming Error")
else:
while True:
if self.current_deploystate is DeployState.INCREASING:
if len(self.deploys) > self.max_deploys:
opnum = len(self.operations)
x = "Deploy: INCREASING -> DECREASING at op # >%d<"
print x % (opnum,)
self.deploy_inflections.append(("I2D", opnum))
self.current_deploystate = DeployState.DECREASING
continue
# 1st order for IMAGE
ot = OperationType.DEPLOY_FROM_IMAGE
ct = self.DEPLOY_IMAGE_PER_CYCLE * self.DEPLOY_STEP_FORWARD
self.opq.extend(repeat(Operation(ot), ct))
# 1st order for DEPLOY
ot = OperationType.DEPLOY_FROM_SNAPSHOT
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE * \
self.DEPLOY_STEP_FORWARD
self.opq.extend(repeat(Operation(ot), ct))
# 2nd order
if self.SECOND_ORDER:
ot = OperationType.DELETE_A_DEPLOY
ct = self.DEPLOY_IMAGE_PER_CYCLE + \
self.DEPLOY_SNAPSHOT_PER_CYCLE
self.opq.extend(repeat(Operation(ot), ct))
return
elif self.current_deploystate is DeployState.DECREASING:
if len(self.deploys) < self.min_deploys:
opnum = len(self.operations)
x = "Deploy: DECREASING -> INCREASING at op # >%d<"
print x % (opnum,)
self.deploy_inflections.append(("D2I", opnum))
self.current_deploystate = DeployState.INCREASING
continue
ot = OperationType.DELETE_A_DEPLOY
ct = (self.DEPLOY_IMAGE_PER_CYCLE +
self.DEPLOY_SNAPSHOT_PER_CYCLE)
ct = ct * self.DEPLOY_STEP_FORWARD
self.opq.extend(repeat(Operation(ot), ct))
if self.SECOND_ORDER:
ot = OperationType.DEPLOY_FROM_IMAGE
ct = self.DEPLOY_IMAGE_PER_CYCLE
self.opq.extend(repeat(Operation(ot), ct))
ot = OperationType.DEPLOY_FROM_SNAPSHOT
ct = self.DEPLOY_SNAPSHOT_PER_CYCLE
self.opq.extend(repeat(Operation(ot), ct))
return
else:
raise Exception("Programming Error")
def next_clone_number(self):
cn = self._current_clone_number
self._current_clone_number += 1
return cn
def _tick(self):
self.tick_count += 1
if (self.tick_count % 100) == 0:
print "Operation number: >%d<" % (self.tick_count)
if (self.tick_count % 10) == 0:
self.checkpoint()
def checkpoint(self):
# save for plotting
        with open(self.result_file, 'wb') as f:
self.checkpoint_time = time.time()
pickle.dump(self, f)
def xa(self, op):
return (self.title +
"-" +
OperationType.reverse_mapping[op.type] +
"-" +
str(op.op_number)
)
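    # Illustrative example: with title "N7", operation number 12 of type
    # DEPLOY_FROM_IMAGE produces the audit string "N7-DEPLOY_FROM_IMAGE-12".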
# def simulation(cs, cluster_id, image_pool):
def simulation(title,
cluster_id,
image_pool,
result_file,
vios_password=None,
num_threads=5,
target_number_of_deploys=100,
min_deploys=10,
max_deploys=20,
min_snapshots=10,
max_snapshots=20):
s = Simulation(result_file,
title,
vios_password,
target_number_of_deploys,
min_deploys, max_deploys, min_snapshots,
max_snapshots, image_pool, num_threads)
start_time = time.time()
print "START"
x = {}
x["num_threads"] = num_threads
x["target_number_of_deploys"] = target_number_of_deploys
x["min_deploys"] = min_deploys
x["max_deploys"] = max_deploys
x["min_snapshots"] = min_snapshots
x["max_snapshots"] = max_snapshots
print json.dumps(x, indent=4)
simulate(s, "P2Z-")
total_time = time.time() - start_time
print "END: total runtime: h:m:s >%s<" % \
(datetime.timedelta(seconds=int(total_time)))
return s
def run_simulation_with_pool():
"""Setup existing image pool and run"""
k2acfg = k2asample.getk2acfg()
k2asample.configure_logging(logging.getLevelName(k2acfg['loglevel']))
# k2asample.configure_logging(logging.DEBUG,
# k2_loglevel=logging.WARNING,
# logdir=expanduser("~"))
# # gerald 238
# k2_url = "9.114.181.238"
# k2_password = "Passw0rd"
# k2_url = "hmc5.watson.ibm.com"
# k2_password = k2acfg['k2_password']
# cluster_id = "04628d39-67df-3047-b90e-c4d9b4057267" # p730_810_A
# result_file = 'my_sim_003_gerald'
# # gerald 168
# k2_url = "9.114.181.168"
# k2_password = "passw0rd"
# cluster_id = "02803f50-7063-3602-a304-fb54e4ca2d44" # p730_810_A
# result_file = 'my_sim_003_gerald_168'
# # N23 / N24
# title = "N23/N24"
# k2_url = "hmc5.watson.ibm.com"
# k2_password = k2acfg['k2_password']
# cluster_id = "ea1b0b5f-3b3a-39dc-bade-6e9cebd18bb2" # cluster-a
# result_file = 'my_sim_003_cluster_a'
# # REJY
# title = "REJY"
# k2_url = "9.126.139.241"
# k2_password = k2acfg['k2_password']
# cluster_id = "c43fbdcd-95f2-3b4a-b643-234ff00eded4" # TestCluster
# result_file = 'my_sim_003_REJY'
# # N8
# title = "N8"
# k2_url = "hmc4.watson.ibm.com"
# k2_password = k2acfg['k2_password']
# cluster_id = "0c737495-d09a-337a-a7e9-6173d4bb6d20" # cluster-c
# result_file = 'my_sim_003_N8'
# vios_password = "sde2013"
# N7
title = "N7"
k2_url = "hmc5.watson.ibm.com"
k2_password = k2acfg['k2_password']
cluster_id = "fe3fbe0f-5ba8-3374-ab75-7b653c9a57ff" # cluster-b
result_file = 'my_sim_003_N7'
vios_password = "sde2013"
vios_password = None
if not MOCK:
cs = client.Client(k2acfg['api_version'],
k2_url, # k2acfg['k2_url'],
k2acfg['k2_username'],
k2_password, # k2acfg['k2_password'],
k2_auditmemento=k2acfg['k2_auditmemento'],
k2_certpath=k2acfg['k2_certpath'],
retries=30, # k2acfg['retries']
timeout=1200, # k2acfg['timeout']
excdir="/tmp/ssp_simulation") # k2acfg['excdir']
else:<|fim▁hole|>
use_fake_images = True
if not use_fake_images:
existing = ["RHEL64"]
image_pool = ImagePool(cs, cluster_id, existing)
else:
prefix = "P2Z-FAKEIMAGE-"
num_images = 1
image_size = 1
thin = True
lut = "VirtualIO_Image"
fake = (prefix, num_images, image_size, thin, lut)
image_pool = ImagePool(cs, cluster_id, fake=fake)
print "Image_pool_size: >%d<" % (len(image_pool._images),)
num_threads = 5
# num_threads = 1
target_number_of_deploys = 30
target_number_of_deploys = 500
target_number_of_deploys = 5
target_number_of_deploys = 2000
# target_number_of_deploys = 1000
target_number_of_deploys = 1000
min_deploys = 100
# max_deploys = 2000
max_deploys = 200
min_snapshots = 100
max_snapshots = 200
# print "NOMONKEY"
# min_deploys = 5
# max_deploys = 10
s = simulation(title,
cluster_id,
image_pool,
result_file,
vios_password=vios_password,
num_threads=num_threads,
target_number_of_deploys=target_number_of_deploys,
min_deploys=min_deploys,
max_deploys=max_deploys,
min_snapshots=min_snapshots,
max_snapshots=max_snapshots)
image_pool.destroy()
r = {}
r["total_number_of_image_deploys"] = s.total_number_of_image_deploys
r["total_number_of_snapshot_deploys"] = s.total_number_of_snapshot_deploys
r["total_number_of_snapshots"] = s.total_number_of_snapshots
r["current_number_of_deploys"] = len(s.deploys)
r["current_number_of_snapshots"] = len(s.snapshots)
r["total_number_of_deploy_exceptions"] = \
s.total_number_of_deploy_exceptions
r["total_number_of_delete_exceptions"] = \
s.total_number_of_delete_exceptions
print "Result:"
print json.dumps(r, indent=4)
if __name__ == '__main__':
# print "NO MONKEY"
eventlet.monkey_patch()
try:
run_simulation_with_pool()
except Exception as e:
logging.exception(e)<|fim▁end|> | cs = None |
<|file_name|>ActionToggleCameraActorInteraction.cpp<|end_file_name|><|fim▁begin|>/************************************************************************
* Copyright (C) 2019 Spatial Information Systems Research Limited
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
************************************************************************/
#include <Action/ActionToggleCameraActorInteraction.h>
#include <FaceModelViewer.h>
#include <FaceModel.h>
using FaceTools::Action::ActionToggleCameraActorInteraction;
using FaceTools::Interactor::ActorMoveNotifier;
using FaceTools::Action::FaceAction;
using FaceTools::Action::Event;
using FaceTools::FaceModelViewer;
using FaceTools::ModelViewer;
using FaceTools::FM;
using FaceTools::FVS;
using FaceTools::Vis::FV;
using MS = FaceTools::Action::ModelSelector;
ActionToggleCameraActorInteraction::ActionToggleCameraActorInteraction( const QString& dn, const QIcon& ico, const QKeySequence& ks)
: FaceAction( dn, ico, ks), _dblClickDrag(false)
{
const Interactor::SelectNotifier *sn = MS::selector();
connect( sn, &Interactor::SelectNotifier::onDoubleClickedSelected, this, &ActionToggleCameraActorInteraction::_doDoubleClicked);
connect( sn, &Interactor::SelectNotifier::onLeftButtonUp, this, &ActionToggleCameraActorInteraction::_doLeftButtonUp);
_moveNotifier = std::shared_ptr<ActorMoveNotifier>( new ActorMoveNotifier);
connect( &*_moveNotifier, &ActorMoveNotifier::onActorStart, this, &ActionToggleCameraActorInteraction::_doOnActorStart);
connect( &*_moveNotifier, &ActorMoveNotifier::onActorStop, this, &ActionToggleCameraActorInteraction::_doOnActorStop);
setCheckable( true, false);
} // end ctor
QString ActionToggleCameraActorInteraction::toolTip() const
{
return "When on, click and drag the selected model to change its position or orientation.";
} // end toolTip
QString ActionToggleCameraActorInteraction::whatsThis() const
{
QStringList htext;
htext << "With this option toggled off, mouse clicking and dragging causes the camera to move around.";
htext << "When this option is toggled on, clicking and dragging on a model will reposition or reorient it in space.";
htext << "Click and drag with the left mouse button to rotate the model in place.";
htext << "Click and drag with the right mouse button (or hold down the SHIFT key while left clicking and dragging)";
htext << "to shift the model laterally. Click and drag with the middle mouse button (or hold down the CTRL key while";
htext << "left or right clicking and dragging) to move the model towards or away from you.";
htext << "Note that clicking and dragging off the model's surface will still move the camera around, but that this also";
htext << "toggles this option off (any camera action from the menu/toolbar will also toggle this option off).";<|fim▁hole|>
bool ActionToggleCameraActorInteraction::checkState( Event)
{
return MS::interactionMode() == IMode::ACTOR_INTERACTION;
} // end checkState
bool ActionToggleCameraActorInteraction::checkEnable( Event)
{
const FM* fm = MS::selectedModel();
return fm || isChecked();
} // end checkEnabled
void ActionToggleCameraActorInteraction::doAction( Event)
{
if ( isChecked())
{
MS::showStatus( "Model interaction ACTIVE");
MS::setInteractionMode( IMode::ACTOR_INTERACTION, true);
} // end if
else
{
MS::showStatus( "Camera interaction ACTIVE", 5000);
MS::setInteractionMode( IMode::CAMERA_INTERACTION);
} // end else
} // end doAction
void ActionToggleCameraActorInteraction::_doOnActorStart()
{
storeUndo( this, Event::AFFINE_CHANGE);
} // end _doOnActorStart
void ActionToggleCameraActorInteraction::_doOnActorStop()
{
emit onEvent( Event::AFFINE_CHANGE);
} // end _doOnActorStop
// Called only when user double clicks on an already selected model.
void ActionToggleCameraActorInteraction::_doDoubleClicked()
{
_dblClickDrag = true;
setChecked( true);
execute( Event::USER);
} // end _doDoubleClicked
void ActionToggleCameraActorInteraction::_doLeftButtonUp()
{
if ( _dblClickDrag)
{
_dblClickDrag = false;
setChecked( false);
execute( Event::USER);
} // end if
} // end _doLeftButtonUp<|fim▁end|> | return tr( htext.join(" ").toStdString().c_str());
} // end whatsThis
|
<|file_name|>AppContainer.js<|end_file_name|><|fim▁begin|>import React from 'react';
import 'isomorphic-fetch';
import {RouteHandler} from 'react-router';
import Transmit from 'react-transmit';
import {createStore, combineReducers} from 'redux';<|fim▁hole|>import {Provider} from 'react-redux';
import * as reducers from '../reducers/index';
class AppContainer extends React.Component {
static propTypes = {
initialState: React.PropTypes.object.isRequired
}
render() {
const reducer = combineReducers(reducers);
const store = createStore(reducer, this.props.initialState);
return (
<Provider store={store}>
{() =>
<RouteHandler />
}
</Provider>
);
}
}
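// A fresh store is built from this.props.initialState on every render, so a
// server-rendered state tree can hydrate the client to the same state
// (standard Redux universal-rendering pattern; reducers come from
// ../reducers/index).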
export default Transmit.createContainer(AppContainer, {
queries: {}
});<|fim▁end|> | |
<|file_name|>build_utils_codes.py<|end_file_name|><|fim▁begin|>'''
Copyright (C) 2016 Bastille Networks
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
def i_code (code3):
return code3[0]
def o_code (code3):
if len (code3) >= 2:
return code3[1]
else:
return code3[0]
def tap_code (code3):<|fim▁hole|> else:
return code3[0]
def i_type (code3):
return char_to_type[i_code (code3)]
def o_type (code3):
return char_to_type[o_code (code3)]
def tap_type (code3):
return char_to_type[tap_code (code3)]
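# Illustrative example: for a 3-char signature such as "fcf" (in/out/tap per
# position), i_type("fcf") == 'float', o_type("fcf") == 'gr_complex', and
# tap_type("fcf") == 'float', using the char_to_type table below.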
char_to_type = {}
char_to_type['s'] = 'short'
char_to_type['i'] = 'int'
char_to_type['f'] = 'float'
char_to_type['c'] = 'gr_complex'
char_to_type['b'] = 'unsigned char'<|fim▁end|> | if len (code3) >= 3:
return code3[2] |
<|file_name|>iron-selector.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
const { computed } = Ember;
let IronSelector = Ember.Component.extend({
attributeBindings: [
'selected',
'role',
'attrForSelected',
'multi'
],
selectedItem: computed({
get() {},
set(key, value) {
let items = this.get('items');
let idx = -1;
if (items) {
idx = this.get('items').indexOf(value);
}
if (this.getSelectedIndex() !== idx && idx !== -1) {
this.set('selected', idx);
}
return value;
}
}),
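  // selectedItem mirrors the underlying element's selection: setting it
  // resolves the item to an index in `items` and pushes that index down as
  // `selected` whenever it differs from the element's current selection.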
getSelectedIndex() {
let el = this.element;
if (el) {
return typeof el.selected === 'number' ?
el.selected :
el.indexOf(el.selectedItem);
} else {
return -1;
}
},<|fim▁hole|> this.$().on('iron-select', () => {
let el = this.element;
let items = this.get('items');
if (items) {
this.set('selectedItem', items[this.getSelectedIndex()]);
} else {
this.set('selectedItem', el.selected);
}
});
// initial selection
let selectedItem = this.get('selectedItem');
if (selectedItem) {
let items = this.get('items');
if (items) {
        this.element.select(typeof selectedItem === 'number' ?
selectedItem :
items.indexOf(selectedItem));
} else {
        this.element.select(typeof selectedItem === 'number' ?
selectedItem :
this.element.items.indexOf(selectedItem));
}
}
}
});
IronSelector.reopenClass({
positionalParams: [ 'items' ]
});
export default IronSelector;<|fim▁end|> |
didInsertElement() {
this._super(...arguments);
|
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>@Component({
// tslint:disable-next-line
selector: 'body',
template: '<router-outlet></router-outlet>'
})
export class AppComponent implements OnInit {
constructor(private router: Router) { }
ngOnInit() {
this.router.events.subscribe((evt) => {
if (!(evt instanceof NavigationEnd)) {
return;
}
      window.scrollTo(0, 0);
});
}
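  // Filtering Router.events for NavigationEnd resets the scroll position on
  // every completed route change -- a common pattern for full-page routing.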
}<|fim▁end|> | import { Component, OnInit } from '@angular/core';
import { Router, NavigationEnd } from '@angular/router';
|
<|file_name|>Adafruit_GPS.cpp<|end_file_name|><|fim▁begin|>/***********************************
This is our GPS library
Adafruit invests time and resources providing this open source code,
please support Adafruit and open-source hardware by purchasing
products from Adafruit!
Written by Limor Fried/Ladyada for Adafruit Industries.
BSD license, check license.txt for more information
All text above must be included in any redistribution
****************************************/
#include "Adafruit_GPS.h"
#include "math.h"
#include <ctype.h>
// how long are max NMEA lines to parse?
#define MAXLINELENGTH 120
// we double buffer: read one line in and leave one for the main program
volatile char line1[MAXLINELENGTH];
volatile char line2[MAXLINELENGTH];
// our index into filling the current line
volatile uint8_t lineidx=0;
// pointers to the double buffers
volatile char *currentline;
volatile char *lastline;
volatile boolean recvdflag;
volatile boolean inStandbyMode;
boolean Adafruit_GPS::parse(char *nmea) {
// do checksum check
// first look if we even have one
if (nmea[strlen(nmea)-4] == '*') {
uint16_t sum = parseHex(nmea[strlen(nmea)-3]) * 16;
sum += parseHex(nmea[strlen(nmea)-2]);
// check checksum
for (uint8_t i=1; i < (strlen(nmea)-4); i++) {
sum ^= nmea[i];
}
if (sum != 0) {
// bad checksum :(
//return false;
Spark.publish("GPS", "{ error: \"bad checksum\"}", 60, PRIVATE );
}
}
// look for a few common sentences
if (strstr(nmea, "$GPGGA")) {
// found GGA
char *p = nmea;
// get time
p = strchr(p, ',')+1;
float timef = atof(p);
uint32_t time = timef;
hour = time / 10000;
minute = (time % 10000) / 100;
seconds = (time % 100);
milliseconds = fmod(timef, 1.0) * 1000;
// parse out latitude
p = strchr(p, ',')+1;
latitude = atof(p);
p = strchr(p, ',')+1;
if (p[0] == 'N') lat = 'N';
else if (p[0] == 'S') lat = 'S';
else if (p[0] == ',') lat = 0;
else return false;
// parse out longitude
p = strchr(p, ',')+1;
longitude = atof(p);
p = strchr(p, ',')+1;
if (p[0] == 'W') lon = 'W';
else if (p[0] == 'E') lon = 'E';
else if (p[0] == ',') lon = 0;
else return false;
p = strchr(p, ',')+1;
fixquality = atoi(p);
p = strchr(p, ',')+1;
satellites = atoi(p);
p = strchr(p, ',')+1;
HDOP = atof(p);
p = strchr(p, ',')+1;
altitude = atof(p);
p = strchr(p, ',')+1;
p = strchr(p, ',')+1;
geoidheight = atof(p);
return true;
}
if (strstr(nmea, "$GPRMC")) {
// found RMC
char *p = nmea;
// get time
p = strchr(p, ',')+1;
float timef = atof(p);
uint32_t time = timef;
hour = time / 10000;
minute = (time % 10000) / 100;
seconds = (time % 100);
milliseconds = fmod(timef, 1.0) * 1000;
p = strchr(p, ',')+1;
// Serial.println(p);
if (p[0] == 'A')
fix = true;
else if (p[0] == 'V')
fix = false;
else
return false;
// parse out latitude
p = strchr(p, ',')+1;
latitude = atof(p);
p = strchr(p, ',')+1;
if (p[0] == 'N') lat = 'N';
else if (p[0] == 'S') lat = 'S';
else if (p[0] == ',') lat = 0;
else return false;
// parse out longitude
p = strchr(p, ',')+1;
longitude = atof(p);
p = strchr(p, ',')+1;
if (p[0] == 'W') lon = 'W';
else if (p[0] == 'E') lon = 'E';
else if (p[0] == ',') lon = 0;
else return false;
// speed
p = strchr(p, ',')+1;
speed = atof(p);
// angle
p = strchr(p, ',')+1;
angle = atof(p);
p = strchr(p, ',')+1;
uint32_t fulldate = atof(p);
day = fulldate / 10000;
month = (fulldate % 10000) / 100;
year = (fulldate % 100);
// we dont parse the remaining, yet!
return true;
}
return false;
}
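// Illustrative example (standard NMEA sample sentence): parsing
// "$GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,*47"
// sets 12:35:19 UTC, latitude 4807.038 N / longitude 01131.000 E (ddmm.mmmm),
// fixquality 1, 8 satellites, HDOP 0.9, altitude 545.4 m, geoid height 46.9 m.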
char Adafruit_GPS::read(void) {
char c = 0;
if (paused) return c;
#ifdef __AVR__
if(gpsSwSerial) {
if(!gpsSwSerial->available()) return c;
c = gpsSwSerial->read();
} else
#endif
{
if(!gpsHwSerial->available()) return c;
c = gpsHwSerial->read();
}
//Serial.print(c);
if (c == '$') {
currentline[lineidx] = 0;
lineidx = 0;
}
if (c == '\n') {
currentline[lineidx] = 0;
if (currentline == line1) {
currentline = line2;
lastline = line1;
} else {
currentline = line1;
lastline = line2;
}
//Serial.println("----");
//Serial.println((char *)lastline);
//Serial.println("----");
lineidx = 0;
recvdflag = true;
}
currentline[lineidx++] = c;
if (lineidx >= MAXLINELENGTH)
lineidx = MAXLINELENGTH-1;
return c;
}
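// read() itself does no NMEA parsing: it splits the serial stream on '$' and
// '\n' into the line1/line2 double buffer and raises recvdflag when a full
// sentence is ready to be handed out via lastNMEA() and parse().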
#ifdef __AVR__
// Constructor when using SoftwareSerial or NewSoftSerial
#if ARDUINO >= 100
Adafruit_GPS::Adafruit_GPS(SoftwareSerial *ser)
#else
Adafruit_GPS::Adafruit_GPS(NewSoftSerial *ser)
#endif
{
common_init(); // Set everything to common state, then...
gpsSwSerial = ser; // ...override gpsSwSerial with value passed.
}
#endif
// Constructor when using HardwareSerial
Adafruit_GPS::Adafruit_GPS(Stream *ser) {
common_init(); // Set everything to common state, then...
gpsHwSerial = ser; // ...override gpsHwSerial with value passed.
}
// Initialization code used by all constructor types
void Adafruit_GPS::common_init(void) {
#ifdef __AVR__
gpsSwSerial = NULL; // Set both to NULL, then override correct
#endif
gpsHwSerial = NULL; // port pointer in corresponding constructor
recvdflag = false;
paused = false;
lineidx = 0;
currentline = line1;
lastline = line2;
hour = minute = seconds = year = month = day =
fixquality = satellites = 0; // uint8_t
lat = lon = mag = 0; // char
fix = false; // boolean
milliseconds = 0; // uint16_t
latitude = longitude = geoidheight = altitude =
speed = angle = magvariation = HDOP = 0.0; // float
}
void Adafruit_GPS::begin(uint16_t baud)
{
#ifdef __AVR__
if(gpsSwSerial)
gpsSwSerial->begin(baud);
else
gpsHwSerial->begin(baud);
#endif
delay(10);
}
void Adafruit_GPS::sendCommand(char *str) {
#ifdef __AVR__
if(gpsSwSerial)
gpsSwSerial->println(str);
else
#endif
gpsHwSerial->println(str);
}
boolean Adafruit_GPS::newNMEAreceived(void) {
return recvdflag;
}
void Adafruit_GPS::pause(boolean p) {
paused = p;
}
char *Adafruit_GPS::lastNMEA(void) {
recvdflag = false;
return (char *)lastline;
}
// read a Hex value and return the decimal equivalent
uint8_t Adafruit_GPS::parseHex(char c) {
if (c < '0')
return 0;
if (c <= '9')
return c - '0';
if (c < 'A')
return 0;
if (c <= 'F')
return (c - 'A')+10;
return 0;<|fim▁hole|>
uint8_t i=0;
while (i < max) {
if (newNMEAreceived()) {
char *nmea = lastNMEA();
strncpy(str, nmea, 20);
str[19] = 0;
i++;
if (strstr(str, wait4me))
return true;
}
}
return false;
}
boolean Adafruit_GPS::LOCUS_StartLogger(void) {
sendCommand(PMTK_LOCUS_STARTLOG);
recvdflag = false;
return waitForSentence(PMTK_LOCUS_LOGSTARTED);
}
boolean Adafruit_GPS::LOCUS_ReadStatus(void) {
sendCommand(PMTK_LOCUS_QUERY_STATUS);
if (! waitForSentence("$PMTKLOG"))
return false;
char *response = lastNMEA();
uint16_t parsed[10];
uint8_t i;
for (i=0; i<10; i++) parsed[i] = -1;
response = strchr(response, ',');
for (i=0; i<10; i++) {
if (!response || (response[0] == 0) || (response[0] == '*'))
break;
response++;
parsed[i]=0;
while ((response[0] != ',') && (response[0] != '*') && (response[0] != 0))
{
parsed[i] *= 10;
char c = response[0];
if (isdigit(c))
parsed[i] += c - '0';
else
parsed[i] = c;
response++;
}
}
LOCUS_serial = parsed[0];
LOCUS_type = parsed[1];
if (isalpha(parsed[2])) {
parsed[2] = parsed[2] - 'a' + 10;
}
LOCUS_mode = parsed[2];
LOCUS_config = parsed[3];
LOCUS_interval = parsed[4];
LOCUS_distance = parsed[5];
LOCUS_speed = parsed[6];
LOCUS_status = !parsed[7];
LOCUS_records = parsed[8];
LOCUS_percent = parsed[9];
return true;
}
// Standby Mode Switches
boolean Adafruit_GPS::standby(void) {
if (inStandbyMode) {
return false; // Returns false if already in standby mode, so that you do not wake it up by sending commands to GPS
}
else {
inStandbyMode = true;
sendCommand(PMTK_STANDBY);
//return waitForSentence(PMTK_STANDBY_SUCCESS); // don't seem to be fast enough to catch the message, or something else just is not working
return true;
}
}
boolean Adafruit_GPS::wakeup(void) {
if (inStandbyMode) {
inStandbyMode = false;
sendCommand(""); // send byte to wake it up
return waitForSentence(PMTK_AWAKE);
}
else {
return false; // Returns false if not in standby mode, nothing to wakeup
}
}<|fim▁end|> | }
boolean Adafruit_GPS::waitForSentence(char *wait4me, uint8_t max) {
char str[20]; |
<|file_name|>DVDInputStreamHTSP.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)<|fim▁hole|> * any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
* http://www.gnu.org/copyleft/gpl.html
*
*/
#include "DVDInputStreamHTSP.h"
#include "URL.h"
#include "FileItem.h"
#include "utils/log.h"
#include <limits.h>
extern "C" {
#include "lib/libhts/net.h"
#include "lib/libhts/htsmsg.h"
#include "lib/libhts/htsmsg_binary.h"
#include "lib/libhts/sha1.h"
}
using namespace std;
using namespace HTSP;
htsmsg_t* CDVDInputStreamHTSP::ReadStream()
{
htsmsg_t* msg;
/* after anything has started reading, *
* we can guarantee a new stream */
m_startup = false;
while((msg = m_session.ReadMessage(1000)))
{
const char* method;
if((method = htsmsg_get_str(msg, "method")) == NULL)
return msg;
if (strstr(method, "channelAdd"))
CHTSPSession::ParseChannelUpdate(msg, m_channels);
else if(strstr(method, "channelUpdate"))
CHTSPSession::ParseChannelUpdate(msg, m_channels);
else if(strstr(method, "channelRemove"))
CHTSPSession::ParseChannelRemove(msg, m_channels);
uint32_t subs;
if(htsmsg_get_u32(msg, "subscriptionId", &subs) || subs != m_subs)
{
htsmsg_destroy(msg);
continue;
}
return msg;
}
return NULL;
}
CDVDInputStreamHTSP::CDVDInputStreamHTSP()
: CDVDInputStream(DVDSTREAM_TYPE_HTSP)
, m_subs(0)
, m_startup(false)
, m_channel(0)
, m_tag(0)
{
}
CDVDInputStreamHTSP::~CDVDInputStreamHTSP()
{
Close();
}
bool CDVDInputStreamHTSP::Open(const char* file, const std::string& content)
{
if (!CDVDInputStream::Open(file, content))
return false;
CURL url(file);
if(sscanf(url.GetFileName().c_str(), "tags/%d/%d", &m_tag, &m_channel) != 2)
{
CLog::Log(LOGERROR, "CDVDInputStreamHTSP::Open - invalid url (%s)\n", url.GetFileName().c_str());
return false;
}
if(!m_session.Connect(url.GetHostName(), url.GetPort()))
return false;
if(!url.GetUserName().IsEmpty())
m_session.Auth(url.GetUserName(), url.GetPassWord());
m_session.SendEnableAsync();
if(!m_session.SendSubscribe(m_subs, m_channel))
return false;
m_startup = true;
return true;
}
bool CDVDInputStreamHTSP::IsEOF()
{
return false;
}
void CDVDInputStreamHTSP::Close()
{
CDVDInputStream::Close();
m_session.Close();
m_read.Clear();
}
int CDVDInputStreamHTSP::Read(BYTE* buf, int buf_size)
{
size_t count = m_read.Size();
if(count == 0)
{
htsmsg_t* msg = ReadStream();
if(msg == NULL)
return -1;
uint8_t* p;
if(htsmsg_binary_serialize(msg, (void**)&p, &count, INT_MAX) < 0)
{
htsmsg_destroy(msg);
return -1;
}
htsmsg_destroy(msg);
m_read.Clear();
m_read.buf = p;
m_read.cur = p;
m_read.end = p + count;
}
if(count == 0)
return 0;
if(count > (size_t)buf_size)
count = buf_size;
memcpy(buf, m_read.cur, count);
m_read.cur += count;
return count;
}
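// Read() drains one serialized HTSP message at a time: when the local buffer
// is empty it blocks in ReadStream(), serializes the htsmsg into bytes, and
// then hands callers successive slices until the buffer is exhausted.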
bool CDVDInputStreamHTSP::SetChannel(int channel)
{
CLog::Log(LOGDEBUG, "CDVDInputStreamHTSP::SetChannel - changing to channel %d", channel);
if(!m_session.SendUnsubscribe(m_subs))
CLog::Log(LOGERROR, "CDVDInputStreamHTSP::SetChannel - failed to unsubscribe from previous channel");
if(!m_session.SendSubscribe(m_subs+1, channel))
{
if(m_session.SendSubscribe(m_subs, m_channel))
CLog::Log(LOGERROR, "CDVDInputStreamHTSP::SetChannel - failed to set channel");
else
CLog::Log(LOGERROR, "CDVDInputStreamHTSP::SetChannel - failed to set channel and restore old channel");
return false;
}
m_channel = channel;
m_subs = m_subs+1;
m_startup = true;
return true;
}
bool CDVDInputStreamHTSP::GetChannels(SChannelV &channels, SChannelV::iterator &it)
{
for(SChannels::iterator it2 = m_channels.begin(); it2 != m_channels.end(); it2++)
{
if(m_tag == 0 || it2->second.MemberOf(m_tag))
channels.push_back(it2->second);
}
sort(channels.begin(), channels.end());
for(it = channels.begin(); it != channels.end(); it++)
if(it->id == m_channel)
return true;
return false;
}
bool CDVDInputStreamHTSP::NextChannel()
{
SChannelV channels;
SChannelV::iterator it;
if(!GetChannels(channels, it))
return false;
SChannelC circ(channels.begin(), channels.end(), it);
if(++circ == it)
return false;
else
return SetChannel(circ->id);
}
bool CDVDInputStreamHTSP::PrevChannel()
{
SChannelV channels;
SChannelV::iterator it;
if(!GetChannels(channels, it))
return false;
SChannelC circ(channels.begin(), channels.end(), it);
if(--circ == it)
return false;
else
return SetChannel(circ->id);
}
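// NextChannel/PrevChannel walk the sorted channel vector through the
// circular iterator SChannelC, so stepping past either end wraps around.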
bool CDVDInputStreamHTSP::SelectChannel(unsigned int channel)
{
return SetChannel(channel);
}
bool CDVDInputStreamHTSP::UpdateItem(CFileItem& item)
{
SChannels::iterator it = m_channels.find(m_channel);
if(it == m_channels.end())
return false;
SChannel& channel = it->second;
if(channel.event != m_event.id)
{
if(!m_session.GetEvent(m_event, channel.event))
{
m_event.Clear();
m_event.id = channel.event;
}
}
CFileItem current(item);
CHTSPSession::ParseItem(channel, m_tag, m_event, item);
item.SetThumbnailImage(channel.icon);
item.SetCachedVideoThumb();
return current.m_strPath != item.m_strPath
|| current.m_strTitle != item.m_strTitle;
}
int CDVDInputStreamHTSP::GetTotalTime()
{
if(m_event.id == 0)
return 0;
return (m_event.stop - m_event.start) * 1000;
}
int CDVDInputStreamHTSP::GetTime()
{
CDateTimeSpan time;
time = CDateTime::GetUTCDateTime()
- CDateTime((time_t)m_event.start);
return time.GetDays() * 1000 * 60 * 60 * 24
+ time.GetHours() * 1000 * 60 * 60
+ time.GetMinutes() * 1000 * 60
+ time.GetSeconds() * 1000;
}
void CDVDInputStreamHTSP::Abort()
{
m_session.Abort();
}<|fim▁end|> | |
<|file_name|>reporter.py<|end_file_name|><|fim▁begin|>import collections
import contextlib
import copy
import json
import warnings
import numpy
import six
from chainer.backends import cuda
from chainer import configuration
from chainer import serializer as serializer_module
from chainer import variable
def _copy_variable(value):
if isinstance(value, variable.Variable):
return copy.copy(value)
return value
class Reporter(object):
"""Object to which observed values are reported.
Reporter is used to collect values that users want to watch. The reporter
object holds a mapping from value names to the actually observed values.
We call this mapping `observations`.
When a value is passed to the reporter, an object called `observer` can be
optionally attached. In this case, the name of the observer is added as the
prefix of the value name. The observer name should be registered
beforehand.
See the following example::
>>> from chainer import Reporter, report, report_scope
>>>
>>> reporter = Reporter()
>>> observer = object() # it can be an arbitrary (reference) object
>>> reporter.add_observer('my_observer', observer)
>>> observation = {}
>>> with reporter.scope(observation):
... reporter.report({'x': 1}, observer)
...
>>> observation
{'my_observer/x': 1}
There are also a global API to add values::
>>> observation = {}
>>> with report_scope(observation):
... report({'x': 1}, observer)
...
>>> observation
{'my_observer/x': 1}
The most important application of Reporter is to report observed values
from each link or chain in the training and validation procedures.
:class:`~chainer.training.Trainer` and some extensions prepare their own
Reporter object with the hierarchy of the target link registered as
observers. We can use :func:`report` function inside any links and chains
to report the observed values (e.g., training loss, accuracy, activation
statistics, etc.).
Attributes:
observation: Dictionary of observed values.
"""
def __init__(self):<|fim▁hole|> self._observer_names = {}
self.observation = {}
def __enter__(self):
"""Makes this reporter object current."""
_reporters.append(self)
def __exit__(self, exc_type, exc_value, traceback):
"""Recovers the previous reporter object to the current."""
_reporters.pop()
@contextlib.contextmanager
def scope(self, observation):
"""Creates a scope to report observed values to ``observation``.
This is a context manager to be passed to ``with`` statements. In this
scope, the observation dictionary is changed to the given one.
It also makes this reporter object current.
Args:
observation (dict): Observation dictionary. All observations
reported inside of the ``with`` statement are written to this
dictionary.
"""
old = self.observation
self.observation = observation
self.__enter__()
yield
self.__exit__(None, None, None)
self.observation = old
def add_observer(self, name, observer):
"""Registers an observer of values.
Observer defines a scope of names for observed values. Values observed
with the observer are registered with names prefixed by the observer
name.
Args:
name (str): Name of the observer.
observer: The observer object. Note that the reporter distinguishes
the observers by their object ids (i.e., ``id(owner)``), rather
than the object equality.
"""
self._observer_names[id(observer)] = name
def add_observers(self, prefix, observers):
"""Registers multiple observers at once.
This is a convenient method to register multiple objects at once.
Args:
prefix (str): Prefix of each name of observers.
observers: Iterator of name and observer pairs.
"""
for name, observer in observers:
self._observer_names[id(observer)] = prefix + name
def report(self, values, observer=None):
"""Reports observed values.
The values are written with the key, prefixed by the name of the
observer object if given.
.. note::
As of v2.0.0, if a value is of type :class:`~chainer.Variable`, the
variable is copied without preserving the computational graph and
the new variable object purged from the graph is stored to the
observer. This behavior can be changed by setting
``chainer.config.keep_graph_on_report`` to ``True``.
Args:
values (dict): Dictionary of observed values.
observer: Observer object. Its object ID is used to retrieve the
observer name, which is used as the prefix of the registration
name of the observed value.
"""
if not configuration.config.keep_graph_on_report:
values = {k: _copy_variable(v) for k, v in six.iteritems(values)}
if observer is not None:
observer_id = id(observer)
if observer_id not in self._observer_names:
raise KeyError(
'Given observer is not registered to the reporter.')
observer_name = self._observer_names[observer_id]
for key, value in six.iteritems(values):
name = '%s/%s' % (observer_name, key)
self.observation[name] = value
else:
self.observation.update(values)
_reporters = []
def get_current_reporter():
"""Returns the current reporter object."""
return _reporters[-1]
def report(values, observer=None):
"""Reports observed values with the current reporter object.
Any reporter object can be set current by the ``with`` statement. This
function calls the :meth:`Report.report` method of the current reporter.
If no reporter object is current, this function does nothing.
.. admonition:: Example
The most typical example is a use within links and chains. Suppose that
a link is registered to the current reporter as an observer (for
example, the target link of the optimizer is automatically registered to
the reporter of the :class:`~chainer.training.Trainer`). We can report
some values from the link as follows::
class MyRegressor(chainer.Chain):
def __init__(self, predictor):
super(MyRegressor, self).__init__(predictor=predictor)
def __call__(self, x, y):
# This chain just computes the mean absolute and squared
# errors between the prediction and y.
pred = self.predictor(x)
abs_error = F.sum(F.abs(pred - y)) / len(x)
loss = F.mean_squared_error(pred, y)
# Report the mean absolute and squared errors.
report({'abs_error': abs_error, 'squared_error': loss}, self)
return loss
If the link is named ``'main'`` in the hierarchy (which is the default
name of the target link in the
:class:`~chainer.training.updaters.StandardUpdater`),
these reported values are
named ``'main/abs_error'`` and ``'main/squared_error'``. If these values
are reported inside the :class:`~chainer.training.extension.Evaluator`
extension, ``'validation/'`` is added at the head of the link name, thus
the item names are changed to ``'validation/main/abs_error'`` and
``'validation/main/squared_error'`` (``'validation'`` is the default
name of the Evaluator extension).
Args:
values (dict): Dictionary of observed values.
observer: Observer object. Its object ID is used to retrieve the
observer name, which is used as the prefix of the registration name
of the observed value.
"""
if _reporters:
current = _reporters[-1]
current.report(values, observer)
@contextlib.contextmanager
def report_scope(observation):
"""Returns a report scope with the current reporter.
This is equivalent to ``get_current_reporter().scope(observation)``,
except that it does not make the reporter current redundantly.
"""
current = _reporters[-1]
old = current.observation
current.observation = observation
yield
current.observation = old
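# Usage sketch (illustrative only; assumes some reporter is already current,
# e.g. one entered via ``with reporter:`` as described above):
#
#     observation = {}
#     with report_scope(observation):
#         report({'loss': 0.125})
#     # ``observation`` now holds the values reported inside the scope.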
def _get_device(x):
if numpy.isscalar(x):
return cuda.DummyDevice
else:
return cuda.get_device_from_array(x)
class Summary(object):
"""Online summarization of a sequence of scalars.
Summary computes the statistics of given scalars online.
"""
def __init__(self):
self._x = 0
self._x2 = 0
self._n = 0
def add(self, value, weight=1):
"""Adds a scalar value.
Args:
value: Scalar value to accumulate. It is either a NumPy scalar or
a zero-dimensional array (on CPU or GPU).
weight: An optional weight for the value. It is a NumPy scalar or
a zero-dimensional array (on CPU or GPU).
Default is 1 (integer).
"""
with _get_device(value):
self._x += weight * value
self._x2 += weight * value * value
self._n += weight
def compute_mean(self):
"""Computes the mean."""
x, n = self._x, self._n
with _get_device(x):
return x / n
def make_statistics(self):
"""Computes and returns the mean and standard deviation values.
Returns:
tuple: Mean and standard deviation values.
"""
x, n = self._x, self._n
xp = cuda.get_array_module(x)
with _get_device(x):
mean = x / n
var = self._x2 / n - mean * mean
std = xp.sqrt(var)
return mean, std
def serialize(self, serializer):
try:
self._x = serializer('_x', self._x)
self._x2 = serializer('_x2', self._x2)
self._n = serializer('_n', self._n)
except KeyError:
warnings.warn('The previous statistics are not saved.')
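# Minimal usage sketch for Summary (numbers are illustrative):
#
#     s = Summary()
#     for v in (1.0, 2.0, 3.0):
#         s.add(v)
#     mean = s.compute_mean()          # 2.0
#     mean, std = s.make_statistics()  # (2.0, ~0.816)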
class DictSummary(object):
"""Online summarization of a sequence of dictionaries.
``DictSummary`` computes the statistics of a given set of scalars online.
It only computes the statistics for scalar values and variables of scalar
values in the dictionaries.
"""
def __init__(self):
self._summaries = collections.defaultdict(Summary)
def add(self, d):
"""Adds a dictionary of scalars.
Args:
d (dict): Dictionary of scalars to accumulate. Only elements of
scalars, zero-dimensional arrays, and variables of
zero-dimensional arrays are accumulated. When the value
is a tuple, the second element is interpreted as a weight.
"""
summaries = self._summaries
for k, v in six.iteritems(d):
w = 1
if isinstance(v, tuple):
w = v[1]
v = v[0]
if isinstance(w, variable.Variable):
w = w.array
if not numpy.isscalar(w) and not getattr(w, 'ndim', -1) == 0:
raise ValueError(
'Given weight to {} was not scalar.'.format(k))
if isinstance(v, variable.Variable):
v = v.array
if numpy.isscalar(v) or getattr(v, 'ndim', -1) == 0:
summaries[k].add(v, weight=w)
def compute_mean(self):
"""Creates a dictionary of mean values.
It returns a single dictionary that holds a mean value for each entry
added to the summary.
Returns:
dict: Dictionary of mean values.
"""
return {name: summary.compute_mean()
for name, summary in six.iteritems(self._summaries)}
def make_statistics(self):
"""Creates a dictionary of statistics.
It returns a single dictionary that holds mean and standard deviation
values for every entry added to the summary. For an entry of name
``'key'``, these values are added to the dictionary by names ``'key'``
and ``'key.std'``, respectively.
Returns:
dict: Dictionary of statistics of all entries.
"""
stats = {}
for name, summary in six.iteritems(self._summaries):
mean, std = summary.make_statistics()
stats[name] = mean
stats[name + '.std'] = std
return stats
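    # Illustrative result shape (sketch): after add({'loss': 1.0}) and
    # add({'loss': 3.0}), make_statistics() returns
    # {'loss': 2.0, 'loss.std': 1.0}.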
def serialize(self, serializer):
if isinstance(serializer, serializer_module.Serializer):
names = list(self._summaries.keys())
serializer('_names', json.dumps(names))
for index, name in enumerate(names):
self._summaries[name].serialize(
serializer['_summaries'][str(index)])
else:
self._summaries.clear()
try:
names = json.loads(serializer('_names', ''))
except KeyError:
warnings.warn('The names of statistics are not saved.')
return
for index, name in enumerate(names):
self._summaries[name].serialize(
serializer['_summaries'][str(index)])<|fim▁end|> | |
<|file_name|>sers_windows.go<|end_file_name|><|fim▁begin|>// +build windows
package sers
// taken from https://github.com/tarm/goserial
// and slightly modified
// (C) 2011, 2012 Tarmigan Casebolt, Benjamin Siegert, Michael Meier
// All rights reserved.
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.
import (
"fmt"
"os"
"sync"
"syscall"
"unsafe"
)
type serialPort struct {
f *os.File
fd syscall.Handle
rl sync.Mutex
wl sync.Mutex
ro *syscall.Overlapped
wo *syscall.Overlapped
}
type structDCB struct {
DCBlength, BaudRate uint32
flags [4]byte
wReserved, XonLim, XoffLim uint16
ByteSize, Parity, StopBits byte
XonChar, XoffChar, ErrorChar, EofChar, EvtChar byte
wReserved1 uint16
}
type structTimeouts struct {
ReadIntervalTimeout uint32
ReadTotalTimeoutMultiplier uint32
ReadTotalTimeoutConstant uint32
WriteTotalTimeoutMultiplier uint32
WriteTotalTimeoutConstant uint32
}
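// Note (sketch): structDCB and structTimeouts mirror the layout of the Win32
// DCB and COMMTIMEOUTS structures; field order and sizes must stay as-is
// because pointers to them are handed straight to the kernel32 calls below.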
//func openPort(name string) (rwc io.ReadWriteCloser, err error) { // TODO
func Open(name string) (rwc SerialPort, err error) {
if len(name) > 0 && name[0] != '\\' {
name = "\\\\.\\" + name
}
h, err := syscall.CreateFile(syscall.StringToUTF16Ptr(name),
syscall.GENERIC_READ|syscall.GENERIC_WRITE,
0,
nil,
syscall.OPEN_EXISTING,
syscall.FILE_ATTRIBUTE_NORMAL|syscall.FILE_FLAG_OVERLAPPED,
0)
if err != nil {
return nil, err
}
f := os.NewFile(uintptr(h), name)
defer func() {
if err != nil {
f.Close()
}
}()
/*if err = setCommState(h, baud); err != nil {
return
}*/
if err = setupComm(h, 64, 64); err != nil {
return
}
if err = setCommTimeouts(h, 0.0); err != nil {
return
}
if err = setCommMask(h); err != nil {
return
}
ro, err := newOverlapped()
if err != nil {
return
}
wo, err := newOverlapped()
if err != nil {
return
}
port := new(serialPort)
port.f = f
port.fd = h
port.ro = ro
port.wo = wo
return port, nil
}
func (p *serialPort) Close() error {
return p.f.Close()
}
func (p *serialPort) Write(buf []byte) (int, error) {
p.wl.Lock()
defer p.wl.Unlock()
if err := resetEvent(p.wo.HEvent); err != nil {
return 0, err
}
var n uint32
err := syscall.WriteFile(p.fd, buf, &n, p.wo)
//fmt.Printf("n %d err %v\n", n, err)
_ = fmt.Printf
if err != nil && err != syscall.ERROR_IO_PENDING {
//fmt.Printf("returning...\n")
return int(n), err
}
return getOverlappedResult(p.fd, p.wo)
}
func (p *serialPort) Read(buf []byte) (int, error) {
//fmt.Printf("read(<%d bytes>)\n", len(buf))
if p == nil || p.f == nil {
return 0, fmt.Errorf("Invalid port on read %v %v", p, p.f)
}
p.rl.Lock()
defer p.rl.Unlock()
if err := resetEvent(p.ro.HEvent); err != nil {
return 0, err
}
var done uint32
//fmt.Printf("calling ReadFile... ")
err := syscall.ReadFile(p.fd, buf, &done, p.ro)
//fmt.Printf(" done. %d, %v\n", done, err)
if err != nil && err != syscall.ERROR_IO_PENDING {
return int(done), err
}
//fmt.Printf("getting OverlappedResult... ")
n, err := getOverlappedResult(p.fd, p.ro)
//fmt.Printf(" done. n %d err %v\n", n, err)
if n == 0 && err == nil {
return n, winSersTimeout{}
}
return n, err
}
var (
nSetCommState,
nSetCommTimeouts,
nSetCommMask,
nSetupComm,
nGetOverlappedResult,
nCreateEvent,
nResetEvent uintptr
)
func init() {
k32, err := syscall.LoadLibrary("kernel32.dll")
if err != nil {
panic("LoadLibrary " + err.Error())
}
defer syscall.FreeLibrary(k32)
nSetCommState = getProcAddr(k32, "SetCommState")
nSetCommTimeouts = getProcAddr(k32, "SetCommTimeouts")
nSetCommMask = getProcAddr(k32, "SetCommMask")
nSetupComm = getProcAddr(k32, "SetupComm")
nGetOverlappedResult = getProcAddr(k32, "GetOverlappedResult")
nCreateEvent = getProcAddr(k32, "CreateEventW")
nResetEvent = getProcAddr(k32, "ResetEvent")
}
func getProcAddr(lib syscall.Handle, name string) uintptr {
addr, err := syscall.GetProcAddress(lib, name)
if err != nil {
panic(name + " " + err.Error())
}
return addr
}
func setCommState(h syscall.Handle, baud, databits, parity, handshake int) error {
var params structDCB
params.DCBlength = uint32(unsafe.Sizeof(params))
params.flags[0] = 0x01 // fBinary
params.flags[0] |= 0x10 // Assert DSR
params.ByteSize = byte(databits)
params.BaudRate = uint32(baud)
//params.ByteSize = 8
switch parity {
case N:
params.flags[0] &^= 0x02
params.Parity = 0 // NOPARITY
case E:
params.flags[0] |= 0x02
params.Parity = 2 // EVENPARITY
case O:
params.flags[0] |= 0x02
params.Parity = 1 // ODDPARITY
default:
return StringError("invalid parity setting")
}
switch handshake {
case NO_HANDSHAKE:
// TODO: reset handshake
default:
return StringError("only NO_HANDSHAKE is supported on windows")
}
r, _, err := syscall.Syscall(nSetCommState, 2, uintptr(h), uintptr(unsafe.Pointer(¶ms)), 0)
if r == 0 {
return err
}
return nil
}
func setCommTimeouts(h syscall.Handle, constTimeout float64) error {
var timeouts structTimeouts
const MAXDWORD = 1<<32 - 1
timeouts.ReadIntervalTimeout = MAXDWORD
timeouts.ReadTotalTimeoutMultiplier = MAXDWORD
//timeouts.ReadTotalTimeoutConstant = MAXDWORD - 1
if constTimeout == 0 {
timeouts.ReadTotalTimeoutConstant = MAXDWORD - 1
} else {
timeouts.ReadTotalTimeoutConstant = uint32(constTimeout * 1000.0)
}
/* From http://msdn.microsoft.com/en-us/library/aa363190(v=VS.85).aspx
For blocking I/O see below:
Remarks:
If an application sets ReadIntervalTimeout and
ReadTotalTimeoutMultiplier to MAXDWORD and sets
ReadTotalTimeoutConstant to a value greater than zero and
less than MAXDWORD, one of the following occurs when the
ReadFile function is called:
If there are any bytes in the input buffer, ReadFile returns
immediately with the bytes in the buffer.
If there are no bytes in the input buffer, ReadFile waits
until a byte arrives and then returns immediately.
If no bytes arrive within the time specified by
ReadTotalTimeoutConstant, ReadFile times out.
*/
r, _, err := syscall.Syscall(nSetCommTimeouts, 2, uintptr(h), uintptr(unsafe.Pointer(&timeouts)), 0)
if r == 0 {
return err
}
return nil
}
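// Illustrative mapping (sketch): setCommTimeouts(h, 0) selects fully blocking
// reads (ReadTotalTimeoutConstant = MAXDWORD-1, per the MSDN remark above),
// while setCommTimeouts(h, 1.5) yields a 1500 ms total read timeout.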
func setupComm(h syscall.Handle, in, out int) error {
r, _, err := syscall.Syscall(nSetupComm, 3, uintptr(h), uintptr(in), uintptr(out))
if r == 0 {
return err
}
return nil
}
func setCommMask(h syscall.Handle) error {
const EV_RXCHAR = 0x0001
r, _, err := syscall.Syscall(nSetCommMask, 2, uintptr(h), EV_RXCHAR, 0)
if r == 0 {
return err
}
return nil
}
func resetEvent(h syscall.Handle) error {
r, _, err := syscall.Syscall(nResetEvent, 1, uintptr(h), 0, 0)
if r == 0 {
return err
}
return nil
}
func newOverlapped() (*syscall.Overlapped, error) {
var overlapped syscall.Overlapped
r, _, err := syscall.Syscall6(nCreateEvent, 4, 0, 1, 0, 0, 0, 0)
if r == 0 {
return nil, err
}
overlapped.HEvent = syscall.Handle(r)
return &overlapped, nil
}
func getOverlappedResult(h syscall.Handle, overlapped *syscall.Overlapped) (int, error) {
var n int
r, _, err := syscall.Syscall6(nGetOverlappedResult, 4,
uintptr(h),
uintptr(unsafe.Pointer(overlapped)),
uintptr(unsafe.Pointer(&n)), 1, 0, 0)
if r == 0 {
return n, err<|fim▁hole|> return n, nil
}
func (sp *serialPort) SetMode(baudrate, databits, parity, stopbits, handshake int) error {
if err := setCommState(syscall.Handle(sp.f.Fd()), baudrate, databits, parity, handshake); err != nil {
return err
}
//return StringError("SetMode not implemented yet on Windows")
return nil
}
func (sp *serialPort) SetReadParams(minread int, timeout float64) error {
// TODO: minread is ignored!
return setCommTimeouts(sp.fd, timeout)
//return StringError("SetReadParams not implemented yet on Windows")
}
type winSersTimeout struct{}
func (wst winSersTimeout) Error() string {
return "a timeout has occured"
}
func (wst winSersTimeout) Timeout() bool {
return true
}<|fim▁end|> | }
//fmt.Printf("n %d err %v\n", n, err) |
<|file_name|>yaml.min.js<|end_file_name|><|fim▁begin|>Joomla 3.6.4 = 5e60174db2edd61c1c32011464017d84<|fim▁hole|>Joomla 3.7.0 = c7eeeb362de64acba3e76fd13e0ad6af
Joomla 3.4.1 = 399117bc209c7f4eb16f72bfef504db7<|fim▁end|> | |
<|file_name|>categories.client.config.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|> // Menus.addMenuItem('topbar', 'Categories', 'categories', 'item', '/categories(?:/[^/]+)?', null, null, 9);
// Set admin menu items
Menus.addMenuItem('admin', 'Categories', 'categories', 'dropdown', '/categories(/create)?');
Menus.addSubMenuItem('admin', 'categories', 'List Categories', 'categories');
Menus.addSubMenuItem('admin', 'categories', 'New Category', 'categories/create');
}
]);<|fim▁end|> | // Configuring the Articles module
angular.module('categories').run(['Menus',
function(Menus) {
// Set top bar menu items |
<|file_name|>geste2lfmm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# Copyright 2018 Francisco Pina Martins <[email protected]>
# This file is part of geste2lfmm.
# geste2lfmm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# geste2lfmm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License<|fim▁hole|># Usage: python3 geste2lfmm.py file.geste file.lfmm
from collections import OrderedDict
def parse_geste(infile_name):
"""
    Parses a GESTE file and returns an OrderedDict with:
{"Population_name":[Freq_ref_allele_on SNP_1,Freq_ref_allele_on SNP_2,...]}
"""
infile = open(infile_name, "r")
pop_freqs = OrderedDict()
pop_starter = "[pop]="
popname = ""
for line in infile:
# Neat trick to ignore data that is not SNP info
        # This code section should be fast, since it replaces most
        # if/else tests with try/except statements
line = line.split()
try:
int(line[0])
except ValueError: # In case it's a new section
if line[0].startswith(pop_starter):
popname = "Pop %s" % line[0].strip().replace(pop_starter, "")
pop_freqs[popname] = []
continue
except IndexError: # In case it's an empty line
continue
try:
ref_frequency = round(int(line[3]) / int(line[1]), 3)
except ZeroDivisionError:
ref_frequency = 9
pop_freqs[popname].append(ref_frequency)
infile.close()
return pop_freqs
def write_lfmm(pop_freqs, lfmm_filename):
"""
    Writes an LFMM input file based on the OrderedDict extracted from the GESTE
file.
"""
outfile = open(lfmm_filename, 'w')
for name, freq in pop_freqs.items():
outfile.write(name + "\t")
outfile.write("\t".join(map(str, freq)) + "\n")
outfile.close()
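# Output sketch (illustrative): with pop_freqs == {"Pop 1": [0.5, 9]}, the
# written line is "Pop 1\t0.5\t9" -- missing data stays encoded as 9.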
if __name__ == "__main__":
from sys import argv
POP_FREQS = parse_geste(argv[1])
write_lfmm(POP_FREQS, argv[2])<|fim▁end|> | # along with geste2lfmm. If not, see <http://www.gnu.org/licenses/>.
|
<|file_name|>test_project.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
from horton.meanfield.test.common import helper_compute
def test_project_msg_identical():
mol = IOData.from_file(context.get_fn('test/water_sto3g_hf_g03.fchk'))
exp = mol.lf.create_expansion()
project_orbitals_mgs(mol.obasis, mol.obasis, mol.exp_alpha, exp)
assert (exp.energies == 0.0).all()<|fim▁hole|>
def test_project_ortho_basis_identical():
mol = IOData.from_file(context.get_fn('test/water_sto3g_hf_g03.fchk'))
exp = mol.lf.create_expansion()
project_orbitals_ortho(mol.obasis, mol.obasis, mol.exp_alpha, exp)
assert (exp.energies == 0.0).all()
assert (exp.occupations == mol.exp_alpha.occupations).all()
assert abs(exp.coeffs - mol.exp_alpha.coeffs).max() < 1e-9
def test_project_ortho_olp_identical():
mol = IOData.from_file(context.get_fn('test/water_sto3g_hf_g03.fchk'))
olp = mol.lf.create_two_index()
for i in xrange(olp.nbasis):
olp.set_element(i, i, 1.0)
exp = mol.lf.create_expansion()
project_orbitals_ortho(mol.obasis, mol.obasis, mol.exp_alpha, exp)
assert (exp.energies == 0.0).all()
assert (exp.occupations == mol.exp_alpha.occupations).all()
assert abs(exp.coeffs - mol.exp_alpha.coeffs).max() < 1e-9
def test_project_msg_larger():
# Load STO3G system and keep essential results
mol = IOData.from_file(context.get_fn('test/water_sto3g_hf_g03.fchk'))
obasis0 = mol.obasis
exp0 = mol.exp_alpha
# Upgrade the basis to 3-21G and project
obasis1 = get_gobasis(mol.coordinates, mol.numbers, '3-21G')
lf1 = DenseLinalgFactory(obasis1.nbasis)
exp1 = lf1.create_expansion()
project_orbitals_mgs(obasis0, obasis1, exp0, exp1)
assert (exp1.energies == 0.0).all()
assert exp0.occupations.sum() == exp1.occupations.sum()
assert (exp1.coeffs[:,5:] == 0.0).all()
# Check the normalization of the projected orbitals
olp = obasis1.compute_overlap(lf1)
exp1.check_orthonormality(olp)
# Setup HF hamiltonian and compute energy
kin = obasis1.compute_kinetic(lf1)
na = obasis1.compute_nuclear_attraction(mol.coordinates, mol.pseudo_numbers, lf1)
er = obasis1.compute_electron_repulsion(lf1)
terms = [
RTwoIndexTerm(kin, 'kin'),
RDirectTerm(er, 'hartree'),
RExchangeTerm(er, 'x_hf'),
RTwoIndexTerm(na, 'ne'),
]
ham = REffHam(terms)
# Compute energy after projection
energy1 = helper_compute(ham, lf1, exp1)[0]
# Optimize wfn
scf_solver = PlainSCFSolver(1e-6)
occ_model = AufbauOccModel(5)
scf_solver(ham, lf1, olp, occ_model, exp1)
energy2 = ham.cache['energy']
assert energy2 < energy1 # the energy should decrease after scf convergence
# Construct a core initial guess
guess_core_hamiltonian(olp, kin, na, exp1)
energy3 = helper_compute(ham, lf1, exp1)[0]
assert energy3 > energy1 # the projected guess should be better than the core guess
def test_project_msg_smaller():
# Load 3-21G system and keep essential results
mol = IOData.from_file(context.get_fn('test/li_h_3-21G_hf_g09.fchk'))
obasis0 = mol.obasis
exp0_alpha = mol.exp_alpha
exp0_beta = mol.exp_beta
# Downgrade the basis to sto-3g and project
obasis1 = get_gobasis(mol.coordinates, mol.numbers, 'sto-3g')
lf1 = DenseLinalgFactory(obasis1.nbasis)
exp1_alpha = lf1.create_expansion()
exp1_beta = lf1.create_expansion()
project_orbitals_mgs(obasis0, obasis1, exp0_alpha, exp1_alpha)
project_orbitals_mgs(obasis0, obasis1, exp0_beta, exp1_beta)
assert (exp1_alpha.energies == 0.0).all()
assert (exp1_beta.energies == 0.0).all()
assert exp1_alpha.occupations.sum() == 2
assert exp1_beta.occupations.sum() == 1
assert (exp1_alpha.coeffs[:,2:] == 0.0).all()
assert (exp1_beta.coeffs[:,1:] == 0.0).all()
# Check the normalization of the projected orbitals
olp = obasis1.compute_overlap(lf1)
exp1_alpha.check_orthonormality(olp)
exp1_beta.check_orthonormality(olp)
# Setup HF hamiltonian and compute energy
kin = obasis1.compute_kinetic(lf1)
na = obasis1.compute_nuclear_attraction(mol.coordinates, mol.pseudo_numbers, lf1)
er = obasis1.compute_electron_repulsion(lf1)
terms = [
UTwoIndexTerm(kin, 'kin'),
UDirectTerm(er, 'hartree'),
UExchangeTerm(er, 'x_hf'),
UTwoIndexTerm(na, 'ne'),
]
ham = UEffHam(terms)
# Compute energy before SCF
energy1 = helper_compute(ham, lf1, exp1_alpha, exp1_beta)[0]
scf_solver = PlainSCFSolver(1e-6)
occ_model = AufbauOccModel(2, 1)
scf_solver(ham, lf1, olp, occ_model, exp1_alpha, exp1_beta)
energy2 = ham.cache['energy']
assert energy2 < energy1 # the energy should decrease after scf convergence
def get_basis_pair_geometry():
'''Prepare two basis sets that only differ in geometry'''
# Create initial system
mol = IOData.from_file(context.get_fn('test/water.xyz'))
obasis0 = get_gobasis(mol.coordinates, mol.numbers, 'sto-3g')
lf = DenseLinalgFactory(obasis0.nbasis)
exp0 = lf.create_expansion()
# Occupy all orbitals such that orthogonality is well tested
exp0.occupations[:] = 1.0
# core-hamiltonian guess
olp = obasis0.compute_overlap(lf)
kin = obasis0.compute_kinetic(lf)
na = obasis0.compute_nuclear_attraction(mol.coordinates, mol.pseudo_numbers, lf)
er = obasis0.compute_electron_repulsion(lf)
guess_core_hamiltonian(olp, kin, na, exp0)
# Internal consistency check
exp0.check_orthonormality(obasis0.compute_overlap(lf))
# Change geometry
mol.coordinates[1,2] += 0.5
mol.coordinates[0,1] -= 1.5
obasis1 = get_gobasis(mol.coordinates, mol.numbers, 'sto-3g')
exp1 = lf.create_expansion()
return obasis0, obasis1, exp0, exp1, lf
def test_project_msg_geometry():
obasis0, obasis1, exp0, exp1, lf = get_basis_pair_geometry()
# Project from one to other:
project_orbitals_mgs(obasis0, obasis1, exp0, exp1)
# Basic checks
assert (exp1.energies == 0.0).all()
assert (exp1.occupations == exp0.occupations).all()
assert abs(exp1.coeffs[:,:5] - exp0.coeffs[:,:5]).max() > 1e-3 # something should change
# Check orthonormality
exp1.check_orthonormality(obasis1.compute_overlap(lf))
def test_project_ortho_basis_geometry():
obasis0, obasis1, exp0, exp1, lf = get_basis_pair_geometry()
# Project from one to other:
project_orbitals_ortho(obasis0, obasis1, exp0, exp1)
# Basic checks
assert (exp1.energies == 0.0).all()
assert (exp1.occupations == exp0.occupations).all()
assert abs(exp1.coeffs[:,:5] - exp0.coeffs[:,:5]).max() > 1e-3 # something should change
# Check orthonormality
exp1.check_orthonormality(obasis1.compute_overlap(lf))
def test_project_ortho_olp_geometry():
obasis0, obasis1, exp0, exp1, lf = get_basis_pair_geometry()
# Project from one to other:
olp0 = obasis0.compute_overlap(lf)
olp1 = obasis1.compute_overlap(lf)
project_orbitals_ortho(olp0, olp1, exp0, exp1)
# Basic checks
assert (exp1.energies == 0.0).all()
assert (exp1.occupations == exp0.occupations).all()
assert abs(exp1.coeffs[:,:5] - exp0.coeffs[:,:5]).max() > 1e-3 # something should change
# Check orthonormality
exp1.check_orthonormality(obasis1.compute_overlap(lf))<|fim▁end|> | assert (exp.occupations == mol.exp_alpha.occupations).all()
assert abs(exp.coeffs[:,:-2] - mol.exp_alpha.coeffs[:,:-2]).max() < 1e-9
assert (exp.coeffs[:,-2:] == 0.0).all()
|
<|file_name|>http_server.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use foxbox_taxonomy::manager::*;
use hyper::net::{ NetworkListener };
use iron::{ AfterMiddleware, Chain, Handler,
HttpServerFactory, Iron, IronResult, Request,
Response, ServerFactory };
use iron_cors::CORS;
use iron::error::{ IronError };
use iron::method::Method;
use iron::status::Status;
use mount::Mount;
use router::NoRoute;
use static_router;
use std::net::SocketAddr;
use std::sync::Arc;
use std::thread;
use taxonomy_router;
use tls::SniServerFactory;
use traits::Controller;
const THREAD_COUNT: usize = 8;
struct Custom404;
impl AfterMiddleware for Custom404 {
fn catch(&self, _: &mut Request, err: IronError) -> IronResult<Response> {
use std::io::Error as StdError;
use std::io::ErrorKind;
if let Some(_) = err.error.downcast::<NoRoute>() {
// Router error
return Ok(Response::with((Status::NotFound,
format!("Unknown resource: {}", err))));
} else if let Some(err) = err.error.downcast::<StdError>() {
// StaticFile error
if err.kind() == ErrorKind::NotFound {
return Ok(Response::with((Status::NotFound,
format!("Unknown resource: {}", err))));
}
}
// Just let other errors go through, like 401.
Err(err)
}
}
struct Ping;
impl Handler for Ping {
fn handle (&self, _: &mut Request) -> IronResult<Response> {
Ok(Response::with(Status::NoContent))
}
}
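// Quick manual check (sketch): with the server running,
// `curl -i http://localhost:3000/ping` should answer "HTTP/1.1 204 No Content"
// with an empty body.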
pub struct HttpServer<T: Controller> {
controller: T
}
impl<T: Controller> HttpServer<T> {
pub fn new(controller: T) -> Self {
HttpServer { controller: controller }
}
pub fn start(&mut self, adapter_api: &Arc<AdapterManager>) {
let taxonomy_chain = taxonomy_router::create(self.controller.clone(),
adapter_api);
let users_manager = self.controller.get_users_manager();
let mut mount = Mount::new();
mount.mount("/", static_router::create(users_manager.clone()))
.mount("/ping", Ping)
.mount("/api/v1", taxonomy_chain)
.mount("/users", users_manager.get_router_chain());
let mut chain = Chain::new(mount);
chain.link_after(Custom404);
let cors = CORS::new(vec![
(vec![Method::Get], "ping".to_owned()),
(vec![Method::Get, Method::Post, Method::Put, Method::Delete],
"services/:service/:command".to_owned()),
(vec![Method::Get], "services/list".to_owned()),
<|fim▁hole|> (vec![Method::Get, Method::Post], "api/v1/services".to_owned()),
(vec![Method::Post, Method::Delete], "api/v1/services/tags".to_owned()),
(vec![Method::Get, Method::Post], "api/v1/channels".to_owned()),
(vec![Method::Put], "api/v1/channels/get".to_owned()),
(vec![Method::Put], "api/v1/channels/set".to_owned()),
(vec![Method::Post, Method::Delete], "api/v1/channels/tags".to_owned())
]);
chain.link_after(cors);
let addrs: Vec<_> = self.controller.http_as_addrs().unwrap().collect();
if self.controller.get_tls_enabled() {
let mut certificate_manager = self.controller.get_certificate_manager();
let server_factory = SniServerFactory::new(&mut certificate_manager);
start_server(addrs, chain, server_factory);
} else {
start_server(addrs, chain, HttpServerFactory {});
}
}
}
fn start_server<TListener, T>(addrs: Vec<SocketAddr>, chain: Chain, factory: T)
where TListener: NetworkListener + Send + 'static,
T: ServerFactory<TListener> + Send + 'static {
thread::Builder::new().name("HttpServer".to_owned())
.spawn(move || {
Iron::new(chain)
.listen_with(addrs[0], THREAD_COUNT, &factory, None)
.unwrap();
}).unwrap();
}
#[cfg(test)]
describe! ping {
before_each {
use mount::Mount;
use iron::Headers;
use iron::status::Status;
use iron_test::request;
use super::Ping;
let mut mount = Mount::new();
mount.mount("/ping", Ping);
}
it "should response 204 NoContent" {
let response = request::get("http://localhost:3000/ping",
Headers::new(),
&mount).unwrap();
assert_eq!(response.status.unwrap(), Status::NoContent);
}
}
#[cfg(test)]
describe! http_server {
before_each {
extern crate hyper;
use foxbox_taxonomy::manager::AdapterManager;
use std::thread;
use std::sync::Arc;
use std::time::Duration;
use stubs::controller::ControllerStub;
let taxo_manager = Arc::new(AdapterManager::new(None));
let mut http_server = HttpServer::new(ControllerStub::new());
http_server.start(&taxo_manager);
        // HACK: Give the HTTP server some time to start.
thread::sleep(Duration::new(3, 0));
}
it "should get the appropriate CORS headers" {
use iron::headers;
use iron::method::Method;
let endpoints = vec![
(vec![Method::Get, Method::Post, Method::Put],
"services/:service/:command".to_owned()),
(vec![Method::Get], "services/list".to_owned())
];
let client = hyper::Client::new();
for endpoint in endpoints {
let (_, path) = endpoint;
let path = "http://localhost:3000/".to_owned() +
&(path.replace(":", "foo"));
let res = client.get(&path).send();
let headers = &res.unwrap().headers;
assert!(headers.has::<headers::AccessControlAllowOrigin>());
assert!(headers.has::<headers::AccessControlAllowHeaders>());
assert!(headers.has::<headers::AccessControlAllowMethods>());
};
}
it "should respond with 404" {
use iron::status::Status;
use std::io::Read;
let client = hyper::Client::new();
let path = "http://localhost:3000/foo/bar".to_owned();
let mut res = client.get(&path).send().unwrap();
assert_eq!(res.status, Status::NotFound);
let mut body = String::new();
res.read_to_string(&mut body).unwrap();
assert_eq!(body, "Unknown resource: No such file or \
directory (os error 2)".to_owned());
}
}<|fim▁end|> | // Taxonomy router paths. Keep in sync with taxonomy_router.rs |
<|file_name|>StrongComponentRenderer.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012 Tom Denley
* This file incorporates work covered by the following copyright and
* permission notice:
*
* Copyright (c) 2003-2008, Franz-Josef Elmer, All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.netmelody.neoclassycle.renderer;
import org.netmelody.neoclassycle.graph.StrongComponent;
/**<|fim▁hole|> */
public interface StrongComponentRenderer {
    /** Renders the specified {@link StrongComponent}. */
public String render(StrongComponent component);
}<|fim▁end|> | * Interface for rendering a {@link StrongComponent}.
*
* @author Franz-Josef Elmer |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8; -*-
"""
Copyright (C) 2007-2012 Lincoln de Sousa <[email protected]>
Copyright (C) 2007 Gabriel Falcão <[email protected]>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301 USA
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import dbus
import gtk
import logging
import os
import subprocess
import sys
from optparse import OptionParser
from guake.common import ShowableError
from guake.common import _
from guake.common import test_gconf
from guake.dbusiface import DBUS_NAME
from guake.dbusiface import DBUS_PATH
from guake.dbusiface import DbusManager
from guake.globals import KEY
from guake.guake_app import Guake
log = logging.getLogger(__name__)
def main():
"""Parses the command line parameters and decide if dbus methods
should be called or not. If there is already a guake instance
running it will be used and a True value will be returned,
otherwise, false will be returned.
"""
# COLORTERM is an environment variable set by some terminal emulators such as gnome-terminal.
# To avoid confusing applications running inside Guake, clean up COLORTERM at startup.
if "COLORTERM" in os.environ:
del os.environ['COLORTERM']
# Force to xterm-256 colors for compatibility with some old command line programs
os.environ["TERM"] = "xterm-256color"
parser = OptionParser()
parser.add_option('-f', '--fullscreen', dest='fullscreen',
action='store_true', default=False,
help=_('Put Guake in fullscreen mode'))
parser.add_option('-t', '--toggle-visibility', dest='show_hide',
action='store_true', default=False,
help=_('Toggles the visibility of the terminal window'))
parser.add_option('--show', dest="show",
action='store_true', default=False,
help=_('Shows Guake main window'))
parser.add_option('--hide', dest='hide',
action='store_true', default=False,
help=_('Hides Guake main window'))
parser.add_option('-p', '--preferences', dest='show_preferences',
action='store_true', default=False,
help=_('Shows Guake preference window'))
parser.add_option('-a', '--about', dest='show_about',
action='store_true', default=False,
help=_('Shows Guake\'s about info'))
parser.add_option('-n', '--new-tab', dest='new_tab',
action='store', default='',
help=_('Add a new tab (with current directory set to NEW_TAB)'))
parser.add_option('-s', '--select-tab', dest='select_tab',
action='store', default='',
help=_('Select a tab (SELECT_TAB is the index of the tab)'))
parser.add_option('-g', '--selected-tab', dest='selected_tab',
action='store_true', default=False,
help=_('Return the selected tab index.'))
parser.add_option('-e', '--execute-command', dest='command',
action='store', default='',
help=_('Execute an arbitrary command in the selected tab.'))
parser.add_option('-i', '--tab-index', dest='tab_index',
action='store', default='0',
help=_('Specify the tab to rename. Default is 0.'))
parser.add_option('--bgcolor', dest='bgcolor',
action='store', default='',
help=_('Set the hexadecimal (#rrggbb) background color of '
'the selected tab.'))
parser.add_option('--fgcolor', dest='fgcolor',
action='store', default='',
help=_('Set the hexadecimal (#rrggbb) foreground color of the '
'selected tab.'))
parser.add_option('--rename-tab', dest='rename_tab',
metavar='TITLE',
action='store', default='',
help=_('Rename the specified tab. Reset to default if TITLE is '
'a single dash "-".'))
parser.add_option('-r', '--rename-current-tab', dest='rename_current_tab',
metavar='TITLE',
action='store', default='',
help=_('Rename the current tab. Reset to default if TITLE is a '
'single dash "-".'))
parser.add_option('-q', '--quit', dest='quit',
action='store_true', default=False,
help=_('Says to Guake go away =('))
parser.add_option('-u', '--no-startup-script', dest='execute_startup_script',
action='store_false', default=True,
help=_('Do not execute the start up script'))
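    # Illustrative invocations (sketch, not an exhaustive list):
    #   guake --toggle-visibility    # show/hide a running instance
    #   guake -n "$PWD" -r build     # open a tab here and rename it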
options = parser.parse_args()[0]
instance = None
# Trying to get an already running instance of guake. If it is not
# possible, lets create a new instance. This function will return
# a boolean value depending on this decision.
try:
bus = dbus.SessionBus()
remote_object = bus.get_object(DBUS_NAME, DBUS_PATH)
already_running = True
except dbus.DBusException:
instance = Guake()
remote_object = DbusManager(instance)
already_running = False
only_show_hide = True
if options.fullscreen:
remote_object.fullscreen()
if options.show:
remote_object.show_from_remote()
if options.hide:
remote_object.hide_from_remote()
if options.show_preferences:
remote_object.show_prefs()
only_show_hide = False
if options.new_tab:
remote_object.add_tab(options.new_tab)
only_show_hide = False
if options.select_tab:
selected = int(options.select_tab)
i = remote_object.select_tab(selected)
if i is None:
sys.stdout.write('invalid index: %d\n' % selected)
only_show_hide = False
if options.selected_tab:
selected = remote_object.get_selected_tab()
sys.stdout.write('%d\n' % selected)
only_show_hide = False
if options.command:
remote_object.execute_command(options.command)
only_show_hide = False
if options.tab_index and options.rename_tab:
remote_object.rename_tab(int(options.tab_index), options.rename_tab)
only_show_hide = False
if options.bgcolor:
remote_object.set_bgcolor(options.bgcolor)
only_show_hide = False<|fim▁hole|>
if options.fgcolor:
remote_object.set_fgcolor(options.fgcolor)
only_show_hide = False
if options.rename_current_tab:
remote_object.rename_current_tab(options.rename_current_tab)
only_show_hide = False
if options.show_about:
remote_object.show_about()
only_show_hide = False
if already_running and only_show_hide:
# here we know that guake was called without any parameter and
# it is already running, so, lets toggle its visibility.
remote_object.show_hide()
if options.execute_startup_script:
if not already_running:
startup_script = instance.client.get_string(KEY("/general/startup_script"))
if startup_script:
log.info("Calling startup script: %s", startup_script)
                process = subprocess.Popen([startup_script], shell=True, stdin=None,
                                           stdout=None, stderr=None, close_fds=True)
                log.info("Startup script started with pid: %s", process.pid)
# Please ensure this is the last line !!!!
else:
log.info("--no-startup-script argument defined, so don't execute the startup script")
return already_running
def exec_main():
if not test_gconf():
raise ShowableError(_('Guake can not init!'),
_('Gconf Error.\n'
'Have you installed <b>guake.schemas</b> properly?'))
if not main():
gtk.main()
if __name__ == '__main__':
exec_main()<|fim▁end|> | |
<|file_name|>automated_deploy_stage_rollback_policy.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// DevOps API
//
// Use the DevOps APIs to create a DevOps project to group the pipelines, add reference to target deployment environments, add artifacts to deploy, and create deployment pipelines needed to deploy your software.<|fim▁hole|>//
package devops
import (
"encoding/json"
"github.com/oracle/oci-go-sdk/v46/common"
)
// AutomatedDeployStageRollbackPolicy Specifies the automated rollback policy for a stage on failure.
type AutomatedDeployStageRollbackPolicy struct {
}
func (m AutomatedDeployStageRollbackPolicy) String() string {
return common.PointerString(m)
}
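// Illustrative output (sketch): json.Marshal on this type injects the
// discriminator, producing {"policyType":"AUTOMATED_STAGE_ROLLBACK_POLICY"}.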
// MarshalJSON marshals to json representation
func (m AutomatedDeployStageRollbackPolicy) MarshalJSON() (buff []byte, e error) {
type MarshalTypeAutomatedDeployStageRollbackPolicy AutomatedDeployStageRollbackPolicy
s := struct {
DiscriminatorParam string `json:"policyType"`
MarshalTypeAutomatedDeployStageRollbackPolicy
}{
"AUTOMATED_STAGE_ROLLBACK_POLICY",
(MarshalTypeAutomatedDeployStageRollbackPolicy)(m),
}
return json.Marshal(&s)
}<|fim▁end|> | |
<|file_name|>test_tableservice.py<|end_file_name|><|fim▁begin|># coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import base64
import time
import unittest
from datetime import datetime
from azure import WindowsAzureError, WindowsAzureBatchOperationError
from azure.storage import (
Entity,
EntityProperty,
StorageServiceProperties,
TableService,
)
from util import (
AzureTestCase,
credentials,
getUniqueName,
set_service_options,
)
#------------------------------------------------------------------------------
MAX_RETRY = 60
#------------------------------------------------------------------------------
class TableServiceTest(AzureTestCase):
def setUp(self):
self.ts = TableService(credentials.getStorageServicesName(),
credentials.getStorageServicesKey())
set_service_options(self.ts)
self.table_name = getUniqueName('uttable')
self.additional_table_names = []
def tearDown(self):
self.cleanup()
return super(TableServiceTest, self).tearDown()
def cleanup(self):
try:
self.ts.delete_table(self.table_name)
except:
pass
for name in self.additional_table_names:
try:
self.ts.delete_table(name)
except:
pass
#--Helpers-----------------------------------------------------------------
def _create_table(self, table_name):
'''
Creates a table with the specified name.
'''
self.ts.create_table(table_name, True)
def _create_table_with_default_entities(self, table_name, entity_count):
'''
Creates a table with the specified name and adds entities with the
default set of values. PartitionKey is set to 'MyPartition' and RowKey
is set to a unique counter value starting at 1 (as a string).
'''
entities = []
self._create_table(table_name)
for i in range(1, entity_count + 1):
entities.append(self.ts.insert_entity(
table_name,
self._create_default_entity_dict('MyPartition', str(i))))
return entities
def _create_default_entity_class(self, partition, row):
'''
Creates a class-based entity with fixed values, using all
of the supported data types.
'''
entity = Entity()
entity.PartitionKey = partition
entity.RowKey = row
entity.age = 39
entity.sex = 'male'
entity.married = True
entity.deceased = False
entity.optional = None
entity.ratio = 3.1
entity.large = 9333111000
entity.Birthday = datetime(1973, 10, 4)
entity.birthday = datetime(1970, 10, 4)
entity.binary = None
entity.other = EntityProperty('Edm.Int64', 20)
entity.clsid = EntityProperty(
'Edm.Guid', 'c9da6455-213d-42c9-9a79-3e9149a57833')
return entity
def _create_default_entity_dict(self, partition, row):
'''
Creates a dictionary-based entity with fixed values, using all
of the supported data types.
'''
return {'PartitionKey': partition,
'RowKey': row,
'age': 39,
'sex': 'male',
'married': True,
'deceased': False,
'optional': None,
'ratio': 3.1,
'large': 9333111000,
'Birthday': datetime(1973, 10, 4),
'birthday': datetime(1970, 10, 4),
'other': EntityProperty('Edm.Int64', 20),
'clsid': EntityProperty(
'Edm.Guid',
'c9da6455-213d-42c9-9a79-3e9149a57833')}
def _create_updated_entity_dict(self, partition, row):
'''
Creates a dictionary-based entity with fixed values, with a
different set of values than the default entity. It
adds fields, changes field values, changes field types,
and removes fields when compared to the default entity.
'''
return {'PartitionKey': partition,
'RowKey': row,
'age': 'abc',
'sex': 'female',
'sign': 'aquarius',
'birthday': datetime(1991, 10, 4)}
def _assert_default_entity(self, entity):
'''
Asserts that the entity passed in matches the default entity.
'''
self.assertEqual(entity.age, 39)
self.assertEqual(entity.sex, 'male')
self.assertEqual(entity.married, True)
self.assertEqual(entity.deceased, False)
self.assertFalse(hasattr(entity, "aquarius"))
self.assertEqual(entity.ratio, 3.1)
self.assertEqual(entity.large, 9333111000)
self.assertEqual(entity.Birthday, datetime(1973, 10, 4))
self.assertEqual(entity.birthday, datetime(1970, 10, 4))
self.assertEqual(entity.other, 20)
self.assertIsInstance(entity.clsid, EntityProperty)
self.assertEqual(entity.clsid.type, 'Edm.Guid')
self.assertEqual(entity.clsid.value,
'c9da6455-213d-42c9-9a79-3e9149a57833')
def _assert_updated_entity(self, entity):
'''
Asserts that the entity passed in matches the updated entity.
'''
self.assertEqual(entity.age, 'abc')
self.assertEqual(entity.sex, 'female')
self.assertFalse(hasattr(entity, "married"))
self.assertFalse(hasattr(entity, "deceased"))
self.assertEqual(entity.sign, 'aquarius')
self.assertFalse(hasattr(entity, "optional"))
self.assertFalse(hasattr(entity, "ratio"))
self.assertFalse(hasattr(entity, "large"))
self.assertFalse(hasattr(entity, "Birthday"))
self.assertEqual(entity.birthday, datetime(1991, 10, 4))
self.assertFalse(hasattr(entity, "other"))
self.assertFalse(hasattr(entity, "clsid"))
def _assert_merged_entity(self, entity):
'''
Asserts that the entity passed in matches the default entity
merged with the updated entity.
'''
self.assertEqual(entity.age, 'abc')
self.assertEqual(entity.sex, 'female')
self.assertEqual(entity.sign, 'aquarius')
self.assertEqual(entity.married, True)
self.assertEqual(entity.deceased, False)
self.assertEqual(entity.sign, 'aquarius')
self.assertEqual(entity.ratio, 3.1)
self.assertEqual(entity.large, 9333111000)
self.assertEqual(entity.Birthday, datetime(1973, 10, 4))
self.assertEqual(entity.birthday, datetime(1991, 10, 4))
self.assertEqual(entity.other, 20)
self.assertIsInstance(entity.clsid, EntityProperty)
self.assertEqual(entity.clsid.type, 'Edm.Guid')
self.assertEqual(entity.clsid.value,
'c9da6455-213d-42c9-9a79-3e9149a57833')
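    # Semantics sketch: update_entity replaces the stored entity outright
    # (fields missing from the update disappear), while merge_entity overlays
    # the new values and keeps fields the update omits -- compare
    # _assert_updated_entity with _assert_merged_entity above.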
#--Test cases for table service -------------------------------------------
def test_get_set_table_service_properties(self):
table_properties = self.ts.get_table_service_properties()
self.ts.set_table_service_properties(table_properties)
tests = [('logging.delete', True),
('logging.delete', False),
('logging.read', True),
('logging.read', False),
('logging.write', True),
('logging.write', False),
]
for path, value in tests:
# print path
cur = table_properties
for component in path.split('.')[:-1]:
cur = getattr(cur, component)
last_attr = path.split('.')[-1]
setattr(cur, last_attr, value)
self.ts.set_table_service_properties(table_properties)
retry_count = 0
while retry_count < MAX_RETRY:
table_properties = self.ts.get_table_service_properties()
cur = table_properties
for component in path.split('.'):
cur = getattr(cur, component)
if value == cur:
break
time.sleep(1)
retry_count += 1
self.assertEqual(value, cur)
def test_table_service_retention_single_set(self):
table_properties = self.ts.get_table_service_properties()
table_properties.logging.retention_policy.enabled = False
table_properties.logging.retention_policy.days = 5
# TODO: Better error, ValueError?
self.assertRaises(WindowsAzureError,
self.ts.set_table_service_properties,
table_properties)
table_properties = self.ts.get_table_service_properties()
table_properties.logging.retention_policy.days = None
table_properties.logging.retention_policy.enabled = True
# TODO: Better error, ValueError?
self.assertRaises(WindowsAzureError,
self.ts.set_table_service_properties,
table_properties)
def test_table_service_set_both(self):
table_properties = self.ts.get_table_service_properties()
table_properties.logging.retention_policy.enabled = True
table_properties.logging.retention_policy.days = 5
self.ts.set_table_service_properties(table_properties)
table_properties = self.ts.get_table_service_properties()
self.assertEqual(
True, table_properties.logging.retention_policy.enabled)
self.assertEqual(5, table_properties.logging.retention_policy.days)
#--Test cases for tables --------------------------------------------------
def test_create_table(self):
# Arrange
# Act
created = self.ts.create_table(self.table_name)
# Assert
self.assertTrue(created)
def test_create_table_fail_on_exist(self):
# Arrange
# Act
created = self.ts.create_table(self.table_name, True)
# Assert
self.assertTrue(created)
def test_create_table_with_already_existing_table(self):
# Arrange
# Act
created1 = self.ts.create_table(self.table_name)
created2 = self.ts.create_table(self.table_name)
# Assert
self.assertTrue(created1)
self.assertFalse(created2)
def test_create_table_with_already_existing_table_fail_on_exist(self):
# Arrange
# Act
created = self.ts.create_table(self.table_name)
with self.assertRaises(WindowsAzureError):
self.ts.create_table(self.table_name, True)
# Assert
self.assertTrue(created)
def test_query_tables(self):
# Arrange
self._create_table(self.table_name)
# Act
tables = self.ts.query_tables()
for table in tables:
pass
# Assert
tableNames = [x.name for x in tables]
self.assertGreaterEqual(len(tableNames), 1)
self.assertGreaterEqual(len(tables), 1)
self.assertIn(self.table_name, tableNames)
def test_query_tables_with_table_name(self):
# Arrange
self._create_table(self.table_name)
# Act
tables = self.ts.query_tables(self.table_name)
for table in tables:
pass
# Assert
self.assertEqual(len(tables), 1)
self.assertEqual(tables[0].name, self.table_name)
def test_query_tables_with_table_name_no_tables(self):
# Arrange
# Act
with self.assertRaises(WindowsAzureError):
self.ts.query_tables(self.table_name)
# Assert
def test_query_tables_with_top(self):
# Arrange
self.additional_table_names = [
self.table_name + suffix for suffix in 'abcd']
for name in self.additional_table_names:
self.ts.create_table(name)
# Act
tables = self.ts.query_tables(None, 3)
for table in tables:
pass
# Assert
self.assertEqual(len(tables), 3)
def test_query_tables_with_top_and_next_table_name(self):
# Arrange
self.additional_table_names = [
self.table_name + suffix for suffix in 'abcd']
for name in self.additional_table_names:
self.ts.create_table(name)
# Act
tables_set1 = self.ts.query_tables(None, 3)
tables_set2 = self.ts.query_tables(
None, 3, tables_set1.x_ms_continuation['NextTableName'])
# Assert
self.assertEqual(len(tables_set1), 3)
self.assertGreaterEqual(len(tables_set2), 1)
self.assertLessEqual(len(tables_set2), 3)
def test_delete_table_with_existing_table(self):
# Arrange
self._create_table(self.table_name)
# Act
deleted = self.ts.delete_table(self.table_name)
# Assert
self.assertTrue(deleted)
tables = self.ts.query_tables()
self.assertNamedItemNotInContainer(tables, self.table_name)
def test_delete_table_with_existing_table_fail_not_exist(self):
# Arrange
self._create_table(self.table_name)
# Act
deleted = self.ts.delete_table(self.table_name, True)
# Assert
self.assertTrue(deleted)
tables = self.ts.query_tables()
self.assertNamedItemNotInContainer(tables, self.table_name)
def test_delete_table_with_non_existing_table(self):
# Arrange
# Act
deleted = self.ts.delete_table(self.table_name)
# Assert
self.assertFalse(deleted)<|fim▁hole|> def test_delete_table_with_non_existing_table_fail_not_exist(self):
# Arrange
# Act
with self.assertRaises(WindowsAzureError):
self.ts.delete_table(self.table_name, True)
# Assert
#--Test cases for entities ------------------------------------------
def test_insert_entity_dictionary(self):
# Arrange
self._create_table(self.table_name)
# Act
dict = self._create_default_entity_dict('MyPartition', '1')
resp = self.ts.insert_entity(self.table_name, dict)
# Assert
self.assertIsNotNone(resp)
def test_insert_entity_class_instance(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = self._create_default_entity_class('MyPartition', '1')
resp = self.ts.insert_entity(self.table_name, entity)
# Assert
self.assertIsNotNone(resp)
def test_insert_entity_conflict(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.insert_entity(
self.table_name,
self._create_default_entity_dict('MyPartition', '1'))
# Assert
def test_get_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
resp = self.ts.get_entity(self.table_name, 'MyPartition', '1')
# Assert
self.assertEqual(resp.PartitionKey, 'MyPartition')
self.assertEqual(resp.RowKey, '1')
self._assert_default_entity(resp)
def test_get_entity_not_existing(self):
# Arrange
self._create_table(self.table_name)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.get_entity(self.table_name, 'MyPartition', '1')
# Assert
def test_get_entity_with_select(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
resp = self.ts.get_entity(
self.table_name, 'MyPartition', '1', 'age,sex')
# Assert
self.assertEqual(resp.age, 39)
self.assertEqual(resp.sex, 'male')
self.assertFalse(hasattr(resp, "birthday"))
self.assertFalse(hasattr(resp, "married"))
self.assertFalse(hasattr(resp, "deceased"))
def test_query_entities(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 2)
# Act
resp = self.ts.query_entities(self.table_name)
# Assert
self.assertEqual(len(resp), 2)
for entity in resp:
self.assertEqual(entity.PartitionKey, 'MyPartition')
self._assert_default_entity(entity)
self.assertEqual(resp[0].RowKey, '1')
self.assertEqual(resp[1].RowKey, '2')
def test_query_entities_with_filter(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 2)
self.ts.insert_entity(
self.table_name,
self._create_default_entity_dict('MyOtherPartition', '3'))
# Act
resp = self.ts.query_entities(
self.table_name, "PartitionKey eq 'MyPartition'")
# Assert
self.assertEqual(len(resp), 2)
for entity in resp:
self.assertEqual(entity.PartitionKey, 'MyPartition')
self._assert_default_entity(entity)
def test_query_entities_with_select(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 2)
# Act
resp = self.ts.query_entities(self.table_name, None, 'age,sex')
# Assert
self.assertEqual(len(resp), 2)
self.assertEqual(resp[0].age, 39)
self.assertEqual(resp[0].sex, 'male')
self.assertFalse(hasattr(resp[0], "birthday"))
self.assertFalse(hasattr(resp[0], "married"))
self.assertFalse(hasattr(resp[0], "deceased"))
def test_query_entities_with_top(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 3)
# Act
resp = self.ts.query_entities(self.table_name, None, None, 2)
# Assert
self.assertEqual(len(resp), 2)
def test_query_entities_with_top_and_next(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 5)
# Act
resp1 = self.ts.query_entities(self.table_name, None, None, 2)
resp2 = self.ts.query_entities(
self.table_name, None, None, 2,
resp1.x_ms_continuation['NextPartitionKey'],
resp1.x_ms_continuation['NextRowKey'])
resp3 = self.ts.query_entities(
self.table_name, None, None, 2,
resp2.x_ms_continuation['NextPartitionKey'],
resp2.x_ms_continuation['NextRowKey'])
# Assert
self.assertEqual(len(resp1), 2)
self.assertEqual(len(resp2), 2)
self.assertEqual(len(resp3), 1)
self.assertEqual(resp1[0].RowKey, '1')
self.assertEqual(resp1[1].RowKey, '2')
self.assertEqual(resp2[0].RowKey, '3')
self.assertEqual(resp2[1].RowKey, '4')
self.assertEqual(resp3[0].RowKey, '5')
def test_update_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.update_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_update_entity_with_if_matches(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.update_entity(
self.table_name,
'MyPartition', '1', sent_entity, if_match=entities[0].etag)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_update_entity_with_if_doesnt_match(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
with self.assertRaises(WindowsAzureError):
self.ts.update_entity(
self.table_name, 'MyPartition', '1', sent_entity,
if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
# Assert
def test_insert_or_merge_entity_with_existing_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.insert_or_merge_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_merged_entity(received_entity)
def test_insert_or_merge_entity_with_non_existing_entity(self):
# Arrange
self._create_table(self.table_name)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.insert_or_merge_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_insert_or_replace_entity_with_existing_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.insert_or_replace_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_insert_or_replace_entity_with_non_existing_entity(self):
# Arrange
self._create_table(self.table_name)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.insert_or_replace_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_merge_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.merge_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_merged_entity(received_entity)
def test_merge_entity_not_existing(self):
# Arrange
self._create_table(self.table_name)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
with self.assertRaises(WindowsAzureError):
self.ts.merge_entity(
self.table_name, 'MyPartition', '1', sent_entity)
# Assert
def test_merge_entity_with_if_matches(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
resp = self.ts.merge_entity(
self.table_name, 'MyPartition', '1',
sent_entity, if_match=entities[0].etag)
# Assert
self.assertIsNotNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_merged_entity(received_entity)
def test_merge_entity_with_if_doesnt_match(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
with self.assertRaises(WindowsAzureError):
self.ts.merge_entity(
self.table_name, 'MyPartition', '1', sent_entity,
if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
# Assert
def test_delete_entity(self):
# Arrange
self._create_table_with_default_entities(self.table_name, 1)
# Act
resp = self.ts.delete_entity(self.table_name, 'MyPartition', '1')
# Assert
self.assertIsNone(resp)
with self.assertRaises(WindowsAzureError):
self.ts.get_entity(self.table_name, 'MyPartition', '1')
def test_delete_entity_not_existing(self):
# Arrange
self._create_table(self.table_name)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.delete_entity(self.table_name, 'MyPartition', '1')
# Assert
def test_delete_entity_with_if_matches(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
resp = self.ts.delete_entity(
self.table_name, 'MyPartition', '1', if_match=entities[0].etag)
# Assert
self.assertIsNone(resp)
with self.assertRaises(WindowsAzureError):
self.ts.get_entity(self.table_name, 'MyPartition', '1')
def test_delete_entity_with_if_doesnt_match(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.delete_entity(
self.table_name, 'MyPartition', '1',
if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
# Assert
    #--Test cases for filters -------------------------------------------
def test_with_filter_single(self):
called = []
def my_filter(request, next):
called.append(True)
return next(request)
tc = self.ts.with_filter(my_filter)
tc.create_table(self.table_name)
self.assertTrue(called)
del called[:]
tc.delete_table(self.table_name)
self.assertTrue(called)
del called[:]
def test_with_filter_chained(self):
called = []
def filter_a(request, next):
called.append('a')
return next(request)
def filter_b(request, next):
called.append('b')
return next(request)
tc = self.ts.with_filter(filter_a).with_filter(filter_b)
tc.create_table(self.table_name)
self.assertEqual(called, ['b', 'a'])
tc.delete_table(self.table_name)
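    #--Test cases for batch ----------------------------------------------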
def test_batch_insert(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_insert'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.begin_batch()
self.ts.insert_entity(self.table_name, entity)
self.ts.commit_batch()
# Assert
result = self.ts.get_entity(self.table_name, '001', 'batch_insert')
self.assertIsNotNone(result)
def test_batch_update(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_update'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.insert_entity(self.table_name, entity)
entity = self.ts.get_entity(self.table_name, '001', 'batch_update')
self.assertEqual(3, entity.test3)
entity.test2 = 'value1'
self.ts.begin_batch()
self.ts.update_entity(self.table_name, '001', 'batch_update', entity)
self.ts.commit_batch()
entity = self.ts.get_entity(self.table_name, '001', 'batch_update')
# Assert
self.assertEqual('value1', entity.test2)
def test_batch_merge(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_merge'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.insert_entity(self.table_name, entity)
entity = self.ts.get_entity(self.table_name, '001', 'batch_merge')
self.assertEqual(3, entity.test3)
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_merge'
entity.test2 = 'value1'
self.ts.begin_batch()
self.ts.merge_entity(self.table_name, '001', 'batch_merge', entity)
self.ts.commit_batch()
entity = self.ts.get_entity(self.table_name, '001', 'batch_merge')
# Assert
self.assertEqual('value1', entity.test2)
self.assertEqual(1234567890, entity.test4)
def test_batch_update_if_match(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 1)
# Act
sent_entity = self._create_updated_entity_dict('MyPartition', '1')
self.ts.begin_batch()
resp = self.ts.update_entity(
self.table_name,
'MyPartition', '1', sent_entity, if_match=entities[0].etag)
self.ts.commit_batch()
# Assert
self.assertIsNone(resp)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_updated_entity(received_entity)
def test_batch_update_if_doesnt_match(self):
# Arrange
entities = self._create_table_with_default_entities(self.table_name, 2)
# Act
sent_entity1 = self._create_updated_entity_dict('MyPartition', '1')
sent_entity2 = self._create_updated_entity_dict('MyPartition', '2')
self.ts.begin_batch()
self.ts.update_entity(
self.table_name, 'MyPartition', '1', sent_entity1,
if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
self.ts.update_entity(
self.table_name, 'MyPartition', '2', sent_entity2)
try:
self.ts.commit_batch()
except WindowsAzureBatchOperationError as error:
self.assertEqual(error.code, 'UpdateConditionNotSatisfied')
self.assertTrue(str(error).startswith('0:The update condition specified in the request was not satisfied.'))
else:
self.fail('WindowsAzureBatchOperationError was expected')
# Assert
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '1')
self._assert_default_entity(received_entity)
received_entity = self.ts.get_entity(
self.table_name, 'MyPartition', '2')
self._assert_default_entity(received_entity)
def test_batch_insert_replace(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_insert_replace'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.begin_batch()
self.ts.insert_or_replace_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
self.ts.commit_batch()
entity = self.ts.get_entity(
self.table_name, '001', 'batch_insert_replace')
# Assert
self.assertIsNotNone(entity)
self.assertEqual('value', entity.test2)
self.assertEqual(1234567890, entity.test4)
def test_batch_insert_merge(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_insert_merge'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.begin_batch()
self.ts.insert_or_merge_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
self.ts.commit_batch()
entity = self.ts.get_entity(
self.table_name, '001', 'batch_insert_merge')
# Assert
self.assertIsNotNone(entity)
self.assertEqual('value', entity.test2)
self.assertEqual(1234567890, entity.test4)
def test_batch_delete(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '001'
entity.RowKey = 'batch_delete'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.insert_entity(self.table_name, entity)
entity = self.ts.get_entity(self.table_name, '001', 'batch_delete')
#self.assertEqual(3, entity.test3)
self.ts.begin_batch()
self.ts.delete_entity(self.table_name, '001', 'batch_delete')
self.ts.commit_batch()
def test_batch_inserts(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = 'batch_inserts'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
self.ts.begin_batch()
for i in range(100):
entity.RowKey = str(i)
self.ts.insert_entity(self.table_name, entity)
self.ts.commit_batch()
entities = self.ts.query_entities(
self.table_name, "PartitionKey eq 'batch_inserts'", '')
# Assert
self.assertIsNotNone(entities)
self.assertEqual(100, len(entities))
def test_batch_all_operations_together(self):
# Arrange
self._create_table(self.table_name)
# Act
entity = Entity()
entity.PartitionKey = '003'
entity.RowKey = 'batch_all_operations_together-1'
entity.test = EntityProperty('Edm.Boolean', 'true')
entity.test2 = 'value'
entity.test3 = 3
entity.test4 = EntityProperty('Edm.Int64', '1234567890')
entity.test5 = datetime.utcnow()
self.ts.insert_entity(self.table_name, entity)
entity.RowKey = 'batch_all_operations_together-2'
self.ts.insert_entity(self.table_name, entity)
entity.RowKey = 'batch_all_operations_together-3'
self.ts.insert_entity(self.table_name, entity)
entity.RowKey = 'batch_all_operations_together-4'
self.ts.insert_entity(self.table_name, entity)
self.ts.begin_batch()
entity.RowKey = 'batch_all_operations_together'
self.ts.insert_entity(self.table_name, entity)
entity.RowKey = 'batch_all_operations_together-1'
self.ts.delete_entity(
self.table_name, entity.PartitionKey, entity.RowKey)
entity.RowKey = 'batch_all_operations_together-2'
entity.test3 = 10
self.ts.update_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
entity.RowKey = 'batch_all_operations_together-3'
entity.test3 = 100
self.ts.merge_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
entity.RowKey = 'batch_all_operations_together-4'
entity.test3 = 10
self.ts.insert_or_replace_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
entity.RowKey = 'batch_all_operations_together-5'
self.ts.insert_or_merge_entity(
self.table_name, entity.PartitionKey, entity.RowKey, entity)
self.ts.commit_batch()
# Assert
entities = self.ts.query_entities(
self.table_name, "PartitionKey eq '003'", '')
self.assertEqual(5, len(entities))
def test_batch_same_row_operations_fail(self):
# Arrange
self._create_table(self.table_name)
entity = self._create_default_entity_dict('001', 'batch_negative_1')
self.ts.insert_entity(self.table_name, entity)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.begin_batch()
entity = self._create_updated_entity_dict(
'001', 'batch_negative_1')
self.ts.update_entity(
self.table_name,
entity['PartitionKey'],
entity['RowKey'], entity)
entity = self._create_default_entity_dict(
'001', 'batch_negative_1')
self.ts.merge_entity(
self.table_name,
entity['PartitionKey'],
entity['RowKey'], entity)
self.ts.cancel_batch()
# Assert
def test_batch_different_partition_operations_fail(self):
# Arrange
self._create_table(self.table_name)
entity = self._create_default_entity_dict('001', 'batch_negative_1')
self.ts.insert_entity(self.table_name, entity)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.begin_batch()
entity = self._create_updated_entity_dict(
'001', 'batch_negative_1')
self.ts.update_entity(
self.table_name, entity['PartitionKey'], entity['RowKey'],
entity)
entity = self._create_default_entity_dict(
'002', 'batch_negative_1')
self.ts.insert_entity(self.table_name, entity)
self.ts.cancel_batch()
# Assert
def test_batch_different_table_operations_fail(self):
# Arrange
other_table_name = self.table_name + 'other'
self.additional_table_names = [other_table_name]
self._create_table(self.table_name)
self._create_table(other_table_name)
# Act
with self.assertRaises(WindowsAzureError):
self.ts.begin_batch()
entity = self._create_default_entity_dict(
'001', 'batch_negative_1')
self.ts.insert_entity(self.table_name, entity)
entity = self._create_default_entity_dict(
'001', 'batch_negative_2')
self.ts.insert_entity(other_table_name, entity)
self.ts.cancel_batch()
def test_unicode_property_value(self):
''' regression test for github issue #57'''
# Act
self._create_table(self.table_name)
self.ts.insert_entity(
self.table_name,
{'PartitionKey': 'test', 'RowKey': 'test1', 'Description': u'ꀕ'})
self.ts.insert_entity(
self.table_name,
{'PartitionKey': 'test', 'RowKey': 'test2', 'Description': 'ꀕ'})
resp = self.ts.query_entities(
self.table_name, "PartitionKey eq 'test'")
# Assert
self.assertEqual(len(resp), 2)
self.assertEqual(resp[0].Description, u'ꀕ')
self.assertEqual(resp[1].Description, u'ꀕ')
def test_unicode_property_name(self):
# Act
self._create_table(self.table_name)
self.ts.insert_entity(
self.table_name,
{'PartitionKey': 'test', 'RowKey': 'test1', u'啊齄丂狛狜': u'ꀕ'})
self.ts.insert_entity(
self.table_name,
{'PartitionKey': 'test', 'RowKey': 'test2', u'啊齄丂狛狜': 'hello'})
resp = self.ts.query_entities(
self.table_name, "PartitionKey eq 'test'")
# Assert
self.assertEqual(len(resp), 2)
self.assertEqual(resp[0].__dict__[u'啊齄丂狛狜'], u'ꀕ')
self.assertEqual(resp[1].__dict__[u'啊齄丂狛狜'], u'hello')
def test_unicode_create_table_unicode_name(self):
# Arrange
self.table_name = self.table_name + u'啊齄丂狛狜'
# Act
with self.assertRaises(WindowsAzureError):
# not supported - table name must be alphanumeric, lowercase
self.ts.create_table(self.table_name)
# Assert
def test_empty_and_spaces_property_value(self):
# Act
self._create_table(self.table_name)
self.ts.insert_entity(
self.table_name,
{
'PartitionKey': 'test',
'RowKey': 'test1',
'EmptyByte': '',
'EmptyUnicode': u'',
'SpacesOnlyByte': ' ',
'SpacesOnlyUnicode': u' ',
'SpacesBeforeByte': ' Text',
'SpacesBeforeUnicode': u' Text',
'SpacesAfterByte': 'Text ',
'SpacesAfterUnicode': u'Text ',
'SpacesBeforeAndAfterByte': ' Text ',
'SpacesBeforeAndAfterUnicode': u' Text ',
})
resp = self.ts.get_entity(self.table_name, 'test', 'test1')
# Assert
self.assertIsNotNone(resp)
self.assertEqual(resp.EmptyByte, '')
self.assertEqual(resp.EmptyUnicode, u'')
self.assertEqual(resp.SpacesOnlyByte, ' ')
self.assertEqual(resp.SpacesOnlyUnicode, u' ')
self.assertEqual(resp.SpacesBeforeByte, ' Text')
self.assertEqual(resp.SpacesBeforeUnicode, u' Text')
self.assertEqual(resp.SpacesAfterByte, 'Text ')
self.assertEqual(resp.SpacesAfterUnicode, u'Text ')
self.assertEqual(resp.SpacesBeforeAndAfterByte, ' Text ')
self.assertEqual(resp.SpacesBeforeAndAfterUnicode, u' Text ')
def test_none_property_value(self):
# Act
self._create_table(self.table_name)
self.ts.insert_entity(
self.table_name,
{
'PartitionKey': 'test',
'RowKey': 'test1',
'NoneValue': None,
})
resp = self.ts.get_entity(self.table_name, 'test', 'test1')
# Assert
self.assertIsNotNone(resp)
self.assertFalse(hasattr(resp, 'NoneValue'))
def test_binary_property_value(self):
# Act
binary_data = b'\x01\x02\x03\x04\x05\x06\x07\x08\t\n'
self._create_table(self.table_name)
self.ts.insert_entity(
self.table_name,
{
'PartitionKey': 'test',
'RowKey': 'test1',
'binary': EntityProperty('Edm.Binary', binary_data)
})
resp = self.ts.get_entity(self.table_name, 'test', 'test1')
# Assert
self.assertIsNotNone(resp)
self.assertEqual(resp.binary.type, 'Edm.Binary')
self.assertEqual(resp.binary.value, binary_data)
#------------------------------------------------------------------------------
if __name__ == '__main__':
    unittest.main()
<|file_name|>getUser.js<|end_file_name|>
import { createReturnPromise } from '../helpers'

function getUserFactory() {
  function getUser({ firebase, path }) {
    return createReturnPromise(firebase.getUser(), path)
  }
  return getUser
}

export default getUserFactory
<|file_name|>getMachinesSpec.js<|end_file_name|>
// Test get machines
var fs = require('fs');
try {
fs.accessSync('testdb.json', fs.F_OK);
fs.unlinkSync('testdb.json');
    // Remove the stale test database left over from a previous run
} catch (e) {
// It isn't accessible
console.log(e);
}
var server = require('../server.js').createServer(8000, 'testdb.json');
var addTestMachine = function(name) {
var newMachine = {};
newMachine.name = name;
newMachine.type = 'washer';
newMachine.queue = [];
newMachine.operational = true;
newMachine.problemMessage = "";
newMachine.activeJob = {};
server.db('machines').push(newMachine);
}
describe('ALL THE TESTS LOL', function() {
it('should add a machine', function(done) {
var options = {
method: 'POST',
url: '/machines',
payload: {
name: 'test1',
type: 'washer'
}
}
server.inject(options, function(response) {
expect(response.statusCode).toBe(200);
var p = JSON.parse(response.payload);
expect(p.name).toBe(options.payload.name);
expect(p.type).toBe(options.payload.type);
done();
})
});
it('should get all machines', function(done) {
var name = 'testMachine';
var name2 = 'anotherMachine';
addTestMachine(name);
addTestMachine(name2);
        var options = {
            method: 'GET',
url: '/machines',
}
server.inject(options, function(res) {
expect(res.statusCode).toBe(200);
var p = JSON.parse(res.payload);
            // check p contains testMachine and anotherMachine
done();
});
});
it('should get one machine', function(done) {
var name = 'sweetTestMachine';
addTestMachine(name);
var options = {
method: 'GET',
url: '/machines/'+name
};
server.inject(options, function(res) {
expect(res.statusCode).toBe(200);
var p = JSON.parse(res.payload);
expect(p.name).toBe(name);
done();
});
});
    it('should add a job to the queue, then the queue should have the person', function(done) {
addTestMachine('queueTest');
var addOptions = {
method: 'POST',
url: '/machines/queueTest/queue',
payload: {
user: '[email protected]',
pin: 1234,
minutes: 50
}
};
server.inject(addOptions, function(res) {
expect(res.statusCode).toBe(200);
var p = JSON.parse(res.payload);
expect(p.name).toBe('queueTest');
var getQueue = {
method: 'GET',
url: '/machines/queueTest/queue'
};
server.inject(getQueue, function(newRes) {
expect(newRes.statusCode).toBe(200);
var q = JSON.parse(newRes.payload);
console.log(q);
expect(q.queue[0].user).toBe('[email protected]');
expect(q.queue[0].pin).toBe(1234);
done();
})
})
});
it('should delete a job from the queue', function(done) {
addTestMachine('anotherQueue');
var addOptions = {
method: 'POST',
url: '/machines/anotherQueue/queue',
payload: {
user: '[email protected]',
pin: 1235,
minutes: 50
}
};
server.inject(addOptions, function(res) {
var deleteOptions = addOptions;
deleteOptions.url = '/machines/anotherQueue/queue/delete';
deleteOptions.payload = {
user: addOptions.payload.user,
pin: addOptions.payload.pin
}
server.inject(deleteOptions, function(r) {
expect(r.statusCode).toBe(200);
console.log(JSON.parse(r.payload));
done();
})
})
})
it('should add a job to the active queue', function(done) {
addTestMachine('activeQueue');
var addOptions = {
method: 'POST',
url: '/machines/activeQueue/queue',
payload: {
user: '[email protected]',
pin: 1235,
minutes: 50
}
};
server.inject(addOptions, function(r) {
var runJobOptions = {
method: 'POST',
url: '/machines/activeQueue/queue/start',
payload: {
command: 'next',
pin: 1235,
minutes: 0
}
};
server.inject(runJobOptions, function(res) {
expect(res.statusCode).toBe(200);
done();
})
})
});
});
<|file_name|>IBeaconConstants.java<|end_file_name|>
package uk.co.alt236.bluetoothlelib.device.beacon.ibeacon;

/**
 * Constants used when parsing iBeacon advertisements.
 */
public class IBeaconConstants {
    public static final byte[] MANUFACTURER_DATA_IBEACON_PREFIX = {0x4C, 0x00, 0x02, 0x15};
}
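// Editor's sketch (hypothetical helper, not part of the original library):
// matching scan-record manufacturer data against the prefix above.
//
//     static boolean startsWithIBeaconPrefix(byte[] data) {
//         byte[] prefix = IBeaconConstants.MANUFACTURER_DATA_IBEACON_PREFIX;
//         if (data == null || data.length < prefix.length) return false;
//         for (int i = 0; i < prefix.length; i++) {
//             if (data[i] != prefix[i]) return false;
//         }
//         return true;
//     }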
<|file_name|>gui.py<|end_file_name|>
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './QtGUI.ui'
#
# Created: Sat Oct 11 18:25:23 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from gui.LrBase.main import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(770, 604)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setStyleSheet(_fromUtf8(""))
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.Demo = QtGui.QWidget()
self.Demo.setObjectName(_fromUtf8("Demo"))
self.gridLayout = QtGui.QGridLayout(self.Demo)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(self.Demo)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.textBrowser = QtGui.QTextBrowser(self.Demo)
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.verticalLayout.addWidget(self.textBrowser)
self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 2)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.label_2 = QtGui.QLabel(self.Demo)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout_2.addWidget(self.label_2)
self.textBrowser_2 = QtGui.QTextBrowser(self.Demo)
self.textBrowser_2.setObjectName(_fromUtf8("textBrowser_2"))
self.verticalLayout_2.addWidget(self.textBrowser_2)
self.gridLayout.addLayout(self.verticalLayout_2, 1, 0, 1, 2)
self.comboBox = QtGui.QComboBox(self.Demo)
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox, 2, 0, 1, 1)
self.pushButton = QtGui.QPushButton(self.Demo)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.gridLayout.addWidget(self.pushButton, 2, 1, 1, 1)
self.tabWidget.addTab(self.Demo, _fromUtf8(""))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.gridLayout_2 = QtGui.QGridLayout(self.tab_4)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.textBrowser_3 = QtGui.QTextBrowser(self.tab_4)
self.textBrowser_3.setObjectName(_fromUtf8("textBrowser_3"))
self.gridLayout_2.addWidget(self.textBrowser_3, 0, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_3 = QtGui.QLabel(self.tab_4)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_2.addWidget(self.label_3)
self.pushButton_2 = QtGui.QPushButton(self.tab_4)
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.horizontalLayout_2.addWidget(self.pushButton_2)
self.gridLayout_2.addLayout(self.horizontalLayout_2, 0, 1, 1, 1)
self.tabWidget.addTab(self.tab_4, _fromUtf8(""))
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName(_fromUtf8("tab_5"))
self.tabWidget.addTab(self.tab_5, _fromUtf8(""))
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.tabWidget.addTab(self.tab_3, _fromUtf8(""))
self.horizontalLayout.addWidget(self.tabWidget)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 770, 25))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
QtCore.QObject.connect(self.comboBox, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(int)")), self.textBrowser.reload)
QtCore.QObject.connect(self.comboBox, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(int)")), self.textBrowser_2.reload)
QtCore.QObject.connect(self.pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.statusbar.show)
QtCore.QObject.connect(self.pushButton_2, QtCore.SIGNAL(_fromUtf8("clicked()")), compileLr.compileMEAM_REAX)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Lammps Unofficial GUI", None))
self.label.setText(_translate("MainWindow", "Script Preview", None))
self.label_2.setText(_translate("MainWindow", "Notes", None))
self.comboBox.setItemText(0, _translate("MainWindow", "Lattice Constant", None))
self.comboBox.setItemText(1, _translate("MainWindow", "Elastic", None))
self.pushButton.setText(_translate("MainWindow", "Run", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.Demo), _translate("MainWindow", "Demo", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Homework", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Project", None))
self.label_3.setText(_translate("MainWindow", "Minimum", None))
self.pushButton_2.setText(_translate("MainWindow", "Compile", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("MainWindow", "Compile", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_5), _translate("MainWindow", "Preferences", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("MainWindow", "About", None))
if __name__ == "__main__":
    #mainLr.main()
    import sys
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())
<|file_name|>init.js<|end_file_name|>
tinyMCE.init({
        mode : 'textareas',
        theme : "advanced",
        theme_advanced_buttons1 : "save,newdocument,|,bold,italic,underline,strikethrough,|,justifyleft,justifycenter,justifyright,justifyfull,|,styleselect,formatselect,fontselect,fontsizeselect",
        theme_advanced_buttons2 : "cut,copy,paste,pastetext,pasteword,|,search,replace,|,bullist,numlist,|,outdent,indent,blockquote,|,undo,redo,|,link,unlink,anchor,image,cleanup,help,code,|,insertdate,inserttime,preview,|,forecolor,backcolor",
});
<|file_name|>ConfigFile.cpp<|end_file_name|>
/*
--------------------------------------------------------------------------------
Based on class found at: http://ai.stanford.edu/~gal/Code/FindMotifs/
--------------------------------------------------------------------------------
*/
#include "Platform/StableHeaders.h"
#include "Util/Helper/ConfigFile.h"
//------------------------------------------------------------------------------
namespace Dangine {
//------------------------------------------------------------------------------
Config::Config(String filename, String delimiter, String comment, String sentry)
: myDelimiter(delimiter)
, myComment(comment)
, mySentry(sentry)
{
// Construct a Config, getting keys and values from given file
std::ifstream in(filename.c_str());
if (!in) throw file_not_found(filename);
in >> (*this);
}
//------------------------------------------------------------------------------
Config::Config()
: myDelimiter(String(1,'='))
, myComment(String(1,'#'))
{
// Construct a Config without a file; empty
}
//------------------------------------------------------------------------------
void Config::remove(const String& key)
{
// Remove key and its value
    myContents.erase(key); // std::map::erase(key) is safe when the key is absent
return;
}
//------------------------------------------------------------------------------
bool Config::keyExists(const String& key) const
{
// Indicate whether key is found
mapci p = myContents.find(key);
return (p != myContents.end());
}
//------------------------------------------------------------------------------
/* static */
void Config::trim(String& s)
{
// Remove leading and trailing whitespace
static const char whitespace[] = " \n\t\v\r\f";
s.erase(0, s.find_first_not_of(whitespace));
s.erase(s.find_last_not_of(whitespace) + 1U);
}
//------------------------------------------------------------------------------
std::ostream& operator<<(std::ostream& os, const Config& cf)
{
// Save a Config to os
for (Config::mapci p = cf.myContents.begin();
p != cf.myContents.end();
++p)
{
os << p->first << " " << cf.myDelimiter << " ";
os << p->second << std::endl;
}
return os;
}
//------------------------------------------------------------------------------
std::istream& operator>>(std::istream& is, Config& cf)
{
// Load a Config from is
// Read in keys and values, keeping internal whitespace
typedef String::size_type pos;
const String& delim = cf.myDelimiter; // separator
const String& comm = cf.myComment; // comment
const String& sentry = cf.mySentry; // end of file sentry
const pos skip = delim.length(); // length of separator
String nextline = ""; // might need to read ahead to see where value ends
while (is || nextline.length() > 0)
{
// Read an entire line at a time
String line;
if (nextline.length() > 0)
{
line = nextline; // we read ahead; use it now
nextline = "";
}
else
{
std::getline(is, line);
}
// Ignore comments
line = line.substr(0, line.find(comm));
// Check for end of file sentry
if (sentry != "" && line.find(sentry) != String::npos) return is;
// Parse the line if it contains a delimiter
pos delimPos = line.find(delim);
if (delimPos < String::npos)
{
// Extract the key
String key = line.substr(0, delimPos);
line.replace(0, delimPos+skip, "");
// See if value continues on the next line
// Stop at blank line, next line with a key, end of stream,
// or end of file sentry
bool terminate = false;
while(!terminate && is)
{
std::getline(is, nextline);
terminate = true;
String nlcopy = nextline;
Config::trim(nlcopy);
if (nlcopy == "") continue;
nextline = nextline.substr(0, nextline.find(comm));
if (nextline.find(delim) != String::npos)
continue;
if (sentry != "" && nextline.find(sentry) != String::npos)
continue;
nlcopy = nextline;
Config::trim(nlcopy);
if (nlcopy != "") line += "\n";
line += nextline;
terminate = false;
}
// Store key and value
Config::trim(key);
Config::trim(line);
cf.myContents[key] = line; // overwrites if key is repeated
}
}

    return is;
}
//------------------------------------------------------------------------------
} // namespace Dangine
//------------------------------------------------------------------------------
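// Editor's usage sketch for the Config class above (assumes a "config.cfg"
// file on disk; only members declared in this translation unit are used):
//
//     Dangine::Config cfg("config.cfg");   // parses "key = value" lines
//     if (cfg.keyExists("fullscreen"))
//         cfg.remove("fullscreen");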
<|file_name|>grasp_demo.py<|end_file_name|>
#!/usr/bin/env python
import rospy
import actionlib
from play_motion_msgs.msg import PlayMotionAction, PlayMotionGoal
from sensor_msgs.msg import JointState
if __name__ == "__main__":
rospy.init_node("grasp_demo")
rospy.loginfo("Waiting for play_motion...")
client = actionlib.SimpleActionClient("/play_motion", PlayMotionAction)
client.wait_for_server()
rospy.loginfo("...connected.")
rospy.wait_for_message("/joint_states", JointState)
rospy.sleep(3.0)
rospy.loginfo("Grasping demo...")
goal = PlayMotionGoal()
goal.motion_name = 'home'
goal.skip_planning = True
client.send_goal(goal)
client.wait_for_result(rospy.Duration(15.0))
goal.motion_name = 'look_at_object_demo'
goal.skip_planning = True
client.send_goal(goal)
client.wait_for_result(rospy.Duration(5.0))
goal.motion_name = 'pregrasp_demo'
goal.skip_planning = True
client.send_goal(goal)
client.wait_for_result(rospy.Duration(40.0))
goal.motion_name = 'grasp_demo'
goal.skip_planning = True
client.send_goal(goal)
client.wait_for_result(rospy.Duration(80.0))
goal.motion_name = 'pick_demo'
goal.skip_planning = True
client.send_goal(goal)
client.wait_for_result(rospy.Duration(40.0))
rospy.loginfo("Grasping demo OK.")<|fim▁end|> | from play_motion_msgs.msg import PlayMotionAction, PlayMotionGoal
from sensor_msgs.msg import JointState |
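# Editor's note (sketch, not part of the original script): each goal block
# above repeats the same pattern, so a small helper could keep it declarative:
#
#     def play(name, timeout):
#         goal = PlayMotionGoal(motion_name=name, skip_planning=True)
#         client.send_goal(goal)
#         client.wait_for_result(rospy.Duration(timeout))
#
#     for name, t in [("home", 15.0), ("look_at_object_demo", 5.0),
#                     ("pregrasp_demo", 40.0), ("grasp_demo", 80.0),
#                     ("pick_demo", 40.0)]:
#         play(name, t)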
<|file_name|>template.go<|end_file_name|>
/*
Copyright © 2020 Henry Huang <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package errors
import (
"bytes"
"fmt"
"html/template"
"time"
"github.com/iTrellis/common/encryption/hash"
)
const (
errorcodeParseTemplate = 1
errorcodeExecuteTemplate = 2
)
// ErrorCodeTmpl error code template
type ErrorCodeTmpl struct {
namespace string
	code      uint64
	message   string
}
// Params template params
type Params map[string]interface{}
var tmplDefined = make(map[string]bool)
// TN returns a new error code template
func TN(namespace string, code uint64, message string) *ErrorCodeTmpl {
eKey := fmt.Sprintf("%s:%d", namespace, code)
if tmplDefined[eKey] {
panic(fmt.Errorf("error code (%s) is already exists", eKey))
}
tmplDefined[eKey] = true
tmpl := &ErrorCodeTmpl{
namespace: namespace,
code: code,
message: message,
}
return tmpl
}
// New ErrorCodeTmpl new error code by template
func (p *ErrorCodeTmpl) New(v ...Params) ErrorCode {
params := Params{}
if len(v) != 0 {
for _, v := range v {
for k, param := range v {
params[k] = param
}
}
}
eCode := &errorCode{
code: p.code,
stackTrace: callersDeepth(5),
context: make(map[string]interface{}),
}
errID := hash.NewCRCIEEE().Sum(fmt.Sprintf("%s.%d.%s.%d",
p.namespace, p.code, p.message, time.Now().UnixNano()))
t, e := template.New(genErrorCodeKey(p.namespace, p.code)).Parse(p.message)
if e != nil {
eCode.code = errorcodeParseTemplate
eCode.err = new(p.namespace, errID, fmt.Sprintf(
"parser template error, namespace: %s, code: %d, error: %s",
p.namespace, p.code, e.Error()))
return eCode
}
var buf bytes.Buffer
if e := t.Execute(&buf, params); e != nil {
eCode.code = errorcodeExecuteTemplate
eCode.err = new(p.namespace, errID, fmt.Sprintf(
"execute template error, namespace: %s code: %d, error: %s",
p.message, p.code, e.Error()))
return eCode
}
eCode.err = new(p.namespace, errID, buf.String())
return eCode
}
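// Editor's usage sketch for TN/New above (the namespace, code and template
// body are illustrative; ErrorCode comes from this package):
//
//	var ErrNotFound = TN("myapp", 10001, "object {{.name}} not found")
//
//	ec := ErrNotFound.New(Params{"name": "user-42"})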
<|file_name|>setup.py<|end_file_name|>
#!/usr/bin/python
from distutils.core import setup
setup(
name='snapboard',
version='0.2.1',
author='Bo Shi',
maintainer='SNAPboard developers',
maintainer_email='[email protected]',
url='http://code.google.com/p/snapboard/',
description='Bulletin board application for Django.',
    long_description='''SNAPboard is a forum/bulletin board application based on the Django web
framework. It integrates easily in any Django project.
Among its features are:
* Editable posts with all revisions publicly available
* Messages posted within threads can be made visible only to selected
users
* BBCode, Markdown and Textile supported for post formatting
* BBCode toolbar
* Multiple forums with four types of permissions
* Forum permissions can be assigned to custom groups of users
* Group administration can be delegated to end users on a per-group basis
* Moderators for each forum
* User preferences
* Watched topics
* Abuse reports
* User and IP address bans that don't automatically spread to other Django
applications within the project
* i18n hooks to create your own translations
* Included translations: French, Russian
SNAPboard requires Django 1.0.''',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: New BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications :: BBS',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
],
packages=['snapboard',],
package_dir={'snapboard': 'snapboard'},
package_data={'snapboard': [
'media/*/*.*',
'media/*/*/*.*',
'templates/*.*',
'templates/snapboard/*.*',
        'templates/notification/*.*',
        'templates/notification/*/*.*',
    ]},
)

# vim: ai ts=4 sts=4 et sw=4
<|file_name|>CardDef.ts<|end_file_name|>
import {CardClass, CardSet, CardType, MultiClassGroup, Race, Rarity} from "./Enums";
import {cleanEnum} from "./helpers";
export default class CardDef {
public attack: number;
public armor: number;
public cardClass: CardClass;
public cardSet: CardSet;
public collectionText: string;
public cost: number;
public costsHealth: boolean;
public elite: boolean;
public health: number;
public hideStats: boolean;
public id: string;
public name: string;
public multiClassGroup: MultiClassGroup;
public rarity: Rarity;
public race: Race;
public silenced: boolean;
public text: string;
public type: CardType;
constructor(props: any) {
this.attack = props.attack || 0;
this.armor = props.armor || 0;
this.cardClass = cleanEnum(props.cardClass, CardClass) as CardClass;
this.cardSet = cleanEnum(props.set, CardSet) as CardSet;
this.cost = props.cost || 0;
this.costsHealth = props.costsHealth || false;
        this.elite = props.elite || false;
        this.health = props.health || 0;
        this.hideStats = props.hideStats || false;
this.multiClassGroup = cleanEnum(props.multiClassGroup, MultiClassGroup) as MultiClassGroup;
this.name = props.name || "";
this.race = cleanEnum(props.race, Race) as Race;
this.rarity = cleanEnum(props.rarity, Rarity) as Rarity;
this.silenced = props.silenced || false;
this.type = cleanEnum(props.type, CardType) as CardType;
if (this.type === CardType.WEAPON && props.durability) {
// Weapons alias health to durability
this.health = props.durability;
} else if (this.type === CardType.HERO && props.armor) {
// Hero health gem is Armor
this.health = props.armor;
}
this.collectionText = props.collectionText || "";
this.text = props.text || "";
}
}
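// Editor's usage sketch (illustrative props object, not from the project):
//
//     const axe = new CardDef({name: "Fiery War Axe", type: "WEAPON",
//                              cost: 3, attack: 3, durability: 2});
//     // axe.health === 2: weapons alias health to durability (see above)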
<|file_name|>dns.py<|end_file_name|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 David Vavra ([email protected])
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from yapsy.IPlugin import IPlugin
class DNS(IPlugin):
def __init__(self):
self.hosts = {}
def addHost(self,id,host):
self.hosts[id] = host
def parseContext(self,context,*args):
for dns in context.iter('dns_host'):
for dnsServer in dns.iter('dns_host'):
                self.addHost(dnsServer.attrib['id'], dnsServer.text)
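# Editor's usage sketch (hypothetical XML, inferred from the iter('dns_host')
# calls above; not from the original project):
#
#     import xml.etree.ElementTree as ET
#     context = ET.fromstring(
#         "<config><dns_host id='primary'>10.0.0.53</dns_host></config>")
#     plugin = DNS()
#     plugin.parseContext(context)
#     print(plugin.hosts)  # {'primary': '10.0.0.53'}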
<|file_name|>env.go<|end_file_name|>
package infrastructure
import (
"errors"
"os"
"strings"
"github.com/fabiofalci/flagrc"
"github.com/mitchellh/go-homedir"
)
const SCONSIFY_CONF_LOCATION = "/.sconsify"
func GetCacheLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/cache"
}
return ""
}
func DeleteCache(cacheLocation string) error {
if strings.HasSuffix(cacheLocation, SCONSIFY_CONF_LOCATION+"/cache") {
return os.RemoveAll(cacheLocation)
}
return errors.New("Invalid cache location: " + cacheLocation)
}
func GetLogFileLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/sconsify.log"
}
return ""
}
func GetStateFileLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/state.json"
}
return ""
}
func GetWebApiCacheFileLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/web-api-cache.json"
}
return ""<|fim▁hole|>}
func GetWebApiTokenLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/web-api-token.json"
}
return ""
}
func GetKeyFunctionsFileLocation() string {
if basePath := getConfLocation(); basePath != "" {
return basePath + "/key-functions.json"
}
return ""
}
func SaveFile(fileLocation string, content []byte) {
file, err := os.OpenFile(fileLocation, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
if err == nil {
defer file.Close()
file.Write(content)
}
}
func getConfLocation() string {
if dir, err := homedir.Dir(); err == nil {
if dir, err = homedir.Expand(dir); err == nil && dir != "" {
return dir + SCONSIFY_CONF_LOCATION
}
}
return ""
}
func ProcessSconsifyrc() {
if basePath := getConfLocation(); basePath != "" {
flagrc.ProcessFlagrc(basePath + "/sconsifyrc")
}
}
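// Editor's usage sketch (not part of the original file):
//
//	if cache := GetCacheLocation(); cache != "" {
//		_ = DeleteCache(cache) // refuses paths outside ~/.sconsify/cache
//	}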
<|file_name|>attach.go<|end_file_name|>
package container
import (
"context"
"fmt"
"io"
"github.com/docker/cli/cli"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/client"
"github.com/moby/sys/signal"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
type attachOptions struct {
noStdin bool
proxy bool
detachKeys string
container string
}
func inspectContainerAndCheckState(ctx context.Context, cli client.APIClient, args string) (*types.ContainerJSON, error) {
c, err := cli.ContainerInspect(ctx, args)
if err != nil {
return nil, err
}
if !c.State.Running {
return nil, errors.New("You cannot attach to a stopped container, start it first")
}
if c.State.Paused {
return nil, errors.New("You cannot attach to a paused container, unpause it first")
}
if c.State.Restarting {
return nil, errors.New("You cannot attach to a restarting container, wait until it is running")
}
return &c, nil
}
// NewAttachCommand creates a new cobra.Command for `docker attach`
func NewAttachCommand(dockerCli command.Cli) *cobra.Command {
var opts attachOptions
cmd := &cobra.Command{
Use: "attach [OPTIONS] CONTAINER",
Short: "Attach local standard input, output, and error streams to a running container",
Args: cli.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
opts.container = args[0]
return runAttach(dockerCli, &opts)
},
}
flags := cmd.Flags()
flags.BoolVar(&opts.noStdin, "no-stdin", false, "Do not attach STDIN")
flags.BoolVar(&opts.proxy, "sig-proxy", true, "Proxy all received signals to the process")
flags.StringVar(&opts.detachKeys, "detach-keys", "", "Override the key sequence for detaching a container")
return cmd
}
func runAttach(dockerCli command.Cli, opts *attachOptions) error {
ctx := context.Background()
client := dockerCli.Client()
// request channel to wait for client
resultC, errC := client.ContainerWait(ctx, opts.container, "")
c, err := inspectContainerAndCheckState(ctx, client, opts.container)
if err != nil {
return err
}
if err := dockerCli.In().CheckTty(!opts.noStdin, c.Config.Tty); err != nil {
return err
}
if opts.detachKeys != "" {
dockerCli.ConfigFile().DetachKeys = opts.detachKeys
}
options := types.ContainerAttachOptions{
Stream: true,
Stdin: !opts.noStdin && c.Config.OpenStdin,
Stdout: true,
Stderr: true,
DetachKeys: dockerCli.ConfigFile().DetachKeys,
}
var in io.ReadCloser
if options.Stdin {
in = dockerCli.In()
}
if opts.proxy && !c.Config.Tty {
sigc := notifyAllSignals()
go ForwardAllSignals(ctx, dockerCli, opts.container, sigc)
defer signal.StopCatch(sigc)
}
resp, errAttach := client.ContainerAttach(ctx, opts.container, options)
if errAttach != nil {
return errAttach
}
defer resp.Close()
	// If the docker attach command is used on a stopped container, the
	// "You cannot attach to a stopped container" error is returned, which
	// is fine. But when attaching to a running container, docker attach
	// only inspects the container state on the client side; if the
	// container stops after that check passes, the attach would otherwise
	// block without exiting.
	//
	// Recheck the container's state to avoid the attach block.
_, err = inspectContainerAndCheckState(ctx, client, opts.container)
if err != nil {
return err
}
if c.Config.Tty && dockerCli.Out().IsTerminal() {
resizeTTY(ctx, dockerCli, opts.container)
}
streamer := hijackedIOStreamer{
streams: dockerCli,
inputStream: in,
outputStream: dockerCli.Out(),
errorStream: dockerCli.Err(),
resp: resp,
tty: c.Config.Tty,
detachKeys: options.DetachKeys,
}
if err := streamer.stream(ctx); err != nil {
return err
}
return getExitStatus(errC, resultC)
}
func getExitStatus(errC <-chan error, resultC <-chan container.ContainerWaitOKBody) error {
select {
case result := <-resultC:
if result.Error != nil {
			return fmt.Errorf("%s", result.Error.Message)
		}
		if result.StatusCode != 0 {
			return cli.StatusError{StatusCode: int(result.StatusCode)}
		}
case err := <-errC:
return err
}
return nil
}
func resizeTTY(ctx context.Context, dockerCli command.Cli, containerID string) {
height, width := dockerCli.Out().GetTtySize()
// To handle the case where a user repeatedly attaches/detaches without resizing their
// terminal, the only way to get the shell prompt to display for attaches 2+ is to artificially
// resize it, then go back to normal. Without this, every attach after the first will
// require the user to manually resize or hit enter.
resizeTtyTo(ctx, dockerCli.Client(), containerID, height+1, width+1, false)
// After the above resizing occurs, the call to MonitorTtySize below will handle resetting back
// to the actual size.
if err := MonitorTtySize(ctx, dockerCli, containerID, false); err != nil {
logrus.Debugf("Error monitoring TTY size: %s", err)
}
}
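// Editor's note (sketch, not in the original file): once registered on the
// docker CLI, the flags above map to invocations such as
//
//	docker attach --no-stdin --detach-keys="ctrl-x" mycontainer
//
// where --detach-keys overrides the config-file default used in runAttach.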
<|file_name|>MeshHelpers.hpp<|end_file_name|>
#ifndef SLG_MESH_HELPERS_HPP
#define SLG_MESH_HELPERS_HPP
#include "glm/glm.hpp"
#include "slg/Mesh.hpp"
#include <vector>
namespace slg {
bool loadObj(const char * filename,
std::vector<glm::vec3> & vertices,
std::vector<glm::vec2> & uvs,
std::vector<glm::vec3> & normals);
void computeTangentBasis(std::vector<glm::vec3> const& vertices,
std::vector<glm::vec2> const& uvs,
std::vector<glm::vec3> const& normals,
std::vector<glm::vec3> & tangents,
std::vector<glm::vec3> & bitangents);
void calculateIndex(std::vector<glm::vec3> const& inVertices,
std::vector<glm::vec2> const& inUvs,
std::vector<glm::vec3> const& inNormals,
std::vector<unsigned short> & outIndices,
std::vector<glm::vec3> & outVertices,
std::vector<glm::vec2> & outUvs,
std::vector<glm::vec3> & outNormals);
void calculateIndex(std::vector<glm::vec3> const& inVertices,
std::vector<glm::vec2> const& inUvs,
std::vector<glm::vec3> const& inNormals,
                    std::vector<glm::vec3> const& inTangents,
                    std::vector<glm::vec3> const& inBitangents,
                    std::vector<unsigned short> & outIndices,
std::vector<glm::vec3> & outVertices,
std::vector<glm::vec2> & outUvs,
std::vector<glm::vec3> & outNormals,
std::vector<glm::vec3> & outTangents,
std::vector<glm::vec3> & outBitangents);
void createQuad(Mesh & mesh, bool inClipSpace);
inline void pushTriangle(std::vector<unsigned short> & data, unsigned short a, unsigned short b, unsigned short c)
{
data.push_back(a);
data.push_back(b);
data.push_back(c);
}
}
#endif
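// Editor's usage sketch (assumes an OBJ file on disk; slg::Mesh comes from
// the include above):
//
//     std::vector<glm::vec3> v, n; std::vector<glm::vec2> uv;
//     if (slg::loadObj("model.obj", v, uv, n)) {
//       std::vector<glm::vec3> tan, bitan;
//       slg::computeTangentBasis(v, uv, n, tan, bitan);
//     }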
<|file_name|>webhookPublisher.go<|end_file_name|>
/*
Copyright 2017 The Fission Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package publisher
import (
"bytes"
"io/ioutil"
"net/http"
"strings"
"time"
"go.uber.org/zap"
)
type (
	// A webhook publisher for a single URL. Satisfies the Publisher interface.
WebhookPublisher struct {
logger *zap.Logger
requestChannel chan *publishRequest
maxRetries int
retryDelay time.Duration
baseUrl string
}
publishRequest struct {
body string
headers map[string]string
target string
retries int
retryDelay time.Duration
}
)
func MakeWebhookPublisher(logger *zap.Logger, baseUrl string) *WebhookPublisher {
p := &WebhookPublisher{
logger: logger.Named("webhook_publisher"),
baseUrl: baseUrl,
requestChannel: make(chan *publishRequest, 32), // buffered channel
// TODO make this configurable
maxRetries: 10,
		retryDelay: 500 * time.Millisecond,
	}
	go p.svc()
return p
}
func (p *WebhookPublisher) Publish(body string, headers map[string]string, target string) {
// serializing the request gives user a guarantee that the request is sent in sequence order
p.requestChannel <- &publishRequest{
body: body,
headers: headers,
target: target,
retries: p.maxRetries,
retryDelay: p.retryDelay,
}
}
func (p *WebhookPublisher) svc() {
for {
r := <-p.requestChannel
p.makeHttpRequest(r)
}
}
func (p *WebhookPublisher) makeHttpRequest(r *publishRequest) {
url := p.baseUrl + "/" + strings.TrimPrefix(r.target, "/")
msg := "making HTTP request"
level := zap.ErrorLevel
fields := []zap.Field{zap.String("url", url), zap.String("type", "publish_request")}
// log once for this request
defer func() {
if ce := p.logger.Check(level, msg); ce != nil {
ce.Write(fields...)
}
}()
var buf bytes.Buffer
buf.WriteString(r.body)
// Create request
req, err := http.NewRequest(http.MethodPost, url, &buf)
if err != nil {
fields = append(fields, zap.Error(err))
return
}
for k, v := range r.headers {
req.Header.Set(k, v)
}
// Make the request
resp, err := http.DefaultClient.Do(req)
if err != nil {
fields = append(fields, zap.Error(err), zap.Any("request", r))
} else {
var body []byte
body, err = ioutil.ReadAll(resp.Body)
if err != nil {
fields = append(fields, zap.Error(err), zap.Any("request", r))
msg = "read response body error"
} else {
fields = append(fields, zap.Int("status_code", resp.StatusCode), zap.String("body", string(body)))
if resp.StatusCode >= 200 && resp.StatusCode < 400 {
level = zap.InfoLevel
} else if resp.StatusCode >= 400 && resp.StatusCode < 500 {
msg = "request returned bad request status code"
level = zap.WarnLevel
} else {
msg = "request returned failure status code"
}
return
}
}
// Schedule a retry, or give up if out of retries
r.retries--
if r.retries > 0 {
r.retryDelay *= time.Duration(2)
time.AfterFunc(r.retryDelay, func() {
p.requestChannel <- r
})
} else {
msg = "final retry failed, giving up"
// Event dropped
}
}
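// Editor's note (not in the original): with maxRetries=10 and an initial
// retryDelay of 500ms, the doubling above yields a backoff of roughly
// 1s, 2s, 4s, ... ~256s before the event is finally dropped.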
<|file_name|>Signal.py<|end_file_name|>
#!/usr/bin/python3
################################################################################
#
# Copyright 2014 Stjepan Henc <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
import scipy.io.wavfile as wav
import numpy as np
import copy
class Signal:
# Data loaders
def LoadFromFile(self, file):
self.fs, self.s = wav.read(file)
self.sLength, self.nChans = self.s.shape
def LoadWF(self, waveform, fs):
        self.s = waveform
        self.fs = fs
self.sLength, self.nChans = self.s.shape
def __init__(self, *args):
#signal properties
self.singlePrecision = 0
self.s = np.array([])
self.fs = 44100
self.sLength = 0
self.nChans = 0
self.weightingFunction = np.hamming #FIXME
#STFT properties
self.S = np.array([])
self.windowLength = 60
self.nfft = 0
self.nfftUtil = 0
self.overlapRatio = 0.5
self.framesPositions = np.array([])
self.nFrames = 0
self.weightingWindow = np.array([])
self.overlap = 0
# Windowing properties
self.sWin = np.array([])
self.sWeights = np.array([])
if len(args) == 1:
if type(args[0]) == type(''): # it's a filename
self.LoadFromFile(args[0])
            elif type(args[0]) == type(self): # copy data from other signal
self.__dict__ = copy.deepcopy(args[0].__dict__)
elif len(args) == 2: # args[0] is a signal, args[1] is sample freq.
            self.LoadWF(args[0], args[1])
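# Editor's usage sketch (assumes a 2-channel WAV on disk; not in the file):
#
#     sig = Signal("drums.wav")   # load waveform and sample rate
#     clone = Signal(sig)         # deep copy via the one-argument branch
#     print(sig.fs, sig.sLength, sig.nChans)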
<|file_name|>wsgi.py<|end_file_name|>
"""
WSGI config for roastdog project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application
from dj_static import Cling

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.base")

application = Cling(get_wsgi_application())
<|file_name|>a00980.js<|end_file_name|>
var a00980 =
[
    [ "kDoNotReverse", "a00980.html#a0db2198747060995d61a01dcfac97eb7", null ],
    [ "kForceReverse", "a00980.html#a597d22620e40ecc0ec38dfa4f65a9d85", null ],
    [ "kReverseIfHasRTL", "a00980.html#a402a13eb2d1593daed0190a118f23d88", null ],
    [ "RTLReversePolicyNames", "a00980.html#af5c5a40d574f6ab56726b8b2f963f65c", null ]
];
<|file_name|>build_clib.py<|end_file_name|>
"""distutils.command.build_clib
Implements the Distutils 'build_clib' command, to build a C/C++ library
that is included in the module distribution and needed by an extension
module."""
__revision__ = "$Id$"
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from distutils.core import Command
from distutils.errors import DistutilsSetupError
from distutils.ccompiler import customize_compiler
from distutils import log
def show_compilers():
from distutils.ccompiler import show_compilers
show_compilers()
class build_clib(Command):
description = "build C/C++ libraries used by Python extensions"
user_options = [
('build-clib=', 'b',
"directory to build C/C++ libraries to"),
('build-temp=', 't',
"directory to put temporary build by-products"),
('debug', 'g',
"compile with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = 0
self.compiler = None
def finalize_options(self):
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options('build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'))
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self):
if not self.libraries:
return
# Yech -- this is cut 'n pasted from build_ext.py!
from distutils.ccompiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries):
"""Ensure that the list of libraries is valid.
        `libraries` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise DistutilsSetupError, \
"'libraries' option must be a list of tuples"
for lib in libraries:
            if not isinstance(lib, tuple) or len(lib) != 2:
                raise DistutilsSetupError, \
                      "each element of 'libraries' must be a 2-tuple"
name, build_info = lib
if not isinstance(name, str):
raise DistutilsSetupError, \
"first element of each tuple in 'libraries' " + \
"must be a string (the library name)"
if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError, \
("bad library name '%s': " +
"may not contain directory separators") % \
lib[0]
if not isinstance(build_info, dict):
raise DistutilsSetupError, \
"second element of each tuple in 'libraries' " + \
"must be a dictionary (build info)"
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for (lib_name, build_info) in self.libraries:
lib_names.append(lib_name)
return lib_names
def get_source_files(self):
self.check_library_list(self.libraries)
filenames = []
for (lib_name, build_info) in self.libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError, \
("in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames") % lib_name
filenames.extend(sources)
return filenames
def build_libraries(self, libraries):
for (lib_name, build_info) in libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError, \
("in 'libraries' option (library '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % lib_name
sources = list(sources)
log.info("building '%s' library", lib_name)
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
<|fim▁hole|> output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(objects, lib_name,
output_dir=self.build_clib,
debug=self.debug)<|fim▁end|> | # files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
objects = self.compiler.compile(sources,
|
<|file_name|>mysqlDAL.js<|end_file_name|><|fim▁begin|>var mysql = require('mysql');
function mysqlConn(config,logger) {
this.connectionPool = mysql.createPool(config);
this.initialized = true;
this.logger = logger;
}
mysqlConn.prototype = {
/// if the raw connection is needed
getConnection: function (callback) {<|fim▁hole|> if (!this.initialized) {
callback(new Error("Connection not initialized"));
return;
}
        var self = this;
        this.connectionPool.getConnection(function (err, connection) {
            // Use the connection
            if (err)
                self.logger.error('#Database -> Connection: ' + JSON.stringify(err));
            if (callback) callback(err, connection);
            // Only release a connection that was actually obtained
            if (connection) connection.release();
        });
}
,executeSP: function (procedureName, params, callback) {
if (!this.initialized) {
callback(new Error("Connection not initialized"));
return;
}
if (typeof (params) == "function" && callback == undefined) {
callback = params;
params = null;
}
var sql = 'CALL ' + procedureName + '(params)';
sql = this._injectParams(sql, params);
var l= this.logger;
//Execute stored procedure call
this.connectionPool.query(sql, function (err, rows, fields) {
if (err) {
try {
if (err.code == 'ER_SIGNAL_EXCEPTION' && err.sqlState == '45000' && err.message) {
var errorCode = err.message.replace('ER_SIGNAL_EXCEPTION: ', '');
l.warn('#Database -> Stored Procedure: ' + sql + ' Error code ##' + errorCode + '## was relieved while executing stored procedure :' ,err);
err.errorCode = errorCode;
}
else {
l.error('#Database -> Stored Procedure: ' + sql + ' an error has occurred while executing stored procedure :', err);
}
}
catch(e) {
console.error(e);
}
callback(err, null);
}
else {
l.debug('#Database -> Stored Procedure: ' + sql + ' connected to database successfully');
callback(null, rows);
}
});
}
,_injectParams: function (query, params) {
//Inject parameters in Stored Procedure Call
var parameters = '';
if (params) {
params.forEach(function (param, index) {
if (param == null || param.value == null)
parameters += "null";
else{
try{
parameters += "@" + param.name + ':=' + mysql.escape(param.value);
}
catch(e)
{
console.log(e);
throw e;
}
}
if (index < params.length - 1) parameters += ",";
});
}
query = query.replace("params", parameters);
return query;
}
, createCommand: function (procedureName) {
return new mysqlCommand(procedureName, this);
}
};
function mysqlCommand(procedureName, connectionPool) {
this.connectionPool = connectionPool;
this.procedureName = procedureName;
this.params = [];
}
mysqlCommand.prototype = {
addParam: function (name, value) {
this.params.push({ "name": name , "value" : value });
}
,getDataSet: function (callback) {
this.connectionPool.executeSP(this.procedureName, this.params, function (err, data) {
if (err)
callback(err, null);
else {
if (data)
callback(null, data);
else
callback(null, null);
}
});
}
,getDataTable: function (callback) {
this.getDataSet(function (err, data) {
if (err)
callback(err, null);
else {
if (data && data.length > 0)
callback(null, data[0]);
else
callback(null, []);
}
});
}
,getDataObject: function (callback) {
this.getDataTable(function (err, data) {
if (err)
callback(err, null);
else {
if (data && data.length > 0)
callback(null, data[0]);
else
callback(null, null);
}
});
}
,getScalar: function (callback) {
this.getDataObject(function (err, data) {
if (err)
callback(err, null);
else {
if (data != null) {
var key = Object.keys(data);
callback(null, data[key[0]]);
}
else
callback(null, null);
}
});
}
}
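// Usage sketch (the stored procedure and parameter names below are
// hypothetical):
//
//   var db = new mysqlConn(config, logger);
//   var cmd = db.createCommand('sp_get_user');
//   cmd.addParam('id', 42);
//   cmd.getDataObject(function (err, user) { /* single row or null */ });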
module.exports = mysqlConn;<|fim▁end|> | |
<|file_name|>print-path.py<|end_file_name|><|fim▁begin|># -----------
# User Instructions:
#
# Modify the the search function so that it returns
# a shortest path as follows:
#
# [['>', 'v', ' ', ' ', ' ', ' '],
# [' ', '>', '>', '>', '>', 'v'],
# [' ', ' ', ' ', ' ', ' ', 'v'],
# [' ', ' ', ' ', ' ', ' ', 'v'],
# [' ', ' ', ' ', ' ', ' ', '*']]
#
# Where '>', '<', '^', and 'v' refer to right, left,
# up, and down motions. Note that the 'v' should be
# lowercase. '*' should mark the goal cell.
#
# You may assume that all test cases for this function
# will have a path from init to goal.
# ----------
grid = [[0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0]]
init = [0, 0]
goal = [len(grid)-1, len(grid[0])-1]
cost = 1
delta = [[-1, 0 ], # go up
[ 0, -1], # go left
[ 1, 0 ], # go down
[ 0, 1 ]] # go right
delta_name = ['^', '<', 'v', '>']
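# Note on direction arrows (worked example): when backtracking from the
# goal, the expansion at (x2, y2) must point back toward (x, y), so the
# code below uses the opposite arrow, delta_name[(i + 2) % 4]. E.g. if
# the "up" motion (i = 0) reached the neighbour, the neighbour is marked
# 'v' (down), i = (0 + 2) % 4 = 2.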
def search(grid,init,goal,cost):
# ----------------------------------------
# modify code below
# ----------------------------------------
closed = [[0 for row in range(len(grid[0]))] for col in range(len(grid))]
closed[init[0]][init[1]] = 1
g_grid = [[' ' for row in range(len(grid[0]))] for col in range(len(grid))] #fill with g values
expand = [[' ' for row in range(len(grid[0]))] for col in range(len(grid))]
x = init[0]
y = init[1]
g = 0
g_grid[x][y] = g
open = [[g, x, y]]
found = False # flag that is set when search is complete
resign = False # flag set if we can't find expand
while not found and not resign:
if len(open) == 0:
resign = True
return 'fail'
else:
open.sort()
open.reverse()
next = open.pop()
x = next[1]
y = next[2]
g = next[0]
if x == goal[0] and y == goal[1]:
found = True
expand[x][y] = '*'
else:
for i in range(len(delta)):
x2 = x + delta[i][0]
y2 = y + delta[i][1]
if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2 < len(grid[0]):
if closed[x2][y2] == 0 and grid[x2][y2] == 0:
g2 = g + cost
g_grid[x2][y2] = g2
open.append([g2, x2, y2])
closed[x2][y2] = 1
for n in range(g2, -1, -1):
for i in range(len(delta)):
x2 = x + delta[i][0]
y2 = y + delta[i][1]
if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2 < len(grid[0]):
if g_grid[x2][y2] == (n-1):
expand[x2][y2] = delta_name[(i+2)%4]
x = x2
y = y2
return expand # make sure you return the shortest path
result = search(grid,init,goal,cost)
for row in result:<|fim▁hole|><|fim▁end|> | print(row) |
<|file_name|>validators.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright (C) 2017 Dmitry Berezovsky
#
# JointBox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# JointBox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def boolean(value):
return isinstance(value, bool)
def integer(value):
return isinstance(value, int)
def module_id(value):
return True<|fim▁end|> | # JointBox - Your DIY smart home. Simplified. |
<|file_name|>CPOrmLoader.java<|end_file_name|><|fim▁begin|>package za.co.cporm.model.loader.support;
import android.content.Context;
import android.database.Cursor;<|fim▁hole|>import android.support.v4.content.CursorLoader;
import za.co.cporm.model.generate.TableDetails;
import za.co.cporm.model.query.Select;
import za.co.cporm.model.util.CPOrmCursor;
import za.co.cporm.model.util.ContentResolverValues;
/**
* Created by hennie.brink on 2015-03-31.
*/
public class CPOrmLoader<Model> extends CursorLoader {
private TableDetails tableDetails;
private int cacheSize = 0;
/**
* Creates a new cursor loader using the select statement provided. The default implementation
* will enable the cache of the cursor to improve view performance. To manually specify the
* cursor cache size, use the overloaded constructor.
* @param context The context that will be used to create the cursor.
* @param select The select statement that will be used to retrieve the data.
*/
public CPOrmLoader(Context context, Select<Model> select) {
super(context);
ContentResolverValues resolverValues = select.asContentResolverValue(context);
setUri(resolverValues.getItemUri());
setProjection(resolverValues.getProjection());
setSelection(resolverValues.getWhere());
setSelectionArgs(resolverValues.getWhereArgs());
setSortOrder(resolverValues.getSortOrder());
tableDetails = resolverValues.getTableDetails();
}
/**
* Creates a new cursor loader using the select statement provided. You
* can specify the cache size to use, or use -1 to disable cursor caching.
* @param context The context that will be used to create the cursor.
* @param select The select statement that will be used to retrieve the data.
* @param cacheSize The cache size for the cursor, or -1 to disable caching
*/
public CPOrmLoader(Context context, Select<Model> select, int cacheSize) {
this(context, select);
enableCursorCache(cacheSize);
}
public void enableCursorCache(int size) {
cacheSize = size;
}
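    // Usage sketch (the Select API call shown is illustrative):
    //
    //   CPOrmLoader<Book> loader =
    //       new CPOrmLoader<>(getContext(), Select.from(Book.class), 50);
    //
    // A cache size of 50 keeps that many inflated rows; pass -1 to the
    // three-argument constructor to disable caching entirely.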
@Override
public CPOrmCursor<Model> loadInBackground() {
Cursor asyncCursor = super.loadInBackground();
if(asyncCursor == null)
return null;
CPOrmCursor<Model> cursor = new CPOrmCursor<>(tableDetails, asyncCursor);
if(cacheSize == 0){
cursor.enableCache();
} else if(cacheSize > 0) {
cursor.enableCache(cacheSize);
}
//Prefetch at least some items in preparation for the list
int count = cursor.getCount();
for (int i = 0; i < count && cursor.isCacheEnabled() && i < 100; i++) {
cursor.moveToPosition(i);
Model inflate = cursor.inflate();
}
return cursor;
}
}<|fim▁end|> | |
<|file_name|>select.template.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { MAT_GAP_FIX_DIRECTIVE } from '../directives/mat-label-fix.directive';
import * as cd from 'cd-interfaces';
import * as consts from 'cd-common/consts';
import * as mat from '../material-shared';
import {
TemplateFactory,
lookupPropAtPath,
propsBinding,
wrapInCurlyBraces,
INPUTS,
} from 'cd-common/models';
export const SELECT_TAG = 'mat-select';
export const OPTION_TAG = 'mat-option';
export const MAT_FORM_FIELD_TAG = 'mat-form-field';
const SEL_TRIGGER_TAG = 'mat-select-trigger';
const OPTION_VAR = 'option';
const SELECTION_CHANGE = 'selectionChange';
const MAT_SELECT_FIX_PIPE = 'matSelectFix';
export default function (mode: cd.TemplateBuildMode, props: cd.ISelectProperties): string {
return new TemplateFactory(mode, MAT_FORM_FIELD_TAG)
.ifInternal((me) =>
me
.addWrapper(
new TemplateFactory(mode, consts.DIV_TAG).addDefaultAttributes().addFitContentClass()
)
.addDirective(MAT_GAP_FIX_DIRECTIVE)
.addPropsBoundInputAttribute(mat.APPEARANCE_TAG)
.addPropsBoundInputAttribute(consts.COLOR_ATTR)
.addChild(buildInternalContent())
)<|fim▁hole|> me
.addAttribute(consts.COLOR_ATTR, props.inputs.color)
.addChild(buildExportContent(props))
.build()
)
.build();
}
const buildInternalContent = (): string => {
const mode = cd.TemplateBuildMode.Internal;
const labelElem = new mat.MaterialLabel('').template(mode);
const optionValue = lookupPropAtPath(OPTION_VAR, consts.VALUE_ATTR); // option.value
const optionDisabled = lookupPropAtPath(OPTION_VAR, consts.DISABLED_ATTR); // option.disabled
const optionText = lookupPropAtPath(OPTION_VAR, consts.NAME_ATTR); // option.name
const selectTriggerContent = `${propsBinding(INPUTS)} | ${MAT_SELECT_FIX_PIPE}`;
const selectTriggerElem = new TemplateFactory(mode, SEL_TRIGGER_TAG)
.addChild(wrapInCurlyBraces(selectTriggerContent))
.build();
const optionElem = new TemplateFactory(mode, OPTION_TAG)
.add_ngFor_Attribute(OPTION_VAR, consts.OPTIONS_ATTR, true) // for option of options
.addBoundAttribute(consts.VALUE_ATTR, optionValue)
.addBoundAttribute(consts.DISABLED_ATTR, optionDisabled)
.addBoundAttribute(consts.INNER_TEXT, optionText)
.build();
const selectElem = new TemplateFactory(mode, SELECT_TAG)
.addPropsBoundInputAttribute(consts.DISABLED_ATTR, undefined, true, cd.CoerceValue.Boolean)
.addPropsBoundInputAttribute(consts.REQUIRED_ATTR, undefined, true, cd.CoerceValue.Boolean)
.addPropsBoundInputAttribute(consts.VALUE_ATTR)
.addOutputBinding(SELECTION_CHANGE, consts.VALUE_ATTR, consts.VALUE_ATTR)
.addChild(selectTriggerElem)
.addChild(optionElem)
.build();
const hintElem = new mat.MaterialHint('').template(mode);
return labelElem + selectElem + hintElem;
};
const buildExportContent = (props: cd.ISelectProperties): string => {
const mode = cd.TemplateBuildMode.Simple;
const { options, disabled, hint, required, label, value } = props.inputs;
const optionsHtml: string[] = [];
const selectedIndex = options.findIndex((item) => item.value === value);
for (const [index, option] of options.entries()) {
const { disabled: optionDisabled, name } = option;
const selected = selectedIndex === index;
optionsHtml.push(
new TemplateFactory(mode, OPTION_TAG)
.addAttribute(consts.DISABLED_ATTR, optionDisabled, false)
.addAttribute(consts.SELECTED_ATTR, selected, false)
.addChild(name)
.build()
);
}
const selectElem = new TemplateFactory(mode, SELECT_TAG)
.addAttribute(consts.DISABLED_ATTR, disabled, false)
.addAttribute(consts.REQUIRED_ATTR, required, false)
.addChild(optionsHtml.join(''))
.build();
const labelElem = label ? new mat.MaterialLabel('').template(mode, props) : '';
const hintElem = hint ? new mat.MaterialHint('').template(mode, props) : '';
return [labelElem, selectElem, hintElem].join('');
};<|fim▁end|> | .ifExport((me) => |
<|file_name|>borrowck-auto-mut-ref-to-immut-var.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that auto-ref can't create mutable aliases to immutable memory.
struct Foo {
x: int
}
impl Foo {
pub fn printme(&mut self) {
println!("{}", self.x);
}
}
fn main() {
let x = Foo { x: 3 };
x.printme(); //~ ERROR cannot borrow
}<|fim▁end|> | |
<|file_name|>validation_test.go<|end_file_name|><|fim▁begin|>package config
import (
"bytes"
"fmt"
"strings"
"testing"
"gopkg.in/inconshreveable/log15.v2"
)
func TestValidation(t *testing.T) {
t.Parallel()
c := New().FromString(`
[networks.net]
`)
if c.Validate() {
t.Error("Expected it to be invalid.")
}
expErr := "(net) Expected at least one server."
if ers := c.Errors(); len(ers) == 0 {
t.Error("Expected one error.")
} else if ers[0].Error() != expErr {
t.Error("Expected a particular error message, got:", ers[0])
}
c = New().FromString(`
nick = "n"
altnick = "n"
realname = "n"
username = "n"
prefix = 5
[networks.net]
servers = ["n"]
`)
if c.Validate() {
t.Error("Expected it to be invalid.")
}
expErr = "(global) prefix is int64 but expected string [5]"
if ers := c.Errors(); len(ers) == 0 {
t.Error("Expected one error.")
} else if ers[0].Error() != expErr {
t.Error("Expected a particular error message, got:", ers[0])
}
}
func TestValidation_DisplayErrors(t *testing.T) {
t.Parallel()
b := &bytes.Buffer{}
logger := log15.New()
logger.SetHandler(log15.StreamHandler(b, log15.LogfmtFormat()))
c := New().FromString(`
nick = "n"
altnick = "n"
realname = "n"
username = "n"
prefix = 5
[networks.net]
`)
if c.Validate() {
t.Error("Expected it to be invalid.")
}
exp := "(global) prefix is int64 but expected string [5]"
c.DisplayErrors(logger)
if !strings.Contains(b.String(), exp) {
t.Error("Expected a particular error message, got:", b.String())
}
}
type rexpect struct {
context, message string
}
type texpect struct {
context, key, kind, foundKind string
}
func TestValidation_RequiredNoServers(t *testing.T) {
t.Parallel()
expects := []rexpect{
{"", "Expected at least one network."},
}
requiredTestHelper("", expects, t)
}
func TestValidation_RequiredServers(t *testing.T) {
t.Parallel()
cfg := `[networks.hello]`
expects := []rexpect{
{"hello", "Nickname is required."},
{"hello", "Username is required."},
{"hello", "Realname is required."},
{"hello", "Expected at least one server."},
}
requiredTestHelper(cfg, expects, t)
}
func TestValidation_RequiredTypes(t *testing.T) {
t.Parallel()
cfg := `networks = 5`
expects := []rexpect{{"", "Expected at least one network."}}
requiredTestHelper(cfg, expects, t)
cfg = "[networks]\nserver = 5"
expects = []rexpect{{"server", "Expected network to be a map, got int64"}}
requiredTestHelper(cfg, expects, t)
}
func requiredTestHelper(cfg string, expects []rexpect, t *testing.T) {
ers := make(errList, 0)
c := New().FromString(cfg)
c.validateRequired(&ers)
if len(expects) != len(ers) {
for _, e := range ers {
t.Error(e)
}
t.Errorf("Expected %d errors, but got %d", len(expects), len(ers))
}
founds := make([]bool, len(ers))
for _, expErr := range expects {
found := false
for i, e := range ers {
var er string
if len(expErr.context) == 0 {
er = fmt.Sprintf("%s", expErr.message)
} else {
er = fmt.Sprintf("(%s) %s", expErr.context, expErr.message)
}
if strings.HasPrefix(e.Error(), er) {
found = true
founds[i] = true
break
}
}
if !found {
t.Error("Expected to find error concerning:",
expErr.context, expErr.message)
}
}
for i, found := range founds {
if !found {
t.Error("Unexpected error occurred:", ers[i])
}
}
}
func TestValidation_TypesTopLevel(t *testing.T) {
t.Parallel()
cfg := `
networks = 5
ext = 5
exts = 5`
exps := []texpect{
{"global", "networks", "map", "int64"},
{"global", "ext", "map", "int64"},
{"global", "exts", "map", "int64"},
}
typesTestHelper(cfg, exps, t)
}
func TestValidation_TypesMidLevel(t *testing.T) {
t.Parallel()
cfg := `
[networks]
noirc = 5
[networks.ircnet]
channels = 5
[ext]
config = 5
active = 5
[exts]
myext = 5
[exts.extension]
active = 5`
exps := []texpect{
{"global networks", "noirc", "map", "int64"},
{"ircnet", "channels", "map", "int64"},
{"ext", "config", "map", "int64"},
{"ext", "active", "map", "int64"},
{"exts", "myext", "map", "int64"},
{"extension", "active", "map", "int64"},
}
typesTestHelper(cfg, exps, t)
}
func TestValidation_TypesConfig(t *testing.T) {
t.Parallel()
cfg := `
[ext.active]
list = 5
[ext.config]
networks = 5
channels = 5
[ext.config.more]
list = 5
[exts.extname.active]
list = 5`
exps := []texpect{
{"ext active", "list", "array", "int64"},
{"extname active", "list", "array", "int64"},
{"ext config", "networks", "map", "int64"},
{"ext config", "channels", "map", "int64"},
{"ext config", "more", "string", "map[string]interface {}"},
}
typesTestHelper(cfg, exps, t)
cfg = `
[ext.config]
networks = "5"
channels = "5"`
exps = []texpect{
{"ext config", "networks", "map", "string"},
{"ext config", "channels", "map", "string"},
}
typesTestHelper(cfg, exps, t)
}
func TestValidation_TypesConfigMidLevel(t *testing.T) {
t.Parallel()
cfg := `
[ext.config.networks]<|fim▁hole|> [ext.config.channels]
channel = 5
[ext.config.networks.ircnet]
channels = 5`
exps := []texpect{
{"ext config", "network", "map", "int64"},
{"ext config", "channel", "map", "int64"},
{"ext config ircnet", "channels", "map", "int64"},
}
typesTestHelper(cfg, exps, t)
}
func TestValidation_TypesLeafs(t *testing.T) {
t.Parallel()
cfg := `
storefile = 5
nocorecmds = "hello"
logfile = 5
loglevel = 5
secret_key = 5
nick = 6
altnick = 7
username = 8
realname = 9
password = 10
[networks.noirc]
servers = "farse"
[networks.ircnet]
servers = [10]
tls = "hello"
tls_ca_cert = false
tls_cert = false
tls_key = false
tls_insecure_skip_verify = "lol"
nostate = 5
nostore = 6
noautojoin = 5
joindelay = "lol"
floodlenpenalty = 20.0
floodtimeout = "anarchy"
floodstep = "string"
keepalive = "what"
noreconnect = "abc"
reconnecttimeout = 20.0
prefix = false
[[networks.ircnet.channels]]
name = 5
password = 5
prefix = 5
[[networks.ircnet.channels]]
name = "#channel2"
password = "pass2"
prefix = "@"
[ext]
listen = 5
execdir = 20
noreconnect = "true"
reconnecttimeout = 40.0
tls_cert = true
tls_key = true
tls_client_ca = true
tls_client_revs = true
[ext.config]
key = 5
[ext.config.channels."#channel"]
key = 5
[ext.config.networks.ircnet]
key = 5
[ext.config.networks.ircnet.channels."#channel"]
key = 5
[ext.active]
ircnet = [5, 6]
[exts.myext]
exec = 5
server = 5
tls_cert = true
tls_insecure_skip_verify = "what"
unix = 5
[exts.myext.active]
ircnet = [5, 6]`
exps := []texpect{
{"global", "storefile", "string", "int64"},
{"global", "nocorecmds", "bool", "string"},
{"global", "loglevel", "string", "int64"},
{"global", "logfile", "string", "int64"},
{"global", "secret_key", "string", "int64"},
{"global", "nick", "string", "int64"},
{"global", "altnick", "string", "int64"},
{"global", "username", "string", "int64"},
{"global", "realname", "string", "int64"},
{"global", "password", "string", "int64"},
{"noirc", "servers", "array", "string"},
{"ircnet", "servers 1", "string", "int64"},
{"ircnet", "tls", "bool", "string"},
{"ircnet", "tls_key", "string", "bool"},
{"ircnet", "tls_cert", "string", "bool"},
{"ircnet", "tls_ca_cert", "string", "bool"},
{"ircnet", "tls_insecure_skip_verify", "bool", "string"},
{"ircnet", "nostate", "bool", "int64"},
{"ircnet", "nostore", "bool", "int64"},
{"ircnet", "noautojoin", "bool", "int64"},
{"ircnet", "joindelay", "int", "string"},
{"ircnet", "floodlenpenalty", "int", "float64"},
{"ircnet", "floodtimeout", "float64", "string"},
{"ircnet", "floodstep", "float64", "string"},
{"ircnet", "keepalive", "float64", "string"},
{"ircnet", "noreconnect", "bool", "string"},
{"ircnet", "reconnecttimeout", "int", "float64"},
{"ircnet", "prefix", "string", "bool"},
{"ircnet channels", "name", "string", "int64"},
{"ircnet channels", "password", "string", "int64"},
{"ircnet channels", "prefix", "string", "int64"},
{"ext", "listen", "string", "int64"},
{"ext", "tls_cert", "string", "bool"},
{"ext", "tls_key", "string", "bool"},
{"ext", "tls_client_ca", "string", "bool"},
{"ext", "tls_client_revs", "string", "bool"},
{"ext", "execdir", "string", "int64"},
{"ext", "noreconnect", "bool", "string"},
{"ext", "reconnecttimeout", "int", "float64"},
{"ext config", "key", "string", "int64"},
{"ext config #channel", "key", "string", "int64"},
{"ext config ircnet", "key", "string", "int64"},
{"ext config ircnet #channel", "key", "string", "int64"},
{"ext active ircnet", "channel 1", "string", "int64"},
{"ext active ircnet", "channel 2", "string", "int64"},
{"myext", "exec", "string", "int64"},
{"myext", "server", "string", "int64"},
{"myext", "tls_cert", "string", "bool"},
{"myext", "tls_insecure_skip_verify", "bool", "string"},
{"myext active ircnet", "channel 1", "string", "int64"},
{"myext active ircnet", "channel 2", "string", "int64"},
}
typesTestHelper(cfg, exps, t)
}
func typesTestHelper(cfg string, expects []texpect, t *testing.T) {
ers := make(errList, 0)
c := New().FromString(cfg)
c.validateTypes(&ers)
if len(expects) != len(ers) {
for _, e := range ers {
t.Error(e)
}
t.Errorf("Expected %d errors, but got %d", len(expects), len(ers))
}
founds := make([]bool, len(ers))
for _, expErr := range expects {
found := false
for i, e := range ers {
er := fmt.Sprintf("(%s) %s is %s but expected %s",
expErr.context, expErr.key, expErr.foundKind, expErr.kind)
if strings.HasPrefix(e.Error(), er) {
found = true
founds[i] = true
break
}
}
if !found {
t.Error("Expected to find error concerning:",
expErr.context, expErr.key, expErr.foundKind, expErr.kind)
}
}
for i, found := range founds {
if !found {
t.Error("Unexpected error occurred:", ers[i])
}
}
}<|fim▁end|> | network = 5 |
<|file_name|>get_device_state.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
""" message_gds.py:
"""
# Import Required Libraries (Standard, Third Party, Local) ********************
import datetime
import logging
if __name__ == "__main__":
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from rpihome_v3.helpers.ipv4_help import check_ipv4
from rpihome_v3.helpers.field_checkers import in_int_range
from rpihome_v3.helpers.field_checkers import is_valid_datetime
# Authorship Info *************************************************************
__author__ = "Christopher Maue"
__copyright__ = "Copyright 2017, The RPi-Home Project"
__credits__ = ["Christopher Maue"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Christopher Maue"
__email__ = "[email protected]"
__status__ = "Development"
# Message Class Definition ****************************************************
class GetDeviceStateMessage(object):
""" Log Status Update message class and methods """
def __init__(self, log=None, **kwargs):
# Configure logger
self.log = log or logging.getLogger(__name__)
self._ref = str()
self._dest_addr = str()
self._dest_port = str()
self._source_addr = str()
self._source_port = str()
self._msg_type = str()
self._dev_name = str()
self._dev_addr = str()
self._dev_status = str()
self._dev_last_seen = str()
self.temp_list = []
# Process input variables if present
if kwargs is not None:<|fim▁hole|> if key == "ref":
self.ref = value
self.log.debug('Ref Number value set during '
'__init__ to: %s', self.ref)
if key == "dest_addr":
self.dest_addr = value
self.log.debug('Destination address value set during __init__ '
'to: %s', self.dest_addr)
if key == "dest_port":
self.dest_port = value
self.log.debug('Destination port value set during __init__ '
'to: %s', self.dest_port)
if key == "source_addr":
self.source_addr = value
self.log.debug('Source address value set during __init__ '
'to: %s', self.source_addr)
if key == "source_port":
self.source_port = value
self.log.debug('Source port value set during __init__ to: '
'%s', self.source_port)
if key == "msg_type":
self.msg_type = value
self.log.debug('Message type value set during __init__ to: '
'%s', self.msg_type)
if key == "dev_name":
self.dev_name = value
self.log.debug('Device name value set during __init__ to: '
'%s', self.dev_name)
if key == "dev_addr":
self.dev_addr = value
self.log.debug('Device Address value set during __init__ '
'to: %s', self.dev_addr)
if key == "dev_status":
self.dev_status = value
self.log.debug('Device Status value set during __init__ '
'to: %s', self.dev_status)
if key == "dev_last_seen":
self.dev_last_seen = value
self.log.debug('Device last seen value set during __init__ '
'to: %s', self.dev_last_seen)
# ref number field ********************************************************
@property
def ref(self):
self.log.debug('Returning current value of ref number: %s', self._ref)
return self._ref
@ref.setter
def ref(self, value):
if in_int_range(self.log, value, 100, 999) is True:
self._ref = str(value)
self.log.debug('Ref number updated to: %s', self._ref)
else:
self.log.debug('Ref number update failed with input value: '
'%s', value)
# destination address *****************************************************
@property
def dest_addr(self):
self.log.debug('Returning current value of destination address: '
'%s', self._dest_addr)
return self._dest_addr
@dest_addr.setter
def dest_addr(self, value):
if check_ipv4(value) is True:
self._dest_addr = str(value)
self.log.debug('Destination address updated to: '
'%s', self._dest_addr)
else:
self.log.warning('Destination address update failed with input value: '
'%s', value)
# destination port ********************************************************
@property
def dest_port(self):
self.log.debug('Returning current value of destination port: '
'%s', self._dest_port)
return self._dest_port
@dest_port.setter
def dest_port(self, value):
if in_int_range(self.log, value, 10000, 60000) is True:
self._dest_port = str(value)
self.log.debug('Destination port updated to: %s', self._dest_port)
else:
self.log.debug('Destination port update failed with input value: '
'%s', value)
# source address field ****************************************************
@property
def source_addr(self):
self.log.debug('Returning current value of source address: '
'%s', self._source_addr)
return self._source_addr
@source_addr.setter
def source_addr(self, value):
if check_ipv4(value) is True:
self._source_addr = value
self.log.debug('source address updated to: '
'%s', self._source_addr)
else:
self.log.warning('Source address update failed with input value: '
'%s', value)
# source port field *******************************************************
@property
def source_port(self):
self.log.debug('Returning current value of source port: '
'%s', self._source_port)
return self._source_port
@source_port.setter
def source_port(self, value):
if in_int_range(self.log, value, 10000, 60000) is True:
self._source_port = str(value)
self.log.debug('Source port updated to: %s', self._source_port)
else:
self.log.debug('Source port update failed with input value: '
'%s', value)
# message type field ******************************************************
@property
def msg_type(self):
self.log.debug('Returning current value of message type: '
'%s', self._msg_type)
return self._msg_type
@msg_type.setter
def msg_type(self, value):
if in_int_range(self.log, value, 100, 999) is True:
self._msg_type = str(value)
self.log.debug('Message type updated to: %s', self._msg_type)
else:
self.log.debug('Message type update failed with input value: '
'%s', value)
# device name field *******************************************************
@property
def dev_name(self):
self.log.debug('Returning current value of device name: '
'%s', self._dev_name)
return self._dev_name
@dev_name.setter
def dev_name(self, value):
if isinstance(value, str):
self._dev_name = value
else:
self._dev_name = str(value)
self.log.debug('Device name value updated to: '
'%s', self._dev_name)
# device address field ****************************************************
@property
def dev_addr(self):
self.log.debug('Returning current value of device address: '
'%s', self._dev_addr)
return self._dev_addr
@dev_addr.setter
def dev_addr(self, value):
if check_ipv4(value) is True:
self._dev_addr = value
self.log.debug('Device address updated to: '
'%s', self._dev_addr)
else:
self.log.warning('Device address update failed with input value: '
'%s', value)
# device status field *****************************************************
@property
def dev_status(self):
self.log.debug('Returning current value of device status: '
'%s', self._dev_status)
return self._dev_status
@dev_status.setter
def dev_status(self, value):
if isinstance(value, str):
self._dev_status = value.lower()
else:
self._dev_status = (str(value)).lower()
self.log.debug('Device status value updated to: '
'%s', self._dev_status)
# device last seen field **************************************************
@property
def dev_last_seen(self):
self.log.debug('Returning current value of device last seen: '
'%s', self._dev_last_seen)
return self._dev_last_seen
@dev_last_seen.setter
def dev_last_seen(self, value):
self._dev_last_seen = is_valid_datetime(
self.log,
value,
self._dev_last_seen)
self.log.debug('Device last seen updated to: %s', self._dev_last_seen)
# complete message encode/decode methods **********************************
@property
def complete(self):
self.log.debug('Returning current value of complete message: '
'%s,%s,%s,%s,%s,%s,%s,%s,%s,%s',
self._ref, self._dest_addr, self._dest_port,
self._source_addr, self._source_port,
self._msg_type, self._dev_name, self._dev_addr,
self._dev_status, self._dev_last_seen)
return '%s,%s,%s,%s,%s,%s,%s,%s,%s,%s' % (
self._ref, self._dest_addr, self._dest_port,
self._source_addr, self._source_port,
self._msg_type, self._dev_name, self._dev_addr,
self._dev_status, self._dev_last_seen)
@complete.setter
def complete(self, value):
if isinstance(value, str):
self.temp_list = value.split(',')
if len(self.temp_list) >= 10:
self.log.debug('Message was properly formatted for decoding')
self.ref = self.temp_list[0]
self.dest_addr = self.temp_list[1]
self.dest_port = self.temp_list[2]
self.source_addr = self.temp_list[3]
self.source_port = self.temp_list[4]
self.msg_type = self.temp_list[5]
self.dev_name = self.temp_list[6]
self.dev_addr = self.temp_list[7]
self.dev_status = self.temp_list[8]
self.dev_last_seen = self.temp_list[9]<|fim▁end|> | for key, value in kwargs.items(): |
<|file_name|>data_api_client_ex2.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
query = GeneAcronymQuery()
gene_info = query.get_data('ABAT')
for gene in gene_info:
print "%s (%s)" % (gene['name'], gene['organism']['name'])<|fim▁end|> | from gene_acronym_query import GeneAcronymQuery |
<|file_name|>app.rs<|end_file_name|><|fim▁begin|>use clap::{App, AppSettings, Arg, ArgGroup, SubCommand};
pub fn build() -> App<'static, 'static> {
App::new(crate_name!())
.about(crate_description!())
.version(crate_version!())
.setting(AppSettings::SubcommandRequired)
.setting(AppSettings::VersionlessSubcommands)
.subcommand(SubCommand::with_name("digraph")<|fim▁hole|> .arg(Arg::with_name("convert")
.help("Converts a digraph sequence or a character to the other")
.long("convert")
.short("c")
.takes_value(true))
.arg(Arg::with_name("filter")
.help("Prints information about matching digraphs")
.long("filter")
.short("f")
.takes_value(true))
.arg(Arg::with_name("description")
.help("Prints results with description")
.long("description")
.short("d")
.requires("filter"))
.group(ArgGroup::with_name("modes")
.args(&["convert", "filter"])
.required(true)))
}<|fim▁end|> | .about("Digraph lookup and resolution")
.setting(AppSettings::AllowLeadingHyphen)
.setting(AppSettings::UnifiedHelpMessage) |
<|file_name|>globals.d.ts<|end_file_name|><|fim▁begin|>/**
* TypeScript global declarations go here
*/<|fim▁hole|>
declare global {
// Defined in es2017 but most major browsers support it
interface String {
padStart(width: number, pad: string): string
}
}
export { }<|fim▁end|> | |
<|file_name|>guildDelete.js<|end_file_name|><|fim▁begin|>const excludedTables = ["blacklist", "musicCache", "timedEvents"];
const statPoster = require("../../modules/statPoster.js");
module.exports = async guild => {
let tables = await r.tableList().run();
for(let table of tables) {
let indexes = await r.table(table).indexList().run();
if(~indexes.indexOf("guildID")) r.table(table).getAll(guild.id, { index: "guildID" }).delete().run();
else r.table(table).filter({ guildID: guild.id }).delete().run();
}
if(bot.config.bot.serverChannel) {
let owner = bot.users.get(guild.ownerID);
let botCount = guild.members.filter(member => member.bot).length;
let botPercent = ((botCount / guild.memberCount) * 100).toFixed(2);
let userCount = guild.memberCount - botCount;
let userPercent = ((userCount / guild.memberCount) * 100).toFixed(2);
let content = "❌ LEFT GUILD ❌\n";
content += `Guild: ${guild.name} (${guild.id})\n`;
content += `Owner: ${owner.username}#${owner.discriminator} (${owner.id})\n`;
content += `Members: ${guild.memberCount} **|** `;
content += `Users: ${userCount} (${userPercent}%) **|** `;
content += `Bots: ${botCount} (${botPercent}%)`;
try {
await bot.createMessage(bot.config.bot.serverChannel, content);
} catch(err) {
console.error(`Failed to send message to server log: ${err.message}`);
}<|fim▁hole|>
statPoster();
};<|fim▁end|> | } |
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>"""Let's Encrypt client errors."""
class Error(Exception):
"""Generic Let's Encrypt client error."""
class AccountStorageError(Error):
"""Generic `.AccountStorage` error."""
class AccountNotFound(AccountStorageError):
"""Account not found error."""
class ReverterError(Error):
"""Let's Encrypt Reverter error."""
class SubprocessError(Error):
"""Subprocess handling error."""
class CertStorageError(Error):
"""Generic `.CertStorage` error."""
class HookCommandNotFound(Error):
"""Failed to find a hook command in the PATH."""
# Auth Handler Errors
class AuthorizationError(Error):
"""Authorization error."""
class FailedChallenges(AuthorizationError):
"""Failed challenges error.
:ivar set failed_achalls: Failed `.AnnotatedChallenge` instances.
"""
def __init__(self, failed_achalls):
assert failed_achalls
self.failed_achalls = failed_achalls
super(FailedChallenges, self).__init__()
def __str__(self):
return "Failed authorization procedure. {0}".format(
", ".join(
"{0} ({1}): {2}".format(achall.domain, achall.typ, achall.error)<|fim▁hole|>
# Plugin Errors
class PluginError(Error):
"""Let's Encrypt Plugin error."""
class PluginEnhancementAlreadyPresent(Error):
""" Enhancement was already set """
class PluginSelectionError(Error):
"""A problem with plugin/configurator selection or setup"""
class NoInstallationError(PluginError):
"""Let's Encrypt No Installation error."""
class MisconfigurationError(PluginError):
"""Let's Encrypt Misconfiguration error."""
class NotSupportedError(PluginError):
"""Let's Encrypt Plugin function not supported error."""
class StandaloneBindError(Error):
"""Standalone plugin bind error."""
def __init__(self, socket_error, port):
super(StandaloneBindError, self).__init__(
"Problem binding to port {0}: {1}".format(port, socket_error))
self.socket_error = socket_error
self.port = port
class ConfigurationError(Error):
"""Configuration sanity error."""
# NoninteractiveDisplay iDisplay plugin error:
class MissingCommandlineFlag(Error):
"""A command line argument was missing in noninteractive usage"""<|fim▁end|> | for achall in self.failed_achalls if achall.error is not None))
|
<|file_name|>mysql.py<|end_file_name|><|fim▁begin|>"""pybackup - Backup Plugin for MySQL Database
"""
import os
from pybackup import errors
from pybackup import utils
from pybackup.logmgr import logger
from pybackup.plugins import BackupPluginBase
from pysysinfo.mysql import MySQLinfo
__author__ = "Ali Onur Uyar"
__copyright__ = "Copyright 2011, Ali Onur Uyar"
__credits__ = []
__license__ = "GPL"
__version__ = "0.5"
__maintainer__ = "Ali Onur Uyar"
__email__ = "aouyar at gmail.com"
__status__ = "Development"
class PluginMySQL(BackupPluginBase):
"""Class for backups of MySQL Database.
"""
_extOpts = {'filename_dump_db': 'Filename for MySQL dump files.',
'db_host': 'MySQL Database Server Name or IP.',
'db_port': 'MySQL Database Server Port.',
'db_user': 'MySQL Database Server User.',
'db_password': 'MySQL Database Server Password.',
'db_list': 'List of databases. (All databases by default.)',}
_extReqOptList = ()
_extDefaults = {'cmd_mysqldump': 'mysqldump',
'filename_dump_db': 'mysql_dump',}
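    # Job configuration sketch (keys mirror _extOpts; values are examples
    # only):
    #
    #   job_conf = {'db_host': 'localhost', 'db_port': '3306',
    #               'db_user': 'backup', 'db_password': 'secret',
    #               'db_list': ['appdb', 'wikidb']}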
def __init__(self, global_conf, job_conf):
"""Constructor
@param global_conf: Dictionary of general configuration options.
@param job_conf: Dictionary of job configuration options.
"""
BackupPluginBase.__init__(self, global_conf, job_conf)
self._connArgs = []
for (opt, key) in (('-h', 'db_host'),
('-P', 'db_port'),
('-u', 'db_user')):
val = self._conf.get(key)
if val is not None:
self._connArgs.extend([opt, val])
self._env = os.environ.copy()
db_password = self._conf.get('db_password')
if db_password is not None:
self._env['MYSQL_PWD'] = db_password
def dumpDatabase(self, db, data=True):
if data:
dump_type = 'data'
dump_desc = 'MySQL Database Contents'
else:
dump_type = 'db'
dump_desc = 'MySQL Database Container'
dump_filename = "%s_%s_%s.dump.%s" % (self._conf['filename_dump_db'],
db, dump_type,
self._conf['suffix_compress'])
dump_path = os.path.join(self._conf['job_path'], dump_filename)
args = [self._conf['cmd_mysqldump'],]
args.extend(self._connArgs)
if db in ('information_schema', 'mysql'):
args.append('--skip-lock-tables')
if not data:
args.extend(['--no-create-info', '--no-data' ,'--databases'])
args.append(db)
logger.info("Starting dump of %s: %s"
" Backup: %s", dump_desc, db, dump_path)
returncode, out, err = self._execBackupCmd(args, #@UnusedVariable
self._env,<|fim▁hole|> out_compress=True)
if returncode == 0:
logger.info("Finished dump of %s: %s"
" Backup: %s", dump_desc, db, dump_path)
else:
raise errors.BackupError("Dump of %s for %s failed "
"with error code: %s"
% (dump_desc, db, returncode),
*utils.splitMsg(err))
def dumpDatabases(self):
if not self._conf.has_key('db_list'):
try:
my = MySQLinfo(host=self._conf.get('db_host'),
port=self._conf.get('db_port'),
user=self._conf.get('db_user'),
password=self._conf.get('db_password'))
self._conf['db_list'] = my.getDatabases()
del my
except Exception, e:
raise errors.BackupError("Connection to MySQL Server "
"for querying database list failed.",
"Error Message: %s" % str(e))
logger.info("Starting dump of %d MySQL Databases.",
len(self._conf['db_list']))
for db in self._conf['db_list']:
self.dumpDatabase(db, False)
self.dumpDatabase(db, True)
logger.info("Finished dump of MySQL Databases.")
def dumpFull(self):
self.dumpDatabases()
description = "Plugin for backups of MySQL Database."
methodList = (('mysql_dump_full', PluginMySQL, 'dumpFull'),
('mysql_dump_databases', PluginMySQL, 'dumpDatabases'),)<|fim▁end|> | out_path=dump_path, |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import json
import mock
from sentry.plugins.helpers import get_option, set_option
from sentry.testutils import TestCase
from sentry.models import set_sentry_version, Option
from sentry.tasks.check_update import check_update, PYPI_URL
class CheckUpdateTest(TestCase):
OLD = '5.0.0'
CURRENT = '5.5.0-DEV'
NEW = '1000000000.5.1'
KEY = 'sentry:latest_version'
def test_run_check_update_task(self):
with mock.patch('sentry.tasks.check_update.fetch_url_content') as fetch:
fetch.return_value = (
None, None, json.dumps({'info': {'version': self.NEW}})
)
check_update() # latest_version > current_version
fetch.assert_called_once_with(PYPI_URL)
self.assertEqual(get_option(key=self.KEY), self.NEW)
def test_run_check_update_task_with_bad_response(self):
with mock.patch('sentry.tasks.check_update.fetch_url_content') as fetch:
fetch.return_value = (None, None, '')
check_update() # latest_version == current_version
fetch.assert_called_once_with(PYPI_URL)
self.assertEqual(get_option(key=self.KEY), None)
def test_set_sentry_version_empty_latest(self):
set_sentry_version(latest=self.NEW)<|fim▁hole|> set_option(self.KEY, self.OLD)
with mock.patch('sentry.get_version') as get_version:
get_version.return_value = self.CURRENT
set_sentry_version(latest=self.NEW)
self.assertEqual(Option.objects.get_value(key=self.KEY), self.NEW)
def test_set_sentry_version_old(self):
set_option(self.KEY, self.NEW)
with mock.patch('sentry.get_version') as get_version:
get_version.return_value = self.CURRENT
set_sentry_version(latest=self.OLD)
self.assertEqual(Option.objects.get_value(key=self.KEY), self.NEW)<|fim▁end|> | self.assertEqual(get_option(key=self.KEY), self.NEW)
def test_set_sentry_version_new(self): |
<|file_name|>init-old.py<|end_file_name|><|fim▁begin|>import errno
import os
import pwd
import shutil
import sys
from jinja2 import Environment, FileSystemLoader
class TutorialEnv:
LOCAL_MACHINE = ("Local Machine Condor Pool", "submit-host")
USC_HPCC_CLUSTER = ("USC HPCC Cluster", "usc-hpcc")
OSG_FROM_ISI = ("OSG from ISI submit node", "osg")
XSEDE_BOSCO = ("XSEDE, with Bosco", "xsede-bosco")
BLUEWATERS_GLITE = ("Bluewaters, with Glite", "bw-glite")
TACC_WRANGLER = ("TACC Wrangler with Glite", "wrangler-glite")
OLCF_TITAN = ("OLCF TITAN with Glite", "titan-glite")
OLCF_SUMMIT_KUBERNETES_BOSCO = (
"OLCF Summit from Kubernetes using BOSCO",
"summit-kub-bosco",
)
class TutorialExample:
PROCESS = ("Process", "process")
PIPELINE = ("Pipeline", "pipeline")
SPLIT = ("Split", "split")
MERGE = ("Merge", "merge")
EPA = ("EPA (requires R)", "r-epa")
DIAMOND = ("Diamond", "diamond")
CONTAINER = ("Population Modeling using Containers", "population")
MPI = ("MPI Hello World", "mpi-hw")
def choice(question, options, default):
"Ask the user to choose from a short list of named options"
while True:
sys.stdout.write("{} ({}) [{}]: ".format(question, "/".join(options), default))
answer = sys.stdin.readline().strip()
if len(answer) == 0:
return default
for opt in options:
if answer == opt:
return answer
def yesno(question, default="y"):
"Ask the user a yes/no question"
while True:
sys.stdout.write("{} (y/n) [{}]: ".format(question, default))
answer = sys.stdin.readline().strip().lower()
if len(answer) == 0:
answer = default
if answer == "y":
return True
elif answer == "n":
return False
def query(question, default=None):
"Ask the user a question and return the response"
while True:
if default:
sys.stdout.write("{} [{}]: ".format(question, default))
else:
sys.stdout.write("%s: " % question)
answer = sys.stdin.readline().strip().replace(" ", "_")
if answer == "":
if default:
return default
else:
return answer
def optionlist(question, options, default=0):
"Ask the user to choose from a list of options"
for i, option in enumerate(options):
print("%d: %s" % (i + 1, option[0]))
while True:
sys.stdout.write("%s (1-%d) [%d]: " % (question, len(options), default + 1))
answer = sys.stdin.readline().strip()
if len(answer) == 0:
return options[default][1]
try:
optno = int(answer)
if optno > 0 and optno <= len(options):
return options[optno - 1][1]
except Exception:
pass
class Workflow:
def __init__(self, workflowdir, sharedir):
self.jinja = Environment(loader=FileSystemLoader(sharedir), trim_blocks=True)
self.name = os.path.basename(workflowdir)<|fim▁hole|> self.workflowdir = workflowdir
self.sharedir = sharedir
self.properties = {}
self.home = os.environ["HOME"]
self.user = pwd.getpwuid(os.getuid())[0]
self.tutorial = None
self.generate_tutorial = False
self.tutorial_setup = None
self.compute_queue = "default"
self.project = "MYPROJ123"
sysname, _, _, _, machine = os.uname()
if sysname == "Darwin":
self.os = "MACOSX"
else:
# Probably Linux
self.os = sysname.upper()
self.arch = machine
def copy_template(self, template, dest, mode=0o644):
"Copy template to dest in workflowdir with mode"
path = os.path.join(self.workflowdir, dest)
t = self.jinja.get_template(template)
t.stream(**self.__dict__).dump(path)
os.chmod(path, mode)
def copy_dir(self, src, dest):
# self.mkdir(dest)
if not src.startswith("/"):
src = os.path.join(self.sharedir, src)
try:
dest = os.path.join(self.workflowdir, dest)
shutil.copytree(src, dest)
except OSError as exc: # python >2.5
if exc.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
raise
def mkdir(self, path):
"Make relative directory in workflowdir"
path = os.path.join(self.workflowdir, path)
if not os.path.exists(path):
os.makedirs(path)
def configure(self):
# The tutorial is a special case
if yesno("Do you want to generate a tutorial workflow?", "n"):
self.config = "tutorial"
self.daxgen = "tutorial"
self.generate_tutorial = True
# determine the environment to setup tutorial for
self.tutorial_setup = optionlist(
"What environment is tutorial to be setup for?",
[
TutorialEnv.LOCAL_MACHINE,
TutorialEnv.USC_HPCC_CLUSTER,
TutorialEnv.OSG_FROM_ISI,
TutorialEnv.XSEDE_BOSCO,
TutorialEnv.BLUEWATERS_GLITE,
TutorialEnv.TACC_WRANGLER,
TutorialEnv.OLCF_TITAN,
TutorialEnv.OLCF_SUMMIT_KUBERNETES_BOSCO,
],
)
# figure out what example options to provide
examples = [
TutorialExample.PROCESS,
TutorialExample.PIPELINE,
TutorialExample.SPLIT,
TutorialExample.MERGE,
TutorialExample.EPA,
TutorialExample.CONTAINER,
]
if self.tutorial_setup != "osg":
examples.append(TutorialExample.DIAMOND)
if self.tutorial_setup in [
"bw-glite",
"wrangler-glite",
"titan-glite",
"summit-kub-bosco",
]:
examples.append(TutorialExample.MPI)
self.project = query(
"What project your jobs should run under. For example on TACC there are like : TG-DDM160003 ?"
)
self.tutorial = optionlist("What tutorial workflow do you want?", examples)
self.setup_tutorial()
return
# Determine which DAX generator API to use
self.daxgen = choice(
"What DAX generator API do you want to use?",
["python", "perl", "java", "r"],
"python",
)
# Determine what kind of site catalog we need to generate
self.config = optionlist(
"What does your computing infrastructure look like?",
[
("Local Machine Condor Pool", "condorpool"),
("Remote Cluster using Globus GRAM", "globus"),
("Remote Cluster using CREAMCE", "creamce"),
("Local PBS Cluster with Glite", "glite"),
("Remote PBS Cluster with BOSCO and SSH", "bosco"),
],
)
# Find out some information about the site
self.sitename = query("What do you want to call your compute site?", "compute")
self.os = choice(
"What OS does your compute site have?", ["LINUX", "MACOSX"], self.os
)
self.arch = choice(
"What architecture does your compute site have?",
["x86_64", "x86"],
self.arch,
)
def setup_tutorial(self):
"""
Set up tutorial for pre-defined computing environments
:return:
"""
if self.tutorial_setup is None:
self.tutorial_setup = "submit-host"
if self.tutorial_setup == "submit-host":
self.sitename = "condorpool"
elif self.tutorial_setup == "usc-hpcc":
self.sitename = "usc-hpcc"
self.config = "glite"
self.compute_queue = "quick"
# for running the whole workflow as mpi job
self.properties["pegasus.job.aggregator"] = "mpiexec"
elif self.tutorial_setup == "osg":
self.sitename = "osg"
self.os = "linux"
if not yesno("Do you want to use Condor file transfers", "y"):
self.staging_site = "isi_workflow"
elif self.tutorial_setup == "xsede-bosco":
self.sitename = "condorpool"
elif self.tutorial_setup == "bw-glite":
self.sitename = "bluewaters"
self.config = "glite"
self.compute_queue = "normal"
elif self.tutorial_setup == "wrangler-glite":
self.sitename = "wrangler"
self.config = "glite"
self.compute_queue = "normal"
elif self.tutorial_setup == "titan-glite":
self.sitename = "titan"
self.config = "glite"
self.compute_queue = "titan"
elif self.tutorial_setup == "summit-kub-bosco":
self.sitename = "summit"
self.config = "bosco"
self.compute_queue = "batch"
return
def generate(self):
os.makedirs(self.workflowdir)
if self.tutorial != "population":
self.mkdir("input")
self.mkdir("output")
if self.generate_tutorial:
self.copy_template("%s/tc.txt" % self.tutorial, "tc.txt")
if self.tutorial == "r-epa":
self.copy_template("%s/daxgen.R" % self.tutorial, "daxgen.R")
elif self.tutorial != "mpi-hw":
self.copy_template("%s/daxgen.py" % self.tutorial, "daxgen.py")
if self.tutorial == "diamond":
# Executables used by the diamond workflow
self.mkdir("bin")
self.copy_template(
"diamond/transformation.py", "bin/preprocess", mode=0o755
)
self.copy_template(
"diamond/transformation.py", "bin/findrange", mode=0o755
)
self.copy_template(
"diamond/transformation.py", "bin/analyze", mode=0o755
)
# Diamond input file
self.copy_template("diamond/f.a", "input/f.a")
elif self.tutorial == "split":
# Split workflow input file
self.mkdir("bin")
self.copy_template("split/pegasus.html", "input/pegasus.html")
elif self.tutorial == "r-epa":
# Executables used by the R-EPA workflow
self.mkdir("bin")
self.copy_template(
"r-epa/epa-wrapper.sh", "bin/epa-wrapper.sh", mode=0o755
)
self.copy_template("r-epa/setupvar.R", "bin/setupvar.R", mode=0o755)
self.copy_template(
"r-epa/weighted.average.R", "bin/weighted.average.R", mode=0o755
)
self.copy_template(
"r-epa/cumulative.percentiles.R",
"bin/cumulative.percentiles.R",
mode=0o755,
)
elif self.tutorial == "population":
self.copy_template("%s/Dockerfile" % self.tutorial, "Dockerfile")
self.copy_template("%s/Singularity" % self.tutorial, "Singularity")
self.copy_template(
"%s/tc.txt.containers" % self.tutorial, "tc.txt.containers"
)
self.copy_dir("%s/scripts" % self.tutorial, "scripts")
self.copy_dir("%s/data" % self.tutorial, "input")
# copy the mpi wrapper, c code and mpi
elif self.tutorial == "mpi-hw":
# copy the mpi wrapper, c code and mpi example
# Executables used by the mpi-hw workflow
self.mkdir("bin")
self.copy_template(
"%s/pegasus-mpi-hw.c" % self.tutorial, "pegasus-mpi-hw.c"
)
self.copy_template("%s/Makefile" % self.tutorial, "Makefile")
self.copy_template("%s/daxgen.py.template" % self.tutorial, "daxgen.py")
self.copy_template(
"%s/mpi-hello-world-wrapper" % self.tutorial,
"bin/mpi-hello-world-wrapper",
mode=0o755,
)
self.copy_template("split/pegasus.html", "input/f.in")
else:
self.copy_template("tc.txt", "tc.txt")
if self.daxgen == "python":
self.copy_template("daxgen/daxgen.py", "daxgen.py")
elif self.daxgen == "perl":
self.copy_template("daxgen/daxgen.pl", "daxgen.pl")
elif self.daxgen == "java":
self.copy_template("daxgen/DAXGen.java", "DAXGen.java")
elif self.daxgen == "r":
self.copy_template("daxgen/daxgen.R", "daxgen.R")
else:
assert False
self.copy_template("sites.xml", "sites.xml")
self.copy_template("plan_dax.sh", "plan_dax.sh", mode=0o755)
self.copy_template("plan_cluster_dax.sh", "plan_cluster_dax.sh", mode=0o755)
self.copy_template("generate_dax.sh", "generate_dax.sh", mode=0o755)
self.copy_template("README.md", "README.md")
self.copy_template("rc.txt", "rc.txt")
self.copy_template("pegasus.properties", "pegasus.properties")
if self.tutorial == "diamond":
if self.tutorial_setup == "wrangler-glite":
self.copy_template(
"pmc-wrapper.wrangler", "bin/pmc-wrapper", mode=0o755
)
elif self.tutorial_setup == "titan-glite":
self.copy_template("pmc-wrapper.titan", "bin/pmc-wrapper", mode=0o755)
elif self.tutorial_setup == "wrangler-glite":
self.copy_template(
"pmc-wrapper.wrangler", "bin/pmc-wrapper", mode=0o755
)
elif self.tutorial_setup == "summit-kub-bosco":
self.copy_template("pmc-wrapper.summit", "bin/pmc-wrapper", mode=0o755)
if self.generate_tutorial:
sys.stdout.write(
"Pegasus Tutorial setup for example workflow - %s for execution on %s in directory %s\n"
% (self.tutorial, self.tutorial_setup, self.workflowdir)
)
def usage():
print("Usage: %s WORKFLOW_DIR" % sys.argv[0])
def main(pegasus_share_dir):
if len(sys.argv) != 2:
usage()
exit(1)
if "-h" in sys.argv:
usage()
exit(1)
workflowdir = sys.argv[1]
if os.path.exists(workflowdir):
print("ERROR: WORKFLOW_DIR '%s' already exists" % workflowdir)
exit(1)
workflowdir = os.path.abspath(workflowdir)
sharedir = os.path.join(pegasus_share_dir, "init")
w = Workflow(workflowdir, sharedir)
w.configure()
w.generate()<|fim▁end|> | |
<|file_name|>kqueue.rs<|end_file_name|><|fim▁begin|>use {io, EventSet, PollOpt, Token};
use event::IoEvent;
use nix::sys::event::{EventFilter, EventFlag, FilterFlag, KEvent, kqueue, kevent};
use nix::sys::event::{EV_ADD, EV_CLEAR, EV_DELETE, EV_DISABLE, EV_ENABLE, EV_EOF, EV_ERROR, EV_ONESHOT};
use std::{fmt, slice};
use std::os::unix::io::RawFd;
use std::collections::HashMap;
#[derive(Debug)]
pub struct Selector {
kq: RawFd,
changes: Events
}
impl Selector {
pub fn new() -> io::Result<Selector> {
Ok(Selector {
kq: try!(kqueue().map_err(super::from_nix_error)),
changes: Events::new()
})
}
pub fn select(&mut self, evts: &mut Events, timeout_ms: usize) -> io::Result<()> {
let cnt = try!(kevent(self.kq, &[], evts.as_mut_slice(), timeout_ms)
.map_err(super::from_nix_error));
self.changes.sys_events.clear();
unsafe {
evts.sys_events.set_len(cnt);
}
evts.coalesce();
Ok(())
}
pub fn register(&mut self, fd: RawFd, token: Token, interests: EventSet, opts: PollOpt) -> io::Result<()> {
trace!("registering; token={:?}; interests={:?}", token, interests);
self.ev_register(fd, token.as_usize(), EventFilter::EVFILT_READ, interests.contains(EventSet::readable()), opts);
self.ev_register(fd, token.as_usize(), EventFilter::EVFILT_WRITE, interests.contains(EventSet::writable()), opts);
self.flush_changes()
}
pub fn reregister(&mut self, fd: RawFd, token: Token, interests: EventSet, opts: PollOpt) -> io::Result<()> {
// Just need to call register here since EV_ADD is a mod if already
// registered
self.register(fd, token, interests, opts)
}
pub fn deregister(&mut self, fd: RawFd) -> io::Result<()> {
self.ev_push(fd, 0, EventFilter::EVFILT_READ, EV_DELETE);
self.ev_push(fd, 0, EventFilter::EVFILT_WRITE, EV_DELETE);
self.flush_changes()
}
fn ev_register(&mut self, fd: RawFd, token: usize, filter: EventFilter, enable: bool, opts: PollOpt) {
let mut flags = EV_ADD;
if enable {
flags = flags | EV_ENABLE;<|fim▁hole|> } else {
flags = flags | EV_DISABLE;
}
if opts.contains(PollOpt::edge()) {
flags = flags | EV_CLEAR;
}
if opts.contains(PollOpt::oneshot()) {
flags = flags | EV_ONESHOT;
}
self.ev_push(fd, token, filter, flags);
}
fn ev_push(&mut self, fd: RawFd, token: usize, filter: EventFilter, flags: EventFlag) {
self.changes.sys_events.push(
KEvent {
ident: fd as ::libc::uintptr_t,
filter: filter,
flags: flags,
fflags: FilterFlag::empty(),
data: 0,
udata: token
});
}
fn flush_changes(&mut self) -> io::Result<()> {
let result = kevent(self.kq, self.changes.as_slice(), &mut [], 0).map(|_| ())
.map_err(super::from_nix_error).map(|_| ());
self.changes.sys_events.clear();
result
}
}
pub struct Events {
sys_events: Vec<KEvent>,
events: Vec<IoEvent>,
event_map: HashMap<Token, usize>,
}
impl Events {
pub fn new() -> Events {
Events {
sys_events: Vec::with_capacity(1024),
events: Vec::with_capacity(1024),
event_map: HashMap::with_capacity(1024)
}
}
#[inline]
pub fn len(&self) -> usize {
self.events.len()
}
pub fn get(&self, idx: usize) -> IoEvent {
self.events[idx]
}
pub fn coalesce(&mut self) {
self.events.clear();
self.event_map.clear();
for e in self.sys_events.iter() {
let token = Token(e.udata as usize);
let len = self.events.len();
let idx = *self.event_map.entry(token)
.or_insert(len);
if idx == len {
// New entry, insert the default
self.events.push(IoEvent::new(EventSet::none(), token));
}
if e.flags.contains(EV_ERROR) {
self.events[idx].kind.insert(EventSet::error());
}
if e.filter == EventFilter::EVFILT_READ {
self.events[idx].kind.insert(EventSet::readable());
} else if e.filter == EventFilter::EVFILT_WRITE {
self.events[idx].kind.insert(EventSet::writable());
}
if e.flags.contains(EV_EOF) {
self.events[idx].kind.insert(EventSet::hup());
// When the read end of the socket is closed, EV_EOF is set on
// flags, and fflags contains the error if there is one.
if !e.fflags.is_empty() {
self.events[idx].kind.insert(EventSet::error());
}
}
}
}
fn as_slice(&self) -> &[KEvent] {
unsafe {
let ptr = (&self.sys_events[..]).as_ptr();
slice::from_raw_parts(ptr, self.sys_events.len())
}
}
fn as_mut_slice(&mut self) -> &mut [KEvent] {
unsafe {
let ptr = (&mut self.sys_events[..]).as_mut_ptr();
slice::from_raw_parts_mut(ptr, self.sys_events.capacity())
}
}
}
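#[cfg(test)]
mod test {
    use super::*;
    use nix::sys::event::{EventFilter, EventFlag, FilterFlag, KEvent};
    // Minimal sketch of the coalescing behaviour implemented above: two
    // kevents that share a udata value fold into a single IoEvent with both
    // the readable and writable bits set. Assumes IoEvent's `kind`/`token`
    // fields and EventSet's `is_readable`/`is_writable` helpers are public,
    // as the surrounding code suggests.
    #[test]
    fn coalesce_merges_filters_for_same_token() {
        let mut evts = Events::new();
        for filter in vec![EventFilter::EVFILT_READ, EventFilter::EVFILT_WRITE] {
            evts.sys_events.push(KEvent {
                ident: 0,
                filter: filter,
                flags: EventFlag::empty(),
                fflags: FilterFlag::empty(),
                data: 0,
                udata: 7,
            });
        }
        evts.coalesce();
        assert_eq!(evts.len(), 1);
        assert_eq!(evts.get(0).token, Token(7));
        assert!(evts.get(0).kind.is_readable() && evts.get(0).kind.is_writable());
    }
}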
impl fmt::Debug for Events {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "Events {{ len: {} }}", self.sys_events.len())
}
}<|fim▁end|> | |
<|file_name|>randNNIWalks.py<|end_file_name|><|fim▁begin|>#randNNIWalks.py
#writes random NNI walks to files
#calls GTP on each NNI random walk file to get
#the distances between each tree and the first tree of the sequence
#the results are written to csv files with lines delimited by \t
import tree_utils as tu
import w_tree_utils as wtu
import os
import sys
import numpy as np
import random
from math import sqrt
__pid__ = 0
__prefix__ = "NNI_"
#daf: distance algorithm file
def randNNIwalk(daf,size,steps,runs,seed,weighted = False):
global __pid__
global __prefix__
#set the seed
random.seed(seed)
np.random.seed(seed)
#select tree utils module
if weighted:
tum = wtu
genRandBinTree = lambda leaves: wtu.genRandBinTree(leaves,np.random.exponential)
else:
tum = tu
genRandBinTree = lambda leaves: tu.genRandBinTree(leaves)
tum.treeNorm = lambda x: 0.25
out_file_name = __prefix__ + str(size) + "_" + str(steps) + "_" +\
str(runs) + "_" + str(seed)
normsfile_name = out_file_name + '.norms'
    #create a file for each NNI sequence
for k in range(runs):
rand_tree = genRandBinTree(list(range(size)))
total_nodes = size-1
#write current sequence to file
infile_prefix = "tmpnniseq" + str(__pid__)
infile = infile_prefix + str(k)
with open(infile,'w') as treefile, open(normsfile_name,'w') as nrmfile:<|fim▁hole|>
#write tree norms-----
#save norm of first tree
norm1 = sqrt(tum.treeNorm(rand_tree))
walknorms = ''
for i in range(steps):
current_tree = tum.randNNI(current_tree,total_nodes)
treefile.write(tum.toNewickTree(current_tree) + "\n")
#write ||T1|| + ||T2||
walknorms += str(norm1 + sqrt(tum.treeNorm(current_tree))) + ','
#write norms sequence
nrmfile.write(walknorms[0:-1] + '\n')
#assumes GTP file is in current working directory
outfile = "tempseq" + str(__pid__) + ".csv"
infile_prefix = "tmpnniseq" + str(__pid__)
infile = infile_prefix + str(k)
os.system("java -jar " + daf + " -r 0 -o " + outfile + " " + infile)
#append output to final sequence file
os.system("cat " + outfile + " | ./toLines.py >> " + out_file_name)
#cleanup
os.system("rm " + outfile)
os.system("rm " + infile_prefix + "*")
if __name__=='__main__':
if len(sys.argv)<6:
print ("Too few arguments!!")
print ("Usage: [-w] <distance algorithm file .jar> <size or size range> <no. NNI steps> <no. runs> <seed or seed range>")
sys.exit(-1)
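    #example invocations (illustrative argument values):
    #  python randNNIWalks.py gtp.jar 10 100 5 42
    #  python randNNIWalks.py -w gtp.jar 10:20 100 5 1:4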
WEIGHTED = False
if len(sys.argv) == 7:
WEIGHTED = sys.argv.pop(1) == '-w'
dist_algo_file = sys.argv[1]
if dist_algo_file != "gtp.jar":
__prefix__ = "RNI_"
if WEIGHTED:
__prefix__ = 'W' + __prefix__
else:
__prefix__ = 'U' + __prefix__
#take a single size or a range of sizes
if ":" in sys.argv[2]:
size_start, size_end = map(lambda x: int(x),sys.argv[2].split(':'))
else:
size_start = int(sys.argv[2])
size_end = size_start + 1
size_range = range(size_start,size_end)
steps = int(sys.argv[3])
runs = int(sys.argv[4])
#take a single seed or a range of seeds
if ":" in sys.argv[5]:
seed_start,seed_end = map(lambda x: int(x),sys.argv[5].split(':'))
else:
seed_start = int(sys.argv[5])
seed_end = seed_start + 1
seed_range = range(seed_start,seed_end)
    #set pid property before calling randNNIwalk
__pid__ = os.getpid()
for size in size_range:
for seed in seed_range:
randNNIwalk(dist_algo_file,size,steps,runs,seed,WEIGHTED)<|fim▁end|> | treefile.write(tum.toNewickTree(rand_tree) + "\n")
            current_tree = rand_tree
<|file_name|>test_python_can.py<|end_file_name|><|fim▁begin|>from tests.package.test_python import TestPythonPackageBase
class TestPythonPy2Can(TestPythonPackageBase):
__test__ = True
config = TestPythonPackageBase.config + \
"""
BR2_PACKAGE_PYTHON=y
BR2_PACKAGE_PYTHON_CAN=y
"""
sample_scripts = ["tests/package/sample_python_can.py"]
timeout = 40
class TestPythonPy3Can(TestPythonPackageBase):
__test__ = True
config = TestPythonPackageBase.config + \
"""
BR2_PACKAGE_PYTHON3=y
BR2_PACKAGE_PYTHON_CAN=y<|fim▁hole|> """
sample_scripts = ["tests/package/sample_python_can.py"]
timeout = 40<|fim▁end|> | |
<|file_name|>listdb_reservations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import time
import pyslurm
def reservation_display(reservation):
if reservation:
for key,value in reservation.items():
print("\t{}={}".format(key, value))
if __name__ == "__main__":
try:
end = time.time()
start = end - (30*24*60*60)
print("start={}, end={}".format(start, end))<|fim▁hole|> reservations = pyslurm.slurmdb_reservations()
reservations.set_reservation_condition(start, end)
reservations_dict = reservations.get()
if len(reservations_dict):
for key, value in reservations_dict.items():
print("{} Reservation: {}".format('{', key))
reservation_display(value)
print("}")
else:
print("No reservation found")
except ValueError as e:
print("Error:{}".format(e.args[0]))<|fim▁end|> | |
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>//! JMESPath abstract syntax tree (AST).
//!
//! Inspecting the JMESPath AST can be useful for analyzing the way in
//! which an expression was parsed and which features are utilized in
//! an expression.
//!
//! Ast can be accessed directly from a parsed `jmespath::Expression`
//! using the `as_ast()` method. An Ast can be created by using the
//! `jmespath::parse()` function which returns an Ast rather than an<|fim▁hole|>//!
//! ```
//! use jmespath;
//!
//! let ast = jmespath::parse("a || b && c").unwrap();
//! ```
use std::fmt;
use Rcvar;
use lexer::Token;
/// A JMESPath expression abstract syntax tree.
#[derive(Clone, PartialEq, Debug)]
pub enum Ast {
/// Compares two nodes using a comparator, returning true/false.
Comparison {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Comparator that compares the two results
comparator: Comparator,
/// Left hand side of the comparison
lhs: Box<Ast>,
/// Right hand side of the comparison
rhs: Box<Ast>,
},
/// If `predicate` evaluates to a truthy value, returns the
/// result `then`
Condition {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// The predicate to test.
predicate: Box<Ast>,
/// The node to traverse if the predicate is truthy.
then: Box<Ast>,
},
/// Returns the current node.
Identity {
/// Approximate absolute position in the parsed expression.
offset: usize,
},
/// Used by functions to dynamically evaluate argument values.
Expref {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Node to execute
ast: Box<Ast>,
},
/// Evaluates the node, then flattens it one level.
Flatten {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Node to execute and flatten
node: Box<Ast>,
},
/// Function name and a vec or function argument expressions.
Function {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Function name to invoke.
name: String,
/// Function arguments.
args: Vec<Ast>,
},
/// Extracts a key by name from a map.
Field {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Field name to extract.
name: String,
},
/// Extracts an index from a Vec.
Index {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Index to extract
idx: i32,
},
/// Resolves to a literal value.
Literal {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Literal value
value: Rcvar,
},
/// Evaluates to a list of evaluated expressions.
MultiList {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Elements of the list
elements: Vec<Ast>,
},
/// Evaluates to a map of key value pairs.
MultiHash {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Elements of the hash
elements: Vec<KeyValuePair>,
},
/// Evaluates to true/false based on if the expression is not truthy.
Not {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// node to negate
node: Box<Ast>,
},
/// Evaluates LHS, and pushes each value through RHS.
Projection {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Left hand side of the projection.
lhs: Box<Ast>,
/// Right hand side of the projection.
rhs: Box<Ast>,
},
/// Evaluates LHS. If it resolves to an object, returns a Vec of values.
ObjectValues {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Node to extract object values from.
node: Box<Ast>,
},
/// Evaluates LHS. If not truthy returns. Otherwise evaluates RHS.
And {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Left hand side of the expression.
lhs: Box<Ast>,
/// Right hand side of the expression.
rhs: Box<Ast>,
},
/// Evaluates LHS. If truthy returns. Otherwise evaluates RHS.
Or {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Left hand side of the expression.
lhs: Box<Ast>,
/// Right hand side of the expression.
rhs: Box<Ast>,
},
/// Returns a slice of a vec, using start, stop, and step.
Slice {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Starting index
start: Option<i32>,
/// Stopping index
stop: Option<i32>,
/// Step amount between extractions.
step: i32,
},
    /// Evaluates LHS, then provides that value to the evaluation of RHS.
Subexpr {
/// Approximate absolute position in the parsed expression.
offset: usize,
/// Left hand side of the expression.
lhs: Box<Ast>,
/// Right hand side of the expression.
rhs: Box<Ast>,
},
}
impl fmt::Display for Ast {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{:#?}", self)
}
}
/// Represents a key value pair in a MultiHash.
#[derive(Clone, PartialEq, Debug)]
pub struct KeyValuePair {
/// Key name.
pub key: String,
/// Value expression used to determine the value.
pub value: Ast,
}
/// Comparators used in Comparison nodes.
#[derive(Clone, PartialEq, Debug)]
pub enum Comparator {
Equal,
NotEqual,
LessThan,
LessThanEqual,
GreaterThan,
GreaterThanEqual,
}
/// Creates a Comparator from a Token.
///
/// Note: panics if the Token is invalid.
impl From<Token> for Comparator {
fn from(token: Token) -> Self {
match token {
Token::Lt => Comparator::LessThan,
Token::Lte => Comparator::LessThanEqual,
Token::Gt => Comparator::GreaterThan,
Token::Gte => Comparator::GreaterThanEqual,
Token::Eq => Comparator::Equal,
Token::Ne => Comparator::NotEqual,
_ => panic!("Invalid token for comparator: {:?}", token),
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn displays_pretty_printed_ast_node() {
let node = Ast::Field {
name: "abc".to_string(),
offset: 4,
};
assert_eq!("Field {\n offset: 4,\n name: \"abc\",\n}",
format!("{}", node));
}
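    // Illustrative companion test for the `From<Token>` conversion defined
    // above; the Token variants come from the `lexer` import at the top of
    // this file.
    #[test]
    fn converts_comparison_tokens_to_comparators() {
        assert_eq!(Comparator::from(Token::Lt), Comparator::LessThan);
        assert_eq!(Comparator::from(Token::Ne), Comparator::NotEqual);
    }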
}<|fim▁end|> | //! `Expression`. |
<|file_name|>login.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import urllib
from . import admin
from flask import request
from flask import url_for
from flask import redirect
from flask import render_template
from flask_login import UserMixin
from flask_login import login_user
from flask_login import logout_user
from flask_login import login_required
from core.extension import login_manager
from core.views.common import render_json
from core.models import AdminUser
class LoginUser(UserMixin):
def __init__(self, user):
self.user = user
def get_id(self):<|fim▁hole|>@login_manager.user_loader
def load_user(userid):
user = AdminUser.get_by_id(int(userid))
return LoginUser(user)
@admin.route('/signin', methods=['GET', 'POST'])
def signin():
if request.method == 'POST':
user = AdminUser.query.filter_by(
active=True,
username=request.form['username'],
password=request.form['pwd']
).first()
if not user:
return render_json(1, {'err_no': 'pwd_error', 'input': 'pwd'})
login_user(LoginUser(user))
next = request.form.get('next', '')
if next:
next = urllib.unquote(next)
return render_json(0, {'href': next, 'delaySuccess': True})
return render_json(0, {'href': '/admin/dashboard', 'delaySuccess': True})
return render_template('/admin/signin.html')
@admin.route('/signout', methods=['GET'])
def signout():
logout_user()
return redirect(url_for('admin.signin'))
@admin.route('/dashboard', methods=['GET', 'POST'])
@login_required
def dashboard():
return render_template('/admin/dashboard.html')<|fim▁end|> | return unicode(self.user.id)
|
<|file_name|>specifications.py<|end_file_name|><|fim▁begin|># This file is part of pylabels, a Python library to create PDFs for printing
# labels.
# Copyright (C) 2012, 2013, 2014, 2015 Blair Bonnett
#
# pylabels is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# pylabels is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pylabels. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
import json
class InvalidDimension(ValueError):<|fim▁hole|>class Specification(object):
"""Specification for a sheet of labels.
All dimensions are given in millimetres. If any of the margins are not
given, then any remaining space is divided equally amongst them. If all the
width or all the height margins are given, they must exactly use up all
non-label space on the sheet.
"""
def __init__(self, sheet_width, sheet_height, columns, rows, label_width, label_height, **kwargs):
"""
Required parameters
-------------------
sheet_width, sheet_height: positive dimension
The size of the sheet.
columns, rows: positive integer
The number of labels on the sheet.
        label_width, label_height: positive dimension
The size of each label.
Margins and gaps
----------------
left_margin: positive dimension
The gap between the left edge of the sheet and the first column.
column_gap: positive dimension
The internal gap between columns.
right_margin: positive dimension
The gap between the right edge of the sheet and the last column.
top_margin: positive dimension
The gap between the top edge of the sheet and the first row.
row_gap: positive dimension
The internal gap between rows.
bottom_margin: positive dimension
The gap between the bottom edge of the sheet and the last row.
Padding
-------
left_padding, right_padding, top_padding, bottom_padding: positive dimensions, default 0
The padding between the edges of the label and the area available
to draw on.
Corners
---------------------
corner_radius: positive dimension, default 0
Gives the labels rounded corners with the given radius.
padding_radius: positive dimension, default 0
Give the drawing area rounded corners. If there is no padding, this
must be set to zero.
Background
----------
background_image: reportlab.graphics.shape.Image
An image to use as the background to the page. This will be
automatically sized to fit the page; make sure it has the correct
aspect ratio.
background_filename: string
Filename of an image to use as a background to the page. If both
this and background_image are given, then background_image will
take precedence.
Raises
------
InvalidDimension
If any given dimension is invalid (i.e., the labels cannot fit on
the sheet).
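        Example
        -------
        Illustrative only; the dimensions below (in millimetres) are made up::

            spec = Specification(210, 297, 2, 8, 90, 30, corner_radius=2)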
"""
# Compulsory arguments.
self._sheet_width = Decimal(sheet_width)
self._sheet_height = Decimal(sheet_height)
self._columns = int(columns)
self._rows = int(rows)
self._label_width = Decimal(label_width)
self._label_height = Decimal(label_height)
# Optional arguments; missing ones will be computed later.
self._left_margin = kwargs.pop('left_margin', None)
self._column_gap = kwargs.pop('column_gap', None)
self._right_margin = kwargs.pop('right_margin', None)
self._top_margin = kwargs.pop('top_margin', None)
self._row_gap = kwargs.pop('row_gap', None)
self._bottom_margin = kwargs.pop('bottom_margin', None)
# Optional arguments with default values.
self._left_padding = kwargs.pop('left_padding', 0)
self._right_padding = kwargs.pop('right_padding', 0)
self._top_padding = kwargs.pop('top_padding', 0)
self._bottom_padding = kwargs.pop('bottom_padding', 0)
self._corner_radius = Decimal(kwargs.pop('corner_radius', 0))
self._padding_radius = Decimal(kwargs.pop('padding_radius', 0))
self._background_image = kwargs.pop('background_image', None)
self._background_filename = kwargs.pop('background_filename', None)
# Leftover arguments.
if kwargs:
args = kwargs.keys()
if len(args) == 1:
raise TypeError("Unknown keyword argument {}.".format(args[0]))
else:
raise TypeError("Unknown keyword arguments: {}.".format(', '.join(args)))
# Track which attributes have been automatically set.
self._autoset = set()
# Check all the dimensions etc are valid.
self._calculate()
def _calculate(self):
"""Checks the dimensions of the sheet are valid and consistent.
NB: this is called internally when needed; there should be no need for
user code to call it.
"""
# Check the dimensions are larger than zero.
for dimension in ('_sheet_width', '_sheet_height', '_columns', '_rows', '_label_width', '_label_height'):
if getattr(self, dimension) <= 0:
name = dimension.replace('_', ' ').strip().capitalize()
raise InvalidDimension("{0:s} must be greater than zero.".format(name))
# Check margins / gaps are not smaller than zero if given.
# At the same time, force the values to decimals.
for margin in ('_left_margin', '_column_gap', '_right_margin', '_top_margin', '_row_gap', '_bottom_margin',
'_left_padding', '_right_padding', '_top_padding', '_bottom_padding'):
val = getattr(self, margin)
if val is not None:
if margin in self._autoset:
val = None
else:
val = Decimal(val)
if val < 0:
name = margin.replace('_', ' ').strip().capitalize()
raise InvalidDimension("{0:s} cannot be less than zero.".format(name))
setattr(self, margin, val)
else:
self._autoset.add(margin)
# Check the corner radius.
if self._corner_radius < 0:
raise InvalidDimension("Corner radius cannot be less than zero.")
if self._corner_radius > (self._label_width / 2):
raise InvalidDimension("Corner radius cannot be more than half the label width.")
if self._corner_radius > (self._label_height / 2):
raise InvalidDimension("Corner radius cannot be more than half the label height.")
# If there is no padding, we don't need the padding radius.
if (self._left_padding + self._right_padding + self._top_padding + self._bottom_padding) == 0:
if self._padding_radius != 0:
raise InvalidDimension("Padding radius must be zero if there is no padding.")
else:
if (self._left_padding + self._right_padding) >= self._label_width:
raise InvalidDimension("Sum of horizontal padding must be less than the label width.")
if (self._top_padding + self._bottom_padding) >= self._label_height:
raise InvalidDimension("Sum of vertical padding must be less than the label height.")
if self._padding_radius < 0:
raise InvalidDimension("Padding radius cannot be less than zero.")
# Calculate the amount of spare space.
hspace = self._sheet_width - (self._label_width * self._columns)
vspace = self._sheet_height - (self._label_height * self._rows)
# Cannot fit.
if hspace < 0:
raise InvalidDimension("Labels are too wide to fit on the sheet.")
if vspace < 0:
raise InvalidDimension("Labels are too tall to fit on the sheet.")
# Process the horizontal margins / gaps.
hcount = 1 + self._columns
if self._left_margin is not None:
hspace -= self._left_margin
if hspace < 0:
raise InvalidDimension("Left margin is too wide for the labels to fit on the sheet.")
hcount -= 1
if self._column_gap is not None:
hspace -= ((self._columns - 1) * self._column_gap)
if hspace < 0:
raise InvalidDimension("Column gap is too wide for the labels to fit on the sheet.")
hcount -= (self._columns - 1)
if self._right_margin is not None:
hspace -= self._right_margin
if hspace < 0.01 and hspace > -0.01:
self._right_margin += hspace
hspace = 0
if hspace < 0:
raise InvalidDimension("Right margin is too wide for the labels to fit on the sheet.")
hcount -= 1
# Process the vertical margins / gaps.
vcount = 1 + self._rows
if self._top_margin is not None:
vspace -= self._top_margin
if vspace < 0:
raise InvalidDimension("Top margin is too tall for the labels to fit on the sheet.")
vcount -= 1
if self._row_gap is not None:
vspace -= ((self._rows - 1) * self._row_gap)
if vspace < 0:
raise InvalidDimension("Row gap is too tall for the labels to fit on the sheet.")
vcount -= (self._rows - 1)
if self._bottom_margin is not None:
vspace -= self._bottom_margin
if vspace < 0.01 and vspace > -0.01:
self._bottom_margin += vspace
vspace = 0
if vspace < 0:
raise InvalidDimension("Bottom margin is too tall for the labels to fit on the sheet.")
vcount -= 1
# If all the margins are specified, they must use up all available space.
if hcount == 0 and hspace != 0:
raise InvalidDimension("Not all width used by manually specified margins/gaps; {}mm left.".format(hspace))
if vcount == 0 and vspace != 0:
raise InvalidDimension("Not all height used by manually specified margins/gaps; {}mm left.".format(vspace))
# Split any extra horizontal space and allocate it.
if hcount:
auto_margin = hspace / hcount
for margin in ('_left_margin', '_column_gap', '_right_margin'):
if getattr(self, margin) is None:
setattr(self, margin, auto_margin)
# And allocate any extra vertical space.
if vcount:
auto_margin = vspace / vcount
for margin in ('_top_margin', '_row_gap', '_bottom_margin'):
if getattr(self, margin) is None:
setattr(self, margin, auto_margin)
def bounding_boxes(self, mode='fraction', output='dict'):
"""Get the bounding boxes of the labels on a page.
Parameters
----------
mode: 'fraction', 'actual'
If 'fraction', the bounding boxes are expressed as a fraction of the
height and width of the sheet. If 'actual', they are the actual
position of the labels in millimetres from the top-left of the
sheet.
output: 'dict', 'json'
If 'dict', a dictionary with label identifier tuples (row, column)
as keys and a dictionary with 'left', 'right', 'top', and 'bottom'
entries as the values.
If 'json', a JSON encoded string which represents a dictionary with
keys of the string format 'rowxcolumn' and each value being a
bounding box dictionary with 'left', 'right', 'top', and 'bottom'
entries.
Returns
-------
The bounding boxes in the format set by the output parameter.
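        Example
        -------
        Illustrative only, reusing the ``spec`` sketched in the class
        docstring::

            boxes = spec.bounding_boxes(mode='actual')
            top_left = boxes[(1, 1)]  # keys: 'top', 'bottom', 'left', 'right'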
"""
boxes = {}
# Check the parameters.
if mode not in ('fraction', 'actual'):
raise ValueError("Unknown mode {0}.".format(mode))
if output not in ('dict', 'json'):
raise ValueError("Unknown output {0}.".format(output))
# Iterate over the rows.
for row in range(1, self.rows + 1):
# Top and bottom of all labels in the row.
top = self.top_margin + ((row - 1) * (self.label_height + self.row_gap))
bottom = top + self.label_height
# Now iterate over all columns in this row.
for column in range(1, self.columns + 1):
# Left and right position of this column.
left = self.left_margin + ((column - 1) * (self.label_width + self.column_gap))
right = left + self.label_width
# Output in the appropriate mode format.
if mode == 'fraction':
box = {
'top': top / self.sheet_height,
'bottom': bottom / self.sheet_height,
'left': left / self.sheet_width,
'right': right / self.sheet_width,
}
elif mode == 'actual':
box = {'top': top, 'bottom': bottom, 'left': left, 'right': right}
# Add to the collection.
if output == 'json':
boxes['{0:d}x{1:d}'.format(row, column)] = box
box['top'] = float(box['top'])
box['bottom'] = float(box['bottom'])
box['left'] = float(box['left'])
box['right'] = float(box['right'])
else:
boxes[(row, column)] = box
# Done.
if output == 'json':
return json.dumps(boxes)
return boxes
# Helper function to create an accessor for one of the properties.
# attr is the 'internal' attribute e.g., _sheet_width.
def create_accessor(attr, deletable=False):
# Getter is simple; no processing needed.
@property
def accessor(self):
return getattr(self, attr)
# Setter is more complicated.
@accessor.setter
def accessor(self, value):
# Store the original value in case we need to reset.
original = getattr(self, attr)
# If this was originally autoset or not.
was_autoset = attr in self._autoset
# Discard this attribute from the autoset list.
self._autoset.discard(attr)
# Set the value and see if it is valid.
setattr(self, attr, value)
try:
self._calculate()
except:
# Reset to the original state.
setattr(self, attr, original)
if was_autoset:
self._autoset.add(attr)
# Let the error propogate up.
raise
# Create a deleter if allowable.
if deletable:
@accessor.deleter
def accessor(self):
self._autoset.add(attr)
setattr(self, attr, None)
self._calculate()
# And we now have our accessor.
return accessor
# Create accessors for all our properties.
sheet_width = create_accessor('_sheet_width')
sheet_height = create_accessor('_sheet_height')
label_width = create_accessor('_label_width')
label_height = create_accessor('_label_height')
columns = create_accessor('_columns')
rows = create_accessor('_rows')
left_margin = create_accessor('_left_margin', deletable=True)
column_gap = create_accessor('_column_gap', deletable=True)
right_margin = create_accessor('_right_margin', deletable=True)
top_margin = create_accessor('_top_margin', deletable=True)
row_gap = create_accessor('_row_gap', deletable=True)
bottom_margin = create_accessor('_bottom_margin', deletable=True)
corner_radius = create_accessor('_corner_radius')
padding_radius = create_accessor('_padding_radius')
background_image = create_accessor('_background_image', deletable=True)
background_filename = create_accessor('_background_filename', deletable=True)
left_padding = create_accessor('_left_padding', deletable=True)
right_padding = create_accessor('_right_padding', deletable=True)
top_padding = create_accessor('_top_padding', deletable=True)
bottom_padding = create_accessor('_bottom_padding', deletable=True)
# Don't need the helper function any more.
del create_accessor<|fim▁end|> | """Raised when a sheet specification has inconsistent dimensions. """
pass
|
<|file_name|>advent16.rs<|end_file_name|><|fim▁begin|>// advent16.rs
// find Aunt Sue
extern crate pcre;
use std::io;
fn main() {
loop {
let mut input = String::new();
let result = io::stdin().read_line(&mut input);
match result {
Ok(byte_count) => if byte_count == 0 { break; },
Err(_) => {
println!("error reading from stdin");
break;
}
}
if does_sue_match(&input) {
println!("Found match: {}", input.trim());
}
if does_sue_match2(&input) {
println!("Found match part 2: {}", input.trim());
}
}
}
fn does_sue_match(s: &str) -> bool {
check_match_equal(s, "children", 3)
&& check_match_equal(s, "cats", 7)
&& check_match_equal(s, "samoyeds", 2)
&& check_match_equal(s, "pomeranians", 3)
&& check_match_equal(s, "akitas", 0)
&& check_match_equal(s, "vizslas", 0)
&& check_match_equal(s, "goldfish", 5)
&& check_match_equal(s, "trees", 3)
&& check_match_equal(s, "cars", 2)
&& check_match_equal(s, "perfumes", 1)
}
<|fim▁hole|>fn check_match_equal(s: &str, property: &str, value: u32) -> bool {
check_match(s, property, value, |x,y| x == y)
}
// returns false if the property exists and fails the predicate
// returns true if the property doesn't exist, or if it exists and satisfies it
fn check_match<F>(s: &str, property: &str, value: u32, f: F) -> bool
where F: Fn(u32, u32) -> bool {
let mut re = pcre::Pcre::compile(&format!("{}: (\\d+)", property)).unwrap();
if let Some(m) = re.exec(s) {
assert!(m.string_count() > 1);
f(m.group(1).parse::<u32>().unwrap(), value)
} else {
// property doesn't exist
true
}
}
#[test]
fn test_check_match_equal() {
let s = "junk, foo: 4, bar: 5";
assert!(check_match_equal(s, "foo", 4));
assert!(check_match_equal(s, "bar", 5));
assert!(!check_match_equal(s, "foo", 3));
assert!(check_match_equal(s, "string that isn't even there", 5));
}
// part 2
fn check_match_less(s: &str, property: &str, value: u32) -> bool {
check_match(s, property, value, |x,y| x < y)
}
fn check_match_greater(s: &str, property: &str, value: u32) -> bool {
check_match(s, property, value, |x,y| x > y)
}
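// Illustrative companion to the check_match_equal test above, exercising the
// part-2 predicates on the same sample string.
#[test]
fn test_check_match_less_greater() {
    let s = "junk, foo: 4, bar: 5";
    assert!(check_match_less(s, "foo", 5));
    assert!(!check_match_greater(s, "foo", 4));
    assert!(check_match_greater(s, "bar", 4));
}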
fn does_sue_match2(s: &str) -> bool {
check_match_equal(s, "children", 3)
&& check_match_greater(s, "cats", 7)
&& check_match_equal(s, "samoyeds", 2)
&& check_match_less(s, "pomeranians", 3)
&& check_match_equal(s, "akitas", 0)
&& check_match_equal(s, "vizslas", 0)
&& check_match_less(s, "goldfish", 5)
&& check_match_greater(s, "trees", 3)
&& check_match_equal(s, "cars", 2)
&& check_match_equal(s, "perfumes", 1)
}<|fim▁end|> | |
<|file_name|>task_execution_time.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
from telemetry.util import statistics
from telemetry.value import scalar
class TaskExecutionTime(legacy_page_test.LegacyPageTest):
IDLE_SECTION_TRIGGER = 'SingleThreadIdleTaskRunner::RunTask'
IDLE_SECTION = 'IDLE'
NORMAL_SECTION = 'NORMAL'
_TIME_OUT_IN_SECONDS = 60
_NUMBER_OF_RESULTS_TO_DISPLAY = 10
_BROWSER_THREADS = ['Chrome_ChildIOThread',
'Chrome_IOThread']
_RENDERER_THREADS = ['Chrome_ChildIOThread',
'Chrome_IOThread',
'CrRendererMain']
_CATEGORIES = ['benchmark',
'blink',
'blink.console',
'blink_gc',
'cc',
'gpu',
'ipc',
'renderer.scheduler',
'toplevel',
'v8',
'webkit.console']
def __init__(self):
super(TaskExecutionTime, self).__init__()
self._renderer_process = None
self._browser_process = None
self._results = None
def WillNavigateToPage(self, page, tab):
config = tracing_config.TracingConfig()
for category in self._CATEGORIES:
config.tracing_category_filter.AddIncludedCategory(category)
config.enable_chrome_trace = True
tab.browser.platform.tracing_controller.StartTracing(
config, self._TIME_OUT_IN_SECONDS)
def ValidateAndMeasurePage(self, page, tab, results):
trace_data = tab.browser.platform.tracing_controller.StopTracing()
timeline_model = TimelineModel(trace_data)
self._renderer_process = timeline_model.GetRendererProcessFromTabId(tab.id)
self._browser_process = timeline_model.browser_process
self._AddResults(results)
def _AddResults(self, results):
self._results = results
for thread in self._BROWSER_THREADS:
self._AddTasksFromThreadToResults(self._browser_process, thread)
for thread in self._RENDERER_THREADS:
self._AddTasksFromThreadToResults(self._renderer_process, thread)
def _AddTasksFromThreadToResults(self, process, thread_name):
if process is None:
return
sections = TaskExecutionTime._GetSectionsForThread(process, thread_name)
self._ReportSectionPercentages(sections.values(),
'%s:%s' % (process.name, thread_name))
# Create list with top |_NUMBER_OF_RESULTS_TO_DISPLAY| for each section.
for section in sections.itervalues():
if section.name == TaskExecutionTime.IDLE_SECTION:
# Skip sections we don't report.
continue
self._AddSlowestTasksToResults(section.tasks.values())
def _AddSlowestTasksToResults(self, tasks):
sorted_tasks = sorted(
tasks,
key=lambda slice: slice.median_self_duration,
reverse=True)
for task in sorted_tasks[:self.GetExpectedResultCount()]:
self._results.AddValue(scalar.ScalarValue(
self._results.current_page,
task.name,
'ms',
task.median_self_duration,
description='Slowest tasks'))
def _ReportSectionPercentages(self, section_values, metric_prefix):
    all_sections_total_duration = sum(
section.total_duration for section in section_values)
    if not all_sections_total_duration:
# Nothing was recorded, so early out.
return
for section in section_values:
section_name = section.name or TaskExecutionTime.NORMAL_SECTION
section_percentage_of_total = (
          (section.total_duration * 100.0) / all_sections_total_duration)
self._results.AddValue(scalar.ScalarValue(
self._results.current_page,
'%s:Section_%s' % (metric_prefix, section_name),
'%',
section_percentage_of_total,
description='Idle task percentage'))
@staticmethod
def _GetSectionsForThread(process, target_thread):
sections = {}
for thread in process.threads.itervalues():
if thread.name != target_thread:
continue
for task_slice in thread.IterAllSlices():
_ProcessTasksForThread(
sections,
'%s:%s' % (process.name, thread.name),
task_slice)
return sections
@staticmethod
def GetExpectedResultCount():
return TaskExecutionTime._NUMBER_OF_RESULTS_TO_DISPLAY
def _ProcessTasksForThread(
sections,
thread_name,
task_slice,
section_name=None):
if task_slice.self_thread_time is None:
# Early out if this slice is a TRACE_EVENT_INSTANT, as it has no duration.
return
# Note: By setting a different section below we split off this task into
  # a different sorting bucket. To add extra granularity (e.g. tasks executed
# during page loading) add logic to set a different section name here. The
# section name is set before the slice's data is recorded so the triggering
# event will be included in its own section (i.e. the idle trigger will be
# recorded as an idle event).
if task_slice.name == TaskExecutionTime.IDLE_SECTION_TRIGGER:
section_name = TaskExecutionTime.IDLE_SECTION
# Add the thread name and section (e.g. 'Idle') to the test name
# so it is human-readable.
reported_name = thread_name + ':'
if section_name:
reported_name += section_name + ':'
if 'src_func' in task_slice.args:
# Data contains the name of the timed function, use it as the name.
reported_name += task_slice.args['src_func']
elif 'line' in task_slice.args:
# Data contains IPC class and line numbers, use these as the name.
reported_name += 'IPC_Class_' + str(task_slice.args['class'])
reported_name += ':Line_' + str(task_slice.args['line'])
else:
# Fall back to use the name of the task slice.
reported_name += task_slice.name.lower()
# Replace any '.'s with '_'s as V8 uses them and it confuses the dashboard.
reported_name = reported_name.replace('.', '_')
# If this task is in a new section create a section object and add it to the
# section dictionary.
if section_name not in sections:
sections[section_name] = Section(section_name)
sections[section_name].AddTask(reported_name, task_slice.self_thread_time)
# Process sub slices recursively, passing the current section down.
for sub_slice in task_slice.sub_slices:
_ProcessTasksForThread(
sections,
thread_name,
sub_slice,
section_name)
class NameAndDurations(object):
def __init__(self, name, self_duration):
self.name = name
self.self_durations = [self_duration]
def Update(self, self_duration):
self.self_durations.append(self_duration)
@property
def median_self_duration(self):
return statistics.Median(self.self_durations)
class Section(object):
def __init__(self, name):
# A section holds a dictionary, keyed on task name, of all the tasks that<|fim▁hole|> # exist within it and the total duration of those tasks.
self.name = name
self.tasks = {}
self.total_duration = 0
def AddTask(self, name, duration):
if name in self.tasks:
# section_tasks already contains an entry for this (e.g. from an earlier
# slice), add the new duration so we can calculate a median value later.
self.tasks[name].Update(duration)
else:
# This is a new task so create a new entry for it.
self.tasks[name] = NameAndDurations(name, duration)
# Accumulate total duration for all tasks in this section.
self.total_duration += duration<|fim▁end|> | |
<|file_name|>todo.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { Todo } from './todo.model';
import { TodoService } from './todo.service';
@Component({
selector: 'app-todo',
templateUrl: './todo.component.html',
styleUrls: ['./todo.component.css'],
providers: [TodoService]
})
export class TodoComponent implements OnInit {
todos: Todo[] = [];
desc = '';
constructor(private service: TodoService) { }
ngOnInit() {
}
<|fim▁hole|> this.service.addTodo(this.desc).then(todo => {
this.todos = [...this.todos, todo];
this.desc = ''
});
}
toggleTodo(todo: Todo) {
const i = this.todos.indexOf(todo);
this.service
.toggleTodo(todo)
.then(t => {
this.todos = [
...this.todos.slice(0,i),
t,
...this.todos.slice(i+1)
];
});
}
removeTodo(todo: Todo) {
const i = this.todos.indexOf(todo);
this.service
.deleteTodoById(todo.id)
.then(()=> {
this.todos = [
...this.todos.slice(0,i),
...this.todos.slice(i+1)
];
});
}
getTodos(): void {
this.service
.getTodos()
.then(todos => this.todos = [...todos]);
}
}<|fim▁end|> | addTodo() { |
<|file_name|>wysiwyg_utils.js<|end_file_name|><|fim▁begin|>/** @odoo-module **/
export function isImg(node) {
return (node && (node.nodeName === "IMG" || (node.className && node.className.match(/(^|\s)(media_iframe_video|o_image|fa)(\s|$)/i))));
}
<|fim▁hole|>/**
* Returns a list of all the ancestors nodes of the provided node.
*
* @param {Node} node
* @param {Node} [stopElement] include to prevent bubbling up further than the stopElement.
* @returns {HTMLElement[]}
*/
export function ancestors(node, stopElement) {
if (!node || !node.parentElement || node === stopElement) return [];
return [node.parentElement, ...ancestors(node.parentElement, stopElement)];
}<|fim▁end|> | |
<|file_name|>config_utils.py<|end_file_name|><|fim▁begin|>import logging
import emission.storage.timeseries.abstract_timeseries as esta
import emission.core.wrapper.entry as ecwe
def get_last_entry(user_id, time_query, config_key):
user_ts = esta.TimeSeries.get_time_series(user_id)
# get the list of overrides for this time range. This should be non zero
# only if there has been an override since the last run, which needs to be<|fim▁hole|> logging.debug("Found %d user overrides for user %s" % (len(config_overrides), user_id))
if len(config_overrides) == 0:
logging.warning("No user defined overrides for %s, early return" % user_id)
return (None, None)
else:
# entries are sorted by the write_ts, we can take the last value
coe = ecwe.Entry(config_overrides[-1])
logging.debug("last entry is %s" % coe)
return (coe.data, coe.metadata.write_ts)<|fim▁end|> | # saved back into the cache.
config_overrides = list(user_ts.find_entries([config_key], time_query)) |
<|file_name|>create_role.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package create
import (
"context"
"fmt"
"strings"
"github.com/spf13/cobra"
rbacv1 "k8s.io/api/rbac/v1"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/cli-runtime/pkg/resource"
clientgorbacv1 "k8s.io/client-go/kubernetes/typed/rbac/v1"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"k8s.io/kubectl/pkg/scheme"
"k8s.io/kubectl/pkg/util/i18n"
"k8s.io/kubectl/pkg/util/templates"
)
var (
roleLong = templates.LongDesc(i18n.T(`
Create a role with single rule.`))
roleExample = templates.Examples(i18n.T(`
# Create a Role named "pod-reader" that allows user to perform "get", "watch" and "list" on pods
kubectl create role pod-reader --verb=get --verb=list --verb=watch --resource=pods
# Create a Role named "pod-reader" with ResourceName specified
kubectl create role pod-reader --verb=get --resource=pods --resource-name=readablepod --resource-name=anotherpod
# Create a Role named "foo" with API Group specified
kubectl create role foo --verb=get,list,watch --resource=rs.extensions
# Create a Role named "foo" with SubResource specified
kubectl create role foo --verb=get,list,watch --resource=pods,pods/status`))
// Valid resource verb list for validation.
validResourceVerbs = []string{"*", "get", "delete", "list", "create", "update", "patch", "watch", "proxy", "deletecollection", "use", "bind", "escalate", "impersonate"}
// Specialized verbs and GroupResources
specialVerbs = map[string][]schema.GroupResource{
"use": {
{
Group: "policy",
Resource: "podsecuritypolicies",
},
{
Group: "extensions",
Resource: "podsecuritypolicies",
},
},
"bind": {
{
Group: "rbac.authorization.k8s.io",
Resource: "roles",
},
{
Group: "rbac.authorization.k8s.io",
Resource: "clusterroles",
},
},
"escalate": {
{
Group: "rbac.authorization.k8s.io",
Resource: "roles",
},
{
Group: "rbac.authorization.k8s.io",
Resource: "clusterroles",
},
},
"impersonate": {
{
Group: "",
Resource: "users",
},
{
Group: "",
Resource: "serviceaccounts",
},
{
Group: "",
Resource: "groups",
},
{
Group: "authentication.k8s.io",
Resource: "userextras",
},
},
}
)
// ResourceOptions holds the related options for '--resource' option
type ResourceOptions struct {
Group string
Resource string
SubResource string
}
// CreateRoleOptions holds the options for 'create role' sub command
type CreateRoleOptions struct {
PrintFlags *genericclioptions.PrintFlags
Name string
Verbs []string
Resources []ResourceOptions
ResourceNames []string
DryRunStrategy cmdutil.DryRunStrategy
DryRunVerifier *resource.DryRunVerifier
OutputFormat string
Namespace string
Client clientgorbacv1.RbacV1Interface
Mapper meta.RESTMapper
PrintObj func(obj runtime.Object) error
genericclioptions.IOStreams
}
// NewCreateRoleOptions returns an initialized CreateRoleOptions instance
func NewCreateRoleOptions(ioStreams genericclioptions.IOStreams) *CreateRoleOptions {
return &CreateRoleOptions{
PrintFlags: genericclioptions.NewPrintFlags("created").WithTypeSetter(scheme.Scheme),
IOStreams: ioStreams,
}
}
// NewCmdCreateRole returnns an initialized Command instance for 'create role' sub command
func NewCmdCreateRole(f cmdutil.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command {
o := NewCreateRoleOptions(ioStreams)
cmd := &cobra.Command{
Use: "role NAME --verb=verb --resource=resource.group/subresource [--resource-name=resourcename] [--dry-run=server|client|none]",
DisableFlagsInUseLine: true,
        Short:                 i18n.T("Create a role with single rule"),
Long: roleLong,
Example: roleExample,
Run: func(cmd *cobra.Command, args []string) {
cmdutil.CheckErr(o.Complete(f, cmd, args))
cmdutil.CheckErr(o.Validate())
cmdutil.CheckErr(o.RunCreateRole())
},
}
o.PrintFlags.AddFlags(cmd)
cmdutil.AddApplyAnnotationFlags(cmd)
cmdutil.AddValidateFlags(cmd)
cmdutil.AddDryRunFlag(cmd)
cmd.Flags().StringSliceVar(&o.Verbs, "verb", o.Verbs, "Verb that applies to the resources contained in the rule")
cmd.Flags().StringSlice("resource", []string{}, "Resource that the rule applies to")
cmd.Flags().StringArrayVar(&o.ResourceNames, "resource-name", o.ResourceNames, "Resource in the white list that the rule applies to, repeat this flag for multiple items")
return cmd
}
// Complete completes all the required options
func (o *CreateRoleOptions) Complete(f cmdutil.Factory, cmd *cobra.Command, args []string) error {
name, err := NameFromCommandArgs(cmd, args)
if err != nil {
return err
}
o.Name = name
// Remove duplicate verbs.
verbs := []string{}
for _, v := range o.Verbs {
// VerbAll respresents all kinds of verbs.
if v == "*" {
verbs = []string{"*"}
break
}
if !arrayContains(verbs, v) {
verbs = append(verbs, v)
}
}
o.Verbs = verbs
// Support resource.group pattern. If no API Group specified, use "" as core API Group.
// e.g. --resource=pods,deployments.extensions
resources := cmdutil.GetFlagStringSlice(cmd, "resource")
for _, r := range resources {
sections := strings.SplitN(r, "/", 2)
resource := &ResourceOptions{}
if len(sections) == 2 {
resource.SubResource = sections[1]
}
parts := strings.SplitN(sections[0], ".", 2)
if len(parts) == 2 {
resource.Group = parts[1]
}
resource.Resource = parts[0]
if resource.Resource == "*" && len(parts) == 1 && len(sections) == 1 {
o.Resources = []ResourceOptions{*resource}
break
}
o.Resources = append(o.Resources, *resource)
}
// Remove duplicate resource names.
resourceNames := []string{}
for _, n := range o.ResourceNames {
if !arrayContains(resourceNames, n) {
resourceNames = append(resourceNames, n)
}
}
o.ResourceNames = resourceNames
// Complete other options for Run.
o.Mapper, err = f.ToRESTMapper()
if err != nil {
return err
}
o.DryRunStrategy, err = cmdutil.GetDryRunStrategy(cmd)
if err != nil {
return err
}
dynamicClient, err := f.DynamicClient()
if err != nil {
return err
}
discoveryClient, err := f.ToDiscoveryClient()
if err != nil {
return err
}
o.DryRunVerifier = resource.NewDryRunVerifier(dynamicClient, discoveryClient)
o.OutputFormat = cmdutil.GetFlagString(cmd, "output")
cmdutil.PrintFlagsWithDryRunStrategy(o.PrintFlags, o.DryRunStrategy)
printer, err := o.PrintFlags.ToPrinter()
if err != nil {
return err
}
o.PrintObj = func(obj runtime.Object) error {
return printer.PrintObj(obj, o.Out)
}
o.Namespace, _, err = f.ToRawKubeConfigLoader().Namespace()
if err != nil {
return err
}
clientset, err := f.KubernetesClientSet()
if err != nil {
return err
}
o.Client = clientset.RbacV1()
return nil
}
// Validate makes sure there is no discrepancy in provided option values
func (o *CreateRoleOptions) Validate() error {
if o.Name == "" {
return fmt.Errorf("name must be specified")
}
// validate verbs.
if len(o.Verbs) == 0 {
return fmt.Errorf("at least one verb must be specified")
}
for _, v := range o.Verbs {
if !arrayContains(validResourceVerbs, v) {
return fmt.Errorf("invalid verb: '%s'", v)
}
}
// validate resources.
if len(o.Resources) == 0 {
return fmt.Errorf("at least one resource must be specified")
}
return o.validateResource()
}
func (o *CreateRoleOptions) validateResource() error {
for _, r := range o.Resources {
if len(r.Resource) == 0 {
return fmt.Errorf("resource must be specified if apiGroup/subresource specified")
}
if r.Resource == "*" {
return nil
}
resource := schema.GroupVersionResource{Resource: r.Resource, Group: r.Group}
groupVersionResource, err := o.Mapper.ResourceFor(schema.GroupVersionResource{Resource: r.Resource, Group: r.Group})
if err == nil {
resource = groupVersionResource
}
for _, v := range o.Verbs {
if groupResources, ok := specialVerbs[v]; ok {<|fim▁hole|> err = nil
break
}
}
if !match {
return fmt.Errorf("can not perform '%s' on '%s' in group '%s'", v, resource.Resource, resource.Group)
}
}
}
if err != nil {
return err
}
}
return nil
}
// RunCreateRole performs the execution of 'create role' sub command
func (o *CreateRoleOptions) RunCreateRole() error {
role := &rbacv1.Role{
// this is ok because we know exactly how we want to be serialized
TypeMeta: metav1.TypeMeta{APIVersion: rbacv1.SchemeGroupVersion.String(), Kind: "Role"},
}
role.Name = o.Name
rules, err := generateResourcePolicyRules(o.Mapper, o.Verbs, o.Resources, o.ResourceNames, []string{})
if err != nil {
return err
}
role.Rules = rules
// Create role.
if o.DryRunStrategy != cmdutil.DryRunClient {
createOptions := metav1.CreateOptions{}
if o.DryRunStrategy == cmdutil.DryRunServer {
if err := o.DryRunVerifier.HasSupport(role.GroupVersionKind()); err != nil {
return err
}
createOptions.DryRun = []string{metav1.DryRunAll}
}
role, err = o.Client.Roles(o.Namespace).Create(context.TODO(), role, createOptions)
if err != nil {
return err
}
}
return o.PrintObj(role)
}
func arrayContains(s []string, e string) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
}
func generateResourcePolicyRules(mapper meta.RESTMapper, verbs []string, resources []ResourceOptions, resourceNames []string, nonResourceURLs []string) ([]rbacv1.PolicyRule, error) {
    // groupResourceMapping is an apigroup-resource map. The key of this map is api group, while the value
// is a string array of resources under this api group.
// E.g. groupResourceMapping = {"extensions": ["replicasets", "deployments"], "batch":["jobs"]}
groupResourceMapping := map[string][]string{}
// This loop does the following work:
// 1. Constructs groupResourceMapping based on input resources.
// 2. Prevents pointing to non-existent resources.
// 3. Transfers resource short name to long name. E.g. rs.extensions is transferred to replicasets.extensions
for _, r := range resources {
resource := schema.GroupVersionResource{Resource: r.Resource, Group: r.Group}
groupVersionResource, err := mapper.ResourceFor(schema.GroupVersionResource{Resource: r.Resource, Group: r.Group})
if err == nil {
resource = groupVersionResource
}
if len(r.SubResource) > 0 {
resource.Resource = resource.Resource + "/" + r.SubResource
}
if !arrayContains(groupResourceMapping[resource.Group], resource.Resource) {
groupResourceMapping[resource.Group] = append(groupResourceMapping[resource.Group], resource.Resource)
}
}
// Create separate rule for each of the api group.
rules := []rbacv1.PolicyRule{}
for _, g := range sets.StringKeySet(groupResourceMapping).List() {
rule := rbacv1.PolicyRule{}
rule.Verbs = verbs
rule.Resources = groupResourceMapping[g]
rule.APIGroups = []string{g}
rule.ResourceNames = resourceNames
rules = append(rules, rule)
}
if len(nonResourceURLs) > 0 {
rule := rbacv1.PolicyRule{}
rule.Verbs = verbs
rule.NonResourceURLs = nonResourceURLs
rules = append(rules, rule)
}
return rules, nil
}<|fim▁end|> | match := false
for _, extra := range groupResources {
if resource.Resource == extra.Resource && resource.Group == extra.Group {
match = true |
<|file_name|>env.rs<|end_file_name|><|fim▁begin|>use std::ops::{Deref, DerefMut};
use std::collections::HashMap;
use std::iter::FromIterator;
use std::cell::RefCell;
use rand::IsaacRng;
use rand::distributions::{Range, Sample};
use common_util::IntType;<|fim▁hole|>
/// The Roller runtime environment. Stores the variable and function namespaces, the function call_stack, and the random number generator.
pub struct RollerEnv {
/// The global namespace for functions
fun_ns: HashMap<Ident, RollerFun>,
/// The global namespace for variables
var_ns: HashMap<Ident, Value>,
/// The callstack for the functions.
/// Stores the temporary variables of the functions.
call_stack: RefCell<Vec<HashMap<Ident, Value>>>,
/// How many function calls can be in the callstack
max_call_depth: usize,
/// The random number generator
rng: RefCell<IsaacRng>,
}
pub enum NameInfo {
Var,
Fun,
Empty,
}
impl RollerEnv {
/// Creates a new empty runtime environment
pub fn new(max_call_depth: usize) -> RollerEnv {
RollerEnv {
fun_ns: HashMap::new(),
var_ns: HashMap::new(),
call_stack: RefCell::new(Vec::new()),
rng: RefCell::new(IsaacRng::new_unseeded()),
max_call_depth: max_call_depth,
}
}
/// Clears the function and variable namespaces.
pub fn clear(&mut self) {
*self = RollerEnv {
fun_ns: HashMap::new(),
var_ns: HashMap::new(),
call_stack: RefCell::new(Vec::new()),
rng: RefCell::new(IsaacRng::new_unseeded()),
max_call_depth: self.max_call_depth,
}
}
#[allow(dead_code)] // TODO: remove when used
pub fn set_max_call_depth(&mut self, new_depth: usize) {
self.max_call_depth = new_depth;
}
/// Sets a variable with name id to value.
/// If there were a variable or function with same name, it will be replaced.
pub fn assign_var(&mut self, id: &Ident, value: Value) {
self.fun_ns.remove(id);
self.var_ns.insert(id.to_owned(), value);
}
/// Declares a function with the name id.
/// If there were a variable or function with same name, it will be replaced.
pub fn declare_function(&mut self, id: &Ident, body: &RollerFun) {
self.var_ns.remove(id);
self.fun_ns.insert(id.to_owned(), body.clone());
}
/// Deletes a function or variable with the given name.
/// Returns the type of the deleted identifier
pub fn delete_id(&mut self, id: &Ident) -> ParseResult<NameInfo> {
match self.var_ns.remove(id) {
Some(_) => Ok(NameInfo::Var),
// no variable found, try to delete a function
None => match self.fun_ns.remove(id) {
Some(_) => Ok(NameInfo::Fun),
            None => Err(RollerErr::EvalError(EvalErr::NoIdFound(id.to_owned()))),
},
}
}
/// Tells if there is a variable, function or nothing with that name.
#[allow(dead_code)] // TODO: remove when used
pub fn get_name_info(&self, id: Ident) -> NameInfo {
if let Some(_) = self.var_ns.get(&id) {
NameInfo::Var
}
else if let Some(_) = self.fun_ns.get(&id) {
NameInfo::Fun
}
else {
NameInfo::Empty
}
}
/// Returns the value of the variable with the given identifier.
pub fn get_var<'a>(&'a self, id: &Ident) -> ParseResult<Value> {
// check the last element of the call stack
if let Some(ref hm) = self.call_stack.borrow().deref().last() {
if let Some(ref val) = hm.get(id) {
// check if we found the variable in the stack as a function argument
return Ok((*val).clone());
}
}
// if we didn't find the variable from the stack, check the global space
if let Some(ref val) = self.var_ns.get(id) {
return Ok((*val).clone());
}
// didn't find the variable from either of the namespaces
        Err(RollerErr::EvalError(EvalErr::NoVarFound(id.clone())))
}
/// Calls the function with the given identifier with the given arguments.
/// Returns an error if no such function was found, if the number of parameters was wrong, if the maximum function call depth was reached or if the evaluation of the function's body failed.
/// Calls the eval_expr function to evaluate the function.
pub fn call_fun(&self, id: &Ident, args: Vec<Value>) -> ParseResult<Value> {
match self.fun_ns.get(id) {
Some(ref fun) => {
if self.call_stack.borrow().deref().len() > self.max_call_depth {
return Err(RollerErr::EvalError(EvalErr::ReachedMaxCallDepth));
}
                // the function's local namespace
let local_ns = try!(Self::ns_from_args(&fun.params, args));
// add the function's local namespace
self.call_stack.borrow_mut().deref_mut().push(local_ns);
// evaluate the function body
let to_return = eval_expr(&fun.body, self);
// remove the call stack namespace. IMPORTANT
self.call_stack.borrow_mut().deref_mut().pop();
// return the output value
to_return
},
            None => Err(RollerErr::EvalError(EvalErr::NoFunFound(id.to_owned()))),
}
}
fn ns_from_args(names: &Vec<Ident>, args: Vec<Value>) -> ParseResult<HashMap<Ident, Value>>
{
        // check whether the lengths match
if names.len() != args.len() {
return Err(RollerErr::EvalError(
EvalErr::WrongNumParams{expected: names.len(), found: args.len()}
));
}
// ok they do, use iterator magic to add them
Ok(
HashMap::from_iter(
names.iter()
.cloned()
.zip(args.into_iter())
)
)
}
pub fn get_roll(&self, amount: IntType, sides: IntType) -> Vec<IntType> {
let mut distr = Range::new(1, sides+1);
let mut to_return = Vec::with_capacity(amount as usize);
for _ in 1..amount+1 {
to_return.push(distr.sample(&mut *self.rng.borrow_mut()) )
}
to_return
}
}<|fim▁end|> | use syntax_tree::*;
use eval::types::*;
use eval::eval_functions::eval_expr;
use error::*; |
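// Illustrative standalone sketch (not part of the original crate): the same
// zip-into-HashMap pattern that `ns_from_args` uses above, shown with plain
// `String` keys and `i64` values in place of the crate's `Ident`/`Value` types.
use std::collections::HashMap;
use std::iter::FromIterator;

fn bind_args(names: &[String], args: Vec<i64>) -> Option<HashMap<String, i64>> {
    // Mirror the length check performed before zipping.
    if names.len() != args.len() {
        return None;
    }
    Some(HashMap::from_iter(names.iter().cloned().zip(args.into_iter())))
}

fn main() {
    let names = vec!["x".to_string(), "y".to_string()];
    let ns = bind_args(&names, vec![1, 2]).expect("lengths match");
    assert_eq!(ns["x"], 1);
    assert_eq!(ns["y"], 2);
}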
<|file_name|>derp.pipe.ts<|end_file_name|><|fim▁begin|>import { Pipe, PipeTransform } from '@angular/core';
/*
# Description:
Repackages an array subset as a new array.
**Reasoning:**
Angular2's change checker freaks out when you ngFor an array that's a subset
of a larger data structure. Please read: https://github.com/angular/angular/issues/6392.
# Usage:
```
<div *ng-for="#value of arrayOfObjects | derp"> </div>
```
*/
@Pipe({
name: 'derp',
pure: false<|fim▁hole|>export class DerpPipe implements PipeTransform {
transform(value, args) {
if (Array.isArray(value)) {
return Array.from(value);
} else {
return [value];
}
}
}<|fim▁end|> | }) |
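// Illustrative sketch (not from the original repo): DerpPipe has no
// dependencies, so it can be exercised directly outside Angular's DI.
const pipe = new DerpPipe();
console.log(pipe.transform([1, 2, 3], undefined)); // fresh array: [1, 2, 3]
console.log(pipe.transform('scalar', undefined));  // wrapped: ['scalar']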
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>'use strict';
var gulp = require('gulp');
var autoprefixer = require('autoprefixer-stylus');
var browserSync = require('browser-sync').create();
var changed = require('gulp-changed');
var concat = require('gulp-concat');
var cssbeautify = require('gulp-cssbeautify');
var csscomb = require('gulp-csscomb');
var csso = require('gulp-csso');
var data = require('gulp-data');
var del = require('del');
var filter = require('gulp-filter');
var flatten = require('gulp-flatten');
var gulpZip = require('gulp-zip');
var gulpif = require('gulp-if');
var gutil = require('gulp-util');
var htmlPrettify = require('gulp-prettify');
var imagemin = require('gulp-imagemin');
var imageminPngquant = require('imagemin-pngquant');
var imageminSvgo = require('imagemin-svgo');
var include = require('gulp-include');
var jade = require('gulp-jade');
var jadeInheritance = require('gulp-jade-inheritance');
var path = require('path');
var plumber = require('gulp-plumber');
var rename = require('gulp-rename');
var runSequence = require('run-sequence');
var rupture = require('rupture');
var stylus = require('gulp-stylus');
var svgSymbols = require('gulp-svg-symbols');
var uglify = require('gulp-uglify');
var watch = require('gulp-watch');
var gcmq = require('gulp-combine-mq');
// Error handler for gulp-plumber
var errorHandler = function (err) {
gutil.log([(err.name + ' in ' + err.plugin).bold.red, '', err.message, ''].join('\n'));
if (gutil.env.beep) {
gutil.beep();
}
this.emit('end');
};
// Print object in console
var debugObj = function (obj) {
var util = require('util');
console.log(util.inspect(obj, {showHidden: false, depth: null}));
};
// Read file and return object
var getData = function getData (file) {
var dataEntry;
var data;
var dataTmp;
var fs = require('fs');
try {
dataEntry = fs.readFileSync(file, 'utf8');
} catch (er) {
dataEntry = false;
}
dataTmp = '{' + dataEntry + '}';
if (dataEntry) {
// eval('data = {' + dataEntry + '}');
data = JSON.parse(dataTmp);
} else {
    data = {};
}
return data;
};
var correctNumber = function correctNumber(number) {
return number < 10 ? '0' + number : number;
};
// Return timestamp
var getDateTime = function getDateTime() {
var now = new Date();
var year = now.getFullYear();
var month = correctNumber(now.getMonth() + 1);
var day = correctNumber(now.getDate());
var hours = correctNumber(now.getHours());
var minutes = correctNumber(now.getMinutes());
return year + '-' + month + '-' + day + '-' + hours + minutes;
};
// Plugins options
var options = {
del: [
'dest',
'tmp'
],
plumber: {
errorHandler: errorHandler
},
browserSync: {
server: {
baseDir: './dest'
}
},
stylus: {
use: [
rupture(),
autoprefixer({
browsers: ['last 2 version', '> 1%', 'safari 5', 'ie 8', 'ie 7', 'opera 12.1', 'ios 6', 'android 4'],
cascade: false
})
]
},
cssbeautify: {
indent: '\t',
autosemicolon: true
},
jade: {
pretty: '\t'
},
htmlPrettify: {
"unformatted": ["pre", "code"],
"indent_with_tabs": true,
"preserve_newlines": true,
"brace_style": "expand",
"end_with_newline": true
},
svgSymbols: {
title: false,
id: '%f',
className: '%f',
templates: [
path.join(__dirname, 'source/static/styles/components/icons-template.styl'),
'default-svg'
]
},
imagemin: {
optimizationLevel: 3,
progressive: true,
interlaced: true,
svgoPlugins: [{removeViewBox: false}],
use: [
imageminPngquant(),
imageminSvgo()
]
}
};
gulp.task('cleanup', function (cb) {
return del(options.del, cb);
});
gulp.task('browser-sync', function() {
return browserSync.init(options.browserSync);
});
gulp.task('bs-reload', function (cb) {
browserSync.reload();
});
gulp.task('combine-modules-styles', function (cb) {
return gulp.src(['**/*.styl', '!**/_*.styl'], {cwd: 'source/modules'})
.pipe(plumber(options.plumber))
.pipe(concat('modules.styl'))
.pipe(gulp.dest('tmp'));<|fim▁hole|>});
gulp.task('compile-styles', function (cb) {
return gulp.src(['*.styl', '!_*.styl'], {cwd: 'source/static/styles'})
.pipe(plumber(options.plumber))
.pipe(stylus(options.stylus))
.pipe(gcmq({beautify: false}))
.pipe(cssbeautify(options.cssbeautify))
.pipe(csscomb())
.pipe(gulp.dest('dest/css'))
.pipe(csso())
.pipe(rename({suffix: '.min'}))
.pipe(gulp.dest('dest/css'))
.pipe(browserSync.stream());
});
gulp.task('combine-modules-data', function (cb) {
return gulp.src(['**/*.js', '!**/_*.js'], {cwd: 'source/modules/*/data'})
.pipe(plumber(options.plumber))
.pipe(concat('data.js', { newLine: ',\n\n' }))
.pipe(gulp.dest('tmp'));
});
gulp.task('compile-pages', function (cb) {
return gulp.src(['**/*.jade', '!**/_*.jade'], {cwd: 'source/pages'})
.pipe(plumber(options.plumber))
.pipe(data(getData('tmp/data.js')))
.pipe(jade(options.jade))
.pipe(htmlPrettify(options.htmlPrettify))
.pipe(gulp.dest('dest'));
});
gulp.task('copy-modules-img', function (cb) {
return gulp.src('**/*.{jpg,gif,svg,png}', {cwd: 'source/modules/*/assets'})
.pipe(plumber(options.plumber))
.pipe(changed('dest/img'))
.pipe(imagemin(options.imagemin))
.pipe(flatten())
.pipe(gulp.dest('dest/img'));
});
gulp.task('combine-modules-scripts', function (cb) {
return gulp.src(['*.js', '!_*.js'], {cwd: 'source/modules/*'})
.pipe(plumber(options.plumber))
.pipe(concat('modules.js', { newLine: '\n\n' }))
.pipe(gulp.dest('tmp'));
});
gulp.task('copy-assets', function (cb) {
var imageFilter = filter('**/*.{jpg,gif,svg,png}', {restore: true});
var scriptsFilter = filter(['**/*.js', '!**/*.min.js'], {restore: true});
var stylesFilter = filter(['**/*.css', '!**/*.min.css'], {restore: true});
return gulp.src(['**/*.*', '!**/_*.*'], {cwd: 'source/static/assets'})
.pipe(plumber(options.plumber))
.pipe(changed('dest'))
// Minify images
.pipe(imageFilter)
.pipe(changed('dest'))
.pipe(imagemin(options.imagemin))
.pipe(imageFilter.restore)
// Minify JavaScript files
.pipe(scriptsFilter)
.pipe(gulp.dest('dest'))
.pipe(uglify())
.pipe(rename({suffix: '.min'}))
.pipe(scriptsFilter.restore)
// Minify css
.pipe(stylesFilter)
.pipe(csso())
.pipe(rename({suffix: '.min'}))
.pipe(stylesFilter.restore)
// Copy other files
.pipe(gulp.dest('dest'));
});
gulp.task('combine-scripts', function (cb) {
return gulp.src(['*.js', '!_*.js'], {cwd: 'source/static/scripts'})
.pipe(plumber(options.plumber))
.pipe(include())
.pipe(gulp.dest('dest/js'))
.pipe(uglify())
.pipe(rename({suffix: '.min'}))
.pipe(gulp.dest('dest/js'));
});
gulp.task('combine-svg-icons', function (cb) {
return gulp.src(['**/*.svg', '!**/_*.svg'], {cwd: 'source/static/icons'})
.pipe(plumber(options.plumber))
.pipe(imagemin(options.imagemin))
.pipe(svgSymbols(options.svgSymbols))
.pipe(gulpif(/\.styl$/, gulp.dest('tmp')))
.pipe(gulpif(/\.svg$/, rename('icons.svg')))
.pipe(gulpif(/\.svg$/, gulp.dest('dest/img')));
});
gulp.task('build-zip', function() {
var datetime = '-' + getDateTime();
var zipName = 'dist' + datetime + '.zip';
return gulp.src('dest/**/*')
.pipe(gulpZip(zipName))
.pipe(gulp.dest('zip'));
});
gulp.task('build-html', function (cb) {
return runSequence(
'combine-modules-data',
'compile-pages',
cb
);
});
gulp.task('build-css', function (cb) {
return runSequence(
'combine-modules-styles',
'compile-styles',
cb
);
});
gulp.task('build-js', function (cb) {
return runSequence(
'combine-modules-scripts',
'combine-scripts',
cb
);
});
gulp.task('build', function (cb) {
return runSequence(
'cleanup',
'combine-svg-icons',
[
'build-html',
'copy-modules-img',
'copy-assets',
'build-css',
'build-js'
],
cb
);
});
gulp.task('zip', function (cb) {
return runSequence(
'build',
'build-zip',
cb
);
});
gulp.task('develop', function (cb) {
return runSequence(
'build',
'browser-sync',
cb
);
});
gulp.task('dev', ['develop'], function (cb) {
// Modules, pages
watch('source/**/*.jade', function() {
return runSequence('compile-pages', browserSync.reload);
});
// Modules data
watch('source/modules/*/data/*.js', function() {
return runSequence('build-html', browserSync.reload);
});
// Static styles
watch('source/static/styles/**/*.styl', function() {
// return runSequence('compile-styles');
gulp.start('compile-styles');
});
// Modules styles
watch('source/modules/**/*.styl', function() {
// return runSequence('build-css');
gulp.start('build-css');
});
// Static scripts
watch('source/static/scripts/**/*.js', function() {
return runSequence('combine-scripts', browserSync.reload);
});
// Modules scripts
watch('source/modules/*/*.js', function() {
return runSequence('build-js', browserSync.reload);
});
// Modules images
watch('source/modules/*/assets/**/*.{jpg,gif,svg,png}', function() {
return runSequence('copy-modules-img', browserSync.reload);
});
// Static files
watch('source/static/assets/**/*', function() {
return runSequence('copy-assets', browserSync.reload);
});
// Svg icons
watch('source/static/icons/**/*.svg', function() {
return runSequence('combine-svg-icons', browserSync.reload);
});
});<|fim▁end|> | |
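// Typical invocations of the tasks defined above (illustrative):
//   gulp build - clean and rebuild everything into dest/
//   gulp dev   - build, serve dest/ via BrowserSync, and watch sources
//   gulp zip   - build and pack dest/ into zip/dist-<timestamp>.zip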
<|file_name|>test_extensions.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This test checks if dynamic loading of library into MXNet is successful
import os
import platform
import mxnet as mx
import numpy as np
from mxnet import nd
from mxnet.gluon import nn
from mxnet.base import MXNetError
from mxnet.test_utils import download, is_cd_run, assert_almost_equal, default_context
import pytest
base_path = os.path.join(os.path.dirname(__file__), "../../..")
def check_platform():
return platform.machine() not in ['x86_64', 'AMD64']
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_custom_op():
# possible places to find library file
if (os.name=='posix'):<|fim▁hole|> if os.path.exists(lib):
fname = lib
elif os.path.exists(os.path.join(base_path,'build/'+lib)):
fname = os.path.join(base_path,'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libcustomop_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
fname = 'windows_package\\lib\\'+lib
else:
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
# load the library containing gemm custom operators
mx.library.load(fname)
# test symbol 2D gemm custom operators
s = mx.sym.Variable('s')
t = mx.sym.Variable('t')
c = mx.sym.my_gemm(s,t)
d = mx.sym.state_gemm(s,t)
# baseline gemm from MXNet
base = mx.sym.linalg.gemm2(s,t)
# get some random input matrices
dim_n, dim_k, dim_m = tuple(np.random.randint(1, 5, size=3))
mat1 = mx.nd.random.uniform(-10, 10, shape=(dim_n, dim_k), ctx=mx.cpu())
mat2 = mx.nd.random.uniform(-10, 10, shape=(dim_k, dim_m), ctx=mx.cpu())
# intermediate ndarrays to be populated by gradient compute
in_grad1 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad2 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad_base = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
exe1 = c.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad1)
exe2 = d.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad2)
exe_base = base.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad_base)
out1 = exe1.forward()
out2 = exe2.forward()
# test stateful operator by calling it multiple times
out2 = exe2.forward()
out_base = exe_base.forward()
# check that forward compute matches one executed by MXNet
assert_almost_equal(out_base[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(out_base[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
# random output grad ndarray for gradient update
out_grad = mx.nd.ones((dim_n, dim_m), ctx=mx.cpu())
exe1.backward([out_grad])
exe2.backward([out_grad])
exe_base.backward([out_grad])
# check that gradient compute matches one executed by MXNet
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad2[0].asnumpy(), rtol=1e-3, atol=1e-3)
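# Illustrative sketch (not part of the test suite): the minimal pattern the
# tests above rely on - locate the compiled extension, load it, and the new
# operators become available as symbols:
#
#   import os, mxnet as mx
#   fname = os.path.abspath('libcustomop_lib.so')   # path is an assumption
#   mx.library.load(fname)
#   s, t = mx.sym.Variable('s'), mx.sym.Variable('t')
#   c = mx.sym.my_gemm(s, t)                        # registered by the library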
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_subgraph():
# possible places to find library file
if (os.name=='posix'):
lib = 'libsubgraph_lib.so'
if os.path.exists(lib):
# plain make build, when run in the CI
fname = lib
elif os.path.exists(os.path.join(base_path, 'build/'+lib)):
# plain cmake build when run in the CI
fname = os.path.join(base_path, 'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libsubgraph_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
# plain make build, when run in the CI
fname = 'windows_package\\lib\\'+lib
else:
# plain cmake build when run in the CI
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
mx.library.load(fname)
# test simple graph with add, exp and log operators, library supports exp/log
a = mx.sym.var('a')
b = mx.sym.var('b')
c = a + b
d = mx.sym.exp(c)
sym = mx.sym.log(d)
args = {'a':mx.nd.ones((3,2),ctx=mx.cpu()), 'b':mx.nd.ones((3,2),ctx=mx.cpu())}
arg_array = [mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu()),
mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu())]
# baseline - regular execution in MXNet
exe = sym.bind(ctx=mx.cpu(), args=args)
out = exe.forward()
    # without propagating shapes/types, passing a custom option to subgraph prop "myOpt"
# should not create subgraph since subgraph prop requires type info
mysym1 = sym.optimize_for("myProp", myOpt='yello')
exe1 = mysym1.bind(ctx=mx.cpu(), args=args)
out1 = exe1.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types, rejecting subgraph
# this tests creating the subgraph and having the subgraph prop reject it
mysym2 = sym.optimize_for("myProp", arg_array, reject=True)
exe2 = mysym2.bind(ctx=mx.cpu(), args=args)
out2 = exe2.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types
mysym3 = sym.optimize_for("myProp",arg_array)
exe3 = mysym3.bind(ctx=mx.cpu(), args=args)
out3 = exe3.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out3[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block = nn.SymbolBlock(sym, [a,b])
sym_block.initialize()
sym_block.hybridize(backend='myProp')
out4 = sym_block(mx.nd.ones((3,2)),mx.nd.ones((3,2)))
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out4[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block2 = nn.SymbolBlock(sym, [a,b])
sym_block2.initialize()
a_data = mx.nd.ones((3,2))
b_data = mx.nd.ones((3,2))
sym_block2.optimize_for(a_data, b_data, backend='myProp')
sym_block2.export('optimized')
sym_block3 = nn.SymbolBlock.imports('optimized-symbol.json',['a','b'],
'optimized-0000.params')
out5 = sym_block3(a_data, b_data)
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out5[0].asnumpy(), rtol=1e-3, atol=1e-3)<|fim▁end|> | lib = 'libcustomop_lib.so' |
<|file_name|>junit.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package junit is DEPRECATED. Please use github.com/jstemmer/go-junit-report as a library.
package junit
import (
"encoding/xml"
"fmt"
"io/ioutil"
"log"
)
// TestStatusEnum is a enum for test result status
type TestStatusEnum string
const (
// Failed means junit test failed
Failed TestStatusEnum = "failed"<|fim▁hole|>)
// TestSuites holds a <testSuites/> list of TestSuite results
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
Suites []TestSuite `xml:"testsuite"`
}
// TestSuite holds <testSuite/> results
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"` // Seconds
Failures int `xml:"failures,attr"`
Tests int `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Properties TestProperties `xml:"properties"`
}
// TestCase holds <testcase/> results
type TestCase struct {
Name string `xml:"name,attr"`
Time string `xml:"time,attr"` // Seconds
ClassName string `xml:"classname,attr"`
Failure *string `xml:"failure,omitempty"`
Output *string `xml:"system-out,omitempty"`
Error *string `xml:"system-err,omitempty"`
Skipped *string `xml:"skipped,omitempty"`
Properties *TestProperties `xml:"properties,omitempty"`
}
// TestProperties is an array of test properties
type TestProperties struct {
Properties []TestProperty `xml:"property"`
}
// TestProperty defines a property of the test
type TestProperty struct {
Name string `xml:"name,attr"`
Value string `xml:"value,attr"`
}
// GetTestStatus returns the test status as a string
func (testCase *TestCase) GetTestStatus() TestStatusEnum {
testStatus := Passed
switch {
case testCase.Failure != nil:
testStatus = Failed
case testCase.Skipped != nil:
testStatus = Skipped
}
return testStatus
}
// AddProperty adds property to testcase
func (testCase *TestCase) AddProperty(name, val string) {
if testCase.Properties == nil {
testCase.Properties = &TestProperties{}
}
property := TestProperty{Name: name, Value: val}
testCase.Properties.Properties = append(testCase.Properties.Properties, property)
}
// AddTestCase adds a testcase to the testsuite
func (ts *TestSuite) AddTestCase(tc TestCase) {
ts.Tests++
if tc.GetTestStatus() == Failed {
ts.Failures++
}
ts.TestCases = append(ts.TestCases, tc)
}
// GetTestSuite gets TestSuite struct by name
func (testSuites *TestSuites) GetTestSuite(suiteName string) (*TestSuite, error) {
for _, testSuite := range testSuites.Suites {
if testSuite.Name == suiteName {
return &testSuite, nil
}
}
return nil, fmt.Errorf("Test suite '%s' not found", suiteName)
}
// AddTestSuite adds TestSuite to TestSuites
func (testSuites *TestSuites) AddTestSuite(testSuite *TestSuite) error {
if _, err := testSuites.GetTestSuite(testSuite.Name); err == nil {
return fmt.Errorf("Test suite '%s' already exists", testSuite.Name)
}
testSuites.Suites = append(testSuites.Suites, *testSuite)
return nil
}
// ToBytes converts TestSuites struct to bytes array
func (testSuites *TestSuites) ToBytes(prefix, indent string) ([]byte, error) {
return xml.MarshalIndent(testSuites, prefix, indent)
}
// UnMarshal converts a byte array to a TestSuites struct.
// It works with both TestSuites and TestSuite structs; if the
// input is a TestSuite struct it will still return a TestSuites
// struct, namely an empty wrapper TestSuites containing only
// the input Suite.
func UnMarshal(buf []byte) (*TestSuites, error) {
var testSuites TestSuites
if err := xml.Unmarshal(buf, &testSuites); err == nil {
return &testSuites, nil
}
// The input might be a TestSuite if reach here, try parsing with TestSuite
testSuites.Suites = append([]TestSuite(nil), TestSuite{})
if err := xml.Unmarshal(buf, &testSuites.Suites[0]); err != nil {
return nil, err
}
return &testSuites, nil
}
// CreateXMLErrorMsg outputs a junit testsuite, testname and error message to the destination path
// in XML format
func CreateXMLErrorMsg(testSuite, testName, errMsg, dest string) {
suites := TestSuites{}
suite := TestSuite{Name: testSuite}
var errP *string
if errMsg != "" {
errP = &errMsg
}
suite.AddTestCase(TestCase{
Name: testName,
Failure: errP,
})
suites.AddTestSuite(&suite)
contents, err := suites.ToBytes("", "")
if err != nil {
log.Fatal(err)
}
ioutil.WriteFile(dest, contents, 0644)
}<|fim▁end|> | // Skipped means junit test skipped
Skipped TestStatusEnum = "skipped"
// Passed means junit test passed
Passed TestStatusEnum = "passed" |
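// Illustrative usage sketch (not part of the original file); it assumes this
// package is imported as "junit" and would live in a separate main package:
//
//	suites := junit.TestSuites{}
//	suite := junit.TestSuite{Name: "example-suite"}
//	msg := "assertion failed"
//	suite.AddTestCase(junit.TestCase{Name: "TestFoo", Time: "0.01", Failure: &msg})
//	_ = suites.AddTestSuite(&suite)
//	out, _ := suites.ToBytes("", "  ")
//	fmt.Println(string(out)) // <testsuites>...<testcase name="TestFoo"...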
<|file_name|>MutableValueSetterProxy.cpp<|end_file_name|><|fim▁begin|>#include "MutableValueSetterProxy.h"
#include <jsi/jsi.h>
#include "MutableValue.h"
#include "SharedParent.h"
using namespace facebook;
namespace reanimated {
void MutableValueSetterProxy::set(
jsi::Runtime &rt,
const jsi::PropNameID &name,
const jsi::Value &newValue) {
auto propName = name.utf8(rt);
if (propName == "_value") {
mutableValue->setValue(rt, newValue);
} else if (propName == "_animation") {
// TODO: assert to allow animation to be set from UI only
if (mutableValue->animation.expired()) {
mutableValue->animation = mutableValue->getWeakRef(rt);
}
*mutableValue->animation.lock() = jsi::Value(rt, newValue);
} else if (propName == "value") {
    // if `this.value` is read from inside the value setter, we should throw
}
}
jsi::Value MutableValueSetterProxy::get(
jsi::Runtime &rt,
const jsi::PropNameID &name) {
auto propName = name.utf8(rt);
if (propName == "value") {
return mutableValue->getValue(rt);
} else if (propName == "_value") {
return mutableValue->getValue(rt);
} else if (propName == "_animation") {
if (mutableValue->animation.expired()) {
mutableValue->animation = mutableValue->getWeakRef(rt);
}
return jsi::Value(rt, *mutableValue->animation.lock());<|fim▁hole|> return jsi::Value::undefined();
}
} // namespace reanimated<|fim▁end|> | }
|
<|file_name|>ElementTree.py<|end_file_name|><|fim▁begin|>#
# ElementTree
# $Id: ElementTree.py 3440 2008-07-18 14:45:01Z fredrik $
#
# light-weight XML support for Python 2.3 and later.
#
# history (since 1.2.6):
# 2005-11-12 fl added tostringlist/fromstringlist helpers
# 2006-07-05 fl merged in selected changes from the 1.3 sandbox
# 2006-07-05 fl removed support for 2.1 and earlier
# 2007-06-21 fl added deprecation/future warnings
# 2007-08-25 fl added doctype hook, added parser version attribute etc
# 2007-08-26 fl added new serializer code (better namespace handling, etc)
# 2007-08-27 fl warn for broken /tag searches on tree level
# 2007-09-02 fl added html/text methods to serializer (experimental)
# 2007-09-05 fl added method argument to tostring/tostringlist
# 2007-09-06 fl improved error handling
# 2007-09-13 fl added itertext, iterfind; assorted cleanups
# 2007-12-15 fl added C14N hooks, copy method (experimental)
#
# Copyright (c) 1999-2008 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
__all__ = [
# public symbols
"Comment",
"dump",
"Element", "ElementTree",
"fromstring", "fromstringlist",
"iselement", "iterparse",
"parse", "ParseError",
"PI", "ProcessingInstruction",
"QName",
"SubElement",
"tostring", "tostringlist",
"TreeBuilder",
"VERSION",
"XML",
"XMLParser", "XMLTreeBuilder",
]
VERSION = "1.3.0"
##
# The <b>Element</b> type is a flexible container object, designed to
# store hierarchical data structures in memory. The type can be
# described as a cross between a list and a dictionary.
# <p>
# Each element has a number of properties associated with it:
# <ul>
# <li>a <i>tag</i>. This is a string identifying what kind of data
# this element represents (the element type, in other words).</li>
# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
# <li>a <i>text</i> string.</li>
# <li>an optional <i>tail</i> string.</li>
# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
# </ul>
#
# To create an element instance, use the {@link #Element} constructor
# or the {@link #SubElement} factory function.
# <p>
# The {@link #ElementTree} class can be used to wrap an element
# structure, and convert it from and to XML.
##
import sys
import re
import warnings
class _SimpleElementPath(object):
# emulate pre-1.2 find/findtext/findall behaviour
def find(self, element, tag, namespaces=None):
for elem in element:
if elem.tag == tag:
return elem
return None
def findtext(self, element, tag, default=None, namespaces=None):
elem = self.find(element, tag)
if elem is None:
return default
return elem.text or ""
def iterfind(self, element, tag, namespaces=None):
if tag[:3] == ".//":
for elem in element.iter(tag[3:]):
yield elem
for elem in element:
if elem.tag == tag:
yield elem
def findall(self, element, tag, namespaces=None):
return list(self.iterfind(element, tag, namespaces))
try:
from . import ElementPath
except ImportError:
ElementPath = _SimpleElementPath()
##
# Parser error. This is a subclass of <b>SyntaxError</b>.
# <p>
# In addition to the exception value, an exception instance contains a
# specific exception code in the <b>code</b> attribute, and the line and
# column of the error in the <b>position</b> attribute.
class ParseError(SyntaxError):
pass
# --------------------------------------------------------------------
##
# Checks if an object appears to be a valid element object.
#
# @param An element instance.
# @return A true value if this is an element object.
# @defreturn flag
def iselement(element):
# FIXME: not sure about this; might be a better idea to look
# for tag/attrib/text attributes
return isinstance(element, Element) or hasattr(element, "tag")
##
# Element class. This class defines the Element interface, and
# provides a reference implementation of this interface.
# <p>
# The element name, attribute names, and attribute values can be
# either ASCII strings (ordinary Python strings containing only 7-bit
# ASCII characters) or Unicode strings.
#
# @param tag The element name.
# @param attrib An optional dictionary, containing element attributes.
# @param **extra Additional attributes, given as keyword arguments.
# @see Element
# @see SubElement
# @see Comment
# @see ProcessingInstruction
class Element(object):
# <tag attrib>text<child/>...</tag>tail
##
# (Attribute) Element tag.
tag = None
##
# (Attribute) Element attribute dictionary. Where possible, use
# {@link #Element.get},
# {@link #Element.set},
# {@link #Element.keys}, and
# {@link #Element.items} to access
# element attributes.
attrib = None
##
# (Attribute) Text before first subelement. This is either a
# string or the value None. Note that if there was no text, this
# attribute may be either None or an empty string, depending on
# the parser.
text = None
##
# (Attribute) Text after this element's end tag, but before the
# next sibling element's start tag. This is either a string or
# the value None. Note that if there was no text, this attribute
# may be either None or an empty string, depending on the parser.
tail = None # text after end tag, if any
# constructor
def __init__(self, tag, attrib={}, **extra):
attrib = attrib.copy()
attrib.update(extra)
self.tag = tag
self.attrib = attrib
self._children = []
def __repr__(self):
return "<Element %s at 0x%x>" % (repr(self.tag), id(self))
##
# Creates a new element object of the same type as this element.
#
# @param tag Element tag.
# @param attrib Element attributes, given as a dictionary.
# @return A new element instance.
def makeelement(self, tag, attrib):
return self.__class__(tag, attrib)
##
# (Experimental) Copies the current element. This creates a
# shallow copy; subelements will be shared with the original tree.
#
# @return A new element instance.
def copy(self):
elem = self.makeelement(self.tag, self.attrib)
elem.text = self.text
elem.tail = self.tail
elem[:] = self
return elem
##
# Returns the number of subelements. Note that this only counts
# full elements; to check if there's any content in an element, you
# have to check both the length and the <b>text</b> attribute.
#
# @return The number of subelements.
def __len__(self):
return len(self._children)
def __nonzero__(self):
warnings.warn(
"The behavior of this method will change in future versions. "
"Use specific 'len(elem)' or 'elem is not None' test instead.",
FutureWarning, stacklevel=2
)
return len(self._children) != 0 # emulate old behaviour, for now
##
# Returns the given subelement, by index.
#
# @param index What subelement to return.
# @return The given subelement.
# @exception IndexError If the given element does not exist.
def __getitem__(self, index):
return self._children[index]
##
# Replaces the given subelement, by index.
#
# @param index What subelement to replace.
# @param element The new element value.
# @exception IndexError If the given element does not exist.
<|fim▁hole|> # if isinstance(index, slice):
# for elt in element:
# assert iselement(elt)
# else:
# assert iselement(element)
self._children[index] = element
##
# Deletes the given subelement, by index.
#
# @param index What subelement to delete.
# @exception IndexError If the given element does not exist.
def __delitem__(self, index):
del self._children[index]
##
# Adds a subelement to the end of this element. In document order,
# the new element will appear after the last existing subelement (or
# directly after the text, if it's the first subelement), but before
# the end tag for this element.
#
# @param element The element to add.
def append(self, element):
# assert iselement(element)
self._children.append(element)
##
# Appends subelements from a sequence.
#
# @param elements A sequence object with zero or more elements.
# @since 1.3
def extend(self, elements):
# for element in elements:
# assert iselement(element)
self._children.extend(elements)
##
# Inserts a subelement at the given position in this element.
#
# @param index Where to insert the new subelement.
def insert(self, index, element):
# assert iselement(element)
self._children.insert(index, element)
##
# Removes a matching subelement. Unlike the <b>find</b> methods,
# this method compares elements based on identity, not on tag
# value or contents. To remove subelements by other means, the
# easiest way is often to use a list comprehension to select what
# elements to keep, and use slice assignment to update the parent
# element.
#
# @param element What element to remove.
# @exception ValueError If a matching element could not be found.
def remove(self, element):
# assert iselement(element)
self._children.remove(element)
##
# (Deprecated) Returns all subelements. The elements are returned
# in document order.
#
# @return A list of subelements.
# @defreturn list of Element instances
def getchildren(self):
warnings.warn(
"This method will be removed in future versions. "
"Use 'list(elem)' or iteration over elem instead.",
DeprecationWarning, stacklevel=2
)
return self._children
##
# Finds the first matching subelement, by tag name or path.
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return The first matching element, or None if no element was found.
# @defreturn Element or None
def find(self, path, namespaces=None):
return ElementPath.find(self, path, namespaces)
##
# Finds text for the first matching subelement, by tag name or path.
#
# @param path What element to look for.
# @param default What to return if the element was not found.
# @keyparam namespaces Optional namespace prefix map.
# @return The text content of the first matching element, or the
    # default value if no element was found. Note that if the element
# is found, but has no text content, this method returns an
# empty string.
# @defreturn string
def findtext(self, path, default=None, namespaces=None):
return ElementPath.findtext(self, path, default, namespaces)
##
# Finds all matching subelements, by tag name or path.
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return A list or other sequence containing all matching elements,
# in document order.
# @defreturn list of Element instances
def findall(self, path, namespaces=None):
return ElementPath.findall(self, path, namespaces)
##
# Finds all matching subelements, by tag name or path.
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return An iterator or sequence containing all matching elements,
# in document order.
# @defreturn a generated sequence of Element instances
def iterfind(self, path, namespaces=None):
return ElementPath.iterfind(self, path, namespaces)
##
# Resets an element. This function removes all subelements, clears
# all attributes, and sets the <b>text</b> and <b>tail</b> attributes
# to None.
def clear(self):
self.attrib.clear()
self._children = []
self.text = self.tail = None
##
# Gets an element attribute. Equivalent to <b>attrib.get</b>, but
# some implementations may handle this a bit more efficiently.
#
# @param key What attribute to look for.
# @param default What to return if the attribute was not found.
# @return The attribute value, or the default value, if the
# attribute was not found.
# @defreturn string or None
def get(self, key, default=None):
return self.attrib.get(key, default)
##
# Sets an element attribute. Equivalent to <b>attrib[key] = value</b>,
# but some implementations may handle this a bit more efficiently.
#
# @param key What attribute to set.
# @param value The attribute value.
def set(self, key, value):
self.attrib[key] = value
##
# Gets a list of attribute names. The names are returned in an
# arbitrary order (just like for an ordinary Python dictionary).
# Equivalent to <b>attrib.keys()</b>.
#
# @return A list of element attribute names.
# @defreturn list of strings
def keys(self):
return self.attrib.keys()
##
# Gets element attributes, as a sequence. The attributes are
# returned in an arbitrary order. Equivalent to <b>attrib.items()</b>.
#
# @return A list of (name, value) tuples for all attributes.
# @defreturn list of (string, string) tuples
def items(self):
return self.attrib.items()
##
# Creates a tree iterator. The iterator loops over this element
# and all subelements, in document order, and returns all elements
# with a matching tag.
# <p>
# If the tree structure is modified during iteration, new or removed
# elements may or may not be included. To get a stable set, use the
# list() function on the iterator, and loop over the resulting list.
#
# @param tag What tags to look for (default is to return all elements).
# @return An iterator containing all the matching elements.
# @defreturn iterator
def iter(self, tag=None):
if tag == "*":
tag = None
if tag is None or self.tag == tag:
yield self
for e in self._children:
for e in e.iter(tag):
yield e
# compatibility
def getiterator(self, tag=None):
# Change for a DeprecationWarning in 1.4
warnings.warn(
"This method will be removed in future versions. "
"Use 'elem.iter()' or 'list(elem.iter())' instead.",
PendingDeprecationWarning, stacklevel=2
)
return list(self.iter(tag))
##
# Creates a text iterator. The iterator loops over this element
# and all subelements, in document order, and returns all inner
# text.
#
# @return An iterator containing all inner text.
# @defreturn iterator
def itertext(self):
tag = self.tag
if not isinstance(tag, basestring) and tag is not None:
return
if self.text:
yield self.text
for e in self:
for s in e.itertext():
yield s
if e.tail:
yield e.tail
# compatibility
_Element = _ElementInterface = Element
##
# Subelement factory. This function creates an element instance, and
# appends it to an existing element.
# <p>
# The element name, attribute names, and attribute values can be
# either 8-bit ASCII strings or Unicode strings.
#
# @param parent The parent element.
# @param tag The subelement name.
# @param attrib An optional dictionary, containing element attributes.
# @param **extra Additional attributes, given as keyword arguments.
# @return An element instance.
# @defreturn Element
def SubElement(parent, tag, attrib={}, **extra):
attrib = attrib.copy()
attrib.update(extra)
element = parent.makeelement(tag, attrib)
parent.append(element)
return element
##
# Comment element factory. This factory function creates a special
# element that will be serialized as an XML comment by the standard
# serializer.
# <p>
# The comment string can be either an 8-bit ASCII string or a Unicode
# string.
#
# @param text A string containing the comment string.
# @return An element instance, representing a comment.
# @defreturn Element
def Comment(text=None):
element = Element(Comment)
element.text = text
return element
##
# PI element factory. This factory function creates a special element
# that will be serialized as an XML processing instruction by the standard
# serializer.
#
# @param target A string containing the PI target.
# @param text A string containing the PI contents, if any.
# @return An element instance, representing a PI.
# @defreturn Element
def ProcessingInstruction(target, text=None):
element = Element(ProcessingInstruction)
element.text = target
if text:
element.text = element.text + " " + text
return element
PI = ProcessingInstruction
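# Illustrative sketch (not part of the original module): the factories above
# compose as follows.
#
#   root = Element("root", {"version": "1.0"})
#   item = SubElement(root, "item")
#   item.text = "hello"
#   root.append(Comment("a comment"))
#   print tostring(root)
#   # '<root version="1.0"><item>hello</item><!--a comment--></root>'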
##
# QName wrapper. This can be used to wrap a QName attribute value, in
# order to get proper namespace handling on output.
#
# @param text A string containing the QName value, in the form {uri}local,
# or, if the tag argument is given, the URI part of a QName.
# @param tag Optional tag. If given, the first argument is interpreted as
# an URI, and this argument is interpreted as a local name.
# @return An opaque object, representing the QName.
class QName(object):
def __init__(self, text_or_uri, tag=None):
if tag:
text_or_uri = "{%s}%s" % (text_or_uri, tag)
self.text = text_or_uri
def __str__(self):
return self.text
def __hash__(self):
return hash(self.text)
def __cmp__(self, other):
if isinstance(other, QName):
return cmp(self.text, other.text)
return cmp(self.text, other)
# --------------------------------------------------------------------
##
# ElementTree wrapper class. This class represents an entire element
# hierarchy, and adds some extra support for serialization to and from
# standard XML.
#
# @param element Optional root element.
# @keyparam file Optional file handle or file name. If given, the
# tree is initialized with the contents of this XML file.
class ElementTree(object):
def __init__(self, element=None, file=None):
# assert element is None or iselement(element)
self._root = element # first node
if file:
self.parse(file)
##
# Gets the root element for this tree.
#
# @return An element instance.
# @defreturn Element
def getroot(self):
return self._root
##
# Replaces the root element for this tree. This discards the
# current contents of the tree, and replaces it with the given
# element. Use with care.
#
# @param element An element instance.
def _setroot(self, element):
# assert iselement(element)
self._root = element
##
# Loads an external XML document into this element tree.
#
# @param source A file name or file object. If a file object is
# given, it only has to implement a <b>read(n)</b> method.
# @keyparam parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return The document root element.
# @defreturn Element
# @exception ParseError If the parser fails to parse the document.
def parse(self, source, parser=None):
close_source = False
if not hasattr(source, "read"):
source = open(source, "rb")
close_source = True
try:
if not parser:
parser = XMLParser(target=TreeBuilder())
while 1:
data = source.read(65536)
if not data:
break
parser.feed(data)
self._root = parser.close()
return self._root
finally:
if close_source:
source.close()
##
# Creates a tree iterator for the root element. The iterator loops
# over all elements in this tree, in document order.
#
# @param tag What tags to look for (default is to return all elements)
# @return An iterator.
# @defreturn iterator
def iter(self, tag=None):
# assert self._root is not None
return self._root.iter(tag)
# compatibility
def getiterator(self, tag=None):
# Change for a DeprecationWarning in 1.4
warnings.warn(
"This method will be removed in future versions. "
"Use 'tree.iter()' or 'list(tree.iter())' instead.",
PendingDeprecationWarning, stacklevel=2
)
return list(self.iter(tag))
##
# Same as getroot().find(path), starting at the root of the
# tree.
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return The first matching element, or None if no element was found.
# @defreturn Element or None
def find(self, path, namespaces=None):
# assert self._root is not None
if path[:1] == "/":
path = "." + path
warnings.warn(
"This search is broken in 1.3 and earlier, and will be "
"fixed in a future version. If you rely on the current "
"behaviour, change it to %r" % path,
FutureWarning, stacklevel=2
)
return self._root.find(path, namespaces)
##
# Same as getroot().findtext(path), starting at the root of the tree.
#
# @param path What element to look for.
# @param default What to return if the element was not found.
# @keyparam namespaces Optional namespace prefix map.
# @return The text content of the first matching element, or the
    # default value if no element was found. Note that if the element
# is found, but has no text content, this method returns an
# empty string.
# @defreturn string
def findtext(self, path, default=None, namespaces=None):
# assert self._root is not None
if path[:1] == "/":
path = "." + path
warnings.warn(
"This search is broken in 1.3 and earlier, and will be "
"fixed in a future version. If you rely on the current "
"behaviour, change it to %r" % path,
FutureWarning, stacklevel=2
)
return self._root.findtext(path, default, namespaces)
##
# Same as getroot().findall(path), starting at the root of the tree.
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return A list or iterator containing all matching elements,
# in document order.
# @defreturn list of Element instances
def findall(self, path, namespaces=None):
# assert self._root is not None
if path[:1] == "/":
path = "." + path
warnings.warn(
"This search is broken in 1.3 and earlier, and will be "
"fixed in a future version. If you rely on the current "
"behaviour, change it to %r" % path,
FutureWarning, stacklevel=2
)
return self._root.findall(path, namespaces)
##
# Finds all matching subelements, by tag name or path.
# Same as getroot().iterfind(path).
#
# @param path What element to look for.
# @keyparam namespaces Optional namespace prefix map.
# @return An iterator or sequence containing all matching elements,
# in document order.
# @defreturn a generated sequence of Element instances
def iterfind(self, path, namespaces=None):
# assert self._root is not None
if path[:1] == "/":
path = "." + path
warnings.warn(
"This search is broken in 1.3 and earlier, and will be "
"fixed in a future version. If you rely on the current "
"behaviour, change it to %r" % path,
FutureWarning, stacklevel=2
)
return self._root.iterfind(path, namespaces)
##
# Writes the element tree to a file, as XML.
#
# @def write(file, **options)
# @param file A file name, or a file object opened for writing.
# @param **options Options, given as keyword arguments.
# @keyparam encoding Optional output encoding (default is US-ASCII).
# @keyparam xml_declaration Controls if an XML declaration should
# be added to the file. Use False for never, True for always,
# None for only if not US-ASCII or UTF-8. None is default.
# @keyparam default_namespace Sets the default XML namespace (for "xmlns").
# @keyparam method Optional output method ("xml", "html", "text" or
# "c14n"; default is "xml").
def write(self, file_or_filename,
# keyword arguments
encoding=None,
xml_declaration=None,
default_namespace=None,
method=None):
# assert self._root is not None
if not method:
method = "xml"
elif method not in _serialize:
# FIXME: raise an ImportError for c14n if ElementC14N is missing?
raise ValueError("unknown method %r" % method)
if hasattr(file_or_filename, "write"):
file = file_or_filename
else:
file = open(file_or_filename, "wb")
try:
write = file.write
if not encoding:
if method == "c14n":
encoding = "utf-8"
else:
encoding = "us-ascii"
elif xml_declaration or (xml_declaration is None and
encoding not in ("utf-8", "us-ascii")):
if method == "xml":
write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
if method == "text":
_serialize_text(write, self._root, encoding)
else:
qnames, namespaces = _namespaces(
self._root, encoding, default_namespace
)
serialize = _serialize[method]
serialize(write, self._root, encoding, qnames, namespaces)
finally:
if file_or_filename is not file:
file.close()
def write_c14n(self, file):
# lxml.etree compatibility. use output method instead
return self.write(file, method="c14n")
# --------------------------------------------------------------------
# serialization support
def _namespaces(elem, encoding, default_namespace=None):
# identify namespaces used in this tree
# maps qnames to *encoded* prefix:local names
qnames = {None: None}
# maps uri:s to prefixes
namespaces = {}
if default_namespace:
namespaces[default_namespace] = ""
def encode(text):
return text.encode(encoding)
def add_qname(qname):
# calculate serialized qname representation
try:
if qname[:1] == "{":
uri, tag = qname[1:].rsplit("}", 1)
prefix = namespaces.get(uri)
if prefix is None:
prefix = _namespace_map.get(uri)
if prefix is None:
prefix = "ns%d" % len(namespaces)
if prefix != "xml":
namespaces[uri] = prefix
if prefix:
qnames[qname] = encode("%s:%s" % (prefix, tag))
else:
qnames[qname] = encode(tag) # default element
else:
if default_namespace:
# FIXME: can this be handled in XML 1.0?
raise ValueError(
"cannot use non-qualified names with "
"default_namespace option"
)
qnames[qname] = encode(qname)
except TypeError:
_raise_serialization_error(qname)
# populate qname and namespaces table
try:
iterate = elem.iter
except AttributeError:
iterate = elem.getiterator # cET compatibility
for elem in iterate():
tag = elem.tag
if isinstance(tag, QName):
if tag.text not in qnames:
add_qname(tag.text)
elif isinstance(tag, basestring):
if tag not in qnames:
add_qname(tag)
elif tag is not None and tag is not Comment and tag is not PI:
_raise_serialization_error(tag)
for key, value in elem.items():
if isinstance(key, QName):
key = key.text
if key not in qnames:
add_qname(key)
if isinstance(value, QName) and value.text not in qnames:
add_qname(value.text)
text = elem.text
if isinstance(text, QName) and text.text not in qnames:
add_qname(text.text)
return qnames, namespaces
def _serialize_xml(write, elem, encoding, qnames, namespaces):
tag = elem.tag
text = elem.text
if tag is Comment:
write("<!--%s-->" % _encode(text, encoding))
elif tag is ProcessingInstruction:
write("<?%s?>" % _encode(text, encoding))
else:
tag = qnames[tag]
if tag is None:
if text:
write(_escape_cdata(text, encoding))
for e in elem:
_serialize_xml(write, e, encoding, qnames, None)
else:
write("<" + tag)
items = elem.items()
if items or namespaces:
if namespaces:
for v, k in sorted(namespaces.items(),
key=lambda x: x[1]): # sort on prefix
if k:
k = ":" + k
write(" xmlns%s=\"%s\"" % (
k.encode(encoding),
_escape_attrib(v, encoding)
))
for k, v in sorted(items): # lexical order
if isinstance(k, QName):
k = k.text
if isinstance(v, QName):
v = qnames[v.text]
else:
v = _escape_attrib(v, encoding)
write(" %s=\"%s\"" % (qnames[k], v))
if text or len(elem):
write(">")
if text:
write(_escape_cdata(text, encoding))
for e in elem:
_serialize_xml(write, e, encoding, qnames, None)
write("</" + tag + ">")
else:
write(" />")
if elem.tail:
write(_escape_cdata(elem.tail, encoding))
HTML_EMPTY = ("area", "base", "basefont", "br", "col", "frame", "hr",
"img", "input", "isindex", "link", "meta", "param")
try:
HTML_EMPTY = set(HTML_EMPTY)
except NameError:
pass
def _serialize_html(write, elem, encoding, qnames, namespaces):
tag = elem.tag
text = elem.text
if tag is Comment:
write("<!--%s-->" % _escape_cdata(text, encoding))
elif tag is ProcessingInstruction:
write("<?%s?>" % _escape_cdata(text, encoding))
else:
tag = qnames[tag]
if tag is None:
if text:
write(_escape_cdata(text, encoding))
for e in elem:
_serialize_html(write, e, encoding, qnames, None)
else:
write("<" + tag)
items = elem.items()
if items or namespaces:
if namespaces:
for v, k in sorted(namespaces.items(),
key=lambda x: x[1]): # sort on prefix
if k:
k = ":" + k
write(" xmlns%s=\"%s\"" % (
k.encode(encoding),
_escape_attrib(v, encoding)
))
for k, v in sorted(items): # lexical order
if isinstance(k, QName):
k = k.text
if isinstance(v, QName):
v = qnames[v.text]
else:
v = _escape_attrib_html(v, encoding)
# FIXME: handle boolean attributes
write(" %s=\"%s\"" % (qnames[k], v))
write(">")
ltag = tag.lower()
if text:
if ltag == "script" or ltag == "style":
write(_encode(text, encoding))
else:
write(_escape_cdata(text, encoding))
for e in elem:
_serialize_html(write, e, encoding, qnames, None)
if ltag not in HTML_EMPTY:
write("</" + tag + ">")
if elem.tail:
write(_escape_cdata(elem.tail, encoding))
def _serialize_text(write, elem, encoding):
for part in elem.itertext():
write(part.encode(encoding))
if elem.tail:
write(elem.tail.encode(encoding))
_serialize = {
"xml": _serialize_xml,
"html": _serialize_html,
"text": _serialize_text,
# this optional method is imported at the end of the module
# "c14n": _serialize_c14n,
}
##
# Registers a namespace prefix. The registry is global, and any
# existing mapping for either the given prefix or the namespace URI
# will be removed.
#
# @param prefix Namespace prefix.
# @param uri Namespace uri. Tags and attributes in this namespace
# will be serialized with the given prefix, if at all possible.
# @exception ValueError If the prefix is reserved, or is otherwise
# invalid.
def register_namespace(prefix, uri):
if re.match("ns\d+$", prefix):
raise ValueError("Prefix format reserved for internal use")
for k, v in _namespace_map.items():
if k == uri or v == prefix:
del _namespace_map[k]
_namespace_map[uri] = prefix
_namespace_map = {
# "well-known" namespace prefixes
"http://www.w3.org/XML/1998/namespace": "xml",
"http://www.w3.org/1999/xhtml": "html",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
"http://schemas.xmlsoap.org/wsdl/": "wsdl",
# xml schema
"http://www.w3.org/2001/XMLSchema": "xs",
"http://www.w3.org/2001/XMLSchema-instance": "xsi",
# dublin core
"http://purl.org/dc/elements/1.1/": "dc",
}
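# Illustrative sketch (not part of the original module; the URI is made up):
#
#   register_namespace("ex", "http://example.com/ns")
#   elem = Element(QName("http://example.com/ns", "item"))
#   print tostring(elem)
#   # '<ex:item xmlns:ex="http://example.com/ns" />'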
def _raise_serialization_error(text):
raise TypeError(
"cannot serialize %r (type %s)" % (text, type(text).__name__)
)
def _encode(text, encoding):
try:
return text.encode(encoding, "xmlcharrefreplace")
except (TypeError, AttributeError):
_raise_serialization_error(text)
def _escape_cdata(text, encoding):
# escape character data
try:
# it's worth avoiding do-nothing calls for strings that are
# shorter than 500 character, or so. assume that's, by far,
# the most common case in most applications.
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
return text.encode(encoding, "xmlcharrefreplace")
except (TypeError, AttributeError):
_raise_serialization_error(text)
def _escape_attrib(text, encoding):
# escape attribute value
try:
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
if "\"" in text:
text = text.replace("\"", """)
if "\n" in text:
text = text.replace("\n", " ")
return text.encode(encoding, "xmlcharrefreplace")
except (TypeError, AttributeError):
_raise_serialization_error(text)
def _escape_attrib_html(text, encoding):
# escape attribute value
try:
if "&" in text:
text = text.replace("&", "&")
if ">" in text:
text = text.replace(">", ">")
if "\"" in text:
text = text.replace("\"", """)
return text.encode(encoding, "xmlcharrefreplace")
except (TypeError, AttributeError):
_raise_serialization_error(text)
# --------------------------------------------------------------------
##
# Generates a string representation of an XML element, including all
# subelements.
#
# @param element An Element instance.
# @keyparam encoding Optional output encoding (default is US-ASCII).
# @keyparam method Optional output method ("xml", "html", "text" or
# "c14n"; default is "xml").
# @return An encoded string containing the XML data.
# @defreturn string
def tostring(element, encoding=None, method=None):
class dummy:
pass
data = []
file = dummy()
file.write = data.append
ElementTree(element).write(file, encoding, method=method)
return "".join(data)
##
# Generates a string representation of an XML element, including all
# subelements. The string is returned as a sequence of string fragments.
#
# @param element An Element instance.
# @keyparam encoding Optional output encoding (default is US-ASCII).
# @keyparam method Optional output method ("xml", "html", "text" or
# "c14n"; default is "xml").
# @return A sequence object containing the XML data.
# @defreturn sequence
# @since 1.3
def tostringlist(element, encoding=None, method=None):
class dummy:
pass
data = []
file = dummy()
file.write = data.append
ElementTree(element).write(file, encoding, method=method)
# FIXME: merge small fragments into larger parts
return data
##
# Writes an element tree or element structure to sys.stdout. This
# function should be used for debugging only.
# <p>
# The exact output format is implementation dependent. In this
# version, it's written as an ordinary XML file.
#
# @param elem An element tree or an individual element.
def dump(elem):
# debugging
if not isinstance(elem, ElementTree):
elem = ElementTree(elem)
elem.write(sys.stdout)
tail = elem.getroot().tail
if not tail or tail[-1] != "\n":
sys.stdout.write("\n")
# --------------------------------------------------------------------
# parsing
##
# Parses an XML document into an element tree.
#
# @param source A filename or file object containing XML data.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return An ElementTree instance
def parse(source, parser=None):
tree = ElementTree()
tree.parse(source, parser)
return tree
##
# Parses an XML document into an element tree incrementally, and reports
# what's going on to the user.
#
# @param source A filename or file object containing XML data.
# @param events A list of events to report back. If omitted, only "end"
# events are reported.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return A (event, elem) iterator.
def iterparse(source, events=None, parser=None):
close_source = False
if not hasattr(source, "read"):
source = open(source, "rb")
close_source = True
try:
if not parser:
parser = XMLParser(target=TreeBuilder())
return _IterParseIterator(source, events, parser, close_source)
except:
if close_source:
source.close()
raise
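# Hedged usage sketch (comment only; "feed.xml", the tag name and the
# handler are placeholders). Clearing elements that have already been
# handled keeps memory bounded on large inputs:
#
#   for event, elem in iterparse("feed.xml", events=("end",)):
#       if elem.tag == "record":
#           handle(elem)   # hypothetical callback
#           elem.clear()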
class _IterParseIterator(object):
def __init__(self, source, events, parser, close_source=False):
self._file = source
self._close_file = close_source
self._events = []
self._index = 0
self._error = None
self.root = self._root = None
self._parser = parser
# wire up the parser for event reporting
parser = self._parser._parser
append = self._events.append
if events is None:
events = ["end"]
for event in events:
if event == "start":
try:
parser.ordered_attributes = 1
parser.specified_attributes = 1
def handler(tag, attrib_in, event=event, append=append,
start=self._parser._start_list):
append((event, start(tag, attrib_in)))
parser.StartElementHandler = handler
except AttributeError:
def handler(tag, attrib_in, event=event, append=append,
start=self._parser._start):
append((event, start(tag, attrib_in)))
parser.StartElementHandler = handler
elif event == "end":
def handler(tag, event=event, append=append,
end=self._parser._end):
append((event, end(tag)))
parser.EndElementHandler = handler
elif event == "start-ns":
def handler(prefix, uri, event=event, append=append):
try:
uri = (uri or "").encode("ascii")
except UnicodeError:
pass
append((event, (prefix or "", uri or "")))
parser.StartNamespaceDeclHandler = handler
elif event == "end-ns":
def handler(prefix, event=event, append=append):
append((event, None))
parser.EndNamespaceDeclHandler = handler
else:
raise ValueError("unknown event %r" % event)
def next(self):
try:
while 1:
try:
item = self._events[self._index]
self._index += 1
return item
except IndexError:
pass
if self._error:
e = self._error
self._error = None
raise e
if self._parser is None:
self.root = self._root
break
# load event buffer
del self._events[:]
self._index = 0
data = self._file.read(16384)
if data:
try:
self._parser.feed(data)
except SyntaxError as exc:
self._error = exc
else:
self._root = self._parser.close()
self._parser = None
except:
if self._close_file:
self._file.close()
raise
if self._close_file:
self._file.close()
raise StopIteration
def __iter__(self):
return self
##
# Parses an XML document from a string constant. This function can
# be used to embed "XML literals" in Python code.
#
# @param source A string containing XML data.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return An Element instance.
# @defreturn Element
def XML(text, parser=None):
if not parser:
parser = XMLParser(target=TreeBuilder())
parser.feed(text)
return parser.close()
##
# Parses an XML document from a string constant, and also returns
# a dictionary which maps from element id:s to elements.
#
# @param source A string containing XML data.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return A tuple containing an Element instance and a dictionary.
# @defreturn (Element, dictionary)
def XMLID(text, parser=None):
if not parser:
parser = XMLParser(target=TreeBuilder())
parser.feed(text)
tree = parser.close()
ids = {}
for elem in tree.iter():
id = elem.get("id")
if id:
ids[id] = elem
return tree, ids
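# Comment-only sketch of XMLID (the document is illustrative):
#
#   tree, ids = XMLID('<doc><p id="intro">hi</p></doc>')
#   ids["intro"].text   # 'hi'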
##
# Parses an XML document from a string constant. Same as {@link #XML}.
#
# @def fromstring(text)
# @param source A string containing XML data.
# @return An Element instance.
# @defreturn Element
fromstring = XML
##
# Parses an XML document from a sequence of string fragments.
#
# @param sequence A list or other sequence containing XML data fragments.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLParser} parser is used.
# @return An Element instance.
# @defreturn Element
# @since 1.3
def fromstringlist(sequence, parser=None):
if not parser:
parser = XMLParser(target=TreeBuilder())
for text in sequence:
parser.feed(text)
return parser.close()
# --------------------------------------------------------------------
##
# Generic element structure builder. This builder converts a sequence
# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
# #TreeBuilder.end} method calls to a well-formed element structure.
# <p>
# You can use this class to build an element structure using a custom XML
# parser, or a parser for some other XML-like format.
#
# @param element_factory Optional element factory. This factory
# is called to create new Element instances, as necessary.
class TreeBuilder(object):
def __init__(self, element_factory=None):
self._data = [] # data collector
self._elem = [] # element stack
self._last = None # last element
self._tail = None # true if we're after an end tag
if element_factory is None:
element_factory = Element
self._factory = element_factory
##
# Flushes the builder buffers, and returns the toplevel document
# element.
#
# @return An Element instance.
# @defreturn Element
def close(self):
assert len(self._elem) == 0, "missing end tags"
assert self._last is not None, "missing toplevel element"
return self._last
def _flush(self):
if self._data:
if self._last is not None:
text = "".join(self._data)
if self._tail:
assert self._last.tail is None, "internal error (tail)"
self._last.tail = text
else:
assert self._last.text is None, "internal error (text)"
self._last.text = text
self._data = []
##
# Adds text to the current element.
#
# @param data A string. This should be either an 8-bit string
# containing ASCII text, or a Unicode string.
def data(self, data):
self._data.append(data)
##
# Opens a new element.
#
# @param tag The element name.
# @param attrib A dictionary containing element attributes.
# @return The opened element.
# @defreturn Element
def start(self, tag, attrs):
self._flush()
self._last = elem = self._factory(tag, attrs)
if self._elem:
self._elem[-1].append(elem)
self._elem.append(elem)
self._tail = 0
return elem
##
# Closes the current element.
#
# @param tag The element name.
# @return The closed element.
# @defreturn Element
def end(self, tag):
self._flush()
self._last = self._elem.pop()
assert self._last.tag == tag,\
"end tag mismatch (expected %s, got %s)" % (
self._last.tag, tag)
self._tail = 1
return self._last
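# Comment-only sketch of driving TreeBuilder from a custom (non-XML)
# parser; the tags and data are made up:
#
#   tb = TreeBuilder()
#   tb.start("root", {})
#   tb.start("item", {"n": "1"})
#   tb.data("payload")
#   tb.end("item")
#   tb.end("root")
#   elem = tb.close()   # a well-formed Element structure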
##
# Element structure builder for XML source data, based on the
# <b>expat</b> parser.
#
# @keyparam target Target object. If omitted, the builder uses an
# instance of the standard {@link #TreeBuilder} class.
# @keyparam html Predefine HTML entities. This flag is not supported
# by the current implementation.
# @keyparam encoding Optional encoding. If given, the value overrides
# the encoding specified in the XML file.
# @see #ElementTree
# @see #TreeBuilder
class XMLParser(object):
def __init__(self, html=0, target=None, encoding=None):
try:
from xml.parsers import expat
except ImportError:
try:
import pyexpat as expat
except ImportError:
raise ImportError(
"No module named expat; use SimpleXMLTreeBuilder instead"
)
parser = expat.ParserCreate(encoding, "}")
if target is None:
target = TreeBuilder()
# underscored names are provided for compatibility only
self.parser = self._parser = parser
self.target = self._target = target
self._error = expat.error
self._names = {} # name memo cache
# callbacks
parser.DefaultHandlerExpand = self._default
parser.StartElementHandler = self._start
parser.EndElementHandler = self._end
parser.CharacterDataHandler = self._data
# optional callbacks
parser.CommentHandler = self._comment
parser.ProcessingInstructionHandler = self._pi
# let expat do the buffering, if supported
try:
self._parser.buffer_text = 1
except AttributeError:
pass
# use new-style attribute handling, if supported
try:
self._parser.ordered_attributes = 1
self._parser.specified_attributes = 1
parser.StartElementHandler = self._start_list
except AttributeError:
pass
self._doctype = None
self.entity = {}
try:
self.version = "Expat %d.%d.%d" % expat.version_info
except AttributeError:
pass # unknown
def _raiseerror(self, value):
err = ParseError(value)
err.code = value.code
err.position = value.lineno, value.offset
raise err
def _fixtext(self, text):
# convert text string to ascii, if possible
try:
return text.encode("ascii")
except UnicodeError:
return text
def _fixname(self, key):
# expand qname, and convert name string to ascii, if possible
try:
name = self._names[key]
except KeyError:
name = key
if "}" in name:
name = "{" + name
self._names[key] = name = self._fixtext(name)
return name
def _start(self, tag, attrib_in):
fixname = self._fixname
fixtext = self._fixtext
tag = fixname(tag)
attrib = {}
for key, value in attrib_in.items():
attrib[fixname(key)] = fixtext(value)
return self.target.start(tag, attrib)
def _start_list(self, tag, attrib_in):
fixname = self._fixname
fixtext = self._fixtext
tag = fixname(tag)
attrib = {}
if attrib_in:
for i in range(0, len(attrib_in), 2):
attrib[fixname(attrib_in[i])] = fixtext(attrib_in[i+1])
return self.target.start(tag, attrib)
def _data(self, text):
return self.target.data(self._fixtext(text))
def _end(self, tag):
return self.target.end(self._fixname(tag))
def _comment(self, data):
try:
comment = self.target.comment
except AttributeError:
pass
else:
return comment(self._fixtext(data))
def _pi(self, target, data):
try:
pi = self.target.pi
except AttributeError:
pass
else:
return pi(self._fixtext(target), self._fixtext(data))
def _default(self, text):
prefix = text[:1]
if prefix == "&":
# deal with undefined entities
try:
self.target.data(self.entity[text[1:-1]])
except KeyError:
from xml.parsers import expat
err = expat.error(
"undefined entity %s: line %d, column %d" %
(text, self._parser.ErrorLineNumber,
self._parser.ErrorColumnNumber)
)
err.code = 11 # XML_ERROR_UNDEFINED_ENTITY
err.lineno = self._parser.ErrorLineNumber
err.offset = self._parser.ErrorColumnNumber
raise err
elif prefix == "<" and text[:9] == "<!DOCTYPE":
self._doctype = [] # inside a doctype declaration
elif self._doctype is not None:
# parse doctype contents
if prefix == ">":
self._doctype = None
return
text = text.strip()
if not text:
return
self._doctype.append(text)
n = len(self._doctype)
if n > 2:
type = self._doctype[1]
if type == "PUBLIC" and n == 4:
name, type, pubid, system = self._doctype
elif type == "SYSTEM" and n == 3:
name, type, system = self._doctype
pubid = None
else:
return
if pubid:
pubid = pubid[1:-1]
if hasattr(self.target, "doctype"):
self.target.doctype(name, pubid, system[1:-1])
elif self.doctype is not self._XMLParser__doctype:
# warn about deprecated call
self._XMLParser__doctype(name, pubid, system[1:-1])
self.doctype(name, pubid, system[1:-1])
self._doctype = None
##
# (Deprecated) Handles a doctype declaration.
#
# @param name Doctype name.
# @param pubid Public identifier.
# @param system System identifier.
def doctype(self, name, pubid, system):
"""This method of XMLParser is deprecated."""
warnings.warn(
"This method of XMLParser is deprecated. Define doctype() "
"method on the TreeBuilder target.",
DeprecationWarning,
)
# sentinel, if doctype is redefined in a subclass
__doctype = doctype
##
# Feeds data to the parser.
#
# @param data Encoded data.
def feed(self, data):
try:
self._parser.Parse(data, 0)
except self._error, v:
self._raiseerror(v)
##
# Finishes feeding data to the parser.
#
# @return An element structure.
# @defreturn Element
def close(self):
try:
self._parser.Parse("", 1) # end of data
except self._error, v:
self._raiseerror(v)
tree = self.target.close()
del self.target, self._parser # get rid of circular references
return tree
# compatibility
XMLTreeBuilder = XMLParser
# workaround circular import.
try:
from ElementC14N import _serialize_c14n
_serialize["c14n"] = _serialize_c14n
except ImportError:
pass<|fim▁end|> | def __setitem__(self, index, element): |
<|file_name|>StandaloneConfig.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|>package org.apache.kafka.connect.runtime.standalone;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.runtime.WorkerConfig;
import java.util.Map;
public class StandaloneConfig extends WorkerConfig {
private static final ConfigDef CONFIG;
/**
* <code>offset.storage.file.filename</code>
*/
public static final String OFFSET_STORAGE_FILE_FILENAME_CONFIG = "offset.storage.file.filename";
private static final String OFFSET_STORAGE_FILE_FILENAME_DOC = "File to store offset data in";
static {
CONFIG = baseConfigDef()
.define(OFFSET_STORAGE_FILE_FILENAME_CONFIG,
ConfigDef.Type.STRING,
ConfigDef.Importance.HIGH,
OFFSET_STORAGE_FILE_FILENAME_DOC);
}
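    // Illustrative only: in a standalone worker properties file this
    // setting is typically supplied as, e.g.
    //
    //   offset.storage.file.filename=/tmp/connect.offsets
    //
    // (the path shown is a placeholder).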
public StandaloneConfig(Map<String, String> props) {
super(CONFIG, props);
}
}<|fim▁end|> | * limitations under the License.
*/ |
<|file_name|>output_disposition.py<|end_file_name|><|fim▁begin|>'''
Created on Sep 14, 2013
@author: paepcke
Modifications:
- Jan 1, 2014: added remove() method to OutputFile
'''
import StringIO
from collections import OrderedDict
import csv
import re
import sys
import os
import tempfile
from col_data_type import ColDataType
class OutputDisposition(object):
'''
Specifications for where completed relation rows
should be deposited, and in which format. Current
output options are to files, and to stdout.
This class is abstract, but make sure the subclasses
invoke this super's __init__() when they are initialized.
Also defined here are available output formats, of
which there are two: CSV, and SQL insert statements AND
CSV.
NOTE: currently the CSV-only format option is broken. Not
enough time to maintain it.
SQL insert statements that are directed to files will also
generate equivalent .csv files. The insert statement files
will look like the result of a mysqldump, and inserts into
different tables are mixed. The corresponding (values-only)
csv files are split: one file for each table.
'''
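    # A minimal comment-only sketch of intended use (the file name and
    # format choice are illustrative); __enter__/__exit__ below make
    # dispositions usable as context managers:
    #
    #   outDisp = OutputFile('/tmp/dump.sql',
    #                        OutputDisposition.OutputFormat.SQL_INSERTS_AND_CSV)
    #   with outDisp as out:
    #       out.write("INSERT INTO ...")   # hypothetical payload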
def __init__(self, outputFormat, outputDestObj=None):
'''
:param outputDestObj: instance of one of the subclasses
:type outputDestObj: Subclass(OutputDisposition)
'''
self.outputFormat = outputFormat
if outputDestObj is None:
self.outputDest = self
else:
self.outputDest = outputDestObj
self.csvTableFiles = {}
self.schemas = TableSchemas()
def __enter__(self):
return self.outputDest
    def __exit__(self, excType, excValue, excTraceback):
try:
self.outputDest.close()
except:
# If the conversion itself went fine, then
# raise this exception from the closing attempt.
# But if the conversion failed, then have the
# system re-raise that earlier exception:
if excValue is None:
raise IOError("Could not close the output of the conversion: %s" % sys.exc_info()[0])
# Return False to indicate that if the conversion
# threw an error, the exception should now be re-raised.
# If the conversion worked fine, then this return value
# is ignored.
return False
def flush(self):
self.outputDest.flush()
def getOutputFormat(self):
return self.outputFormat
def addSchemaHints(self, tableName, schemaHints):
'''
Provide a schema hint dict for the table of the given name.
:param tableName: name of table to which schema applies. The name may be None, in which case it refers to the main (default) table.
:type tableName: String
:param schemaHints: dict mapping column names to SQL types via ColumnSpec instances
:type schemaHints: [ordered]Dict<String,ColumnSpec>
'''
self.schemas.addColSpecs(tableName, schemaHints)
def getSchemaHint(self, colName, tableName):
'''
Given a column name, and a table name, return the ColumnSpec object
that describes that column. If tableName is None, the main (default)
table's schema will be searched for a colName entry
:param colName: name of column whose schema info is sought
:type colName: String
:param tableName: name of table in which the given column resides
:type tableName: String
:return: list of ColumnSpec instances
:rtype: (ColumnSpec)
@raise KeyError: if table or column are not found
'''
return self.schemas[tableName][colName]
def getSchemaHintByPos(self, pos, tableName):
try:
return self.schemas[tableName].values()[pos]
        except (KeyError, ValueError):
return None
except IndexError:
raise ValueError("Attempt to access pos %s in schema for table %s, which is shorter than %s: %s") %\
(str(pos), tableName, str(pos), self.schemas[tableName].values())
def getSchema(self, tableName):
try:
return self.schemas[tableName].values()
        except (KeyError, ValueError):
return None
def copySchemas(self, destDisposition):
'''
Given another instance of OutputDisposition,
copy this instance's schemas to the destination.
:param destDisposition: another instance of OutputDisposition
:type destDisposition: OutputDisposition
'''
destDisposition.schemas = self.schemas
def ensureColExistence(self, colName, colDataType, jsonToRelationConverter, tableName=None):
'''
Given a column name and MySQL datatype name, check whether this
column has previously been encountered. If not, a column information
object is created, which will eventually be used to create the column
header, or SQL alter statements.
:param colName: name of the column to consider
:type colName: String
:param colDataType: datatype of the column.
:type colDataType: ColDataType
:param tableName: name of table to which the column is to belong; None if for main table
:type tableName: {String | None}
'''
schemaDict = self.schemas[tableName]
if schemaDict is None or len(schemaDict) == 0:
# schema for this table definitely does not have the column:
            colSpecObj = ColumnSpec(colName, colDataType, jsonToRelationConverter)
self.schemas[tableName] = OrderedDict({colName : colSpecObj})
return
# Have schema (dict) for this table. Does that dict contain
# an entry for the col name?
try:
schemaDict[colName]
# all set:
return
except KeyError:
            colSpecObj = ColumnSpec(colName, colDataType, jsonToRelationConverter)
schemaDict[colName] = colSpecObj
def createTmpTableFile(self, tableName, fileSuffix):
'''
Used for cases in which parsers must create more than one
table. Those tables need to be written to disk, even when
output of the main table is piped.
:param tableName: name by which the table file obj can be retrieved
:type tableName: String
:param fileSuffix: suffix for temp file name. Ex. 'csv' for CSV outputs, or 'sql' for SQL dumps
:type fileSuffix: String
:return: file object open for writing
:rtype: File
'''
self.csvTableFiles[tableName] = tempfile.NamedTemporaryFile(prefix='tmpTable',
suffix=fileSuffix)
return self.csvTableFiles[tableName]
#--------------------- Available Output Formats
class OutputFormat():
CSV = 0
SQL_INSERT_STATEMENTS = 1
SQL_INSERTS_AND_CSV = 2
#--------------------- Available Output Destination Options:
class OutputPipe(OutputDisposition):
def __init__(self, outputFormat):
super(OutputPipe, self).__init__(outputFormat)
self.fileHandle = sys.stdout
# Make file name accessible as property just like
# Python file objects do:
self.name = "<stdout>" # @UnusedVariable
self.csvWriter = csv.writer(sys.stdout, dialect='excel', delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
self.tableCSVWriters = {}
def close(self):
pass # don't close stdout
def flush(self):
sys.stdout.flush()
def __str__(self):
return "<OutputPipe:<stdout>"
def writerow(self, colElementArray, tableName=None):
# For CSV: make sure everything is a string:
if self.outputFormat == OutputDisposition.OutputFormat.CSV:
row = map(str,colElementArray)
if tableName is None:
self.csvWriter.writerow(row)
else:
self.tableCSVWriters[tableName].writerow(row)
else:
print(colElementArray)
def startNewTable(self, tableName, schemaHintsNewTable):
'''
Called when parser needs to create a table beyond
the main table.
:param schemaHintsNewTable:
:type schemaHintsNewTable:
'''
self.addSchemaHints(tableName, schemaHintsNewTable)
tmpTableFile = self.createTmpTableFile(tableName, 'csv')
self.tableCSVWriters[tableName] = csv.writer(tmpTableFile,
dialect='excel',
delimiter=',',
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
def write(self, whatToWrite):
'''
Write given string straight to the output. No assumption made about the format
:param whatToWrite:
:type whatToWrite:
'''
sys.stdout.write(whatToWrite)
sys.stdout.flush()
def getCSVTableOutFileName(self, tableName):
return self.name
class OutputFile(OutputDisposition):
# When looking at INSERT INTO tableName (...,
# grab 'tableName':
TABLE_NAME_PATTERN = re.compile(r'[^\s]*\s[^\s]*\s([^\s]*)\s')
# When looking at:" ('7a286e24_b578_4741_b6e0_c0e8596bd456','Mozil...);\n"
# grab everything inside the parens, including the trailing ');\n', which
# we'll cut out in the code:
VALUES_PATTERN = re.compile(r'^[\s]{4}\(([^\n]*)\n{0,1}')
def __init__(self, fileName, outputFormat, options='ab'):
'''
Create instance of an output file destination for converted log files.
Such an instance is created both for OutputFormat.SQL_INSERT_STATEMENTS and
for OutputFormat.CSV. In the Insert statements case the fileName is the file
where all INSERT statements are placed; i.e. the entire dump. If the output format
is CSV, then the fileName is a prefix for the file names of each generated CSV file
(one file for each table).
:param fileName: fully qualified name of output file for CSV (in case of CSV-only),
or MySQL INSERT statement dump
:type fileName: String
:param outputFormat: whether to output CSV or MySQL INSERT statements
:type outputFormat: OutputDisposition.OutputFormat
:param options: output file options as per Python built-in 'open()'. Defaults to append/binary. The
latter for compatibility with Windows
:type options: String
'''
super(OutputFile, self).__init__(outputFormat)
# Make file name accessible as property just like
# Python file objects do:
self.name = fileName # @UnusedVariable
self.outputFormat = outputFormat
# Open the output file as 'append' and 'binary'
# The latter is needed for Windows.
self.fileHandle = open(fileName, options)
        # Main-table CSV rows go to the output file itself, not stdout:
        self.csvWriter = csv.writer(self.fileHandle, dialect='excel', delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
if outputFormat == OutputDisposition.OutputFormat.CSV or\
outputFormat == OutputDisposition.OutputFormat.SQL_INSERTS_AND_CSV:
# Prepare for CSV files needed for the tables:
self.tableCSVWriters = {}
def close(self):
self.fileHandle.close()
# Also close any CSV out files that might exist:
try:
for csvFD in self.csvTableFiles.values():
csvFD.close()
except:
pass
def flush(self):
self.fileHandle.flush()
for csvFD in self.tableCSVWriters.values():
try:
csvFD.flush()
except:
pass
def remove(self):
try:
os.remove(self.fileHandle.name)
except:
pass
def __str__(self):
return "<OutputFile:%s>" % self.getFileName()
def getFileName(self, tableName=None):
'''
Get file name of a MySQL INSERT statement outfile,
or, given a table name, the name of the outfile
for CSV destined to the given table.
:param tableName:
:type tableName:
'''
if tableName is None:
return self.name
else:
fd = self.csvTableFiles.get(tableName, None)
if fd is None:
return None
return fd.name
def writerow(self, colElementArray, tableName=None):
'''
How I wish Python had parameter type based polymorphism. Life
would be so much cleaner.
ColElementArray is either an array of values (coming from
a CSV-only parser), or a string that contains a complete
MySQL INSERT statement (from MySQL dump-creating parsers).
In the first case, we ensure all elements in the array are
strings, and write to output. In the latter case we write
the INSERT statements to their output file. Then, if output
format is SQL_INSERTS_AND_CSV, we also extract the MySQL
values and write them to the proper CSV file.
:param colElementArray: either a MySQL INSERT statement, or an array of values
:type colElementArray: {String | [string]}
:param tableName: name of table to which output is destined. Only needed for
value arrays from CSV-only parsers. Their value arrays don't contain
info on the destination table. INSERT statements do contain the destination table
name.
:type tableName: String
'''
if isinstance(colElementArray, list):
# Simple CSV array of values;
# make sure every array element is a string:
row = map(str,colElementArray)
if tableName is None:
# The main (and maybe only) table:
self.csvWriter.writerow(row)
else:
# One of the other tables for which files
# were opened during calls to startNewTable():
self.tableCSVWriters[tableName].writerow(row)
else:
# We are either outputting INSERT statements, or
# both those and CSV, or just CSV derived from a
# full MySQL INSERT parser, like edxTrackLogJSONParser.
# Start with the INSERTS:
if self.outputFormat == OutputDisposition.OutputFormat.SQL_INSERT_STATEMENTS or\
self.outputFormat == OutputDisposition.OutputFormat.SQL_INSERTS_AND_CSV:
self.fileHandle.write(colElementArray + '\n')
# If we are outputting either CSV or INSERTs and CSV, do the CSV
# part now:
if self.outputFormat != OutputDisposition.OutputFormat.SQL_INSERT_STATEMENTS:
# Strip the CSV parts out from the INSERT statement, which may
# contain multiple VALUE statements:
self.writeCSVRowsFromInsertStatement(colElementArray)
def write(self, whatToWrite):
'''
Write given string straight to the output. No assumption made about the format
:param whatToWrite:
:type whatToWrite:
'''
self.fileHandle.write(whatToWrite)
self.fileHandle.flush()
def startNewTable(self, tableName, schemaHintsNewTable):
'''
Called when parser needs to create a table beyond
the main table (in case of CSV-Only), or any table
in case of SQLInsert+CSV.
:param tableName: name of new table
:type tableName: string
:param schemaHintsNewTable: map column name to column SQL type
:type schemaHintsNewTable: {String,ColDataType}
'''
self.addSchemaHints(tableName, schemaHintsNewTable)
if self.outputFormat == OutputDisposition.OutputFormat.SQL_INSERT_STATEMENTS:
return
# We are producing CSV (possibly in addition to Inserts):
try:
# Already have a table writer for this table?
self.tableCSVWriters[tableName]
return # yep
except KeyError:
# OK, really is a new table caller is starting:
pass
# Ensure that we have an open FD to write to for this table:
if self.outputFormat == OutputDisposition.OutputFormat.CSV or\
self.outputFormat == OutputDisposition.OutputFormat.SQL_INSERTS_AND_CSV:
self.ensureOpenCSVOutFileFromTableName(tableName)
def ensureOpenCSVOutFileFromTableName(self, tableName):
'''
Checks whether an open File object exists for the given
table. If not, creates one. Returns the FD. The output
file is created in the same directory as self.out
:param tableName: name of table whose CSV output file we are to check for, or create
:type tableName: String
:return: a File object open for writing/appending
:rtype: File
'''
try:
# If we already have an FD for this table, return:
return self.tableCSVWriters[tableName]
except KeyError:
# Else create one below:
pass
outFileName = self.getFileName()
if outFileName == '/dev/null':
outFile = open('/dev/null', 'ab')
self.csvTableFiles[tableName] = outFile
return outFile
csvOutFileName = self.getCSVTableOutFileName(tableName)
outFile = open(csvOutFileName, 'w')
self.csvTableFiles[tableName] = outFile
self.tableCSVWriters[tableName] = csv.writer(outFile,
dialect='excel',
delimiter=',',
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
return self.tableCSVWriters[tableName]
def getCSVTableOutFileName(self, tableName):
# The 'None' below ensures that we get the
# main file's name back:
return "%s_%sTable.csv" % (self.getFileName(None), tableName)
def writeCSVRowsFromInsertStatement(self, insertStatement):
'''
Takes one SQL INSERT INTO Statement, possibly including multiple VALUES
lines. Extracts the destination table and the values list(s), and writes
them to disk via the appropriate CSVWriter. The INSERT statements are
expected to be very regular, generated by json_to_relation. Don't use
this method for arbitrary INSERT statements, b/c it relies on regular
expressions that expect the specific format. Prerequisite: self.tableCSVWriters
is a dictionary that maps table names into File objects that are open
for writing.
:param insertStatement: Well-formed MySQL INSERT statement
:type insertStatement: String
@raise ValueError: if table name could not be extracted from the
INSERT statement, or if the insertStatement contains no VALUES
clause.
'''
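        # Hedged example of the INSERT shape this method expects (the
        # table name and all values below are made up); TABLE_NAME_PATTERN
        # and VALUES_PATTERN above assume exactly this layout:
        #
        #   INSERT INTO EdxTrackEvent (event_id,agent) VALUES
        #       ('7a286e24','Mozilla/5.0'),
        #       ('8b397f35','curl/7.29');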
inFD = StringIO.StringIO(insertStatement)
try:
firstLine = inFD.readline()
# Pick out the name of the table to which CSV is to be added:
tblNameMatch = OutputFile.TABLE_NAME_PATTERN.search(firstLine)
if tblNameMatch is None:
raise ValueError('No match when trying to extract table name from "%s"' % insertStatement)
tblName = tblNameMatch.group(1)
except IndexError:
raise ValueError('Could not extract table name from "%s"' % insertStatement)
readAllValueTuples = False
while not readAllValueTuples:
# Get values list that belongs to this insert statement:
valuesLine = inFD.readline()
if not valuesLine.startswith(' ('):
readAllValueTuples = True
continue
# Extract the comma-separated values list out from the parens;
# first get "'fasdrew_fdsaf...',...);\n":
oneValuesLineMatch = OutputFile.VALUES_PATTERN.search(valuesLine)
if oneValuesLineMatch is None:
# Hopefully never happens:
raise ValueError('No match for values line "%s"' % insertStatement)
# Get just the comma-separated values list from
# 'abfd_sfd,...);\n
valuesList = oneValuesLineMatch.group(1)[:-2] + '\n'
# Make sure we've seen additions to this table before or,
# if not, have a CSV writer and a file created to receive
# the CSV lines:
self.ensureOpenCSVOutFileFromTableName(tblName)
theOutFd = self.csvTableFiles[tblName]
theOutFd.write(valuesList)
class ColumnSpec(object):
'''
Housekeeping class. Each instance represents the name,
position, and datatype of one column. These instances are
used to generate column name headers, and
SQL insert statements.
'''
def __init__(self, colName, colDataType, jsonToRelationProcessor):
'''
Create a ColumnSpec instance.
:param colName: name of column
:type colName: String
:param colDataType: data type of column (an enum)
:type colDataType: ColumnSpec
:param jsonToRelationProcessor: associated JSON to relation JSONToRelation instance
:type jsonToRelationProcessor: JSONToRelation
'''
self.colName = colName
self.colDataType = colDataType
self.colPos = jsonToRelationProcessor.getNextNewColPos()
jsonToRelationProcessor.bumpNextNewColPos()
def getDefaultValue(self):
return ColDataType().defaultValues[self.colDataType]
def getName(self):
'''
Return column name
:return: name of column
:rtype: String
'''
return self.colName
def getType(self):
'''
Return SQL type
:return: SQL type of colum in upper case
:rtype: String
'''
return ColDataType().toString(self.colDataType).upper()
def getSQLDefSnippet(self):
'''
Return string snippet to use in SQL CREATE TABLE or ALTER TABLE
statement
'''
return " %s %s" % (self.getName(), self.getType())
def __str__(self):
return "<Col %s: %s (position %s)>" % (self.colName,
self.getType(),
self.colPos)
def __repr__(self):
return self.__str__()
class TableSchemas(object):
'''
Repository for the schemas of all tables. A schema is an
array ColumnSpec instances. Each such list is associated with
one relational table. A class var dict holds the schemas for
all tables.
'''
def __init__(self):
self.allSchemas = OrderedDict()
# Add empty schema for main (default) table:
self.allSchemas[None] = OrderedDict()
def __getitem__(self, tableName):<|fim▁hole|> return self.allSchemas[tableName]
def __setitem__(self, tableName, colSpecsDict):
self.allSchemas[tableName] = colSpecsDict
def keys(self):
return self.allSchemas.keys()
def addColSpec(self, tableName, colSpec):
try:
schema = self.allSchemas[tableName]
except KeyError:
self.allSchemas[tableName] = {colSpec.getName() : colSpec}
schema = self.allSchemas[tableName]
schema[colSpec.getName()] = colSpec
def addColSpecs(self, tableName, colSpecsDict):
if not isinstance(colSpecsDict, OrderedDict):
raise ValueError("ColumSpec parameter must be a dictionary<ColName,ColumnSpec>")
try:
schema = self.allSchemas[tableName]
except KeyError:
self.allSchemas[tableName] = colSpecsDict
schema = self.allSchemas[tableName]
# Change schema to include the new dict:
schema.update(colSpecsDict)<|fim▁end|> | |
<|file_name|>imagenet_train_darknet.py<|end_file_name|><|fim▁begin|>"""Train ILSVRC2017 Data using homemade scripts."""
import cv2
import os
import math
import tensorflow as tf
from multiprocessing import Process, Queue
import sys
FILE_DIR = os.path.dirname(__file__)
sys.path.append(FILE_DIR + '/../')
import config as cfg
from img_dataset.ilsvrc2017_cls_multithread import ilsvrc_cls
from yolo2_nets.darknet import darknet19
from yolo2_nets.net_utils import get_ordered_ckpts
from utils.timer import Timer
slim = tf.contrib.slim
def get_validation_process(imdb, queue_in, queue_out):
"""Get validation dataset. Run in a child process."""
while True:
queue_in.get()
images, labels = imdb.get()
queue_out.put([images, labels])
imdb = ilsvrc_cls('train', data_aug=True, multithread=cfg.MULTITHREAD)
val_imdb = ilsvrc_cls('val', batch_size=64)
# set up child process for getting validation data
queue_in = Queue()
queue_out = Queue()
val_data_process = Process(target=get_validation_process,
args=(val_imdb, queue_in, queue_out))
val_data_process.start()
queue_in.put(True) # start getting the first batch
CKPTS_DIR = cfg.get_ckpts_dir('darknet19', imdb.name)
TENSORBOARD_TRAIN_DIR, TENSORBOARD_VAL_DIR = cfg.get_output_tb_dir(
'darknet19', imdb.name)
input_data = tf.placeholder(tf.float32, [None, 224, 224, 3])
label_data = tf.placeholder(tf.int32, None)
is_training = tf.placeholder(tf.bool)
logits = darknet19(input_data, is_training=is_training)
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=label_data, logits=logits)
loss = tf.reduce_mean(loss)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
# train_op = tf.train.AdamOptimizer(0.0005).minimize(loss)
train_op = tf.train.MomentumOptimizer(0.001, 0.9).minimize(loss)
correct_pred = tf.equal(tf.cast(tf.argmax(logits, 1), tf.int32), label_data)
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
tf.summary.scalar('loss', loss)
tf.summary.scalar('accuracy', accuracy)
######################
# Initialize Session #
######################
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth = True
sess = tf.Session(config=tfconfig)
merged = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(TENSORBOARD_TRAIN_DIR)
val_writer = tf.summary.FileWriter(TENSORBOARD_VAL_DIR)
# # initialize variables, assume all vars are new now
# init_op = tf.global_variables_initializer()
# sess.run(init_op)
# load previous models
ckpts = get_ordered_ckpts(sess, imdb, 'darknet19')
variables_to_restore = slim.get_variables_to_restore()
# # change optimizer
# print('Initializing variables for the new optimizer')
# optimzer_vars = [var for var in tf.global_variables()
# if "Momentum" in var.name]
# init_op = tf.variables_initializer(optimzer_vars)
# sess.run(init_op)
# for var in optimzer_vars:
# if var in variables_to_restore:
# variables_to_restore.remove(var)
print('Restorining model snapshots from {:s}'.format(ckpts[-1]))
old_saver = tf.train.Saver(variables_to_restore)
old_saver.restore(sess, str(ckpts[-1]))
print('Restored.')
fnames = ckpts[-1].split('_')
old_epoch = int(fnames[-1][:-5])
imdb.epoch = old_epoch + 1
# simple model saver
cur_saver = tf.train.Saver()
T = Timer()
for i in range(imdb.total_batch * 10 + 1):
T.tic()
images, labels = imdb.get()
_, loss_value, acc_value, train_summary = sess.run(
[train_op, loss, accuracy, merged], {input_data: images, label_data: labels, is_training: 1})
_time = T.toc(average=False)
print('epoch {:d}, iter {:d}/{:d}, training loss: {:.3}, training acc: {:.3}, take {:.2}s'
.format(imdb.epoch, (i + 1) % imdb.total_batch,
imdb.total_batch, loss_value, acc_value, _time))
if (i + 1) % 25 == 0:
T.tic()
val_images, val_labels = queue_out.get()
val_loss_value, val_acc_value, val_summary = sess.run(
[loss, accuracy, merged], {input_data: val_images, label_data: val_labels, is_training: 0})
_val_time = T.toc(average=False)
print('###validation loss: {:.3}, validation acc: {:.3}, take {:.2}s'
.format(val_loss_value, val_acc_value, _val_time))
queue_in.put(True)
global_step = imdb.epoch * imdb.total_batch + (i % imdb.total_batch)
train_writer.add_summary(train_summary, global_step)
val_writer.add_summary(val_summary, global_step)
if (i % (imdb.total_batch * 2) == 0):
save_path = cur_saver.save(sess, os.path.join(<|fim▁hole|> cfg.TRAIN_SNAPSHOT_PREFIX + '_epoch_' + str(imdb.epoch - 1) + '.ckpt'))
print("Model saved in file: %s" % save_path)
# terminate child processes
if cfg.MULTITHREAD:
imdb.close_all_processes()
queue_in.cancel_join_thread()
queue_out.cancel_join_thread()
val_data_process.terminate()<|fim▁end|> | CKPTS_DIR, |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
<|fim▁hole|>from .forms import FileForm
class FileAdmin(admin.ModelAdmin):
list_display = ('id', 'md5', 'file', 'size')
list_per_page = 100
list_display_links = ('md5',)
form = FileForm
class LinkAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'file', 'user')
list_per_page = 100
list_display_links = ('name',)
admin.site.register(File, FileAdmin)
admin.site.register(Link, LinkAdmin)<|fim▁end|> | from .models import File, Link |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate dotenv;
extern crate ctrlc;
extern crate chrono;
extern crate chrono_tz;
extern crate url;
extern crate linkify;
#[macro_use]
extern crate html5ever;
extern crate reqwest;
#[macro_use]
extern crate slog;
extern crate slog_async;
extern crate slog_envlogger;
extern crate slog_scope;
extern crate slog_stdlog;
extern crate slog_term;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate serenity;
#[macro_use]
extern crate error_chain;
mod preview;
mod bot;
mod hacker_news;
mod util;
mod errors {
error_chain! {
foreign_links {
Serenity(::serenity::Error);
Reqwest(::reqwest::Error);
}
links {
HackerNews(::hacker_news::Error, ::hacker_news::ErrorKind);
}
}
}
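// The error_chain! block above generates `Error`, `ErrorKind`, a
// `Result<T>` alias, and `From` conversions for the listed foreign
// links inside `errors`. A hedged, illustrative sketch (this helper is
// not part of the bot):
//
//   use errors::ResultExt;
//
//   fn parse_id(s: &str) -> errors::Result<u64> {
//       s.parse::<u64>()
//           .chain_err(|| format!("invalid id: {}", s))
//   }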
use std::env;
use slog::Drain;
use bot::Bot;
fn main() {
dotenv::dotenv().ok();
let decorator = slog_term::TermDecorator::new().stderr().build();
let formatter = slog_term::CompactFormat::new(decorator).build().fuse();
let logger = slog_envlogger::new(formatter);
let drain = slog_async::Async::default(logger);
let root_logger = slog::Logger::root(
drain.fuse(),
o!(
"version" => env!("CARGO_PKG_VERSION"),
// NOTE
// Uncomment this to get SLOC location
// "place" => slog::FnValue(move |info| {
// format!("{}:{}", info.file(), info.line())
// })
),
);
let _global_logger_guard =
slog_stdlog::init().expect("Couldn't initialize global slog-stdlog logger.");
slog_scope::scope(&root_logger, || {
// Create client.
let token = env::var("DISCORD_TOKEN").expect("token");
let mut bot = Bot::new(root_logger.new(o!("scope" => "Bot")));<|fim▁hole|>
bot.push_previewer(hacker_news::HackerNews);
let mut client = bot::new_client(&token, bot);
// Listen for signal.
let closer = client.close_handle();
let ctrlc_logger = root_logger.clone();
ctrlc::set_handler(move || {
info!(ctrlc_logger, "Received termination signal. Terminating.");
closer.close();
}).expect("Error setting handler.");
// Start client.
if let Err(e) = client.start_autosharded() {
match e {
serenity::Error::Client(serenity::client::ClientError::Shutdown) => {
info!(root_logger, "Shutting down.")
}
_ => error!(root_logger, "Problem with starting the client."; "error" => e.to_string()),
}
}
});
}<|fim▁end|> | |
<|file_name|>java_compile.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools
from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis
from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser
from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.util.dirutil import relativize_paths, safe_mkdir
# From http://kenai.com/projects/jmake/sources/mercurial/content
# /src/com/sun/tools/jmake/Main.java?rev=26
# Main.mainExternal docs.
_JMAKE_ERROR_CODES = {
-1: 'invalid command line option detected',
-2: 'error reading command file',
-3: 'project database corrupted',
-4: 'error initializing or calling the compiler',
-5: 'compilation error',
-6: 'error parsing a class file',
-7: 'file not found',
-8: 'I/O exception',
-9: 'internal jmake exception',
-10: 'deduced and actual class name mismatch',
-11: 'invalid source file extension',
-12: 'a class in a JAR is found dependent on a class with the .java source',
-13: 'more than one entry for the same class is found in the project',
-20: 'internal Java error (caused by java.lang.InternalError)',
-30: 'internal Java error (caused by java.lang.RuntimeException).'
}
# When executed via a subprocess return codes will be treated as unsigned
_JMAKE_ERROR_CODES.update((256 + code, msg) for code, msg in _JMAKE_ERROR_CODES.items())
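# Worked example of the mapping above: jmake's 'compilation error' exits
# with -5; run as a subprocess, that surfaces as the unsigned code
# 256 + (-5) = 251, so both -5 and 251 resolve to the same message.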
class JmakeCompile(JvmCompile):
"""Compile Java code using JMake."""<|fim▁hole|> _supports_concurrent_execution = False
_JMAKE_MAIN = 'org.pantsbuild.jmake.Main'
@classmethod
def get_args_default(cls, bootstrap_option_values):
workdir_gen = os.path.relpath(os.path.join(bootstrap_option_values.pants_workdir, 'gen'),
get_buildroot())
return ('-C-encoding', '-CUTF-8', '-C-g', '-C-Tcolor',
# Don't warn for generated code.
'-C-Tnowarnprefixes',
'-C{0}'.format(workdir_gen),
# Suppress warning for annotations with no processor - we know there are many of these!
'-C-Tnowarnregex', '-C^(warning: )?No processor claimed any of these annotations: .*')
@classmethod
def get_warning_args_default(cls):
return ('-C-Xlint:all', '-C-Xlint:-serial', '-C-Xlint:-path', '-C-deprecation')
@classmethod
def get_no_warning_args_default(cls):
return ('-C-Xlint:none', '-C-nowarn')
@classmethod
def register_options(cls, register):
super(JmakeCompile, cls).register_options(register)
register('--use-jmake', advanced=True, action='store_true', default=True,
fingerprint=True,
help='Use jmake to compile Java targets')
register('--source', advanced=True, fingerprint=True,
help='Provide source compatibility with this release. Overrides the jvm platform '
'source.',
deprecated_hint='The -source arg to javac should be specified by the jvm-platform.',
deprecated_version='0.0.44')
register('--target', advanced=True, fingerprint=True,
help='Generate class files for this JVM version. Overrides the jvm platform target.',
deprecated_hint='The -target arg to javac should be specified by the jvm-platform.',
deprecated_version='0.0.44')
cls.register_jvm_tool(register, 'jmake')
cls.register_jvm_tool(register, 'java-compiler')
def select(self, target):
return self.get_options().use_jmake and super(JmakeCompile, self).select(target)
def __init__(self, *args, **kwargs):
super(JmakeCompile, self).__init__(*args, **kwargs)
self.set_distribution(jdk=True)
self._buildroot = get_buildroot()
# The depfile is generated by org.pantsbuild.tools.compiler.Compiler
# and includes information about package-private classes -- e.g.
# the case where Foo.java also defines class Bar. This allows jmake
# to correctly include these files in its analysis.
self._depfile_folder = os.path.join(self.workdir, 'jmake-depfiles')
@property
def _depfile(self):
safe_mkdir(self._depfile_folder)
return os.path.join(self._depfile_folder, 'global_depfile')
def create_analysis_tools(self):
return AnalysisTools(self.context.java_home, JMakeAnalysisParser(), JMakeAnalysis)
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file,
log_file, settings):
relative_classpath = relativize_paths(classpath, self._buildroot)
jmake_classpath = self.tool_classpath('jmake')
args = [
'-classpath', ':'.join(relative_classpath),
'-d', classes_output_dir,
'-pdb', analysis_file,
'-pdb-text-format',
]
# TODO: This file should always exist for modern jmake installs; this check should
# be removed via a Task-level identity bump after:
# https://github.com/pantsbuild/pants/issues/1351
if os.path.exists(self._depfile):
args.extend(['-depfile', self._depfile])
compiler_classpath = self.tool_classpath('java-compiler')
args.extend([
'-jcpath', ':'.join(compiler_classpath),
'-jcmainclass', 'org.pantsbuild.tools.compiler.Compiler',
])
if not self.get_options().colors:
filtered_args = filter(lambda arg: not arg == '-C-Tcolor', self._args)
else:
filtered_args = self._args
args.extend(filtered_args)
args.extend(settings.args)
if '-C-source' in args:
raise TaskError("Set the source Java version with the 'source' or with the jvm platform, not "
"in 'args'.")
if '-C-target' in args:
raise TaskError("Set the target JVM version with the 'target' option or with the jvm "
"platform, not in 'args'.")
if self.get_options().source or self.get_options().target:
self.context.log.warn('--compile-java-source and --compile-java-target trample and override '
'target jvm platform settings, and probably should not be used except '
'for testing.')
source_level = self.get_options().source or settings.source_level
target_level = self.get_options().target or settings.target_level
if source_level:
args.extend(['-C-source', '-C{0}'.format(source_level)])
if target_level:
args.extend(['-C-target', '-C{0}'.format(target_level)])
args.append('-C-Tdependencyfile')
args.append('-C{}'.format(self._depfile))
jvm_options = list(self._jvm_options)
args.extend(sources)
result = self.runjava(classpath=jmake_classpath,
main=JmakeCompile._JMAKE_MAIN,
jvm_options=jvm_options,
args=args,
workunit_name='jmake',
workunit_labels=[WorkUnitLabel.COMPILER])
if result:
default_message = 'Unexpected error - JMake returned {}'.format(result)
raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))<|fim▁end|> | _name = 'java'
_file_suffix = '.java' |
<|file_name|>poly2wkt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import argparse
def loopfile(polyfile):
    with open(polyfile, 'r') as f:
        read_data = f.readlines()
    polylines = []
    polyline = ""
    countend = 0
# countpoly = 0
for l in read_data:
s = l.strip()
if len(s) > 1:
try:
x = ''
y = ''
if int(s[0]) > -180:
xy = s.split(" ")
x = xy[0]
for v in xy:
if x != v:
if len(v) > 1:
if int(v[0])> -90:
y = v
polyline += x + " " + y + ","
except ValueError:
pass
if len(s) == 1:
polyline = "("<|fim▁hole|># else:
# polyline = "POLYGON (("
if s == "END":
countend += 1
if (countend%2) == 1:
polyline = polyline[0:len(polyline)-1]
polyline += ")" #)"
polylines.append(polyline)
return polylines
def createwkt(polylines):
polygon=""
if len(polylines) >0:
polygon = "POLYGON ("
for p in polylines:
polygon += p +","
polygon = polygon[0:len(polygon)-1]
polygon += ")"
return polygon
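# Hedged sketch of the Osmosis-style .poly layout loopfile() expects
# (the name and coordinates below are illustrative). The one-character
# section id opens a ring, and each "END" closes a ring or the file:
#
#   example
#   1
#      13.45 52.45
#      13.75 52.45
#      13.75 52.60
#   END
#   END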
parser = argparse.ArgumentParser(description='convert a .poly file in .wkt format')
parser.add_argument('infile', metavar='infile', type=str,
help='inputfile')
parser.add_argument('-o',dest='outfile', type=str,help='output file',default=None)
parser.add_argument('-i', '--insertsql', dest='sqlstring',default=False,action='store_true',
help='create insert sql string')
parser.add_argument('-c', '--createtable', dest='createtable',default=False,action='store_true',
help='create sql string with create table')
parser.add_argument('-t', '--tablename', dest='tablename', default='poly', type=str,
                    help='name of the table (default: poly)')
parser.add_argument('-s', '--silent', dest='silent', action='store_true', default=False,
                    help="don't show output; if you don't need an output file this is the default")
args = parser.parse_args()
tablename = args.tablename
wkt = createwkt(loopfile(args.infile))
out = None
if (args.sqlstring):
out = "INSERT INTO %s (geom) values (GeometryFromText('%s'),4326))\n;" % (tablename,wkt)
if (args.createtable):
out = "CREATE TABLE %s (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT);\n" % (tablename)
out += "SELECT AddGeometryColumn('%s', 'geom', 4326, 'POLYGON', 2);\n" % (tablename)
out += "INSERT INTO %s (geom) values (GeometryFromText('%s',4326));\n" % (tablename,wkt)
if out == "":
out = wkt
if (out is None):
out = wkt
if (args.outfile is not None):
with open(args.outfile,'w') as f:
f.write(out)
f.close()
if (args.silent == False):
print out<|fim▁end|> | # countpoly += 1
# if countpoly == 1:
# polyline = "POLYGON (("
# |
<|file_name|>test_usage.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | ../../../../../share/pyshared/twisted/test/test_usage.py |
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|># pylint: disable=redefined-outer-name, comparison-with-callable
"""Test helper functions."""
import gzip
import importlib
import logging
import os
import sys
from typing import Any, Dict, List, Optional, Tuple, Union
import cloudpickle
import numpy as np
import pytest
from _pytest.outcomes import Skipped
from packaging.version import Version
from ..data import InferenceData, from_dict
_log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def eight_schools_params():
"""Share setup for eight schools."""
return {
"J": 8,
"y": np.array([28.0, 8.0, -3.0, 7.0, -1.0, 1.0, 18.0, 12.0]),
"sigma": np.array([15.0, 10.0, 16.0, 11.0, 9.0, 11.0, 10.0, 18.0]),
}
@pytest.fixture(scope="module")
def draws():
"""Share default draw count."""
return 500
@pytest.fixture(scope="module")
def chains():
"""Share default chain count."""
return 2
def create_model(seed=10):
"""Create model with fake data."""
np.random.seed(seed)
nchains = 4
ndraws = 500
data = {
"J": 8,
"y": np.array([28.0, 8.0, -3.0, 7.0, -1.0, 1.0, 18.0, 12.0]),
"sigma": np.array([15.0, 10.0, 16.0, 11.0, 9.0, 11.0, 10.0, 18.0]),
}
posterior = {
"mu": np.random.randn(nchains, ndraws),
"tau": abs(np.random.randn(nchains, ndraws)),
"eta": np.random.randn(nchains, ndraws, data["J"]),
"theta": np.random.randn(nchains, ndraws, data["J"]),
}
posterior_predictive = {"y": np.random.randn(nchains, ndraws, len(data["y"]))}
sample_stats = {
"energy": np.random.randn(nchains, ndraws),
"diverging": np.random.randn(nchains, ndraws) > 0.90,
"max_depth": np.random.randn(nchains, ndraws) > 0.90,
}
log_likelihood = {
"y": np.random.randn(nchains, ndraws, data["J"]),
}
prior = {
"mu": np.random.randn(nchains, ndraws) / 2,
"tau": abs(np.random.randn(nchains, ndraws)) / 2,
"eta": np.random.randn(nchains, ndraws, data["J"]) / 2,
"theta": np.random.randn(nchains, ndraws, data["J"]) / 2,
}
prior_predictive = {"y": np.random.randn(nchains, ndraws, len(data["y"])) / 2}
sample_stats_prior = {
"energy": np.random.randn(nchains, ndraws),
"diverging": (np.random.randn(nchains, ndraws) > 0.95).astype(int),
}
model = from_dict(
posterior=posterior,
posterior_predictive=posterior_predictive,
sample_stats=sample_stats,
log_likelihood=log_likelihood,
prior=prior,
prior_predictive=prior_predictive,
sample_stats_prior=sample_stats_prior,
observed_data={"y": data["y"]},
dims={
"y": ["obs_dim"],
"log_likelihood": ["obs_dim"],
"theta": ["school"],
"eta": ["school"],
},
coords={"obs_dim": range(data["J"])},
)
return model
def create_multidimensional_model(seed=10):
"""Create model with fake data."""
np.random.seed(seed)
nchains = 4
ndraws = 500
ndim1 = 5
ndim2 = 7
data = {
"y": np.random.normal(size=(ndim1, ndim2)),
"sigma": np.random.normal(size=(ndim1, ndim2)),
}
posterior = {
"mu": np.random.randn(nchains, ndraws),
"tau": abs(np.random.randn(nchains, ndraws)),
"eta": np.random.randn(nchains, ndraws, ndim1, ndim2),
"theta": np.random.randn(nchains, ndraws, ndim1, ndim2),
}
posterior_predictive = {"y": np.random.randn(nchains, ndraws, ndim1, ndim2)}
sample_stats = {
"energy": np.random.randn(nchains, ndraws),
"diverging": np.random.randn(nchains, ndraws) > 0.90,
}
log_likelihood = {
"y": np.random.randn(nchains, ndraws, ndim1, ndim2),
}
prior = {
"mu": np.random.randn(nchains, ndraws) / 2,
"tau": abs(np.random.randn(nchains, ndraws)) / 2,
"eta": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2,
"theta": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2,
}
prior_predictive = {"y": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2}
sample_stats_prior = {
"energy": np.random.randn(nchains, ndraws),
"diverging": (np.random.randn(nchains, ndraws) > 0.95).astype(int),
}
model = from_dict(
posterior=posterior,
posterior_predictive=posterior_predictive,
sample_stats=sample_stats,
log_likelihood=log_likelihood,
prior=prior,
prior_predictive=prior_predictive,
sample_stats_prior=sample_stats_prior,
observed_data={"y": data["y"]},
dims={"y": ["dim1", "dim2"], "log_likelihood": ["dim1", "dim2"]},
coords={"dim1": range(ndim1), "dim2": range(ndim2)},
)
return model
def create_data_random(groups=None, seed=10):
"""Create InferenceData object using random data."""
if groups is None:
groups = ["posterior", "sample_stats", "observed_data", "posterior_predictive"]
rng = np.random.default_rng(seed)
data = rng.normal(size=(4, 500, 8))
idata_dict = dict(
posterior={"a": data[..., 0], "b": data},
sample_stats={"a": data[..., 0], "b": data},
observed_data={"b": data[0, 0, :]},
posterior_predictive={"a": data[..., 0], "b": data},
prior={"a": data[..., 0], "b": data},
prior_predictive={"a": data[..., 0], "b": data},
warmup_posterior={"a": data[..., 0], "b": data},
warmup_posterior_predictive={"a": data[..., 0], "b": data},
warmup_prior={"a": data[..., 0], "b": data},
)
idata = from_dict(
**{group: ary for group, ary in idata_dict.items() if group in groups}, save_warmup=True
)
return idata
@pytest.fixture()
def data_random():
"""Fixture containing InferenceData object using random data."""
idata = create_data_random()
return idata
@pytest.fixture(scope="module")
def models():
"""Fixture containing 2 mock inference data instances for testing."""
# blank line to keep black and pydocstyle happy
class Models:
model_1 = create_model(seed=10)
model_2 = create_model(seed=11)
return Models()
@pytest.fixture(scope="module")
def multidim_models():
"""Fixture containing 2 mock inference data instances with multidimensional data for testing."""
# blank line to keep black and pydocstyle happy
class Models:
model_1 = create_multidimensional_model(seed=10)
model_2 = create_multidimensional_model(seed=11)
return Models()
def check_multiple_attrs(
test_dict: Dict[str, List[str]], parent: InferenceData
) -> List[Union[str, Tuple[str, str]]]:
"""Perform multiple hasattr checks on InferenceData objects.
It is thought to first check if the parent object contains a given dataset,
and then (if present) check the attributes of the dataset.
Given the output of the function, all mismatches between expectation and reality can
be retrieved: a single string indicates a group mismatch and a tuple of strings
``(group, var)`` indicates a mismatch in the variable ``var`` of ``group``.
Parameters
----------
test_dict: dict of {str : list of str}
Its structure should be `{dataset1_name: [var1, var2], dataset2_name: [var]}`.
    A ``~`` at the beginning of a dataset or variable name indicates that the
    name must be asserted to be absent.
parent: InferenceData
<|fim▁hole|> list
List containing the failed checks. It will contain either the dataset_name or a
tuple (dataset_name, var) for all non present attributes.
Examples
--------
    The output below indicates that the ``posterior`` group was expected but not
    found, and that variables ``a`` and ``b`` were expected in ``prior`` but not
    found:
["posterior", ("prior", "a"), ("prior", "b")]
Another example could be the following:
[("posterior", "a"), "~observed_data", ("sample_stats", "~log_likelihood")]
In this case, the output indicates that variable ``a`` was not found in ``posterior``
as it was expected, however, in the other two cases, the preceding ``~`` (kept from the
input negation notation) indicates that ``observed_data`` group should not be present
but was found in the InferenceData and that ``log_likelihood`` variable was found
in ``sample_stats``, also against what was expected.
"""
failed_attrs: List[Union[str, Tuple[str, str]]] = []
for dataset_name, attributes in test_dict.items():
if dataset_name.startswith("~"):
if hasattr(parent, dataset_name[1:]):
failed_attrs.append(dataset_name)
elif hasattr(parent, dataset_name):
dataset = getattr(parent, dataset_name)
for attribute in attributes:
if attribute.startswith("~"):
if hasattr(dataset, attribute[1:]):
failed_attrs.append((dataset_name, attribute))
elif not hasattr(dataset, attribute):
failed_attrs.append((dataset_name, attribute))
else:
failed_attrs.append(dataset_name)
return failed_attrs
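# Illustrative usage (hypothetical values): an empty return means every
# expectation held, e.g.
#     idata = create_model()
#     fails = check_multiple_attrs(
#         {"posterior": ["mu", "tau"], "~warmup_posterior": []}, idata)
#     assert not fails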
def emcee_version():
"""Check emcee version.
Returns
-------
int
Major version number
"""
import emcee
return int(emcee.__version__[0])
def needs_emcee3_func():
"""Check if emcee3 is required."""
# pylint: disable=invalid-name
needs_emcee3 = pytest.mark.skipif(emcee_version() < 3, reason="emcee3 required")
return needs_emcee3
def _emcee_lnprior(theta):
"""Proper function to allow pickling."""
mu, tau, eta = theta[0], theta[1], theta[2:]
# Half-cauchy prior, hwhm=25
if tau < 0:
return -np.inf
prior_tau = -np.log(tau ** 2 + 25 ** 2)
prior_mu = -((mu / 10) ** 2) # normal prior, loc=0, scale=10
prior_eta = -np.sum(eta ** 2) # normal prior, loc=0, scale=1
return prior_mu + prior_tau + prior_eta
def _emcee_lnprob(theta, y, sigma):
"""Proper function to allow pickling."""
mu, tau, eta = theta[0], theta[1], theta[2:]
prior = _emcee_lnprior(theta)
like_vect = -(((mu + tau * eta - y) / sigma) ** 2)
like = np.sum(like_vect)
return like + prior, (like_vect, np.random.normal((mu + tau * eta), sigma))
def emcee_schools_model(data, draws, chains):
"""Schools model in emcee."""
import emcee
chains = 10 * chains # emcee is sad with too few walkers
y = data["y"]
sigma = data["sigma"]
J = data["J"] # pylint: disable=invalid-name
ndim = J + 2
pos = np.random.normal(size=(chains, ndim))
pos[:, 1] = np.absolute(pos[:, 1]) # pylint: disable=unsupported-assignment-operation
if emcee_version() < 3:
sampler = emcee.EnsembleSampler(chains, ndim, _emcee_lnprob, args=(y, sigma))
# pylint: enable=unexpected-keyword-arg
sampler.run_mcmc(pos, draws)
else:
here = os.path.dirname(os.path.abspath(__file__))
data_directory = os.path.join(here, "saved_models")
filepath = os.path.join(data_directory, "reader_testfile.h5")
backend = emcee.backends.HDFBackend(filepath) # pylint: disable=no-member
backend.reset(chains, ndim)
# pylint: disable=unexpected-keyword-arg
sampler = emcee.EnsembleSampler(
chains, ndim, _emcee_lnprob, args=(y, sigma), backend=backend
)
# pylint: enable=unexpected-keyword-arg
sampler.run_mcmc(pos, draws, store=True)
return sampler
# pylint:disable=no-member,no-value-for-parameter,invalid-name
def _pyro_noncentered_model(J, sigma, y=None):
import pyro
import pyro.distributions as dist
mu = pyro.sample("mu", dist.Normal(0, 5))
tau = pyro.sample("tau", dist.HalfCauchy(5))
with pyro.plate("J", J):
eta = pyro.sample("eta", dist.Normal(0, 1))
theta = mu + tau * eta
return pyro.sample("obs", dist.Normal(theta, sigma), obs=y)
def pyro_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation in Pyro."""
import torch
from pyro.infer import MCMC, NUTS
y = torch.from_numpy(data["y"]).float()
sigma = torch.from_numpy(data["sigma"]).float()
nuts_kernel = NUTS(_pyro_noncentered_model, jit_compile=True, ignore_jit_warnings=True)
posterior = MCMC(nuts_kernel, num_samples=draws, warmup_steps=draws, num_chains=chains)
posterior.run(data["J"], sigma, y)
# This block lets the posterior be pickled
posterior.sampler = None
posterior.kernel.potential_fn = None
return posterior
# pylint:disable=no-member,no-value-for-parameter,invalid-name
def _numpyro_noncentered_model(J, sigma, y=None):
import numpyro
import numpyro.distributions as dist
mu = numpyro.sample("mu", dist.Normal(0, 5))
tau = numpyro.sample("tau", dist.HalfCauchy(5))
with numpyro.plate("J", J):
eta = numpyro.sample("eta", dist.Normal(0, 1))
theta = mu + tau * eta
return numpyro.sample("obs", dist.Normal(theta, sigma), obs=y)
def numpyro_schools_model(data, draws, chains):
"""Centered eight schools implementation in NumPyro."""
from jax.random import PRNGKey
from numpyro.infer import MCMC, NUTS
mcmc = MCMC(
NUTS(_numpyro_noncentered_model),
num_warmup=draws,
num_samples=draws,
num_chains=chains,
chain_method="sequential",
)
mcmc.run(PRNGKey(0), extra_fields=("num_steps", "energy"), **data)
# This block lets the posterior be pickled
mcmc.sampler._sample_fn = None # pylint: disable=protected-access
mcmc.sampler._init_fn = None # pylint: disable=protected-access
mcmc.sampler._postprocess_fn = None # pylint: disable=protected-access
mcmc.sampler._potential_fn = None # pylint: disable=protected-access
mcmc.sampler._potential_fn_gen = None # pylint: disable=protected-access
mcmc._cache = {} # pylint: disable=protected-access
return mcmc
def pystan_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation for pystan."""
schools_code = """
data {
int<lower=0> J;
real y[J];
real<lower=0> sigma[J];
}
parameters {
real mu;
real<lower=0> tau;
real eta[J];
}
transformed parameters {
real theta[J];
for (j in 1:J)
theta[j] = mu + tau * eta[j];
}
model {
mu ~ normal(0, 5);
tau ~ cauchy(0, 5);
eta ~ normal(0, 1);
y ~ normal(theta, sigma);
}
generated quantities {
vector[J] log_lik;
vector[J] y_hat;
for (j in 1:J) {
log_lik[j] = normal_lpdf(y[j] | theta[j], sigma[j]);
y_hat[j] = normal_rng(theta[j], sigma[j]);
}
}
"""
if pystan_version() == 2:
import pystan # pylint: disable=import-error
stan_model = pystan.StanModel(model_code=schools_code)
fit = stan_model.sampling(
data=data,
iter=draws + 500,
warmup=500,
chains=chains,
check_hmc_diagnostics=False,
control=dict(adapt_engaged=False),
)
else:
import stan # pylint: disable=import-error
stan_model = stan.build(schools_code, data=data)
fit = stan_model.sample(
num_chains=chains, num_samples=draws, num_warmup=500, save_warmup=False
)
return stan_model, fit
def pymc3_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation for pymc3."""
import pymc3 as pm
with pm.Model() as model:
mu = pm.Normal("mu", mu=0, sd=5)
tau = pm.HalfCauchy("tau", beta=5)
eta = pm.Normal("eta", mu=0, sd=1, shape=data["J"])
theta = pm.Deterministic("theta", mu + tau * eta)
pm.Normal("obs", mu=theta, sd=data["sigma"], observed=data["y"])
trace = pm.sample(draws, chains=chains)
return model, trace
def library_handle(library):
"""Import a library and return the handle."""
if library == "pystan":
try:
module = importlib.import_module("pystan")
except ImportError:
module = importlib.import_module("stan")
else:
module = importlib.import_module(library)
return module
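# For example, library_handle("pystan") returns the imported ``pystan`` module,
# falling back to the PyStan 3 ``stan`` package when ``pystan`` is absent.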
def load_cached_models(eight_schools_data, draws, chains, libs=None):
"""Load pymc3, pystan, emcee, and pyro models from pickle."""
here = os.path.dirname(os.path.abspath(__file__))
supported = (
("pystan", pystan_noncentered_schools),
("pymc3", pymc3_noncentered_schools),
("emcee", emcee_schools_model),
("pyro", pyro_noncentered_schools),
("numpyro", numpyro_schools_model),
)
data_directory = os.path.join(here, "saved_models")
models = {}
if isinstance(libs, str):
libs = [libs]
for library_name, func in supported:
if libs is not None and library_name not in libs:
continue
library = library_handle(library_name)
if library.__name__ == "stan":
# PyStan3 does not support pickling
# httpstan caches models automatically
_log.info("Generating and loading stan model")
models["pystan"] = func(eight_schools_data, draws, chains)
continue
py_version = sys.version_info
fname = "{0.major}.{0.minor}_{1.__name__}_{1.__version__}_{2}_{3}_{4}.pkl.gzip".format(
py_version, library, sys.platform, draws, chains
)
path = os.path.join(data_directory, fname)
if not os.path.exists(path):
with gzip.open(path, "wb") as buff:
try:
_log.info("Generating and caching %s", fname)
cloudpickle.dump(func(eight_schools_data, draws, chains), buff)
except AttributeError as err:
raise AttributeError(f"Failed caching {library_name}") from err
with gzip.open(path, "rb") as buff:
_log.info("Loading %s from cache", fname)
models[library.__name__] = cloudpickle.load(buff)
return models
def pystan_version():
"""Check PyStan version.
Returns
-------
int
Major version number
"""
try:
import pystan # pylint: disable=import-error
version = int(pystan.__version__[0])
except ImportError:
try:
import stan # pylint: disable=import-error
version = int(stan.__version__[0])
except ImportError:
version = None
return version
def test_precompile_models(eight_schools_params, draws, chains):
"""Precompile model files."""
load_cached_models(eight_schools_params, draws, chains)
def running_on_ci() -> bool:
"""Return True if running on CI machine."""
return os.environ.get("ARVIZ_CI_MACHINE") is not None
def importorskip(
modname: str, minversion: Optional[str] = None, reason: Optional[str] = None
) -> Any:
"""Import and return the requested module ``modname``.
Doesn't allow skips on CI machine.
Borrowed and modified from ``pytest.importorskip``.
:param str modname: the name of the module to import
:param str minversion: if given, the imported module's ``__version__``
attribute must be at least this minimal version, otherwise the test is
still skipped.
:param str reason: if given, this reason is shown as the message when the
module cannot be imported.
:returns: The imported module. This should be assigned to its canonical
name.
Example::
docutils = pytest.importorskip("docutils")
"""
# ARVIZ_CI_MACHINE is True if tests run on CI, where ARVIZ_CI_MACHINE env variable exists
ARVIZ_CI_MACHINE = running_on_ci()
if ARVIZ_CI_MACHINE:
import warnings
        compile(modname, "", "eval")  # to catch syntax errors
with warnings.catch_warnings():
# make sure to ignore ImportWarnings that might happen because
# of existing directories with the same name we're trying to
# import but without a __init__.py file
warnings.simplefilter("ignore")
__import__(modname)
mod = sys.modules[modname]
if minversion is None:
return mod
verattr = getattr(mod, "__version__", None)
if minversion is not None:
if verattr is None or Version(verattr) < Version(minversion):
raise Skipped(
"module %r has __version__ %r, required is: %r"
% (modname, verattr, minversion),
allow_module_level=True,
)
return mod
else:
return pytest.importorskip(modname=modname, minversion=minversion, reason=reason)<|fim▁end|> | InferenceData object on which to check the attributes.
Returns
-------
|
<|file_name|>utilities.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" A set of utilities, mostly for post-processing and visualization
We put arrays on disk as raw bytes, extending along the first dimension.
Alongside each array x we store the value x.dtype, which holds the string
description of the array's dtype.
See Also:
------------
@url
.. image::
@author epnev
"""
# \package caiman/source_extraction/cnmf
# \version 1.0
# \copyright GNU General Public License v2.0
# \date Created on Sat Sep 12 15:52:53 2015
from builtins import str
from builtins import range
from past.utils import old_div
import cv2
import h5py
import logging
import numpy as np
import os
import pylab as pl
import scipy
from scipy.sparse import spdiags, issparse, csc_matrix, csr_matrix
import scipy.ndimage.morphology as morph
from skimage.feature.peak import _get_high_intensity_peaks
import tifffile
from typing import List
from .initialization import greedyROI
from ...base.rois import com
from ...mmapping import parallel_dot_product, load_memmap
from ...cluster import extract_patch_coordinates
from ...utils.stats import df_percentile
def decimation_matrix(dims, sub):
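    """Build a sparse decimation matrix that downsamples a field of view of
    shape ``dims`` by a factor ``sub`` along each axis, averaging the pixels
    that fall into each block (Fortran-order flattening is assumed).
    """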
D = np.prod(dims)
if sub == 2 and D <= 10000: # faster for small matrices
ind = np.arange(D) // 2 - \
np.arange(dims[0], dims[0] + D) // (dims[0] * 2) * (dims[0] // 2) - \
(dims[0] % 2) * (np.arange(D) % (2 * dims[0]) > dims[0]) * (np.arange(1, 1 + D) % 2)
else:
def create_decimation_matrix_bruteforce(dims, sub):
dims_ds = tuple(1 + (np.array(dims) - 1) // sub)
d_ds = np.prod(dims_ds)
ds_matrix = np.eye(d_ds)
ds_matrix = np.repeat(np.repeat(
ds_matrix.reshape((d_ds,) + dims_ds, order='F'), sub, 1),
sub, 2)[:, :dims[0], :dims[1]].reshape((d_ds, -1), order='F')
ds_matrix /= ds_matrix.sum(1)[:, None]
ds_matrix = csc_matrix(ds_matrix, dtype=np.float32)
return ds_matrix
tmp = create_decimation_matrix_bruteforce((dims[0], sub), sub).indices
ind = np.concatenate([tmp] * (dims[1] // sub + 1))[:D] + \
np.arange(D) // (dims[0] * sub) * ((dims[0] - 1) // sub + 1)
data = 1. / np.unique(ind, return_counts=True)[1][ind]
return csc_matrix((data, ind, np.arange(1 + D)), dtype=np.float32)
def peak_local_max(image, min_distance=1, threshold_abs=None,
threshold_rel=None, exclude_border=True, indices=True,
num_peaks=np.inf, footprint=None):
"""Find peaks in an image as coordinate list or boolean mask.
Adapted from skimage to use opencv for speed.
Replaced scipy.ndimage.maximum_filter by cv2.dilate.
Peaks are the local maxima in a region of `2 * min_distance + 1`
(i.e. peaks are separated by at least `min_distance`).
If peaks are flat (i.e. multiple adjacent pixels have identical
intensities), the coordinates of all such pixels are returned.
If both `threshold_abs` and `threshold_rel` are provided, the maximum
of the two is chosen as the minimum intensity threshold of peaks.
Parameters
----------
image : ndarray
Input image.
min_distance : int, optional
Minimum number of pixels separating peaks in a region of `2 *
min_distance + 1` (i.e. peaks are separated by at least
`min_distance`).
To find the maximum number of peaks, use `min_distance=1`.
threshold_abs : float, optional
Minimum intensity of peaks. By default, the absolute threshold is
the minimum intensity of the image.
threshold_rel : float, optional
Minimum intensity of peaks, calculated as `max(image) * threshold_rel`.
exclude_border : int, optional
If nonzero, `exclude_border` excludes peaks from
within `exclude_border`-pixels of the border of the image.
indices : bool, optional
If True, the output will be an array representing peak
coordinates. If False, the output will be a boolean array shaped as
`image.shape` with peaks present at True elements.
num_peaks : int, optional
Maximum number of peaks. When the number of peaks exceeds `num_peaks`,
return `num_peaks` peaks based on highest peak intensity.
footprint : ndarray of bools, optional
If provided, `footprint == 1` represents the local region within which
to search for peaks at every point in `image`. Overrides
`min_distance` (also for `exclude_border`).
Returns
-------
output : ndarray or ndarray of bools
* If `indices = True` : (row, column, ...) coordinates of peaks.
* If `indices = False` : Boolean array shaped like `image`, with peaks
represented by True values.
Notes
-----
The peak local maximum function returns the coordinates of local peaks
(maxima) in an image. A maximum filter is used for finding local maxima.
This operation dilates the original image. After comparison of the dilated
and original image, this function returns the coordinates or a mask of the
peaks where the dilated image equals the original image.
Examples
--------
>>> img1 = np.zeros((7, 7))
>>> img1[3, 4] = 1
>>> img1[3, 2] = 1.5
>>> img1
array([[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 1.5, 0. , 1. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. , 0. ]])
>>> peak_local_max(img1, min_distance=1)
array([[3, 4],
[3, 2]])
>>> peak_local_max(img1, min_distance=2)
array([[3, 2]])
>>> img2 = np.zeros((20, 20, 20))
>>> img2[10, 10, 10] = 1
>>> peak_local_max(img2, exclude_border=0)
array([[10, 10, 10]])
"""
if type(exclude_border) == bool:
exclude_border = min_distance if exclude_border else 0
    out = np.zeros_like(image, dtype=bool)
if np.all(image == image.flat[0]):
if indices is True:
            return np.empty((0, 2), int)
else:
return out
# Non maximum filter
if footprint is not None:
# image_max = ndi.maximum_filter(image, footprint=footprint,
# mode='constant')
        image_max = cv2.dilate(image, footprint.astype(np.uint8), iterations=1)
else:
size = 2 * min_distance + 1
# image_max = ndi.maximum_filter(image, size=size, mode='constant')
image_max = cv2.dilate(image, cv2.getStructuringElement(
cv2.MORPH_RECT, (size, size)), iterations=1)
mask = image == image_max
if exclude_border:
# zero out the image borders
for i in range(mask.ndim):
mask = mask.swapaxes(0, i)
remove = (footprint.shape[i] if footprint is not None
else 2 * exclude_border)
mask[:remove // 2] = mask[-remove // 2:] = False
mask = mask.swapaxes(0, i)
# find top peak candidates above a threshold
thresholds = []
if threshold_abs is None:
threshold_abs = image.min()
thresholds.append(threshold_abs)
if threshold_rel is not None:
thresholds.append(threshold_rel * image.max())
if thresholds:
mask &= image > max(thresholds)
# Select highest intensities (num_peaks)
coordinates = _get_high_intensity_peaks(image, mask, num_peaks)
if indices is True:
return coordinates
else:
nd_indices = tuple(coordinates.T)
out[nd_indices] = True
return out
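# Compare two dictionaries of array-like values: returns the keys only in d1
# (``added``), only in d2 (``removed``), present in both with differing values
# (``modified``), and present in both with equal values (``same``).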
def dict_compare(d1, d2):
d1_keys = set(d1.keys())
d2_keys = set(d2.keys())
intersect_keys = d1_keys.intersection(d2_keys)
added = d1_keys - d2_keys
removed = d2_keys - d1_keys
modified = {o : (d1[o], d2[o]) for o in intersect_keys if np.any(d1[o] != d2[o])}
same = set(o for o in intersect_keys if np.all(d1[o] == d2[o]))
return added, removed, modified, same
def computeDFF_traces(Yr, A, C, bl, quantileMin=8, frames_window=200):
    return extract_DF_F(Yr, A, C, bl, quantileMin, frames_window)
def extract_DF_F(Yr, A, C, bl, quantileMin=8, frames_window=200, block_size=400, dview=None):
""" Compute DFF function from cnmf output.
Disclaimer: it might be memory inefficient
Args:
Yr: ndarray (2D)
movie pixels X time
A: scipy.sparse.coo_matrix
spatial components (from cnmf cnm.A)
C: ndarray
temporal components (from cnmf cnm.C)
bl: ndarray
baseline for each component (from cnmf cnm.bl)
quantile_min: float
            quantile used to estimate the baseline (values in [0, 100])
frames_window: int
number of frames for running quantile
Returns:
        C_df:
            the computed DF/F (baseline-normalized calcium activity)
See Also:
..image::docs/img/onlycnmf.png
"""
nA = np.array(np.sqrt(A.power(2).sum(0)).T)
A = scipy.sparse.coo_matrix(A / nA.T)
C = C * nA
bl = (bl * nA.T).squeeze()
nA = np.array(np.sqrt(A.power(2).sum(0)).T)
T = C.shape[-1]
if 'memmap' in str(type(Yr)):
if block_size >= 500:
print('Forcing single thread for memory issues')
dview_res = None
else:
print('Using thread. If memory issues set block_size larger than 500')
dview_res = dview
AY = parallel_dot_product(Yr, A, dview=dview_res, block_size=block_size,
transpose=True).T
else:
AY = A.T.dot(Yr)
bas_val = bl[None, :]
Bas = np.repeat(bas_val, T, 0).T
AA = A.T.dot(A)
AA.setdiag(0)
Cf = (C - Bas) * (nA**2)
C2 = AY - AA.dot(C)
if frames_window is None or frames_window > T:
Df = np.percentile(C2, quantileMin, axis=1)
C_df = Cf / Df[:, None]
else:
Df = scipy.ndimage.percentile_filter(
C2, quantileMin, (frames_window, 1))
C_df = Cf / Df
return C_df
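# Illustrative call (object names are assumptions, not a fixed API): given
# CNMF output ``cnm`` and a memory-mapped movie ``Yr`` of shape (pixels, T):
#     C_df = extract_DF_F(Yr, cnm.A, cnm.C, cnm.bl,
#                         quantileMin=8, frames_window=200)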
def detrend_df_f(A, b, C, f, YrA=None, quantileMin=8, frames_window=500,
flag_auto=True, use_fast=False, detrend_only=False):
""" Compute DF/F signal without using the original data.
In general much faster than extract_DF_F
Args:
A: scipy.sparse.csc_matrix
spatial components (from cnmf cnm.A)
b: ndarray
spatial background components
C: ndarray
temporal components (from cnmf cnm.C)
f: ndarray
temporal background components
YrA: ndarray
residual signals
quantile_min: float
quantile used to estimate the baseline (values in [0,100])
frames_window: int
number of frames for computing running quantile
flag_auto: bool
flag for determining quantile automatically
use_fast: bool
            flag for using fast approximate percentile filtering
detrend_only: bool (False)
flag for only subtracting baseline and not normalizing by it.
Used in 1p data processing where baseline fluorescence cannot be
determined.
Returns:
F_df:
            the computed DF/F (baseline-normalized calcium activity)
"""
if C is None:
logging.warning("There are no components for DF/F extraction!")
return None
if b is None or f is None:
b = np.zeros((A.shape[0], 1))
f = np.zeros((1, C.shape[1]))
logging.warning("Background components not present. Results should" +
" not be interpreted as DF/F normalized but only" +
" as detrended.")
detrend_only = True
if 'csc_matrix' not in str(type(A)):
A = scipy.sparse.csc_matrix(A)
if 'array' not in str(type(b)):
b = b.toarray()
if 'array' not in str(type(C)):
C = C.toarray()
if 'array' not in str(type(f)):
f = f.toarray()
nA = np.sqrt(np.ravel(A.power(2).sum(axis=0)))
nA_mat = scipy.sparse.spdiags(nA, 0, nA.shape[0], nA.shape[0])
nA_inv_mat = scipy.sparse.spdiags(1. / nA, 0, nA.shape[0], nA.shape[0])
A = A * nA_inv_mat
C = nA_mat * C
if YrA is not None:
YrA = nA_mat * YrA
F = C + YrA if YrA is not None else C
B = A.T.dot(b).dot(f)
T = C.shape[-1]
if flag_auto:
data_prct, val = df_percentile(F[:, :frames_window], axis=1)
if frames_window is None or frames_window > T:
Fd = np.stack([np.percentile(f, prctileMin) for f, prctileMin in
zip(F, data_prct)])
Df = np.stack([np.percentile(f, prctileMin) for f, prctileMin in
zip(B, data_prct)])
if not detrend_only:
F_df = (F - Fd[:, None]) / (Df[:, None] + Fd[:, None])
else:
F_df = F - Fd[:, None]
else:
if use_fast:
Fd = np.stack([fast_prct_filt(f, level=prctileMin,
frames_window=frames_window) for
f, prctileMin in zip(F, data_prct)])
Df = np.stack([fast_prct_filt(f, level=prctileMin,
frames_window=frames_window) for
f, prctileMin in zip(B, data_prct)])
else:
Fd = np.stack([scipy.ndimage.percentile_filter(
f, prctileMin, (frames_window)) for f, prctileMin in
zip(F, data_prct)])
Df = np.stack([scipy.ndimage.percentile_filter(
f, prctileMin, (frames_window)) for f, prctileMin in
zip(B, data_prct)])
if not detrend_only:
F_df = (F - Fd) / (Df + Fd)
else:
F_df = F - Fd
else:
if frames_window is None or frames_window > T:
Fd = np.percentile(F, quantileMin, axis=1)
Df = np.percentile(B, quantileMin, axis=1)
if not detrend_only:
F_df = (F - Fd[:, None]) / (Df[:, None] + Fd[:, None])
else:
F_df = F - Fd[:, None]
else:
Fd = scipy.ndimage.percentile_filter(
F, quantileMin, (frames_window, 1))
Df = scipy.ndimage.percentile_filter(
B, quantileMin, (frames_window, 1))
if not detrend_only:
F_df = (F - Fd) / (Df + Fd)
else:
F_df = F - Fd
return F_df
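# Illustrative call (``cnm`` is an assumed fitted CNMF estimates object):
#     F_dff = detrend_df_f(cnm.A, cnm.b, cnm.C, cnm.f, YrA=cnm.YrA,
#                          quantileMin=8, frames_window=500, flag_auto=True)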
def fast_prct_filt(input_data, level=8, frames_window=1000):
"""
Fast approximate percentage filtering
"""
data = np.atleast_2d(input_data).copy()
T = np.shape(data)[-1]
downsampfact = frames_window
elm_missing = int(np.ceil(T * 1.0 / downsampfact)
* downsampfact - T)
padbefore = int(np.floor(elm_missing / 2.))
padafter = int(np.ceil(elm_missing / 2.))
tr_tmp = np.pad(data.T, ((padbefore, padafter), (0, 0)), mode='reflect')
numFramesNew, num_traces = np.shape(tr_tmp)
#% compute baseline quickly
tr_BL = np.reshape(tr_tmp, (downsampfact, int(numFramesNew / downsampfact),
num_traces), order='F')
tr_BL = np.percentile(tr_BL, level, axis=0)
tr_BL = scipy.ndimage.zoom(np.array(tr_BL, dtype=np.float32),
[downsampfact, 1], order=3, mode='nearest',
cval=0.0, prefilter=True)
if padafter == 0:
data -= tr_BL.T
else:
data -= tr_BL[padbefore:-padafter].T
return data.squeeze()
#%%
def detrend_df_f_auto(A, b, C, f, dims=None, YrA=None, use_annulus = True,
dist1 = 7, dist2 = 5, frames_window=1000,
use_fast = False):
"""
Compute DF/F using an automated level of percentile filtering based on
kernel density estimation.
Args:
A: scipy.sparse.csc_matrix
spatial components (from cnmf cnm.A)
b: ndarray
spatial backgrounds
C: ndarray
temporal components (from cnmf cnm.C)
f: ndarray
temporal background components
YrA: ndarray
residual signals
frames_window: int
number of frames for running quantile
use_fast: bool
flag for using fast approximate percentile filtering
Returns:
F_df:
            the computed DF/F (baseline-normalized calcium activity)
"""
if 'csc_matrix' not in str(type(A)):
A = scipy.sparse.csc_matrix(A)
if 'array' not in str(type(b)):
b = b.toarray()
if 'array' not in str(type(C)):
C = C.toarray()
if 'array' not in str(type(f)):
f = f.toarray()
nA = np.sqrt(np.ravel(A.power(2).sum(axis=0)))
nA_mat = scipy.sparse.spdiags(nA, 0, nA.shape[0], nA.shape[0])
nA_inv_mat = scipy.sparse.spdiags(1. / nA, 0, nA.shape[0], nA.shape[0])
A = A * nA_inv_mat
C = nA_mat * C
if YrA is not None:
YrA = nA_mat * YrA
F = C + YrA if YrA is not None else C
K = A.shape[-1]
A_ann = A.copy()
if use_annulus:
dist1 = 7
dist2 = 5
X, Y = np.meshgrid(np.arange(-dist1, dist1), np.arange(-dist1, dist1))
R = np.sqrt(X**2+Y**2)
R[R > dist1] = 0
R[R < dist2] = 0
R = R.astype('bool')
for k in range(K):
a = A[:, k].toarray().reshape(dims, order='F') > 0
a2 = np.bitwise_xor(morph.binary_dilation(a, R), a)
a2 = a2.astype(float).flatten(order='F')
a2 /= np.sqrt(a2.sum())
a2 = scipy.sparse.csc_matrix(a2)
A_ann[:, k] = a2.T
B = A_ann.T.dot(b).dot(f)
T = C.shape[-1]
data_prct, val = df_percentile(F[:, :frames_window], axis=1)
if frames_window is None or frames_window > T:
Fd = np.stack([np.percentile(f, prctileMin) for f, prctileMin in
zip(F, data_prct)])
Df = np.stack([np.percentile(f, prctileMin) for f, prctileMin in
zip(B, data_prct)])
F_df = (F - Fd[:, None]) / (Df[:, None] + Fd[:, None])
else:
if use_fast:
Fd = np.stack([fast_prct_filt(f, level=prctileMin,
frames_window=frames_window) for
f, prctileMin in zip(F, data_prct)])
Df = np.stack([fast_prct_filt(f, level=prctileMin,
frames_window=frames_window) for
f, prctileMin in zip(B, data_prct)])
else:
Fd = np.stack([scipy.ndimage.percentile_filter(
f, prctileMin, (frames_window)) for f, prctileMin in
zip(F, data_prct)])
Df = np.stack([scipy.ndimage.percentile_filter(
f, prctileMin, (frames_window)) for f, prctileMin in
zip(B, data_prct)])
F_df = (F - Fd) / (Df + Fd)
return F_df
#%%
def manually_refine_components(Y, xxx_todo_changeme, A, C, Cn, thr=0.9, display_numbers=True,
max_number=None, cmap=None, **kwargs):
"""Plots contour of spatial components
against a background image and allows to interactively add novel components by clicking with mouse
Args:
Y: ndarray
movie in 2D
(dx,dy): tuple
            dimensions of the square used to identify neurons (should be set to the value of gSiz)
A: np.ndarray or sparse matrix
Matrix of Spatial components (d x K)
Cn: np.ndarray (2D)
Background image (e.g. mean, correlation)
thr: scalar between 0 and 1
            Energy threshold for computing contours (default 0.9)
        display_numbers: Boolean
Display number of ROIs if checked (default True)
max_number: int
Display the number for only the first max_number components (default None, display all numbers)
cmap: string
User specifies the colormap (default None, default colormap)
Returns:
A: np.ndarray
            matrix A of estimated spatial component contributions
C: np.ndarray
array of estimated calcium traces
"""
(dx, dy) = xxx_todo_changeme
if issparse(A):
A = np.array(A.todense())
else:
A = np.array(A)
d1, d2 = np.shape(Cn)
d, nr = np.shape(A)
if max_number is None:
max_number = nr
x, y = np.mgrid[0:d1:1, 0:d2:1]
pl.imshow(Cn, interpolation=None, cmap=cmap)
cm = com(A, d1, d2)
Bmat = np.zeros((np.minimum(nr, max_number), d1, d2))
for i in range(np.minimum(nr, max_number)):
indx = np.argsort(A[:, i], axis=None)[::-1]
cumEn = np.cumsum(A[:, i].flatten()[indx]**2)
cumEn /= cumEn[-1]
Bvec = np.zeros(d)
Bvec[indx] = cumEn
Bmat[i] = np.reshape(Bvec, np.shape(Cn), order='F')
T = np.shape(Y)[-1]
pl.close()
fig = pl.figure()
ax = pl.gca()
ax.imshow(Cn, interpolation=None, cmap=cmap,
vmin=np.percentile(Cn[~np.isnan(Cn)], 1), vmax=np.percentile(Cn[~np.isnan(Cn)], 99))
for i in range(np.minimum(nr, max_number)):
pl.contour(y, x, Bmat[i], [thr])
if display_numbers:
for i in range(np.minimum(nr, max_number)):
ax.text(cm[i, 1], cm[i, 0], str(i + 1))
A3 = np.reshape(A, (d1, d2, nr), order='F')
while True:
pts = fig.ginput(1, timeout=0)
if pts != []:
print(pts)
xx, yy = np.round(pts[0]).astype(np.int)
coords_y = np.array(list(range(yy - dy, yy + dy + 1)))
coords_x = np.array(list(range(xx - dx, xx + dx + 1)))
coords_y = coords_y[(coords_y >= 0) & (coords_y < d1)]
coords_x = coords_x[(coords_x >= 0) & (coords_x < d2)]
a3_tiny = A3[coords_y[0]:coords_y[-1] +
1, coords_x[0]:coords_x[-1] + 1, :]
y3_tiny = Y[coords_y[0]:coords_y[-1] +
1, coords_x[0]:coords_x[-1] + 1, :]
dy_sz, dx_sz = np.shape(a3_tiny)[:-1]
y2_tiny = np.reshape(y3_tiny, (dx_sz * dy_sz, T), order='F')
a2_tiny = np.reshape(a3_tiny, (dx_sz * dy_sz, nr), order='F')
y2_res = y2_tiny - a2_tiny.dot(C)
y3_res = np.reshape(y2_res, (dy_sz, dx_sz, T), order='F')
a__, c__, center__, b_in__, f_in__ = greedyROI(
y3_res, nr=1, gSig=[np.floor(old_div(dx_sz, 2)), np.floor(old_div(dy_sz, 2))], gSiz=[dx_sz, dy_sz])
a_f = np.zeros((d, 1))
idxs = np.meshgrid(coords_y, coords_x)
a_f[np.ravel_multi_index(
idxs, (d1, d2), order='F').flatten()] = a__
A = np.concatenate([A, a_f], axis=1)
C = np.concatenate([C, c__], axis=0)
indx = np.argsort(a_f, axis=None)[::-1]
cumEn = np.cumsum(a_f.flatten()[indx]**2)
cumEn /= cumEn[-1]
Bvec = np.zeros(d)
Bvec[indx] = cumEn
bmat = np.reshape(Bvec, np.shape(Cn), order='F')
pl.contour(y, x, bmat, [thr])
pl.pause(.01)
elif pts == []:
break
nr += 1
A3 = np.reshape(A, (d1, d2, nr), order='F')
return A, C
def app_vertex_cover(A):
""" Finds an approximate vertex cover for a symmetric graph with adjacency matrix A.
Args:
A: boolean 2d array (K x K)
Adjacency matrix. A is boolean with diagonal set to 0
Returns:
L: A vertex cover of A
Authors:
Eftychios A. Pnevmatikakis, Simons Foundation, 2015
"""
L = []
while A.any():
nz = np.nonzero(A)[0] # find non-zero edges
u = nz[np.random.randint(0, len(nz))]
A[u, :] = False
A[:, u] = False
L.append(u)
return np.asarray(L)
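# Example: for a triangle graph any two vertices cover all edges (the input
# matrix is modified in place, so pass a copy if you need to keep it):
#     A = np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]], dtype=bool)
#     cover = app_vertex_cover(A.copy())  # e.g. array([0, 2])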
def update_order(A, new_a=None, prev_list=None, method='greedy'):
'''Determines the update order of the temporal components given the spatial
components by creating a nest of random approximate vertex covers
Args:
A: np.ndarray
matrix of spatial components (d x K)
new_a: sparse array
spatial component that is added, in order to efficiently update the orders in online scenarios
prev_list: list of list
orders from previous iteration, you need to pass if new_a is not None
Returns:
O: list of sets
list of subsets of components. The components of each subset can be updated in parallel
lo: list
length of each subset
Written by Eftychios A. Pnevmatikakis, Simons Foundation, 2015
'''
K = np.shape(A)[-1]
if new_a is None and prev_list is None:
        if method == 'greedy':
prev_list, count_list = update_order_greedy(A, flag_AA=False)
else:
prev_list, count_list = update_order_random(A, flag_AA=False)
return prev_list, count_list
else:
if new_a is None or prev_list is None:
raise Exception(
'In the online update order you need to provide both new_a and prev_list')
counter = 0
AA = A.T.dot(new_a)
for group in prev_list:
if AA[list(group)].sum() == 0:
group.append(K)
counter += 1
break
if counter == 0:
if prev_list is not None:
prev_list = list(prev_list)
prev_list.append([K])
count_list = [len(gr) for gr in prev_list]
return prev_list, count_list
def order_components(A, C):
"""Order components based on their maximum temporal value and size
Args:
A: sparse matrix (d x K)
spatial components
C: matrix or np.ndarray (K x T)
temporal components
Returns:
A_or: np.ndarray
ordered spatial components
C_or: np.ndarray
ordered temporal components
srt: np.ndarray
sorting mapping
"""
A = np.array(A.todense())
nA2 = np.sqrt(np.sum(A**2, axis=0))
K = len(nA2)
A = np.array(np.matrix(A) * spdiags(old_div(1, nA2), 0, K, K))
nA4 = np.sum(A**4, axis=0)**0.25
C = np.array(spdiags(nA2, 0, K, K) * np.matrix(C))
mC = np.ndarray.max(np.array(C), axis=1)
srt = np.argsort(nA4 * mC)[::-1]
A_or = A[:, srt] * spdiags(nA2[srt], 0, K, K)
C_or = spdiags(old_div(1., nA2[srt]), 0, K, K) * (C[srt, :])
return A_or, C_or, srt<|fim▁hole|>def update_order_random(A, flag_AA=True):
"""Determies the update order of temporal components using
randomized partitions of non-overlapping components
"""
K = np.shape(A)[-1]
if flag_AA:
AA = A.copy()
else:
AA = A.T.dot(A)
AA.setdiag(0)
F = (AA) > 0
F = F.toarray()
rem_ind = np.arange(K)
O = []
lo = []
while len(rem_ind) > 0:
L = np.sort(app_vertex_cover(F[rem_ind, :][:, rem_ind]))
if L.size:
ord_ind = set(rem_ind) - set(rem_ind[L])
rem_ind = rem_ind[L]
else:
ord_ind = set(rem_ind)
rem_ind = []
O.append(ord_ind)
lo.append(len(ord_ind))
return O[::-1], lo[::-1]
def update_order_greedy(A, flag_AA=True):
"""Determines the update order of the temporal components
this, given the spatial components using a greedy method
Basically we can update the components that are not overlapping, in parallel
Args:
A: sparse crc matrix
matrix of spatial components (d x K)
OR:
A.T.dot(A) matrix (d x d) if flag_AA = true
flag_AA: boolean (default true)
Returns:
parllcomp: list of sets
list of subsets of components. The components of each subset can be updated in parallel
len_parrllcomp: list
length of each subset
Author:
Eftychios A. Pnevmatikakis, Simons Foundation, 2017
"""
K = np.shape(A)[-1]
parllcomp:List = []
for i in range(K):
new_list = True
for ls in parllcomp:
if flag_AA:
if A[i, ls].nnz == 0:
ls.append(i)
new_list = False
break
else:
if (A[:, i].T.dot(A[:, ls])).nnz == 0:
ls.append(i)
new_list = False
break
if new_list:
parllcomp.append([i])
len_parrllcomp = [len(ls) for ls in parllcomp]
return parllcomp, len_parrllcomp
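# Illustrative use (sparse CSC input assumed): partition components so that
# non-overlapping ones can be updated together, e.g.
#     groups, group_sizes = update_order_greedy(A.T.dot(A), flag_AA=True)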
#%%
def compute_residuals(Yr_mmap_file, A_, b_, C_, f_, dview=None, block_size=1000, num_blocks_per_run=5):
'''compute residuals from memory mapped file and output of CNMF
Args:
A_,b_,C_,f_:
from CNMF
block_size: int
number of pixels processed together
num_blocks_per_run: int
            number of blocks processed in parallel
Returns:
YrA: ndarray
residuals per neuron
'''
if not ('sparse' in str(type(A_))):
A_ = scipy.sparse.coo_matrix(A_)
Ab = scipy.sparse.hstack((A_, b_)).tocsc()
Cf = np.vstack((C_, f_))
nA = np.ravel(Ab.power(2).sum(axis=0))
if 'mmap' in str(type(Yr_mmap_file)):
YA = parallel_dot_product(Yr_mmap_file, Ab, dview=dview, block_size=block_size,
transpose=True, num_blocks_per_run=num_blocks_per_run) * scipy.sparse.spdiags(old_div(1., nA), 0, Ab.shape[-1], Ab.shape[-1])
else:
YA = (Ab.T.dot(Yr_mmap_file)).T * \
spdiags(old_div(1., nA), 0, Ab.shape[-1], Ab.shape[-1])
AA = ((Ab.T.dot(Ab)) * scipy.sparse.spdiags(old_div(1., nA),
0, Ab.shape[-1], Ab.shape[-1])).tocsr()
return (YA - (AA.T.dot(Cf)).T)[:, :A_.shape[-1]].T
def normalize_AC(A, C, YrA, b, f, neurons_sn):
""" Normalize to unit norm A and b
Args:
A,C,Yr,b,f:
outputs of CNMF
"""
if 'sparse' in str(type(A)):
nA = np.ravel(np.sqrt(A.power(2).sum(0)))
else:
nA = np.ravel(np.sqrt((A**2).sum(0)))
if A is not None:
A /= nA
if C is not None:
C = np.array(C)
C *= nA[:, None]
if YrA is not None:
YrA = np.array(YrA)
YrA *= nA[:, None]
if b is not None:
if issparse(b):
nB = np.ravel(np.sqrt(b.power(2).sum(0)))
b = csc_matrix(b)
for k, i in enumerate(b.indptr[:-1]):
b.data[i:b.indptr[k + 1]] /= nB[k]
else:
nB = np.ravel(np.sqrt((b**2).sum(0)))
b = np.atleast_2d(b)
b /= nB
if issparse(f):
f = csr_matrix(f)
for k, i in enumerate(f.indptr[:-1]):
f.data[i:f.indptr[k + 1]] *= nB[k]
else:
f = np.atleast_2d(f)
f *= nB[:, np.newaxis]
if neurons_sn is not None:
neurons_sn *= nA
return csc_matrix(A), C, YrA, b, f, neurons_sn
def get_file_size(file_name, var_name_hdf5='mov'):
""" Computes the dimensions of a file or a list of files without loading
it/them in memory. An exception is thrown if the files have FOVs with
different sizes
Args:
file_name: str or list
locations of file(s) in memory
var_name_hdf5: 'str'
if loading from hdf5 name of the variable to load
Returns:
dims: list
dimensions of FOV
T: list
number of timesteps in each file
"""
if isinstance(file_name, str):
if os.path.exists(file_name):
_, extension = os.path.splitext(file_name)[:2]
extension = extension.lower()
if extension == '.mat':
byte_stream, file_opened = scipy.io.matlab.mio._open_file(file_name, appendmat=False)
mjv, mnv = scipy.io.matlab.mio.get_matfile_version(byte_stream)
if mjv == 2:
extension = '.h5'
if extension in ['.tif', '.tiff', '.btf']:
tffl = tifffile.TiffFile(file_name)
siz = tffl.series[0].shape
T, dims = siz[0], siz[1:]
elif extension == '.avi':
cap = cv2.VideoCapture(file_name)
dims = [0, 0]
try:
T = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
dims[1] = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
dims[0] = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
            except Exception:
print('Roll back to opencv 2')
T = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT))
dims[1] = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
dims[0] = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
elif extension == '.mmap':
filename = os.path.split(file_name)[-1]
Yr, dims, T = load_memmap(os.path.join(
os.path.split(file_name)[0], filename))
elif extension in ('.h5', '.hdf5', '.nwb'):
with h5py.File(file_name, "r") as f:
kk = list(f.keys())
if len(kk) == 1:
siz = f[kk[0]].shape
elif var_name_hdf5 in f:
if extension == '.nwb':
siz = f[var_name_hdf5]['data'].shape
else:
siz = f[var_name_hdf5].shape
else:
                    logging.error('The file does not contain a variable ' +
'named {0}'.format(var_name_hdf5))
raise Exception('Variable not found. Use one of the above')
T, dims = siz[0], siz[1:]
elif extension in ('.sbx'):
from ...base.movies import loadmat_sbx
info = loadmat_sbx(file_name[:-4]+ '.mat')['info']
dims = tuple((info['sz']).astype(int))
# Defining number of channels/size factor
if info['channels'] == 1:
info['nChan'] = 2
factor = 1
elif info['channels'] == 2:
info['nChan'] = 1
factor = 2
elif info['channels'] == 3:
info['nChan'] = 1
factor = 2
# Determine number of frames in whole file
T = int(os.path.getsize(
file_name[:-4] + '.sbx') / info['recordsPerBuffer'] / info['sz'][1] * factor / 4 - 1)
else:
raise Exception('Unknown file type')
dims = tuple(dims)
else:
raise Exception('File not found!')
elif isinstance(file_name, tuple):
from ...base.movies import load
dims = load(file_name[0], var_name_hdf5=var_name_hdf5).shape
T = len(file_name)
elif isinstance(file_name, list):
if len(file_name) == 1:
dims, T = get_file_size(file_name[0], var_name_hdf5=var_name_hdf5)
else:
dims, T = zip(*[get_file_size(fn, var_name_hdf5=var_name_hdf5)
for fn in file_name])
else:
raise Exception('Unknown input type')
return dims, T
def fast_graph_Laplacian(mmap_file, dims, max_radius=10, kernel='heat',
dview=None, sigma=1, thr=0.05, p=10, normalize=True,
use_NN=False, rf=None, strides=None):
""" Computes an approximate affinity maps and its graph Laplacian for all
pixels. For each pixel it restricts its attention to a given radius around
it.
Args:
mmap_file: str
Memory mapped file in pixel first order
max_radius: float
Maximum radius around each pixel
kernel: str {'heat', 'binary', 'cos'}
type of kernel
dview: dview object
multiprocessing or ipyparallel object for parallelization
sigma: float
standard deviation of Gaussian (heat) kernel
thr: float
threshold for affinity matrix
p: int
number of neighbors
normalize: bool
normalize vectors before computing affinity
use_NN: bool
use only p nearest neighbors
Returns:
W: scipy.sparse.csr_matrix
Graph affinity matrix
D: scipy.sparse.spdiags
Diagonal of affinity matrix
L: scipy.sparse.csr_matrix
Graph Laplacian matrix
"""
Np = np.prod(np.array(dims))
if rf is None:
pars = []
for i in range(Np):
pars.append([i, mmap_file, dims, max_radius, kernel, sigma, thr,
p, normalize, use_NN])
if dview is None:
res = list(map(fast_graph_Laplacian_pixel, pars))
else:
res = dview.map(fast_graph_Laplacian_pixel, pars, chunksize=128)
indptr = np.cumsum(np.array([0] + [len(r[0]) for r in res]))
indeces = [item for sublist in res for item in sublist[0]]
data = [item for sublist in res for item in sublist[1]]
W = scipy.sparse.csr_matrix((data, indeces, indptr), shape=[Np, Np])
D = scipy.sparse.spdiags(W.sum(0), 0, Np, Np)
L = D - W
else:
indices, _ = extract_patch_coordinates(dims, rf, strides)
pars = []
for i in range(len(indices)):
pars.append([mmap_file, indices[i], kernel, sigma, thr, p,
normalize, use_NN])
if dview is None:
res = list(map(fast_graph_Laplacian_patches, pars))
else:
res = dview.map(fast_graph_Laplacian_patches, pars)
W = res
D = [scipy.sparse.spdiags(w.sum(0), 0, w.shape[0], w.shape[0]) for w in W]
L = [d - w for (d, w) in zip(W, D)]
return W, D, L
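# Sketch (file name and dims are assumptions): affinity graph for a 64x64
# movie stored pixel-first in a memory-mapped file:
#     W, D, L = fast_graph_Laplacian('movie.mmap', (64, 64), max_radius=10,
#                                    kernel='heat', sigma=1, thr=0.05)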
def fast_graph_Laplacian_patches(pars):
""" Computes the full graph affinity matrix on a patch. See
fast_graph_Laplacian above for definition of arguments.
"""
mmap_file, indices, kernel, sigma, thr, p, normalize, use_NN = pars
    if not isinstance(mmap_file, (str, list)):
Yind = mmap_file
else:
Y = load_memmap(mmap_file)[0]
Yind = np.array(Y[indices])
if normalize:
Yind -= Yind.mean(1)[:, np.newaxis]
Yind /= np.sqrt((Yind**2).sum(1)[:, np.newaxis])
yf = np.ones((Yind.shape[0], 1))
else:
yf = (Yind**2).sum(1)[:, np.newaxis]
yyt = Yind.dot(Yind.T)
W = np.exp(-(yf + yf.T - 2*yyt)/sigma) if kernel.lower() == 'heat' else yyt
W[W<thr] = 0
if kernel.lower() == 'binary':
W[W>0] = 1
if use_NN:
ind = np.argpartition(W, -p, axis=1)[:, :-p]
for i in range(W.shape[0]):
W[i, ind[i]] = 0
W = scipy.sparse.csr_matrix(W)
W = (W + W.T)/2
return W
def fast_graph_Laplacian_pixel(pars):
""" Computes the i-th row of the Graph affinity matrix. See
fast_graph_Laplacian above for definition of arguments.
"""
i, mmap_file, dims, max_radius, kernel, sigma, thr, p, normalize, use_NN = pars
iy, ix = np.unravel_index(i, dims, order='F')
xx = np.arange(0, dims[1]) - ix
yy = np.arange(0, dims[0]) - iy
[XX, YY] = np.meshgrid(xx, yy)
R = np.sqrt(XX**2 + YY**2)
R = R.flatten('F')
indeces = np.where(R < max_radius)[0]
Y = load_memmap(mmap_file)[0]
Yind = np.array(Y[indeces])
y = np.array(Y[i, :])
if normalize:
Yind -= Yind.mean(1)[:, np.newaxis]
Yind /= np.sqrt((Yind**2).sum(1)[:, np.newaxis])
y -= y.mean()
y /= np.sqrt((y**2).sum())
D = Yind - y
if kernel.lower() == 'heat':
w = np.exp(-np.sum(D**2, axis=1)/sigma)
else: # kernel.lower() == 'cos':
w = Yind.dot(y.T)
w[w<thr] = 0
if kernel.lower() == 'binary':
w[w>0] = 1
if use_NN:
ind = np.argpartition(w, -p)[-p:]
else:
ind = np.where(w>0)[0]
return indeces[ind].tolist(), w[ind].tolist()<|fim▁end|> | |
<|file_name|>hostname.rs<|end_file_name|><|fim▁begin|>extern crate getopts;
extern crate libc;
use getopts::{optflag, getopts, usage, OptGroup};
use libc::{c_char, c_int, size_t};
use std::io::stdio;
use std::os;
static HOSTNAME_MAX_LENGTH: uint = 256;
extern {
fn gethostname(name: *mut c_char, namelen: size_t) -> c_int;
}
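// `gethostname` is the POSIX libc call: it writes a NUL-terminated host name
// into the supplied buffer and returns 0 on success, non-zero on failure.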
fn main() {
let exit_status = run(os::args());
os::set_exit_status(exit_status);
}
fn usage_message(program: &String, options: &[OptGroup]) -> String {
let instructions = format!("Usage: {} [options] [HOSTNAME]", program);
usage(instructions.as_slice(), options)
}
fn run(args: Vec<String>) -> int {
let program = &args[0];
let parameters = [
optflag("V", "version", "Print the version number and exit"),
optflag("h", "help", "Print this help message")
];
let options = match getopts(args.tail(), parameters) {
Ok(options) => options,
Err(failure) => fail!(failure.to_string())
};
if options.opt_present("h") {
println(usage_message(program, parameters));
return 0;
}
if options.opt_present("V") {
println!("hostname 1.0.0");
return 0;
}
if options.free.len() == 1 {
err_println("hostname: you must be root to change the host name\n".to_string());
return 1;
}
match get_hostname() {
Ok(hostname) => println(hostname),
Err(error) => err_println(error)
}
return 0;
}
fn get_hostname() -> Result<String, String> {
let mut name = String::with_capacity(HOSTNAME_MAX_LENGTH).to_c_str();
let result = unsafe { gethostname(name.as_mut_ptr(), HOSTNAME_MAX_LENGTH as size_t) };
if result == 0 {
Ok(name.to_string())
} else {
Err("Failed to get hostname".to_string())
}
}
fn println(message: String) {
println!("{}", message);
}
fn err_println(message: String) {
let result = stdio::stderr().write(message.as_bytes());
match result {
Ok(_) => (),
Err(failure) => fail!(format!("Failed to write to stderr: {}", failure))
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import socket
import sys
import os
import time
import random
import csv
import json
from sentence_generator import make_sentence
from copy import deepcopy
from subprocess import check_output
# csv file columns are timestamp, pressure, CO2, ...
SAMPLE_DATA_DIR = os.path.join(os.path.dirname(__file__), "sample_data")
SAMPLE_DATA = os.path.join(SAMPLE_DATA_DIR, "1427199271-sample-breathing.csv")
SOCKET_PATH = '/tmp/lucidity.socket'
TIME_WARP = float(os.environ.get('TIME_WARP', 1.0))
MAX_LINES_AT_ONCE = int(os.environ.get('MAX_LINES_AT_ONCE', 1))
EMIT_RANDOM_MSGS = bool(os.environ.get('GIBBERISH', False))
class SocketNotFound(Exception):
pass
# Read in data from the example csv file
datapoints = []
with open(SAMPLE_DATA, 'rb') as csvfile:
datareader = csv.reader(csvfile)
for row in datareader:
datapoints.append([float(x) for x in row])
# Try and connect to socket. If any error, print out error and output to stdout instead.
try:
# Make sure the socket exists
if not os.path.exists(SOCKET_PATH):
raise SocketNotFound("No socket at %s" % SOCKET_PATH)
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.setblocking(0) # important - don't block on reads
sock.connect(SOCKET_PATH)
output = sock.sendall
except (SocketNotFound, socket.error), msg:
print >>sys.stderr, "Error connecting to %s.\n\n%s." % (SOCKET_PATH, msg)
sys.exit(1)
def receive(the_socket):
# Act as an iterator. Sometimes >1 message will have accumulated on the
# socket by the time we come to read it.
# Yield either None (if nothing received, buffer empty) or json decode line by line.
rbuffer = ''
while True:
try:
incoming = the_socket.recv(1024)
rbuffer += incoming
except socket.error:
# nothing to read
yield None
continue
while rbuffer.find("\n") != -1:
line, rbuffer = rbuffer.split("\n", 1)
try:
yield json.loads(line)
except ValueError, e:
print >>sys.stderr, str(e)
print >>sys.stderr, line
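# Note: the generator keeps its own read buffer, so it should be created once
# and polled repeatedly; each .next() yields None when nothing is pending, or
# one decoded JSON object per newline-terminated message.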
def enum(**enums):
return type('Enum', (), enums)
STATES = enum(
INITIALISING = "initialising",
WAITING = "waiting",
CALIBRATING = "calibrating",
ANALYSING = "analysing",
COLLECTING = "collecting",
)
ACTIVE_STATES = [ STATES.CALIBRATING, STATES.ANALYSING, STATES.COLLECTING ]
DEFAULT_SETTINGS = {
"calibration_time": 3,
"sample_collection_time": 3,
"collection_control": "c",
"auto_triggers": True,
"blank_capture": False,
"total_breath": False,
"collection_rate": 4,
"collection_limit": 5,
"filename": "",
"capture_window": {
"start": {
"percent": 85,
"gradient": "rising"
},
"end": {
"percent": 15,
"gradient": "falling"
},
}
}
class Publisher:
def __init__(self):
self.lines_buffered = 0
self.index = 0
self.buffer = ""
self.state = None
# get own version
        self.version = check_output(['git', 'describe', '--tags']).strip()
self.change_state(STATES.INITIALISING)
self.user_settings = {
"calibration_time": 5,
"sample_collection_time": 2,
"collection_control": "p",
"auto_triggers": False,
"blank_capture": False,
"total_breath": False,
"collection_rate": 2,
"collection_limit": 7,
"filename": "myfile",
"capture_window": {
"start": {
"percent": 62,
"gradient": "rising"
},
"end": {
"percent": 9,
"gradient": "falling"
},
}
}
self.settings = deepcopy(DEFAULT_SETTINGS)
self.set_completion(0,0)
def set_completion(self, by_volume, by_time):
self.collection_completion = {
"volume": min(100, by_volume),
"time": min(100, by_time),
}
self.emit(
collection_completion = self.collection_completion,
)
def change_state(self, new_state, message=None, severity=None):
        if self.state != new_state:
            if message is None:
                message = "State changed to %s." % new_state
            if severity is None:
                severity = "info"
            self.state = new_state
            self.emit(message=message, severity=severity)
self.set_completion(0, 0)
def emit(self, **kwargs):
h = {
"state": self.state,
"version": self.version,
"is_simulation": True # DON'T include this member in a real publisher's messages
}
for key,val in kwargs.iteritems():
h[key] = val
output(json.dumps(h) + "\n")
def run(self):
# Wait a while to simulate initialisation
self.change_state(STATES.INITIALISING)
time.sleep(3.0 / TIME_WARP)
self.change_state(STATES.WAITING)
        # Create the socket reader once so its buffer persists across reads,
        # then loop until user hits Ctrl+C
        receiver = receive(sock)
        while True:
try:
# read from sock
                received = receiver.next()
been_nudged = False
if received is not None and 'command' in received:
# act on information received
print "Received: %s" % received
do_what = received['command']
if do_what == "stop":
self.change_state(STATES.WAITING)
elif do_what == "start":
self.change_state(STATES.CALIBRATING)
self.emit(message="Using settings: " + json.dumps(received['settings']), severity="info", results_dir=SAMPLE_DATA_DIR)
self.emit(message="Got timestamp: " + json.dumps(received['timestamp']), severity="info")
elif do_what == "request_state":
self.emit()
elif do_what == "request_settings_current":<|fim▁hole|> self.emit(settings=self.settings, results_dir=SAMPLE_DATA_DIR)
elif do_what == "apply_settings_default":
self.settings = deepcopy(DEFAULT_SETTINGS)
self.emit(settings=self.settings, message="Loaded default settings.", severity="info")
elif do_what == "apply_settings_user":
self.settings = deepcopy(self.user_settings)
self.emit(settings=self.settings, message="Loaded user settings.", severity="info")
elif do_what == "save_settings":
self.user_settings = received['settings']
self.settings = deepcopy(self.user_settings)
self.emit(settings=self.settings, message="Saved user settings.", severity="info")
elif do_what == "nudge":
been_nudged = True
# While running...
if self.state in ACTIVE_STATES:
# ...cycle through active states to simulate instrument doing things
if been_nudged:
current = ACTIVE_STATES.index(self.state)
next = current + 1
if next >= len(ACTIVE_STATES):
self.change_state(STATES.WAITING)
else:
self.change_state(ACTIVE_STATES[next])
# Emit incrementing completion data during simulated collection
if self.state == STATES.COLLECTING:
self.set_completion(
by_volume = self.collection_completion["volume"] + random.random() * 5,
by_time = self.collection_completion["time"] + 2.5,
)
# Get data (ultimately this comes from the sample file)
datapoint = datapoints[self.index]
# Replace the first member of datapoint with the current timestamp
datapoint[0] = time.time()
# Fourth column of data should be zero unless we are in collecting state
if self.state != STATES.COLLECTING:
datapoint[3] = 0
# Put comma-separated line of data into the buffer
self.buffer += ",".join([str(x) for x in datapoint]) + "\n"
self.lines_buffered += 1
# Output data if the 'buffer' is full, or on a random spin.
if self.lines_buffered >= MAX_LINES_AT_ONCE or random.random() < 0.3:
if self.state in ACTIVE_STATES:
output( self.buffer )
self.buffer = ""
self.lines_buffered = 0
# Move to next data point. Increment self.index and loop back round
self.index += 1
if self.index >= len(datapoints):
self.index = 0
# Emit some random debugging every now and then
if EMIT_RANDOM_MSGS:
if self.state == STATES.WAITING:
if random.random() < 0.1:
self.emit(message="Waiting" + "." * random.randint(2,5))
else:
x = random.random()
if x < 0.05:
self.emit(message="ERROR: " + make_sentence(), severity="error")
elif x < 0.1:
self.emit(message="WARNING: " + make_sentence(), severity="warning")
elif x < 0.5:
self.emit(message=make_sentence())
time.sleep(0.2 / TIME_WARP)
except KeyboardInterrupt:
break
p = Publisher()
p.run()
sock.close()
print "Finished."<|fim▁end|> | |
<|file_name|>ConcurrentMapBasedDoubleBinaryOperatorMemoizer.java<|end_file_name|><|fim▁begin|>/*
* This file is part of memoization.java. It is subject to the license terms in the LICENSE file found in the top-level
* directory of this distribution and at http://creativecommons.org/publicdomain/zero/1.0/. No part of memoization.java,
* including this file, may be copied, modified, propagated, or distributed except according to the terms contained
* in the LICENSE file.
*/
package de.xn__ho_hia.memoization.map;
import static java.util.Objects.requireNonNull;
import java.util.concurrent.ConcurrentMap;
import java.util.function.DoubleBinaryOperator;
import de.xn__ho_hia.memoization.shared.DoubleBinaryFunction;
import de.xn__ho_hia.quality.suppression.CompilerWarnings;
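// Memoizes a DoubleBinaryOperator: each (left, right) pair is mapped to a
// cache key by the supplied key function, and the boxed result is stored in
// the backing ConcurrentMap so calls with equal keys skip recomputation.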
final class ConcurrentMapBasedDoubleBinaryOperatorMemoizer<KEY>
extends ConcurrentMapBasedMemoizer<KEY, Double>
implements DoubleBinaryOperator {
private final DoubleBinaryFunction<KEY> keyFunction;
private final DoubleBinaryOperator operator;
@SuppressWarnings(CompilerWarnings.NLS)
public ConcurrentMapBasedDoubleBinaryOperatorMemoizer(
final ConcurrentMap<KEY, Double> cache,
final DoubleBinaryFunction<KEY> keyFunction,
final DoubleBinaryOperator operator) {
super(cache);
this.keyFunction = requireNonNull(keyFunction,
"Provide a key function, might just be 'MemoizationDefaults.doubleBinaryOperatorHashCodeKeyFunction()'.");
this.operator = requireNonNull(operator,
"Cannot memoize a NULL DoubleBinaryOperator - provide an actual DoubleBinaryOperator to fix this.");
}
@Override
public double applyAsDouble(final double left, final double right) {<|fim▁hole|> .doubleValue();
}
}<|fim▁end|> | final KEY key = keyFunction.apply(left, right);
return computeIfAbsent(key, givenKey -> Double.valueOf(operator.applyAsDouble(left, right))) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
.. moduleauthor:: Chris Dusold <[email protected]>
A module containing general purpose, cross instance hashing.
This module intends to make storage and cache checking stable accross instances.
"""
from drivelink.hash._hasher import hash<|fim▁hole|><|fim▁end|> | from drivelink.hash._hasher import frozen_hash
from drivelink.hash._hasher import Deterministic_Hashable |
<|file_name|>graph_creation.py<|end_file_name|><|fim▁begin|># Creates graph of restaurant reviews for yelp or trip advisor.<|fim▁hole|># Rob Churchill
#
# NOTE: I learned to do this in my data science class last semester. If you are looking for plagiarism things, you will almost certainly find similar clustering code.
# I did not copy it; I learned this specific way of doing it and referred to my previous assignments when doing it for this project. If you would like to see those
# assignments, I will provide them on request. Otherwise, I don't think it's worth adding a lot of extra files for the sole sake of showing that I haven't plagiarized.
import networkx as nx
import numpy as np
import scipy as sp
import csv
folder = 'data/'
file_names = ['yelp_data.csv', 'trip_advisor_data.csv']
# EDIT this line to change which website you make the graph for. True=yelp, False=TripAdvisor
yelp = False
yelp_dataset = list()
file_name = file_names[1]
if yelp == True:
file_name = file_names[0]
# reads in appropriate file given yelp boolean variable
with open(folder+file_name, 'r') as f:
reader = csv.reader(f)
for line in reader:
yelp_dataset.append(line)
# removes headers
yelp_dataset.remove(yelp_dataset[0])
print len(yelp_dataset)
# create the graph
G = nx.Graph()
for y in yelp_dataset:
# add the nodes if they don't already exist
G.add_node(y[4], type='restaurant')
G.add_node(y[13], type='reviewer')
# add the edge between the reviewer and restaurant, weight is in different position in each file.
if yelp == True:
G.add_edge(y[13], y[4], weight=float(y[2]))
else:
G.add_edge(y[13], y[4], weight=float(y[1]))
print nx.number_of_nodes(G)
print nx.number_of_edges(G)
# write graph to gml file.
nx.write_gml(G, 'ta_graph.gml')<|fim▁end|> | # writes graph to gml file for use in gephi
# |
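# A follow-up sketch (not part of the original script): reading the bipartite
# reviewer/restaurant graph back for analysis. The file name and 'type'
# attribute match the code above; everything else is illustrative.
#
# G = nx.read_gml('ta_graph.gml')
# restaurants = [n for n, d in G.nodes(data=True) if d.get('type') == 'restaurant']
# avg_rating = {r: np.mean([w['weight'] for _, _, w in G.edges(r, data=True)])
#               for r in restaurants}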
<|file_name|>wasm32.rs<|end_file_name|><|fim▁begin|>fn<|fim▁hole|>{
"wasm32"
}<|fim▁end|> | wasm32
() -> &'static str
<|file_name|>puppet_client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from docker.errors import DockerException, NotFound
from oslo_log import log as logging
from oslo_config import cfg
from docker import Client as DC
from validator.common.exception import CookbookSyntaxException, \
CookbookDeploymentException, \
CookbookInstallException, \
DockerContainerException
from validator.common.i18n import _LW, _LE, _, _LI
LOG = logging.getLogger(__name__)
opts = [
cfg.StrOpt('url'),
cfg.StrOpt('image'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="clients_docker")
class PuppetClient(object):
"""
Wrapper for Docker client
"""
def __init__(self, url=CONF.clients_docker.url):
self._url = url
self.container = None
try:
self.dc = DC(base_url=self._url)
except DockerException as e:
LOG.error(_LE("Docker client error: %s") % e)
raise e
def cookbook_deployment_test(self, cookbook, image=CONF.clients_docker.image):
"""
Try to process a cookbook and return results
:param cookbook: cookbook to deploy
:param image: image to deploy to
:return: dictionary with results
"""
LOG.debug("Sending cookbook to docker server in %s" % self._url)
b_success = True
msg = {}
self.run_container(image)
# inject custom solo.json/solo.rb file
json_cont = CONF.clients_puppet.cmd_config % cookbook
cmd_inject = CONF.clients_puppet.cmd_inject.format(json_cont)
self.execute_command(cmd_inject)
msg['install'] = self.run_install(cookbook)
b_success &= msg['install']['success']
msg['test'] = self.run_test(cookbook)
b_success &= msg['test']['success']
msg['deploy'] = self.run_deploy(cookbook)
b_success &= msg['deploy']['success']
# check execution output
if b_success:
msg['result'] = {
'success': True,
'result': "Cookbook %s successfully deployed\n" % cookbook
}
else:
msg['result'] = {
'success': False,
'result': "Error deploying cookbook {}\n".format(cookbook)
}
LOG.error(_LE("Cookbook %s deployment result: %s") % (cookbook, msg['result']['result']))
self.remove_container()
return msg
def run_deploy(self, cookbook):
""" Run cookbook deployment
:param cookbook: cookbook to deploy
:return msg: dictionary with results and state<|fim▁hole|> resp_launch = self.execute_command(cmd_launch)
msg = {
'success': True,
'response': resp_launch
}
LOG.debug(_("Launch result: %s") % resp_launch)
if resp_launch is None or "FATAL" in resp_launch:
msg['success'] = False
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook deployment exception %s" % e))
raise CookbookDeploymentException(cookbook=cookbook)
return msg
def run_test(self, cookbook):
""" Test cookbook syntax
:param cookbook: cookbook to test
:return msg: dictionary with results and state
"""
try:
cmd_test = CONF.clients_puppet.cmd_test.format(cookbook)
resp_test = self.execute_command(cmd_test)
msg = {
'success': True,
'response': resp_test
}
for line in resp_test.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Test result: %s") % resp_test)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook syntax exception %s" % e))
raise CookbookSyntaxException(cookbook=cookbook)
return msg
def run_install(self, cookbook):
"""Run download and install command
:param cookbook: cookbook to process
:return msg: operation result
"""
try:
cmd_install = CONF.clients_puppet.cmd_install.format(cookbook)
resp_install = self.execute_command(cmd_install)
msg = {
'success': True,
'response': resp_install
}
for line in resp_install.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Install result: %s") % resp_install)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Chef install exception: %s" % e))
raise CookbookInstallException(cookbook=cookbook)
return msg
def run_container(self, image):
"""Run and start a container based on the given image
:param image: image to run
:return:
"""
contname = "{}-validate".format(image).replace("/", "_")
try:
try:
self.dc.remove_container(contname, force=True)
LOG.info(_LI('Removing old %s container' % contname))
except NotFound:
pass
self.container = self.dc.create_container(
image,
tty=True,
name=contname
).get('Id')
self.dc.start(container=self.container)
except AttributeError as e:
LOG.error(_LW("Error creating container: %s" % e))
raise DockerContainerException(image=image)
def remove_container(self, kill=True):
"""destroy container on exit
:param kill: inhibits removal for testing purposes
"""
self.dc.stop(self.container)
if kill:
self.dc.remove_container(self.container)
def execute_command(self, command):
""" Execute a command in the given container
:param command: bash command to run
:return: execution result
"""
bash_txt = "/bin/bash -c \"{}\"".format(command.replace('"', '\\"'))
exec_txt = self.dc.exec_create(
container=self.container,
cmd=bash_txt
)
return self.dc.exec_start(exec_txt)<|fim▁end|> | """
try:
# launch execution
cmd_launch = CONF.clients_puppet.cmd_launch |
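# A minimal usage sketch (hedged: assumes the clients_docker/clients_puppet
# options are registered in the validator config and a Docker daemon is
# reachable at the given URL; names are illustrative):
#
# client = PuppetClient(url='tcp://127.0.0.1:2375')
# results = client.cookbook_deployment_test('apache', image='validator-puppet')
# if not results['result']['success']:
#     print(results['result']['result'])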
<|file_name|>plane_error.py<|end_file_name|><|fim▁begin|>import numpy as np
def plane_error(results, target):
"""
Computes angle between target orbital plane and actually achieved plane.
:param results: Results struct as output by flight_manager (NOT flight_sim_3d).
:param target: Target struct as output by launch_targeting.
:return: Angle between the two orbital planes.
"""
<|fim▁hole|> inc = results.powered[results.n-1].orbit.inc
lan = results.powered[results.n-1].orbit.lan
Rx = np.array([[1, 0, 0],
[0, np.cos(np.deg2rad(inc)), -np.sin(np.deg2rad(inc))],
[0, np.sin(np.deg2rad(inc)), np.cos(np.deg2rad(inc))]])
Rz = np.array([[np.cos(np.deg2rad(lan)), -np.sin(np.deg2rad(lan)), 0],
[np.sin(np.deg2rad(lan)), np.cos(np.deg2rad(lan)), 0],
[0, 0, 1]])
reached = np.matmul(Rz, np.matmul(Rx, np.array([0, 0, -1])))
error = np.rad2deg(np.arccos(np.vdot(target.normal, reached)))
return error<|fim▁end|> | |
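# Worked sketch (illustrative numbers, not from the original project): with
# inc = 51.6 and lan = 0, Rz is the identity, so the code above reduces to
# reached = Rx @ [0, 0, -1] = [0, sin(51.6 deg), -cos(51.6 deg)];
# a target.normal equal to that vector yields error = 0, and any other target
# plane gives the angular separation of the two plane normals in degrees.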
<|file_name|>MinuteRepeater.js<|end_file_name|><|fim▁begin|>// MinuteRepeater Class
// @params dial: object
// @params settings: object
// @params parentWatch: Watch instance
//
// The minuterepeater class accepts a dial, or defaults to the 0th index of the
// dials array on the parent Watch class, and based on the hands' rotation values
// calculates the amount of hours, quarter hours, and remaining minutes. With
// these values, the class then plays back chimes to audibly indicate the time.
class MinuteRepeater {
constructor(dial, repeater, parentWatch) {
this.errorChecking(dial, repeater);
this.hands = dial.hands;
this.hourAngle = 0;
this.hourChimes = 0;
this.hourElement = null;
this.hourDivisor = dial.format === 12 ?
30 :
15;
this.allMinutes = 0;
this.minuteAngle = 0;
this.fifteenMinuteChimes = 0;
this.fifteenMinuteElement = null;
this.minuteChimes = 0;
this.minuteElement = null;
this.trigger = document.getElementById(repeater.id || repeater.trigger);
this.chimes = repeater.chimes;
this.hourChimeDuration = 0;
this.counter = 1;
this.isPlaying = false;
this.quartersPlaying = false;
this.minutesPlaying = false;
this.parent = parentWatch;
if (!this.parent.testing) this.init();
}
errorChecking(dial, settings) {
if (!settings.id && !settings.trigger) throw new ReferenceError('The MinuteRepeater class requires that an ID of the repeater element be provided.');
if (!dial.hands.minute) throw new ReferenceError('The minute repeater, like, by definition, requires a dial which supports a minute hand.');
}
convertAngleToIncrements() {
this.hourAngle = this.parent.getCurrentRotateValue(this.hands.hour);
if (this.hourAngle > 360) {
this.hourAngle -= 360;
}
this.hourChimes = Math.floor(this.hourAngle / this.hourDivisor) || 12;
this.minuteAngle = this.parent.getCurrentRotateValue(this.hands.minute);
if (this.minuteAngle > 360) {
this.minuteAngle %= 360;
}
this.allMinutes = Math.floor(this.minuteAngle / 6);
this.fifteenMinuteChimes = Math.floor(this.allMinutes / 15);
this.minuteChimes = Math.floor(this.allMinutes - (this.fifteenMinuteChimes * 15));
}
bindEvents() {
this.trigger.addEventListener('click', () => {
this.toggleActiveState(this.trigger);
this.togglePlaying();
});
this.trigger.addEventListener('transitionend', () => {
if (this.trigger.classList.contains('active')) this.toggleActiveState(this.trigger);
});
this.hourElement.addEventListener('ended', () => {
if (!this.quartersPlaying && !this.minutesPlaying) {
this.playHours();
}
});
if (this.chimes.quarter) {
this.fifteenMinuteElement.addEventListener("ended", () => {
this.playQuarterHours();
});
}
this.minuteElement.addEventListener('ended', () => {
if (this.quartersPlaying) {
this.playQuarterHours();
} else {
this.playMinutes();
}
});
}
toggleActiveState(btn) {
btn.classList.toggle('active');
}
stopAll() {
this.hourElement.pause();
this.hourElement.currentTime = 0;
if (this.chimes.quarter) {
this.fifteenMinuteElement.pause();
this.fifteenMinuteElement.currentTime = 0;
}
this.minuteElement.pause();
this.minuteElement.currentTime = 0;
this.counter = 1;
this.isPlaying = false;
this.quartersPlaying = false;
this.minutesPlaying = false;
}
togglePlaying() {
if (this.parent.globalInterval) {
this.isPlaying = !this.isPlaying;
if (this.isPlaying) {
this.convertAngleToIncrements();
this.playHours();
} else {
this.stopAll();
}
}
}
playHours() {
if (this.counter <= this.hourChimes) {
this.hourElement.play();
this.counter++;
} else if (this.counter === this.hourChimes + 1) {
this.counter = 1;
this.playQuarterHours();
}
}
playQuarterHours() {
if (this.chimes.quarter) {
this.playFifteenMinutes();
} else {
if (this.counter <= this.fifteenMinuteChimes) {
this.quartersPlaying = true;
this.hourElement.play();
setTimeout(() => {
this.minuteElement.play();
this.counter++;
}, this.hourChimeDuration / 2 * 500);
} else {
this.quartersPlaying = false;
this.minutesPlaying = true;
this.counter = 1;
this.playMinutes();
}
}
}
playFifteenMinutes() {
if (this.counter <= this.fifteenMinuteChimes) {
this.fifteenMinuteElement.play();
this.counter++;
} else if (this.counter === this.fifteenMinuteChimes + 1) {
this.counter = 1;
this.playMinutes();
}
}
playMinutes() {
if (this.counter <= this.minuteChimes) {
this.minuteElement.play();
this.counter++;
} else if (this.counter === this.minuteChimes + 1) {
this.stopAll();
}
}
buildAudioElements() {<|fim▁hole|> this.hourElement.addEventListener("loadedmetadata", () => {
this.hourChimeDuration = this.hourElement.duration;
}, false);
if (this.chimes.quarter) {
this.fifteenMinuteElement = document.createElement("audio");
this.fifteenMinuteElement.src = this.chimes.quarter;
document.body.appendChild(this.fifteenMinuteElement);
}
this.minuteElement = document.createElement('audio');
this.minuteElement.src = this.chimes.minute;
document.body.appendChild(this.minuteElement);
}
updateCursorForTrigger() {
this.trigger.style.cursor = 'pointer';
}
init() {
this.buildAudioElements();
this.bindEvents();
this.updateCursorForTrigger();
}
}
module.exports = MinuteRepeater;<|fim▁end|> | this.hourElement = document.createElement('audio');
this.hourElement.src = this.chimes.hour;
document.body.appendChild(this.hourElement);
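// A minimal usage sketch (hedged: the dial and watch objects come from the
// surrounding Watch library, which is not shown here; element ids and audio
// file names are illustrative):
//
//   const repeater = new MinuteRepeater(
//     watch.dials[0],
//     { trigger: 'repeater-btn', chimes: { hour: 'hour.mp3', minute: 'minute.mp3' } },
//     watch
//   );
//   // Clicking #repeater-btn then chimes the hours, quarter hours, and minutes.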
|
<|file_name|>CronSetDirectory.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2000 - 2011 TagServlet Ltd
*
* This file is part of Open BlueDragon (OpenBD) CFML Server Engine.
*
* OpenBD is free software: you can redistribute it and/or modify<|fim▁hole|> *
* OpenBD is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenBD. If not, see http://www.gnu.org/licenses/
*
* Additional permission under GNU GPL version 3 section 7
*
* If you modify this Program, or any covered work, by linking or combining
* it with any of the JARS listed in the README.txt (or a modified version of
* (that library), containing parts covered by the terms of that JAR, the
* licensors of this Program grant you additional permission to convey the
* resulting work.
* README.txt @ http://www.openbluedragon.org/license/README.txt
*
* http://openbd.org/
*
* $Id: CronSetDirectory.java 1765 2011-11-04 07:55:52Z alan $
*/
package org.alanwilliamson.openbd.plugin.crontab;
import com.naryx.tagfusion.cfm.engine.cfArgStructData;
import com.naryx.tagfusion.cfm.engine.cfBooleanData;
import com.naryx.tagfusion.cfm.engine.cfData;
import com.naryx.tagfusion.cfm.engine.cfSession;
import com.naryx.tagfusion.cfm.engine.cfmRunTimeException;
import com.naryx.tagfusion.expression.function.functionBase;
public class CronSetDirectory extends functionBase {
private static final long serialVersionUID = 1L;
public CronSetDirectory(){ min = max = 1; setNamedParams( new String[]{ "directory" } ); }
public String[] getParamInfo(){
return new String[]{
"uri directory - will be created if not exists",
};
}
public java.util.Map getInfo(){
return makeInfo(
"system",
"Sets the URI directory that the cron tasks will run from. Calling this function will enable the crontab scheduler to start. This persists across server restarts",
ReturnType.BOOLEAN );
}
public cfData execute(cfSession _session, cfArgStructData argStruct ) throws cfmRunTimeException {
CronExtension.setRootPath( getNamedStringParam(argStruct, "directory", null ) );
return cfBooleanData.TRUE;
}
}<|fim▁end|> | * it under the terms of the GNU General Public License as published by
* Free Software Foundation,version 3. |
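// A usage sketch from CFML (hedged: the path is illustrative; per getInfo()
// above, calling this enables the crontab scheduler and the setting persists
// across server restarts):
//
//   <cfset ok = CronSetDirectory( directory="/opt/openbd/cron" )>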
<|file_name|>downloadview.py<|end_file_name|><|fim▁begin|>#
# downloadview.py
#
# Copyright 2010 Brett Mravec <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
class DownloadView:
def __init__ (self, downloadlist):
self.downloadlist = downloadlist
downloadlist.view = self
<|fim▁hole|>
def update_download (self, download):
print 'DownloadView.update_download (download): stub'
def remove_download (self, download):
print 'DownloadView.remove_download (download): stub'
def get_selected (self):
print 'DownloadView.get_selected (): stub'
return []<|fim▁end|> | def add_download (self, download):
print 'DownloadView.add_download (download): stub' |
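# A minimal subclass sketch (illustrative; concrete views are expected to
# override the stubs above):
#
# class ConsoleDownloadView (DownloadView):
#     def add_download (self, download):
#         print 'added:', download
#     def remove_download (self, download):
#         print 'removed:', download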
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for mysite2 project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'f@d3+wz7y8uj!+alcvc!6du++db!-3jh6=vr(%z(e^2n5_fml-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myauthen',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]<|fim▁hole|>ROOT_URLCONF = 'mysite2.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite2.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'<|fim▁end|> | |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|>import os
import types
import binascii
from django.db import models
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
try:
from django.utils.encoding import smart_text
except ImportError:
from django.utils.encoding import smart_str as smart_text
from keyczar import keyczar
class EncryptedFieldException(Exception):
pass
# Simple wrapper around keyczar to standardize the initialization
# of the crypter object and allow for others to extend as needed.
class KeyczarWrapper(object):
def __init__(self, keyname, *args, **kwargs):
self.crypter = keyczar.Crypter.Read(keyname)
def encrypt(self, cleartext):
return self.crypter.Encrypt(cleartext)
def decrypt(self, ciphertext):
return self.crypter.Decrypt(ciphertext)
class EncryptedFieldMixin(object, metaclass=models.SubfieldBase):
"""
EncryptedFieldMixin will use keyczar to encrypt/decrypt data that is being
marshalled in/out of the database into application Django model fields.
This is very helpful in ensuring that data at rest is encrypted and
minimizing the effects of SQL Injection or insider access to sensitive
databases containing sensitive information.
The most basic use of this mixin is to have a single encryption key for all
data in your database. This lives in a Keyczar key directory specified by:
the setting - settings.ENCRYPTED_FIELDS_KEYDIR -
Optionally, you can name specific encryption keys for data-specific purposes
in your model such as:
special_data = EncryptedCharField( ..., keyname='special_data' )
The Mixin will handle the encryption/decryption seamlessly, but native
SQL queries may need a way to filter data that is encrypted. Using the
optional 'prefix' kwarg will prepend a static identifier to your encrypted
data before it is written to the database.
There are other use cases where you may not wish to encrypt all of the data
in a database. For example, if you have a survey application that allows
users to enter arbitrary questions and answers, users may request sensitive
information to be stored such as SSN, Driver License #, Credit Card, etc.
Your application can detect these sensitive fields, manually encrypt the
data and store that in the database mixed with other cleartext data.
The model should then only decrypt the specific fields needed. Use the
kwarg 'decrypt_only' to specify this behavior and the model will not
encrypt the data inbound and only attempt to decrypt outbound.
Encrypting data will significantly change the size of the data being stored
and this may cause issues with your database column size. Before storing
any encrypted data in your database, ensure that you have the proper
column width otherwise you may experience truncation of your data depending
on the database engine in use.
To have the mixin enforce max field length, either:
a) set ENFORCE_MAX_LENGTH = True in your settings files
b) set 'enforce_max_length' to True in the kwargs of your model.
A ValueError will be raised if the encrypted length of the data (including
prefix if specified) is greater than the max_length of the field.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the EncryptedFieldMixin with the following
optional settings:
* keyname: The name of the keyczar key
* crypter_klass: A custom class that is extended from Keyczar.
* prefix: A static string prepended to all encrypted data
* decrypt_only: Boolean whether to only attempt to decrypt data coming
from the database and not attempt to encrypt the data
being written to the database.
"""
# Allow for custom class extensions of Keyczar.
self._crypter_klass = kwargs.pop('crypter_klass', KeyczarWrapper)
<|fim▁hole|> if self.keyname:
if hasattr(settings, 'DEFAULT_KEY_DIRECTORY'):
self.keydir = os.path.join(
settings.DEFAULT_KEY_DIRECTORY,
self.keyname
)
else:
raise ImproperlyConfigured(
'You must set settings.DEFAULT_KEY_DIRECTORY'
'when using the keyname kwarg'
)
# If the keyname is not defined on a per-field
# basis, then check for the global data encryption key.
if not self.keyname and hasattr(settings, 'ENCRYPTED_FIELDS_KEYDIR'):
self.keydir = settings.ENCRYPTED_FIELDS_KEYDIR
# If we still do not have a keydir, then raise an exception
if not self.keydir:
raise ImproperlyConfigured(
'You must set settings.ENCRYPTED_FIELDS_KEYDIR '
'or name a key with kwarg `keyname`'
)
# The name of the keyczar key without path for logging purposes.
self.keyname = os.path.dirname(self.keydir)
# Prefix encrypted data with a static string to allow filtering
# of encrypted data vs. non-encrypted data using vanilla MySQL queries.
self.prefix = kwargs.pop('prefix', '')
# Allow for model decryption-only, bypassing encryption of data.
# Useful for models that have a sparse amount of data that is required
# to be encrypted.
self.decrypt_only = kwargs.pop('decrypt_only', False)
self._crypter = self._crypter_klass(self.keydir)
# Ensure the encrypted data does not exceed the max_length
# of the database. Data truncation is a possibility otherwise.
self.enforce_max_length = getattr(settings, 'ENFORCE_MAX_LENGTH', False)
if not self.enforce_max_length:
self.enforce_max_length = kwargs.pop('enforce_max_length', False)
super(EncryptedFieldMixin, self).__init__(*args, **kwargs)
def crypter(self):
return self._crypter
def get_internal_type(self):
return 'TextField'
def to_python(self, value):
if value is None or not isinstance(value, str):
return value
if self.prefix and value.startswith(self.prefix):
value = value[len(self.prefix):]
try:
value = self.crypter().decrypt(value)
# value = value.decode('unicode_escape')
except keyczar.errors.KeyczarError:
pass
except UnicodeEncodeError:
pass
except binascii.Error:
pass
return super(EncryptedFieldMixin, self).to_python(value)
def get_prep_value(self, value):
value = super(EncryptedFieldMixin, self).get_prep_value(value)
if value is None or value == '' or self.decrypt_only:
return value
if isinstance(value, str):
value = value.encode('unicode_escape')
# value = value.encode('ascii')
else:
value = str(value)
return self.prefix + self.crypter().encrypt(value)
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
if self.enforce_max_length:
if (
value
and hasattr(self, 'max_length')
and self.max_length
and len(value) > self.max_length
):
raise ValueError(
'Field {0} max_length={1} encrypted_len={2}'.format(
self.name,
self.max_length,
len(value),
)
)
return value
class EncryptedCharField(EncryptedFieldMixin, models.CharField):
pass
class EncryptedTextField(EncryptedFieldMixin, models.TextField):
pass
class EncryptedDateTimeField(EncryptedFieldMixin, models.DateTimeField):
pass
class EncryptedIntegerField(EncryptedFieldMixin, models.IntegerField):
@cached_property
def validators(self):
"""
See issue https://github.com/defrex/django-encrypted-fields/issues/7
Need to keep all field validators, but need to change `get_internal_type` on the fly
to prevent fail in django 1.7.
"""
self.get_internal_type = lambda: 'IntegerField'
return models.IntegerField.validators.__get__(self)
class EncryptedDateField(EncryptedFieldMixin, models.DateField):
pass
class EncryptedFloatField(EncryptedFieldMixin, models.FloatField):
pass
class EncryptedEmailField(EncryptedFieldMixin, models.EmailField):
pass
class EncryptedBooleanField(EncryptedFieldMixin, models.BooleanField):
pass
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], [r'^encrypted_fields\.fields\.\w+Field'])
except ImportError:
pass<|fim▁end|> | self.keyname = kwargs.pop('keyname', None)
# If settings.DEFAULT_KEY_DIRECTORY, then the key
# is located in DEFAULT_KEY_DIRECTORY/keyname |
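# A minimal model sketch (hedged: assumes settings.ENCRYPTED_FIELDS_KEYDIR
# points at a keyczar key directory, and settings.DEFAULT_KEY_DIRECTORY for
# the named key; model and key names are illustrative):
#
# from django.db import models
# from encrypted_fields.fields import EncryptedCharField
#
# class Patient(models.Model):
#     name = EncryptedCharField(max_length=255)
#     ssn = EncryptedCharField(max_length=255, keyname='special_data', prefix='enc:')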
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2002-2018 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Server implementation config for bofhd.
History
-------
This class used to be a part of the bofhd server script itself. It was
moved to a separate module after:
commit ff3e3f1392a951a059020d56044f8017116bb69c
Merge: c57e8ee 61f02de
Date: Fri Mar 18 10:34:58 2016 +0100<|fim▁hole|>"""
from __future__ import print_function
import io
def _format_class(module, name):
""" Format a line for the config. """
return u'{0}/{1}'.format(module, name)
class BofhdConfig(object):
""" Container for parsing and keeping a bofhd config. """
def __init__(self, filename=None):
""" Initialize new config. """
self._exts = list() # NOTE: Must keep order!
if filename:
self.load_from_file(filename)
def load_from_file(self, filename):
""" Load config file. """
with io.open(filename, encoding='utf-8') as f:
for lineno, line in enumerate(f, 1):
line = line.strip()
if not line or line.startswith('#'):
continue
try:
mod, cls = line.split("/", 1)
except ValueError:
mod, cls = None, None
if not mod or not cls:
raise Exception("Parse error in '%s' on line %d: %r" %
(filename, lineno, line))
self._exts.append((mod, cls))
def extensions(self):
""" All extensions from config. """
for mod, cls in self._exts:
yield mod, cls
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description="Parse config and output classes")
parser.add_argument(
'config',
metavar='FILE',
help='Bofhd configuration file')
args = parser.parse_args()
config = BofhdConfig(filename=args.config)
print('Command classes:')
for mod, name in config.extensions():
print('-', _format_class(mod, name))<|fim▁end|> | |
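# An example config file this parser accepts (one module/class pair per line,
# separated by '/'; the class paths are illustrative):
#
#     # comments and blank lines are ignored
#     Cerebrum.modules.bofhd.bofhd_core_commands/BofhdExtension
#     Cerebrum.modules.no.uio.bofhd_uio_cmds/BofhdExtension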
<|file_name|>cases.py<|end_file_name|><|fim▁begin|>from app import db, GenericRecord
class Case(GenericRecord):
__collection__ = 'cases'<|fim▁hole|><|fim▁end|> |
db.register([Case]) |
<|file_name|>tempusdominus-bootstrap-4.js<|end_file_name|><|fim▁begin|>/*@preserve
* Tempus Dominus Bootstrap4 v5.0.0-alpha13 (https://tempusdominus.github.io/bootstrap-4/)
* Copyright 2016-2017 Jonathan Peterson
* Licensed under MIT (https://github.com/tempusdominus/bootstrap-3/blob/master/LICENSE)
*/
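// A minimal usage sketch (hedged: follows the plugin's jQuery pattern as
// registered under NAME below; the element id and options are illustrative):
//
//   $('#datetimepicker1').datetimepicker({ format: 'L', useCurrent: false });
//   $('#datetimepicker1').on('change.datetimepicker', function (e) {
//     console.log(e.date); // a moment object, per Event.CHANGE below
//   });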
if (typeof jQuery === 'undefined') {
throw new Error('Tempus Dominus Bootstrap4 requires jQuery. jQuery must be included before Tempus Dominus Bootstrap4\'s JavaScript.');
}
+function ($) {
var version = $.fn.jquery.split(' ')[0].split('.');
if ((version[0] < 2 && version[1] < 9) || (version[0] === 1 && version[1] === 9 && version[2] < 1) || (version[0] >= 4)) {
throw new Error('Tempus Dominus Bootstrap4 requires at least jQuery v1.9.1 but less than v4.0.0');
}
}(jQuery);
if (typeof moment === 'undefined') {
throw new Error('Tempus Dominus Bootstrap4 requires moment.js. Moment.js must be included before Tempus Dominus Bootstrap4\'s JavaScript.');
}
var version = moment.version.split('.');
if ((version[0] <= 2 && version[1] < 17) || (version[0] >= 3)) {
throw new Error('Tempus Dominus Bootstrap4 requires at least moment.js v2.17.0 but less than v3.0.0');
}
+function () {
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// ReSharper disable once InconsistentNaming
var DateTimePicker = function ($, moment) {
// ReSharper disable InconsistentNaming
var NAME = 'datetimepicker',
VERSION = '5.0.0-alpha7',
DATA_KEY = '' + NAME,
EVENT_KEY = '.' + DATA_KEY,
EMIT_EVENT_KEY = DATA_KEY + '.',
DATA_API_KEY = '.data-api',
Selector = {
DATA_TOGGLE: '[data-toggle="' + DATA_KEY + '"]'
},
ClassName = {
INPUT: NAME + '-input'
},
Event = {
CHANGE: 'change' + EVENT_KEY,
BLUR: 'blur' + EVENT_KEY,
KEYUP: 'keyup' + EVENT_KEY,
KEYDOWN: 'keydown' + EVENT_KEY,
FOCUS: 'focus' + EVENT_KEY,
CLICK_DATA_API: 'click' + EVENT_KEY + DATA_API_KEY,
//emitted
UPDATE: EMIT_EVENT_KEY + 'update',
ERROR: EMIT_EVENT_KEY + 'error',
HIDE: EMIT_EVENT_KEY + 'hide',
SHOW: EMIT_EVENT_KEY + 'show'
},
Default = {
timeZone: '',
format: false,
dayViewHeaderFormat: 'MMMM YYYY',
extraFormats: false,
stepping: 1,
minDate: false,
maxDate: false,
useCurrent: true,
collapse: true,
locale: moment.locale(),
defaultDate: false,
disabledDates: false,
enabledDates: false,
icons: {
time: 'fa fa-clock-o',
date: 'fa fa-calendar',
up: 'fa fa-arrow-up',
down: 'fa fa-arrow-down',
previous: 'fa fa-chevron-left',
next: 'fa fa-chevron-right',
today: 'fa fa-calendar-check-o',
clear: 'fa fa-delete',
close: 'fa fa-times'
},
tooltips: {
today: 'Go to today',
clear: 'Clear selection',
close: 'Close the picker',
selectMonth: 'Select Month',
prevMonth: 'Previous Month',
nextMonth: 'Next Month',
selectYear: 'Select Year',
prevYear: 'Previous Year',
nextYear: 'Next Year',
selectDecade: 'Select Decade',
prevDecade: 'Previous Decade',
nextDecade: 'Next Decade',
prevCentury: 'Previous Century',
nextCentury: 'Next Century',
pickHour: 'Pick Hour',
incrementHour: 'Increment Hour',
decrementHour: 'Decrement Hour',
pickMinute: 'Pick Minute',
incrementMinute: 'Increment Minute',
decrementMinute: 'Decrement Minute',
pickSecond: 'Pick Second',
incrementSecond: 'Increment Second',
decrementSecond: 'Decrement Second',
togglePeriod: 'Toggle Period',
selectTime: 'Select Time',
selectDate: 'Select Date'
},
useStrict: false,
sideBySide: false,
daysOfWeekDisabled: false,
calendarWeeks: false,
viewMode: 'days',
toolbarPlacement: 'default',
buttons: {
showToday: false,
showClear: false,
showClose: false
},
widgetPositioning: {
horizontal: 'auto',
vertical: 'auto'
},
widgetParent: null,
ignoreReadonly: false,
keepOpen: false,
focusOnShow: true,
inline: false,
keepInvalid: false,
keyBinds: {
up: function up() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().subtract(7, 'd'));
} else {
this.date(d.clone().add(this.stepping(), 'm'));
}
return true;
},
down: function down() {
if (!this.widget) {
this.show();
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().add(7, 'd'));
} else {
this.date(d.clone().subtract(this.stepping(), 'm'));
}
return true;
},
'control up': function controlUp() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().subtract(1, 'y'));
} else {
this.date(d.clone().add(1, 'h'));
}
return true;
},
'control down': function controlDown() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().add(1, 'y'));
} else {
this.date(d.clone().subtract(1, 'h'));
}
return true;
},
left: function left() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().subtract(1, 'd'));
}
return true;
},
right: function right() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().add(1, 'd'));
}
return true;
},
pageUp: function pageUp() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().subtract(1, 'M'));
}
return true;
},
pageDown: function pageDown() {
if (!this.widget) {
return false;
}
var d = this._dates[0] || this.getMoment();
if (this.widget.find('.datepicker').is(':visible')) {
this.date(d.clone().add(1, 'M'));
}
return true;
},
enter: function enter() {
this.hide();
return true;
},
escape: function escape() {
if (!this.widget) {
return false;
}
this.hide();
return true;
},
'control space': function controlSpace() {
if (!this.widget) {
return false;
}
if (this.widget.find('.timepicker').is(':visible')) {
this.widget.find('.btn[data-action="togglePeriod"]').click();
}
return true;
},
t: function t() {
this.date(this.getMoment());
return true;
},
'delete': function _delete() {
if (!this.widget) {
return false;
}
this.clear();
return true;
}
},
debug: false,
allowInputToggle: false,
disabledTimeIntervals: false,
disabledHours: false,
enabledHours: false,
viewDate: false,
allowMultidate: false,
multidateSeparator: ','
},
DatePickerModes = [{
CLASS_NAME: 'days',
NAV_FUNCTION: 'M',
NAV_STEP: 1
}, {
CLASS_NAME: 'months',
NAV_FUNCTION: 'y',
NAV_STEP: 1
}, {
CLASS_NAME: 'years',
NAV_FUNCTION: 'y',
NAV_STEP: 10
}, {
CLASS_NAME: 'decades',
NAV_FUNCTION: 'y',
NAV_STEP: 100
}],
KeyMap = {
'up': 38,
38: 'up',
'down': 40,
40: 'down',
'left': 37,
37: 'left',
'right': 39,
39: 'right',
'tab': 9,
9: 'tab',
'escape': 27,
27: 'escape',
'enter': 13,
13: 'enter',
'pageUp': 33,
33: 'pageUp',
'pageDown': 34,
34: 'pageDown',
'shift': 16,
16: 'shift',
'control': 17,
17: 'control',
'space': 32,
32: 'space',
't': 84,
84: 't',
'delete': 46,
46: 'delete'
},
ViewModes = ['times', 'days', 'months', 'years', 'decades'],
keyState = {},
keyPressHandled = {};
var MinViewModeNumber = 0;
// ReSharper restore InconsistentNaming
// ReSharper disable once DeclarationHides
// ReSharper disable once InconsistentNaming
var DateTimePicker = function () {
/** @namespace eData.dateOptions */
/** @namespace moment.tz */
function DateTimePicker(element, options) {
_classCallCheck(this, DateTimePicker);
this._options = this._getOptions(options);
this._element = element;
this._dates = [];
this._datesFormatted = [];
this._viewDate = null;
this.unset = true;
this.component = false;
this.widget = false;
this.use24Hours = null;
this.actualFormat = null;
this.parseFormats = null;
this.currentViewMode = null;
this._int();
}
/**
* @return {string}
*/
//private
DateTimePicker.prototype._int = function _int() {
var targetInput = this._element.data('target-input');
if (this._element.is('input')) {
this.input = this._element;
} else if (targetInput !== undefined) {
if (targetInput === 'nearest') {
this.input = this._element.find('input');
} else {
this.input = $(targetInput);
}
}
this._dates = [];
this._dates[0] = this.getMoment();
this._viewDate = this.getMoment().clone();
$.extend(true, this._options, this._dataToOptions());
this.options(this._options);
this._initFormatting();
if (this.input !== undefined && this.input.is('input') && this.input.val().trim().length !== 0) {
this._setValue(this._parseInputDate(this.input.val().trim()), 0);
} else if (this._options.defaultDate && this.input !== undefined && this.input.attr('placeholder') === undefined) {
this._setValue(this._options.defaultDate, 0);
}
if (this._options.inline) {
this.show();
}
};
DateTimePicker.prototype._update = function _update() {
if (!this.widget) {
return;
}
this._fillDate();
this._fillTime();
};
DateTimePicker.prototype._setValue = function _setValue(targetMoment, index) {
var oldDate = this.unset ? null : this._dates[index];
var outpValue = '';
// case of calling setValue(null or false)
if (!targetMoment) {
if (!this._options.allowMultidate || this._dates.length === 1) {
this.unset = true;
this._dates = [];
this._datesFormatted = [];
} else {
outpValue = this._element.data('date') + ',';
outpValue = outpValue.replace(oldDate.format(this.actualFormat) + ',', '').replace(',,', '').replace(/,\s*$/, '');
this._dates.splice(index, 1);
this._datesFormatted.splice(index, 1);
}
if (this.input !== undefined) {
this.input.val(outpValue);
this.input.trigger('input');
}
this._element.data('date', outpValue);
this._notifyEvent({
type: DateTimePicker.Event.CHANGE,
date: false,
oldDate: oldDate
});
this._update();
return;
}
targetMoment = targetMoment.clone().locale(this._options.locale);
if (this._hasTimeZone()) {
targetMoment.tz(this._options.timeZone);
}
if (this._options.stepping !== 1) {
targetMoment.minutes(Math.round(targetMoment.minutes() / this._options.stepping) * this._options.stepping).seconds(0);
}
if (this._isValid(targetMoment)) {
this._dates[index] = targetMoment;
this._datesFormatted[index] = targetMoment.format('YYYY-MM-DD');
this._viewDate = targetMoment.clone();
if (this._options.allowMultidate && this._dates.length > 1) {
for (var i = 0; i < this._dates.length; i++) {
outpValue += '' + this._dates[i].format(this.actualFormat) + this._options.multidateSeparator;
}
outpValue = outpValue.replace(/,\s*$/, '');
} else {
outpValue = this._dates[index].format(this.actualFormat);
}
if (this.input !== undefined) {
this.input.val(outpValue);
this.input.trigger('input');
}
this._element.data('date', outpValue);
this.unset = false;
this._update();
this._notifyEvent({
type: DateTimePicker.Event.CHANGE,
date: this._dates[index].clone(),
oldDate: oldDate
});
} else {
if (!this._options.keepInvalid) {
if (this.input !== undefined) {
this.input.val('' + (this.unset ? '' : this._dates[index].format(this.actualFormat)));
this.input.trigger('input');
}
} else {
this._notifyEvent({
type: DateTimePicker.Event.CHANGE,
date: targetMoment,
oldDate: oldDate
});
}
this._notifyEvent({
type: DateTimePicker.Event.ERROR,
date: targetMoment,
oldDate: oldDate
});
}
};
DateTimePicker.prototype._change = function _change(e) {
var val = $(e.target).val().trim(),
parsedDate = val ? this._parseInputDate(val) : null;
this._setValue(parsedDate);
e.stopImmediatePropagation();
return false;
};
//noinspection JSMethodCanBeStatic
DateTimePicker.prototype._getOptions = function _getOptions(options) {
options = $.extend(true, {}, Default, options);
return options;
};
DateTimePicker.prototype._hasTimeZone = function _hasTimeZone() {
return moment.tz !== undefined && this._options.timeZone !== undefined && this._options.timeZone !== null && this._options.timeZone !== '';
};
DateTimePicker.prototype._isEnabled = function _isEnabled(granularity) {
if (typeof granularity !== 'string' || granularity.length > 1) {
throw new TypeError('isEnabled expects a single character string parameter');
}
switch (granularity) {
case 'y':
return this.actualFormat.indexOf('Y') !== -1;
case 'M':
return this.actualFormat.indexOf('M') !== -1;
case 'd':
return this.actualFormat.toLowerCase().indexOf('d') !== -1;
case 'h':
case 'H':
return this.actualFormat.toLowerCase().indexOf('h') !== -1;
case 'm':
return this.actualFormat.indexOf('m') !== -1;
case 's':
return this.actualFormat.indexOf('s') !== -1;
default:
return false;
}
};
DateTimePicker.prototype._hasTime = function _hasTime() {
return this._isEnabled('h') || this._isEnabled('m') || this._isEnabled('s');
};
DateTimePicker.prototype._hasDate = function _hasDate() {
return this._isEnabled('y') || this._isEnabled('M') || this._isEnabled('d');
};
DateTimePicker.prototype._dataToOptions = function _dataToOptions() {
var eData = this._element.data();
var dataOptions = {};
if (eData.dateOptions && eData.dateOptions instanceof Object) {
dataOptions = $.extend(true, dataOptions, eData.dateOptions);
}
$.each(this._options, function (key) {
var attributeName = 'date' + key.charAt(0).toUpperCase() + key.slice(1); //todo data api key
if (eData[attributeName] !== undefined) {
dataOptions[key] = eData[attributeName];
} else {
delete dataOptions[key];
}
});
return dataOptions;
};
DateTimePicker.prototype._notifyEvent = function _notifyEvent(e) {
if (e.type === DateTimePicker.Event.CHANGE && e.date && e.date.isSame(e.oldDate) || !e.date && !e.oldDate) {
return;
}
this._element.trigger(e);
};
DateTimePicker.prototype._viewUpdate = function _viewUpdate(e) {
if (e === 'y') {
e = 'YYYY';
}
this._notifyEvent({
type: DateTimePicker.Event.UPDATE,
change: e,
viewDate: this._viewDate.clone()
});
};
DateTimePicker.prototype._showMode = function _showMode(dir) {
if (!this.widget) {
return;
}
if (dir) {
this.currentViewMode = Math.max(MinViewModeNumber, Math.min(3, this.currentViewMode + dir));
}
this.widget.find('.datepicker > div').hide().filter('.datepicker-' + DatePickerModes[this.currentViewMode].CLASS_NAME).show();
};
DateTimePicker.prototype._isInDisabledDates = function _isInDisabledDates(testDate) {
return this._options.disabledDates[testDate.format('YYYY-MM-DD')] === true;
};
DateTimePicker.prototype._isInEnabledDates = function _isInEnabledDates(testDate) {
return this._options.enabledDates[testDate.format('YYYY-MM-DD')] === true;
};
DateTimePicker.prototype._isInDisabledHours = function _isInDisabledHours(testDate) {
return this._options.disabledHours[testDate.format('H')] === true;
};
DateTimePicker.prototype._isInEnabledHours = function _isInEnabledHours(testDate) {
return this._options.enabledHours[testDate.format('H')] === true;
};
DateTimePicker.prototype._isValid = function _isValid(targetMoment, granularity) {
if (!targetMoment.isValid()) {
return false;
}
if (this._options.disabledDates && granularity === 'd' && this._isInDisabledDates(targetMoment)) {
return false;
}
if (this._options.enabledDates && granularity === 'd' && !this._isInEnabledDates(targetMoment)) {
return false;
}
if (this._options.minDate && targetMoment.isBefore(this._options.minDate, granularity)) {
return false;
}
if (this._options.maxDate && targetMoment.isAfter(this._options.maxDate, granularity)) {
return false;
}
if (this._options.daysOfWeekDisabled && granularity === 'd' && this._options.daysOfWeekDisabled.indexOf(targetMoment.day()) !== -1) {
return false;
}
if (this._options.disabledHours && (granularity === 'h' || granularity === 'm' || granularity === 's') && this._isInDisabledHours(targetMoment)) {
return false;
}
if (this._options.enabledHours && (granularity === 'h' || granularity === 'm' || granularity === 's') && !this._isInEnabledHours(targetMoment)) {
return false;
}
if (this._options.disabledTimeIntervals && (granularity === 'h' || granularity === 'm' || granularity === 's')) {
var found = false;
$.each(this._options.disabledTimeIntervals, function () {
if (targetMoment.isBetween(this[0], this[1])) {
found = true;
return false;
}
});
if (found) {
return false;
}
}
return true;
};
DateTimePicker.prototype._parseInputDate = function _parseInputDate(inputDate) {
if (this._options.parseInputDate === undefined) {
if (!moment.isMoment(inputDate)) {
inputDate = this.getMoment(inputDate);
}
} else {
inputDate = this._options.parseInputDate(inputDate);
}
//inputDate.locale(this.options.locale);
return inputDate;
};
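// Resolves multi-key bindings: every currently pressed key is tracked in
// keyState ('p' = pressed), so a chord such as 'control up' only matches
// when all of its modifier keys are still held down as the final key arrives.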
DateTimePicker.prototype._keydown = function _keydown(e) {
var handler = null,
index = void 0,
index2 = void 0,
keyBindKeys = void 0,
allModifiersPressed = void 0;
var pressedKeys = [],
pressedModifiers = {},
currentKey = e.which,
pressed = 'p';
keyState[currentKey] = pressed;
for (index in keyState) {
if (keyState.hasOwnProperty(index) && keyState[index] === pressed) {
pressedKeys.push(index);
if (parseInt(index, 10) !== currentKey) {
pressedModifiers[index] = true;
}
}
}
for (index in this._options.keyBinds) {
if (this._options.keyBinds.hasOwnProperty(index) && typeof this._options.keyBinds[index] === 'function') {
keyBindKeys = index.split(' ');
if (keyBindKeys.length === pressedKeys.length && KeyMap[currentKey] === keyBindKeys[keyBindKeys.length - 1]) {
allModifiersPressed = true;
for (index2 = keyBindKeys.length - 2; index2 >= 0; index2--) {
if (!(KeyMap[keyBindKeys[index2]] in pressedModifiers)) {
allModifiersPressed = false;
break;
}
}
if (allModifiersPressed) {
handler = this._options.keyBinds[index];
break;
}
}
}
}
if (handler) {
if (handler.call(this.widget)) {
e.stopPropagation();
e.preventDefault();
}
}
};
//noinspection JSMethodCanBeStatic,SpellCheckingInspection
DateTimePicker.prototype._keyup = function _keyup(e) {
keyState[e.which] = 'r';
if (keyPressHandled[e.which]) {
keyPressHandled[e.which] = false;
e.stopPropagation();
e.preventDefault();
}
};
DateTimePicker.prototype._indexGivenDates = function _indexGivenDates(givenDatesArray) {
// Store given enabledDates and disabledDates as keys.
// This way we can check their existence in O(1) time instead of looping through whole array.
// (for example: options.enabledDates['2014-02-27'] === true)
var givenDatesIndexed = {},
self = this;
$.each(givenDatesArray, function () {
var dDate = self._parseInputDate(this);
if (dDate.isValid()) {
givenDatesIndexed[dDate.format('YYYY-MM-DD')] = true;
}
});
return Object.keys(givenDatesIndexed).length ? givenDatesIndexed : false;
};
DateTimePicker.prototype._indexGivenHours = function _indexGivenHours(givenHoursArray) {
// Store given enabledHours and disabledHours as keys.
// This way we can check their existence in O(1) time instead of looping through whole array.
// (for example: options.enabledHours['2014-02-27'] === true)
var givenHoursIndexed = {};
$.each(givenHoursArray, function () {
givenHoursIndexed[this] = true;
});
return Object.keys(givenHoursIndexed).length ? givenHoursIndexed : false;
};
DateTimePicker.prototype._initFormatting = function _initFormatting() {
var format = this._options.format || 'L LT',
self = this;
this.actualFormat = format.replace(/(\[[^\[]*])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g, function (formatInput) {
return self._dates[0].localeData().longDateFormat(formatInput) || formatInput; //todo taking the first date should be ok
});
this.parseFormats = this._options.extraFormats ? this._options.extraFormats.slice() : [];
if (this.parseFormats.indexOf(format) < 0 && this.parseFormats.indexOf(this.actualFormat) < 0) {
this.parseFormats.push(this.actualFormat);
}
this.use24Hours = this.actualFormat.toLowerCase().indexOf('a') < 1 && this.actualFormat.replace(/\[.*?]/g, '').indexOf('h') < 1;
if (this._isEnabled('y')) {
MinViewModeNumber = 2;
}
if (this._isEnabled('M')) {
MinViewModeNumber = 1;
}
if (this._isEnabled('d')) {
MinViewModeNumber = 0;
}
this.currentViewMode = Math.max(MinViewModeNumber, this.currentViewMode);
if (!this.unset) {
this._setValue(this._dates[0], 0);
}
};
DateTimePicker.prototype._getLastPickedDate = function _getLastPickedDate() {
return this._dates[this._getLastPickedDateIndex()];
};
DateTimePicker.prototype._getLastPickedDateIndex = function _getLastPickedDateIndex() {
return this._dates.length - 1;
};
//public
DateTimePicker.prototype.getMoment = function getMoment(d) {
var returnMoment = void 0;
if (d === undefined || d === null) {
returnMoment = moment(); //TODO should this use format? and locale?
} else if (this._hasTimeZone()) {
// There is a string to parse and a default time zone
// parse with the tz function which takes a default time zone if it is not in the format string
returnMoment = moment.tz(d, this.parseFormats, this._options.useStrict, this._options.timeZone);
} else {
returnMoment = moment(d, this.parseFormats, this._options.useStrict);
}
if (this._hasTimeZone()) {
returnMoment.tz(this._options.timeZone);
}
return returnMoment;
};
DateTimePicker.prototype.toggle = function toggle() {
return this.widget ? this.hide() : this.show();
};
DateTimePicker.prototype.ignoreReadonly = function ignoreReadonly(_ignoreReadonly) {
if (arguments.length === 0) {
return this._options.ignoreReadonly;
}
if (typeof _ignoreReadonly !== 'boolean') {
throw new TypeError('ignoreReadonly () expects a boolean parameter');
}
this._options.ignoreReadonly = _ignoreReadonly;
};
DateTimePicker.prototype.options = function options(newOptions) {
if (arguments.length === 0) {
return $.extend(true, {}, this._options);
}
if (!(newOptions instanceof Object)) {
throw new TypeError('options() this.options parameter should be an object');
}
$.extend(true, this._options, newOptions);
var self = this;
$.each(this._options, function (key, value) {
if (self[key] !== undefined) {
self[key](value);
}
});
};
DateTimePicker.prototype.date = function date(newDate, index) {
index = index || 0;
if (arguments.length === 0) {
if (this.unset) {
return null;
}
if (this._options.allowMultidate) {
return this._dates.join(this._options.multidateSeparator);
} else {
return this._dates[index].clone();
}
}
if (newDate !== null && typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) {
throw new TypeError('date() parameter must be one of [null, string, moment or Date]');
}
this._setValue(newDate === null ? null : this._parseInputDate(newDate), index);
};
DateTimePicker.prototype.format = function format(newFormat) {
///<summary>Gets or sets the moment.js format string used to display and parse dates.</summary>
///<param name="newFormat">A moment.js format string, or boolean:false to fall back to the locale's default format.</param>
///<returns type="string|boolean">The current format option when called with no arguments.</returns>
if (arguments.length === 0) {
return this._options.format;
}
if (typeof newFormat !== 'string' && (typeof newFormat !== 'boolean' || newFormat !== false)) {
throw new TypeError('format() expects a string or boolean:false parameter ' + newFormat);
}
this._options.format = newFormat;
if (this.actualFormat) {
this._initFormatting(); // reinitialize formatting
}
};
DateTimePicker.prototype.timeZone = function timeZone(newZone) {
if (arguments.length === 0) {
return this._options.timeZone;
}
if (typeof newZone !== 'string') {
throw new TypeError('newZone() expects a string parameter');
}
this._options.timeZone = newZone;
};
DateTimePicker.prototype.dayViewHeaderFormat = function dayViewHeaderFormat(newFormat) {
if (arguments.length === 0) {
return this._options.dayViewHeaderFormat;
}
if (typeof newFormat !== 'string') {
throw new TypeError('dayViewHeaderFormat() expects a string parameter');
}
this._options.dayViewHeaderFormat = newFormat;
};
DateTimePicker.prototype.extraFormats = function extraFormats(formats) {
if (arguments.length === 0) {
return this._options.extraFormats;
}
if (formats !== false && !(formats instanceof Array)) {
throw new TypeError('extraFormats() expects an array or false parameter');
}
this._options.extraFormats = formats;
if (this.parseFormats) {
this._initFormatting(); // reinit formatting
}
};
DateTimePicker.prototype.disabledDates = function disabledDates(dates) {
if (arguments.length === 0) {
return this._options.disabledDates ? $.extend({}, this._options.disabledDates) : this._options.disabledDates;
}
if (!dates) {
this._options.disabledDates = false;
this._update();
return true;
}
if (!(dates instanceof Array)) {
throw new TypeError('disabledDates() expects an array parameter');
}
this._options.disabledDates = this._indexGivenDates(dates);
this._options.enabledDates = false;
this._update();
};
DateTimePicker.prototype.enabledDates = function enabledDates(dates) {
if (arguments.length === 0) {
return this._options.enabledDates ? $.extend({}, this._options.enabledDates) : this._options.enabledDates;
}
if (!dates) {
this._options.enabledDates = false;
this._update();
return true;
}
if (!(dates instanceof Array)) {
throw new TypeError('enabledDates() expects an array parameter');
}
this._options.enabledDates = this._indexGivenDates(dates);
this._options.disabledDates = false;
this._update();
};
DateTimePicker.prototype.daysOfWeekDisabled = function daysOfWeekDisabled(_daysOfWeekDisabled) {
if (arguments.length === 0) {
return this._options.daysOfWeekDisabled.splice(0);
}
if (typeof _daysOfWeekDisabled === 'boolean' && !_daysOfWeekDisabled) {
this._options.daysOfWeekDisabled = false;
this._update();
return true;
}
if (!(_daysOfWeekDisabled instanceof Array)) {
throw new TypeError('daysOfWeekDisabled() expects an array parameter');<|fim▁hole|> return previousValue;
}
if (previousValue.indexOf(currentValue) === -1) {
previousValue.push(currentValue);
}
return previousValue;
}, []).sort();
if (this._options.useCurrent && !this._options.keepInvalid) {
for (var i = 0; i < this._dates.length; i++) {
var tries = 0;
while (!this._isValid(this._dates[i], 'd')) {
this._dates[i].add(1, 'd');
if (tries === 31) {
throw new Error('Tried 31 times to find a valid date');
}
tries++;
}
this._setValue(this._dates[i], i);
}
}
this._update();
};
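// Example (editorial illustration): disable weekend selection. Indices
// follow moment's day() numbering, 0 = Sunday through 6 = Saturday;
// out-of-range or non-numeric entries are dropped by the reduce() above.
// Assumes a picker instance `picker`:
//
//   picker.daysOfWeekDisabled([0, 6]);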
DateTimePicker.prototype.maxDate = function maxDate(_maxDate) {
if (arguments.length === 0) {
return this._options.maxDate ? this._options.maxDate.clone() : this._options.maxDate;
}
if (typeof _maxDate === 'boolean' && _maxDate === false) {
this._options.maxDate = false;
this._update();
return true;
}
if (typeof _maxDate === 'string') {
if (_maxDate === 'now' || _maxDate === 'moment') {
_maxDate = this.getMoment();
}
}
var parsedDate = this._parseInputDate(_maxDate);
if (!parsedDate.isValid()) {
throw new TypeError('maxDate() Could not parse date parameter: ' + _maxDate);
}
if (this._options.minDate && parsedDate.isBefore(this._options.minDate)) {
throw new TypeError('maxDate() date parameter is before this._options.minDate: ' + parsedDate.format(this.actualFormat));
}
this._options.maxDate = parsedDate;
for (var i = 0; i < this._dates.length; i++) {
if (this._options.useCurrent && !this._options.keepInvalid && this._dates[i].isAfter(_maxDate)) {
this._setValue(this._options.maxDate, i);
}
}
if (this._viewDate.isAfter(parsedDate)) {
this._viewDate = parsedDate.clone().subtract(this._options.stepping, 'm');
}
this._update();
};
DateTimePicker.prototype.minDate = function minDate(_minDate) {
if (arguments.length === 0) {
return this._options.minDate ? this._options.minDate.clone() : this._options.minDate;
}
if (typeof _minDate === 'boolean' && _minDate === false) {
this._options.minDate = false;
this._update();
return true;
}
if (typeof _minDate === 'string') {
if (_minDate === 'now' || _minDate === 'moment') {
_minDate = this.getMoment();
}
}
var parsedDate = this._parseInputDate(_minDate);
if (!parsedDate.isValid()) {
throw new TypeError('minDate() Could not parse date parameter: ' + _minDate);
}
if (this._options.maxDate && parsedDate.isAfter(this._options.maxDate)) {
throw new TypeError('minDate() date parameter is after this._options.maxDate: ' + parsedDate.format(this.actualFormat));
}
this._options.minDate = parsedDate;
for (var i = 0; i < this._dates.length; i++) {
if (this._options.useCurrent && !this._options.keepInvalid && this._dates[i].isBefore(_minDate)) {
this._setValue(this._options.minDate, i);
}
}
if (this._viewDate.isBefore(parsedDate)) {
this._viewDate = parsedDate.clone().add(this._options.stepping, 'm');
}
this._update();
};
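// Example (editorial illustration): constrain the selectable range.
// 'now' (or 'moment') is the shortcut handled by the string branch in
// both setters above. Assumes a picker instance `picker`:
//
//   picker.minDate('now');
//   picker.maxDate(moment().add(30, 'd')); // throws if it lands before minDate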
DateTimePicker.prototype.defaultDate = function defaultDate(_defaultDate) {
if (arguments.length === 0) {
return this._options.defaultDate ? this._options.defaultDate.clone() : this._options.defaultDate;
}
if (!_defaultDate) {
this._options.defaultDate = false;
return true;
}
if (typeof _defaultDate === 'string') {
if (_defaultDate === 'now' || _defaultDate === 'moment') {
_defaultDate = this.getMoment();
} else {
_defaultDate = this.getMoment(_defaultDate);
}
}
var parsedDate = this._parseInputDate(_defaultDate);
if (!parsedDate.isValid()) {
throw new TypeError('defaultDate() Could not parse date parameter: ' + _defaultDate);
}
if (!this._isValid(parsedDate)) {
throw new TypeError('defaultDate() date passed is invalid according to component setup validations');
}
this._options.defaultDate = parsedDate;
if (this._options.defaultDate && this._options.inline || this.input !== undefined && this.input.val().trim() === '') {
this._setValue(this._options.defaultDate, 0);
}
};
DateTimePicker.prototype.locale = function locale(_locale) {
if (arguments.length === 0) {
return this._options.locale;
}
if (!moment.localeData(_locale)) {
throw new TypeError('locale() locale ' + _locale + ' is not loaded from moment locales!');
}
this._options.locale = _locale; // persist the new locale before reapplying it below
for (var i = 0; i < this._dates.length; i++) {
this._dates[i].locale(this._options.locale);
}
this._viewDate.locale(this._options.locale);
if (this.actualFormat) {
this._initFormatting(); // reinitialize formatting
}
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.stepping = function stepping(_stepping) {
if (arguments.length === 0) {
return this._options.stepping;
}
_stepping = parseInt(_stepping, 10);
if (isNaN(_stepping) || _stepping < 1) {
_stepping = 1;
}
this._options.stepping = _stepping;
};
DateTimePicker.prototype.useCurrent = function useCurrent(_useCurrent) {
var useCurrentOptions = ['year', 'month', 'day', 'hour', 'minute'];
if (arguments.length === 0) {
return this._options.useCurrent;
}
if (typeof _useCurrent !== 'boolean' && typeof _useCurrent !== 'string') {
throw new TypeError('useCurrent() expects a boolean or string parameter');
}
if (typeof _useCurrent === 'string' && useCurrentOptions.indexOf(_useCurrent.toLowerCase()) === -1) {
throw new TypeError('useCurrent() expects a string parameter of ' + useCurrentOptions.join(', '));
}
this._options.useCurrent = _useCurrent;
};
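// Example (editorial illustration): useCurrent accepts a boolean or one
// of the granularity strings in useCurrentOptions above. Assumes a
// picker instance `picker`:
//
//   picker.useCurrent('day'); // seed an empty picker with today at 00:00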
DateTimePicker.prototype.collapse = function collapse(_collapse) {
if (arguments.length === 0) {
return this._options.collapse;
}
if (typeof _collapse !== 'boolean') {
throw new TypeError('collapse() expects a boolean parameter');
}
if (this._options.collapse === _collapse) {
return true;
}
this._options.collapse = _collapse;
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.icons = function icons(_icons) {
if (arguments.length === 0) {
return $.extend({}, this._options.icons);
}
if (!(_icons instanceof Object)) {
throw new TypeError('icons() expects parameter to be an Object');
}
$.extend(this._options.icons, _icons);
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.tooltips = function tooltips(_tooltips) {
if (arguments.length === 0) {
return $.extend({}, this._options.tooltips);
}
if (!(_tooltips instanceof Object)) {
throw new TypeError('tooltips() expects parameter to be an Object');
}
$.extend(this._options.tooltips, _tooltips);
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.useStrict = function useStrict(_useStrict) {
if (arguments.length === 0) {
return this._options.useStrict;
}
if (typeof _useStrict !== 'boolean') {
throw new TypeError('useStrict() expects a boolean parameter');
}
this._options.useStrict = _useStrict;
};
DateTimePicker.prototype.sideBySide = function sideBySide(_sideBySide) {
if (arguments.length === 0) {
return this._options.sideBySide;
}
if (typeof _sideBySide !== 'boolean') {
throw new TypeError('sideBySide() expects a boolean parameter');
}
this._options.sideBySide = _sideBySide;
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.viewMode = function viewMode(_viewMode) {
if (arguments.length === 0) {
return this._options.viewMode;
}
if (typeof _viewMode !== 'string') {
throw new TypeError('viewMode() expects a string parameter');
}
if (DateTimePicker.ViewModes.indexOf(_viewMode) === -1) {
throw new TypeError('viewMode() parameter must be one of (' + DateTimePicker.ViewModes.join(', ') + ')');
}
this._options.viewMode = _viewMode;
this.currentViewMode = Math.max(DateTimePicker.ViewModes.indexOf(_viewMode) - 1, DateTimePicker.MinViewModeNumber);
this._showMode();
};
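// Example (editorial illustration): in the upstream library the
// ViewModes list (defined elsewhere in this file) contains values such
// as 'days', 'months', 'years' and 'decades'. Assumes a picker
// instance `picker`:
//
//   picker.viewMode('years'); // open on the year grid first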
DateTimePicker.prototype.calendarWeeks = function calendarWeeks(_calendarWeeks) {
if (arguments.length === 0) {
return this._options.calendarWeeks;
}
if (typeof _calendarWeeks !== 'boolean') {
throw new TypeError('calendarWeeks() expects parameter to be a boolean value');
}
this._options.calendarWeeks = _calendarWeeks;
this._update();
};
DateTimePicker.prototype.buttons = function buttons(_buttons) {
if (arguments.length === 0) {
return $.extend({}, this._options.buttons);
}
if (!(_buttons instanceof Object)) {
throw new TypeError('buttons() expects parameter to be an Object');
}
$.extend(this._options.buttons, _buttons);
if (typeof this._options.buttons.showToday !== 'boolean') {
throw new TypeError('buttons.showToday expects a boolean parameter');
}
if (typeof this._options.buttons.showClear !== 'boolean') {
throw new TypeError('buttons.showClear expects a boolean parameter');
}
if (typeof this._options.buttons.showClose !== 'boolean') {
throw new TypeError('buttons.showClose expects a boolean parameter');
}
if (this.widget) {
this.hide();
this.show();
}
};
DateTimePicker.prototype.keepOpen = function keepOpen(_keepOpen) {
if (arguments.length === 0) {
return this._options.keepOpen;
}
if (typeof _keepOpen !== 'boolean') {
throw new TypeError('keepOpen() expects a boolean parameter');
}
this._options.keepOpen = _keepOpen;
};
DateTimePicker.prototype.focusOnShow = function focusOnShow(_focusOnShow) {
if (arguments.length === 0) {
return this._options.focusOnShow;
}
if (typeof _focusOnShow !== 'boolean') {
throw new TypeError('focusOnShow() expects a boolean parameter');
}
this._options.focusOnShow = _focusOnShow;
};
DateTimePicker.prototype.inline = function inline(_inline) {
if (arguments.length === 0) {
return this._options.inline;
}
if (typeof _inline !== 'boolean') {
throw new TypeError('inline() expects a boolean parameter');
}
this._options.inline = _inline;
};
DateTimePicker.prototype.clear = function clear() {
this._setValue(null); // passing null clears the current selection
};
DateTimePicker.prototype.keyBinds = function keyBinds(_keyBinds) {
if (arguments.length === 0) {
return this._options.keyBinds;
}
this._options.keyBinds = _keyBinds;
};
DateTimePicker.prototype.debug = function debug(_debug) {
if (typeof _debug !== 'boolean') {
throw new TypeError('debug() expects a boolean parameter');
}
this._options.debug = _debug;
};
DateTimePicker.prototype.allowInputToggle = function allowInputToggle(_allowInputToggle) {
if (arguments.length === 0) {
return this._options.allowInputToggle;
}
if (typeof _allowInputToggle !== 'boolean') {
throw new TypeError('allowInputToggle() expects a boolean parameter');
}
this._options.allowInputToggle = _allowInputToggle;
};
DateTimePicker.prototype.keepInvalid = function keepInvalid(_keepInvalid) {
if (arguments.length === 0) {
return this._options.keepInvalid;
}
if (typeof _keepInvalid !== 'boolean') {
throw new TypeError('keepInvalid() expects a boolean parameter');
}
this._options.keepInvalid = _keepInvalid;
};
DateTimePicker.prototype.datepickerInput = function datepickerInput(_datepickerInput) {
if (arguments.length === 0) {
return this._options.datepickerInput;
}
if (typeof _datepickerInput !== 'string') {
throw new TypeError('datepickerInput() expects a string parameter');
}
this._options.datepickerInput = _datepickerInput;
};
DateTimePicker.prototype.parseInputDate = function parseInputDate(_parseInputDate2) {
if (arguments.length === 0) {
return this._options.parseInputDate;
}
if (typeof _parseInputDate2 !== 'function') {
throw new TypeError('parseInputDate() expects a function parameter');
}
this._options.parseInputDate = _parseInputDate2;
};
DateTimePicker.prototype.disabledTimeIntervals = function disabledTimeIntervals(_disabledTimeIntervals) {
if (arguments.length === 0) {
return this._options.disabledTimeIntervals ? $.extend({}, this._options.disabledTimeIntervals) : this._options.disabledTimeIntervals;
}
if (!_disabledTimeIntervals) {
this._options.disabledTimeIntervals = false;
this._update();
return true;
}
if (!(_disabledTimeIntervals instanceof Array)) {
throw new TypeError('disabledTimeIntervals() expects an array parameter');
}
this._options.disabledTimeIntervals = _disabledTimeIntervals;
this._update();
};
DateTimePicker.prototype.disabledHours = function disabledHours(hours) {
if (arguments.length === 0) {
return this._options.disabledHours ? $.extend({}, this._options.disabledHours) : this._options.disabledHours;
}
if (!hours) {
this._options.disabledHours = false;
this._update();
return true;
}
if (!(hours instanceof Array)) {
throw new TypeError('disabledHours() expects an array parameter');
}
this._options.disabledHours = this._indexGivenHours(hours);
this._options.enabledHours = false;
if (this._options.useCurrent && !this._options.keepInvalid) {
for (var i = 0; i < this._dates.length; i++) {
var tries = 0;
while (!this._isValid(this._dates[i], 'h')) {
this._dates[i].add(1, 'h');
if (tries === 24) {
throw new Error('Tried 24 times to find a valid date');
}
tries++;
}
this._setValue(this._dates[i], i);
}
}
this._update();
};
DateTimePicker.prototype.enabledHours = function enabledHours(hours) {
if (arguments.length === 0) {
return this._options.enabledHours ? $.extend({}, this._options.enabledHours) : this._options.enabledHours;
}
if (!hours) {
this._options.enabledHours = false;
this._update();
return true;
}
if (!(hours instanceof Array)) {
throw new TypeError('enabledHours() expects an array parameter');
}
this._options.enabledHours = this._indexGivenHours(hours);
this._options.disabledHours = false;
if (this._options.useCurrent && !this._options.keepInvalid) {
for (var i = 0; i < this._dates.length; i++) {
var tries = 0;
while (!this._isValid(this._dates[i], 'h')) {
this._dates[i].add(1, 'h');
if (tries === 24) {
throw new Error('Tried 24 times to find a valid date');
}
tries++;
}
this._setValue(this._dates[i], i);
}
}
this._update();
};
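// Example (editorial illustration): limit the time picker to business
// hours. As with the date variants, enabledHours() and disabledHours()
// reset one another. Assumes a picker instance `picker`:
//
//   picker.enabledHours([9, 10, 11, 12, 13, 14, 15, 16, 17]);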
DateTimePicker.prototype.viewDate = function viewDate(newDate) {
if (arguments.length === 0) {
return this._viewDate.clone();
}
if (!newDate) {
this._viewDate = (this._dates[0] || this.getMoment()).clone();
return true;
}
if (typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) {
throw new TypeError('viewDate() parameter must be one of [string, moment or Date]');
}
this._viewDate = this._parseInputDate(newDate);
this._viewUpdate();
};
DateTimePicker.prototype.allowMultidate = function allowMultidate(_allowMultidate) {
if (typeof _allowMultidate !== 'boolean') {
throw new TypeError('allowMultidate() expects a boolean parameter');
}
this._options.allowMultidate = _allowMultidate;
};
DateTimePicker.prototype.multidateSeparator = function multidateSeparator(_multidateSeparator) {
if (arguments.length === 0) {
return this._options.multidateSeparator;
}
if (typeof _multidateSeparator !== 'string' || _multidateSeparator.length > 1) {
throw new TypeError('multidateSeparator() expects a single-character string parameter');
}
this._options.multidateSeparator = _multidateSeparator;
};
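// Example (editorial illustration): multi-date selection joined by a
// custom separator. Assumes a picker instance `picker`:
//
//   picker.allowMultidate(true);
//   picker.multidateSeparator(';');
//   picker.date(); // the selected dates joined by ';' (see date() above)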
_createClass(DateTimePicker, null, [{
key: 'NAME',
get: function get() {
return NAME;
}
/**
* @return {string}
*/
}, {
key: 'VERSION',
get: function get() {
return VERSION;
}
/**
* @return {string}
*/
}, {
key: 'DATA_KEY',
get: function get() {
return DATA_KEY;
}
/**
* @return {string}
*/
}, {
key: 'EVENT_KEY',
get: function get() {
return EVENT_KEY;
}
/**
* @return {string}
*/
}, {
key: 'DATA_API_KEY',
get: function get() {
return DATA_API_KEY;
}
}, {
key: 'DatePickerModes',
get: function get() {
return DatePickerModes;
}
}, {
key: 'ViewModes',
get: function get() {
return ViewModes;
}
/**
* @return {number}
*/
}, {
key: 'MinViewModeNumber',
get: function get() {
return MinViewModeNumber;
}
}, {
key: 'Event',
get: function get() {
return Event;
}
}, {
key: 'Selector',
get: function get() {
return Selector;
}
}, {
key: 'Default',
get: function get() {
return Default;
}
}, {
key: 'ClassName',
get: function get() {
return ClassName;
}
}]);
return DateTimePicker;
}();
return DateTimePicker;
}(jQuery, moment);
//noinspection JSUnusedGlobalSymbols
/* global DateTimePicker */
var TempusDominusBootstrap4 = function ($) {
// eslint-disable-line no-unused-vars
// ReSharper disable once InconsistentNaming
var JQUERY_NO_CONFLICT = $.fn[DateTimePicker.NAME],
verticalModes = ['top', 'bottom', 'auto'],
horizontalModes = ['left', 'right', 'auto'],
toolbarPlacements = ['default', 'top', 'bottom'],
getSelectorFromElement = function getSelectorFromElement($element) {
var selector = $element.data('target'),
$selector = void 0;
if (!selector) {
selector = $element.attr('href') || '';
selector = /^#[a-z]/i.test(selector) ? selector : null;
}
$selector = $(selector);
if ($selector.length === 0) {
return $selector;
}
if (!$selector.data(DateTimePicker.DATA_KEY)) {
$.extend({}, $selector.data(), $(this).data());
}
return $selector;
};
// ReSharper disable once InconsistentNaming
var TempusDominusBootstrap4 = function (_DateTimePicker) {
_inherits(TempusDominusBootstrap4, _DateTimePicker);
function TempusDominusBootstrap4(element, options) {
_classCallCheck(this, TempusDominusBootstrap4);
var _this = _possibleConstructorReturn(this, _DateTimePicker.call(this, element, options));
_this._init();
return _this;
}
TempusDominusBootstrap4.prototype._init = function _init() {
if (this._element.hasClass('input-group')) {
// in case there is more than one 'input-group-addon' Issue #48
var datepickerButton = this._element.find('.datepickerbutton');
if (datepickerButton.length === 0) {
this.component = this._element.find('.input-group-addon');
} else {
this.component = datepickerButton;
}
}
};
TempusDominusBootstrap4.prototype._getDatePickerTemplate = function _getDatePickerTemplate() {
var headTemplate = $('<thead>').append($('<tr>').append($('<th>').addClass('prev').attr('data-action', 'previous').append($('<span>').addClass(this._options.icons.previous))).append($('<th>').addClass('picker-switch').attr('data-action', 'pickerSwitch').attr('colspan', '' + (this._options.calendarWeeks ? '6' : '5'))).append($('<th>').addClass('next').attr('data-action', 'next').append($('<span>').addClass(this._options.icons.next)))),
contTemplate = $('<tbody>').append($('<tr>').append($('<td>').attr('colspan', '' + (this._options.calendarWeeks ? '8' : '7'))));
return [$('<div>').addClass('datepicker-days').append($('<table>').addClass('table table-sm').append(headTemplate).append($('<tbody>'))), $('<div>').addClass('datepicker-months').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone())), $('<div>').addClass('datepicker-years').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone())), $('<div>').addClass('datepicker-decades').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone()))];
};
TempusDominusBootstrap4.prototype._getTimePickerMainTemplate = function _getTimePickerMainTemplate() {
var topRow = $('<tr>'),
middleRow = $('<tr>'),
bottomRow = $('<tr>');
if (this._isEnabled('h')) {
topRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.incrementHour
}).addClass('btn').attr('data-action', 'incrementHours').append($('<span>').addClass(this._options.icons.up))));
middleRow.append($('<td>').append($('<span>').addClass('timepicker-hour').attr({
'data-time-component': 'hours',
'title': this._options.tooltips.pickHour
}).attr('data-action', 'showHours')));
bottomRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.decrementHour
}).addClass('btn').attr('data-action', 'decrementHours').append($('<span>').addClass(this._options.icons.down))));
}
if (this._isEnabled('m')) {
if (this._isEnabled('h')) {
topRow.append($('<td>').addClass('separator'));
middleRow.append($('<td>').addClass('separator').html(':'));
bottomRow.append($('<td>').addClass('separator'));
}
topRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.incrementMinute
}).addClass('btn').attr('data-action', 'incrementMinutes').append($('<span>').addClass(this._options.icons.up))));
middleRow.append($('<td>').append($('<span>').addClass('timepicker-minute').attr({
'data-time-component': 'minutes',
'title': this._options.tooltips.pickMinute
}).attr('data-action', 'showMinutes')));
bottomRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.decrementMinute
}).addClass('btn').attr('data-action', 'decrementMinutes').append($('<span>').addClass(this._options.icons.down))));
}
if (this._isEnabled('s')) {
if (this._isEnabled('m')) {
topRow.append($('<td>').addClass('separator'));
middleRow.append($('<td>').addClass('separator').html(':'));
bottomRow.append($('<td>').addClass('separator'));
}
topRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.incrementSecond
}).addClass('btn').attr('data-action', 'incrementSeconds').append($('<span>').addClass(this._options.icons.up))));
middleRow.append($('<td>').append($('<span>').addClass('timepicker-second').attr({
'data-time-component': 'seconds',
'title': this._options.tooltips.pickSecond
}).attr('data-action', 'showSeconds')));
bottomRow.append($('<td>').append($('<a>').attr({
href: '#',
tabindex: '-1',
'title': this._options.tooltips.decrementSecond
}).addClass('btn').attr('data-action', 'decrementSeconds').append($('<span>').addClass(this._options.icons.down))));
}
if (!this.use24Hours) {
topRow.append($('<td>').addClass('separator'));
middleRow.append($('<td>').append($('<button>').addClass('btn btn-primary').attr({
'data-action': 'togglePeriod',
tabindex: '-1',
'title': this._options.tooltips.togglePeriod
})));
bottomRow.append($('<td>').addClass('separator'));
}
return $('<div>').addClass('timepicker-picker').append($('<table>').addClass('table-condensed').append([topRow, middleRow, bottomRow]));
};
TempusDominusBootstrap4.prototype._getTimePickerTemplate = function _getTimePickerTemplate() {
var hoursView = $('<div>').addClass('timepicker-hours').append($('<table>').addClass('table-condensed')),
minutesView = $('<div>').addClass('timepicker-minutes').append($('<table>').addClass('table-condensed')),
secondsView = $('<div>').addClass('timepicker-seconds').append($('<table>').addClass('table-condensed')),
ret = [this._getTimePickerMainTemplate()];
if (this._isEnabled('h')) {
ret.push(hoursView);
}
if (this._isEnabled('m')) {
ret.push(minutesView);
}
if (this._isEnabled('s')) {
ret.push(secondsView);
}
return ret;
};
TempusDominusBootstrap4.prototype._getToolbar = function _getToolbar() {
var row = [];
if (this._options.buttons.showToday) {
row.push($('<td>').append($('<a>').attr({
'data-action': 'today',
'title': this._options.tooltips.today
}).append($('<span>').addClass(this._options.icons.today))));
}
if (!this._options.sideBySide && this._hasDate() && this._hasTime()) {
row.push($('<td>').append($('<a>').attr({
'data-action': 'togglePicker',
'title': this._options.tooltips.selectTime
}).append($('<span>').addClass(this._options.icons.time))));
}
if (this._options.buttons.showClear) {
row.push($('<td>').append($('<a>').attr({
'data-action': 'clear',
'title': this._options.tooltips.clear
}).append($('<span>').addClass(this._options.icons.clear))));
}
if (this._options.buttons.showClose) {
row.push($('<td>').append($('<a>').attr({
'data-action': 'close',
'title': this._options.tooltips.close
}).append($('<span>').addClass(this._options.icons.close))));
}
return row.length === 0 ? '' : $('<table>').addClass('table-condensed').append($('<tbody>').append($('<tr>').append(row)));
};
TempusDominusBootstrap4.prototype._getTemplate = function _getTemplate() {
var template = $('<div>').addClass('bootstrap-datetimepicker-widget dropdown-menu'),
dateView = $('<div>').addClass('datepicker').append(this._getDatePickerTemplate()),
timeView = $('<div>').addClass('timepicker').append(this._getTimePickerTemplate()),
content = $('<ul>').addClass('list-unstyled'),
toolbar = $('<li>').addClass('picker-switch' + (this._options.collapse ? ' accordion-toggle' : '')).append(this._getToolbar());
if (this._options.inline) {
template.removeClass('dropdown-menu');
}
if (this.use24Hours) {
template.addClass('usetwentyfour');
}
if (this._isEnabled('s') && !this.use24Hours) {
template.addClass('wider');
}
if (this._options.sideBySide && this._hasDate() && this._hasTime()) {
template.addClass('timepicker-sbs');
if (this._options.toolbarPlacement === 'top') {
template.append(toolbar);
}
template.append($('<div>').addClass('row').append(dateView.addClass('col-md-6')).append(timeView.addClass('col-md-6')));
if (this._options.toolbarPlacement === 'bottom' || this._options.toolbarPlacement === 'default') {
template.append(toolbar);
}
return template;
}
if (this._options.toolbarPlacement === 'top') {
content.append(toolbar);
}
if (this._hasDate()) {
content.append($('<li>').addClass(this._options.collapse && this._hasTime() ? 'collapse' : '').addClass(this._options.collapse && this._hasTime() && this._options.viewMode === 'time' ? '' : 'show').append(dateView));
}
if (this._options.toolbarPlacement === 'default') {
content.append(toolbar);
}
if (this._hasTime()) {
content.append($('<li>').addClass(this._options.collapse && this._hasDate() ? 'collapse' : '').addClass(this._options.collapse && this._hasDate() && this._options.viewMode === 'time' ? 'show' : '').append(timeView));
}
if (this._options.toolbarPlacement === 'bottom') {
content.append(toolbar);
}
return template.append(content);
};
TempusDominusBootstrap4.prototype._place = function _place(e) {
var self = e && e.data && e.data.picker || this,
vertical = self._options.widgetPositioning.vertical,
horizontal = self._options.widgetPositioning.horizontal,
parent = void 0;
var position = (self.component || self._element).position(),
offset = (self.component || self._element).offset();
if (self._options.widgetParent) {
parent = self._options.widgetParent.append(self.widget);
} else if (self._element.is('input')) {
parent = self._element.after(self.widget).parent();
} else if (self._options.inline) {
parent = self._element.append(self.widget);
return;
} else {
parent = self._element;
self._element.children().first().after(self.widget);
}
// Top and bottom logic
if (vertical === 'auto') {
//noinspection JSValidateTypes
if (offset.top + self.widget.height() * 1.5 >= $(window).height() + $(window).scrollTop() && self.widget.height() + self._element.outerHeight() < offset.top) {
vertical = 'top';
} else {
vertical = 'bottom';
}
}
// Left and right logic
if (horizontal === 'auto') {
if (parent.width() < offset.left + self.widget.outerWidth() / 2 && offset.left + self.widget.outerWidth() > $(window).width()) {
horizontal = 'right';
} else {
horizontal = 'left';
}
}
if (vertical === 'top') {
self.widget.addClass('top').removeClass('bottom');
} else {
self.widget.addClass('bottom').removeClass('top');
}
if (horizontal === 'right') {
self.widget.addClass('float-right');
} else {
self.widget.removeClass('float-right');
}
// find the first parent element that has a relative css positioning
if (parent.css('position') !== 'relative') {
parent = parent.parents().filter(function () {
return $(this).css('position') === 'relative';
}).first();
}
if (parent.length === 0) {
throw new Error('datetimepicker component should be placed within a relative positioned container');
}
self.widget.css({
top: vertical === 'top' ? 'auto' : position.top + self._element.outerHeight() + 'px',
bottom: vertical === 'top' ? parent.outerHeight() - (parent === self._element ? 0 : position.top) + 'px' : 'auto',
left: horizontal === 'left' ? (parent === self._element ? 0 : position.left) + 'px' : 'auto',
right: horizontal === 'left' ? 'auto' : parent.outerWidth() - self._element.outerWidth() - (parent === self._element ? 0 : position.left) + 'px'
});
};
TempusDominusBootstrap4.prototype._fillDow = function _fillDow() {
var row = $('<tr>'),
currentDate = this._viewDate.clone().startOf('w').startOf('d');
if (this._options.calendarWeeks === true) {
row.append($('<th>').addClass('cw').text('#'));
}
while (currentDate.isBefore(this._viewDate.clone().endOf('w'))) {
row.append($('<th>').addClass('dow').text(currentDate.format('dd')));
currentDate.add(1, 'd');
}
this.widget.find('.datepicker-days thead').append(row);
};
TempusDominusBootstrap4.prototype._fillMonths = function _fillMonths() {
var spans = [],
monthsShort = this._viewDate.clone().startOf('y').startOf('d');
while (monthsShort.isSame(this._viewDate, 'y')) {
spans.push($('<span>').attr('data-action', 'selectMonth').addClass('month').text(monthsShort.format('MMM')));
monthsShort.add(1, 'M');
}
this.widget.find('.datepicker-months td').empty().append(spans);
};
TempusDominusBootstrap4.prototype._updateMonths = function _updateMonths() {
var monthsView = this.widget.find('.datepicker-months'),
monthsViewHeader = monthsView.find('th'),
months = monthsView.find('tbody').find('span'),
self = this;
monthsViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevYear);
monthsViewHeader.eq(1).attr('title', this._options.tooltips.selectYear);
monthsViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextYear);
monthsView.find('.disabled').removeClass('disabled');
if (!this._isValid(this._viewDate.clone().subtract(1, 'y'), 'y')) {
monthsViewHeader.eq(0).addClass('disabled');
}
monthsViewHeader.eq(1).text(this._viewDate.year());
if (!this._isValid(this._viewDate.clone().add(1, 'y'), 'y')) {
monthsViewHeader.eq(2).addClass('disabled');
}
months.removeClass('active');
if (this._getLastPickedDate().isSame(this._viewDate, 'y') && !this.unset) {
months.eq(this._getLastPickedDate().month()).addClass('active');
}
months.each(function (index) {
if (!self._isValid(self._viewDate.clone().month(index), 'M')) {
$(this).addClass('disabled');
}
});
};
TempusDominusBootstrap4.prototype._getStartEndYear = function _getStartEndYear(factor, year) {
var step = factor / 10,
startYear = Math.floor(year / factor) * factor,
endYear = startYear + step * 9,
focusValue = Math.floor(year / step) * step;
return [startYear, endYear, focusValue];
};
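// Worked example (editorial): for factor = 10 and year = 2023 this
// returns step = 1, startYear = 2020, endYear = 2029, focusValue = 2023;
// for factor = 100 it returns step = 10, startYear = 2000, endYear = 2090,
// focusValue = 2020. These bounds drive the year and decade grids built
// by _updateYears() and _updateDecades() below.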
TempusDominusBootstrap4.prototype._updateYears = function _updateYears() {
var yearsView = this.widget.find('.datepicker-years'),
yearsViewHeader = yearsView.find('th'),
yearCaps = this._getStartEndYear(10, this._viewDate.year()),
startYear = this._viewDate.clone().year(yearCaps[0]),
endYear = this._viewDate.clone().year(yearCaps[1]);
var html = '';
yearsViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevDecade);
yearsViewHeader.eq(1).attr('title', this._options.tooltips.selectDecade);
yearsViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextDecade);
yearsView.find('.disabled').removeClass('disabled');
if (this._options.minDate && this._options.minDate.isAfter(startYear, 'y')) {
yearsViewHeader.eq(0).addClass('disabled');
}
yearsViewHeader.eq(1).text(startYear.year() + '-' + endYear.year());
if (this._options.maxDate && this._options.maxDate.isBefore(endYear, 'y')) {
yearsViewHeader.eq(2).addClass('disabled');
}
html += '<span data-action="selectYear" class="year old">' + (startYear.year() - 1) + '</span>';
while (!startYear.isAfter(endYear, 'y')) {
html += '<span data-action="selectYear" class="year' + (startYear.isSame(this._getLastPickedDate(), 'y') && !this.unset ? ' active' : '') + (!this._isValid(startYear, 'y') ? ' disabled' : '') + '">' + startYear.year() + '</span>';
startYear.add(1, 'y');
}
html += '<span data-action="selectYear" class="year old">' + startYear.year() + '</span>';
yearsView.find('td').html(html);
};
TempusDominusBootstrap4.prototype._updateDecades = function _updateDecades() {
var decadesView = this.widget.find('.datepicker-decades'),
decadesViewHeader = decadesView.find('th'),
yearCaps = this._getStartEndYear(100, this._viewDate.year()),
startDecade = this._viewDate.clone().year(yearCaps[0]),
endDecade = this._viewDate.clone().year(yearCaps[1]);
var minDateDecade = false,
maxDateDecade = false,
endDecadeYear = void 0,
html = '';
decadesViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevCentury);
decadesViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextCentury);
decadesView.find('.disabled').removeClass('disabled');
if (startDecade.year() === 0 || this._options.minDate && this._options.minDate.isAfter(startDecade, 'y')) {
decadesViewHeader.eq(0).addClass('disabled');
}
decadesViewHeader.eq(1).text(startDecade.year() + '-' + endDecade.year());
if (this._options.maxDate && this._options.maxDate.isBefore(endDecade, 'y')) {
decadesViewHeader.eq(2).addClass('disabled');
}
if (startDecade.year() - 10 < 0) {
html += '<span> </span>';
} else {
html += '<span data-action="selectDecade" class="decade old" data-selection="' + (startDecade.year() + 6) + '">' + (startDecade.year() - 10) + '</span>';
}
while (!startDecade.isAfter(endDecade, 'y')) {
endDecadeYear = startDecade.year() + 11;
minDateDecade = this._options.minDate && this._options.minDate.isAfter(startDecade, 'y') && this._options.minDate.year() <= endDecadeYear;
maxDateDecade = this._options.maxDate && this._options.maxDate.isAfter(startDecade, 'y') && this._options.maxDate.year() <= endDecadeYear;
html += '<span data-action="selectDecade" class="decade' + (this._getLastPickedDate().isAfter(startDecade) && this._getLastPickedDate().year() <= endDecadeYear ? ' active' : '') + (!this._isValid(startDecade, 'y') && !minDateDecade && !maxDateDecade ? ' disabled' : '') + '" data-selection="' + (startDecade.year() + 6) + '">' + startDecade.year() + '</span>';
startDecade.add(10, 'y');
}
html += '<span data-action="selectDecade" class="decade old" data-selection="' + (startDecade.year() + 6) + '">' + startDecade.year() + '</span>';
decadesView.find('td').html(html);
};
TempusDominusBootstrap4.prototype._fillDate = function _fillDate() {
var daysView = this.widget.find('.datepicker-days'),
daysViewHeader = daysView.find('th'),
html = [];
var currentDate = void 0,
row = void 0,
clsName = void 0,
i = void 0;
if (!this._hasDate()) {
return;
}
daysViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevMonth);
daysViewHeader.eq(1).attr('title', this._options.tooltips.selectMonth);
daysViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextMonth);
daysView.find('.disabled').removeClass('disabled');
daysViewHeader.eq(1).text(this._viewDate.format(this._options.dayViewHeaderFormat));
if (!this._isValid(this._viewDate.clone().subtract(1, 'M'), 'M')) {
daysViewHeader.eq(0).addClass('disabled');
}
if (!this._isValid(this._viewDate.clone().add(1, 'M'), 'M')) {
daysViewHeader.eq(2).addClass('disabled');
}
currentDate = this._viewDate.clone().startOf('M').startOf('w').startOf('d');
for (i = 0; i < 42; i++) {
//always display 42 days (should show 6 weeks)
if (currentDate.weekday() === 0) {
row = $('<tr>');
if (this._options.calendarWeeks) {
row.append('<td class="cw">' + currentDate.week() + '</td>');
}
html.push(row);
}
clsName = '';
if (currentDate.isBefore(this._viewDate, 'M')) {
clsName += ' old';
}
if (currentDate.isAfter(this._viewDate, 'M')) {
clsName += ' new';
}
if (this._options.allowMultidate) {
var index = this._datesFormatted.indexOf(currentDate.format('YYYY-MM-DD'));
if (index !== -1) {
if (currentDate.isSame(this._datesFormatted[index], 'd') && !this.unset) {
clsName += ' active';
}
}
} else {
if (currentDate.isSame(this._getLastPickedDate(), 'd') && !this.unset) {
clsName += ' active';
}
}
if (!this._isValid(currentDate, 'd')) {
clsName += ' disabled';
}
if (currentDate.isSame(this.getMoment(), 'd')) {
clsName += ' today';
}
if (currentDate.day() === 0 || currentDate.day() === 6) {
clsName += ' weekend';
}
row.append('<td data-action="selectDay" data-day="' + currentDate.format('L') + '" class="day' + clsName + '">' + currentDate.date() + '</td>');
currentDate.add(1, 'd');
}
daysView.find('tbody').empty().append(html);
this._updateMonths();
this._updateYears();
this._updateDecades();
};
TempusDominusBootstrap4.prototype._fillHours = function _fillHours() {
var table = this.widget.find('.timepicker-hours table'),
currentHour = this._viewDate.clone().startOf('d'),
html = [];
var row = $('<tr>');
if (this._viewDate.hour() > 11 && !this.use24Hours) {
currentHour.hour(12);
}
while (currentHour.isSame(this._viewDate, 'd') && (this.use24Hours || this._viewDate.hour() < 12 && currentHour.hour() < 12 || this._viewDate.hour() > 11)) {
if (currentHour.hour() % 4 === 0) {
row = $('<tr>');
html.push(row);
}
row.append('<td data-action="selectHour" class="hour' + (!this._isValid(currentHour, 'h') ? ' disabled' : '') + '">' + currentHour.format(this.use24Hours ? 'HH' : 'hh') + '</td>');
currentHour.add(1, 'h');
}
table.empty().append(html);
};
TempusDominusBootstrap4.prototype._fillMinutes = function _fillMinutes() {
var table = this.widget.find('.timepicker-minutes table'),
currentMinute = this._viewDate.clone().startOf('h'),
html = [],
step = this._options.stepping === 1 ? 5 : this._options.stepping;
var row = $('<tr>');
while (this._viewDate.isSame(currentMinute, 'h')) {
if (currentMinute.minute() % (step * 4) === 0) {
row = $('<tr>');
html.push(row);
}
row.append('<td data-action="selectMinute" class="minute' + (!this._isValid(currentMinute, 'm') ? ' disabled' : '') + '">' + currentMinute.format('mm') + '</td>');
currentMinute.add(step, 'm');
}
table.empty().append(html);
};
TempusDominusBootstrap4.prototype._fillSeconds = function _fillSeconds() {
var table = this.widget.find('.timepicker-seconds table'),
currentSecond = this._viewDate.clone().startOf('m'),
html = [];
var row = $('<tr>');
while (this._viewDate.isSame(currentSecond, 'm')) {
if (currentSecond.second() % 20 === 0) {
row = $('<tr>');
html.push(row);
}
row.append('<td data-action="selectSecond" class="second' + (!this._isValid(currentSecond, 's') ? ' disabled' : '') + '">' + currentSecond.format('ss') + '</td>');
currentSecond.add(5, 's');
}
table.empty().append(html);
};
TempusDominusBootstrap4.prototype._fillTime = function _fillTime() {
var toggle = void 0,
newDate = void 0;
var timeComponents = this.widget.find('.timepicker span[data-time-component]');
if (!this.use24Hours) {
toggle = this.widget.find('.timepicker [data-action=togglePeriod]');
newDate = this._getLastPickedDate().clone().add(this._getLastPickedDate().hours() >= 12 ? -12 : 12, 'h');
toggle.text(this._getLastPickedDate().format('A'));
if (this._isValid(newDate, 'h')) {
toggle.removeClass('disabled');
} else {
toggle.addClass('disabled');
}
}
timeComponents.filter('[data-time-component=hours]').text(this._getLastPickedDate().format('' + (this.use24Hours ? 'HH' : 'hh')));
timeComponents.filter('[data-time-component=minutes]').text(this._getLastPickedDate().format('mm'));
timeComponents.filter('[data-time-component=seconds]').text(this._getLastPickedDate().format('ss'));
this._fillHours();
this._fillMinutes();
this._fillSeconds();
};
TempusDominusBootstrap4.prototype._doAction = function _doAction(e, action) {
var lastPicked = this._getLastPickedDate();
if ($(e.currentTarget).is('.disabled')) {
return false;
}
action = action || $(e.currentTarget).data('action');
switch (action) {
case 'next':
{
var navFnc = DateTimePicker.DatePickerModes[this.currentViewMode].NAV_FUNCTION;
this._viewDate.add(DateTimePicker.DatePickerModes[this.currentViewMode].NAV_STEP, navFnc);
this._fillDate();
this._viewUpdate(navFnc);
break;
}
case 'previous':
{
var _navFnc = DateTimePicker.DatePickerModes[this.currentViewMode].NAV_FUNCTION;
this._viewDate.subtract(DateTimePicker.DatePickerModes[this.currentViewMode].NAV_STEP, _navFnc);
this._fillDate();
this._viewUpdate(_navFnc);
break;
}
case 'pickerSwitch':
this._showMode(1);
break;
case 'selectMonth':
{
var month = $(e.target).closest('tbody').find('span').index($(e.target));
this._viewDate.month(month);
if (this.currentViewMode === DateTimePicker.MinViewModeNumber) {
this._setValue(lastPicked.clone().year(this._viewDate.year()).month(this._viewDate.month()), this._getLastPickedDateIndex());
if (!this._options.inline) {
this.hide();
}
} else {
this._showMode(-1);
this._fillDate();
}
this._viewUpdate('M');
break;
}
case 'selectYear':
{
var year = parseInt($(e.target).text(), 10) || 0;
this._viewDate.year(year);
if (this.currentViewMode === DateTimePicker.MinViewModeNumber) {
this._setValue(lastPicked.clone().year(this._viewDate.year()), this._getLastPickedDateIndex());
if (!this._options.inline) {
this.hide();
}
} else {
this._showMode(-1);
this._fillDate();
}
this._viewUpdate('YYYY');
break;
}
case 'selectDecade':
{
var _year = parseInt($(e.target).data('selection'), 10) || 0;
this._viewDate.year(_year);
if (this.currentViewMode === DateTimePicker.MinViewModeNumber) {
this._setValue(lastPicked.clone().year(this._viewDate.year()), this._getLastPickedDateIndex());
if (!this._options.inline) {
this.hide();
}
} else {
this._showMode(-1);
this._fillDate();
}
this._viewUpdate('YYYY');
break;
}
case 'selectDay':
{
var day = this._viewDate.clone();
if ($(e.target).is('.old')) {
day.subtract(1, 'M');
}
if ($(e.target).is('.new')) {
day.add(1, 'M');
}
this._setValue(day.date(parseInt($(e.target).text(), 10)), this._getLastPickedDateIndex());
if (!this._hasTime() && !this._options.keepOpen && !this._options.inline) {
this.hide();
}
break;
}
case 'incrementHours':
{
var newDate = lastPicked.clone().add(1, 'h');
if (this._isValid(newDate, 'h')) {
this._setValue(newDate, this._getLastPickedDateIndex());
}
break;
}
case 'incrementMinutes':
{
var _newDate = lastPicked.clone().add(this._options.stepping, 'm');
if (this._isValid(_newDate, 'm')) {
this._setValue(_newDate, this._getLastPickedDateIndex());
}
break;
}
case 'incrementSeconds':
{
var _newDate2 = lastPicked.clone().add(1, 's');
if (this._isValid(_newDate2, 's')) {
this._setValue(_newDate2, this._getLastPickedDateIndex());
}
break;
}
case 'decrementHours':
{
var _newDate3 = lastPicked.clone().subtract(1, 'h');
if (this._isValid(_newDate3, 'h')) {
this._setValue(_newDate3, this._getLastPickedDateIndex());
}
break;
}
case 'decrementMinutes':
{
var _newDate4 = lastPicked.clone().subtract(this._options.stepping, 'm');
if (this._isValid(_newDate4, 'm')) {
this._setValue(_newDate4, this._getLastPickedDateIndex());
}
break;
}
case 'decrementSeconds':
{
var _newDate5 = lastPicked.clone().subtract(1, 's');
if (this._isValid(_newDate5, 's')) {
this._setValue(_newDate5, this._getLastPickedDateIndex());
}
break;
}
case 'togglePeriod':
{
this._setValue(lastPicked.clone().add(lastPicked.hours() >= 12 ? -12 : 12, 'h'), this._getLastPickedDateIndex());
break;
}
case 'togglePicker':
{
var $this = $(e.target),
$link = $this.closest('a'),
$parent = $this.closest('ul'),
expanded = $parent.find('.show'),
closed = $parent.find('.collapse:not(.show)'),
$span = $this.is('span') ? $this : $this.find('span');
var collapseData = void 0;
if (expanded && expanded.length) {
collapseData = expanded.data('collapse');
if (collapseData && collapseData.transitioning) {
return true;
}
if (expanded.collapse) {
// if collapse plugin is available through bootstrap.js then use it
expanded.collapse('hide');
closed.collapse('show');
} else {
// otherwise just toggle the 'show' class on the two views
expanded.removeClass('show');
closed.addClass('show');
}
$span.toggleClass(this._options.icons.time + ' ' + this._options.icons.date);
if ($span.hasClass(this._options.icons.date)) {
$link.attr('title', this._options.tooltips.selectDate);
} else {
$link.attr('title', this._options.tooltips.selectTime);
}
}
}
break;
case 'showPicker':
this.widget.find('.timepicker > div:not(.timepicker-picker)').hide();
this.widget.find('.timepicker .timepicker-picker').show();
break;
case 'showHours':
this.widget.find('.timepicker .timepicker-picker').hide();
this.widget.find('.timepicker .timepicker-hours').show();
break;
case 'showMinutes':
this.widget.find('.timepicker .timepicker-picker').hide();
this.widget.find('.timepicker .timepicker-minutes').show();
break;
case 'showSeconds':
this.widget.find('.timepicker .timepicker-picker').hide();
this.widget.find('.timepicker .timepicker-seconds').show();
break;
case 'selectHour':
{
var hour = parseInt($(e.target).text(), 10);
if (!this.use24Hours) {
if (lastPicked.hours() >= 12) {
if (hour !== 12) {
hour += 12;
}
} else {
if (hour === 12) {
hour = 0;
}
}
}
this._setValue(lastPicked.clone().hours(hour), this._getLastPickedDateIndex());
this._doAction(e, 'showPicker');
break;
}
case 'selectMinute':
this._setValue(lastPicked.clone().minutes(parseInt($(e.target).text(), 10)), this._getLastPickedDateIndex());
this._doAction(e, 'showPicker');
break;
case 'selectSecond':
this._setValue(lastPicked.clone().seconds(parseInt($(e.target).text(), 10)), this._getLastPickedDateIndex());
this._doAction(e, 'showPicker');
break;
case 'clear':
this.clear();
break;
case 'today':
{
var todaysDate = this.getMoment();
if (this._isValid(todaysDate, 'd')) {
this._setValue(todaysDate, this._getLastPickedDateIndex());
}
break;
}
}
return false;
};
//public
TempusDominusBootstrap4.prototype.hide = function hide() {
var transitioning = false;
if (!this.widget) {
return;
}
// Ignore event if in the middle of a picker transition
this.widget.find('.collapse').each(function () {
var collapseData = $(this).data('collapse');
if (collapseData && collapseData.transitioning) {
transitioning = true;
return false;
}
return true;
});
if (transitioning) {
return;
}
if (this.component && this.component.hasClass('btn')) {
this.component.toggleClass('active');
}
this.widget.hide();
$(window).off('resize', this._place); // unbind the handler reference bound in show()
this.widget.off('click', '[data-action]');
this.widget.off('mousedown', false);
this.widget.remove();
this.widget = false;
this._notifyEvent({
type: DateTimePicker.Event.HIDE,
date: this._getLastPickedDate().clone()
});
if (this.input !== undefined) {
this.input.blur();
}
this._viewDate = this._getLastPickedDate().clone();
};
TempusDominusBootstrap4.prototype.show = function show() {
var currentMoment = void 0;
var useCurrentGranularity = {
'year': function year(m) {
return m.month(0).date(1).hours(0).seconds(0).minutes(0);
},
'month': function month(m) {
return m.date(1).hours(0).seconds(0).minutes(0);
},
'day': function day(m) {
return m.hours(0).seconds(0).minutes(0);
},
'hour': function hour(m) {
return m.seconds(0).minutes(0);
},
'minute': function minute(m) {
return m.seconds(0);
}
};
if (this.input !== undefined) {
if (this.input.prop('disabled') || !this._options.ignoreReadonly && this.input.prop('readonly') || this.widget) {
return;
}
if (this.input.val() !== undefined && this.input.val().trim().length !== 0) {
this._setValue(this._parseInputDate(this.input.val().trim()), 0);
} else if (this.unset && this._options.useCurrent) {
currentMoment = this.getMoment();
if (typeof this._options.useCurrent === 'string') {
currentMoment = useCurrentGranularity[this._options.useCurrent](currentMoment);
}
this._setValue(currentMoment, 0);
}
} else if (this.unset && this._options.useCurrent) {
currentMoment = this.getMoment();
if (typeof this._options.useCurrent === 'string') {
currentMoment = useCurrentGranularity[this._options.useCurrent](currentMoment);
}
this._setValue(currentMoment, 0);
}
this.widget = this._getTemplate();
this._fillDow();
this._fillMonths();
this.widget.find('.timepicker-hours').hide();
this.widget.find('.timepicker-minutes').hide();
this.widget.find('.timepicker-seconds').hide();
this._update();
this._showMode();
$(window).on('resize', { picker: this }, this._place);
this.widget.on('click', '[data-action]', $.proxy(this._doAction, this)); // this handles clicks on the widget
this.widget.on('mousedown', false);
if (this.component && this.component.hasClass('btn')) {
this.component.toggleClass('active');
}
this._place();
this.widget.show();
if (this.input !== undefined && this._options.focusOnShow && !this.input.is(':focus')) {
this.input.focus();
}
this._notifyEvent({
type: DateTimePicker.Event.SHOW
});
};
TempusDominusBootstrap4.prototype.destroy = function destroy() {
this.hide();
// TODO: should the document-level delegated handlers also be removed here?
this._element.removeData(DateTimePicker.DATA_KEY);
this._element.removeData('date');
};
TempusDominusBootstrap4.prototype.disable = function disable() {
this.hide();
if (this.component && this.component.hasClass('btn')) {
this.component.addClass('disabled');
}
if (this.input !== undefined) {
this.input.prop('disabled', true); //todo disable this/comp if input is null
}
};
TempusDominusBootstrap4.prototype.enable = function enable() {
if (this.component && this.component.hasClass('btn')) {
this.component.removeClass('disabled');
}
if (this.input !== undefined) {
this.input.prop('disabled', false); //todo enable comp/this if input is null
}
};
TempusDominusBootstrap4.prototype.toolbarPlacement = function toolbarPlacement(_toolbarPlacement) {
if (arguments.length === 0) {
return this._options.toolbarPlacement;
}
if (typeof _toolbarPlacement !== 'string') {
throw new TypeError('toolbarPlacement() expects a string parameter');
}
if (toolbarPlacements.indexOf(_toolbarPlacement) === -1) {
throw new TypeError('toolbarPlacement() parameter must be one of (' + toolbarPlacements.join(', ') + ')');
}
this._options.toolbarPlacement = _toolbarPlacement;
if (this.widget) {
this.hide();
this.show();
}
};
TempusDominusBootstrap4.prototype.widgetPositioning = function widgetPositioning(_widgetPositioning) {
if (arguments.length === 0) {
return $.extend({}, this._options.widgetPositioning);
}
if ({}.toString.call(_widgetPositioning) !== '[object Object]') {
throw new TypeError('widgetPositioning() expects an object variable');
}
if (_widgetPositioning.horizontal) {
if (typeof _widgetPositioning.horizontal !== 'string') {
throw new TypeError('widgetPositioning() horizontal variable must be a string');
}
_widgetPositioning.horizontal = _widgetPositioning.horizontal.toLowerCase();
if (horizontalModes.indexOf(_widgetPositioning.horizontal) === -1) {
throw new TypeError('widgetPositioning() expects horizontal parameter to be one of (' + horizontalModes.join(', ') + ')');
}
this._options.widgetPositioning.horizontal = _widgetPositioning.horizontal;
}
if (_widgetPositioning.vertical) {
if (typeof _widgetPositioning.vertical !== 'string') {
throw new TypeError('widgetPositioning() vertical variable must be a string');
}
_widgetPositioning.vertical = _widgetPositioning.vertical.toLowerCase();
if (verticalModes.indexOf(_widgetPositioning.vertical) === -1) {
throw new TypeError('widgetPositioning() expects vertical parameter to be one of (' + verticalModes.join(', ') + ')');
}
this._options.widgetPositioning.vertical = _widgetPositioning.vertical;
}
this._update();
};
TempusDominusBootstrap4.prototype.widgetParent = function widgetParent(_widgetParent) {
if (arguments.length === 0) {
return this._options.widgetParent;
}
if (typeof _widgetParent === 'string') {
_widgetParent = $(_widgetParent);
}
if (_widgetParent !== null && typeof _widgetParent !== 'string' && !(_widgetParent instanceof $)) {
throw new TypeError('widgetParent() expects a string or a jQuery object parameter');
}
this._options.widgetParent = _widgetParent;
if (this.widget) {
this.hide();
this.show();
}
};
//static
TempusDominusBootstrap4._jQueryHandleThis = function _jQueryHandleThis(me, option, argument) {
var data = $(me).data(DateTimePicker.DATA_KEY);
if ((typeof option === 'undefined' ? 'undefined' : _typeof(option)) === 'object') {
$.extend({}, DateTimePicker.Default, option);
}
if (!data) {
data = new TempusDominusBootstrap4($(me), option);
$(me).data(DateTimePicker.DATA_KEY, data);
}
if (typeof option === 'string') {
if (data[option] === undefined) {
throw new Error('No method named "' + option + '"');
}
if (argument === undefined) {
return data[option]();
} else {
return data[option](argument);
}
}
};
TempusDominusBootstrap4._jQueryInterface = function _jQueryInterface(option, argument) {
if (this.length === 1) {
return TempusDominusBootstrap4._jQueryHandleThis(this[0], option, argument);
}
return this.each(function () {
TempusDominusBootstrap4._jQueryHandleThis(this, option, argument);
});
};
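// Note (editorial): _jQueryInterface follows the usual Bootstrap plugin
// pattern -- an object (or no) argument constructs the picker, while a
// string argument proxies to the named public method. For a single
// matched element the method's return value is passed back to the
// caller; for multiple elements the set is iterated and jQuery chaining
// is preserved.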
return TempusDominusBootstrap4;
}(DateTimePicker);
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$(document).on(DateTimePicker.Event.CLICK_DATA_API, DateTimePicker.Selector.DATA_TOGGLE, function () {
var $target = getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, 'toggle');
}).on(DateTimePicker.Event.CHANGE, '.' + DateTimePicker.ClassName.INPUT, function (event) {
var $target = getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, '_change', event);
}).on(DateTimePicker.Event.BLUR, '.' + DateTimePicker.ClassName.INPUT, function (event) {
var $target = getSelectorFromElement($(this)),
config = $target.data(DateTimePicker.DATA_KEY);
if ($target.length === 0) {
return;
}
if (config._options.debug || window.debug) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, 'hide', event);
}).on(DateTimePicker.Event.KEYDOWN, '.' + DateTimePicker.ClassName.INPUT, function (event) {
var $target = getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, '_keydown', event);
}).on(DateTimePicker.Event.KEYUP, '.' + DateTimePicker.ClassName.INPUT, function (event) {
var $target = getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, '_keyup', event);
}).on(DateTimePicker.Event.FOCUS, '.' + DateTimePicker.ClassName.INPUT, function (event) {
var $target = getSelectorFromElement($(this)),
config = $target.data(DateTimePicker.DATA_KEY);
if ($target.length === 0) {
return;
}
if (!config._options.allowInputToggle) {
return;
}
TempusDominusBootstrap4._jQueryInterface.call($target, config, event);
});
$.fn[DateTimePicker.NAME] = TempusDominusBootstrap4._jQueryInterface;
$.fn[DateTimePicker.NAME].Constructor = TempusDominusBootstrap4;
$.fn[DateTimePicker.NAME].noConflict = function () {
$.fn[DateTimePicker.NAME] = JQUERY_NO_CONFLICT;
return TempusDominusBootstrap4._jQueryInterface;
};
return TempusDominusBootstrap4;
}(jQuery);
}();
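// Usage example (editorial illustration, not part of the upstream
// source). Assumes DateTimePicker.NAME resolves to 'datetimepicker',
// that jQuery and moment are loaded, and that the target input sits in
// a relatively positioned container (a requirement enforced by _place()
// above):
//
//   $('#datetimepicker1').datetimepicker({
//       format: 'L',
//       useCurrent: 'day'
//   });
//   $('#datetimepicker1').datetimepicker('date', moment()); // set
//   var picked = $('#datetimepicker1').datetimepicker('date'); // get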