code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from kazoo.client import KazooClient, KazooState
import optparse
import logging
# Module-level state shared by tree_zk_node() and build_zk_node().
root_nodes = {}   # relative path -> node data (unicode), filled by tree_zk_node()
root_path = ''    # source root path in Zookeeper (set by main())
target_path = ''  # target root path in Zookeeper (set by main())


def tree_zk_node(zk, node=""):
    """Recursively walk the source tree below root_path + node.

    Prints every znode found and records its data (decoded as UTF-8) in the
    global root_nodes dict, keyed by the path relative to root_path.

    zk    connected Zookeeper client (must support get_children/get)
    node  path relative to root_path; "" starts at the root
    """
    global root_nodes
    children = zk.get_children(root_path + node)
    if not children:
        return
    for child in children:
        path = node + "/" + child
        data, stat = zk.get(root_path + path)
        if data is None:  # a znode may exist with no data at all
            data = b""
        print(" " + root_path + path + " " + data.decode("utf-8"))
        root_nodes[path] = data.decode("utf-8")
        tree_zk_node(zk, path)
def build_zk_node(zk, force):
    """Create every collected node under target_path on the target cluster.

    For each relative path recorded in root_nodes, the target path is created
    if it does not exist yet.  An existing path is overwritten only when
    force is True; otherwise it is left untouched.

    zk     connected, writable Zookeeper client
    force  overwrite data of nodes that already exist
    """
    for tree in root_nodes:
        path = target_path + tree
        print("create path " + path)
        if not zk.exists(path):
            zk.ensure_path(path)
        elif not force:
            continue
        # Zookeeper stores raw bytes; encode the unicode data explicitly.
        # The original str() call raised UnicodeEncodeError for non-ASCII data.
        zk.set(path, root_nodes[tree].encode("utf-8"))
def main():
    """Parse CLI options, dump the source tree, then copy it to the target.

    Asks for interactive confirmation ("yes") before any data is written.
    """
    global root_path
    global target_path
    p = optparse.OptionParser(description="Copy zk data from one node to another node", version="0.0.1",
                              usage="-r /app/test/a -t /app/prod/b -s 192.168.221.100:2181,192.168.221.101:2181")
    p.add_option('--root', '-r', help="root node path,required")
    p.add_option('--target', '-t', help="target node path,required")
    p.add_option('--server', '-s', help="zk server address,required")
    p.add_option('--force', '-f', help="force copy node if exist deleted it,option", action='store_true')
    options, arguments = p.parse_args()
    if options.root is None:
        logging.error('-r is required\n')
        p.print_help()
        return
    if options.target is None:
        logging.error("-t is required\n")
        p.print_help()
        return
    if options.server is None:
        logging.error("-s is required\n")
        p.print_help()
        return
    force = bool(options.force)
    print('root node %s' % options.root)
    print('target node %s' % options.target)
    print('server %s' % options.server)
    print('force %s' % options.force)
    print("")
    # bugfix: the connection must be writable -- build_zk_node() calls
    # ensure_path() and set(), which a read_only=True client rejects.
    zk = KazooClient(hosts=options.server)
    zk.start()
    if not zk.exists(options.root):
        logging.error(options.root + " not exists")  # bugfix: missing space
        zk.stop()
        return
    target_path = options.target
    root_path = options.root
    print("current data:")
    print("")
    tree_zk_node(zk)
    print("")
    # Renamed from "str" -- the original shadowed the builtin.
    answer = raw_input("checked current data and copy node to " + target_path + " (yes/no) : ")
    if answer != 'yes':
        zk.stop()
        print('bay!')
        return
    build_zk_node(zk, force)
    zk.stop()
    print('bay!')
if __name__ == '__main__':
    # Emit INFO-level (and above) log records before running the copy tool.
    logging.basicConfig(level=logging.INFO)
    main() | zkSync | /zkSync-0.1.1.tar.gz/zkSync-0.1.1/execdir/zkSync.py | zkSync.py |
"""
This script creates a destination partition as a LUKS keyed dm-crypt volume
and ext4 filesystem. Next, the contents of a source partition are copied to the
destination and the source is optionally erased.
"""
import argparse
import os
import errno
import subprocess
import shutil
import sys
import tarfile
from progress.bar import ShadyBar
import threading
import time
import inotify
import inotify.adapters
########################################################################
# Utility functions
########################################################################
class led_flash:
    """Background flasher for the Zymkey status LED.

    A worker thread repeatedly emits a configurable pulse pattern.  The
    pattern is changed by mutating the public attributes and then setting
    ``evt`` to wake the worker (see flash_zk_led below).
    """
    def __init__(self):
        self.evt = threading.Event()  # wakes the worker when the pattern changes
        self._thread = threading.Thread(target=self.led_worker)
        self.gap_time_ms = 0    # pause between pulse groups (ms)
        self.pulse_time_ms = 0  # on/off time of a single pulse (ms)
        self.num_pulses = 0     # pulses per group; 0 disables flashing
        self.invert = False     # when True an extra half-period is added
        self._thread.start()

    def led_worker(self):
        """Worker loop: flash the configured pattern until re-signalled."""
        # Imported here so merely importing this module does not require
        # the zymkey package to be present.
        import zymkey
        total_flash_time_ms = None
        while True:
            iadj = 0
            if self.invert:
                iadj = 1
            if self.num_pulses:
                # Each pulse is one on + one off period, hence * 2.
                np = (self.num_pulses * 2) + iadj
                zymkey.client.led_flash(self.pulse_time_ms, off_ms=self.pulse_time_ms, num_flashes=np)
                # NOTE(review): despite the "_ms" name this value is in
                # seconds -- the ms total is divided by 1000 for Event.wait().
                total_flash_time_ms = float((np * self.pulse_time_ms) + self.gap_time_ms) / 1000.0
            else:
                total_flash_time_ms = None
            # Sleep one full pattern period (or indefinitely when idle);
            # a set() on evt interrupts the wait so a new pattern starts at once.
            self.evt.wait(total_flash_time_ms)
            self.evt.clear()
# Lazily-created singleton led_flash instance (see flash_zk_led).
ledf = None
# Last percentage pushed to the progress bar (see unpack_tar).
prev_pct = None


def flash_zk_led(gap_time_ms, pulse_time_ms, num_pulses, invert=False):
    """Start or update the Zymkey LED flash pattern.

    Creates the background flasher on first use, then updates its pattern
    attributes and wakes the worker thread so the new pattern takes effect
    immediately.
    """
    global ledf
    if ledf is None:
        ledf = led_flash()
        # Give the worker thread a moment to reach its wait() call.
        time.sleep(0.1)
    ledf.gap_time_ms = gap_time_ms
    ledf.pulse_time_ms = pulse_time_ms
    ledf.num_pulses = num_pulses
    ledf.invert = invert
    ledf.evt.set()
def get_part_uuid(dev_path):
    """Return the PARTUUID=... field reported by blkid for dev_path.

    Double quotes are stripped from the returned field.  Returns None when
    blkid reports no PARTUUID for the device.
    """
    command = ("blkid " + dev_path).split()
    blkid_fields = subprocess.check_output(command, stderr=subprocess.PIPE).split()
    for entry in blkid_fields:
        if "PARTUUID" not in entry:
            continue
        return entry.replace("\"", "")
def unpack_tar(tar_path, dst, bar=None):
    """Extract the tar archive at tar_path into directory dst.

    When a progress bar is supplied, its position is updated (percentage,
    one decimal place) after every extracted member and finished at the end.

    NOTE(review): members are extracted without path sanitization, so a
    hostile archive could write outside dst -- acceptable only because the
    archives used here are produced by trusted tooling.
    """
    global prev_pct
    with tarfile.open(tar_path, "r") as tf:
        members = tf.getmembers()
        total_size = sum(m.size for m in members)
        done_size = 0
        for member in members:
            tf.extract(member, path=dst)
            done_size += member.size
            if bar is not None:
                # Guard against empty archives: the original divided by
                # total_size unconditionally (ZeroDivisionError).
                pct_done = round((100.0 * done_size) / total_size, 1) if total_size else 100.0
                if pct_done != prev_pct:
                    bar.goto(pct_done)
                    prev_pct = pct_done
        if bar is not None:
            bar.finish()
def do_fdisk(cmds, dev):
    """Drive an interactive fdisk session on device dev.

    cmds is a list of fdisk answers; they are joined with newlines and fed
    to fdisk on stdin.  Returns fdisk's stdout output.
    """
    script = "\n".join(cmds)
    proc = subprocess.Popen(["fdisk", dev],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout_data, _ = proc.communicate(input=script)
    proc.wait()
    return stdout_data
def startup_zkifc():
    """(Re)start the zkifc daemon with a clean state directory.

    Any running zkifc instance is killed, /var/lib/zymbit is wiped and
    recreated, zkifc is started against it, and the function blocks until
    zkifc has bound to the Zymkey (signalled by a close-after-write on a
    "state" file inside that directory).
    """
    zkdir = "/var/lib/zymbit"
    # Kill zkifc if it is running
    cmd = "killall zkifc"
    try:
        subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
        # Give the old instance time to shut down and release the Zymkey.
        time.sleep(10)
    except:
        # killall fails when no zkifc process exists -- that is fine.
        pass
    # Clear out the /var/lib/zymbit directory
    shutil.rmtree(zkdir)
    # Recreate the /var/lib/zymbit directory
    os.mkdir(zkdir)
    # Start up zkifc
    cmd = "/usr/bin/zkifc -s " + zkdir
    zkifc_proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Wait for zkifc to bind to the zymkey
    i = inotify.adapters.InotifyTree(zkdir)
    for event in i.event_gen():
        if event is not None:
            (header, type_names, watch_path, filename) = event
            # zkifc writing (and closing) its "state" file marks readiness.
            if filename == "state" and "IN_CLOSE_WRITE" in type_names:
                break
"""
This function configures config.txt and cmdline.txt in /boot to boot
Raspberry Pi from the new crypto volume.
"""
def update_initramfs(mapper_name, rfs_part, etc_base_dir="/etc"):
    """Point /boot at the crypt volume and build a matching initramfs.

    mapper_name   dm-crypt mapper name for the root volume
    rfs_part      partition that holds the encrypted root file system
    etc_base_dir  directory containing initramfs-tools (default "/etc")
    """
    # Insure that the boot partition is mounted
    try:
        # We might want the boot partition to be configurable, but most
        # use cases will use the boot partition on the SD card which is
        # /dev/mmcblk0p1
        cmd = "mount /dev/mmcblk0p1 /boot"
        subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    except:
        # Most likely already mounted -- carry on; the file operations
        # below will surface any real problem.
        pass
    #####################################################################
    # Modify config.txt so that an initramfs is used during boot
    #####################################################################
    with open("/boot/config.txt") as bfd:
        lines = bfd.readlines()
    # Remove any stale "initramfs" lines
    lines = [line for line in lines if not line.startswith("initramfs")]
    # Put a new initramfs line at the end of the file. This line
    # instructs the GPU to put the file "initrd" after the kernel
    # image in memory. The kernel expects to find the initramfs after
    # its own image.
    # bugfix: the original appended the line without a trailing newline.
    lines.append("initramfs initrd.img followkernel\n")
    with open("/boot/config.txt", "w") as bwfd:
        bwfd.writelines(lines)
    #####################################################################
    # Modify cmdline.txt to boot to the dm-crypt volume
    #####################################################################
    with open("/boot/cmdline.txt") as bfd:
        fields = bfd.readline().split()
    # Chop out everything having to do with the old root
    fields = [field for field in fields if not field.startswith("root=") and not field.startswith("rootfstype=") and not field.startswith("cryptdevice=")]
    line = " ".join(fields)
    # bugfix: the kernel parameter was misspelled "cyptdevice", so the
    # initramfs never saw the crypt mapping on boot.
    line = line + " root=/dev/mapper/" + mapper_name + " cryptdevice=" + rfs_part + ":" + mapper_name
    with open("/boot/cmdline.txt", "w") as bwfd:
        bwfd.write(line)
    #####################################################################
    # Write the i2c drivers to the initramfs staging area
    #####################################################################
    with open(etc_base_dir + "/initramfs-tools/modules") as ifd:
        lines = ifd.readlines()
    # Get rid of stale entries
    lines = [line for line in lines if not line.startswith("i2c-dev") and not line.startswith("i2c-bcm2835") and not line.startswith("i2c-bcm2708")]
    lines.append("i2c-dev\n")
    lines.append("i2c-bcm2835\n")
    lines.append("i2c-bcm2708\n")
    with open(etc_base_dir + "/initramfs-tools/modules", "w") as iwfd:
        iwfd.writelines(lines)
    #####################################################################
    # Create the initramfs
    #####################################################################
    print("Building initramfs")
    cmd = "uname -r"
    kernel_ver = subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    kernel_ver = kernel_ver.rstrip("\n\r")
    try:
        os.remove("/boot/initrd.img-" + kernel_ver)
    except:
        # No previous initrd for this kernel -- nothing to remove.
        pass
    cmd = "update-initramfs -v -c -k " + kernel_ver
    subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    os.rename("/boot/initrd.img-" + kernel_ver, "/boot/initrd.img")
"""
create_zk_crypt_vol - This function creates a LUKS dm-crypt volume and formats
is as ext4. The passphrase is randomly generated by Zymkey and consequently
locked by Zymkey.
arguments:
dst_partition The destination partition. This argument can specify a
specific partition (e.g. /dev/sda1) or a device (e.g.
/dev/sda). If a device is specified, then /dev/sda1 is
assumed.
src_path The path to a directory, tarfile or partition that
is to be copied to the destination. If src_path is
actually a partition, src_is_partition should be
set to True. If src_path is set to None, the
destination partition is created without copying
any data to it.
src_is_partition If src_path specifies a path to a partition, this argument
should be set to True.
force_creation If this argument is False, the destination volume will not
be created if a volume already exists on the destination
device. If True, the destination will be destroyed and a
new volume will be created.
dst_size If this argument is 0, the remaining size on the
destination device will be used to create the new
partition. If this argument is "match", then an attempt
will be made to match the source partition size if one
is specified.
crypt_mapper_name The name for the dm-crypt mapper volume. Defaults to
"cryptfs".
erase_src If True, the source partition is erased.
zero_dst If True, the destination volume is filled with zeros after
creation. This action has the effect of writing a random
sequence on the physical media and can help prevent
various attacks on the physical media at the expense of
extra creation time.
mnt_entry If not "None", entries in fstab and crypttab are added
with the value in this argument.
mnt_cfg_base_path If mnt_entry is not "None", this argument specifies the
location where fstab and crypttab are found.
"""
def create_zk_crypt_vol(dst_dev,
                        dst_part_num="",
                        src_path=None,
                        src_is_partition=False,
                        force_creation=False,
                        dst_size=0,
                        crypt_mapper_name="cryptfs",
                        erase_src=False,
                        zero_dst=False,
                        mnt_entry=None,
                        mnt_cfg_base_path="/etc"):
    """Create a Zymkey-keyed LUKS dm-crypt volume with an ext4 file system.

    See the module-level description above for the meaning of each argument.
    The LUKS passphrase is produced by the Zymkey RNG and stored in locked
    form at /var/lib/zymbit/<crypt_mapper_name>.key.
    """
    dst_part = dst_dev + dst_part_num
    # Determine the destination size.  When dst_size is "match", derive it
    # from the source partition (resolving a directory source to its
    # backing partition via df).
    # bugfix: the original guard also required src_is_partition, which made
    # the df lookup for directory sources unreachable dead code.
    if src_path and dst_size == "match":
        if not src_is_partition:
            # Find out which device the source directory lives on.
            cmd = "df " + src_path
            src_part = subprocess.check_output(cmd.split(), stderr=subprocess.PIPE).split("\n")[1]
            src_part = src_part.split()[0]
        else:
            src_part = src_path
        cmd = "sfdisk -lqs " + src_part
        try:
            dst_size = subprocess.check_output(cmd.split())
        except:
            # bugfix: leaving "match" in dst_size broke int() below.
            dst_size = ""
        if dst_size == "" or int(dst_size) == 0:
            print("Cannot determine src partition size")
            exit()
        else:
            # sfdisk -s reports the size in 1K blocks.
            # bugfix: int() -- the original divided the raw output string
            # by 1024, raising TypeError.
            # NOTE(review): dividing KB by 1024 and labelling the result
            # "K" looks like a unit mismatch -- confirm against fdisk use.
            dst_size_k = int(dst_size) // 1024
            if dst_size_k != 0:
                dst_size = "+" + str(dst_size_k) + "K"
            else:
                dst_size = ""
    tmp_pval = dst_part_num.rstrip("1234567890")
    dst_part_num_val = dst_part_num.replace(tmp_pval, "")
    # Find out if the destination partition already exists.
    # NOTE(review): the dst_part_num == "" test looks inverted -- with an
    # explicit partition number the existence check is skipped.  Confirm.
    if dst_part_num == "" and os.path.exists(dst_part):
        print(dst_part + " already exists.")
        # Delete the old destination partition
        if force_creation:
            print("Deleting existing partition")
            fd_cmds = ["d", dst_part_num_val, "w"]
            do_fdisk(fd_cmds, dst_dev)
        else:
            print("Exiting.")
            exit()
    # Unmount potentially stale mounts and dm-crypt mappings.  Each call is
    # best effort: a failure simply means nothing was mounted/mapped.
    try:
        # bugfix: was crypt_crypt_mapper_name (NameError hidden by except)
        cmd = "umount /dev/mapper/" + crypt_mapper_name
        subprocess.call(cmd.split(), stderr=subprocess.PIPE)
    except:
        pass
    try:
        cmd = "umount " + dst_part
        subprocess.call(cmd.split(), stderr=subprocess.PIPE)
    except:
        pass
    if src_path and src_is_partition:
        try:
            cmd = "umount " + src_path
            subprocess.call(cmd.split(), stderr=subprocess.PIPE)
        except:
            pass
    try:
        # bugfix: was crypt_crypt_mapper_name (NameError hidden by except)
        cmd = "cryptsetup luksClose " + crypt_mapper_name
        subprocess.call(cmd.split(), stderr=subprocess.PIPE)
    except:
        pass
    # Create the new partition
    print("Creating new partition")
    fd_cmds = ["n", "", dst_part_num_val, "", dst_size, "w"]
    do_fdisk(fd_cmds, dst_dev)
    # TODO: if dst_part_num_val was "", we need to find the partition
    # number that fdisk just created for us
    # bugfix: subprocess.PIPE was passed positionally (landing in Popen's
    # bufsize slot) instead of as stderr.
    subprocess.check_output(["partprobe"], stderr=subprocess.PIPE)
    # Create the LUKS key using Zymkey's random number generator
    print("Creating random passphrase")
    import zymkey
    key = zymkey.client.get_random(512)
    # Lock the key up in the specified location
    print("Locking passphrase")
    locked_key_fn = "/var/lib/zymbit/" + crypt_mapper_name + ".key"
    zymkey.client.lock(key, locked_key_fn)
    # Format the LUKS dm-crypt volume, feeding the key on stdin ("-")
    print("Formatting LUKS volume")
    cmd = "cryptsetup -q -v luksFormat " + dst_part + " -"
    p = subprocess.Popen(cmd.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p.communicate(input=key)
    p.wait()
    # Open the dm-crypt volume
    print("Opening LUKS volume")
    cmd = "cryptsetup luksOpen " + dst_part + " " + crypt_mapper_name + " --key-file=-"
    p = subprocess.Popen(cmd.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p.communicate(input=key)
    p.wait()
    # Create the ext4 file system on the dm-crypt volume
    print("Creating ext4 file system")
    cmd = "mkfs.ext4 /dev/mapper/" + crypt_mapper_name
    subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    # Mount the destination volume now
    dst_mnt = "/mnt/" + crypt_mapper_name
    if not os.path.exists(dst_mnt):
        os.makedirs(dst_mnt)
    cmd = "mount /dev/mapper/" + crypt_mapper_name + " " + dst_mnt
    subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    # If a source partition was specified, mount the partition now and
    # designate the mount point as the source path
    src_part = None
    if src_path and src_is_partition:
        src_part = src_path
        src_path = "/mnt/src_vol"
        # Mount the source partition
        if not os.path.exists(src_path):
            os.makedirs(src_path)
        cmd = "mount " + src_part + " " + src_path
        subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    # If a source was specified, copy the source files to the
    # destination now
    if src_path:
        if os.path.isfile(src_path):
            # Source path must be tarfile...extract now
            print("Unpacking root file system archive")
            bar = ShadyBar(max=100, width=50, suffix="%(percent).1f%% [%(elapsed)ds]")
            unpack_tar(src_path, dst_mnt, bar)
        else:
            # Copy with rsync
            print("Copying source files")
            cmd = "rsync -axHAX --info=progress2 " + src_path + "/ " + dst_mnt
            subprocess.call(cmd.split(), stderr=subprocess.PIPE)
        # If specified, erase the source volume
        # bugfix: was args.erase_src -- a NameError whenever this function
        # is called as a library (args only exists in the __main__ block).
        if erase_src:
            print("Erasing source partition")
            shutil.rmtree(src_path)
    # Finally, if a source partition was specified, unmount it and remove
    # the temporary mount point
    if src_part:
        cmd = "umount " + src_part
        subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
        os.rmdir(src_path)
    # Zero out the remaining space on the destination if so configured
    if zero_dst:
        print("Zeroing out destination")
        # Write as many zeros as we can; dd failing when the volume fills
        # up is expected, hence the bare try/except.
        cmd = "dd if=/dev/zero of=" + dst_mnt + "/bigzero " + "bs=1M conv=fsync"
        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
        except:
            pass
        # Flush the file systems
        subprocess.call("sync")
        # Remove the big zero file
        os.remove(dst_mnt + "/bigzero")
    # Add the mount entry to fstab and crypttab if specified
    if mnt_entry:
        mapper_fn = "/dev/mapper/" + crypt_mapper_name
        fstab_path = mnt_cfg_base_path + "/fstab"
        crypttab_path = mnt_cfg_base_path + "/crypttab"
        # Add new volume to end of fstab
        # First, create fstab if it isn't already present
        open(fstab_path, "a").close()
        with open(fstab_path, "r") as rfd:
            lines = rfd.readlines()
        # Drop any stale entries for this mapper or mount point.
        me_str = " " + mnt_entry + " "
        lines = [line for line in lines if not line.startswith(mapper_fn) and not me_str in line]
        lines.append(mapper_fn + " " + mnt_entry + " ext4 defaults,noatime 0 1\n")
        with open(fstab_path, "w") as wfd:
            wfd.writelines(lines)
        # Add new entry to crypttab
        # First, create crypttab if it isn't already present
        open(crypttab_path, "a").close()
        with open(crypttab_path, "r") as rfd:
            lines = rfd.readlines()
        lines = [line for line in lines if not line.startswith(crypt_mapper_name)]
        lines.append(crypt_mapper_name + "\t" + dst_part + "\t" + locked_key_fn + "\tluks,keyscript=/lib/cryptsetup/scripts/zk_get_key\n")
        with open(crypttab_path, "w") as wfd:
            wfd.writelines(lines)
    # Sync up the file system
    subprocess.check_output("sync", stderr=subprocess.PIPE)
    # Cleanly unmount everything
    cmd = "umount " + dst_mnt
    subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
    os.rmdir(dst_mnt)
    cmd = "cryptsetup luksClose " + crypt_mapper_name
    subprocess.check_output(cmd.split(), stderr=subprocess.PIPE)
if __name__ == "__main__":
    # If we were called from the command line, parse the arguments
    parser = argparse.ArgumentParser(description="Create a LUKS dm-crypt volume with ext4 file system that is keyed and locked up using Zymkey."
                                                 "Optionally, copy a source partition to the destination.")
    parser.add_argument("-d", "--dst-partition", required=True, help="Specify the destination partition.")
    parser.add_argument("-s", "--src-path", help="Specify a source path to copy to the destination. -s and -p can't "
                                                 "be specified at the same time.")
    parser.add_argument("-p", "--src-partition", help="Specify a source partition to copy to the destination. -s and -p "
                                                      "can't be specified at the same time.")
    parser.add_argument("-c", "--force-creation", action="store_true", help="Forces creation of destination partition even if it already "
                                                                            "exists.")
    parser.add_argument("-m", "--match-src-size", action="store_true", help="Forces the destination partition to be the same size as the "
                                                                            "source partition.")
    parser.add_argument("-i", "--dst_size", help="Creates the destination partition with specific size. This "
                                                 "value can be specified in the form \{size\}K/M/G. Example: "
                                                 "\"-i 10G\" will create a partition of 10 gigabytes.")
    parser.add_argument("-n", "--crypt-mapper-name", help="Specify a different crypto file system mapper name for the "
                                                          "dm-crypt volume. Defaults to cryptfs.")
    parser.add_argument("-e", "--erase-src", action="store_true", help="Erase source when after the copy from source to destination "
                                                                       "completes.")
    parser.add_argument("-a", "--add-mnt-entry", help="Add entries to <mnt-entry-base-path>/etc/fstab and "
                                                      "<mnt-entry-base-path>/etc/crypttab for the new volume. Example: "
                                                      "\"-a /mnt/customer_data\" will add an entry to the end of "
                                                      "<mnt-entry-base-path>/etc/fstab.")
    parser.add_argument("-b", "--mnt-cfg-base-path", help="If \"-a\" (--add-mnt-entry) is specified, this argument specifies "
                                                          "the base path where fstab and crypttab are to be found. Defaults "
                                                          "to \"/etc\".")
    parser.add_argument("-z", "--zero-dst", action="store_true", help="Fill the destination partition with zeros. This has the effect of "
                                                                      "writing a random sequence on remaining partition space whcih is "
                                                                      "considered a best practice.")
    args = parser.parse_args()
    if args.src_path and args.src_partition:
        print("ERROR: source path and source partition (-s and -p) specified at the same time. Only one may be specified.")
        parser.print_help(sys.stderr)
        exit()
    # Determine the mapper name
    crypt_mapper_name = "cryptfs"
    if args.crypt_mapper_name:
        crypt_mapper_name = args.crypt_mapper_name
    # Resolve the source: either a plain path or a partition.
    src_path = args.src_path
    src_is_partition = False
    if args.src_partition:
        src_path = args.src_partition
        src_is_partition = True
    # Work out the requested destination size.
    # bugfix: the original parsed the size string unconditionally, raising
    # AttributeError when no size was given (None) and ValueError for the
    # "match" sentinel.
    dst_size = args.dst_size
    if not dst_size and args.match_src_size:
        dst_size = "match"
    if dst_size is None:
        dst_size = 0
    elif dst_size != "match":
        dst_sz_base = dst_size.rstrip("KMG")
        mult = 1
        if dst_size.endswith("K"):
            mult = 1024
        elif dst_size.endswith("M"):
            mult = 1024 * 1024
        elif dst_size.endswith("G"):
            mult = 1024 * 1024 * 1024
        if mult != 1:
            dst_size = int(dst_sz_base) * mult
        else:
            dst_size = int(dst_size)
        # bugfix: do_fdisk joins its commands with "\n", so the size must
        # be an fdisk size string (as the "match" path produces), not an int.
        dst_size = ("+" + str(dst_size // 1024) + "K") if dst_size else ""
    # bugfix: arguments were passed positionally without dst_part_num, so
    # every value landed one parameter to the left (src_path was consumed
    # as dst_part_num, etc.).  Keyword arguments make the mapping explicit.
    create_zk_crypt_vol(args.dst_partition,
                        src_path=src_path,
                        src_is_partition=src_is_partition,
                        force_creation=args.force_creation,
                        dst_size=dst_size,
                        crypt_mapper_name=crypt_mapper_name,
                        erase_src=args.erase_src,
                        zero_dst=args.zero_dst,
                        mnt_entry=args.add_mnt_entry,
                        # bugfix: default to /etc when -b is omitted (None)
                        mnt_cfg_base_path=args.mnt_cfg_base_path or "/etc")
    print("Done.")
# Zookeeper Node Monitoring Daemon
[](https://travis-ci.org/Nextdoor/zkmonitor)
[](https://pypi.python.org/pypi/zk\_monitor)
[](https://pypi.python.org/pypi/zk\_monitor)
This is a simple daemon for monitoring particular Zookeeper nodes for
compliance with a given set of specifications (ie, minimum number of
registered nodes). In the event that a path changes and becomes out of
spec, (too few nodes, for example), an alert is fired off to let you know.
## Clustered Design
*zk_monitor* is designed to operate in clustered mode with multiple redundant
agents running on multiple servers. The agents talk to each other through
Zookeeper using a common path and a series of locks/znodes. You can run as
many agents as you want, but only one will ever handle sending off alerts.
## Configuration
Most of the connection and *zk_monitor* specific settings are managed via
CLI arguments:
$ python runserver.py --help
Usage: runserver.py <options>
Options:
--version show program's version number and exit
-h, --help show this help message and exit
-z ZOOKEEPER, --zookeeper=ZOOKEEPER
Zookeeper Server (def: localhost:2181)
--zookeeper_user=ZOOKEEPER_USER
Zookeeper ACL Username
--zookeeper_pass=ZOOKEEPER_PASS
Zookeeper ACL Password
-c CLUSTER_NAME, --cluster_name=CLUSTER_NAME
Unique cluster name (ie, prod-zookeeper-monitor)
--cluster_prefix=CLUSTER_PREFIX
Prefix path in Zookeeper for all zk_monitor clusters
-f FILE, --file=FILE Path to YAML file with znodes to monitor.
-p PORT, --port=PORT Port to listen to (def: 8080)
-l LEVEL, --level=LEVEL
Set logging level (INFO|WARN|DEBUG|ERROR)
-s SYSLOG, --syslog=SYSLOG
Log to syslog. Supply facility name. (ie "local0")
The list of paths that you want to monitor are supplied via a YAML
formatted configuration file. Here's an example file:
/services/foo/min_1:
alerter:
email: [email protected]
children: 1
/services/foo/min_0:
alerter:
email: [email protected]
children: 0
/services/foo/min_3:
children: 3
### Alerter Configuration
In the above example, you'll see that two of the paths have an 'alerter/email'
parameter configured. With this in place, any path spec violations will result
in an email fired off to that address. The third path does not have any
settings, which means that no alert will actually be sent off in the event of
a spec violation.
### Simple Execution
$ python runserver.py -l INFO -z localhost:2181 -f test.yaml
2014-05-31 16:20:25,862 [35661] [nd_service_registry] [__init__]: (INFO) Initializing ServiceRegistry object
2014-05-31 16:20:25,863 [35661] [nd_service_registry] [_connect]: (INFO) Connecting to Zookeeper Service (localhost:2181)
2014-05-31 16:20:25,867 [35661] [nd_service_registry] [_state_listener]: (INFO) Zookeeper connection state changed: CONNECTED
2014-05-31 16:20:25,868 [35661] [nd_service_registry] [__init__]: (INFO) Initialization Done!
2014-05-31 16:20:25,868 [35661] [zk_monitor.monitor] [_stateListener]: (INFO) Service registry connection state: True
## REST Interface
Though not necessary for alerting purposes, you can access a JSON-formatted
REST interface for intentionally inspecting the status of the app and
the current compliance of your watched Zookeeper nodes.
### /status
This page provides a simple live status of the app and its monitors.
$ curl --silent http://localhost:8080/status
{
"monitor": {
"alerter": {
"alerting": true
},
"compliance": {
"/services/foo/min_0": true,
"/services/foo/min_1": "Found children (0) less than minimum (1)",
"/services/foo/min_3": "Found children (2) less than minimum (3)"
}
},
"version": "0.0.1",
"zookeeper": {
"connected": true
}
}
## Development
### Class/Object Architecture
runserver
|
+-- nd_service_registry.KazooServiceRegistry
| | Connection to Zookeeper
|
+-- alert.Dispatcher
| | Handles dispatching of all alerts to Alerter objects
| |
| +-- alerts.email.EmailAlerter
| | | Sends Email-Based Alerts Asynchronously
| | |
| | +-- tornadomail.backends.smtp.EmailBackend()
| |
| +-- alerts.rest.HipChatAlerter
| | Sends Hipchat Alerts Asynchronously
|
+-- cluster.State
| | Handles node-to-node communication via Zookeeper
| |
| +-- Registers /zk_monitor/agent/<agent name>
|
+-- monitor.Monitor
| | Monitors all configured paths
| |
| +-- Obj Ref -> alerts.Dispatcher
| | Alerts are fired off to the Dispatcher, the Dispatcher
| | handles determining whether or not the alert is a dup, a shift
| | from in-compliance to out-of-compliance (or vice versa),
| | and which Alerters to fire off (Hipchat, Email, etc).
|
+-- tornado.Application
| | Handles all web requests
| |
| +-- web.app.getApplication()
| |
| +-- root.RootHandler
| | URL: /
| |
| +-- state.StateHandler
| | URL: /state
| | Obj Ref -> nd_service_registry.KazooServiceRegistry
| | Obj Ref -> monitor.Monitor
### Setup
# Create a dedicated Python virtual environment and source it
virtualenv --no-site-packages .venv
unset PYTHONPATH
source .venv/bin/activate
# Install the dependencies
make build
# Run the tests
make test
### Postfix on Mac OSX
If you want to develop on a Mac OSX host, you need to enable email the
*postfix* daemon on your computer. Here's how!
Modify */System/Library/LaunchDaemons/org.postfix.master.plist*:
--- /System/Library/LaunchDaemons/org.postfix.master.plist.bak 2014-06-02 11:45:24.000000000 -0700
+++ /System/Library/LaunchDaemons/org.postfix.master.plist 2014-06-02 11:47:07.000000000 -0700
@@ -9,8 +9,6 @@
<key>ProgramArguments</key>
<array>
<string>master</string>
- <string>-e</string>
- <string>60</string>
</array>
<key>QueueDirectories</key>
<array>
@@ -18,5 +16,8 @@
</array>
<key>AbandonProcessGroup</key>
<true/>
+
+ <key>KeepAlive</key>
+ <true/>
</dict>
</plist>
Restart the service:
cd /System/Library/LaunchDaemons
sudo launchctl unload org.postfix.master.plist
sudo launchctl load org.postfix.master.plist
| zk_monitor | /zk_monitor-0.1.2.tar.gz/zk_monitor-0.1.2/README.md | README.md |
[](https://travis-ci.org/Nextdoor/zkmonitor)
[](https://pypi.python.org/pypi/zk\_monitor)
[](https://pypi.python.org/pypi/zk\_monitor)
This is a simple daemon for monitoring particular Zookeeper nodes for
compliance with a given set of specifications (ie, minimum number of
registered nodes). In the event that a path changes and becomes out of
spec, (too few nodes, for example), an alert is fired off to let you know.
## Clustered Design
*zk_monitor* is designed to operate in clustered mode with multiple redundant
agents running on multiple servers. The agents talk to each other through
Zookeeper using a common path and a series of locks/znodes. You can run as
many agents as you want, but only one will ever handle sending off alerts.
## Configuration
Most of the connection and *zk_monitor* specific settings are managed via
CLI arguments:
$ python runserver.py --help
Usage: runserver.py <options>
Options:
--version show program's version number and exit
-h, --help show this help message and exit
-z ZOOKEEPER, --zookeeper=ZOOKEEPER
Zookeeper Server (def: localhost:2181)
--zookeeper_user=ZOOKEEPER_USER
Zookeeper ACL Username
--zookeeper_pass=ZOOKEEPER_PASS
Zookeeper ACL Password
-c CLUSTER_NAME, --cluster_name=CLUSTER_NAME
Unique cluster name (ie, prod-zookeeper-monitor)
--cluster_prefix=CLUSTER_PREFIX
Prefix path in Zookeeper for all zk_monitor clusters
-f FILE, --file=FILE Path to YAML file with znodes to monitor.
-p PORT, --port=PORT Port to listen to (def: 8080)
-l LEVEL, --level=LEVEL
Set logging level (INFO|WARN|DEBUG|ERROR)
-s SYSLOG, --syslog=SYSLOG
Log to syslog. Supply facility name. (ie "local0")
The list of paths that you want to monitor are supplied via a YAML
formatted configuration file. Here's an example file:
/services/foo/min_1:
alerter:
email: [email protected]
children: 1
/services/foo/min_0:
alerter:
email: [email protected]
children: 0
/services/foo/min_3:
children: 3
### Alerter Configuration
In the above example, you'll see that two of the paths have an 'alerter/email'
parameter configured. With this in place, any path spec violations will result
in an email fired off to that address. The third path does not have any
settings, which means that no alert will actually be sent off in the event of
a spec violation.
### Simple Execution
$ python runserver.py -l INFO -z localhost:2181 -f test.yaml
2014-05-31 16:20:25,862 [35661] [nd_service_registry] [__init__]: (INFO) Initializing ServiceRegistry object
2014-05-31 16:20:25,863 [35661] [nd_service_registry] [_connect]: (INFO) Connecting to Zookeeper Service (localhost:2181)
2014-05-31 16:20:25,867 [35661] [nd_service_registry] [_state_listener]: (INFO) Zookeeper connection state changed: CONNECTED
2014-05-31 16:20:25,868 [35661] [nd_service_registry] [__init__]: (INFO) Initialization Done!
2014-05-31 16:20:25,868 [35661] [zk_monitor.monitor] [_stateListener]: (INFO) Service registry connection state: True
## REST Interface
Though not necessary for alerting purposes, you can access a JSON-formatted
REST interface for intentionally inspecting the status of the app and
the current compliance of your watched Zookeeper nodes.
### /status
This page provides a simple live status of the app and its monitors.
$ curl --silent http://localhost:8080/status
{
"monitor": {
"alerter": {
"alerting": true
},
"compliance": {
"/services/foo/min_0": true,
"/services/foo/min_1": "Found children (0) less than minimum (1)",
"/services/foo/min_3": "Found children (2) less than minimum (3)"
}
},
"version": "0.0.1",
"zookeeper": {
"connected": true
}
}
## Development
### Class/Object Architecture
runserver
|
+-- nd_service_registry.KazooServiceRegistry
| | Connection to Zookeeper
|
+-- alert.Dispatcher
| | Handles dispatching of all alerts to Alerter objects
| |
| +-- alerts.email.EmailAlerter
| | | Sends Email-Based Alerts Asynchronously
| | |
| | +-- tornadomail.backends.smtp.EmailBackend()
| |
| +-- alerts.rest.HipChatAlerter
| | Sends Hipchat Alerts Asynchronously
|
+-- cluster.State
| | Handles node-to-node communication via Zookeeper
| |
| +-- Registers /zk_monitor/agent/<agent name>
|
+-- monitor.Monitor
| | Monitors all configured paths
| |
| +-- Obj Ref -> alerts.Dispatcher
| | Alerts are fired off to the Dispatcher, the Dispatcher
| | handles determining whether or not the alert is a dup, a shift
| | from in-compliance to out-of-compliance (or vice versa),
| | and which Alerters to fire off (Hipchat, Email, etc).
|
+-- tornado.Application
| | Handles all web requests
| |
| +-- web.app.getApplication()
| |
| +-- root.RootHandler
| | URL: /
| |
| +-- state.StateHandler
| | URL: /state
| | Obj Ref -> nd_service_registry.KazooServiceRegistry
| | Obj Ref -> monitor.Monitor
### Setup
# Create a dedicated Python virtual environment and source it
virtualenv --no-site-packages .venv
unset PYTHONPATH
source .venv/bin/activate
# Install the dependencies
make build
# Run the tests
make test
### Postfix on Mac OSX
If you want to develop on a Mac OSX host, you need to enable email the
*postfix* daemon on your computer. Here's how!
Modify */System/Library/LaunchDaemons/org.postfix.master.plist*:
--- /System/Library/LaunchDaemons/org.postfix.master.plist.bak 2014-06-02 11:45:24.000000000 -0700
+++ /System/Library/LaunchDaemons/org.postfix.master.plist 2014-06-02 11:47:07.000000000 -0700
@@ -9,8 +9,6 @@
<key>ProgramArguments</key>
<array>
<string>master</string>
- <string>-e</string>
- <string>60</string>
</array>
<key>QueueDirectories</key>
<array>
@@ -18,5 +16,8 @@
</array>
<key>AbandonProcessGroup</key>
<true/>
+
+ <key>KeepAlive</key>
+ <true/>
</dict>
</plist>
Restart the service:
cd /System/Library/LaunchDaemons
sudo launchctl unload org.postfix.master.plist
sudo launchctl load org.postfix.master.plist | zk_monitor | /zk_monitor-0.1.2.tar.gz/zk_monitor-0.1.2/README | README |
from abc import ABC
from c3 import consts
class ControlDeviceBase(ABC):
    """A ControlDevice is a binary message of 5 bytes sent to the C3 access panel.

    It changes the states of the doors, auxiliary relays and alarms.
    All multibyte values are stored as little-endian.

      Byte        0  1  2  3  4
                 01:01:01:c8:00
      Operation:  |
                 01 => 1 (1: output, 2: cancel alarm, 3: restart device,
                          4: enable/disable normal open state)
      Param 1, Param 2, Param 3, Param 4 follow.

    The meaning of the parameters depends on the operation code.
    Param 4 is reserved for future use (defaults to 0).

    Operation 1: Output operation
      Param 1: Door number or auxiliary output number
      Param 2: The address type of output operation (1: Door output, 2: Auxiliary output)
      Param 3: Duration of the open operation, only for address type = 1 (door output).
               0: close, 255: normal open state, 1~254: normal open duration
    Operation 2: Cancel alarm
      Param 1: 0 (null)
      Param 2: 0 (null)
      Param 3: 0 (null)
    Operation 3: Restart device
      Param 1: 0 (null)
      Param 2: 0 (null)
      Param 3: 0 (null)
    Operation 4: Enable/disable normal open state
      Param 1: Door number
      Param 2: Enable / disable (0: disable, 1: enable)
      Param 3: 0 (null)
    """
    def __init__(self, operation: consts.ControlOperation, *args: int):
        """Constructor to initialize base class.
        The param1, param2, param3 and param4 values are provided as variable
        arguments; missing parameters default to None."""
        self.operation: consts.ControlOperation = operation
        # Parameters may legitimately be absent, hence int | None
        self.param1: int | None = args[0] if len(args) > 0 else None
        self.param2: int | None = args[1] if len(args) > 1 else None
        self.param3: int | None = args[2] if len(args) > 2 else None
        self.param4: int | None = args[3] if len(args) > 3 else None

    @classmethod
    def from_bytes(cls, data: bytes):
        """Create an instance from a 5-byte control message.

        NOTE(review): this deliberately constructs ControlDeviceBase rather
        than `cls` — the subclasses take different constructor arguments, so
        `cls(*data)` would break when called on a subclass. The operation byte
        is stored as a raw int, not converted to consts.ControlOperation —
        confirm whether conversion is desired."""
        return ControlDeviceBase(*data)

    def to_bytes(self) -> bytes:
        """Serialize to the 5-byte wire format; unset (None) parameters become 0."""
        return bytes([self.operation, self.param1 or 0, self.param2 or 0, self.param3 or 0, self.param4 or 0])

    def __repr__(self):
        # One line per field, joined with CR/LF
        return "\r\n".join([
            "%-12s %-10s (%s)" % ("operation", self.operation, repr(self.operation)),
            "%-12s %-10s" % ("param1", self.param1),
            "%-12s %-10s" % ("param2", self.param2),
            "%-12s %-10s" % ("param3", self.param3),
            "%-12s %-10s" % ("param4", self.param4),
        ])
class ControlDeviceOutput(ControlDeviceBase):
    """Control message that operates a door or auxiliary output (operation 1)."""
    def __init__(self, output_number: int, address: consts.ControlOutputAddress, duration: int):
        """output_number: door or auxiliary output number;
        address: output address type (door or auxiliary output);
        duration: 0 = close, 255 = normal open state, 1~254 = open duration."""
        ControlDeviceBase.__init__(self, consts.ControlOperation.OUTPUT, output_number, address, duration)

    @property
    def output_number(self) -> int:
        """Door or auxiliary output number (param1)."""
        return self.param1

    @property
    def address(self) -> int:
        """Output address type (param2)."""
        return self.param2

    @property
    def duration(self) -> int:
        """Open duration (param3)."""
        return self.param3

    def __repr__(self):
        return "\r\n".join([
            # Fix: the original omitted the comma after the heading, which
            # fused it with the operation line via implicit string
            # concatenation, dropping a line from the output.
            "ControlDevice Output Operation:",
            "%-12s %-10s (%s)" % ("operation", self.operation, repr(self.operation)),
            "%-12s %-10s (Door/Aux Number)" % ("param1", self.output_number),
            "%-12s %-10s %s" % ("param2", self.param2, repr(consts.ControlOutputAddress(self.address))),
            "%-12s %-10s (Duration)" % ("param3", self.duration),
        ])
class ControlDeviceCancelAlarms(ControlDeviceBase):
    """Control message that cancels any active alarm on the panel (operation 2)."""
    def __init__(self):
        # Cancel-alarm takes no parameters beyond the operation code
        super().__init__(consts.ControlOperation.CANCEL_ALARM)

    def __repr__(self):
        header = "ControlDevice Cancel Alarm Operation:"
        return header + "\r\n" + super().__repr__()
class ControlDeviceNormalOpenStateEnable(ControlDeviceBase):
    """Control message that enables/disables a door's normal open state (operation 4)."""
    def __init__(self, door_number: int, enable: bool):
        """door_number: door to change; enable: True to enable, False to
        disable (serialized as 1/0 on the wire)."""
        ControlDeviceBase.__init__(self, consts.ControlOperation.ENDIS_NO_STATE, door_number, enable)

    @property
    def door_number(self) -> int:
        """Door number (param1)."""
        return self.param1

    @property
    def enabled(self) -> bool:
        """True when the normal open state is being enabled (param2 != 0)."""
        return bool(self.param2)

    def __repr__(self):
        return "\r\n".join([
            # Fix: the original omitted the comma after the heading, which
            # fused it with the operation line via implicit string
            # concatenation, dropping a line from the output.
            "ControlDevice Normal Open State Operation:",
            "%-12s %-10s (%s)" % ("operation", self.operation, repr(self.operation)),
            "%-12s %-10s (Door Number)" % ("param1", self.door_number),
            "%-12s %-10s %s" % ("param2", self.param2, "Enable" if self.enabled else "Disable"),
        ])
class ControlDeviceRestart(ControlDeviceBase):
    """Control message that restarts the access panel (operation 3)."""
    def __init__(self):
        # Restart takes no parameters beyond the operation code
        super().__init__(consts.ControlOperation.RESTART_DEVICE)

    def __repr__(self):
        parts = ["ControlDevice Restart Operation:", super().__repr__()]
        return "\r\n".join(parts)
from __future__ import annotations
from abc import ABC, abstractmethod
from c3 import consts
from c3.utils import C3DateTime
class RTLogRecord(ABC):
    """Base type for the two 16-byte realtime log record variants sent by the
    C3 panel: door/alarm status records and event records."""
    @abstractmethod
    def is_door_alarm(self) -> bool:
        """Return True if this record is a door/alarm status record."""
        ...

    @abstractmethod
    def is_event(self) -> bool:
        """Return True if this record is a realtime event record."""
        ...
class DoorAlarmStatusRecord(RTLogRecord):
    """Realtime Log record for a door and alarm status.

    An RTLog is a binary message of 16 bytes sent by the C3 access panel.
    If the value of byte 10 (the event type) is 255, the RTLog is a
    Door/Alarm Realtime Status; otherwise it is a Realtime Event.

    Door/Alarm Realtime Status record (all multibyte values little-endian):

      Byte:  0  1  2  3  4  5  6  7  8  9  A  B  C  D  E  F
            01:4f:86:00:99:92:98:00:04:01:00:00:a5:ad:ad:21
      Alarm status (byte 4-7):  99:92:98:00 => (big endian:) 00989299 = 9999001
      DSS status (byte 0-3):    01:4f:86:00 => (big endian:) 00864f01 = 8802049
      Verified (byte 8):        04
      Unused (byte 9):          01
      EventType (byte 10):      00
      Unused (byte 11):         00
      Time_second (byte 12-15): a5:ad:ad:21 => (big endian:) 21ADADA5 =
                                2017-7-30 16:51:49
    """
    def __init__(self):
        # One raw status byte per door (4 doors)
        self.alarm_status = bytes(4)
        # One raw door-sensor status byte per door (4 doors)
        self.dss_status = bytes(4)
        self.verified: consts.VerificationMode = consts.VerificationMode.NONE
        self.event_type: consts.EventType = consts.EventType.NA
        # Becomes a C3DateTime after from_bytes(); 0 until then
        self.time_second = 0

    @classmethod
    def from_bytes(cls, data: bytes):
        """Decode a 16-byte door/alarm status payload into a record."""
        record = DoorAlarmStatusRecord()
        record.alarm_status = bytes(data[0:4])
        record.dss_status = bytes(data[4:8])
        # NOTE(review): the layout above documents 'verified' at byte 8 (and
        # EventRecord reads byte 8), yet this reads byte 9; the doc also labels
        # alarm status as bytes 4-7 while the code slices 0-4. Behavior kept
        # as-is — confirm against the C3 protocol before changing.
        try:
            record.verified = consts.VerificationMode(data[9])
        except ValueError:
            # Unknown verification mode codes map to OTHER
            record.verified = consts.VerificationMode.OTHER
        try:
            record.event_type = consts.EventType(data[10])
        except ValueError:
            record.event_type = consts.EventType.UNKNOWN_UNSUPPORTED
        record.time_second = C3DateTime.from_value(int.from_bytes(data[12:16], byteorder="little"))
        return record

    def is_door_alarm(self) -> bool:
        """This record type carries door/alarm status."""
        return True

    def is_event(self) -> bool:
        """This record type is not a realtime event."""
        return False

    def get_alarms(self, door_nr: int) -> list[consts.AlarmStatus]:
        """Return the distinct alarm conditions for door `door_nr` (1-based),
        or for all doors when `door_nr` is falsy (0/None)."""
        alarms = []
        # Fix: scan all four door status bytes — the original iterated
        # range(0, 3), so alarms on door 4 were never reported even though
        # alarm_status holds 4 bytes and __repr__ iterates all 4 doors.
        for i in range(0, 4):
            if i + 1 == door_nr or not door_nr:
                if self.alarm_status[i] & consts.AlarmStatus.ALARM:
                    if alarms.count(consts.AlarmStatus.ALARM) == 0:
                        alarms.append(consts.AlarmStatus.ALARM)
                elif self.alarm_status[i] & consts.AlarmStatus.DOOR_OPEN_TIMEOUT:
                    if alarms.count(consts.AlarmStatus.DOOR_OPEN_TIMEOUT) == 0:
                        alarms.append(consts.AlarmStatus.DOOR_OPEN_TIMEOUT)
        return alarms

    def has_alarm(self, door_nr: int, status: consts.AlarmStatus = None):
        """True when door `door_nr` (1-based) has the given alarm `status`
        set, or any alarm bit at all when `status` is None."""
        return ((self.alarm_status[door_nr-1] & (status or 0)) == status) or \
               ((self.alarm_status[door_nr-1] > 0) and status is None)

    def door_sensor_status(self, door_nr: int) -> consts.InOutStatus:
        """Door sensor (open/closed) state for door `door_nr` (1-based);
        only the low nibble of the status byte encodes the sensor state."""
        return consts.InOutStatus(self.dss_status[door_nr - 1] & 0x0F)

    def door_is_open(self, door_nr: int):
        """Return True/False for an open/closed door, or None when the sensor
        state is unknown."""
        is_open = None
        if self.door_sensor_status(door_nr) == consts.InOutStatus.OPEN:
            is_open = True
        elif self.door_sensor_status(door_nr) == consts.InOutStatus.CLOSED:
            is_open = False
        return is_open

    def __repr__(self):
        repr_arr = ["Door/Alarm Realtime Status:",
                    "%-12s %-10s" % ("time_second", self.time_second),
                    "%-12s %-10s %s" % ("event_type", self.event_type, repr(self.event_type)),
                    "%-12s %-10s %s" % ("verified", self.verified, repr(self.verified)),
                    "%-12s %-10s" % ("alarm_status", self.alarm_status.hex())]
        for i in range(0, 4):
            for status in consts.AlarmStatus:
                if status != consts.AlarmStatus.NONE:
                    if self.alarm_status[i] & status == status:
                        # Fix: print 1-based door numbers, consistent with the
                        # dss_status loop below (the original printed 0-based
                        # numbers here only).
                        repr_arr.append("  Door %-2s %-4s %s" % (i + 1, status, repr(status)))
        repr_arr.append("%-12s %-10s" % ("dss_status", self.dss_status.hex()))
        for i in range(0, 4):
            repr_arr.append("  Door %-2s %-4s %s" % (i + 1, self.dss_status[i],
                                                     repr(consts.InOutStatus(self.dss_status[i] & 0x0F))))
        return "\r\n".join(repr_arr)
class EventRecord(RTLogRecord):
    """Realtime Event record.

    All multibyte values are stored as little-endian.

      Byte:  0  1  2  3  4  5  6  7  8  9  A  B  C  D  E  F
            01:4f:86:00:99:92:98:00:04:01:00:00:a5:ad:ad:21
      Cardno (byte 4-7):        99:92:98:00 => (big endian:) 00989299 = 9999001
      Pin (byte 0-3):           01:4f:86:00 => (big endian:) 00864f01 = 8802049
      Verified (byte 8):        04
      DoorID (byte 9):          01
      EventType (byte 10):      00
      InOutState (byte 11):     00
      Time_second (byte 12-15): a5:ad:ad:21 => (big endian:) 21ADADA5 = 2017-7-30 16:51:49
    """
    def __init__(self):
        # Field defaults prior to decoding a payload
        self.card_no = 0
        self.pin = 0
        self.verified: consts.VerificationMode = consts.VerificationMode.NONE
        self.door_id = 0
        self.event_type: consts.EventType = consts.EventType.NA
        self.in_out_state: consts.InOutDirection = consts.InOutDirection.NONE
        self.time_second = 0

    @classmethod
    def from_bytes(cls, data: bytes):
        """Decode a 16-byte realtime event payload into an EventRecord."""
        def to_enum(enum_cls, raw, fallback):
            # Map raw values the enum does not know onto the fallback member
            try:
                return enum_cls(raw)
            except ValueError:
                return fallback

        record = EventRecord()
        record.card_no = int.from_bytes(data[0:4], "little")
        record.pin = int.from_bytes(data[4:8], "little")
        record.verified = to_enum(consts.VerificationMode, data[8],
                                  consts.VerificationMode.OTHER)
        record.door_id = data[9]
        record.event_type = to_enum(consts.EventType, data[10],
                                    consts.EventType.UNKNOWN_UNSUPPORTED)
        record.in_out_state = to_enum(consts.InOutDirection, data[11],
                                      consts.InOutDirection.UNKNOWN_UNSUPPORTED)
        record.time_second = C3DateTime.from_value(int.from_bytes(data[12:16], "little"))
        return record

    def is_door_alarm(self) -> bool:
        """An event record is not a door/alarm status record."""
        return False

    def is_event(self) -> bool:
        """This record type is a realtime event."""
        return True

    def __repr__(self):
        lines = ["Realtime Event:",
                 "%-12s %-10s" % ("time_second", self.time_second)]
        for name, value in (("event_type", self.event_type),
                            ("in_out_state", self.in_out_state),
                            ("verified", self.verified)):
            lines.append("%-12s %-10s %s" % (name, value, repr(value)))
        lines.append("%-12s %-10s" % ("card_no", self.card_no))
        # pin is intentionally omitted from the representation
        lines.append("%-12s %-10s" % ("door_id", self.door_id))
        return "\r\n".join(lines)
from enum import IntEnum, unique
from collections import namedtuple
# Defaults
C3_PORT_DEFAULT = 4370          # TCP port the C3 panel listens on
C3_PORT_BROADCAST = 65535       # UDP port used for discovery broadcasts
# Protocol commands
C3_MESSAGE_START = 0xAA         # First byte of every protocol message
C3_MESSAGE_END = 0x55           # Last byte of every protocol message
C3_PROTOCOL_VERSION = 0x01      # Protocol version byte sent in each header
C3_DISCOVERY_MESSAGE = "CallSecurityDevice"  # Payload broadcast during discovery
class Command(IntEnum):
    """Enumeration of supported device interaction commands"""
    DISCOVER = 0x14               # UDP broadcast discovery
    CONNECT_SESSION = 0x76        # Start a session-based connection
    CONNECT_SESSION_LESS = 0x01   # Start a session-less connection
    DISCONNECT = 0x02             # End the connection/session
    GETPARAM = 0x04               # Read device parameters
    DATATABLE_CFG = 0x06          # Data table configuration (name-based; verify against protocol docs)
    CONTROL = 0x05                # Output/alarm/restart control operations
    RTLOG = 0x0B                  # Poll realtime log records
C3_REPLY_OK = 0xC8     # Reply command code: request succeeded
C3_REPLY_ERROR = 0xC9  # Reply command code: request failed; payload carries an error code
# Known error codes (signed byte in the reply payload) to human-readable text
Errors = {
    -13: "Command error: This command is not available",
    -14: "The communication password is not correct",
}
# NOTE(review): @unique here checks only this (member-less) base class; the
# subclasses below are not uniqueness-checked — confirm intent.
@unique
class _IntEnumWithDescription(IntEnum):
    """IntEnum whose members also carry a human-readable description.

    Members are declared as ``NAME = value, "description"``; the numeric value
    is used for comparison/serialization, str() yields the value and repr()
    yields the description."""
    def __new__(cls, *args):
        # First tuple element is the integer value of the member
        obj = int.__new__(cls, args[0])
        obj._value_ = args[0]
        return obj
    # ignore the first param since it's already set by __new__
    def __init__(self, _: str, description: str = None):
        self._description_ = description
    def __str__(self):
        return str(self.value)
    def __repr__(self):
        return self._description_
    # this makes sure that the description is read-only
    @property
    def description(self):
        return self._description_
# Control operations
class ControlOperation(_IntEnumWithDescription):
    """Operation codes (byte 0) of a 5-byte ControlDevice message."""
    OUTPUT = 1, "Output operation (door or auxiliary)"
    CANCEL_ALARM = 2, "Cancel alarm"
    RESTART_DEVICE = 3, "Restart Device"
    ENDIS_NO_STATE = 4, "Enable/disable normal open state"
class ControlOutputAddress(_IntEnumWithDescription):
    """Address type (param 2) of a ControlDevice output operation."""
    DOOR_OUTPUT = 1, "Door output"
    AUX_OUTPUT = 2, "Auxiliary output"
# Event values
class VerificationMode(_IntEnumWithDescription):
    """Verification mode reported in a realtime log record (byte 8)."""
    NONE = 0, "None"
    FINGER = 1, "Only finger"
    PASSWORD = 3, "Only password"
    CARD = 4, "Only card"
    CARD_OR_FINGER = 6, "Card or finger"
    CARD_WITH_FINGER = 10, "Card and finger"
    CARD_WITH_PASSWORD = 11, "Card and password"
    OTHER = 200, "Others"
class EventType(_IntEnumWithDescription):
    """Event type reported in a realtime log record (byte 10).

    Value 255 (DOOR_ALARM_STATUS) marks a door/alarm status record rather
    than an event; NA and UNKNOWN_UNSUPPORTED are library-internal sentinels
    for 'not decoded yet' and 'value not in this enumeration'."""
    NA = -1, "N/A"
    NORMAL_PUNCH_OPEN = 0, "Normal Punch Open"
    PUNCH_NORMAL_OPEN_TZ = 1, "Punch during Normal Open Time Zone"
    FIRST_CARD_NORMAL_OPEN = 2, "First Card Normal Open (Punch Card)"
    MULTI_CARD_OPEN = 3, "Multi-Card Open (Punching Card)"
    EMERGENCY_PASS_OPEN = 4, "Emergency Password Open"
    OPEN_NORMAL_OPEN_TZ = 5, "Open during Normal Open Time Zone"
    LINKAGE_EVENT_TRIGGER = 6, "Linkage Event Triggered"
    CANCEL_ALARM = 7, "Cancel Alarm"
    REMOTE_OPENING = 8, "Remote Opening"
    REMOTE_CLOSING = 9, "Remote Closing"
    DISABLE_INTRADAY_NORMAL_OPEN_TZ = 10, "Disable Intraday Normal Open Time Zone"
    ENABLE_INTRADAY_NORMAL_OPEN_TZ = 11, "Enable Intraday Normal Open Time Zone"
    OPEN_AUX_OUTPUT = 12, "Open Auxiliary Output"
    CLOSE_AUX_OUTPUT = 13, "Close Auxiliary Output"
    PRESS_FINGER_OPEN = 14, "Press Fingerprint Open"
    MULTI_CARD_OPEN_FP = 15, "Multi-Card Open (Press Fingerprint)"
    FP_NORMAL_OPEN_TZ = 16, "Press Fingerprint during Normal Open Time Zone"
    CARD_FP_OPEN = 17, "Card plus Fingerprint Open"
    FIRST_CARD_NORMAL_OPEN_FP = 18, "First Card Normal Open (Press Fingerprint)"
    FIRST_CARD_NORMAL_OPEN_CARD_FP = 19, "First Card Normal Open (Card plus Fingerprint)"
    TOO_SHORT_PUNCH_INTERVAL = 20, "Too Short Punch Interval"
    DOOR_INACTIVE_TZ = 21, "Door Inactive Time Zone (Punch Card)"
    ILLEGAL_TZ = 22, "Illegal Time Zone"
    ACCESS_DENIED = 23, "Access Denied"
    ANTI_PASSBACK = 24, "Anti-Passback"
    INTERLOCK = 25, "Interlock"
    MULTI_CARD_AUTH = 26, "Multi-Card Authentication (Punching Card)"
    UNREGISTERED_CARD = 27, "Unregistered Card"
    OPENING_TIMEOUT = 28, "Opening Timeout:"
    CARD_EXPIRED = 29, "Card Expired"
    PASSWORD_ERROR = 30, "Password Error"
    TOO_SHORT_FP_INTERVAL = 31, "Too Short Fingerprint Pressing Interval"
    MULTI_CARD_AUTH_FP = 32, "Multi-Card Authentication (Press Fingerprint)"
    FP_EXPIRED = 33, "Fingerprint Expired"
    UNREGISTERED_FP = 34, "Unregistered Fingerprint"
    DOOR_INACTIVE_TZ_FP = 35, "Door Inactive Time Zone (Press Fingerprint)"
    DOOR_INACTIVE_TZ_EXIT = 36, "Door Inactive Time Zone (Exit Button)"
    FAILED_CLOSE_NORMAL_OPEN_TZ = 37, "Failed to Close during Normal Open Time Zone"
    VERIFY_TYPE_INVALID = 41, "Verify Type Invalid"
    WG_FORMAT_ERROR = 42, "WG Format Error"
    DURESS_PASSWORD_OPEN = 101, "Duress Password Open"
    OPENED_ACCIDENT = 102, "Opened Accidentally"
    DURESS_FP_OPEN = 103, "Duress Fingerprint Open"
    DOOR_OPENED_CORRECT = 200, "Door Opened Correctly"
    DOOR_CLOSED_CORRECT = 201, "Door Closed Correctly"
    EXIT_BUTTON_OPEN = 202, "Exit button Open"
    MULTI_CARD_OPEN_CARD_FP = 203, "Multi-Card Open (Card plus Fingerprint)"
    NORMAL_OPEN_TZ_OVER = 204, "Normal Open Time Zone Over"
    REMOTE_NORMAL_OPEN = 205, "Remote Normal Opening"
    DEVICE_START = 206, "Device Start"
    DOOR_OPEN_BY_SUPERUSER = 208, "Door Opened by Superuser"
    AUX_INPUT_DISCONNECT = 220, "Auxiliary Input Disconnected"
    AUX_INPUT_SHORT = 221, "Auxiliary Input Shorted"
    DOOR_ALARM_STATUS = 255, "Current door and alarm status"
    UNKNOWN_UNSUPPORTED = 999, "Unknown"
class InOutDirection(_IntEnumWithDescription):
    """Entry/exit direction of a realtime event (byte 11)."""
    ENTRY = 0, "Entry"
    EXIT = 3, "Exit"
    NONE = 2, "None"
    UNKNOWN_UNSUPPORTED = 15, "Unknown"
class AlarmStatus(_IntEnumWithDescription):
    """Per-door alarm condition bits in a door/alarm status record."""
    NONE = 0, "None"
    ALARM = 1, "Alarm"
    DOOR_OPEN_TIMEOUT = 2, "Door opening timeout"
class InOutStatus(_IntEnumWithDescription):
    """Open/closed state of a door sensor, auxiliary input or output."""
    UNKNOWN = 0, "Unknown"
    CLOSED = 1, "Closed"
    OPEN = 2, "Open"
from __future__ import annotations
import logging
import re
import socket
import threading
from dataclasses import dataclass, field
from typing import Dict, Optional
# import consts
from c3 import consts, controldevice, crc, rtlog, utils
@dataclass
class C3DeviceInfo:
    """Basic C3 panel (connection) information, obtained from discovery"""
    host: str                            # IP address or hostname of the panel
    port: int = consts.C3_PORT_DEFAULT   # TCP port of the panel
    serial_number: str | None = None     # Serial number reported by the panel
    mac: str | None = None               # MAC address reported by the panel
    device_name: str | None = None       # Device/model name reported by the panel
    firmware_version: str | None = None  # Firmware version reported by the panel
@dataclass
class C3PanelStatus:
    """C3 panel peripheral status"""
    nr_of_locks: int = 0  # Number of door locks on the panel
    nr_aux_in: int = 0    # Number of auxiliary inputs
    nr_aux_out: int = 0   # Number of auxiliary outputs
    # Last known status per door / aux input / aux output, keyed by 1-based number
    lock_status: Dict[int, consts.InOutStatus] = field(default_factory=dict)
    aux_in_status: Dict[int, consts.InOutStatus] = field(default_factory=dict)
    aux_out_status: Dict[int, consts.InOutStatus] = field(default_factory=dict)
class C3:
    """Client for a ZKTeco C3 access control panel.

    Provides UDP discovery, TCP (optionally session-based) connection
    handling, parameter retrieval, realtime log polling and control
    operations."""
    log = logging.getLogger("C3")
    log.setLevel(logging.ERROR)

    def __init__(self, host: str | C3DeviceInfo, port: int = consts.C3_PORT_DEFAULT) -> None:
        self._sock: socket.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._sock.settimeout(2)
        self._connected: bool = False
        self._session_less: bool = False
        # Session id and request sequence number used in session-based mode
        self._session_id: int = 0xFEFE
        self._request_nr: int = -258
        self._status: C3PanelStatus = C3PanelStatus()
        if isinstance(host, C3DeviceInfo):
            self._device_info: C3DeviceInfo = host
        elif isinstance(host, str):
            self._device_info: C3DeviceInfo = C3DeviceInfo(host=host, port=port or consts.C3_PORT_DEFAULT)

    @classmethod
    def _get_message_header(cls, data: bytes | bytearray) -> tuple[int | None, int]:
        """Parse the 5-byte message header.

        Returns (command, payload_size); command is None when the data is too
        short or does not start with the message start marker."""
        command = None
        data_size = 0
        if len(data) >= 5:
            if data[0] == consts.C3_MESSAGE_START:  # and data[1] == consts.C3_PROTOCOL_VERSION:
                command = data[2]
                # Payload size is little-endian; fix: the MSB weight is 256,
                # the original used 255 which mis-sized payloads >= 256 bytes.
                data_size = data[3] + (data[4] << 8)
        return command, data_size

    @classmethod
    def _get_message(cls, data: bytes | bytearray) -> bytearray:
        """Extract and CRC-verify the payload of a complete message.

        Raises ValueError when the end marker is missing or the CRC does not
        match."""
        if data[-1] == consts.C3_MESSAGE_END:
            # Get the message payload, without start, crc and end bytes
            checksum = crc.crc16(data[1:-3])
            # Fix: both CRC bytes must match; the original used `or`, which
            # accepted messages where only one of the two bytes was correct.
            if utils.lsb(checksum) == data[-3] and utils.msb(checksum) == data[-2]:
                # Return all data without header (leading) and crc (trailing)
                message = bytearray(data[5:-3])
            else:
                raise ValueError("Payload checksum is invalid: %x%x expected %x%x" %
                                 (data[-3], data[-2], utils.lsb(checksum), utils.msb(checksum)))
        else:
            raise ValueError("Payload does not include message end marker (%s)" % data[-1])
        return message

    @classmethod
    def _construct_message(cls, session_id: Optional[int], request_nr: Optional[int], command: consts.Command,
                           data=None) -> bytes:
        """Build a complete wire message: start marker, header, optional
        session fields, payload, CRC and end marker."""
        # Fix: use one condition for both sizing and appending the session
        # fields. The original sized with `session_id and request_nr` but
        # appended with `session_id` alone, producing a corrupt frame when the
        # sequence number passed through 0.
        include_session = session_id is not None and request_nr is not None
        message_length = len(data or []) + (4 if include_session else 0)
        message = bytearray([consts.C3_PROTOCOL_VERSION,
                             command or 0x00,
                             utils.lsb(message_length),
                             utils.msb(message_length)])
        if include_session:
            # Session id and request sequence number, both little-endian
            message.append(utils.lsb(session_id))
            message.append(utils.msb(session_id))
            message.append(utils.lsb(request_nr))
            message.append(utils.msb(request_nr))
        if data:
            for byte in data:
                if isinstance(byte, int):
                    message.append(byte)
                elif isinstance(byte, str):
                    message.append(ord(byte))
                else:
                    raise TypeError("Data does not contain int or str: %s is %s" % (str(byte), type(byte)))
        checksum = crc.crc16(message)
        message.append(utils.lsb(checksum))
        message.append(utils.msb(checksum))
        message.insert(0, consts.C3_MESSAGE_START)
        message.append(consts.C3_MESSAGE_END)
        return message

    def _send(self, command: consts.Command, data=None) -> int:
        """Serialize and transmit a command; returns the number of bytes
        written and advances the request sequence number."""
        message = self._construct_message(self._session_id, self._request_nr, command, data)
        self.log.debug("Sending: %s", message.hex())
        bytes_written = self._sock.send(message)
        self._request_nr = self._request_nr + 1
        return bytes_written

    def _recv_exact(self, size: int) -> bytes:
        """Receive exactly `size` bytes.

        TCP recv may return fewer bytes than requested (the original assumed a
        single recv call was enough); loop until the full amount arrived or
        the peer closed the connection."""
        buffer = bytearray()
        while len(buffer) < size:
            chunk = self._sock.recv(size - len(buffer))
            if not chunk:
                raise ConnectionError("Connection closed while receiving data")
            buffer.extend(chunk)
        return bytes(buffer)

    def _receive(self) -> tuple[bytearray, int]:
        """Receive one reply message; returns (payload, payload_size).

        Raises ConnectionError when the panel replies with an error code."""
        # Get the first 5 bytes (the header) to learn the payload size
        header = self._recv_exact(5)
        self.log.debug("Receiving header: %s", header.hex())
        message = bytearray()
        received_command, data_size = self._get_message_header(header)
        # Get the optional message data, checksum (2 bytes) and end marker (1 byte)
        payload = self._recv_exact(data_size + 3)
        if data_size > 0:
            # Process message in case data available
            self.log.debug("Receiving payload: %s", payload.hex())
            message = self._get_message(header + payload)
            if len(message) != data_size:
                raise ValueError(
                    f"Length of received message ({len(message)}) does not match specified size ({data_size})")
            if received_command == consts.C3_REPLY_OK:
                pass
            elif received_command == consts.C3_REPLY_ERROR:
                # Error replies carry a signed error code in the last byte
                error = utils.byte_to_signed_int(message[-1])
                raise ConnectionError(
                    f"Error {error} received in reply: {consts.Errors[error] if error in consts.Errors else 'Unknown'}")
        else:
            data_size = 0
        return message, data_size

    def _send_receive(self, command: consts.Command, data=None) -> tuple[bytearray, int]:
        """Send a command and wait for its reply.

        In session mode the reply starts with the session id and sequence
        number; the session id is verified and both are stripped from the
        returned (payload, payload_size)."""
        bytes_received = 0
        receive_data = bytearray()
        session_offset = 0
        try:
            bytes_written = self._send(command, data)
            if bytes_written > 0:
                receive_data, bytes_received = self._receive()
                if not self._session_less and bytes_received > 2:
                    session_offset = 4
                    session_id = (receive_data[1] << 8) + receive_data[0]
                    # msg_seq = (receive_data[3] << 8) + receive_data[2]
                    if self._session_id != session_id:
                        raise ValueError("Data received with invalid session ID")
        except BrokenPipeError as ex:
            self._connected = False
            raise ConnectionError(f"Unexpected connection end: {ex}") from ex
        return receive_data[session_offset:], bytes_received - session_offset

    def is_connected(self) -> bool:
        """Return the locally-tracked connection state (no probe of the
        socket is performed)."""
        return self._connected

    @classmethod
    def _parse_kv_from_message(cls, message: bytes) -> dict:
        """Parse a comma-separated `key=value` payload into a dict."""
        kv_pairs = {}
        message_str = message.decode(encoding='ascii', errors='ignore')
        pattern = re.compile(r"([\w~]+)=([^,]+)")
        for (param_name, param_value) in re.findall(pattern, message_str):
            kv_pairs[param_name] = param_value
        return kv_pairs

    def __repr__(self):
        return "\r\n".join([
            f"- Host: {self.host} @ {self.port}",
            f"- Device: {self.device_name} (sn: {self.serial_number})",
            f"- Firmware version: {self.firmware_version}"
        ])

    def log_level(self, level: int):
        """Set the verbosity of the class logger."""
        self.log.setLevel(level)

    @property
    def host(self) -> str:
        """Host (IP or name) of the panel."""
        return self._device_info.host

    @host.setter
    def host(self, host: str):
        if not self.is_connected():
            self._device_info.host = host
        else:
            raise ConnectionError("Cannot set host when C3 is connected. Disconnect first.")

    @property
    def port(self) -> int:
        """TCP port of the panel."""
        return self._device_info.port

    @port.setter
    def port(self, port: int):
        if not self.is_connected():
            self._device_info.port = port
        else:
            raise ConnectionError("Cannot set port when C3 is connected. Disconnect first.")

    @property
    def mac(self) -> str:
        """MAC address (discovery only), or '?' when unknown."""
        return self._device_info.mac or '?'

    @property
    def serial_number(self) -> str:
        """Serial number, or '?' when unknown."""
        return self._device_info.serial_number or '?'

    @property
    def device_name(self) -> str:
        """Device/model name, or '?' when unknown."""
        return self._device_info.device_name or '?'

    @property
    def firmware_version(self) -> str:
        """Firmware version, or '?' when unknown."""
        return self._device_info.firmware_version or '?'

    @property
    def nr_of_locks(self) -> int:
        """Number of door locks reported by the panel (0 until connected)."""
        return self._status.nr_of_locks or 0

    @property
    def nr_aux_in(self) -> int:
        """Number of auxiliary inputs reported by the panel (0 until connected)."""
        return self._status.nr_aux_in or 0

    @property
    def nr_aux_out(self) -> int:
        """Number of auxiliary outputs reported by the panel (0 until connected)."""
        return self._status.nr_aux_out or 0

    @classmethod
    def discover(cls, interface_address: str = None, timeout: int = 2) -> list[C3]:
        """Scan on all local network interfaces, or the provided interface,
        for C3 panels; returns an (unconnected) C3 instance per panel found."""
        devices = []
        # Fix: keep the broadcast request in its own variable; the original
        # reused `message` for parsed replies, so a reply on one interface
        # corrupted the request sent on the next interface.
        request = cls._construct_message(None, None, consts.Command.DISCOVER, consts.C3_DISCOVERY_MESSAGE)
        if interface_address:
            ip_addresses = [interface_address]
        else:
            interfaces = socket.getaddrinfo(host=socket.gethostname(), port=None, family=socket.AF_INET)
            ip_addresses = [ip[-1][0] for ip in interfaces]
        for ip_address in ip_addresses:
            cls.log.debug("Discover on %s", ip_address)
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            sock.settimeout(timeout)
            sock.bind((ip_address, 0))
            sock.sendto(request, ("255.255.255.255", consts.C3_PORT_BROADCAST))
            while True:
                try:
                    payload = sock.recv(64 * 1024)
                except socket.timeout:
                    break
                if payload:
                    received_command, data_size = cls._get_message_header(payload)
                    if received_command == consts.C3_REPLY_OK:
                        # Get the message data and signature
                        reply = cls._get_message(payload)
                        if len(reply) != data_size:
                            raise ValueError(
                                "Length of received message (%d) does not match specified size (%d)" % (len(reply),
                                                                                                        data_size))
                        data = cls._parse_kv_from_message(reply)
                        devices.append(C3(C3DeviceInfo(
                            host=data.get("IP"),
                            mac=data.get("MAC"),
                            serial_number=data.get("SN"),
                            device_name=data.get("Device"),
                            firmware_version=data.get("Ver")
                        )))
            sock.close()
        return devices

    def connect(self, password: Optional[str] = None) -> bool:
        """Connect to the C3 panel on the host/port provided in the constructor.

        First attempts a session-based connection; if that fails, falls back
        to a session-less connection. On success the panel's serial number and
        lock/aux counts are retrieved. Returns True when connected."""
        self._connected = False
        self._session_id = 0xFEFE
        # Fix: the original wrote `self._request_nr: -258`, a bare annotation
        # that never actually reset the sequence counter.
        self._request_nr = -258
        data = None
        if password:
            data = bytearray(password.encode('ascii'))
        # Attempt to connect to panel with session initiation command
        try:
            self._sock.connect((self._device_info.host, self._device_info.port))
            bytes_written = self._send(consts.Command.CONNECT_SESSION, data)
            if bytes_written > 0:
                receive_data, bytes_received = self._receive()
                if bytes_received > 2:
                    self._session_id = (receive_data[1] << 8) + receive_data[0]
                    self.log.debug("Connected with Session ID %04x", self._session_id)
                    self._session_less = False
                    self._connected = True
        except ConnectionError as ex:
            self.log.debug("Connection attempt with session to %s failed: %s", self._device_info.host, ex)
        except ValueError as ex:
            self.log.error("Reply from %s failed: %s", self._device_info.host, ex)
        # Alternatively attempt to connect to panel without session initiation
        if not self._connected:
            try:
                self._session_id = None
                # NOTE(review): if the TCP connect above succeeded but the
                # protocol handshake failed, this second connect on the same
                # socket may raise OSError(EISCONN), which is not caught here
                # — confirm the intended fallback behavior.
                self._sock.connect((self._device_info.host, self._device_info.port))
                bytes_written = self._send(consts.Command.CONNECT_SESSION_LESS, data)
                if bytes_written > 0:
                    self._receive()
                    self.log.debug("Connected without session")
                    self._session_less = True
                    self._connected = True
            except ConnectionError as ex:
                self.log.debug("Connection attempt without session to %s failed: %s", self._device_info.host, ex)
            except ValueError as ex:
                self.log.error("Reply from %s failed: %s", self._device_info.host, ex)
        if self._connected:
            try:
                params = self.get_device_param(["~SerialNumber", "LockCount", "AuxInCount", "AuxOutCount"])
                self._device_info.serial_number = params.get("~SerialNumber", self._device_info.serial_number)
                self._status.nr_of_locks = int(params.get("LockCount", self._status.nr_of_locks))
                self._status.nr_aux_in = int(params.get("AuxInCount", self._status.nr_aux_in))
                self._status.nr_aux_out = int(params.get("AuxOutCount", self._status.nr_aux_out))
            except ConnectionError as ex:
                self.log.error("Connection to %s failed: %s", self._device_info.host, ex)
            except ValueError as ex:
                self.log.error("Retrieving configuration parameters from %s failed: %s", self._device_info.host, ex)
        return self._connected

    def disconnect(self):
        """Disconnect from C3 panel and end session."""
        if self.is_connected():
            self._send_receive(consts.Command.DISCONNECT)
            self._sock.close()
            self._connected = False
            self._session_id = None
            # Fix: was `self._request_nr: -258` (a no-op annotation)
            self._request_nr = -258

    def get_device_param(self, request_parameters: list[str]) -> dict:
        """Retrieve the requested device parameter values as a dict."""
        if self.is_connected():
            message, _ = self._send_receive(consts.Command.GETPARAM, ','.join(request_parameters))
            parameter_values = self._parse_kv_from_message(message)
        else:
            raise ConnectionError("No connection to C3 panel.")
        return parameter_values

    def _update_inout_status(self, logs: list[rtlog.RTLogRecord]):
        """Refresh the cached lock/aux status from the given log records."""
        for log in logs:
            if isinstance(log, rtlog.DoorAlarmStatusRecord):
                for lock_nr in range(1, self._status.nr_of_locks + 1):
                    self._status.lock_status[lock_nr] = log.door_sensor_status(lock_nr)
            elif isinstance(log, rtlog.EventRecord):
                # Aux events carry the aux number in the door_id field
                if log.event_type == consts.EventType.OPEN_AUX_OUTPUT:
                    self._status.aux_out_status[log.door_id] = consts.InOutStatus.OPEN
                elif log.event_type == consts.EventType.CLOSE_AUX_OUTPUT:
                    self._status.aux_out_status[log.door_id] = consts.InOutStatus.CLOSED
                elif log.event_type == consts.EventType.AUX_INPUT_DISCONNECT:
                    self._status.aux_in_status[log.door_id] = consts.InOutStatus.OPEN
                elif log.event_type == consts.EventType.AUX_INPUT_SHORT:
                    self._status.aux_in_status[log.door_id] = consts.InOutStatus.CLOSED

    def get_rt_log(self) -> list[rtlog.EventRecord | rtlog.DoorAlarmStatusRecord]:
        """Retrieve the latest event or alarm records."""
        records = []
        if self.is_connected():
            message, message_length = self._send_receive(consts.Command.RTLOG)
            # One RT log is 16 bytes; the reply must be a multiple of 16
            if message_length % 16 == 0:
                logs_messages = [message[i:i + 16] for i in range(0, message_length, 16)]
                for log_message in logs_messages:
                    self.log.debug("Received RT Log: %s", log_message.hex())
                    # Byte 10 == 255 marks a door/alarm status record
                    if log_message[10] == consts.EventType.DOOR_ALARM_STATUS:
                        records.append(rtlog.DoorAlarmStatusRecord.from_bytes(log_message))
                    else:
                        records.append(rtlog.EventRecord.from_bytes(log_message))
            else:
                raise ValueError("Received RT Log(s) size is not a multiple of 16: %d" % message_length)
        else:
            raise ConnectionError("No connection to C3 panel.")
        self._update_inout_status(records)
        return records

    def _auto_close_aux_out(self, aux_nr: int) -> None:
        """Set the specified auxiliary output to closed.

        The C3 does not send an event when an auxiliary output closes after a
        certain duration. This function is triggered by an internal timer to
        set the cached state to closed."""
        self._status.aux_out_status[aux_nr] = consts.InOutStatus.CLOSED

    def control_device(self, command: controldevice.ControlDeviceBase):
        """Send a control command to the panel.

        For timed auxiliary output operations a timer is started that flips
        the cached output state back to closed after the duration expires."""
        if self.is_connected():
            self._send_receive(consts.Command.CONTROL, command.to_bytes())
            if isinstance(command, controldevice.ControlDeviceOutput):
                if command.operation == consts.ControlOperation.OUTPUT and \
                        command.address == consts.ControlOutputAddress.AUX_OUTPUT and \
                        command.duration < 255:
                    threading.Timer(command.duration, self._auto_close_aux_out, [command.output_number]).start()
        else:
            raise ConnectionError("No connection to C3 panel.")

    def lock_status(self, door_nr: int) -> consts.InOutStatus:
        """Returns the (cached) door open/close status.
        Requires a preceding call to get_rt_log to update to the latest status."""
        return self._status.lock_status[door_nr] if door_nr in self._status.lock_status else \
            consts.InOutStatus.UNKNOWN

    def aux_in_status(self, aux_nr: int) -> consts.InOutStatus:
        """Returns the (cached) auxiliary input short/disconnect status.
        Requires a preceding call to get_rt_log to update to the latest status."""
        return self._status.aux_in_status[aux_nr] if aux_nr in self._status.aux_in_status else \
            consts.InOutStatus.UNKNOWN

    def aux_out_status(self, aux_nr: int) -> consts.InOutStatus:
        """Returns the (cached) auxiliary output open/close status.
        Requires a preceding call to get_rt_log to update to the latest status."""
        return self._status.aux_out_status[aux_nr] if aux_nr in self._status.aux_out_status else \
            consts.InOutStatus.UNKNOWN
========
zkaffold
========
A plone product to install content on a plone site.
Features
--------
* Install content
* Install products
* Delete content
* Apply zope interfaces
* Modify content
* Generate test content
How to install
--------------
Add zkaffold to your product:
* Add "zkaffold" to your product's install_requires in setup.py
* Add <include package="zkaffold" /> in your product's configure.zcml
* Add "profile-zkaffold:default" as a dependency to your product's
metadata.xml
* run ./bin/buildout
zkaffold should now be installed. Note that zkaffold is not installable /
uninstallable from the portal quick installer.
How to use
----------
After installing zkaffold in your plone site, you can build content for
your plone site by:
* create "initial" profile in your product,
* create a directory called "zkaffold" in profile/initial,
* create structure.xml (example structure.xml is in
zkaffold/exportimport/tests/test_exportimport.py),
You can also create default users in your plone site by:
* create members.xml (example members.xml is in
zkaffold/exportimport/tests/test_exportimport.py)
If you are using buildout, in your buildout.cfg:
* in the [plonesite] section, add your product initial profile (e.g.
mysite:initial) to "profiles-initial",
* run ./bin/buildout
Your plone site should now be populated with content specified in
structure.xml. Note that if your plone site already exists before running
buildout, it will not add any content. You'll have to delete your plone
site or run the zkaffold import step to add the content.
You can use different profiles to create different content, for example if
you want test content, you can create another profile and add that in
profiles-initial when you want test content to be built.
zkaffold publishes all objects by default by trying to apply transition
"publish" to the objects. If your objects need a different way of
publishing, have a look at zkaffold/exportimport/tests/test_exportimport.py.
zkaffold XML schema
-------------------
The basic schema that all zkaffold XML files should begin with is::
<?xml version="1.0" encoding="UTF-8"?>
<site xmlns:zkaffold="http://www.isotoma.com/zkaffold">
</site>
The ``site`` element is the lone root element and represents the Plone portal
object.
Descending the object tree
``````````````````````````
Objects may be traversed by their ID in order to reach the object which should
be modified. This is done by specifying elements with the tag name as the ID
of the object in question. For example, to descend to the object at path
"``portal/my_folder/my_obj``" , one would use the following syntax::
<site>
<my_folder>
<my_obj>
...
</my_obj>
</my_folder>
</site>
Adding new objects
``````````````````
To insert a new object into a folderish object, first descend to the target
container, then insert a new element with the same tag name as the portal type
you wish to add, with an ``id`` attribute of the ID it should have within the
ZODB::
<site>
<my_folder>
<MyContentType id="my_new_obj">
</MyContentType>
</my_folder>
</site>
Parameters
''''''''''
More often than not, one will want to specify values for fields in the schema
of the object when it is created. To do this, add a ``params`` child to the
element representing the new object, which itself should contain a ``param``
element for each field. The ``param`` tag should have the ``name`` attribute
set to the name of the field in question and the ``type`` attribute set to one
of ``text``, ``lipsum``, ``lipsum_html``, ``reference``, ``image``, ``file``,
``boolean``, ``list`` or ``reference_list``. The ``param`` element then
contains the value that should be given to that field. One need not specify a
``param`` element for each and every field of a content type, the default
values will be used where fields are not specified.
``param`` elements with the ``type`` attribute set to ``lipsum`` or
``lipsum_html`` have the optional attribute ``paras`` which specifies the
number of paragraphs of `Lipsum <http://en.wikipedia.org/wiki/Lorem_ipsum>`_
to generate as the value of that field. They also contain no child elements.
The ``lipsum`` value causes ordinary Lipsum to be generated with newlines,
whereas ``lipsum_html`` generates Lipsum with paragraph tags as necessary::
<site>
<my_folder>
<MyContentType id="my_new_obj">
<params>
<param name="title" type="text">My new object</param>
<param name="text" type="lipsum_html" paras="5" />
<param name="isNew" type="boolean">True</param>
<param name="old_obj" type="reference">/my_folder/my_obj</param>
<param name="thumb" type="image">new_obj_thumb.jpg</param>
</params>
</MyContentType>
</my_folder>
</site>
As in the above example, references are given as path strings to the
referenced object. Images & files are given as file names relative to the
directory of the XML file.
Multi-valued parameters
'''''''''''''''''''''''
For fields which may contain multiple values such as ``ReferenceList`` or
``LinesField``, the ``param`` element takes the ``type`` attribute ``list`` or
``reference_list``. It should then contain an ``items`` element, which
contains zero or more ``item`` elements with their ``type`` attribute set
appropriately, as per the ``param`` tag and their value set as their inner
XML::
<site>
<my_folder>
<MyContentType id="my_new_obj">
<params>
<param name="title" type="text">My new object</param>
<param name="options" type="list">
<items>
<item type="text">Option 1</item>
<item type="text">Option 2</item>
</items>
</param>
</params>
</MyContentType>
</my_folder>
</site>
Interfaces
''''''''''
Occasionally one may wish to add interfaces to objects which they do not
automatically provide, particularly marker interfaces. This is achieved using
the ``interfaces`` & ``interface`` tags, like so::
<site>
<my_folder>
<MyContentType id="my_new_obj">
<interfaces>
<interface>identifier.of.IMyMarker</interface>
<interface>identifier.of.IReplacementObject</interface>
</interfaces>
<params>
<param name="title" type="text">My new object</param>
</params>
</MyContentType>
</my_folder>
</site>
This adds the interfaces specified by the identifiers to the object.
Interfaces that are already applied to objects may be similarly removed by
replacing the ``interface`` element with a ``remove`` element, thus::
<site>
<my_folder>
<MyContentType id="my_new_obj">
<interfaces>
<remove>IMyMarker</remove>
</interfaces>
<params>
<param name="title" type="text">My new object</param>
</params>
</MyContentType>
</my_folder>
</site>
Modifying existing objects
``````````````````````````
Existing objects may be modified in a very similar way to which they are
added. Simply descend to the object in question, append the ``modify`` element
to it, and add ``params`` and ``interfaces`` elements as its children, as
necessary. Instead of taking the default values where fields are not
specified, the fields will retain their current values::
<site>
<my_folder>
<my_new_obj>
<modify>
<params>
<param name="title" type="text">My updated title for my new object</param>
</params>
</modify>
</my_new_obj>
</my_folder>
</site>
Setting the default page of folderish objects
'''''''''''''''''''''''''''''''''''''''''''''
To set the default page of a new or existing folderish object, use the
``modify`` element with a ``setDefaultPage`` attribute::
<site>
<my_folder>
<modify setDefaultPage="my_new_obj">
...
</modify>
</my_folder>
<FolderishType id="another_folder">
<modify setDefaultPage="folder_home" />
<params>
...
</params>
<ContentType id="folder_home">
...
</ContentType>
</FolderishType>
</site>
Deleting objects
````````````````
Existing objects can be deleted by descending to their container object and
inserting a ``delete`` element with its ``id`` attribute set to the ID of the
object which should be deleted. The ``delete`` element also has the optional
parameter, ``ctype`` which can be used to specify a particular content type
which the specified object must be in order to be deleted::
<site>
<my_folder>
<!-- I never liked that new object anyway... -->
<delete id="my_new_obj" ctype="MyContentType" />
</my_folder>
</site>
Dependencies
------------
zkaffold depends on lxml, which depends on libxml2-dev and libxslt-dev.
In Debian, you can use:
* ``sudo aptitude install libxml2-dev libxslt-dev``
PLUGIN
------
zkaffold supports a plugin system for exporting content:
* Add an import step for your profile initial,
* create an exporter for your field, it should return a
lxml.etree.Element for <param> or (lxml.etree.Element for <param>,
filename, file content)::
def my_field_exporter(plone_object, field):
...
return param
* create the import step to register the field exporter::
from zkaffold.contextexporter import IContentExporter
def register_field_exporters(context):
portal = context.getSite()
sm = portal.getSiteManager()
exporter = sm.getUtility(IContentExporter)
exporter.register_field_exporter('mysite.fields.MyField',
'mysite.field_exporters.my_field_exporter')
Tests
-----
To run zkaffold's tests, you need to:
* add "zkaffold [test]" in the "eggs" in the [test] section of your
buildout.cfg,
* run ./bin/buildout,
* ./bin/test
| zkaffold | /zkaffold-0.0.11.tar.gz/zkaffold-0.0.11/README.rst | README.rst |
===========
ZK Cluster
===========
This library provides a framework for writing clustered services.
Currently, it is derived from the basis used in HAAlchemy, and is
intended to be used with `HAAlchemy <https://bitbucket.org/zzzeek/haalchemy>`_
and `Connmon <https://bitbucket.org/zzzeek/connmon>`_.
The core of the framework is a clusterable service which can connect
with other clusterable services using a p2p system, and within the p2p
system a partial implementation of the RAFT consensus algorithm may
also be used for "leader" selection.
Messaging is performed using a simple RPC scheme.
Applications which use the system only work with RPC message objects.
The file ``demo.py`` illustrates some use of the API.
The ZKCluster API is very specific to its current use cases, and is likely
not useful as a general use RPC / clustering library. | zkcluster | /zkcluster-0.0.10.tar.gz/zkcluster-0.0.10/README.rst | README.rst |
def curvefit(ftoread,ftowrite,ts=1000,density_coeff=5,plot="off"):
"""
takes 5 parameters file to read, file to write to, max timestep,density coeff and plot ="on" will display plot for given values.
assume first column is timesteps starting from 1 and first row is titles(can be empty)
dilutes to number of timesteps by density_coeff.
curvefits with respect to timesteps and normalize max value to 1.
applied to all columns.
writes 3 files "real" ,"curvefit", "toff (off-rates)"
"""
from scipy.optimize import curve_fit as cvf
import numpy as np
import csv
import matplotlib.pyplot as plt
csvName = ftoread
csv_file = open(csvName,"r")
csv_input = csv.reader(csv_file)
dens_coef = density_coeff
#create an array from csv file until the given timestep
arr = []
cof = 0
ts = ts + 50
for row in csv_input:
cof +=1
if cof<=ts:
arr.append(row)
arr = np.array(arr)
nrow,ncol = arr.shape
exp = int(nrow/dens_coef)
init_btf = float(arr[1,1])
time = np.zeros(exp)
value = np.zeros(exp)
tm = np.linspace(0,ts-dens_coef,exp)
for i in range(exp):
time[i] = arr[i*dens_coef+1,0]
#tm will be used as first columnt reference time list to fit to.
w = tm.reshape(exp,1)
toff = np.ones(ncol)
#Every column is curvefit according to time step values.
tt = -1
for j in range(ncol):
tt+=1
for i in range(1,exp):
value[i-1] = arr[i*dens_coef,j]
#curvefit function
def func(x, a, b,c ,d):
return a - b*np.log(x+d)+c*x**0.25
#parameter optimaztion with initial parameters
popt,pcov = cvf(func,time,value, p0 = [0,-30,30,+30] ,maxfev = 1000000)
value_fit = func(tm,*popt)
#finding the step where btf = (inital bound tf number/e)
ii = -1
for o in value_fit:
ii+=1
e = np.e
tau = init_btf/e
if o > tau -0.5 and o < tau+0.5 :
toff[tt] = tm[ii]
elif o > tau-1 and o < tau+1:
toff[tt] = tm[ii]
elif o > tau-1.5 and o < tau+1.5:
toff[tt] = tm[ii]
elif o > tau-2.5 and o < tau+2.5:
toff[tt] = tm[ii]
break
value_fit[0] = init_btf
value_fit = value_fit/init_btf
#fitted values is turned into column
value_fit =value_fit.reshape(exp,1)
for index,fraction in enumerate(value_fit):
if fraction > 1:
value_fit[index]=1
elif fraction < 0:
value_fit[index]=0
if j == 0:
pass
else:
#all the values will be appended
w = np.append(w,value_fit,axis=1)
if plot == "on":
plt.plot(time,value/init_btf,"b.", label= "real")
plt.plot(tm,value_fit, "r-", label = "fit")
plt.show()
else:
pass
arr3 = []
zero = -1
delim = int(ts/(dens_coef*10)-1)*dens_coef
for k in arr:
zero +=1
if zero == 1:
k[0]=0
arr3.append(k)
elif zero%delim == 0:
arr3.append(k)
csv_file.close()
arr4 = []
i =0
while(w[i][0]<=ts-50 and i<len(w)):
arr4.append(w[i])
i+=1
#toff file
toff = 1/toff
toff = toff.reshape([1,ncol])
ff = "off" + ftowrite
np.savetxt(ff,toff,fmt= "%s",delimiter=",")
print("done " + ff)
#curvefit values file
arr4 = np.array(arr4)
orig = "cf" + ftowrite
np.savetxt(orig,arr4,fmt= "%s",delimiter=",")
print("done " + orig)
arr3 = np.array(arr3)
nr, nc = arr3.shape
arr5 = np.zeros([nr-1,nc])
for h in range(nc):
for p in range(1,nr):
arr5[p-1][h] = float(arr3[p][h])/init_btf
arr5[p-1][0] = float(arr3[p][0])
#real values file
pp = "real"+ftowrite
np.savetxt(pp,arr5,fmt= "%s",delimiter=",")
print("done " + pp + "\n")
return
if __name__ == "__main__":
curvefit("ftoread.csv", "ftowrite.csv",1000,5,plot="off") | zkcurvefit | /zkcurvefit-0.0.6-py3-none-any.whl/zkcurvefit.py | zkcurvefit.py |
def generate(n):
from random import random
from datetime import datetime
from datetime import date
import math
pi = math.pi
#initilaziton of the ll---------------------------------------------------------
ll = open("data.init","w")
#takin inputs and boundary calculation------------------------------------------
btf = 0
tf = 0
#boundaries---------------------------------------------------------------------
basepair = 4.6*10**6 #bp
volume = 6.7*10**(-19) #m3
bp_dens = basepair/volume #bp/m3
realN = n*10 #my polymer bp
sysVol = realN/bp_dens*20000#bp/(bp/m3)
sigma = 34*10**(-10) #1 sigma corresponds to 10 bp and 10bp length is 34 armstrong
short_side = (sysVol/2)**(1/3) #2a^3 = m3, a = (m3/2)^(1/3)
b = short_side/sigma #converting length of short side from meters to sigmas
r = n/(2*pi)
rsys = r +10
print("Converting from microomolar to Number------------------------------DONE")
#setting up the command line of the data file-----------------------------------
now = datetime.now()
ct = now.strftime("%H:%M:%S")
today = date.today()
td = today.strftime("%d/%m/%Y")
#writing starts here
ll.write(str(n) +" DNA monomers ")
ll.write(str(btf) + " binding sites ")
ll.write("Date: " + td + " Time: " + ct)
ll.write("\n\n") #writing starts here
#initialization of bonds, atoms and angles--------------------------------------
atoms = str(n) + " atoms\n"
ll.write(atoms)
bonds = str(n)+" bonds\n"
ll.write(bonds)
angles = str(n)+" angles\n\n"
ll.write(angles)
print("initializating of bonds, atoms and angles--------------------------DONE")
#initialization of bond, atom and angle types-----------------------------------
ll.write("5 atom types\n")
ll.write("5 bond types\n")
ll.write("2 angle types\n\n")
print("initializating bond, atom and angle types--------------------------DONE")
# boundaries of the system------------------------------------------------------
xlh = str(-rsys) + " " + str(rsys) + " " + "xlo" + " " + "xhi" + "\n"
ylh = str(-rsys) + " " + str(rsys) + " " + "ylo" + " " + "yhi" + "\n"
zlh = str(-b/20) + " " + str(b/20) + " " + "zlo" + " " + "zhi" + "\n"
ll.write(xlh + ylh + zlh)
ll.write("\n")
print("Boundaries-----------------------------------------------CALCULATED/SET")
#mass declaration---------------------------------------------------------------
ll.write("Masses\n\n")
ll.write("1 1\n")
ll.write("2 1\n")
ll.write("3 2\n")
ll.write("4 2\n")
ll.write("5 2\n")
ll.write("\n")
print("Declaring Masses---------------------------------------------------DONE")
#Bond Coeffs--------------------------------------------------------------------
ll.write("Bond Coeffs\n\n")
bond_coeff = "1" + " " + "30.0" + " " + "1.5" + " " + "1.0" + " " + "1.0" + "\n"
bond_coeff2 = "2" + " " + "30.0" + " " + "1.5" + " " + "1.0" + " " + "1.0" + "\n"
bond_coeff3 = "3" + " " + "30.0" + " " + "1.5" + " " + "1.0" + " " + "1.0" + "\n"
bond_coeff4 = "4" + " " + "30.0" + " " + "1.5" + " " + "1.0" + " " + "1.0" + "\n"
bond_coeff5 = "5" + " " + "30.0" + " " + "2.0" + " " + "1.5" + " " + "1.5" + "\n"
bbc = bond_coeff + bond_coeff2 + bond_coeff3 + bond_coeff4 + bond_coeff5
ll.write(bbc)
ll.write("\n\n")
print("Bond Coeffs---------------------------------------------------------SET")
#atoms--------------------------------------------------------------------------
ll.write("Atoms\n\n")
zco = 0
for x in range(1,n+1):
xco = math.cos(0.01 + 2*pi/n*x)*r
yco = math.sin(0.01 + 2*pi/n*x)*r
#setting the secondary atom type--------------------------------------------
if (x%20 == 1):
sec = 2
elif (x%20 == 2):
sec = 2
else:
sec = 1
mahmut = str(x) + "\t" + str(sec) + "\t" + str(sec) + "\t" +str(xco) + "\t" + str(yco) + "\t" + str(zco) + "\n"
ll.write(mahmut)
print("DNA monomer coordinates---------------------------------------------SET")
#free Tf core/site atom random position genereation-----------------------------
# x = 0
# y = 0
# z = 0
# for scx in range (n+1,n+tf*3,3):
# x = (b)/0.98*(random()-0.5001)
# y = (b)*0.49*(random()-0.5001)
# z = (b)*0.49*(random()-0.5001)
# ll.write(str(scx) + "\t" + "3" + "\t" +"3" + "\t" +str(x) + "\t" + str(y) + "\t" + str(z) + "\n")
# ll.write(str(scx+1) + "\t" + "5" + "\t" +"5" + "\t" + str(x-0.66) + "\t" + str(y+0.96) + "\t" + str(z+0.81) + "\n")
# ll.write(str(scx+2) + "\t" + "3" + "\t" +"3" + "\t" + str(x+0.66) + "\t" + str(y+0.67) + "\t" + str(z+0.48) + "\n")
# print("TF homodimer random coordinates-------------------------------------SET")
#bound tfs core / site atom near pormoter sites---------------------------------
"""
oo = tf*3 + n
r2 = r + 1.35
zc =0
for y in range(1,n+1):
xc = math.cos(0.01 + 2*pi/n*y)*r2
yc = math.sin(0.01 + 2*pi/n*y)*r2
#setting the bound tfs atom type--------------------------------------------
if (y%20 == 1):
oo +=1
bec = 4
bagl = str(oo) + "\t" + str(bec) + "\t" + str(bec) + "\t" +str(xc) + "\t" + str(yc) + "\t" + str(zc) + "\n"
ll.write(bagl)
oo +=1
bagl2 = str(oo) + "\t" + str(5) + "\t" + str(5) + "\t" +str(xc) + "\t" + str(yc) + "\t" + str(zc+1.0) + "\n"
ll.write(bagl2)
elif (y%20 == 2):
oo +=1
bec = 4
bagl = str(oo) + "\t" + str(bec) + "\t" + str(bec) + "\t" +str(xc) + "\t" + str(yc) + "\t" + str(zc) + "\n"
ll.write(bagl)
else:
sec = 1
ll.write("\n\n")
print("Bound TF homodimer coordinates--------------------------------------SET")
"""
#bonds--------------------------------------------------------------------------
ll.write("\nBonds\n\n")
for i in range(1,n):
bid = str(i) + "\t"
bt = "1" + "\t"
bo = str(i) + "\t" + str(i+1) + "\n"
ll.write(bid+bt+bo)
lbond = str(n) + "\t" + "1" + "\t" + "1" + "\t"+ str(n)
ll.write(lbond)
ll.write("\n\n")
print("Bonds for DNA polymer-----------------------------------------------SET")
#free tf core and site atom bonds----------------------------------------------------
# b = 0
# for bound in range (n+1,n+tf*3+1,3):
# ilk = str(bound )
# iki = str(bound + 1)
# ucc = str(bound + 2)
# ll.write(str(bound+b) + "\t" + "5" + "\t" +ilk + "\t" + iki + "\n")
# ll.write(str(bound+b+1) + "\t" + "5" + "\t" +iki + "\t" + ucc + "\n")
# b -=1
#print("Bonds for free TF homodimers----------------------------------------SET")
#bound tf core and site atom bonds----------------------------------------------
"""
b = 0
for botf in range (n+tf*3+1,n+3*tf+btf*3+1,3):
il = str(botf)
ik = str(botf + 1)
ll.write(str(botf+b) + "\t" + "5" + "\t" +il + "\t" + ik + "\n")
uc = str(botf+2)
ll.write(str(botf+b+1) + "\t" + "5" + "\t" +ik + "\t" + uc + "\n")
b -=1
print("Bonds for bound TF homodimers---------------------------------------SET")
ll.write("\n\n")
"""
#angles-------------------------------------------------------------------------
ll.write("Angles\n\n")
for j in range(1,n-1):
aid = str(j) + "\t"
at = "1" + "\t"
ang = str(j) + "\t" + str(j+1) + "\t" + str(j+2) +"\n"
ll.write(aid+at+ang)
langle = str(n-1) + "\t" + "1" + "\t" + str(n-1) + "\t"+ str(n) +"\t"+ "1" "\n"
langle2 = str(n) + "\t" + "1" + "\t" + str(n) + "\t" "1" +"\t"+ "2" + "\n"
ll.write(langle + langle2)
print("DNA polymer Angles--------------------------------------------------SET\n\n")
#angles for freeTFs-----------------------------------------------------------
# ff = 0
# for k in range (1,tf +1):
# index = n + k
# lb = str(index+ff) + "\t" + str(index+ff+1) + "\t" + str(index+ff+2)
# ff +=2
# ll.write(str(index) + "\t" + "2" + "\t" + lb + "\n")
#angles for boundTF-----------------------------------------------------------
#kk = 0
"""
for d in range(1,btf+1):
index = n + tf + d
index1 = n +tf*3 +d
agar = str(index1+kk) + "\t" + str(index1+kk+1) + "\t" + str(index1+kk+2)
kk+=2
ll.write(str(index) + "\t" + "2" + "\t" + agar + "\n")
print("--------------ALL DONE and DATA FILE is READY TO GO--------------------")
ll.close()
print("\n\n")
"""
return
def inputfile():
tt = open("in.init","w")
tt.write("units lj\n")
tt.write("dimension 3\n")
tt.write("boundary p p p\n\n")
tt.write("atom_style angle\n")
tt.write("pair_style lj/cut 2.5\n")
tt.write("pair_modify shift yes\n")
tt.write("bond_style fene\n")
tt.write("angle_style harmonic\n\n")
tt.write("special_bonds lj 0.0 1.0 1.0 coul 0.0 1.0 1.0\n\n")
tt.write("read_data data.init\n\n")
tt.write("pair_coeff * * 12.0 1.0\n")
tt.write("pair_coeff 2 4 10.0 1.0 2.5\n\n")
tt.write("angle_coeff 1 1.0 109.5\n")
tt.write("angle_coeff 2 12.0 40.0\n\n")
tt.write("neighbor 0.4 bin\n")
tt.write("neigh_modify delay 10\n\n")
tt.write("timestep 0.005\n\n")
tt.write("thermo_style multi\n")
tt.write("thermo 5000\n\n")
tt.write("fix 1 all nvt temp 1.0 1.0 0.5\n")
tt.write("fix 2 all langevin 1.0 1.0 0.5 7213981\n\n")
tt.write("minimize 0.0001 0.000001 100 1000\n\n")
tt.write("run 20000000\n\n")
tt.write("write_data data.collapse")
print("DONE")
return | zkdatabuilder | /zkdatabuilder-0.1.2-py3-none-any.whl/circularDNA.py | circularDNA.py |
def steal(fname, tfd, wdn=1):
    """
    Read atom ids and positions from a LAMMPS data file, re-center the
    configuration on its centroid, and retype beads into DNA monomers
    (type 1) and binding-site beads (type 2) according to ``tfd``.

    Parameters
    ----------
    fname : str
        Path to the LAMMPS data file. The third line must start with the
        atom count ("<N> atoms") and an "Atoms" section header must appear
        within the first 100 lines, followed by one blank line and then one
        row per atom: id, mol, type, x, y, z.
    tfd : int
        Binding-site spacing: the first two beads of every ``tfd``-bead
        stretch become type 2.
    wdn : int, optional
        Widening factor; values > 1 delegate to :func:`widen`.

    Returns
    -------
    numpy.ndarray of shape (n_atoms, 6)
        Columns: id, molecule type, atom type, x, y, z (centered).
    """
    import numpy as np
    # context manager guarantees the handle is closed even on a parse error
    with open(fname, "r") as handle:
        lines = handle.readlines()
    # LAMMPS convention: atom count is the first token of the third line
    n_atoms = int(lines[2].split()[0])
    start = None
    for lineno, line in enumerate(lines[:100]):
        tokens = line.split()  # also guards against whitespace-only lines
        if tokens and tokens[0] == "Atoms":
            # skip the "Atoms" header line and the blank line after it
            start = lineno + 2
            break
    if start is None:
        raise ValueError("no 'Atoms' section found in the first 100 lines of %s" % fname)
    atoms = np.zeros([n_atoms, 6])
    for line in lines[start:start + n_atoms]:
        fields = line.split()
        atom_id = int(fields[0])
        # rows may arrive in any order; place by id (1-based in the file)
        atoms[atom_id - 1] = [float(v) for v in fields[:6]]
    # re-center the configuration on its centroid (vectorized mean)
    atoms[:, 3:6] -= atoms[:, 3:6].mean(axis=0)
    # default everything to DNA (type 1), then mark the two beads at the
    # start of each tfd-bead stretch as binding sites (type 2)
    atoms[:, 1:3] = 1
    for idx in range(n_atoms):
        if idx % tfd in (0, 1):
            atoms[idx, 1:3] = 2
    if wdn == 1:
        return atoms
    if wdn > 1:
        return widen(fname, tfd, wdn)
    # previously fell through and returned None silently
    raise ValueError("wdn must be >= 1")
"""_________________________________________________________________________"""
def widen(fname: str, tfd: int, wdn: int):
    """
    Creates widened data files using lammps data files, takes fname to use
    steal method, and requires wdn, widening factor and tfd, transcription
    factor density as args.
    Returns widened array of atoms
    """
    import numpy as np
    # original bead list from the data file; types already set per tfd
    atoms = steal(fname,tfd,1)
    # NOTE(review): this scales *all six* columns (ids and types included),
    # not just the coordinates; ids and types are repaired further below.
    # wdn**(2/3) presumably keeps the bp density fixed while lengthening
    # the chain -- TODO confirm the intended scaling law.
    watoms = atoms*(wdn**(2/3))
    boy = len(atoms)      # original bead count
    yeniboy = wdn*boy     # widened bead count (wdn new beads per old bead)
    arr = np.zeros([yeniboy,6])
    a = 0
    # place every scaled original bead at positions wdn-1, 2*wdn-1, ...;
    # the zero rows in between are interpolated below.
    # NOTE(review): assumes wdn >= 2; with wdn == 1 the indexing here would
    # skip row 0 and overrun -- steal() only calls this for wdn > 1.
    for i,o in enumerate(arr):
        if (i+1)%wdn == 0 and i>0:
            arr[i]= watoms[a]
            a+=1
    widen = arr.copy()
    #fills between x+wdn and x+2wdn
    # odd-multiple anchor -> coor1, the following even multiple -> coor2;
    # the wdn-1 rows between them get linearly interpolated coordinates.
    for i, row in enumerate(widen):
        if (i+1)%wdn == 0 and (i+1)%(2*wdn) != 0 and i > 0:
            coor1 = row [3:]
        elif (i+1)%wdn == 0 and (i+1)%(2*wdn) == 0 and i>0:
            coor2 = row [3:]
            dif = coor2-coor1
            difc = dif/wdn
            for p in range(1,wdn):
                widen[i-wdn+p][3:] = coor1+difc*p
    #fills between n+2wdn and n+2wdn
    # same interpolation with the roles of odd/even anchors swapped, so the
    # segments skipped by the previous pass are filled in.
    for i, row in enumerate(widen):
        if (i+1)%wdn == 0 and (i+1)%(2*wdn) == 0 and i > 0:
            coor1 = row [3:]
        elif (i+1)%wdn == 0 and (i+1)%(2*wdn) != 0 and i>0:
            coor2 = row [3:]
            dif = coor2-coor1
            difc = dif/wdn
            for p in range(1,wdn):
                widen[i-wdn+p][3:] = coor1+difc*p
    #fills between last with wdn
    # closes the ring: interpolates the leading wdn-1 rows between the last
    # bead and the first anchor.
    for i, row in enumerate(widen):
        if i == wdn-1:
            coor1 = row [3:]
        elif i == yeniboy-1:
            coor2 = row [3:]
            dif = coor2-coor1
            difc = dif/wdn
            for p in range(wdn-1):
                widen[p][3:] = coor2-difc*(p+1)
        # rewrite the id column to a clean 1..yeniboy sequence (the scaled
        # copies above had scaled ids, interpolated rows had id 0).
        # NOTE(review): placed inside the loop so every row gets an id;
        # original indentation was ambiguous -- confirm against upstream.
        widen[i][0] = i +1
    # reset all types to DNA, then re-mark binding sites per tfd
    widen[:,1:3] = 1
    for index,l in enumerate(widen):
        if index%tfd == 1 or index%tfd == 0:
            l[1:3] =2
            widen[index]=l
    return widen
"""_________________________________________________________________________"""
def radius(n):
    """
    Compute the capsule radius (in sigma units) needed so that an n-bead
    polymer (1 bead = 10 bp) matches the E. coli base-pair density.

    Returns the radius (int, sigma) and the required system volume (m^3),
    in that order.
    """
    import math
    ecoli_bp = 4.6*10**6            # E. coli genome size [bp]
    ecoli_vol = 6.7*10**(-19)       # E. coli cell volume [m^3]
    sigma = 34*10**(-10)            # 1 sigma = 10 bp = 34 Angstrom [m]
    bp_density = ecoli_bp/ecoli_vol     # [bp/m^3]
    polymer_bp = n*10                   # base pairs represented by the chain
    sys_vol = polymer_bp/bp_density     # volume holding the same bp density
    # radius of the capsule geometry with that volume, in meters
    r_meters = (3*sys_vol/(16*math.pi))**(1/3)
    # convert to sigma and round up to the next whole bead diameter
    return int(r_meters/sigma + 1), sys_vol
"""_________________________________________________________________________"""
def boundtf(fname,tfd,wdn=1):
    """
    Place one pre-bound transcription-factor trimer (two type-4 site beads
    plus one type-5 core bead) next to each promoter of the DNA read from
    ``fname``. Promoters occur every ``tfd`` beads, matching steal().
    Returns the DNA atoms stacked with the bound-TF atoms, shape (N, 6).
    """
    import numpy as np
    dna = steal(fname,tfd,wdn)
    n = len(dna)
    # three TF beads per promoter; one promoter per tfd DNA beads
    tf_rows = np.zeros([int(n*3/tfd),6])
    filled = 0
    for site in range(n-1):
        # promoters sit at beads 0, tfd, 2*tfd, ...
        if (site+1)%tfd != 1:
            continue
        first = dna[site]
        second = dna[site+1]
        base = filled + n      # TF ids continue after the DNA ids
        tf_rows[filled] = [base+1, 4, 4, first[3]+0.6, first[4], first[5]]
        tf_rows[filled+1] = [base+2, 5, 5, first[3]+1.4, first[4], first[5]]
        tf_rows[filled+2] = [base+3, 4, 4, second[3]+0.6, second[4], second[5]]
        filled += 3
    # stack DNA and bound-TF beads into one (N, 6) table
    return np.append(dna, tf_rows).reshape(-1, 6)
"""_________________________________________________________________________"""
def freetf(um,index,n):
    """
    Scatter free transcription-factor trimers at random positions inside
    the cell boundary, at concentration ``um`` (micromolar) for the system
    volume implied by ``n`` DNA beads. Atom ids continue from ``index``.
    Returns an (3*ntf, 6) array of TF atoms.
    """
    from random import random
    import numpy as np
    site_type = 3     # binding-domain bead type
    core_type = 5     # core bead type
    r, sys_vol = radius(n)
    # micromolar -> particle count: N_A * (L per m^3) * (mol per umol) * V * uM
    count = int(6.022*(10**23) * 1000 * 10**(-6) * sys_vol * um)
    sqrt2 = 2**(1/2)
    tf_atoms = np.zeros([3*count,6])
    atom_id = index + 1
    for row in range(0, 3*count, 3):
        # uniform position inside the capsule bounding box
        x = 4*r*random() - 2*r
        y = 2*r*random()/sqrt2 - r/sqrt2
        z = 2*r*random()/sqrt2 - r/sqrt2
        tf_atoms[row] = [atom_id, site_type, site_type, x, y, z]
        # two companion beads at fixed offsets form the trimer
        tf_atoms[row+1] = [atom_id+1, core_type, core_type, x-0.66, y+0.56, z+0.61]
        tf_atoms[row+2] = [atom_id+2, site_type, site_type, x+0.33, y+0.67, z+0.48]
        atom_id += 3
    return tf_atoms
"""_________________________________________________________________________"""
def cylinder(r,index,atom_type):
    """
    Generate wall atoms covering the side surface of a cylinder of radius
    ``r`` whose axis spans x in [-2r, 2r].

    Parameters
    ----------
    r : int
        Cylinder radius in sigma; also fixes the axial extent (4r+1 rings).
    index : int
        Id of the last atom already in the system; wall ids continue from
        ``index + 1``.
    atom_type : int
        Atom/molecule type for every wall atom.

    Returns
    -------
    numpy.ndarray of shape (n, 6) with columns id, mol, type, x, y, z.
    """
    import math
    import numpy as np
    # was int(r*3.14159); math.pi is the intended constant
    pr = int(r*math.pi)
    rows = []
    for xcr in range(-2*r, 2*r + 1):        # one ring per axial position
        for step in range(-pr, pr + 1):     # ~2*pi*r atoms around the ring
            index += 1
            rows.append([index, atom_type, atom_type,
                         xcr, math.cos(step/r)*r, math.sin(step/r)*r])
    # build the array once; np.append in the loop was O(n^2)
    return np.array(rows)
"""_________________________________________________________________________"""
def cap(r: int, index: int, atom_type: int):
    """
    Creates caps around given system using an r which could be obtained
    using radius function or manually given.
    Return array of atoms of caps.
    """
    import math
    import numpy as np
    pi = math.pi
    r = r
    gs = r                  # cap (hemisphere) radius
    # latitude steps along the quarter arc; assumes r >= 1, otherwise the
    # division by angle_num below would fail -- TODO confirm callers
    angle_num=int(pi*gs/2)
    cap = np.array([])
    typ = atom_type
    ncol = 6
    #creating atoms of the one cap
    for xl in range (0,angle_num+1):
        angle = (pi/2)*xl/angle_num         # latitude angle: 0 (pole) .. pi/2 (rim)
        xcr = gs*math.cos(angle)            # axial offset of this latitude ring
        r_new = math.sqrt(gs**2-xcr**2)     # ring radius at this latitude
        num = int (2*pi*r_new)              # slots around the ring (0 at the pole)
        for i in range (0,num,2):           # every second slot -> ~half density
            index +=1
            zcr = math.cos(2*pi/num*i)*r_new
            ycr = math.sin(2*pi/num*i)*r_new
            # shifted by 2r+0.5 so the cap sits just past the +x end of the
            # companion cylinder (see cylinder(), axis spans [-2r, 2r])
            coor = [index,typ,typ,xcr+2*r+0.5,ycr, zcr]
            coor = np.array(coor)
            cap = np.append(cap,coor)
    # Mirror cap: negating r flips both the axial offsets and the 2r shift
    # to the -x end; angle_num becomes negative, so the ranges below count
    # down instead of up.
    r = -r
    gs = r
    angle_num = int(pi*gs/2)
    #creating atoms of the opposite cap
    for xl in range (0,angle_num-1,-1):
        angle = (pi/2)*xl/angle_num
        xcr = gs*math.cos(angle)            # negative axial offset (gs < 0)
        r_new = math.sqrt(gs**2-xcr**2)
        num = int (2*pi*r_new)
        for i in range (0,-num,-2):
            index +=1
            zcr = math.cos(2*pi/num*i)*r_new
            ycr = math.sin(2*pi/num*i)*r_new
            coor = [index,typ,typ,xcr+2*r-0.5,ycr, zcr]
            coor = np.array(coor)
            cap = np.append(cap,coor)
    # reshape the flat append buffer into (n_atoms, 6) rows
    row = len(cap)/6
    nrow = int(row)
    cap = cap.reshape(nrow,ncol)
    #returns an array with atoms of caps
    return cap
"""_________________________________________________________________________"""
def membrane(r,index,atom_type):
    """
    Build the full cell membrane: a cylindrical side wall plus two
    hemispherical end caps, with atom ids continuing from ``index``.
    Returns an (n, 6) array of membrane atoms.
    """
    import numpy as np
    tube = cylinder(r,index,atom_type)
    # cap ids continue after the cylinder wall ids
    caps = cap(r, index + len(tube), atom_type)
    shell = np.append(tube, caps).reshape(-1, 6)
    return shell
"""_________________________________________________________________________"""
def bonder(n,um,tfd):
    """
    Build the bond table: a closed ring of FENE bonds through the n DNA
    beads (type 1), plus two bonds (1-2, 2-3) per transcription-factor
    trimer (type 2) covering both bound and free TFs.
    Returns an (n + 2*n_tfs, 4) array: id, type, atom1, atom2.
    """
    import numpy as np
    # total beads in the DNA + bound-TF chain, used as the id offset
    chain_len = int(n/tfd*(tfd+3))
    n_free = int(len(freetf(um, chain_len, n))/3)
    n_bound = int(n/tfd)
    n_tfs = n_bound + n_free
    bonds = np.zeros([n + 2*n_tfs, 4])
    dna_bond = 1
    for bead in range(1, n + 1):
        # close the ring: last bead bonds back to bead 1
        partner = bead + 1 if bead < n else 1
        bonds[bead - 1] = [bead, dna_bond, bead, partner]
    tf_bond = 2
    for k in range(n_tfs):
        first = n + 3*k + 1      # first bead of the k-th trimer
        bid = n + 2*k + 1        # two bond ids per trimer
        bonds[n + 2*k] = [bid, tf_bond, first, first + 1]
        bonds[n + 2*k + 1] = [bid + 1, tf_bond, first + 1, first + 2]
    return bonds
"""_________________________________________________________________________"""
def angler(n,um,tfd):
    """
    Build the angle table: one harmonic angle (type 1) per DNA bead with
    wrap-around closure of the ring, plus one angle (type 2) per
    transcription-factor trimer, bound and free alike.
    Returns an (n + n_tfs, 5) array: id, type, atom1, atom2, atom3.
    """
    import numpy as np
    # DNA + bound-TF chain length (kept as float, matching the count math)
    chain_len = n/tfd*(tfd+3)
    n_free = int(len(freetf(um, chain_len, n))/3)
    n_bound = int(chain_len/(tfd+3))
    n_tfs = n_bound + n_free
    angles = np.zeros([n + n_tfs, 5])
    dna_angle = 1
    for bead in range(1, n + 1):
        # wrap the last two triplets around to beads 1 and 2 (ring closure)
        second = bead + 1 if bead < n else 1
        third = bead + 2 if bead < n - 1 else (1 if bead == n - 1 else 2)
        angles[bead - 1] = [bead, dna_angle, bead, second, third]
    tf_angle = 2
    for k in range(n_tfs):
        first = n + 3*k + 1      # first bead of the k-th trimer
        angles[n + k] = [n + k + 1, tf_angle, first, first + 1, first + 2]
    return angles
"""_________________________________________________________________________"""
def buildNwrite(um,filetoread,filetowrite,tfd=40,wdn=1):
    """
    Assemble the whole system (DNA with bound TFs, free TFs and the
    membrane) and write it out as a LAMMPS data file.

    Parameters
    ----------
    um : int
        Forwarded to ``freetf`` (presumably the free-TF count -- confirm).
    filetoread : str
        Input file parsed by ``boundtf`` for the DNA/bound-TF coordinates.
    filetowrite : str
        Name of the LAMMPS data file to create.
    tfd : int, optional
        DNA monomers per bound transcription factor (default 40).
    wdn : int, optional
        Forwarded to ``boundtf`` (default 1).

    Returns
    -------
    None
    """
    import numpy as np
    tf = tfd
    pos = boundtf(filetoread,tf,wdn)
    ps = len(pos)
    # Number of DNA monomers: each block of (tf+3) rows holds tf monomers.
    n = int(ps/(tf+3)*(tf))
    r,sysVol = radius(n)
    ftf = freetf(um,ps,n)
    ft = len(ftf)
    # Membrane atom ids start after all DNA/TF atoms.
    index = ft + ps
    mem = membrane(r,index,6)
    angles = angler(n,um,tf)
    bonds = bonder(n,um,tf)
    atoms1 = np.append(pos,ftf)
    atoms = np.append(atoms1,mem)
    boy = len(atoms)
    ncol = 6
    nrow = int(boy/ncol)
    atoms = atoms.reshape(nrow,ncol)
    # Use a context manager so the data file is flushed and closed even if
    # a write fails part-way through (the original handle was never closed,
    # risking truncated output on interpreter exit).
    with open(filetowrite,"w") as ll:
        # LAMMPS data files start with a title line; we leave it blank.
        ll.write("\n\n")
        # Header: counts of atoms/bonds/angles and their type counts.
        num_atoms = str(nrow)
        ll.write(num_atoms+" atoms\n")
        ll.write("6 atom types\n")
        bnds = str(len(bonds))
        ll.write(bnds+" bonds\n")
        ll.write("2 bond types\n")
        angls = str(len(angles))
        ll.write(angls+" angles\n")
        ll.write("2 angle types\n\n")
        # Simulation box bounds, padded by a small margin x.
        x = 2.2324242
        ll.write(str(-3*r-x)+" "+str(3*r+x)+" xlo xhi\n")
        ll.write(str(-r-x)+" "+str(r+x)+" ylo yhi\n")
        ll.write(str(-r-x)+" "+str(r+x)+" zlo zhi\n\n")
        #masses declared
        ll.write("Masses\n\n")
        ll.write("1 1\n")
        ll.write("2 1\n")
        ll.write("3 2\n")
        ll.write("4 2\n")
        ll.write("5 2\n")
        ll.write("6 1\n\n")
        #pair coeffs declared
        ll.write("Pair Coeffs # lj/cut\n\n")
        ll.write("1 12 1\n")
        ll.write("2 12 1\n")
        ll.write("3 12 1\n")
        ll.write("4 12 1\n")
        ll.write("5 12 1\n")
        ll.write("6 12 1\n\n")
        #bond coeffs declared
        ll.write("Bond Coeffs # fene\n\n")
        ll.write("1 30 1.5 1 1\n")
        ll.write("2 30 2.0 1.3 1.3\n\n")
        #angle coeff declared
        ll.write("Angle Coeffs # harmonic\n\n")
        ll.write("1 1 180.0\n")
        ll.write("2 12 40\n\n")
        # Atom rows: first three columns (id, molecule, type) as ints,
        # the last three (x, y, z) as floats.
        ll.write("Atoms # angle\n\n")
        for row in atoms:
            for i in range (6):
                if i<3:
                    ii = int(row[i])
                    ll.write(str(ii)+" ")
                else:
                    ll.write(str(row[i])+" ")
            ll.write("\n")
        #writing bonds to the file
        ll.write("\nBonds\n\n")
        for row in bonds:
            for i in range (4):
                ii = int(row[i])
                ll.write(str(ii)+" ")
            ll.write("\n")
        #writing angles to the file
        ll.write("\nAngles\n\n")
        for row in angles:
            for i in range (5):
                ii = int(row[i])
                ll.write(str(ii)+" ")
            ll.write("\n")
    #this function returns nothing
    return
def dumpread(ftoread="dump.main",ftowrite="main.csv"):
    """
    Read a LAMMPS dump file and count, per timestep, how many of the
    initially bound TFs remain bound (promoter-TF distance <= 2.5).

    Writes two outputs:
      * report.txt  - one line per detached TF per timestep.
      * *ftowrite*  - CSV with two columns: timestep index, bound-TF count.
    """
    #transferring dump data into an 3D array---------------------------------------
    #importing necessary modules----------------------------------------------------
    import numpy as np
    from math import sqrt
    #reading dump file line-by-line------------------------------------------------
    dump = open(ftoread, "r")
    lines = dump.readlines()
    dump.close()
    # Box lengths from the dump header, used to scale the dump's reduced
    # (scaled) coordinates back to real distances.
    coeff = lines[5].split()
    xco = float(coeff[1])
    coeff2 = lines[6].split()
    yco = float(coeff2[1])
    coeff3 = lines[7].split()
    zco = float(coeff3[1])
    # Count atoms per type; one dump "batch" = 9 header lines + one per atom.
    number_atom = int(lines[3])
    batch = number_atom + 9
    n = 0#dna monomer
    btf = 0#bound tf
    freetf = 0#free tf
    for a in lines[9:batch+1]:
        b = a.split()[1]
        if b == "1" or b=="2":
            n +=1
        elif b == "4":
            # Counted twice per TF (two beads of this type), hence 0.5.
            btf +=0.5
        elif b == "3":
            freetf+=0.5
    freetf = int(freetf)
    btf = int(btf)
    # DNA monomers per bound TF.
    ratio = int(n/btf)
    #number of the timesteps-------------------------------------------------------
    time_num = int(len(lines)/batch)
    boundN = np.zeros(time_num)
    ts = np.zeros(time_num)
    first = btf
    #creating array of zeros with intended size------------------------------------
    data = np.zeros([time_num,number_atom,3])
    times = np.zeros([time_num,1])
    #opening new file for results--------------------------------------------------
    repp = "report.txt"
    rp = open(repp,"w")
    c = 0
    # Main loop: one pass per timestep.
    for i in range(time_num):
        step = int(lines[i*(batch)+1])
        times[i,0] = step
        c = c + 1
        print("Processing... " + str(c) + " of " + str(time_num))
        minuS = 0
        kk = -3
        # Load this timestep's coordinates into the 3D array, indexed by atom id.
        for k in range(number_atom):
            values = lines[i*batch+k+9].split()
            atomID = int(values[0])
            data[i,atomID-1,0] = float(values[2])
            data[i,atomID-1,1] = float(values[3])
            data[i,atomID-1,2] = float(values[4])
        # Promoter sites: average of the two monomers flanking each site.
        for j in range(1,n):
            if j%ratio == 1:
                # Promoter position: midpoint of monomers j-1 and j.
                aP = (data[i,j-1] + data[i,j])/2
                # Matching bound TF: midpoint of its two outer beads.
                kk +=3
                index = n + kk
                aTF = (data[i,index] + data[i, index + 2])/2
                # Scaled distance between promoter and TF midpoints.
                ds2 = (xco*(aP[0]-aTF[0]))**2 + (yco*(aP[1]-aTF[1]))**2 + (zco*(aP[2]-aTF[2]))**2
                distance = sqrt(ds2)
                #print(kk)
                if distance > 2.5:
                    rp.write("promoter with atom ID " + str(j) + " is apart from the TF with ID "+str(index + 1) +" in timestep " + str(step)+" (Distance: " + str(distance)+")\n")
                    minuS += 1
        last = first - minuS
        boundN[i] = last
        ts[i] = i + 1
        #print (last)
        rp.write("End of timestep " + str(step) + "\n\n")
    # Close the report so buffered lines are flushed to disk (the original
    # left this handle open for the life of the process).
    rp.close()
    ts = np.array(ts)
    boundN = np.array(boundN)
    lenn = len(ts)
    ts = ts.reshape(lenn,1)
    boundN = boundN.reshape(lenn,1)
    lc = np.append(ts, boundN, axis = 1)
    lc = lc.reshape(lenn,2)
    np.savetxt(ftowrite,lc,fmt= "%s",delimiter=",")
    print("-------------- DONE --------------")
    return
# Script entry point: default run parses dump.main and writes main.csv.
if __name__ == "__main__":
    dumpread("dump.main","main.csv")
import argparse
import re
import subprocess
from collections import defaultdict
from dataclasses import dataclass
from importlib import metadata
from typing import Dict, Iterable, List, Tuple
# Resolve the installed package version; fall back when running from a
# source checkout that has not been pip-installed.
try:
    __version__ = metadata.version("zkeys")
except metadata.PackageNotFoundError:  # pragma: no cover
    __version__ = "unknown"
def main() -> None:
    """Parse command-line options, collect zsh keybindings and print them."""
    arg_parser = argparse.ArgumentParser(
        description=__doc__.strip(),
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    arg_parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s {__version__}",
    )
    arg_parser.add_argument(
        "file",
        nargs="?",
        type=argparse.FileType("r"),
        help="read lines from file ('-' for stdin)",
    )
    mode = arg_parser.add_mutually_exclusive_group()
    mode.add_argument(
        "-i",
        "--in-string",
        action="store_true",
        help="sort by in-string instead of widget",
    )
    mode.add_argument(
        "-w",
        "--widget",
        action="store_true",
        help="group by widget",
    )
    mode.add_argument(
        "-p",
        "--prefix",
        action="store_true",
        help="group by prefix",
    )
    opts = arg_parser.parse_args()

    # Read bindings from the given file, or dump them from a live zsh.
    if opts.file:
        raw_lines = (line.strip() for line in opts.file)
    else:
        raw_lines = run_bindkey()
    bindings = sorted(parse_bindkey(raw_lines))

    if opts.widget:
        # One row per widget, listing every in-string bound to it.
        by_widget = group_bindings(bindings)
        for widget, group in sorted(by_widget.items()):
            in_strings = "".join(f"{b.in_string:8}" for b in group)
            print(f"{widget:40}{in_strings}".strip())
    elif opts.prefix:
        # One row per prefix, listing the final key of each binding.
        by_prefix = group_bindings(bindings, attr="prefix")
        for prefix, group in by_prefix.items():
            keys = " ".join(b.key for b in group)
            print(f"{prefix:8}{keys}".strip())
    else:
        # Flat listing, ordered by widget unless -i asked for in-string order.
        ordered = bindings
        if not opts.in_string:
            ordered = sorted(bindings, key=lambda b: b.widget)
        for binding in ordered:
            print(f"{binding.in_string:10}{binding.widget}")
# Sort rank for known key-sequence prefixes: lower rank sorts earlier.
# Unknown prefixes fall back to rank 999 (see Keybinding._compare_string).
PREFIXES = {
    prefix: rank
    for rank, prefix in enumerate(
        [
            "^",
            "^[",
            "^[^",
            "M-",
            "M-^",
            "^X",
            "^X^",
            "^[[",
            "^[O",
            "^[[3",
        ]
    )
}
# Widgets filtered out of the listing (see parse_bindkey).
IGNORE_WIDGETS = {
    "bracketed-paste",
    "digit-argument",
    "neg-argument",
    "self-insert-unmeta",
}
@dataclass
class Keybinding:
    """
    Map an in-string like '^[b' to a ZLE widget like 'backward-word'.

    >>> binding = Keybinding('^[b', 'backward-word')
    >>> binding.in_string
    '^[b'
    >>> binding.prefix
    '^['
    >>> binding.key
    'b'
    >>> binding.widget
    'backward-word'
    """

    in_string: str
    widget: str

    @property
    def prefix(self) -> str:
        """Everything before the final character of the in-string."""
        return self.in_string[:-1]

    @property
    def key(self) -> str:
        """The final character of the in-string."""
        return self.in_string[-1]

    @property
    def _compare_string(self) -> Tuple[int, str]:
        """Sort key: known-prefix rank first, then the key, case-insensitively."""
        rank = PREFIXES.get(self.prefix, 999)
        return (rank, self.key.upper())

    def __lt__(self, other: "Keybinding") -> bool:
        """Order bindings by their (prefix rank, uppercased key) sort key."""
        return self._compare_string < other._compare_string
def run_bindkey() -> Iterable[str]:
    """Run ``bindkey -L`` in a zsh and return its output lines.

    The shell is started as login + interactive so the user's configured
    keybindings are loaded before being dumped.
    """
    command = ["zsh", "--login", "--interactive", "-c", "bindkey -L"]
    completed = subprocess.run(command, capture_output=True, text=True)
    return completed.stdout.splitlines()
def parse_bindkey(lines: Iterable[str]) -> Iterable[Keybinding]:
    """Parse lines like 'bindkey "^[b" backward-word' into Keybinding objects."""
    pattern = re.compile(r'bindkey "(?P<in_string>.+)" (?P<widget>.+)')
    for line in lines:
        match = pattern.match(line)
        if match is None:
            # Not a bindkey line; skip it.
            continue
        in_string = match.group("in_string")
        widget = match.group("widget")
        if widget in IGNORE_WIDGETS:
            continue
        # HACK: Remove slashes for readability, e.g. \M-\$ becomes M-$
        # Could be overzealous, esp. with custom keybindings
        in_string = in_string.replace("\\", "")
        yield Keybinding(in_string, widget)
def group_bindings(
    bindings: Iterable[Keybinding],
    *,
    attr: str = "widget",
) -> Dict[str, List[Keybinding]]:
    """Bucket *bindings* by the value of the attribute named *attr*."""
    grouped: Dict[str, List[Keybinding]] = defaultdict(list)
    for binding in bindings:
        bucket = getattr(binding, attr)
        grouped[bucket].append(binding)
    return grouped
# Allow running the module directly as a script.
if __name__ == "__main__":  # pragma: no cover
    main()
zkie
====
A simple tool for accessing ZooKeeper configuration from the command line, with syntax highlighting.
.. code:: bash
pip install 'zkie[ui]'
To install the minimal command-line tool without syntax highlighting, you can use:
.. code:: bash
pip install 'zkie'
Usage
=====
.. code:: bash
# list all nodes recursively
zk find /
# upload myconfig.json to config directory
zk upload myconfig.json /config/
# display myconfig.json with syntax highlight
zk get /config/myconfig.json
# list directory
zk ls /config
| zkie | /zkie-0.1.4.tar.gz/zkie-0.1.4/README.rst | README.rst |
import sys
import yaml
import asyncio
import aiohttp
async def fetch_json(url):
    """GET *url* and decode the body as JSON, retrying timeouts up to 5 times.

    Raises asyncio.TimeoutError if every attempt times out.
    """
    connector = aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        for attempt in range(5):
            try:
                timeout = aiohttp.ClientTimeout(total=5)
                async with session.get(url, timeout=timeout) as response:
                    # content_type=None: accept JSON even when the server
                    # reports a non-JSON content type.
                    return await response.json(content_type=None)
            except asyncio.TimeoutError:
                continue
        raise asyncio.TimeoutError
async def name_worker(type_id):
    """Resolve an EVE type id to its display name via the ESI universe API."""
    info = await fetch_json(
        'https://esi.evetech.net/latest/universe/types/{}'.format(type_id))
    return info['name']
async def config_group_worker(group_maybe):
    """Normalize one config entry into a (type_ids, description) pair.

    A list entry is treated as explicit type ids, resolved concurrently to
    a list of names. Any other entry is an ESI market-group id whose
    member types and group name are fetched; bare size names get the
    parent group's name appended for readability.
    """
    if type(group_maybe) is list:
        task_li = []
        for item in group_maybe:
            task_li.append(asyncio.create_task(name_worker(item)))
        name_li = await asyncio.gather(*task_li)
        return group_maybe, name_li
    else:
        url = 'https://esi.evetech.net/latest/markets/groups/{}'.format(group_maybe)
        response = await fetch_json(url)
        name = response['name']
        # Size-only group names are ambiguous; append the parent group's name.
        if name in ['Micro', 'Small', 'Medium', 'Large', 'Capital']:
            parent_group_id = response['parent_group_id']
            url = 'https://esi.evetech.net/latest/markets/groups/{}'.format(parent_group_id)
            parent_response = await fetch_json(url)
            name = '{} {}'.format(name, parent_response['name'])
        return response['types'], name
async def zkill_api_worker(km_queue, config, task_li):
    """Producer: fetch loss killmails from zKillboard and feed the queue.

    Pulls the first 5 pages of losses for the configured ship type, waits
    up to 5 seconds for consumers to drain the queue, then cancels every
    task in *task_li* to shut the pipeline down.
    """
    for i in range(5):
        page = i + 1
        url = 'https://zkillboard.com/api/losses/shipTypeID/{}/page/{}/'.format(config['ship'], page)
        response = await fetch_json(url)
        for zkill_km in response:
            await km_queue.put(zkill_km)
    # Give the consumers a bounded grace period to finish remaining items.
    try:
        await asyncio.wait_for(km_queue.join(), 5)
    except asyncio.TimeoutError:
        pass
    for task in task_li:
        task.cancel()
def is_fitted(item):
    """Return True if a killmail item's inventory flag is a fitting slot.

    Accepted flag ranges: 11-34, 92-99 and 125-132 (presumably module,
    rig and subsystem slots -- confirm against the EVE inventory-flag list).
    """
    slot_flag = item['flag']
    fitted_ranges = ((11, 34), (92, 99), (125, 132))
    return any(lo <= slot_flag <= hi for lo, hi in fitted_ranges)
async def esi_worker(km_queue, config, task_li, worker_stat):
    """Consumer: fetch killmail details from ESI and print matching links.

    A killmail matches when, for EVERY block in config['item'], at least
    one of that block's type ids appears among the victim's fitted items
    (AND across blocks, OR within a block). Cancels all tasks once
    config['fetch-limit'] matches have been printed.
    """
    item_block_li = config['item']
    while True:
        zkill_km = await km_queue.get()
        worker_stat['total'] += 1
        km_id = zkill_km['killmail_id']
        km_hash = zkill_km['zkb']['hash']
        url = 'https://esi.evetech.net/latest/killmails/{}/{}'.format(km_id, km_hash)
        try:
            response = await fetch_json(url)
        except asyncio.TimeoutError:
            # NOTE(review): skips task_done() for this item, so
            # km_queue.join() can hang; the producer's wait_for timeout
            # covers this case.
            continue
        fitting_li = [item['item_type_id'] for item in response['victim']['items'] if is_fitted(item)]
        is_km_accepted = True
        for item_block in item_block_li:
            item_found = False
            for item in item_block:
                if item in fitting_li:
                    item_found = True
            if not item_found:
                is_km_accepted = False
        if is_km_accepted:
            print('https://zkillboard.com/kill/{}/'.format(km_id))
            worker_stat['output'] += 1
            # Reached the configured limit: stop the whole pipeline.
            if worker_stat['output'] >= config['fetch-limit']:
                for task in task_li:
                    task.cancel()
        km_queue.task_done()
async def main():
    """Load the YAML config (path in argv[1]), resolve names, run the search."""
    config = {}
    with open(sys.argv[1], 'r') as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    ship_name = await name_worker(config['ship'])
    print('Searching for {} killmails with:\n'.format(ship_name))
    # Resolve each config entry into (type_ids, description), concurrently.
    config_tasks_li = []
    for entry in config['item']:
        config_tasks_li.append(asyncio.create_task(config_group_worker(entry)))
    parsed_config = await asyncio.gather(*config_tasks_li)
    config['item'] = [x[0] for x in parsed_config]
    desc_li = [x[1] for x in parsed_config]
    # Echo the effective search criteria in AND/OR form.
    print(
        '\nAND '.join(
            ['\t' + ' OR '.join(desc) if type(desc) is list else '\t' + desc
             for desc in desc_li
             ]
        ) + '\n'
    )
    # One producer (zKillboard pages) plus 100 ESI consumers share the queue.
    km_queue = asyncio.Queue(maxsize=100)
    task_li = []
    worker_stat = {'output': 0, 'total': 0}
    task_li.append(asyncio.create_task(zkill_api_worker(km_queue, config, task_li)))
    for i in range(100):
        task_li.append(asyncio.create_task(esi_worker(km_queue, config, task_li, worker_stat)))
    # return_exceptions=True so the shutdown cancellations are swallowed.
    await asyncio.gather(*task_li, return_exceptions=True)
    print(
        '\nFinished. {} matching killmails found. {} killmails examined in total.'
        .format(worker_stat['output'], worker_stat['total'])
    )
def cli_entry_point():
    """Synchronous console-script entry point wrapping the async main()."""
    asyncio.run(main())
if __name__ == '__main__':
    asyncio.run(main())
==================
Python Boilerplate
==================
.. image:: https://img.shields.io/pypi/v/python_boilerplate.svg
:target: https://pypi.python.org/pypi/python_boilerplate
.. image:: https://img.shields.io/travis/audreyr/python_boilerplate.svg
:target: https://travis-ci.com/audreyr/python_boilerplate
.. image:: https://readthedocs.org/projects/python-boilerplate/badge/?version=latest
:target: https://python-boilerplate.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
Python Boilerplate contains all the boilerplate you need to create a Python package.
* Free software: MIT license
* Documentation: https://python-boilerplate.readthedocs.io.
Features
--------
* TODO
Credits
-------
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
| zkit | /zkit-0.1.0.tar.gz/zkit-0.1.0/README.rst | README.rst |
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/audreyr/python_boilerplate/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
Python Boilerplate could always use more documentation, whether as part of the
official Python Boilerplate docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/audreyr/python_boilerplate/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `python_boilerplate` for local development.
1. Fork the `python_boilerplate` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/python_boilerplate.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv python_boilerplate
$ cd python_boilerplate/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
$ flake8 python_boilerplate tests
$ python setup.py test or pytest
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check
https://travis-ci.com/audreyr/python_boilerplate/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ python -m unittest tests.test_python_boilerplate
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed (including an entry in HISTORY.rst).
Then run::
$ bump2version patch # possible: major / minor / patch
$ git push
$ git push --tags
Travis will then deploy to PyPI if tests pass.
| zkit | /zkit-0.1.0.tar.gz/zkit-0.1.0/CONTRIBUTING.rst | CONTRIBUTING.rst |
.. highlight:: shell
============
Installation
============
Stable release
--------------
To install Python Boilerplate, run this command in your terminal:
.. code-block:: console
$ pip install python_boilerplate
This is the preferred method to install Python Boilerplate, as it will always install the most recent stable release.
If you don't have `pip`_ installed, this `Python installation guide`_ can guide
you through the process.
.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
From sources
------------
The sources for Python Boilerplate can be downloaded from the `Github repo`_.
You can either clone the public repository:
.. code-block:: console
$ git clone git://github.com/audreyr/python_boilerplate
Or download the `tarball`_:
.. code-block:: console
$ curl -OJL https://github.com/audreyr/python_boilerplate/tarball/master
Once you have a copy of the source, you can install it with:
.. code-block:: console
$ python setup.py install
.. _Github repo: https://github.com/audreyr/python_boilerplate
.. _tarball: https://github.com/audreyr/python_boilerplate/tarball/master
| zkit | /zkit-0.1.0.tar.gz/zkit-0.1.0/docs/installation.rst | installation.rst |
# Python ZKLib #
A Python library for ZK fingerprint attendance machines that communicates over the network using the UDP protocol on port 4370.
## Getting started
Login as admin to attendance machine and set the ip address for example (to 192.168.0.201) and connect the machine with ethernet to your network.
```bash
pip install zklib
```
Connect to the machine
```python
import sys
import zklib
import time
import zkconst
zk = zklib.ZKLib("192.168.0.201", 4370)
ret = zk.connect()
print "connection:", ret
```
If the result is
```
connection: True
```
then you are connected.
More examples on how to use the Library available in the
zktest.py file
| zklib | /zklib-0.1.1.tar.gz/zklib-0.1.1/README.txt | README.txt |
import zookeeper, threading
import sys
# True once the session watcher fires; also reused by connect() to flag
# that the ROOT node has been ensured.
connected=False
# Guards `connected` and lets connect() block until the session is up.
conn_cv = threading.Condition( )
# Host string remembered by connect() so reconnect() can re-dial it.
reconnect_host = ''
# World scheme, all permissions (perms 0x1f): anyone may read/write.
ZOO_OPEN_ACL_UNSAFE = {"perms":0x1f, "scheme":"world", "id" :"anyone"};
# Parent znode under which every lock's node lives.
ROOT="/ZkLock"
# locks is a hash by lock name, each value being a list of locks that are waiting for that name.
locks = {}
def my_connection_watcher(handle,type,state,path):
    """Session watcher: flag the connection as up and wake connect().

    The argument list is fixed by the zookeeper.init() watcher contract.
    """
    global connected, conn_cv
    # The condition variable also serves as the lock guarding `connected`.
    with conn_cv:
        connected = True
        conn_cv.notifyAll()
def my_lock_watcher(handle,type,state,path):
    """Children-changed watcher: wake every local waiter on this lock path.

    Each blocked Lock.acquire() then re-checks whether it now owns the
    oldest child node. The argument list is fixed by the get_children()
    watcher contract.
    """
    global locks
    if path not in locks:
        return
    for waiter in locks[path]:
        with waiter.cv:
            waiter.cv.notifyAll()
def connect(host=None):
    """Open a ZooKeeper session and ensure the ROOT node exists.

    Blocks until the session watcher reports a live connection, then
    creates ROOT ("/ZkLock") if missing. Remembers *host* so that
    reconnect() can re-dial the same ensemble.
    """
    global conn_cv, connected, handle
    global reconnect_host
    if host is None:
        host = "localhost:2181"
    reconnect_host = host
    # Block until my_connection_watcher signals the session is up.
    conn_cv.acquire()
    handle = zookeeper.init(host, my_connection_watcher, 10000)
    while not connected:
        conn_cv.wait()
    conn_cv.release()
    # The flag is reused below to mean "ROOT node ensured".
    connected = False
    while not connected:
        try:
            zookeeper.create(handle, ROOT, "1", [ZOO_OPEN_ACL_UNSAFE], 0)
            connected = True
        except zookeeper.NodeExistsException as e:
            # No worries
            connected = True
        except zookeeper.ConnectionLossException:
            continue
        # Any other error is fatal (this bare re-raise is a no-op kept for clarity).
        except:
            raise
def reconnect():
    """Re-establish the ZooKeeper session using the last connect() host."""
    connect(reconnect_host)
class Lock:
    """A distributed mutex backed by ZooKeeper sequential ephemeral nodes.

    Each acquire() creates a child of /ZkLock/<name>; whoever holds the
    lowest-numbered child owns the lock. Ephemeral nodes guarantee the
    lock is dropped automatically if the owning session dies.
    """
    def __init__(self, name):
        """Ensure the lock's parent node /ZkLock/<name> exists."""
        self.name = ROOT + '/' + name
        # Condition variable my_lock_watcher uses to wake acquire().
        self.cv = threading.Condition()
        created = False
        while not created:
            try:
                zookeeper.create(handle, self.name, "0", [ZOO_OPEN_ACL_UNSAFE], 0)
                created=True
            except zookeeper.NodeExistsException:
                # S'ok.
                created = True
            except zookeeper.ConnectionLossException:
                continue
        self.znode = None
    def createNode(self):
        """Create this client's unique child node, retrying on disconnects."""
        # the EPHEMERAL flag creates a new unique node name with an increasing sequence
        created = False
        while not created:
            try:
                self.znode = zookeeper.create(handle, self.name + '/lock', "0", [ZOO_OPEN_ACL_UNSAFE], zookeeper.EPHEMERAL | zookeeper.SEQUENCE)
                created = True
            except zookeeper.ConnectionLossException:
                reconnect()
                continue
    def acquire(self):
        """Block until this client owns the lock; always returns True."""
        # Here's what this does:
        # Creates a child node of the named node with a unique ID
        # Gets all the children of the named node
        # Looks to see if the node it just created is the lowest in sequence
        # If it is, the lock is ours! Done!
        # If not, wait for the children callback to signal that there has been a change to children, then start over at step 2
        #
        # So it's not polling, but it will wake up any time another client starts waiting, in addition to any time a client goes away
        global handle
        global locks
        # Register in the module-level table so my_lock_watcher can find us.
        if not self.name in locks:
            locks[self.name] = []
        locks[self.name] += [self]
        exists = False  # NOTE(review): unused variable.
        self.createNode()
        # Our child's basename, compared against the sorted children list.
        self.keyname = self.znode[self.znode.rfind('/') + 1:]
        acquired = False
        self.cv.acquire()
        while not acquired:
            try:
                children = zookeeper.get_children(handle, self.name, my_lock_watcher)
            except zookeeper.ConnectionLossException:
                reconnect()
                continue
            children.sort()
            if len(children) == 0 or not self.keyname in children:
                # Disconnects or other errors can cause this
                self.createNode()
                continue
            if self.keyname == children[0]:
                # The lock is ours!
                acquired = True
            else:
                # Wait for a notification from get_children
                self.cv.wait()
        self.cv.release()
        locks[self.name].remove(self)
        return True
    def release(self):
        """Delete our child node, letting the next waiter take the lock."""
        # All release has to do, if you follow the logic in acquire, is delete the unique ID that this lock created. That will wake
        # up all the other waiters and whoever is first in line can then have the lock.
        global handle
        released = False
        while not released:
            try:
                zookeeper.delete(handle, self.znode)
                released = True
            except zookeeper.ConnectionLossException:
                reconnect()
        self.znode = None
        self.keyname = None
Copyright (c) 2011 Joe Rumsey ([email protected])
Released under the LGPL, see COPYING.txt for details
zklock is a python package that provides a simple distributed mutex
implementation using zookeeper as the back end.
* HOW TO INSTALL
$ pip install zklock
Or get it from github, see http://github.com/tinyogre/zklock for details
zklock requires zkpython. Additionally, installing zkpython requires
the zookeeper C library be installed first. You can download and
learn about ZooKeeper here: http://zookeeper.apache.org/. Though
ZooKeeper is written in Java, the C library is part of the main
distribution.
* WHY DOES THIS EXIST
I'm writing a mobile game that includes an online turn based
mode. I started investigating Cassandra for storage of games, but
almost immediately found that I needed some kind of row level locking
to avoid races with both players updating the same game. Turns in
this game are simultaneously executed, and I accept a save from
whoever submits it first. Stored chat has a similar issue.
The web interface is built in python with Django. So I needed a lock
mechanism for Cassandra in python. I found existing ZooKeeper
solutions in Java, but none in Python. So here's this. It's cribbed
from several other Java examples out there, but not translated.
* HOW TO USE THIS
See zklocktest.py on github for a simple example.
| zklock | /zklock-0.2.tar.gz/zklock-0.2/README.txt | README.txt |
from __future__ import print_function
import shutil
import random
from argparse import ArgumentParser
# Unicode code-point ranges for each character class (full-width forms).
# NOTE(review): range() excludes its upper bound, so several classes stop
# one-or-more characters early (e.g. letters_upper covers only A-P, not
# A-Z 0xFF21-0xFF3A; numbers omit 9 at 0xFF19) -- possibly off-by-one;
# confirm intent.
hiragana = range(0x3041, 0x308f)
katakana = range(0x30a1, 0x30f3)
numbers = range(0xff10, 0xff19)
letters_upper = range(0xff21, 0xff31)
letters_lower = range(0xff41, 0xff5a)
# Characters never emitted (the archaic kana wi/we); filtered by choose().
blacklist = u'ゐゑヰヱ'
def choose(char_type):
    """Pick a random character from *char_type* (a range of code points),
    re-drawing whenever the character is in the module-level blacklist.
    """
    while True:
        code_point = random.choice(char_type)
        try:
            # Python 2: unichr() builds a unicode character.
            ch = unichr(code_point)
        except NameError:
            # Python 3: chr() already returns text.
            ch = chr(code_point)
        if ch in blacklist:
            continue
        return ch
def maybe():
    """Return True roughly 30% of the time (sprinkles optional characters)."""
    # randint(0, 9) delegates to randrange(0, 10), so this draws the
    # identical value from the RNG stream.
    return random.randrange(10) < 3
def generate_simple(length, letters=True, numerals=True, secure=False):
    """Pick a random mix of character classes and delegate to generate().

    With secure=True every class is required; otherwise hiragana-vs-katakana
    and upper-vs-lower case are decided by coin flips.
    """
    use_hira = secure or bool(random.getrandbits(1))
    use_upper = secure or (letters and bool(random.getrandbits(1)))
    use_lower = secure or (letters and not use_upper)
    return generate(
        length,
        hira=use_hira,
        kata=secure or not use_hira,
        num=secure or numerals,
        upper=use_upper,
        lower=use_lower,
    )
def generate(length, hira=True, kata=True, num=True, upper=True, lower=True):
    """Generate one password of *length* characters containing at least one
    character from every enabled class.

    On each pass, hira/kata characters are always appended when enabled;
    num/upper/lower are appended with ~30% probability (see maybe()). If
    the finished password does not cover exactly the requested classes,
    the whole attempt is retried recursively.
    """
    # Each required class needs at least one slot in the password.
    if length < [hira, kata, num, upper, lower].count(True):
        raise ValueError(
            'Password of length %d cannot meet requirements' % length)
    accum = []
    # Track which classes actually appeared in this attempt.
    did_hira = False
    did_kata = False
    did_num = False
    did_upper = False
    did_lower = False
    while True:
        if hira:
            accum.append(choose(hiragana))
            did_hira = True
            if len(accum) >= length:
                break
        if kata:
            accum.append(choose(katakana))
            did_kata = True
            if len(accum) >= length:
                break
        if num and maybe():
            accum.append(choose(numbers))
            did_num = True
            if len(accum) >= length:
                break
        if upper and maybe():
            accum.append(choose(letters_upper))
            did_upper = True
            if len(accum) >= length:
                break
        if lower and maybe():
            accum.append(choose(letters_lower))
            did_lower = True
            if len(accum) >= length:
                break
    # Accept only if exactly the requested classes were used; retry otherwise.
    if all([did_hira == hira,
            did_kata == kata,
            did_num == num,
            did_upper == upper,
            did_lower == lower]):
        return ''.join(accum)
    else:
        return generate(length, hira=hira, kata=kata, num=num, upper=upper, lower=lower)
def main():
    """Command-line entry point: parse options and print passwords."""
    parser = ArgumentParser(
        description='Generate random passwords of full-width Japanese characters')
    parser.add_argument('--secure', '-s', action='store_true',
                        help='generate completely random passwords')
    parser.add_argument('--one-column', '-1', action='store_true',
                        help="don't print the generated passwords in columns")
    parser.add_argument('--no-numerals', '-0', action='store_true',
                        help="don't include numbers in the password")
    parser.add_argument('--no-letters', '-L', action='store_true',
                        help="don't include Roman alphabet characters in the password")
    parser.add_argument('length', nargs='?', type=int,
                        default=8, help='the length of the password')
    parser.add_argument('count', nargs='?', type=int,
                        default=-1, help='the number of passwords to output')
    args = parser.parse_args()
    # shutil.get_terminal_size is Python 3.3+; fall back to 80 columns.
    try:
        term_cols, _ = shutil.get_terminal_size((80, 20))
    except AttributeError:
        term_cols = 80
    # Each full-width character occupies two terminal columns, plus a
    # separating space per password column.
    num_cols = (1 if args.one_column else
                max(1, int(term_cols / (args.length * 2 + 1))))
    # Default count (-1): one password in one-column mode, else 20 rows.
    num_pw = (args.count if args.count != -1 else
              1 if args.one_column else num_cols * 20)
    for n in range(num_pw):
        pw = generate_simple(args.length,
                             letters=not args.no_letters,
                             numerals=not args.no_numerals,
                             secure=args.secure)
        # Newline at the end of each row and after the final password.
        if n % num_cols == num_cols - 1 or n == num_pw - 1:
            print(pw, end='\n')
        else:
            print(pw, end=' ')
# Allow running the module directly as a script.
if __name__ == '__main__':
    main()
# zkpy
A Python library that allows for easy compiling/proving/verifying of zk circuits.
[](https://opensource.org/licenses/Apache-2.0)



[](https://pypi.org/project/zkpy/)
[](https://sahilmahendrakar.github.io/zkpy/)
## Overview
ZKPy is a Python library that allows for easy compiling/proving/verifying of zk circuits. It is implemented as a wrapper around Circom and snarkjs, allowing developers to incorporate zero-knowledge proofs into Python projects.
Features:
The `ptau` module allows users to:
- Create and contribute to a powers of tau ceremony
- Apply beacon to a powers of tau ceremony
- Perform phase 2 of a powers of tau ceremony (for groth16)
- Export and import contributions from 3rd party software
- Verify a powers of tau file
The `circuits` module allows users to:
- Compile circom circuits
- Generate r1cs, wasm, etc
- Generate witness from input file
- Prove circuits with support for three proving schemes (groth16, PLONK, FFLONK)
- Export verification keys
- Verify proofs with support for three proving schemes (groth16, PLONK, FFLONK)
## Dependencies
zkpy requires [Circom](https://docs.circom.io/getting-started/installation/) and [snarkjs](https://github.com/iden3/snarkjs). You can find installation instructions [here](https://docs.circom.io/getting-started/installation/).
## Installation
The recommended way to install zkpy is through pip.
```
pip install zkpy
```
## Usage
### Powers of Tau
Here is an example use case walking through a powers of tau ceremony:
```
from zkpy.ptau import PTau
ptau = PTau()
ptau.start()
ptau.contribute()
ptau.beacon()
ptau.prep_phase2()
```
The `PTau` object maintains an underlying powers of tau file throughout these operations. You can also import an existing ptau file:
```
ptau = PTau(ptau_file="ptau_file.ptau")
```
At any stage, we can verify the powers of tau file is valid:
```
ptau.verify()
```

### Circuit
This class uses a circuit defined in a circom file to generate and verify zk proofs.
Here is an example scenario walking through compiling a circuit, generating witnesses, generating a proof, and verifying the proof:
```
from zkpy.circuit import Circuit, GROTH, PLONK, FFLONK
circuit = Circuit("./circuit.circom")
circuit.compile()
circuit.get_info()
circuit.print_constraints()
circuit.gen_witness("./example_circuits/input.json")
circuit.setup(PLONK, ptau)
circuit.prove(PLONK)
circuit.export_vkey()
circuit.verify(PLONK, vkey_file="vkey.json", public_file="public.json", proof_file="proof.json")
```

## Contributing
Help is always appreciated! Feel free to open an issue if you find a problem, or open a pull request if you've solved an issue.
See more at [CONTRIBUTING.md](./CONTRIBUTING.md) | zkpy | /zkpy-0.2.0.tar.gz/zkpy-0.2.0/README.md | README.md |
# Contributing to zkpy
## Environment Setup
To get started with developing on zkpy, run the following steps:
1. Clone zkpy from git
```
git clone https://github.com/sahilmahendrakar/zkpy.git
cd zkpy
```
2. Create and load a virtual environment
```
python3 -m venv venv
source venv/bin/activate
```
3. Install snarkjs
```
npm install -g snarkjs
```
4. Install circom. Instructions [here](https://docs.circom.io/getting-started/installation/)
5. Install dev dependencies
```
make develop
```
## Issues
If you spot a problem, check if an issue already exists. If no issues exist for the problem you see, feel free to open an issue.
Feel free to look through existing issues to find problems that need to be fixed. You are welcome to open a PR to fix open issues.
## Pull Requests
Please feel encouraged to make pull requests for changes and new features! Before making a pull request, ensure the following works:
1. All tests pass
```
make test
```
2. Lint passes
```
make lint
```
If needed, run
```
make fix
```
3. Ensure all checks pass on Github | zkpy | /zkpy-0.2.0.tar.gz/zkpy-0.2.0/CONTRIBUTING.md | CONTRIBUTING.md |
=======
History
=======
0.2.0 (2020-07-04)
-------------------
* Update supported Python versions: dropped Python 3.4 and added Python 3.8
* zkpytb.pandas: describe_numeric_1d(series): handle case where input series is empty
0.1.1 (2019-08-29)
-------------------
* setup.py: add 'Programming Language :: Python :: 3.7' trove classifier
0.1.0 (2019-08-29)
-------------------
* zkpytb.dicts: Add dict_value_map()
* zkpytb.pandas: add describe_numeric_1d(series)
* Add py 3.7 to tox.ini and .travis.yml
0.0.10 (2018-05-30)
-------------------
* Add AutoDict and AutoOrderedDict classes in zkpytb.dicts
* zkpytb.dicts.hashdict and JsonEncoder: normalize path separator to ensure stable representation and hash for windows and linux.
* Fix tests test_compare_df_cols_*() to reach 100% coverage.
0.0.9 (2018-05-11)
------------------
* Add module zkpytb.json with a custom JSONEncoder class, and use it in hashdict().
0.0.8 (2018-05-11)
------------------
* Add tests for zkpytb.logging
0.0.7 (2018-05-11)
------------------
* zkpytb.dicts: add hashdict() helper.
0.0.6 (2018-04-17)
------------------
* zkpytb.pandas: only try to import statsmodels when using mad()
* Minor changes missed while releasing the previous version.
0.0.5 (2018-04-17)
------------------
* Add an implementation of PriorityQueue based on heapqueue in zkpytb.priorityqueue
* Add mad(c) (median absolute deviation) and percentile(q) functions in zkpytb.pandas
* Add code coverage and coveralls
* Add tests for zkpytb.pandas
* Fix requirements_dev.txt because pandas>=0.21 is not compatible with py34
0.0.4 (2017-06-27)
------------------
* zkpytb.utils: add hashstring() and get_git_hash() helpers.
* Add tests for zkpytb.dicts and zkpytb.utils modules.
0.0.3 (2017-06-23)
------------------
* Add first version of zkpytb.logging module with function setup_simple_console_and_file_logger().
0.0.2 (2017-06-22)
------------------
* Disable universal wheel.
0.0.1 (2017-06-22)
------------------
* First release on PyPI.
| zkpytb | /zkpytb-0.2.0.tar.gz/zkpytb-0.2.0/HISTORY.rst | HISTORY.rst |
======================
Zertrin Python Toolbox
======================
.. image:: https://img.shields.io/pypi/v/zkpytb.svg
:target: https://pypi.python.org/pypi/zkpytb
:alt: PyPI Package latest release
.. image:: https://img.shields.io/travis/zertrin/zkpytb.svg
:target: https://travis-ci.org/zertrin/zkpytb
:alt: Travis-CI Build status
.. image:: https://coveralls.io/repos/github/zertrin/zkpytb/badge.svg?branch=master
:target: https://coveralls.io/github/zertrin/zkpytb?branch=master
:alt: Coverage status
.. image:: https://readthedocs.org/projects/zkpytb/badge/?version=latest
:target: https://zkpytb.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://img.shields.io/pypi/pyversions/zkpytb.svg
:target: https://pypi.python.org/pypi/zkpytb
:alt: Supported Versions
.. image:: https://img.shields.io/github/license/zertrin/zkpytb.svg
:target: https://github.com/zertrin/zkpytb
:alt: License
A collection of useful functions by Zertrin
* Free software: MIT license
* Documentation: https://zkpytb.readthedocs.io.
Features
--------
.. list-table::
:header-rows: 1
* - Module
- Description
* - zkpytb.dicts
- Helper functions for dictionaries.
* - zkpytb.json
- Helpers related to json.
* - zkpytb.logging
- Helper functions for logging.
* - zkpytb.pandas
- Helper functions for Pandas.
* - zkpytb.priorityqueue
- An implementation of PriorityQueue based on heapq.
* - zkpytb.utils
- Miscellaneous helper functions.
Credits
---------
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
| zkpytb | /zkpytb-0.2.0.tar.gz/zkpytb-0.2.0/README.rst | README.rst |
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every
little bit helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/zertrin/zkpytb/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug"
and "help wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
Zertrin Python Toolbox could always use more documentation, whether as part of the
official docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/zertrin/zkpytb/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `zkpytb` for local development.
1. Fork the `zkpytb` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/zkpytb.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv zkpytb
$ cd zkpytb/
$ python setup.py develop
4. Create a branch for local development (please don't use `master`)::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox::
$ flake8 zkpytb tests
$ py.test
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for all currently supported Python versions. Check
https://travis-ci.org/zertrin/zkpytb/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ py.test tests.test_zkpytb_dicts
| zkpytb | /zkpytb-0.2.0.tar.gz/zkpytb-0.2.0/CONTRIBUTING.rst | CONTRIBUTING.rst |
.. highlight:: shell
============
Installation
============
Stable release
--------------
To install Zertrin Python Toolbox, run this command in your terminal:
.. code-block:: console
$ pip install zkpytb
This is the preferred method to install Zertrin Python Toolbox, as it will always install the most recent release.
If you don't have `pip`_ installed, this `Python installation guide`_ can guide
you through the process.
.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
From sources
------------
The sources for Zertrin Python Toolbox can be downloaded from the `Github repo`_.
You can either clone the public repository:
.. code-block:: console
$ git clone git://github.com/zertrin/zkpytb
Or download the `tarball`_:
.. code-block:: console
$ curl -OL https://github.com/zertrin/zkpytb/tarball/master
Once you have a copy of the source, you can install it with:
.. code-block:: console
$ python setup.py install
.. _Github repo: https://github.com/zertrin/zkpytb
.. _tarball: https://github.com/zertrin/zkpytb/tarball/master
| zkpytb | /zkpytb-0.2.0.tar.gz/zkpytb-0.2.0/docs/installation.rst | installation.rst |
import struct
import hashlib
from zksk.exceptions import InvalidExpression, IncompleteValuesError
class Expression:
    """
    Arithmetic expression of secrets and group elements.

    It is an abstraction for :math:`x_0 g_0 + x_1 g_1 + ... + x_n g_n`, where :math:`x_i`-s are
    declared secrets.

    Implementation-wise, parses the sum into an ordered list of Secrets and an ordered list of
    generators.

    Args:
        secret (Secret): Secret object.
        base: Base point on an elliptic curve.
    """

    def __init__(self, secret, base):
        if not isinstance(secret, Secret):
            raise InvalidExpression(
                "In {0} * {1}, the first parameter should be a Secret".format(
                    secret, base
                )
            )
        self._secrets = [secret]
        self._bases = [base]

    def __add__(self, other):
        """
        Combine two Expression objects along addition.

        Returns a fresh Expression instead of mutating the left operand, so
        that reusing a sub-expression in several sums does not corrupt it
        (the previous implementation extended ``self`` in place and returned
        it, which made ``a + b`` silently modify ``a``).

        Args:
            other (Expression): Another expression

        Returns:
            Expression: New expression
        """
        if not isinstance(other, Expression):
            raise InvalidExpression(
                "Invalid expression. Only linear combinations of group elements are supported."
            )
        # Seed a new Expression with our first term, then attach the
        # concatenated term lists; neither operand is modified.
        result = Expression(self._secrets[0], self._bases[0])
        result._secrets = self._secrets + other._secrets
        result._bases = self._bases + other._bases
        return result

    @property
    def secrets(self):
        # Expose as an immutable tuple so callers cannot alter the term list.
        return tuple(self._secrets)

    @property
    def bases(self):
        # Expose as an immutable tuple so callers cannot alter the term list.
        return tuple(self._bases)

    def eval(self):
        """
        Evaluate the expression, if all secret values are available.

        Raises:
            IncompleteValuesError: If any secret in the expression has no value.
        """
        # TODO: Take secret_dict as optional input.
        for secret in self._secrets:
            if secret.value is None:
                raise IncompleteValuesError(
                    "Secret {0} does not have a value".format(secret.name)
                )

        def ith_mul(i):
            return self._secrets[i].value * self._bases[i]

        summation = ith_mul(0)
        for i in range(1, len(self._secrets)):
            summation += ith_mul(i)
        return summation

    def __repr__(self):
        fragments = []
        for secret, base in zip(self._secrets, self._bases):
            fragments.append("Expression({}, {})".format(secret, base))
        return " + ".join(fragments)
class Secret:
    """
    A secret value in a zero-knowledge proof.

    Args:
        name: String to enforce as name of the Secret. Useful for debugging.
        value: Optional secret value.
    """

    # Number of bytes of entropy used for an auto-generated name.
    NUM_NAME_BYTES = 8

    def __init__(self, value=None, name=None):
        # If no name is supplied, derive a stable pseudo-random one.
        self.name = name if name is not None else self._generate_unique_name()
        self.value = value

    def _generate_unique_name(self):
        # Hash the object's identity-based hash so that distinct Secret
        # instances get distinct names with overwhelming probability.
        packed_id = struct.pack(">q", super().__hash__())
        return hashlib.sha256(packed_id).hexdigest()[: self.NUM_NAME_BYTES * 4]

    def __mul__(self, base):
        """
        Construct an expression that represents this secret multiplied by the base.

        Args:
            base: Base point on an elliptic curve.

        Returns:
            Expression: Expression that corresponds to :math:`x G`
        """
        return Expression(self, base)

    __rmul__ = __mul__

    def __repr__(self):
        has_value = self.value is not None
        has_name = self.name is not None
        if not has_value and not has_name:
            return "Secret()"
        if not has_value:
            return "Secret(name={})".format(repr(self.name))
        if not has_name:
            return "Secret({})".format(self.value)
        return "Secret({}, {})".format(self.value, repr(self.name))

    def __hash__(self):
        # Hash only on the name: two Secrets with the same name are the
        # same logical secret.
        return hash(("Secret", self.name))

    def __eq__(self, other):
        return (hash(self) == hash(other)) and self.value == other.value
def wsum_secrets(secrets, bases):
    """
    Build an expression representing a dot product of given secrets and bases.

    >>> from zksk.utils import make_generators
    >>> x, y = Secret(), Secret()
    >>> g, h = make_generators(2)
    >>> expr = wsum_secrets([x, y], [g, h])
    >>> expr.bases == (g, h)
    True
    >>> expr.secrets == (x, y)
    True

    Args:
        secrets: :py:class:`Secret` objects :math:`x_i`
        bases: Elliptic curve points :math:`G_i`

    Returns:
        Expression: Expression that corresponds to :math:`x_0 G_0 + x_1 G_1 + ... + x_n G_n`

    Raises:
        ValueError: If the numbers of secrets and bases differ.
    """
    if len(secrets) != len(bases):
        raise ValueError("Should have as many secrets as bases.")
    # Form one term per (secret, base) pair and fold them together with +.
    terms = [secret * base for secret, base in zip(secrets, bases)]
    acc = terms[0]
    for term in terms[1:]:
        acc = acc + term
    return acc
def update_secret_values(secrets_dict):
    """
    Update values of secrets according to the given mapping.

    >>> x, y = Secret(), Secret()
    >>> secrets_dict = {x: 1, y: 2}
    >>> update_secret_values(secrets_dict)
    >>> x.value
    1
    >>> y.value
    2

    Args:
        secrets_dict: A mapping from :py:class:`Secret` objects to their expected values.
    """
    # Assign each value onto its Secret object in place.
    for secret, value in secrets_dict.items():
        secret.value = value
import abc
import random
import string
from petlib.ec import EcGroup, EcPt
from petlib.bn import Bn
from petlib.pack import *
import binascii
import msgpack
from hashlib import sha256
from collections import defaultdict
import attr
from zksk.utils import get_random_num
from zksk.consts import CHALLENGE_LENGTH
from zksk.exceptions import ValidationError
@attr.s
class NIZK:
    """
    Non-interactive zero-knowledge proof.
    """

    challenge = attr.ib()
    responses = attr.ib()
    precommitment = attr.ib(default=None)
    stmt_hash = attr.ib(default=None)

    def serialize(self):
        """
        Serialize a non-interactive zero-knowledge proof.
        """
        # Each field is first encoded with petlib's serializer, then the
        # resulting list is packed with msgpack.
        fields = (self.challenge, self.responses, self.precommitment, self.stmt_hash)
        return msgpack.packb([encode(field) for field in fields], use_bin_type=True)

    @classmethod
    def deserialize(cls, nizk_raw):
        """
        Deserialize a non-interactive zero-knowledge proof.
        """
        fields = [decode(raw_field) for raw_field in msgpack.unpackb(nizk_raw)]
        return NIZK(*fields)
@attr.s
class SimulationTranscript:
    """
    Simulated proof transcript.

    Shaped like a genuine transcript (commitment, challenge, responses), but
    produced by simulation rather than by a prover holding the secrets
    (see ``simulate_precommit`` / ``simulate_proof`` elsewhere in this package).
    """

    # Commitment(s) of the simulated proof.
    commitment = attr.ib()
    # Challenge value of the simulated run.
    challenge = attr.ib()
    # Responses corresponding to the secrets.
    responses = attr.ib()
    # Optional precommitment, used by extended proofs.
    precommitment = attr.ib(default=None)
    # Optional hash of the proof statement.
    stmt_hash = attr.ib(default=None)
def build_fiat_shamir_challenge(stmt_prehash, *args, message=""):
    """Generate a Fiat-Shamir challenge.

    >>> prehash = sha256(b"statement id")
    >>> commitment = 42 * EcGroup().generator()
    >>> isinstance(build_fiat_shamir_challenge(prehash, commitment), Bn)
    True

    Args:
        stmt_prehash: Hash object seeded with the proof statement ID.
        args: Items to hash (e.g., commitments)
        message: Message to make it a signature PK.

    Returns:
        Bn: Challenge derived from the hash digest.
    """
    # Absorb each item into the hash. Arbitrary objects are serialized with
    # petlib's encode(); str items must be encoded to bytes first, since
    # hashlib's update() only accepts bytes-like input (the previous code
    # passed a raw str through, which raised a TypeError).
    for elem in args:
        if isinstance(elem, bytes):
            encoded = elem
        elif isinstance(elem, str):
            encoded = elem.encode()
        else:
            encoded = encode(elem)
        stmt_prehash.update(encoded)
    stmt_prehash.update(message.encode())
    return Bn.from_hex(stmt_prehash.hexdigest())
class Prover(metaclass=abc.ABCMeta):
    """
    Abstract interface representing Prover used in sigma protocols.

    Args:
        stmt: The Proof instance from which we draw the Prover.
        secret_values: The values of the secrets as a dict.
    """

    def __init__(self, stmt, secret_values):
        self.stmt = stmt
        self.secret_values = secret_values

    @abc.abstractmethod
    def compute_response(self, challenge):
        """
        Computes the responses associated to each Secret object in the statement.

        Returns a list of responses.
        """
        pass

    def precommit(self):
        """
        Generate a precommitment.

        Returns None by default; subclasses that use precommitments
        (e.g. :py:class:`extended.ExtendedProver`) override this hook.
        """
        return None

    def commit(self, randomizers_dict=None):
        """
        Constuct the proof commitment.

        Returns a pair: the digest of the proof statement (so the verifier can
        check both parties agree on the statement) and the actual commitment.

        Args:
            randomizers_dict: Optional dictionary of random values. Each random values is assigned
                to a secret.
        """
        return (
            self.stmt.prehash_statement().digest(),
            self.internal_commit(randomizers_dict),
        )

    def get_nizk_proof(self, message=""):
        """
        Construct a non-interactive proof transcript using Fiat-Shamir heuristic.

        The transcript contains only the challenge and the responses, as the commitment can be
        deterministically recomputed.

        The challenge is a hash of the commitment, the stmt statement and all the bases in the
        statement (including the left-hand-side).

        Args:
            message (str): Optional message to make a signature stmt of knowledge.
        """
        # Precommit to gather encapsulated precommitments. They are already included in their
        # respective statement. NOTE: this must run before prehash_statement()
        # below, since precommitments can become part of the statement.
        precommitment = self.precommit()
        commitment = self.internal_commit()

        # Generate the challenge from the statement prehash, the precommitment
        # and the commitment (Fiat-Shamir).
        prehash = self.stmt.prehash_statement()
        stmt_hash = prehash.digest()
        challenge = build_fiat_shamir_challenge(
            prehash, precommitment, commitment, message=message
        )

        responses = self.compute_response(challenge)
        return NIZK(
            challenge=challenge,
            responses=responses,
            precommitment=precommitment,
            stmt_hash=stmt_hash,
        )
class Verifier(metaclass=abc.ABCMeta):
    """
    An abstract interface representing a Verifier used in sigma protocols.

    Args:
        stmt: The proof statement being verified.
    """

    def __init__(self, stmt):
        self.stmt = stmt

    @abc.abstractmethod
    def check_responses_consistency(self, response, response_dict=None):
        """
        Verify that for two identical secrets, the responses are also the same.
        """
        return False

    def process_precommitment(self, precommitment):
        """
        Receive a precommitment and process it.

        Does nothing by default; subclasses that use precommitments
        (e.g. verifiers of extended proofs) override this hook.
        """
        pass

    def send_challenge(self, commitment, ignore_statement_hash_checks=False):
        """
        Store the received commitment and generate a challenge.

        The challenge is chosen at random between 0 and ``CHALLENGE_LENGTH`` (excluded).

        Args:
            commitment: A tuple containing a hash of the stmt statement, to be
                compared against the local statement, and the commitment as a
                (potentially multi-level list of) base(s) of the group.
            ignore_statement_hash_checks: If True, skip comparing the received
                statement hash against the local statement. Outer proofs that
                have already validated the hash (``ExtendedVerifier`` passes
                this flag when delegating to the inner verifier) use this;
                without the parameter those calls raised a ``TypeError``.
        """
        statement, self.commitment = commitment
        if not ignore_statement_hash_checks:
            self.stmt.check_statement(statement)
        self.challenge = get_random_num(bits=CHALLENGE_LENGTH)
        return self.challenge

    def pre_verification_validation(self, response, *args, **kwargs):
        """
        Validate the proof construction and the consistency of the responses.

        Raises:
            ValidationError: If responses for the same secret do not match.
        """
        self.stmt.full_validate(*args, **kwargs)

        if not self.check_responses_consistency(response, {}):
            raise ValidationError("Responses for the same secret name do not match.")

    def verify(self, response, *args, **kwargs):
        """
        Verify the responses of an interactive sigma protocol.

        To do so, generates a pseudo-commitment based on the stored challenge and the received
        responses, and compares it against the stored commitment.

        Args:
            response: The response given by the prover

        Returns:
            bool: True if verification succeeded, False otherwise.
        """
        # TODO: I really don't think this chain should be raising exceptions
        self.pre_verification_validation(response, *args, **kwargs)

        # Retrieve the commitment using the verification identity
        return self.commitment == self.stmt.recompute_commitment(
            self.challenge, response
        )

    def verify_nizk(self, nizk, message="", *args, **kwargs):
        """
        Verify a non-interactive proof.

        Unpacks the attributes and checks their consistency by computing a pseudo-commitment and
        drawing from a pseudo-challenge. Compares the pseudo-challenge with the nizk challenge.

        Args:
            nizk (:py:class:`NIZK`): Non-interactive proof
            message: A message if a signature proof.

        Return:
            bool: True of verification succeeded, False otherwise.
        """
        # Build the complete stmt if necessary.
        # TODO: If empty precommit() function, this is always true.
        if nizk.precommitment is not None:
            self.process_precommitment(nizk.precommitment)

        # Check the proofs statements match, gather the local statement.
        prehash = self.stmt.check_statement(nizk.stmt_hash)
        self.pre_verification_validation(nizk.responses, *args, **kwargs)

        # Retrieve the commitment using the verification identity.
        commitment_prime = self.stmt.recompute_commitment(
            nizk.challenge, nizk.responses
        )
        challenge_prime = build_fiat_shamir_challenge(
            prehash, nizk.precommitment, commitment_prime, message=message
        )
        return nizk.challenge == challenge_prime
import abc
from zksk.base import Prover, Verifier
from zksk.composition import ComposableProofStmt
from zksk.exceptions import StatementSpecError
from zksk.utils.misc import get_default_attr
class ExtendedProofStmt(ComposableProofStmt, metaclass=abc.ABCMeta):
    """
    Proof that deals with precommitments.

    The actual proof statement is built lazily from the precommitment by
    ``construct_stmt``; most operations delegate to that constructed statement.
    """

    @abc.abstractmethod
    def construct_stmt(self):
        """
        Build internal proof statement for this class

        This function must be overridden. The function should return a
        constructed proof statement. It can use the values that were computed
        by internal_precommit to do so.
        """
        # NOTE(review): declared without arguments here, but invoked as
        # construct_stmt(precommitment) in full_construct_stmt below —
        # subclasses must accept the precommitment; confirm intended signature.
        pass

    def precommit(self):
        """
        Compute precommitments. Override if needed.

        Override this function to compute precommitments and set corresponding
        secrets that must be computed before the ZK proof itself can be
        constructed and proven.

        Returns:
            Precommitment
        """
        return []

    def simulate_precommit(self):
        """
        Simulate a precommitment. Override if needed.

        Override this method to enable using this proof in or-proofs.

        It should compute the same output as generated by precommit, but without relying on any
        secrets.
        """
        raise StatementSpecError(
            "Override simulate_precommit in order to " "use or-proofs and simulations"
        )

    def validate(self, precommitment, *args, **kwargs):
        """
        Validate proof's construction. Override if needed.
        """
        pass

    def full_validate(self, *args, **kwargs):
        # Validate against the stored precommitment.
        self.validate(self.precommitment, *args, **kwargs)

    @property
    def constructed_stmt(self):
        # Lazily-set statement built from the precommitment; None until
        # full_construct_stmt has run.
        return get_default_attr(self, "_constructed_stmt")

    @property
    def precommitment(self):
        # Precommitment set by _precommit/full_construct_stmt; None before that.
        return get_default_attr(self, "_precommitment")

    def get_secret_vars(self):
        # Delegate to the constructed inner statement.
        return self.constructed_stmt.get_secret_vars()

    def get_bases(self):
        # Delegate to the constructed inner statement.
        return self.constructed_stmt.get_bases()

    def get_prover(self, secrets_dict=None):
        """
        Get a prover object.

        Side effect: assigns the given values onto the Secret objects used as
        keys of ``secrets_dict``.

        Returns:
            An :py:class:`ExtendedProver` bound to this statement.
        """
        if secrets_dict is None:
            secrets_dict = {}
        for k, v in secrets_dict.items():
            k.value = v
        self.secret_values = {}
        self.secret_values.update(secrets_dict)
        return ExtendedProver(self, self.secret_values)

    def get_verifier_cls(self):
        return ExtendedVerifier

    def recompute_commitment(self, challenge, responses):
        # Verification identity is defined by the constructed inner statement.
        return self.constructed_stmt.recompute_commitment(challenge, responses)

    def get_proof_id(self, secret_id_map=None):
        """
        Identifier for the proof statement.

        Raises:
            ValueError: If called before the inner statement was constructed.
        """
        if self.constructed_stmt is not None:
            proof_id = [
                self.__class__.__name__,
                self.precommitment,
                self.constructed_stmt.get_proof_id(secret_id_map),
            ]
        else:
            raise ValueError("Proof ID unknown before the proof is constructed.")
        return proof_id

    def full_construct_stmt(self, precommitment):
        # Store the precommitment, then build and cache the inner statement
        # from it.
        self._precommitment = precommitment
        self._constructed_stmt = self.construct_stmt(precommitment)
        return self.constructed_stmt

    def prepare_simulate_proof(self):
        # For simulations, derive the precommitment without secrets and then
        # construct the inner statement from it.
        self._precommitment = self.simulate_precommit()
        self.full_construct_stmt(self.precommitment)

    def simulate_proof(self, responses_dict=None, challenge=None):
        """
        Simulate the proof.

        Args:
            responses_dict: Mapping from secrets to responses
            challenge: Challenge
        """
        tr = self._constructed_stmt.simulate_proof(
            challenge=challenge, responses_dict=responses_dict
        )
        # Attach our precommitment to the inner transcript.
        tr.precommitment = self._precommitment
        return tr

    def _precommit(self):
        # Compute and cache the precommitment (called by ExtendedProver).
        self._precommitment = self.precommit()
        return self._precommitment
class ExtendedProver(Prover):
    """
    Prover dealing with precommitments.

    This prover will create a constructed Prover object and delegate to its methods.
    """

    def internal_commit(self, randomizers_dict=None):
        """
        Trigger the internal prover commit.

        Transfers the randomizer_dict if passed. It might be used if the binding
        of the proof is set True.

        Raises:
            StatementSpecError: If called before precommit(), i.e. before the
                inner statement and prover exist.
        """
        if self.stmt.constructed_stmt is None:
            raise StatementSpecError(
                "You need to pre-commit before commiting. The proofs lack parameters otherwise."
            )
        return self.constructed_prover.internal_commit(randomizers_dict)

    def compute_response(self, challenge):
        """
        Wrap the response computation for the inner proof.
        """
        # Record the challenge on both this prover and the inner one.
        self.challenge = challenge
        self.constructed_prover.challenge = challenge
        self.response = self.constructed_prover.compute_response(challenge)
        return self.response

    def precommit(self):
        # Compute the statement's precommitment, then build the inner
        # statement/prover from it.
        self.precommitment = self.stmt._precommit()
        self.process_precommitment()
        # Recurse into sub-provers when the constructed prover is composed;
        # leaf provers have no .subs attribute, hence the AttributeError guard.
        try:
            for p in self.constructed_prover.subs:
                p.precommit()
        except AttributeError:
            pass
        return self.precommitment

    def process_precommitment(self):
        """
        Trigger the inner-proof construction and extract a prover given the secrets.
        """
        self.stmt.full_construct_stmt(self.precommitment)
        self.constructed_prover = self.stmt._constructed_stmt.get_prover(
            self.secret_values
        )
class ExtendedVerifier(Verifier):
    """
    Verifier that deals with precommitments.

    Builds an inner verifier from the received precommitment and delegates
    the protocol steps to it.
    """

    def process_precommitment(self, precommitment):
        """
        Receive the precommitment and trigger the inner-verifier construction.
        """
        self.precommitment = precommitment
        self.stmt.full_construct_stmt(precommitment)
        self.constructed_verifier = self.stmt.constructed_stmt.get_verifier()

    def send_challenge(self, com):
        """
        Transfer the commitment to the inner proof, and compute the challenge.
        """
        statement, self.commitment = com
        # The statement hash is checked here, so the inner verifier is told
        # to skip its own check.
        self.stmt.check_statement(statement)
        # NOTE(review): the base Verifier.send_challenge in this file does not
        # accept an ignore_statement_hash_checks keyword — confirm the inner
        # verifier's signature supports it, otherwise this call raises TypeError.
        self.challenge = self.constructed_verifier.send_challenge(
            self.commitment, ignore_statement_hash_checks=True
        )
        return self.challenge

    def check_responses_consistency(self, responses, responses_dict):
        """
        Wrap the consistency check of the innter proof.
        """
        return self.constructed_verifier.check_responses_consistency(
            responses, responses_dict
        )
import abc
import copy
import random
from hashlib import sha256
from collections import defaultdict
from petlib.bn import Bn
from petlib.pack import encode
from zksk.consts import CHALLENGE_LENGTH
from zksk.base import Prover, Verifier, SimulationTranscript
from zksk.expr import Secret, update_secret_values
from zksk.utils import get_random_num, sum_bn_array
from zksk.utils.misc import get_default_attr
from zksk.exceptions import StatementSpecError, StatementMismatch
from zksk.exceptions import InvalidSecretsError, GroupMismatchError
from zksk.exceptions import InconsistentChallengeError
def _find_residual_challenge(subchallenges, challenge, modulus):
    r"""
    Determine the complement to a global challenge in a list.

    For example, to find :math:`c_1` such that
    :math:`c = c_1 + c_2 + c_3 \mod 2^k`, we compute :math:`c_2 + c_3 - c`
    and take the opposite.

    Args:
        subchallenges: The array of subchallenges :math:`c_2, c_3, \ldots`
        challenge: The global challenge to reach
        modulus: Bit size :math:`k`; arithmetic is done modulo :math:`2^k`
    """
    modulus = Bn(2).pow(modulus)
    # Work on a copy so the caller's list of subchallenges is not modified.
    temp_arr = subchallenges.copy()
    temp_arr.append(-challenge)
    return -sum_bn_array(temp_arr, modulus)
def _assign_secret_ids(secret_vars):
"""
Assign consecutive identifiers to secrets.
Needed for proof statement idenfifiers.
>>> x, y = Secret(name='x'), Secret(name='y')
>>> _assign_secret_ids([x, y, x])
{'x': 0, 'y': 1}
Args:
secret_vars: :py:class:`expr.Secret` objects.
"""
secret_id_map = {}
counter = 0
for secret in secret_vars:
if secret.name not in secret_id_map:
secret_id_map[secret.name] = counter
counter += 1
return secret_id_map
class ComposableProofStmt(metaclass=abc.ABCMeta):
"""
A composable sigma-protocol proof statement.
In the composed proof tree, these objects are the atoms/leafs.
"""
def get_proof_id(self, secret_id_map=None):
"""
Identifier for the proof statement.
This identifier is used to check the proof statements on the prover and
verifier sides are consistent, and to generate a challenge in non-interactive proofs.
Args:
secret_id_map: A map from secret names to consecutive identifiers.
Returns:
list: Objects that can be used for hashing.
"""
secret_vars = self.get_secret_vars()
bases = self.get_bases()
if secret_id_map is None:
secret_id_map = _assign_secret_ids(secret_vars)
ordered_secret_ids = [secret_id_map[s.name] for s in secret_vars]
return [self.__class__.__name__, bases, ordered_secret_ids]
def get_secret_vars(self):
"""
Collect all secrets in this subtree.
By default tries to get the ``secret_vars`` attribute. Override if needed.
"""
if not hasattr(self, "secret_vars"):
raise StatementSpecError(
"Need to override get_secret_vars or specify secret_vars attribute."
)
return self.secret_vars
def get_bases(self):
"""
Collect all base points in this subtree.
By default tries to get the ``bases`` attribute. Override if needed.
"""
if not hasattr(self, "bases"):
raise StatementSpecError(
"Need to override get_bases or specify bases attribute."
)
return self.bases
def __and__(self, other):
"""
Make a conjuction of proof statements using :py:class:`AndProofStmt`.
If called multiple times, subproofs are flattened so that only one :py:class:`AndProofStmt`
remains at the root.
"""
if isinstance(other, AndProofStmt):
if isinstance(self, AndProofStmt):
return AndProofStmt(*self.subproofs, *other.subproofs)
else:
return AndProofStmt(self, *other.subproofs)
elif isinstance(self, AndProofStmt):
return AndProofStmt(*self.subproofs, other)
return AndProofStmt(self, other)
def __or__(self, other):
"""
Make a disjunction of proof statements using :py:class:`OrProofStmt`.
If called multiple times, subproofs are flattened so that only one :py:class:`OrProofStmt`
remains at the root.
"""
if isinstance(other, OrProofStmt):
if isinstance(self, OrProofStmt):
return OrProofStmt(*self.subproofs, *other.subproofs)
else:
return OrProofStmt(self, *other.subproofs)
elif isinstance(self, OrProofStmt):
return OrProofStmt(*self.subproofs, other)
return OrProofStmt(self, other)
def get_prover_cls(self):
if hasattr(self, "prover_cls"):
return self.prover_cls
else:
raise StatementSpecError("No prover class specified.")
def get_verifier_cls(self):
if hasattr(self, "verifier_cls"):
return self.verifier_cls
else:
raise StatementSpecError("No verifier class specified.")
def get_prover(self, secrets_dict=None):
"""
Get the :py:class:`base.Prover` for the current proof.
"""
return self.get_prover_cls()(self)
def get_verifier(self):
"""
Return the :py:class:`base.Verifier` for the current proof.
"""
return self.get_verifier_cls()(self)
def recompute_commitment(self, challenge, response):
"""
Compute a pseudo-commitment.
A pseudo-commitment is the commitment a verifier should have received if the proof was
correct. It should be compared to the actual commitment.
Re-occuring secrets yield identical responses.
Args:
challenge: the challenge used in the proof
response: a list of responses, ordered as the list of secret names, i.e., with as many
elements as there are secrets in the proof claim.
"""
pass
def prove(self, secret_dict=None, message=""):
"""
Generate the transcript of a non-interactive proof.
"""
if secret_dict is None:
secret_dict = {}
prover = self.get_prover(secret_dict)
return prover.get_nizk_proof(message)
def verify(self, nizk, message=""):
"""
Verify a non-interactive proof.
"""
verifier = self.get_verifier()
return verifier.verify_nizk(nizk, message)
def check_statement(self, statement_hash):
"""
Verify the current proof corresponds to the hash passed as a parameter.
Returns a pre-hash of the current proof, e.g., to be used to verify NIZK proofs.
"""
h = self.prehash_statement()
if statement_hash != h.digest():
raise StatementMismatch("Proof statements mismatch, impossible to verify")
return h
    def validate(self, *args, **kwargs):
        """
        Validation criteria to be checked. Override if needed.

        For example, a :py:class:`primitives.DLNotEqual` statement should
        not validate if its proof components are in fact equal.

        Should raise an `exceptions.ValidationError` if does not validate.
        """
        # Default: nothing to validate.
        pass
    def full_validate(self, *args, **kwargs):
        """
        For or/and-proofs, perform recursive validation of subproofs.

        Composed statements override this to recurse into their subproofs; the
        base case simply delegates to :py:meth:`validate`.
        """
        # TODO: calling return here is deceptive if we ask to throw an Exception
        return self.validate(*args, **kwargs)
    def validate_secrets_reoccurence(self, forbidden_secrets=None):
        """
        Check if a secret appears both inside and outside an or-proof.

        Does nothing if not overriden.

        Args:
            forbidden_secrets: Secrets that must not re-occur; unused in this
                base implementation.
        """
        pass
def update_randomizers(self, randomizers_dict):
"""
Construct a mapping of all secrets to randomizers.
Does so by copying the values of the passed ``randomizers_dict``, and drawing the other
values at random until all the secrets have a randomizer.
These are used as a part of proofs and also as responses in simulations.
Args:
randomizers_dict: A dictionary to enforce
"""
# If we are not provided a randomizer dict from above, we compute it.
if randomizers_dict is None:
randomizers_dict = self.get_randomizers()
# TODO: This can be done easier.
# Fill the dictionary.
elif any([x not in randomizers_dict for x in self.get_secret_vars()]):
tmp = self.get_randomizers()
tmp.update(randomizers_dict)
randomizers_dict = tmp
return randomizers_dict
def prehash_statement(self):
"""
Return a hash of the proof's ID.
"""
return sha256(encode(str(self.get_proof_id())))
    @property
    def simulated(self):
        """
        Tell if this proof is designated as to be simulated in an or-proof.

        By default is False.
        """
        # NOTE(review): get_default_attr appears to also *set* the attribute to
        # the default when absent (memoizing side effect) — confirm against
        # zksk.utils before replacing with a plain getattr().
        return get_default_attr(self, "_simulated", False)
def set_simulated(self, value=True):
"""
Designate this proof statement as simulated in an or-proof.
Args:
value (bool): Whether to simulate this proof.
"""
self._simulated = value
    def prepare_simulate_proof(self):
        """
        Additional steps to prepare before simulating the proof. Override if needed.
        """
        # Default: nothing to prepare.
        pass
    def simulate(self, challenge=None):
        """
        Generate the transcript of a simulated non-interactive proof.

        Args:
            challenge: Challenge to use in the simulation; drawn at random by
                the underlying ``simulate_proof`` if None.
        """
        # Mark the statement as simulated so provers know no secrets are needed.
        self.set_simulated()
        self.prepare_simulate_proof()
        transcript = self.simulate_proof(challenge=challenge)
        # Bind the statement hash so the transcript can be matched to this statement.
        transcript.stmt_hash = self.prehash_statement().digest()
        return transcript
def verify_simulation_consistency(self, transcript):
"""Check if the fields of a transcript satisfy the verification equation.
Useful for debugging purposes.
.. WARNING::
This is NOT an alternative to the full proof verification, as this function
accepts simulated proofs.
"""
verifier = self.get_verifier()
verifier.process_precommitment(transcript.precommitment)
self.check_statement(transcript.stmt_hash)
verifier.commitment, verifier.challenge = (
transcript.commitment,
transcript.challenge,
)
return verifier.verify(transcript.responses)
    def __repr__(self):
        """Return a string form of the proof identifier."""
        # TODO: Not a great repr (cannot copy-paste and thus recreate the object).
        return str(self.get_proof_id())
class _CommonComposedStmtMixin:
    """
    Behavior shared by the and/or composed proof statements.

    Assumes the host class defines ``self.subproofs``.
    """

    def get_secret_vars(self):
        """Return the concatenated secret variables of all subproofs, in order."""
        secret_vars = []
        for sub in self.subproofs:
            secret_vars.extend(sub.get_secret_vars())
        return secret_vars

    def get_bases(self):
        """Return the concatenated bases of all subproofs, in order."""
        bases = []
        for sub in self.subproofs:
            bases.extend(sub.get_bases())
        return bases

    def validate_group_orders(self):
        """
        Check that if two secrets are the same, their bases induce groups of the same order.

        The primary goal is to ensure same responses for same secrets will not yield false negatives
        of :py:meth:`base.Verifier.check_responses_consistency` due to different group-order modular
        reductions.

        TODO: Consider deactivating in the future as this forbids using different groups in one proof.

        Raises:
            GroupMismatchError: If a shared secret has bases whose groups have
                different orders.
        """
        bases = self.get_bases()
        secrets = self.get_secret_vars()

        # Map each unique secret to the list of indices at which it appears.
        occurrences = defaultdict(list)
        for index, secret in enumerate(secrets):
            occurrences[secret].append(index)

        # All the bases tied to a given secret must live in groups of the same
        # order as the first occurrence.
        for secret, indices in occurrences.items():
            ref_order = bases[indices[0]].group.order()
            for index in indices:
                if bases[index].group.order() != ref_order:
                    raise GroupMismatchError(
                        "A shared secret has bases which yield different group orders: %s"
                        % secret
                    )

    def get_proof_id(self, secret_id_map=None):
        """Return a composite, secret-name-independent identifier of the statement."""
        # Fix: a `bases = self.get_bases()` local was computed here but never
        # used; it has been removed.
        if secret_id_map is None:
            secret_id_map = _assign_secret_ids(self.get_secret_vars())
        proof_ids = [sub.get_proof_id(secret_id_map) for sub in self.subproofs]
        return (self.__class__.__name__, proof_ids)

    def full_validate(self, *args, **kwargs):
        """Recursively validate all subproofs."""
        for sub in self.subproofs:
            sub.full_validate(*args, **kwargs)
class OrProofStmt(_CommonComposedStmtMixin, ComposableProofStmt):
    """
    A disjunction of several subproofs.

    Args:
        subproofs: Proof statements.

    Raise:
        ValueError: If less than two subproofs given.
    """

    def __init__(self, *subproofs):
        if len(subproofs) < 2:
            raise ValueError("Need at least two subproofs")

        # We make a shallow copy of each subproof so they don't mess up each other. This step is
        # important, as we can have different outputs for the same proof (independent simulations or
        # simulations/execution)
        self.subproofs = [copy.copy(p) for p in list(subproofs)]

    def recompute_commitment(self, challenge, responses):
        """
        Recompute the commitments expected under the given global challenge.

        Args:
            challenge: The global challenge of the or-proof.
            responses: A tuple (subchallenges, actual_responses), as produced by
                :py:meth:`OrProver.compute_response`.
        """
        # We retrieve the challenges, hidden in the responses tuple
        self.or_challenges = responses[0]
        responses = responses[1]

        # We check for challenge consistency i.e the constraint was respected
        if _find_residual_challenge(
            self.or_challenges, challenge, CHALLENGE_LENGTH
        ) != Bn(0):
            raise InconsistentChallengeError("Inconsistent challenges.")

        # Compute the list of commitments, one for each proof with its challenge and responses
        # (in-order)
        com = []
        for index, subproof in enumerate(self.subproofs):
            com.append(
                subproof.recompute_commitment(
                    self.or_challenges[index], responses[index]
                )
            )
        return com

    def get_prover(self, secrets_dict=None):
        """
        Build an :py:class:`OrProver` around one randomly chosen non-simulated subproof.

        Returns None if this statement is itself marked simulated, or if no
        subproof can produce a prover with the given secrets.
        """
        if secrets_dict is None:
            secrets_dict = {}

        # The prover is built on one legit prover constructed from a subproof picked at random among
        # candidates.

        # First we update the dictionary we have with the additional secrets, and process it
        # TODO: Check this secret_values handling totally different
        update_secret_values(secrets_dict)

        if self.simulated:
            return None

        # TODO: Add a unit test where simulation must be True/False for all subproofs
        # Prepare the draw. Disqualify proofs with simulation parameter set to true
        candidates = {}
        for index, subproof in enumerate(self.subproofs):
            if not self.subproofs[index].simulated:
                candidates[index] = subproof

        if len(candidates) == 0:
            # NOTE(review): failure is reported via print + None rather than an
            # exception; callers must check for a None return value.
            print("Cannot run an or-proof if all elements are simulated")
            return None

        # Now choose a proof among the possible ones and try to get a prover from it.
        # If for some reason it does not work (e.g some secrets are missing), remove it
        # from the list of possible proofs and try again
        random_gen = random.SystemRandom()
        possible = list(candidates.keys())
        self.chosen_idx = random_gen.choice(possible)

        # Feed the selected proof the secrets it needs if we have them, and try to get_prover
        valid_prover = self.subproofs[self.chosen_idx].get_prover(secrets_dict)
        while valid_prover is None:
            possible.remove(self.chosen_idx)

            # If there is no proof left, abort and say we cannot get a prover
            if len(possible) == 0:
                self.chosen_idx = None
                return None

            self.chosen_idx = random_gen.choice(possible)
            valid_prover = self.subproofs[self.chosen_idx].get_prover(secrets_dict)
        return OrProver(self, valid_prover)

    def get_verifier(self):
        """Return an :py:class:`OrVerifier` built from the subproofs' verifiers."""
        return OrVerifier(self, [sub.get_verifier() for sub in self.subproofs])

    def validate_composition(self):
        """
        Validate that composition is done correctly.
        """
        self.validate_group_orders()

    def validate_secrets_reoccurence(self, forbidden_secrets=None):
        """
        Check for re-occurence of secrets both inside and outside an or-proof.

        Method is called from :py:meth:`AndProofStmt.validate_secrets_reoccurence`.

        Args:
            forbidden_secrets: A list of all the secrets in the mother proof.

        Raises:
            :py:class:`exceptions.InvalidSecretsError`: If any secrets re-occur in an
                unsupported way.
        """
        secret_vars = self.get_secret_vars()
        if forbidden_secrets is None:
            return
        for secret in set(secret_vars):
            if forbidden_secrets.count(secret) > secret_vars.count(secret):
                raise InvalidSecretsError(
                    "Invalid secrets found. Try to flatten the proof to avoid "
                    "using secrets used inside an or-proof in other parts of "
                    "the proof too (e.g., in other and or or-clauses)"
                )

    def prepare_simulate_proof(self):
        """Recursively prepare all subproofs for simulation."""
        for sub in self.subproofs:
            sub.prepare_simulate_proof()

    def simulate_proof(self, challenge=None, *args, **kwargs):
        """
        Simulate the or-proof.

        Simulates the n-1 first subproofs, computes the complementary challenge and
        simulates the last proof using this challenge.
        """
        if challenge is None:
            challenge = get_random_num(bits=CHALLENGE_LENGTH)
        com = []
        resp = []
        or_chals = []
        precom = []

        # Generate one simulation at a time and update a list of each attribute.
        for index, subproof in enumerate(self.subproofs[:-1]):
            transcript = subproof.simulate_proof()
            com.append(transcript.commitment)
            resp.append(transcript.responses)
            or_chals.append(transcript.challenge)
            precom.append(transcript.precommitment)

        # Generate the last simulation.
        final_chal = _find_residual_challenge(or_chals, challenge, CHALLENGE_LENGTH)
        or_chals.append(final_chal)
        # Note: `index` deliberately leaks from the loop above, so
        # subproofs[index + 1] is the last subproof.
        final_transcript = self.subproofs[index + 1].simulate_proof(
            challenge=final_chal
        )
        com.append(final_transcript.commitment)
        resp.append(final_transcript.responses)
        precom.append(final_transcript.precommitment)

        # Pack everything into a SimulationTranscript, pack the or-challenges in the response field.
        return SimulationTranscript(
            commitment=com,
            challenge=challenge,
            responses=(or_chals, resp),
            precommitment=precom,
        )
class OrProver(Prover):
    """
    Prover for the or-proof.

    This prover is built with only one subprover, and needs to have access to the index of the
    corresponding subproof in its mother proof. Runs all the simulations for the other proofs and
    stores them.
    """

    def __init__(self, stmt, subprover):
        """
        Args:
            stmt: The :py:class:`OrProofStmt` being proven.
            subprover: The prover of the one legitimately executed subproof.
        """
        self.subprover = subprover
        self.stmt = stmt
        self.true_prover_idx = self.stmt.chosen_idx

        # Create a list storing the SimulationTranscripts.
        self.setup_simulations()

    def _sim_index(self, index):
        """
        Map a subproof index to its position in ``self.simulations``.

        The simulations list skips the legitimately executed subproof, so
        ``len(self.simulations) == len(subproofs) - 1`` and indices past the
        true prover's position are shifted down by one. This mapping was
        previously duplicated inline in three methods.
        """
        return index - 1 if index > self.true_prover_idx else index

    def setup_simulations(self):
        """
        Run all the required simulations and store them.
        """
        self.simulations = []
        for index, subproof in enumerate(self.stmt.subproofs):
            if index != self.true_prover_idx:
                subproof.prepare_simulate_proof()
                sim = subproof.simulate_proof()
                self.simulations.append(sim)

    def precommit(self):
        """
        Generate the precommitment for the legit subprover, and gather the
        precommitments from the stored simulations.

        Returns None if no subproof produced a precommitment.
        """
        precommitment = []
        for index, _ in enumerate(self.stmt.subproofs):
            if index == self.true_prover_idx:
                precommitment.append(self.subprover.precommit())
            else:
                precommitment.append(
                    self.simulations[self._sim_index(index)].precommitment
                )
        if not any(precommitment):
            return None
        return precommitment

    def internal_commit(self, randomizers_dict=None):
        """
        Gather the commitments from the stored simulations.

        Args:
            randomizers_dict: A dictionary of randomizers to use for responses consistency. Not used
                in this proof. Parameter kept so all internal_commit methods have the same prototype.
        """
        # Now that all proofs have been constructed, we can check
        self.stmt.validate_composition()

        commitment = []
        for index, _ in enumerate(self.stmt.subproofs):
            if index == self.true_prover_idx:
                commitment.append(self.subprover.internal_commit())
            else:
                commitment.append(
                    self.simulations[self._sim_index(index)].commitment
                )
        return commitment

    def compute_response(self, challenge):
        """
        Compute complementary challenges and responses.

        Computes the complementary challenge with respect to the received global challenge and the
        list of challenges used in the stored simulations. Computes the responses of the subprover
        using this auxiliary challenge, gathers the responses from the stored simulations. Returns
        both the complete list of subchallenges (including the auxiliary challenge) and the list of
        responses, both ordered.

        Args:
            challenge: The global challenge to use. All subchallenges must add to this one.
        """
        residual_chal = _find_residual_challenge(
            [el.challenge for el in self.simulations], challenge, CHALLENGE_LENGTH
        )
        response = []
        challenges = []
        for index, subproof in enumerate(self.stmt.subproofs):
            if index == self.true_prover_idx:
                challenges.append(residual_chal)
                response.append(self.subprover.compute_response(residual_chal))
            else:
                sim = self.simulations[self._sim_index(index)]
                challenges.append(sim.challenge)
                response.append(sim.responses)
        return (challenges, response)
class OrVerifier(Verifier):
    """
    Verifier for the or-proof.

    The verifier is built on a list of subverifiers, which will unpack the received attributes.
    """

    def __init__(self, stmt, subverifiers):
        self.stmt = stmt
        self.subs = subverifiers

    def process_precommitment(self, precommitment):
        """
        Reads the received list of precommitments (or None if non applicable) and distributes them
        to the subverifiers so they can finalize their proof construction if necessary.

        Args:
            precommitment: A list of all required precommitments, ordered.
        """
        if precommitment is None:
            return
        for position, verifier in enumerate(self.subs):
            verifier.process_precommitment(precommitment[position])

    def check_responses_consistency(self, responses, responses_dict=None):
        """
        Checks that for a same secret, response are actually the same.

        Since every member is run with its own challenge, it is enough that one
        member is consistent within itself.

        Args:
            responses: a tuple (subchallenges, actual_responses) from which we extract only the
                actual responses for each subverifier.
            responses_dict: Unused here; each subverifier is checked against a
                fresh empty dictionary since the subchallenges differ.
        """
        if responses_dict is None:
            responses_dict = {}
        return all(
            verifier.check_responses_consistency(responses[1][position], {})
            for position, verifier in enumerate(self.subs)
        )
class AndProofStmt(_CommonComposedStmtMixin, ComposableProofStmt):
    def __init__(self, *subproofs):
        """
        Constructs the And conjunction of several subproofs.

        Subproofs are copied at instantiation.

        Args:
            subproofs: Proof statements.

        Raise:
            ValueError: If less than two subproofs given.
        """
        if len(subproofs) < 2:
            raise ValueError("Need at least two subproofs")

        # We make a shallow copy of each subproof so they don't mess with each other. This step is
        # important in case we have proofs which locally draw random values. It ensures several
        # occurrences of the same proof in the tree indeed have their own randomnesses.
        self.subproofs = [copy.copy(p) for p in list(subproofs)]

    def validate_composition(self, *args, **kwargs):
        """
        Validate that composition is done correctly.
        """
        self.validate_group_orders()
        self.validate_secrets_reoccurence()

    def recompute_commitment(self, challenge, responses):
        """
        Recompute the commitments of all subproofs under the shared challenge.
        """
        com = []
        for index, subproof in enumerate(self.subproofs):
            com.append(subproof.recompute_commitment(challenge, responses[index]))
        return com

    def get_prover(self, secrets_dict=None):
        """
        Construct an :py:class:`AndProver` from the provers of all subproofs.

        Raises:
            ValueError: If any subproof fails to produce a prover.
        """
        if secrets_dict is None:
            secrets_dict = {}

        # First we update the dictionary we have with the additional secrets, and process it
        update_secret_values(secrets_dict)

        if self.simulated:
            return None

        subs = [sub_proof.get_prover(secrets_dict) for sub_proof in self.subproofs]
        if None in subs:
            # TODO: It'd be great to know which one is failing.
            raise ValueError("Failed to construct prover for a conjunct")

        return AndProver(self, subs)

    def get_verifier(self):
        """
        Constructs a Verifier for the and-proof, based on a list of the Verifiers of each subproof.
        """
        return AndVerifier(self, [sub.get_verifier() for sub in self.subproofs])

    def get_randomizers(self):
        """
        Create a dictionary of randomizers by querying the subproofs' maps and merging them.
        """
        random_vals = {}

        # Pair each Secret to one generator. Overwrites when a Secret re-occurs but since the
        # associated bases should yield groups of same order, it's fine.
        dict_name_gen = {s: g for s, g in zip(self.get_secret_vars(), self.get_bases())}

        # Pair each Secret to a randomizer.
        for u in dict_name_gen:
            random_vals[u] = dict_name_gen[u].group.order().random()

        return random_vals

    def prepare_simulate_proof(self):
        """Recursively prepare all subproofs for simulation."""
        for sub in self.subproofs:
            sub.prepare_simulate_proof()

    def simulate_proof(self, responses_dict=None, challenge=None):
        """
        Simulate the And proof

        To do so, draw a global challenge, a global dictionary of responses (for consistency) and
        simulate each subproof.

        Gathers the commitments, and pack everything into a :py:class:`base.SimulationTranscript`.

        Args:
            responses_dict: A dictionary of responses to override (could come from an upper And
                Proof, for example). Draw randomly if None.
            challenge: The challenge to use in the proof. Draw one if None.
        """
        # Fill the missing positions of the responses dictionary
        responses_dict = self.update_randomizers(responses_dict)
        if challenge is None:
            challenge = get_random_num(CHALLENGE_LENGTH)
        com = []
        resp = []
        precom = []

        # Simulate all subproofs and gather their attributes, repack them in a unique
        # SimulationTranscript.
        for sub in self.subproofs:
            simulation = sub.simulate_proof(
                challenge=challenge, responses_dict=responses_dict
            )
            com.append(simulation.commitment)
            resp.append(simulation.responses)
            precom.append(simulation.precommitment)

        return SimulationTranscript(
            commitment=com, challenge=challenge, responses=resp, precommitment=precom
        )

    def validate_secrets_reoccurence(self, forbidden_secrets=None):
        """
        Check re-occurring secrets both inside and outside an or-proof.

        This method gets the list of all secrets in the tree and triggers a depth-first search for
        or-proofs

        Args:
            forbidden_secrets: A list of all the secrets in the mother proof.

        Raises:
            :py:class:`exceptions.InvalidSecretsError`: If any secrets re-occur in an
                unsupported way.
        """
        if forbidden_secrets is None:
            forbidden_secrets = self.get_secret_vars().copy()
        for p in self.subproofs:
            p.validate_secrets_reoccurence(forbidden_secrets)
class AndProver(Prover):
    def __init__(self, proof, subprovers):
        """
        Constructs a Prover for an and-proof, from a list of valid subprovers.
        """
        self.stmt = proof
        self.subs = subprovers

    def precommit(self):
        """
        Computes the precommitment for an and-proof.

        This precommitment is the list of precommitments of the subprovers.

        If not applicable (no subprover outputs a precommitment), returns None.
        """
        # Collect each subprover's precommitment (None where not applicable).
        collected = [sub.precommit() for sub in self.subs]
        if all(item is None for item in collected):
            return None
        return collected

    def internal_commit(self, randomizers_dict=None):
        """
        Compute the internal commitment.

        Args:
            randomizers_dict: Mapping from secrets to randomizers.
        """
        # Now that we have constructed the proofs, validate.
        self.stmt.validate_composition()
        randomizers_dict = self.stmt.update_randomizers(randomizers_dict)
        self.commitment = [
            sub.internal_commit(randomizers_dict=randomizers_dict)
            for sub in self.subs
        ]
        return self.commitment

    def compute_response(self, challenge):
        """
        Return a list of responses of each subprover.
        """
        return [sub.compute_response(challenge) for sub in self.subs]
class AndVerifier(Verifier):
    def __init__(self, proof, subverifiers):
        self.stmt = proof
        self.subs = subverifiers

    def send_challenge(self, commitment, ignore_statement_hash_checks=False):
        """
        Store the received commitment and generate a challenge.

        Additionally checks the received hashed statement matches the one of the current proof. Only
        called at the highest level or in extended proofs.

        Args:
            commitment: A tuple (statement, actual_commitment) with
                actual_commitment a list of commitments, one for each subproof.
            ignore_statement_hash_checks: Optional parameter to deactivate the
                statement check. In this case, the commitment parameter is
                simply the actual commitment. Useful in 2-level proofs for which
                we don't check the inner statements.
        """
        if not ignore_statement_hash_checks:
            statement, commitment = commitment
            self.stmt.check_statement(statement)
        self.commitment = commitment
        self.challenge = get_random_num(CHALLENGE_LENGTH)
        return self.challenge

    def check_responses_consistency(self, responses, responses_dict=None):
        """
        Check that the responses are consistent for re-occurring secret names.

        Iterates through the subverifiers, gives them the responses related to
        them and constructs a response dictionary. If an inconsistency is found
        during this build, returns False.

        Args:
            responses: Received list of responses for each subproof.
            responses_dict: Dictionary to construct and use for comparison.
        """
        if responses_dict is None:
            responses_dict = {}
        # The dictionary is shared across subverifiers so that re-occurring
        # secrets are compared between conjuncts; short-circuits on the first
        # inconsistency.
        return all(
            verifier.check_responses_consistency(responses[position], responses_dict)
            for position, verifier in enumerate(self.subs)
        )

    def process_precommitment(self, precommitment):
        """
        Distribute the list of precommitments to the subverifiers.
        """
        if precommitment is None:
            return
        for position, verifier in enumerate(self.subs):
            verifier.process_precommitment(precommitment[position])
import attr
from bplib.bp import BpGroup, G1Elem, G2Elem, GTElem
import petlib.pack as pack
import msgpack
class BilinearGroupPair:
    """
    A bilinear group pair.

    Contains two origin groups G1, G2 and the image group GT. The underlying
    ``bplib.bp.BpGroup`` object is also embedded.

    Args:
        bp_group: An existing ``bplib.bp.BpGroup`` to wrap. A fresh group is
            created if None.
    """

    def __init__(self, bp_group=None):
        # Fix: a caller-supplied bp_group was previously never assigned to
        # self.bpgp (the parameter was silently ignored); now it is used.
        self.bpgp = bp_group if bp_group is not None else BpGroup()
        self.GT = GTGroup(self)
        self.G1 = G1Group(self)
        self.G2 = G2Group(self)

    def groups(self):
        """
        Returns the three groups in the following order : G1, G2, GT.
        """
        return self.G1, self.G2, self.GT
class GTGroup:
    """
    Wrapper for the GT group with additive points.

    Allows to retrieve groups G1 and G2.

    Args:
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, bp):
        self.bp = bp
        # Generator and identity are computed lazily and cached.
        self.gen = None
        self.inf = None

    def infinite(self):
        """Return the (cached) identity element of GT."""
        if self.inf is None:
            identity = self.generator().pt.one(self.bp.bpgp)
            self.inf = AdditivePoint(identity, self.bp)
        return self.inf

    def order(self):
        """Return the order of the group."""
        return self.bp.bpgp.order()

    def generator(self):
        """Return the (cached) generator: the pairing of the G1 and G2 generators."""
        if self.gen is None:
            self.gen = self.bp.G1.generator().pair(self.bp.G2.generator())
        return self.gen

    def sum(self, points):
        """Return the sum of the given GT points."""
        total = self.infinite()
        for point in points:
            total += point
        return total

    def wsum(self, weights, generators):
        """Return the weighted sum of the given GT points."""
        total = self.infinite()
        for weight, base in zip(weights, generators):
            total += weight * base
        return total
# TODO: Why should this not just be called GTPoint?
class AdditivePoint:
    """
    A wrapper for GT points that uses additive notation.

    Args:
        pt (``bplib.bp.GTElem``): Wrapped point.
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, pt, bp):
        self.pt = pt
        self.bp = bp
        self.group = self.bp.GT

    def export(self, form=0):
        if form:
            return self.pt.export(form)
        return self.pt.export()

    def __mul__(self, nb):
        """
        Overrides the multiplicative syntax by an additive one.

        Special case in 0 as the underlying ``bplib`` function is broken for
        this value.
        """
        if nb == 0:
            # pt / pt yields the identity element of GT.
            return AdditivePoint(self.pt / self.pt, self.bp)
        return AdditivePoint(self.pt ** nb, self.bp)

    __rmul__ = __mul__

    def __eq__(self, other):
        return self.pt == other.pt

    def __add__(self, other):
        """
        GT is written multiplicatively in ``bplib``: addition here maps to the
        underlying multiplication.
        """
        return AdditivePoint(self.pt * (other.pt), self.bp)

    def __repr__(self):
        return "GTPt(" + str(self.pt.__hash__()) + ")"
class G1Point:
    """
    Wrapper for G1 points so they can be paired with a G2 point.

    Args:
        pt (``bplib.bp.G1Point``): Point.
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, pt, bp):
        self.pt = pt
        self.bp = bp
        self.group = self.bp.G1

    # Fix: __eq__ was defined twice with the same body; the duplicate has been
    # removed. NOTE(review): defining __eq__ without __hash__ leaves instances
    # unhashable, as in the original design.
    def __eq__(self, other):
        return self.pt == other.pt

    def __add__(self, other):
        return G1Point(self.pt + other.pt, self.bp)

    def __sub__(self, other):
        return self + (-1 * other)

    def __mul__(self, nb):
        return G1Point(self.pt * nb, self.bp)

    __rmul__ = __mul__

    def export(self, form=0):
        return self.pt.export(form) if form else self.pt.export()

    def pair(self, other):
        """Compute the bilinear pairing e(self, other) with a G2 point."""
        return AdditivePoint(self.bp.bpgp.pair(self.pt, other.pt), self.bp)

    def __repr__(self):
        return "G1Pt(" + str(self.pt.__hash__()) + ")"
class G2Point:
    """
    Wrapper for G2 points.

    Args:
        pt (``bplib.bp.G2Point``): Point.
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, pt, bp):
        self.pt = pt
        self.bp = bp
        self.group = self.bp.G2

    # Fix: __eq__ was defined twice with the same body; the duplicate has been
    # removed. NOTE(review): defining __eq__ without __hash__ leaves instances
    # unhashable, as in the original design.
    def __eq__(self, other):
        return self.pt == other.pt

    def __add__(self, other):
        return G2Point(self.pt + other.pt, self.bp)

    def __sub__(self, other):
        return self + (-1 * other)

    def __mul__(self, nb):
        return G2Point(self.pt * nb, self.bp)

    __rmul__ = __mul__

    def export(self, form=0):
        return self.pt.export(form) if form else self.pt.export()

    def __repr__(self):
        return "G2Pt(" + str(self.pt.__hash__()) + ")"
class G1Group:
    """
    Wrapper for G1 that behaves like normal ``petlib.ec.EcGroup``.

    Args:
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, bp):
        self.bp = bp
        # Generator and identity are lazily computed and cached.
        self.gen = None
        self.inf = None

    def generator(self):
        """Return the (cached) group generator."""
        if self.gen is None:
            self.gen = G1Point(self.bp.bpgp.gen1(), self.bp)
        return self.gen

    def infinite(self):
        """Return the (cached) point at infinity."""
        if self.inf is None:
            self.inf = G1Point(self.generator().pt.inf(self.bp.bpgp), self.bp)
        return self.inf

    def order(self):
        """Return the group order."""
        return self.bp.bpgp.order()

    def __eq__(self, other):
        return self.bp.bpgp == other.bp.bpgp and self.__class__ == other.__class__

    def hash_to_point(self, string):
        """Hash a byte string onto a point of G1."""
        return G1Point(self.bp.bpgp.hashG1(string), self.bp)

    # TODO throw these on a base class
    def sum(self, points):
        """Return the sum of the given points."""
        total = self.infinite()
        for point in points:
            total = total + point
        return total

    def wsum(self, weights, generators):
        """Return the weighted sum of the given points."""
        total = self.infinite()
        for weight, base in zip(weights, generators):
            total = total + weight * base
        return total
class G2Group:
    """
    Wrapper for the G2 group.

    NOTE(review): unlike :py:class:`G1Group`, this class defines no ``__eq__``
    (comparison falls back to identity) — confirm whether that asymmetry is
    intentional before relying on group comparisons.

    Args:
        bp (:py:class:`BilinearGroupPair`): Group pair.
    """

    def __init__(self, bp):
        self.bp = bp
        # Generator and identity are lazily computed and cached.
        self.gen = None
        self.inf = None

    def generator(self):
        """Return the (cached) group generator."""
        if self.gen is None:
            self.gen = G2Point(self.bp.bpgp.gen2(), self.bp)
        return self.gen

    def infinite(self):
        """Return the (cached) point at infinity."""
        if self.inf is None:
            self.inf = G2Point(self.generator().pt.inf(self.bp.bpgp), self.bp)
        return self.inf

    def order(self):
        """Return the group order."""
        return self.bp.bpgp.order()

    # TODO throw these on a base class
    def sum(self, points):
        """Return the sum of the given points."""
        total = self.infinite()
        for point in points:
            total = total + point
        return total

    def wsum(self, weights, generators):
        """Return the weighted sum of the given points."""
        total = self.infinite()
        for weight, base in zip(weights, generators):
            total = total + weight * base
        return total
def pt_enc(obj):
    """Encoder for the wrapped points: msgpack-pack the (group nid, raw bytes) pair."""
    payload = (obj.bp.bpgp.nid, obj.pt.export())
    return msgpack.packb(payload)
def pt_dec(bptype, xtype):
    """
    Decoder factory for the wrapped points.

    Args:
        bptype: The raw ``bplib`` element type (e.g. ``G1Elem``).
        xtype: The wrapper class to construct (e.g. :py:class:`G1Point`).
    """

    def dec(data):
        # NOTE(review): the packed nid is unpacked but not used to select the
        # group — decoding always assumes a default BpGroup; confirm this is
        # acceptable for all encoded curves.
        nid, raw = msgpack.unpackb(data)
        group_pair = BilinearGroupPair()
        point = bptype.from_bytes(raw, group_pair.bpgp)
        return xtype(point, group_pair)

    return dec
# Register encoders and decoders for pairing points so that petlib.pack can
# (de)serialize the wrapper types. The integer tags (111-113) identify each
# wrapper type in the serialized stream.
pack.register_coders(G1Point, 111, pt_enc, pt_dec(G1Elem, G1Point))
pack.register_coders(G2Point, 112, pt_enc, pt_dec(G2Elem, G2Point))
pack.register_coders(AdditivePoint, 113, pt_enc, pt_dec(GTElem, AdditivePoint))
r"""
Range proof: ZK proof that a committed value lies within a range.
.. math::
PK \{ (r, x): \underbrace{C = x G + r H}_{Commitment} \land \underbrace{l \leq x < u}_{Range} \}
This module implements a Schoenmakers' range proof, a conjunction of or-proofs for each bit of the
value.
"""
import warnings
from petlib.bn import Bn
from petlib.ec import EcGroup
from zksk import Secret
from zksk.primitives.dlrep import DLRep
from zksk.exceptions import ValidationError
from zksk.extended import ExtendedProofStmt
from zksk.utils import make_generators, get_random_num, ensure_bn
from zksk.composition import AndProofStmt
def decompose_into_n_bits(value, n):
    """
    Decompose a non-negative big number into a fixed-width list of bits.

    Args:
        value (petlib.bn.Bn): Non-negative value to decompose.
        n: Number of bits in the output.

    Returns:
        list: Bits of ``value``, least significant bit first, zero-padded up
        to length ``n``.

    Raises:
        ValueError: If ``value`` is negative or does not fit in ``n`` bits.
    """
    # Fix: raise the idiomatic ValueError instead of a bare Exception
    # (still caught by any `except Exception` handler).
    if value < 0:
        raise ValueError("Can't represent negative values")

    base = [1 if value.is_bit_set(b) else 0 for b in range(value.num_bits())]
    extra_bits = n - len(base)
    if extra_bits < 0:
        raise ValueError("Not enough bits to represent value")

    return base + [0] * extra_bits
class PowerTwoRangeStmt(ExtendedProofStmt):
    r"""
    A power-two range proof statement.

    .. math::

        PK \{ (r, x): C = x G + r H \land 0 \leq x < 2^n \}

    Args:
        com: Value of the Pedersen commitment, :math:`C = x G + r H`
        g: First commitment base point :math:`G`
        h: Second commitment base point :math:`H`
        num_bits: The number of bits of the committed value :math:`n`
        x: Value for which we construct a range proof (prover only)
        randomizer: Randomizer of the commitment :math:`r` (prover only)
    """

    def __init__(self, com, g, h, num_bits, x=None, randomizer=None):
        # Fix: act as a prover only when both the secret and its randomizer are
        # given AND hold values. Previously `x.value` was dereferenced without a
        # None check, so the documented verifier-side defaults (x=None,
        # randomizer=None) crashed with AttributeError.
        if (
            x is not None
            and randomizer is not None
            and x.value is not None
            and randomizer.value is not None
        ):
            self.x = x
            self.randomizer = randomizer
            self.is_prover = True

            # Ensure secret is in range
            self.x.value = ensure_bn(self.x.value)
            if self.x.value < 0:
                warnings.warn("Secret is negative")
            if self.x.value.num_bits() > num_bits:
                warnings.warn("Secret has more than {} bits".format(num_bits))
        else:
            self.is_prover = False

        # TODO: Should we combine com with the inner proof?
        self.com = com
        self.g = g
        self.h = h
        self.order = g.group.order()
        self.num_bits = num_bits

        # The constructed proofs need extra randomizers as secrets
        self.randomizers = [Secret() for _ in range(self.num_bits)]

    def precommit(self):
        """
        Commit to the bit-decomposition of the value.
        """
        actual_value = ensure_bn(self.x.value)
        value_as_bits = decompose_into_n_bits(actual_value, self.num_bits)

        # Set true value to computed secrets
        for rand in self.randomizers:
            rand.value = self.order.random()

        precommitment = {}
        precommitment["Cs"] = [
            b * self.g + r.value * self.h
            for b, r in zip(value_as_bits, self.randomizers)
        ]

        # Compute revealed randomizer: sum_i 2^i * r_i - r (mod order), which
        # lets the verifier link the bit commitments back to the original one.
        rand = Bn(0)
        power = Bn(1)
        for r in self.randomizers:
            rand = rand.mod_add(r.value * power, self.order)
            power *= 2
        rand = rand.mod_sub(self.randomizer.value, self.order)
        precommitment["rand"] = rand

        return precommitment

    def construct_stmt(self, precommitment):
        """
        Construct the internal proof statement.

        Builds, for each bit commitment, an or-proof that it hides either 0 or
        1, and conjoins them.
        """
        if self.is_prover:
            # Indicators that tell us which or-clause is true
            actual_value = ensure_bn(self.x.value)
            value_as_bits = decompose_into_n_bits(actual_value, self.num_bits)
            zero_simulated = [b == 1 for b in value_as_bits]
            one_simulated = [b == 0 for b in value_as_bits]

        bit_proofs = []
        for i in range(self.num_bits):
            p0 = DLRep(precommitment["Cs"][i], self.randomizers[i] * self.h)
            p1 = DLRep(precommitment["Cs"][i] - self.g, self.randomizers[i] * self.h)

            # When we are a prover, mark which disjunct is true
            if self.is_prover:
                p0.set_simulated(zero_simulated[i])
                p1.set_simulated(one_simulated[i])

            bit_proofs.append(p0 | p1)

        return AndProofStmt(*bit_proofs)

    def simulate_precommit(self):
        """
        Draw a random, consistently-linked set of fake bit commitments.
        """
        randomizers = [self.order.random() for _ in range(self.num_bits)]

        precommitment = {}
        precommitment["Cs"] = [r * self.h for r in randomizers]
        # Fold the original commitment into the first slot so the validation
        # equation still holds.
        precommitment["Cs"][0] += self.com

        # Compute revealed randomizer
        rand = Bn(0)
        power = Bn(1)
        for r in randomizers:
            rand = rand.mod_add(r * power, self.order)
            power *= 2
        precommitment["rand"] = rand

        return precommitment

    def validate(self, precommitment):
        """
        Check the commitment to the bit-decomposition is correct.

        Raises:
            ValidationError: If the bit commitments do not combine to the
                original commitment.
        """
        rand = precommitment["rand"]

        # Combine bit commitments into value commitment
        combined = self.g.group.infinite()
        power = Bn(1)
        for c in precommitment["Cs"]:
            combined += power * c
            power *= 2

        if combined != self.com + rand * self.h:
            raise ValidationError("The commitments do not combine correctly")
class GenericRangeStmtMaker:
    r"""
    Auxiliary builder class for generic range proofs.

    .. math::

        PK \{ (r, x): x G + r H \land a \leq x < b \}

    See "`Efficient Protocols for Set Membership and Range Proofs`_" by Camenisch
    et al., 2008.

    In practice, use the :py:obj:`zksk.primitives.rangeproof.RangeStmt` object directly:

    >>> group = EcGroup()
    >>> x = Secret(value=3)
    >>> randomizer = Secret(value=group.order().random())
    >>> g = group.hash_to_point(b"1")
    >>> h = group.hash_to_point(b"2")
    >>> lo = 0
    >>> hi = 5
    >>> com = x * g + randomizer * h
    >>> stmt = RangeStmt(com.eval(), g, h, lo, hi, x, randomizer)
    >>> nizk = stmt.prove()
    >>> stmt.verify(nizk)
    True

    See :py:meth:`GenericRangeStmtMaker.__call__` for the construction signature.

    .. _`Efficient Protocols for Set Membership and Range Proofs`:
        https://infoscience.epfl.ch/record/128718/files/CCS08.pdf
    """

    def __call__(self, com, g, h, a, b, x, r):
        """
        Get a conjunction of two range-power-of-two proofs.

        Args:
            com: Value of the Pedersen commitment, :math:`C = x G + r H`
            g: First commitment base point :math:`G`
            h: Second commitment base point :math:`H`
            a: Lower limit :math:`a`
            b: Upper limit :math:`b`
            x: Value for which we construct a range proof
            r: Randomizer of the commitment :math:`r`
        """
        a = ensure_bn(a)
        b = ensure_bn(b)
        num_bits = (b - a - 1).num_bits()
        # Reduce the [a, b) range to two power-of-two ranges: both x - a and
        # x - a + offset must lie in [0, 2^num_bits).
        offset = Bn(2) ** num_bits - (b - a)
        com_shifted1 = com - a * g
        com_shifted2 = com_shifted1 + offset * g
        x1 = Secret()
        x2 = Secret()
        if x.value is not None:
            x1.value = x.value - a
            x2.value = x.value - a + offset

            # Ensure secret is in range
            if x.value < a or x.value >= b:
                warnings.warn("Secret outside of given range [{}, {})".format(a, b))

        com_stmt = DLRep(com, x * g + r * h)

        p1 = PowerTwoRangeStmt(
            com=com_shifted1, g=g, h=h, num_bits=num_bits, x=x1, randomizer=r,
        )
        p2 = PowerTwoRangeStmt(
            com=com_shifted2, g=g, h=h, num_bits=num_bits, x=x2, randomizer=r,
        )
        return com_stmt & p1 & p2
class GenericRangeOnlyStmtMaker:
    r"""
    Auxiliary builder class for generic range proofs.

    .. math::

        PK \{ (x): a \leq x < b \}

    See "`Efficient Protocols for Set Membership and Range Proofs`_" by Camenisch
    et al., 2008.

    .. _`Efficient Protocols for Set Membership and Range Proofs`:
        https://infoscience.epfl.ch/record/128718/files/CCS08.pdf

    In practice, use the :py:obj:`zksk.primitives.rangeproof.RangeOnlyStmt` object directly:

    >>> x = Secret(value=3)
    >>> lo = 0
    >>> hi = 5
    >>> stmt = RangeOnlyStmt(lo, hi, x)
    >>> nizk = stmt.prove()
    >>> stmt.verify(nizk)
    True

    See :py:meth:`GenericRangeStmtMaker.__call__` for the construction signature.
    """

    def __call__(self, a, b, x=None):
        """
        Get a conjunction of two range-power-of-two proofs.

        Args:
            a: Lower limit :math:`a`
            b: Upper limit :math:`b`
            x: Value for which we construct a range proof
        """
        group = EcGroup()
        g = group.hash_to_point(b"g")
        h = group.hash_to_point(b"h")
        r = Secret(value=group.order().random())
        # NOTE(review): although ``x`` defaults to None, a Secret is required
        # here -- ``x * g`` below would fail for None. Confirm whether the
        # default should be removed.
        com = (x * g + r * h).eval()
        a = ensure_bn(a)
        b = ensure_bn(b)
        num_bits = (b - a - 1).num_bits()
        # Reduce [a, b) to two power-of-two ranges, as in GenericRangeStmtMaker.
        offset = Bn(2) ** num_bits - (b - a)
        com_shifted1 = com - a * g
        com_shifted2 = com_shifted1 + offset * g
        x1 = Secret()
        x2 = Secret()
        if x is not None:
            x1.value = x.value - a
            x2.value = x.value - a + offset

        com_stmt = DLRep(com, x * g + r * h)

        p1 = PowerTwoRangeStmt(
            com=com_shifted1, g=g, h=h, num_bits=num_bits, x=x1, randomizer=r,
        )
        p2 = PowerTwoRangeStmt(
            com=com_shifted2, g=g, h=h, num_bits=num_bits, x=x2, randomizer=r,
        )
        return com_stmt & p1 & p2
# TODO: Make a regular class.
# Module-level singleton: calling it with the construction arguments returns
# the composed range-proof statement.
RangeStmt = GenericRangeStmtMaker()
RangeOnlyStmt = GenericRangeOnlyStmtMaker() | zksk-fork | /zksk_fork-0.0.31-py3-none-any.whl/zksk/primitives/rangeproof.py | rangeproof.py |
r"""
ZK proof for linear representations of discrete logarithms, our basic building block.
An example of such proof is :math:`PK\{ (x_0, x_1): y = x_0 G_0 + x_1 G_1 \}`, where :math:`x_0` and
:math:`x_1` are secret integers from a finite field, :math:`G_0` and :math:`G_1` are points on a
same elliptic curve, and :math:`y` is the actual value of the expression :math:`x_0 G_0 + x_1 G_1`.
See "`Proof Systems for General Statements about Discrete Logarithms`_" by Camenisch and Stadler,
1997 for the details.
.. _`Proof Systems for General Statements about Discrete Logarithms`:
ftp://ftp.inf.ethz.ch/pub/crypto/publications/CamSta97b.pdf
"""
from hashlib import sha256
from petlib.bn import Bn
from zksk.base import Verifier, Prover, SimulationTranscript
from zksk.expr import Secret, Expression
from zksk.utils import get_random_num
from zksk.consts import CHALLENGE_LENGTH
from zksk.composition import ComposableProofStmt
from zksk.exceptions import IncompleteValuesError, InvalidExpression
import warnings
class DLRepVerifier(Verifier):
    def check_responses_consistency(self, responses, responses_dict=None):
        """
        Check if reoccurring secrets yield the same responses.

        Walks the secrets of the current DLRep statement in order, recording
        the first response seen for each secret and comparing every later
        occurrence against it.

        Args:
            responses: List of responses.
            responses_dict: Mapping from secrets to responses; updated in place.

        Returns:
            bool: True if responses are consistent, False otherwise.
        """
        if responses_dict is None:
            responses_dict = {}

        for i, secret in enumerate(self.stmt.secret_vars):
            first_seen = responses_dict.setdefault(secret, responses[i])
            if first_seen != responses[i]:
                return False
        return True
class DLRep(ComposableProofStmt):
    """
    Proof statement for a discrete-logarithm representation proof.

    Supports statements of the following form:

    .. math::

        PK\{ (x_0, x_1, ..., x_n): Y = x_0 G_0 + x_1 G_1 + ... + x_n G_n \}

    Example usage for :math:`PK\{x: Y = x G \}`:

    >>> from petlib.ec import EcGroup
    >>> x = Secret(name="x")
    >>> g = EcGroup().generator()
    >>> y = 42 * g
    >>> stmt = DLRep(y, x * g)
    >>> nizk = stmt.prove({x: 42})
    >>> stmt.verify(nizk)
    True

    Args:
        expr (:py:class:`zksk.base.Expression`): Proof statement.
            For example: ``Secret("x") * g`` represents :math:`PK\{ x: Y = x G \}`.
        lhs: "Left-hand side." Value of :math:`Y`.
    """

    verifier_cls = DLRepVerifier

    def __init__(self, lhs, expr, simulated=False):
        if isinstance(expr, Expression):
            self.bases = list(expr.bases)
            self.secret_vars = list(expr.secrets)
        else:
            raise TypeError("Expected an Expression. Got: {}".format(expr))

        # Check all the generators live in the same group
        test_group = self.bases[0].group
        for g in self.bases:
            if g.group != test_group:
                raise InvalidExpression(
                    "All bases should come from the same group", g.group
                )

        # Construct a dictionary with the secret values we already know
        self.secret_values = {}
        for sec in self.secret_vars:
            if sec.value is not None:
                self.secret_values[sec] = sec.value

        self.lhs = lhs
        self.set_simulated(simulated)

    def get_prover(self, secrets_dict=None):
        """
        Get a prover for the current proof statement.

        Args:
            secrets_dict: Optional mapping from secrets or secret names to their values.

        Returns:
            :py:class:`DLRepProver` or None: Prover object if all secret values are known.
        """
        if secrets_dict is None:
            secrets_dict = {}

        # First we update the dictionary we have with the additional secrets, and process it
        self.secret_values.update(secrets_dict)
        secrets_dict = self.secret_values

        # If missing secrets or simulation parameter set, return now
        # NOTE(review): ``set_simulated()`` is called here with no argument;
        # if it is a setter with a default it may flip the simulation flag as
        # a side effect rather than just read it -- confirm against
        # ComposableProofStmt.
        if (
            self.set_simulated()
            or secrets_dict == {}
            or any(sec not in secrets_dict.keys() for sec in set(self.secret_vars))
        ):
            # TODO: Make this raise:
            # raise IncompleteValuesError(self.secret_vars)
            return None

        # We check everything is indeed a big number, else we cast it
        for name, sec in secrets_dict.items():
            if not isinstance(sec, Bn):
                secrets_dict[name] = Bn(sec)

        return DLRepProver(self, secrets_dict)

    def get_proof_id(self, secret_id_map=None):
        """
        Identifier for the proof statement.

        Returns:
            list: Objects that can be used for hashing.
        """
        # Extend the generic identifier with this statement's left-hand side.
        proof_id = super().get_proof_id(secret_id_map)
        return proof_id + [self.lhs]

    def get_randomizers(self):
        """
        Initialize randomizers for each secret.

        Each randomizer is drawn at random from the associated group.

        By using a dictionary, we enforce that if secret are repeated in :math:`x_0 G_0 + x_1 G_1 +
        ... + x_n G_n`, that is, if :math:`x_i` and :math:`x_j` have the same name, they will get
        the same random value. Identical secret values and identical randomizers will yield
        identical responses, and this identity will be checked by the verifier.

        Returns:
            dict: Mapping from secrets to the random values that are needed to compute the responses
                of the proof.
        """
        output = {}
        order = self.bases[0].group.order()
        for sec in set(self.secret_vars):
            output.update({sec: order.random()})
        return output

    def recompute_commitment(self, challenge, responses):
        # Commitment consistent with the challenge and responses:
        # sum_i responses[i] * G_i - challenge * Y.
        commitment = (
            self.lhs.group.wsum(responses, self.bases) + (-challenge) * self.lhs
        )
        return commitment

    def simulate_proof(self, responses_dict=None, challenge=None):
        """
        Returns a transcript of a proof simulation. Responses and challenge can be enforced. The
        function will misbehave if passed a non-empty but incomplete responses_dict.

        Args:
            responses_dict: Optinal mapping from secrets or secret names to responses.
            challenge: Optional challenge to use in the simulation
        """
        # Fill the missing positions of the responses dictionary
        responses_dict = self.update_randomizers(responses_dict)

        if challenge is None:
            challenge = get_random_num(CHALLENGE_LENGTH)

        responses = [responses_dict[m] for m in self.secret_vars]
        # Random responses, the same for shared secrets
        commitment = self.recompute_commitment(challenge, responses)

        return SimulationTranscript(
            commitment=commitment, challenge=challenge, responses=responses
        )
class DLRepProver(Prover):
    """The prover in a discrete logarithm proof."""

    def internal_commit(self, randomizers_dict=None):
        """
        Compute the commitment using the randomizers.

        Args:
            randomizers_dict: Optional mapping from secrets or secret names to random values.
                Every random value not given here will be generated at random.

        Returns:
            A single commitment---sum of bases, weighted by the corresponding randomizers
        """
        # Fill in any randomizer positions the caller did not supply.
        randomizers_dict = self.stmt.update_randomizers(randomizers_dict)

        # Ordered randomizers, mirroring the order of the Secret objects.
        self.ks = [randomizers_dict[sec] for sec in self.stmt.secret_vars]

        # Accumulate k_0 * g_0 + k_1 * g_1 + ... starting from the identity.
        commitment = self.stmt.bases[0].group.infinite()
        for k, base in zip(self.ks, self.stmt.bases):
            commitment = commitment + k * base
        return commitment

    def compute_response(self, challenge):
        """
        Construct an (ordered) list of responses, one per secret.

        For each secret :math:`x` and a random value :math:`k` (associated to :math:`x`), the
        response is equal to :math:`k + c x`, where :math:`c` is the challenge value.

        Args:
            challenge: Challenge value

        Returns:
            A list of responses
        """
        order = self.stmt.bases[0].group.order()
        responses = []
        for secret_var, k in zip(self.stmt.secret_vars, self.ks):
            responses.append((self.secret_values[secret_var] * challenge + k) % order)
        return responses
import attr
from zksk.expr import Secret, wsum_secrets
from zksk.extended import ExtendedProofStmt
from zksk.composition import AndProofStmt
from zksk.primitives.dlrep import DLRep
from zksk.utils import make_generators
@attr.s
class BBSPlusSignature:
    """
    BBS+ signature.
    """

    # Signature point A, exponent e, and blinding scalar s of the BBS+ scheme.
    A = attr.ib()
    e = attr.ib()
    s = attr.ib()

    def verify_signature(self, pk, messages):
        """
        Verify the validity of the signature w.r.t the given public key and set of messages.
        """
        # Check the pairing equation:
        # e(A, w + e * h0) == e(g0 + s * g1 + sum_i m_i * g_{i+2}, h0).
        generators = pk.generators[: len(messages) + 2]
        product = generators[0] + generators[0].group.wsum(
            ([self.s] + messages), generators[1:]
        )
        return self.A.pair(pk.w + self.e * pk.h0) == product.pair(pk.h0)
@attr.s
class UserCommitmentMessage:
    """
    Embed the product to be pre-signed by the issuer.

    If blinded by a user's Pedersen commitment, a NI proof is also specified.
    """

    # Commitment (product of the message generators), possibly blinded.
    com_message = attr.ib()
    # NIZK proof of knowledge of the openings; present only if blinded.
    com_nizk_proof = attr.ib(default=None)

    def verify_blinding(self, pk):
        """
        Verify the NIZK proof for Pedersen commitment.

        Args:
            pk: Issuer public key providing the commitment generators.

        Raises:
            ValueError: If no proof is attached to this message.
        """
        if self.com_nizk_proof is None:
            raise ValueError("No proof to verify")

        # TODO: Extract into a separate ExtendedProofStmt.
        lhs = self.com_message
        # One fresh Secret per response; generators start at index 1 (the
        # blinding base), followed by the message bases.
        generators = pk.generators[1 : len(self.com_nizk_proof.responses) + 1]
        secret_vars = [Secret() for _ in self.com_nizk_proof.responses]
        proof = DLRep(lhs, wsum_secrets(secret_vars, generators))
        return proof.verify(self.com_nizk_proof)
class BBSPlusSignatureCreator:
    """
    Pre-signed product along with a NIZK proof of correct construction.

    Args:
        pk (PublicKey): Public key.
    """

    def __init__(self, pk):
        self.pk = pk
        # User-side part of the signature blinding factor; set by commit(zkp=True).
        self.s1 = None

    def commit(self, messages, zkp=True):
        """
        Construct the product of messages and optionally a Pedersen commitment and its proof.

        Args:
            messages: Messages (attributes) to commit to
            zkp (bool): Whether to construct a Pedersen commitment and proof the knowledge of the
                messages for this commitment.

        Returns:
            :py:class:`UserCommitmentMessage`: user's packed commitment.
        """
        # Product of message generators: sum_i m_i * g_{i+2}.
        lhs = self.pk.generators[0].group.wsum(
            messages, self.pk.generators[2 : len(messages) + 2]
        )
        com_nizk_proof = None
        if zkp:
            # Blind the product with a fresh randomizer on generator g_1.
            self.s1 = self.pk.generators[0].group.order().random()
            lhs = self.s1 * self.pk.generators[1] + lhs

            # TODO: Extract into a separate ExtendedProofStmt.
            secret_vars = [Secret() for _ in range(len(messages) + 1)]
            secrets = [self.s1] + messages
            rhs = wsum_secrets(secret_vars, self.pk.generators[1 : len(messages) + 2])
            com_stmt = DLRep(lhs, rhs)
            com_nizk_proof = com_stmt.prove(
                {s: v for s, v in zip(secret_vars, secrets)}
            )

        return UserCommitmentMessage(com_message=lhs, com_nizk_proof=com_nizk_proof)

    def obtain_signature(self, presignature):
        """
        Make a complete signature from the received pre-signature.

        Args:
            presignature (:py:class:`BBSPlusSignature`): presignature

        Returns:
            :py:class:`BBSPlusSignature`: Signature.
        """
        # s1 is the part of the signature blinding factor which is on the user side.
        if self.s1 is None:
            new_s = presignature.s
        else:
            new_s = presignature.s + self.s1
        return BBSPlusSignature(A=presignature.A, e=presignature.e, s=new_s)
@attr.s
class BBSPlusKeypair:
    """
    A public-private key pair, along with a list of canonical bases to use in proofs.
    """

    generators = attr.ib()
    h0 = attr.ib()
    sk = attr.ib()
    pk = attr.ib()

    @staticmethod
    def generate(bilinear_pair, num_generators):
        """
        Generate a keypair.

        Args:
            bilinear_pair (:py:class:`pairings.BilinearGroupPair`): Bilinear group pair.
            num_generators: Upper bound on the number of generators needed to compute the proof.
                Should be at least `2 + the number of messages`.

        Returns:
            :py:class:`BBSPlusKeypair`: Keypair.
        """
        # TODO: Check if this +2 is not redundant.
        generators = make_generators(num_generators + 2, group=bilinear_pair.G1)
        h0 = bilinear_pair.G2.generator()
        sk = BBSPlusSecretKey(
            gamma=bilinear_pair.G1.order().random(), generators=generators, h0=h0,
        )
        # The public key w = gamma * h0 matches the secret exponent gamma.
        pk = BBSPlusPublicKey(w=sk.gamma * h0, generators=generators, h0=h0)
        return BBSPlusKeypair(generators=generators, h0=h0, sk=sk, pk=pk)
@attr.s
class BBSPlusPublicKey:
    """
    BBS+ public key.

    Automatically pre-computes the generator pairings :math:`e(g_i, h_0)`.
    """

    w = attr.ib()
    h0 = attr.ib()
    generators = attr.ib()

    def __attrs_post_init__(self):
        """Pre-compute the group pairings."""
        # Cached pairings e(g_i, h0), used when building signature proofs.
        self.gen_pairs = [g.pair(self.h0) for g in self.generators]
@attr.s
class BBSPlusSecretKey:
    """
    BBS+ private key.
    """

    h0 = attr.ib()
    # Secret exponent gamma; the public key is w = gamma * h0.
    gamma = attr.ib()
    generators = attr.ib()

    def sign(self, lhs):
        r"""
        Sign a committed message (typically a product, blinded or not),

        A signature is :math:`(A, e, s_2)` such that
        :math:`A = (g_0 + s_2 g_1 + C_m) \cdot \frac{1}{e+\gamma}`.

        If the product was blinded by the user's :math:`s_1` secret value, user has to update the
        signature.
        """
        pedersen_product = lhs
        e = self.h0.group.order().random()
        s2 = self.h0.group.order().random()
        prod = self.generators[0] + s2 * self.generators[1] + pedersen_product
        # Divide by (gamma + e) via a modular inverse of the scalar.
        A = (self.gamma + e).mod_inverse(self.h0.group.order()) * prod
        return BBSPlusSignature(A=A, e=e, s=s2)
class BBSPlusSignatureStmt(ExtendedProofStmt):
    """
    Proof of knowledge of a BBS+ signature over a set of (hidden) messages.

    The proof can be made `binding`: bind the secrets to another proof. If the proof is not binding,
    it is not possible to assert that the same secrets were used in any other proof.

    Args:
        secret_vars: Secret variables.
            If binding, the two first elements of secret_vars as the Secret variables for the ``e``
            and ``s`` attributes of the signature.
        pk (:py:class:`BBSPlusPublicKey`): Public key.
        signature (:py:class:`BBSPlusSignature`): Signature. Required if used for proving.
        binding (bool): Whether the signature is binding.
        simulated (bool): If this proof is a part of an or-proof: whether it should be simulated.
    """

    def __init__(self, secret_vars, pk, signature=None, binding=True, simulated=False):
        self.pk = pk
        self.signature = signature
        if not binding:
            # We add two Secret slots for e and s if necessary
            secret_vars = [Secret(), Secret()] + secret_vars

        # We need L+1 generators for L messages. secret_vars are messages plus 'e' and 's'
        self.bases = pk.generators[: len(secret_vars)]
        self.order = self.bases[0].group.order()

        # The prover will compute the following secrets:
        self.r1, self.r2, self.delta1, self.delta2 = (
            Secret(),
            Secret(),
            Secret(),
            Secret(),
        )

        # Below is boilerplate
        self.secret_vars = secret_vars
        if signature is not None:
            # Digest the signature parameters
            self.secret_vars[0].value = signature.e
            self.secret_vars[1].value = signature.s

        self.set_simulated(simulated)
def precommit(self):
"""
Generate the lacking information to construct a complete proof.
The precommitment comprises the ``A1`` and ``A2`` commitments that depend on the secret
signature and the Prover's randomness.
"""
if self.signature is None:
raise ValueException("No signature given!")
# Compute auxiliary commitments A1,A2 as mentioned in the paper. Needs two random values r1,r2 and associated delta1,delta2
# Set true value to computed secrets
r1, r2 = self.order.random(), self.order.random()
self.r1.value, self.r2.value = r1, r2
self.delta1.value = r1 * self.signature.e % self.order
self.delta2.value = r2 * self.signature.e % self.order
precommitment = {}
precommitment["A1"] = r1 * self.bases[1] + r2 * self.bases[2]
precommitment["A2"] = r1 * self.bases[2] + self.signature.A
return precommitment
    def construct_stmt(self, precommitment):
        r"""
        Proof of knowledge of a signature.

        This is an implementation of a proof :math:`\Pi_5` detailed on page 7 of the `Constant-Size
        Dynamick-TAA` paper.

        Args:
            precommitment: Mapping with the auxiliary commitments ``"A1"`` and ``"A2"``.

        Returns:
            AndProofStmt: Conjunction of the three internal DLRep statements.
        """
        self.A1, self.A2 = precommitment["A1"], precommitment["A2"]
        g0, g1, g2 = self.bases[0], self.bases[1], self.bases[2]

        # Statement 1: A1 was formed from r1, r2.
        dl1 = DLRep(self.A1, self.r1 * g1 + self.r2 * g2)
        # Statement 2: delta1, delta2 are consistent with e and A1.
        dl2 = DLRep(
            g0.group.infinite(),
            self.delta1 * g1 + self.delta2 * g2 + self.secret_vars[0] * (-1 * self.A1),
        )

        # Statement 3: the pairing equation over the blinded signature A2.
        self.pair_lhs = self.A2.pair(self.pk.w) + (-1 * self.pk.gen_pairs[0])
        bases = [
            -1 * (self.A2.pair(self.pk.h0)),
            self.bases[2].pair(self.pk.w),
            self.pk.gen_pairs[2],
        ]
        bases.extend(self.pk.gen_pairs[1 : len(self.bases)])

        # Build secret names [e, r1, delta1, s, m_i]
        new_secret_vars = (
            self.secret_vars[:1] + [self.r1, self.delta1] + self.secret_vars[1:]
        )
        pairings_stmt = DLRep(self.pair_lhs, wsum_secrets(new_secret_vars, bases))

        constructed_stmt = AndProofStmt(dl1, dl2, pairings_stmt)
        constructed_stmt.lhs = [p.lhs for p in constructed_stmt.subproofs]
        return constructed_stmt
def simulate_precommit(self):
"""
Draw :math:`A_1`, :math:`A_2` at random.
"""
group = self.bases[0].group
precommitment = {}
precommitment["A1"] = group.order().random() * group.generator()
precommitment["A2"] = group.order().random() * group.generator()
return precommitment | zksk-fork | /zksk_fork-0.0.31-py3-none-any.whl/zksk/primitives/bbsplus.py | bbsplus.py |
r"""
ZK proof of inequality of two discrete logarithms.
.. math::
PK\{ (x): H_0 = x h_0 \land H_1 \neq x h_1 \}
See Protocol 1 in "`Thinking Inside the BLAC Box: Smarter Protocols for Faster Anonymous
Blacklisting`_" by Henry and Goldberg, 2013:
.. _`Thinking Inside the BLAC Box: Smarter Protocols for Faster Anonymous
Blacklisting`: https://www.cypherpunks.ca/~iang/pubs/blacronym-wpes.pdf
"""
from zksk.expr import Secret, wsum_secrets
from zksk.exceptions import ValidationError
from zksk.extended import ExtendedProofStmt, ExtendedVerifier
from zksk.composition import AndProofStmt
from zksk.primitives.dlrep import DLRep
class DLNotEqual(ExtendedProofStmt):
    r"""
    ZK proof statement of inequality of two discrete logarithms.

    .. math::

        PK\{ (x): H_0 = x h_0 \land H_1 \neq x h_1 \}

    The statement is constructed from two pairs: :math:`(H_0, h_0)`, :math:`(H_1, h_1)`, and a
    :py:class:`expr.Secret` object representing a secret :math:`x`.

    The proof can be made `binding`: bind the :math:`x` to another proof. If the proof is not
    binding, it is not possible to assert that the same :math:`x` was used in any other proof (even
    in, say, an AND conjunction).

    Args:
        valid_pair (tuple): Pair of two Elliptic curve points :math:`(H_0, h_0)` such that
            :math:`H_0 = x h_0`
        invalid_pair (tuple): Pair of two Elliptic curve points :math:`(H_1, h_1)` such that
            :math:`H_1 \neq x h_1`
        x (:py:class:`expr.Secret`): Secret.
        bind (bool): Whether the proof is binding.
        simulated (bool): If this proof is a part of an or-proof: whether it should be simulated.

    Raises:
        TypeError: If ``valid_pair`` or ``invalid_pair`` is not a pair.
    """

    def __init__(self, valid_pair, invalid_pair, x, bind=False, simulated=False):
        if len(valid_pair) != 2 or len(invalid_pair) != 2:
            # Bug fix: this previously raised ``TypeException``, an undefined
            # name that would itself surface as a NameError. The builtin
            # TypeError is the intended exception.
            raise TypeError("The valid_pair and invalid_pair must be pairs")

        self.x = x

        # The internal ZK proof uses two constructed secrets
        self.alpha, self.beta = Secret(), Secret()

        self.lhs = [valid_pair[0], invalid_pair[0]]
        self.g = valid_pair[1]
        self.h = invalid_pair[1]

        self.bind = bind
        self.set_simulated(simulated)

    def precommit(self):
        """Build the left-hand side of the internal proof statement."""
        order = self.g.group.order()
        blinder = order.random()

        # Set the value of the two internal secrets:
        # alpha = x * blinder, beta = -blinder (mod group order).
        self.alpha.value = self.x.value * blinder % order
        self.beta.value = -blinder % order

        precommitment = blinder * (self.x.value * self.h - self.lhs[1])
        return precommitment

    def construct_stmt(self, precommitment):
        """
        Build the internal proof statement.

        See the formula in Protocol 1 of the `Thinking Inside the BLAC Box: Smarter Protocols for
        Faster Anonymous Blacklisting` paper.
        """
        infty = self.g.group.infinite()
        p1 = DLRep(infty, self.alpha * self.g + self.beta * self.lhs[0])
        p2 = DLRep(precommitment, self.alpha * self.h + self.beta * self.lhs[1])
        statements = [p1, p2]

        if self.bind:
            # If the binding parameter is set, we add a DLRep member repeating
            # the first member without randomizing the secret.
            statements.append(DLRep(self.lhs[0], self.x * self.g))

        return AndProofStmt(*statements)

    def validate(self, precommitment):
        """
        Verify that the proof statement is indeed proving the inequality of discrete logs.

        Raises:
            ValidationError: If the precommitment is the unity element.
        """
        if precommitment == self.g.group.infinite():
            raise ValidationError("The commitment should not be the unity element")

    def simulate_precommit(self):
        """
        Draw a base at random (not unity) from the bases' group.
        """
        group = self.g.group
        precommitment = group.order().random() * group.generator()
        return precommitment
import math
import secrets
import hashlib
import warnings
from petlib.bn import Bn
from zksk.consts import DEFAULT_GROUP
from zksk.exceptions import InvalidExpression
def get_random_point(group=None, random_bits=256, seed=None):
    """
    Generate a random group point.

    Args:
        group: Group; defaults to :py:obj:`zksk.consts.DEFAULT_GROUP`.
        random_bits: Number of bits of a random string to create a point.
        seed: Optional integer seed for deterministic output. If None, fresh
            OS randomness is used instead.

    >>> from petlib.ec import EcPt
    >>> a = get_random_point()
    >>> b = get_random_point()
    >>> isinstance(a, EcPt)
    True
    >>> isinstance(b, EcPt)
    True
    >>> a != b
    True
    >>> get_random_point(seed=1)
    EcPt(037697679766c26bb7b76c65d2639fb983dea7c859c63b3047168dbc1b)
    """
    if group is None:
        group = DEFAULT_GROUP
    num_bytes = math.ceil(random_bits / 8)
    if seed is None:
        randomness = secrets.token_bytes(num_bytes)
    else:
        # Deterministic path: hash the seed and truncate to the needed length.
        randomness = hashlib.sha512(b"%i" % seed).digest()[:num_bytes]
    return group.hash_to_point(randomness)
def make_generators(num, group=None, random_bits=256, seed=42):
    """
    Create some random group generators.

    .. WARNING ::

        There is a negligible chance that some generators will be the same.

    Args:
        num: Number of generators to generate.
        group: Group
        random_bits: Number of bits of a random number used to create a generator.

    >>> from petlib.ec import EcPt
    >>> generators = make_generators(3)
    >>> len(generators) == 3
    True
    >>> isinstance(generators[0], EcPt)
    True
    """
    if group is None:
        group = DEFAULT_GROUP

    generators = []
    for index in range(num):
        point_seed = None if seed is None else seed + index
        generators.append(get_random_point(group, random_bits, seed=point_seed))
    return generators
def get_random_num(bits):
    """
    Draw a random number of given bitlength.

    >>> x = get_random_num(6)
    >>> x < 2**6
    True
    """
    # Uniform in [0, 2**bits).
    upper_bound = Bn(2).pow(bits)
    return upper_bound.random()
def sum_bn_array(arr, modulus):
    """
    Sum an array of big numbers under a modulus.

    >>> a = [Bn(5), Bn(7)]
    >>> m = 10
    >>> sum_bn_array(a, m)
    2
    """
    # Coerce plain ints to Bn before doing modular arithmetic.
    modulus = modulus if isinstance(modulus, Bn) else Bn(modulus)
    total = Bn(0)
    for item in arr:
        term = item if isinstance(item, Bn) else Bn(item)
        total = total.mod_add(term, modulus)
    return total
def ensure_bn(x):
    """
    Ensure that value is big number.

    >>> isinstance(ensure_bn(42), Bn)
    True
    >>> isinstance(ensure_bn(Bn(42)), Bn)
    True
    """
    return x if isinstance(x, Bn) else Bn(x)
# zkSync Python SDK
[](https://wallet.zksync.io)
[](https://rinkeby.zksync.io)
[](https://ropsten.zksync.io)
[](https://gitter.im/matter-labs/zksync?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
This repository provides a Python SDK for zkSync developers, which can be used either on PC or Android.
## What is zkSync
zkSync is a scaling and privacy engine for Ethereum. Its current functionality scope includes low gas transfers of ETH
and ERC20 tokens in the Ethereum network.
zkSync is built on ZK Rollup architecture. ZK Rollup is an L2 scaling solution in which all funds are held by a smart
contract on the mainchain, while computation and storage are performed off-chain. For every Rollup block, a state
transition zero-knowledge proof (SNARK) is generated and verified by the mainchain contract. This SNARK includes the
proof of the validity of every single transaction in the Rollup block.
Additionally, the public data update for every block is published over the mainchain network in the cheap calldata.
This architecture provides the following guarantees:
- The Rollup validator(s) can never corrupt the state or steal funds (unlike Sidechains).
- Users can always retrieve the funds from the Rollup even if validator(s) stop cooperating because the data is available (unlike Plasma).
- Thanks to validity proofs, neither users nor a single other trusted party needs to be online to monitor Rollup blocks in order to prevent fraud.
In other words, ZK Rollup strictly inherits the security guarantees of the underlying L1.
To learn how to use zkSync, please refer to the [zkSync SDK documentation](https://zksync.io/api/sdk/python/tutorial.html).
## Supporting version
Python 3.8+
## License
zkSync Python SDK is distributed under the terms of the MIT license.
See [LICENSE](LICENSE) for details.
### Batch builder ###
This release adds the ability to collect different transactions into a batch and sign them only once. For this, the
`BatchBuilder` class has been added. It collects transactions of different types and builds them all at once. To execute the batch,
use the new `submit_batch_builder_trx_batch` method with the constructed result. The following transaction types are supported:
* ChangePubKey
* Withdraw
* MintNFT
* WithdrawNFT
* Transfer
* ForceExit
* Swap
To create a `BatchBuilder` object, you must provide a `Wallet` object and its current nonce.
`BatchBuilder` can also accept a list of already signed transactions, for instance,
ones produced by the `build_` methods of the same wallet object.
| zksync-sdk | /zksync_sdk-0.1.667.tar.gz/zksync_sdk-0.1.667/README.md | README.md |
from typing import List
from math import ceil
# Bit widths of the exponent/mantissa encoding used for packed amounts.
AMOUNT_EXPONENT_BIT_WIDTH = 5
AMOUNT_MANTISSA_BIT_WIDTH = 35

# Bit widths of the exponent/mantissa encoding used for packed fees.
FEE_EXPONENT_BIT_WIDTH = 5
FEE_MANTISSA_BIT_WIDTH = 11

# Protocol-level bounds for account ids and token ids.
MAX_NUMBER_OF_ACCOUNTS = 2 ** 24
MAX_NUMBER_OF_TOKENS = 2 ** 32 - 1


class SerializationError(Exception):
    """Base class for all serialization errors in this module."""
    pass


class WrongIntegerError(SerializationError):
    """Integer cannot be represented in the float (exponent/mantissa) encoding."""
    pass


class WrongBitsError(SerializationError):
    """Bit buffer length is not a multiple of 8."""
    pass


class ValueNotPackedError(SerializationError):
    """Value is not exactly representable in the packed encoding."""
    pass


class WrongValueError(SerializationError):
    """Value is outside the range allowed by the protocol."""
    pass
def int_to_bytes(val: int, length: int = 4) -> bytes:
    """Encode ``val`` as a big-endian byte string of ``length`` bytes."""
    return val.to_bytes(length, byteorder='big')
def num_to_bits(integer: int, bits: int):
    """
    Return the lowest ``bits`` bits of ``integer``, least-significant first.

    INFO: Can't be used without correct input data of value & corresponded amount of bits, take care
    """
    return [(integer >> shift) & 1 for shift in range(bits)]
def integer_to_float(integer: int, exp_bits: int, mantissa_bits: int, exp_base: int) -> List[int]:
    """
    Encode ``integer`` into the float-like (exponent, mantissa) representation.

    The value is approximated as ``mantissa * exp_base ** exponent`` with
    ``exp_bits`` exponent bits and ``mantissa_bits`` mantissa bits, choosing
    the closer of the two round-down candidates. The encoding is returned as a
    list of byte values in the bit order expected by the pack_* helpers.

    Raises:
        WrongIntegerError: If ``integer`` exceeds the largest representable value.
    """
    max_exponent_power = 2 ** exp_bits - 1
    max_exponent = exp_base ** max_exponent_power
    max_mantissa = 2 ** mantissa_bits - 1
    if integer > max_mantissa * max_exponent:
        raise WrongIntegerError
    # Find the smallest exponent for which the mantissa fits in its bit width.
    exponent = 0
    exponent_temp = 1
    while integer > max_mantissa * exponent_temp:
        exponent_temp = exponent_temp * exp_base
        exponent += 1
    mantissa = integer // exponent_temp
    if exponent != 0:
        # Pick the closer of two candidates:
        # (exponent, mantissa) vs (exponent - 1, max_mantissa).
        variant1 = exponent_temp * mantissa
        variant2 = exponent_temp // exp_base * max_mantissa
        diff1 = integer - variant1
        diff2 = integer - variant2
        if diff2 < diff1:
            mantissa = max_mantissa
            exponent -= 1
    data = num_to_bits(exponent, exp_bits) + num_to_bits(mantissa, mantissa_bits)
    data = list(reversed(data))
    result = list(reversed(bits_into_bytes_in_be_order(data)))
    return result
def integer_to_float_up(integer: int, exp_bits: int, mantissa_bits: int, exp_base) -> List[int]:
    """
    Like :func:`integer_to_float`, but rounds the mantissa up (ceil).

    The decoded value ``mantissa * exp_base ** exponent`` is therefore always
    greater than or equal to ``integer``.

    Raises:
        WrongIntegerError: If ``integer`` exceeds the largest representable value.
    """
    max_exponent_power = 2 ** exp_bits - 1
    max_exponent = exp_base ** max_exponent_power
    max_mantissa = 2 ** mantissa_bits - 1
    if integer > max_mantissa * max_exponent:
        raise WrongIntegerError("Integer is too big")
    # Find the smallest exponent for which the mantissa fits in its bit width.
    exponent = 0
    exponent_temp = 1
    while integer > max_mantissa * exponent_temp:
        exponent_temp = exponent_temp * exp_base
        exponent += 1
    # Ceil division guarantees the decoded value is >= integer.
    mantissa = int(ceil(integer / exponent_temp))
    encoding = num_to_bits(exponent, exp_bits) + num_to_bits(mantissa, mantissa_bits)
    data = list(reversed(encoding))
    result = list(reversed(bits_into_bytes_in_be_order(data)))
    return result
def bits_into_bytes_in_be_order(bits: List[int]):
    """
    Pack a bit list (MSB first within each byte) into a list of byte values.

    Raises:
        WrongBitsError: If the number of bits is not a multiple of 8.
    """
    if len(bits) % 8 != 0:
        raise WrongBitsError("wrong number of bits")

    packed = []
    for start in range(0, len(bits), 8):
        value = 0
        for offset, bit in enumerate(bits[start:start + 8]):
            value |= bit << (7 - offset)
        packed.append(value)
    return packed
def reverse_bit(b):
    """Reverse the bit order of a single byte value (MSB becomes LSB)."""
    b = ((b & 0b11110000) >> 4) | ((b & 0b00001111) << 4)  # swap nibbles
    b = ((b & 0b11001100) >> 2) | ((b & 0b00110011) << 2)  # swap bit pairs
    b = ((b & 0b10101010) >> 1) | ((b & 0b01010101) << 1)  # swap adjacent bits
    return b
def reverse_bits(buffer: List[int]):
    """Return a new list with the elements (bytes) in reverse order."""
    return buffer[::-1]
def buffer_to_bits_be(buff):
    """Expand a byte sequence into a flat bit list, MSB first within each byte."""
    bits = []
    for byte in buff:
        for shift in range(7, -1, -1):
            bits.append((byte >> shift) & 1)
    return bits
def pack_fee(amount: int):
    """Pack a fee into its compact byte form (rounds to the nearest representable value)."""
    return bytes(reverse_bits(
        integer_to_float(amount, FEE_EXPONENT_BIT_WIDTH, FEE_MANTISSA_BIT_WIDTH, 10)
    ))


def pack_amount(amount: int) -> bytes:
    """Pack an amount into its compact byte form (rounds to the nearest representable value)."""
    return bytes(reverse_bits(
        integer_to_float(amount, AMOUNT_EXPONENT_BIT_WIDTH, AMOUNT_MANTISSA_BIT_WIDTH, 10)
    ))
def float_to_integer(float_bytes: bytes, exp_bits, mantissa_bits, exp_base_number):
    """
    Decode the float-like (exponent, mantissa) byte encoding back to an integer.

    Reads the exponent and mantissa bit fields and returns
    ``mantissa * exp_base_number ** exponent``.
    """
    bits = list(reversed(buffer_to_bits_be(list(float_bytes))))
    # Exponent occupies the first ``exp_bits`` bits (LSB first).
    exponent = 0
    exp_pow2 = 1
    for i in range(exp_bits):
        if bits[i] == 1:
            exponent += exp_pow2
        exp_pow2 *= 2
    exponent = exp_base_number ** exponent
    # Mantissa occupies the following ``mantissa_bits`` bits.
    mantissa = 0
    mantissa_pow2 = 1
    for i in range(exp_bits, exp_bits + mantissa_bits):
        if bits[i] == 1:
            mantissa += mantissa_pow2
        mantissa_pow2 *= 2
    return exponent * mantissa
def pack_amount_up(amount: int):
    """Pack an amount, rounding up to a representable value (>= the input)."""
    return bytes(reverse_bits(
        integer_to_float_up(amount, AMOUNT_EXPONENT_BIT_WIDTH, AMOUNT_MANTISSA_BIT_WIDTH, 10)
    ))


def pack_fee_up(fee: int):
    """Pack a fee, rounding up to a representable value (>= the input)."""
    return bytes(reverse_bits(
        integer_to_float_up(fee, FEE_EXPONENT_BIT_WIDTH, FEE_MANTISSA_BIT_WIDTH, 10)
    ))
def closest_packable_amount(amount: int) -> int:
    """Largest amount representable in the packed format that is <= `amount`."""
    return float_to_integer(
        pack_amount(amount), AMOUNT_EXPONENT_BIT_WIDTH, AMOUNT_MANTISSA_BIT_WIDTH, 10)
def closest_greater_or_eq_packable_amount(amount: int) -> int:
    """Smallest amount representable in the packed format that is >= `amount`."""
    return float_to_integer(
        pack_amount_up(amount),
        AMOUNT_EXPONENT_BIT_WIDTH,
        AMOUNT_MANTISSA_BIT_WIDTH,
        10,
    )
def closest_packable_transaction_fee(fee: int) -> int:
    """Largest fee representable in the packed format that is <= `fee`."""
    return float_to_integer(
        pack_fee(fee), FEE_EXPONENT_BIT_WIDTH, FEE_MANTISSA_BIT_WIDTH, 10)
def closest_greater_or_eq_packable_fee(fee: int) -> int:
    """Smallest fee representable in the packed format that is >= `fee`."""
    return float_to_integer(
        pack_fee_up(fee),
        FEE_EXPONENT_BIT_WIDTH,
        FEE_MANTISSA_BIT_WIDTH,
        10,
    )
def packed_fee_checked(fee: int):
    """Pack `fee`, raising ValueNotPackedError if packing would lose precision."""
    if fee == closest_packable_transaction_fee(fee):
        return pack_fee(fee)
    raise ValueNotPackedError
def packed_amount_checked(amount: int):
    """Pack `amount`, raising ValueNotPackedError if packing would lose precision."""
    if amount == closest_packable_amount(amount):
        return pack_amount(amount)
    raise ValueNotPackedError
def serialize_nonce(nonce: int):
    """Serialize a non-negative nonce as 4 bytes; raise WrongValueError otherwise."""
    if nonce >= 0:
        return int_to_bytes(nonce, 4)
    raise WrongValueError
def serialize_timestamp(timestamp: int):
    """Serialize a non-negative timestamp as 8 bytes (4 zero bytes + 4-byte value)."""
    if timestamp < 0:
        raise WrongValueError
    return bytes(4) + int_to_bytes(timestamp, 4)
def serialize_token_id(token_id: int):
    """Serialize a token id as 4 bytes after checking it is in the valid range."""
    if not 0 <= token_id <= MAX_NUMBER_OF_TOKENS:
        raise WrongValueError
    return int_to_bytes(token_id, 4)
def serialize_account_id(account_id: int):
    """Serialize an account id as 4 bytes after checking it is in the valid range."""
    if not 0 <= account_id <= MAX_NUMBER_OF_ACCOUNTS:
        raise WrongValueError
    return int_to_bytes(account_id, 4)
def remove_address_prefix(address: str) -> str:
    """Strip a leading '0x' or 'sync:' prefix from an address, if present."""
    for prefix in ('0x', 'sync:'):
        if address.startswith(prefix):
            return address[len(prefix):]
    return address
def serialize_address(address: str) -> bytes:
    """Decode a hex address ('0x'/'sync:' prefixes allowed) to exactly 20 bytes."""
    raw = bytes.fromhex(remove_address_prefix(address))
    if len(raw) != 20:
        raise WrongValueError
    return raw
def serialize_content_hash(content_hash: str) -> bytes:
    """Decode a hex NFT content hash (optional '0x' prefix) into raw bytes."""
    hex_digits = content_hash[2:] if content_hash.startswith('0x') else content_hash
    return bytes.fromhex(hex_digits)
def serialize_ratio_part(part: int) -> bytes:
    """Serialize one side of a swap ratio as a fixed 15-byte big-endian integer.

    Raises OverflowError for negative values or values that do not fit in
    15 bytes. The previous hex/zfill round-trip silently produced more than
    15 bytes for oversized values instead of failing.
    """
    return part.to_bytes(15, byteorder="big")
import ctypes
from ctypes import (Structure, c_ubyte, cdll)
import os
from typing import Optional
# Byte lengths of the values exchanged with the native zks-crypto library;
# they must match the struct sizes declared on the C side.
PRIVATE_KEY_LEN = 32
PUBLIC_KEY_LEN = 32
PUBKEY_HASH_LEN = 20
PACKED_SIGNATURE_LEN = 64
ORDER_LEN = 89
ORDERS_HASH_LEN = 31
class ZksPrivateKey(Structure):
    # C-compatible buffer holding a 32-byte zkSync private key.
    _fields_ = [
        ("data", c_ubyte * PRIVATE_KEY_LEN),
    ]
class ZksPackedPublicKey(Structure):
    # C-compatible buffer holding a 32-byte packed public key.
    _fields_ = [
        ("data", c_ubyte * PUBLIC_KEY_LEN),
    ]
class ZksPubkeyHash(Structure):
    # C-compatible buffer holding a 20-byte public key hash.
    _fields_ = [
        ("data", c_ubyte * PUBKEY_HASH_LEN),
    ]
class ZksSignature(Structure):
    # C-compatible buffer holding a 64-byte packed musig signature.
    _fields_ = [
        ("data", c_ubyte * PACKED_SIGNATURE_LEN),
    ]
class ZksOrdersHash(Structure):
    # C-compatible buffer holding the 31-byte hash of a pair of orders.
    _fields_ = [
        ("data", c_ubyte * ORDERS_HASH_LEN),
    ]
class ZksOrders(Structure):
    # C-compatible buffer holding two serialized orders back to back.
    _fields_ = [
        ("data", c_ubyte * (ORDER_LEN * 2)),
    ]
class ZkSyncLibrary:
    """ctypes wrapper around the native zks-crypto shared library."""

    def __init__(self, library_path: Optional[str] = None):
        """Load the shared library from `library_path`, falling back to the
        ZK_SYNC_LIBRARY_PATH environment variable (KeyError if neither is set)."""
        if library_path is None:
            library_path = os.environ["ZK_SYNC_LIBRARY_PATH"]
        self.lib = cdll.LoadLibrary(library_path)

    def private_key_from_seed(self, seed: bytes):
        """Derive a 32-byte zkSync private key from an arbitrary-length seed."""
        private_key = ctypes.pointer(ZksPrivateKey())
        self.lib.zks_crypto_private_key_from_seed(seed, len(seed), private_key)
        return bytes(private_key.contents.data)

    def get_public_key(self, private_key: bytes):
        """Compute the 32-byte packed public key for a private key."""
        assert len(private_key) == PRIVATE_KEY_LEN
        public_key = ctypes.pointer(ZksPackedPublicKey())
        pk = ctypes.pointer(ZksPrivateKey(data=(c_ubyte * PRIVATE_KEY_LEN)(*private_key)))
        self.lib.zks_crypto_private_key_to_public_key(pk, public_key)
        return bytes(public_key.contents.data)

    def get_pubkey_hash(self, public_key: bytes):
        """Compute the 20-byte pubkey hash of a packed public key."""
        assert len(public_key) == PUBLIC_KEY_LEN
        public_key_hash = ctypes.pointer(ZksPubkeyHash())
        public_key_ptr = ctypes.pointer(
            ZksPackedPublicKey(data=(c_ubyte * PUBLIC_KEY_LEN)(*public_key)))
        self.lib.zks_crypto_public_key_to_pubkey_hash(public_key_ptr, public_key_hash)
        return bytes(public_key_hash.contents.data)

    def sign(self, private_key: bytes, message: bytes):
        """Produce a 64-byte musig signature of `message` with `private_key`."""
        assert len(private_key) == PRIVATE_KEY_LEN
        signature = ctypes.pointer(ZksSignature())
        private_key_ptr = ctypes.pointer(
            ZksPrivateKey(data=(c_ubyte * PRIVATE_KEY_LEN)(*private_key)))
        self.lib.zks_crypto_sign_musig(private_key_ptr, message, len(message), signature)
        return bytes(signature.contents.data)

    def hash_orders(self, orders: bytes):
        """Rescue-hash the two serialized orders of a swap (2 * ORDER_LEN bytes in)."""
        assert len(orders) == ORDER_LEN * 2
        orders_hash = ctypes.pointer(ZksOrdersHash())
        orders_bytes = ctypes.pointer(
            ZksOrders(data=(c_ubyte * (ORDER_LEN * 2))(*orders)))
        # NOTE(review): passes the struct pointer plus the Python-side byte
        # length — confirm this matches the native rescue_hash_orders signature.
        self.lib.rescue_hash_orders(orders_bytes, len(orders), orders_hash)
        return bytes(orders_hash.contents.data)

    def is_valid_signature(self, message: bytes, public_key: bytes, zk_sync_signature: bytes) -> bool:
        """Verify a musig signature; the native call returns 0 on success."""
        assert len(public_key) == PUBLIC_KEY_LEN
        assert len(zk_sync_signature) == PACKED_SIGNATURE_LEN
        public_key_ptr = ctypes.pointer(
            ZksPackedPublicKey(data=(c_ubyte * PUBLIC_KEY_LEN)(*public_key)))
        signature_ptr = ctypes.pointer(
            ZksSignature(data=(c_ubyte * PACKED_SIGNATURE_LEN)(*zk_sync_signature)))
        ret = self.lib.zks_crypto_verify_musig(message, len(message), public_key_ptr, signature_ptr)
        return ret == 0
from eth_account.signers.base import BaseAccount
from web3 import Web3
from zksync_sdk.contract_utils import erc20_abi, zksync_abi
# Allowance granted to the zkSync contract, and the threshold below which a
# fresh approval is considered necessary.
MAX_ERC20_APPROVE_AMOUNT = 115792089237316195423570985008687907853269984665640564039457584007913129639935  # 2^256 - 1
ERC20_APPROVE_THRESHOLD = 57896044618658097711785492504343953926634992332820282019728792003956564819968  # 2^255
class Contract:
    """Thin wrapper around a web3 contract bound to a local signing account."""

    def __init__(self, contract_address: str, web3: Web3, account: BaseAccount, abi):
        self.contract_address = contract_address
        self.web3 = web3
        self.contract = self.web3.eth.contract(self.contract_address, abi=abi)  # type: ignore[call-overload]
        self.account = account

    def _call_method(self, method_name, *args, amount=None, **kwargs):
        """Build, locally sign and broadcast a state-changing contract call,
        then block until the transaction receipt is available.

        `amount` (in wei), when given, is attached as the transaction value.
        """
        params = {}
        if amount is not None:
            params['value'] = amount
        # 'from' must be set unconditionally (not only when a value is sent):
        # it is required for correct gas estimation and sender resolution on
        # every call, including value-less ERC-20 operations.
        params['from'] = self.account.address
        transaction = getattr(self.contract.functions, method_name)(
            *args,
            **kwargs
        ).buildTransaction(params)
        transaction.update({'nonce': self.web3.eth.get_transaction_count(self.account.address)})
        signed_tx = self.account.sign_transaction(transaction)
        txn_hash = self.web3.eth.send_raw_transaction(signed_tx.rawTransaction)
        txn_receipt = self.web3.eth.waitForTransactionReceipt(txn_hash)
        return txn_receipt
class ZkSync(Contract):
    """Bindings for the main zkSync L1 contract (deposits, exits, key auth)."""

    def __init__(self, web3: Web3, zksync_contract_address: str, account: BaseAccount):
        super().__init__(zksync_contract_address, web3, account, zksync_abi())

    def deposit_eth(self, address: str, amount: int):
        """Deposit `amount` wei of ETH to the given L2 address."""
        return self._call_method("depositETH", address, amount=amount)

    def deposit_erc20(self, token_address: str, address: str, amount: int):
        """Deposit ERC-20 tokens (requires a prior allowance) to the L2 address."""
        return self._call_method("depositERC20", token_address, amount, address)

    def full_exit(self, account_id: int, token_address: str, ):
        """Request an on-chain (censorship-resistant) exit of a token balance."""
        return self._call_method("requestFullExit", account_id, token_address)

    def full_exit_nft(self, account_id: int, token_id: int):
        """Request an on-chain exit of an NFT."""
        return self._call_method("requestFullExitNFT", account_id, token_id)

    def set_auth_pub_key_hash(self, pub_key_hash: bytes, nonce: int):
        """Pre-authorize a new zkSync public key hash on-chain for `nonce`."""
        return self._call_method("setAuthPubkeyHash", pub_key_hash, nonce)

    def auth_facts(self, sender_address: str, nonce: int) -> bytes:
        """Read the on-chain auth fact recorded for (sender, nonce)."""
        return self.contract.caller.authFacts(sender_address, nonce)
class ERC20Contract(Contract):
    """Bindings for an ERC-20 token contract, scoped to zkSync deposits."""

    def __init__(self, web3: Web3, zksync_address: str, contract_address: str,
                 account: BaseAccount):
        # Address of the zkSync contract that will be allowed to pull tokens.
        self.zksync_address = zksync_address
        super().__init__(contract_address, web3, account, erc20_abi())

    def approve_deposit(self, max_erc20_approve_amount=MAX_ERC20_APPROVE_AMOUNT):
        """Grant the zkSync contract a (practically unlimited) token allowance."""
        return self._call_method('approve', self.zksync_address, max_erc20_approve_amount)

    def is_deposit_approved(self, erc20_approve_threshold=ERC20_APPROVE_THRESHOLD):
        """Return True while the remaining allowance is above the threshold."""
        allowance = self.contract.functions.allowance(self.account.address,
                                                      self.zksync_address).call()
        return allowance >= erc20_approve_threshold
import time
from decimal import Decimal
from fractions import Fraction
from typing import List, Optional, Tuple, Union
from zksync_sdk.ethereum_provider import EthereumProvider
from zksync_sdk.ethereum_signer import EthereumSignerInterface
from zksync_sdk.types import (ChangePubKey, ChangePubKeyCREATE2, ChangePubKeyEcdsa,
ChangePubKeyTypes, EncodedTx, ForcedExit, Token, TokenLike,
Tokens, TransactionWithSignature, Transfer, TxEthSignature,
Withdraw, MintNFT, WithdrawNFT, NFT, Order, Swap, RatioType,
token_ratio_to_wei_ratio, get_toggle_message, get_toggle_message_with_pub, Toggle2FA)
from zksync_sdk.zksync_provider import FeeTxType, ZkSyncProviderInterface
from zksync_sdk.zksync_signer import ZkSyncSigner
from zksync_sdk.zksync_provider.transaction import Transaction
# Default transaction validity window: valid immediately, until the maximum
# 32-bit timestamp (i.e. effectively forever).
DEFAULT_VALID_FROM = 0
DEFAULT_VALID_UNTIL = 2 ** 32 - 1
class WalletError(Exception):
    """Base class for errors raised by Wallet operations."""
    pass
class TokenNotFoundError(WalletError):
    """Raised when a token cannot be resolved by id, address or symbol."""
    pass
class AmountsMissing(WalletError):
    """Raised when a swap needs explicit amounts but none were provided."""
    pass
class Wallet:
    """High-level zkSync L2 wallet bound to one Ethereum account.

    Combines a zkSync signer (L2 signatures), an Ethereum signer (L1
    signatures) and a zkSync JSON-RPC provider, exposing the common account
    operations: transfers, withdrawals, swaps, NFT operations, signing-key
    management and 2FA toggles.

    Fix in this revision: `get_limit_order` previously forwarded
    `valid_from`/`valid_until` positionally into `get_order`, where they
    bound to the `nonce` and `valid_from` parameters instead — they are now
    passed by keyword.
    """

    def __init__(self, ethereum_provider: EthereumProvider, zk_signer: ZkSyncSigner,
                 eth_signer: EthereumSignerInterface, provider: ZkSyncProviderInterface):
        self.ethereum_provider = ethereum_provider
        self.zk_signer = zk_signer
        self.eth_signer = eth_signer
        self.zk_provider = provider
        # Resolved lazily from server state on first use and then cached.
        self.account_id = None
        self.tokens = Tokens(tokens=[])

    async def get_account_id(self):
        """Return the cached L2 account id, fetching it from the server once."""
        if self.account_id is None:
            state = await self.zk_provider.get_state(self.address())
            if isinstance(state.id, int):
                self.account_id = state.id
        return self.account_id

    async def send_signed_transaction(self, tx: EncodedTx,
                                      eth_signature: Union[Optional[TxEthSignature], List[Optional[TxEthSignature]]],
                                      fast_processing: bool = False) -> Transaction:
        """Submit an already-signed transaction to the zkSync server."""
        return await self.zk_provider.submit_tx(tx, eth_signature, fast_processing)

    async def send_txs_batch(self, transactions: List[TransactionWithSignature],
                             signatures: Optional[
                                 Union[List[TxEthSignature], TxEthSignature]
                             ] = None) -> List[Transaction]:
        """Submit a batch of signed transactions atomically."""
        return await self.zk_provider.submit_txs_batch(transactions, signatures)

    async def set_signing_key(self, fee_token: TokenLike, *,
                              eth_auth_data: Union[ChangePubKeyCREATE2, ChangePubKeyEcdsa, None] = None,
                              fee: Optional[Decimal] = None, nonce: Optional[int] = None,
                              valid_from=DEFAULT_VALID_FROM, valid_until=DEFAULT_VALID_UNTIL):
        """Register this wallet's zkSync public key on the account.

        The auth method (ECDSA / CREATE2 / on-chain) is inferred from
        `eth_auth_data`; the fee is fetched from the server when not given.
        """
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        fee_token_obj = await self.resolve_token(fee_token)
        if isinstance(eth_auth_data, ChangePubKeyEcdsa):
            eth_auth_type = ChangePubKeyTypes.ecdsa
        elif isinstance(eth_auth_data, ChangePubKeyCREATE2):
            eth_auth_type = ChangePubKeyTypes.create2
        else:
            eth_auth_type = ChangePubKeyTypes.onchain
        if fee is None:
            if eth_auth_type == ChangePubKeyTypes.ecdsa:
                fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.change_pub_key_ecdsa,
                                                                    self.address(),
                                                                    fee_token_obj.id)
            elif eth_auth_type == ChangePubKeyTypes.onchain:
                fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.change_pub_key_onchain,
                                                                    self.address(),
                                                                    fee_token_obj.id)
            else:
                assert eth_auth_type == ChangePubKeyTypes.create2, "invalid eth_auth_type"
                fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.change_pub_key_create2,
                                                                    self.address(),
                                                                    fee_token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = fee_token_obj.from_decimal(fee)
        change_pub_key, eth_signature = await self.build_change_pub_key(fee_token_obj,
                                                                        eth_auth_data, fee_int,
                                                                        nonce,
                                                                        valid_from,
                                                                        valid_until)
        return await self.send_signed_transaction(change_pub_key, eth_signature)

    # This function takes as a parameter the integer fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_change_pub_key(
            self,
            fee_token: Token,
            eth_auth_data: Union[ChangePubKeyCREATE2, ChangePubKeyEcdsa, None],
            fee: int,
            nonce: Optional[int] = None,
            valid_from=DEFAULT_VALID_FROM,
            valid_until=DEFAULT_VALID_UNTIL):
        """Build and doubly-sign (L1 + L2) a ChangePubKey transaction."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        new_pubkey_hash = self.zk_signer.pubkey_hash_str()
        change_pub_key = ChangePubKey(
            account=self.address(),
            account_id=account_id,
            new_pk_hash=new_pubkey_hash,
            token=fee_token,
            fee=fee,
            nonce=nonce,
            valid_until=valid_until,
            valid_from=valid_from,
            eth_auth_data=eth_auth_data
        )
        eth_signature = self.eth_signer.sign(change_pub_key.get_eth_tx_bytes())
        eth_auth_data = change_pub_key.get_auth_data(eth_signature.signature)
        change_pub_key.eth_auth_data = eth_auth_data
        zk_signature = self.zk_signer.sign_tx(change_pub_key)
        change_pub_key.signature = zk_signature
        return change_pub_key, eth_signature

    async def forced_exit(self, target: str, token: TokenLike, fee: Optional[Decimal] = None,
                          valid_from=DEFAULT_VALID_FROM, valid_until=DEFAULT_VALID_UNTIL) -> Transaction:
        """Force-withdraw the full `token` balance of `target` to L1."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        token_obj = await self.resolve_token(token)
        if fee is None:
            fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.withdraw, target, token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = token_obj.from_decimal(fee)
        transfer, eth_signature = await self.build_forced_exit(target, token_obj, fee_int, nonce,
                                                               valid_from, valid_until)
        return await self.send_signed_transaction(transfer, eth_signature)

    # This function takes as a parameter the integer fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_forced_exit(
            self,
            target: str,
            token: Token,
            fee: int,
            nonce: Optional[int] = None,
            valid_from=DEFAULT_VALID_FROM,
            valid_until=DEFAULT_VALID_UNTIL) -> Tuple[ForcedExit, TxEthSignature]:
        """Build and doubly-sign a ForcedExit transaction."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        forced_exit = ForcedExit(initiator_account_id=account_id,
                                 target=target,
                                 fee=fee,
                                 nonce=nonce,
                                 valid_from=valid_from,
                                 valid_until=valid_until,
                                 token=token)
        eth_signature = self.eth_signer.sign_tx(forced_exit)
        zk_signature = self.zk_signer.sign_tx(forced_exit)
        forced_exit.signature = zk_signature
        return forced_exit, eth_signature

    async def mint_nft(self, content_hash: str, recipient: str, token: TokenLike,
                       fee: Optional[Decimal] = None) -> Transaction:
        """Mint an NFT with the given content hash to `recipient`."""
        token_obj = await self.resolve_token(token)
        nonce = await self.zk_provider.get_account_nonce(self.address())
        if fee is None:
            fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.mint_nft, recipient, token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = token_obj.from_decimal(fee)
        mint_nft, eth_signature = await self.build_mint_nft(content_hash, recipient, token_obj, fee_int, nonce)
        return await self.send_signed_transaction(mint_nft, eth_signature)

    # This function takes as a parameter the integer fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_mint_nft(
            self,
            content_hash: str,
            recipient: str,
            token: Token,
            fee: int,
            nonce: Optional[int] = None
    ) -> Tuple[MintNFT, TxEthSignature]:
        """Build and doubly-sign a MintNFT transaction."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        mint_nft = MintNFT(creator_id=account_id,
                           creator_address=self.address(),
                           content_hash=content_hash,
                           recipient=recipient,
                           fee=fee,
                           fee_token=token,
                           nonce=nonce)
        eth_signature = self.eth_signer.sign_tx(mint_nft)
        zk_signature = self.zk_signer.sign_tx(mint_nft)
        mint_nft.signature = zk_signature
        return mint_nft, eth_signature

    async def withdraw_nft(
            self,
            to_address: str,
            nft_token: NFT,
            fee_token: TokenLike,
            fee: Optional[Decimal] = None,
            valid_from=DEFAULT_VALID_FROM,
            valid_until=DEFAULT_VALID_UNTIL
    ) -> Transaction:
        """Withdraw an NFT to an L1 address, paying the fee in `fee_token`."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        fee_token_obj = await self.resolve_token(fee_token)
        if fee is None:
            fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.withdraw_nft, to_address, fee_token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = fee_token_obj.from_decimal(fee)
        withdraw_nft, eth_signature = await self.build_withdraw_nft(to_address, nft_token, fee_token_obj, fee_int,
                                                                    nonce, valid_from, valid_until)
        return await self.send_signed_transaction(withdraw_nft, eth_signature)

    # This function takes as a parameter the integer fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_withdraw_nft(
            self,
            to_address: str,
            nft_token: NFT,
            fee_token: Token,
            fee: int,
            nonce: Optional[int] = None,
            valid_from=DEFAULT_VALID_FROM,
            valid_until=DEFAULT_VALID_UNTIL
    ) -> Tuple[WithdrawNFT, TxEthSignature]:
        """Build and doubly-sign a WithdrawNFT transaction."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        withdraw_nft = WithdrawNFT(
            account_id=account_id,
            from_address=self.address(),
            to_address=to_address,
            fee_token=fee_token,
            fee=fee,
            nonce=nonce,
            valid_from=valid_from,
            valid_until=valid_until,
            token_id=nft_token.id)
        eth_signature = self.eth_signer.sign_tx(withdraw_nft)
        zk_signature = self.zk_signer.sign_tx(withdraw_nft)
        withdraw_nft.signature = zk_signature
        return withdraw_nft, eth_signature

    def address(self):
        """The wallet's Ethereum address (also its zkSync account address)."""
        return self.eth_signer.address()

    async def build_transfer(
            self,
            to: str,
            amount: int,
            token: Token,
            fee: int,
            nonce: Optional[int] = None,
            valid_from: int = DEFAULT_VALID_FROM,
            valid_until: int = DEFAULT_VALID_UNTIL,
    ) -> Tuple[Transfer, TxEthSignature]:
        """
        This function takes as a parameter the integer amount/fee of lowest token denominations (wei, satoshi, etc.)
        """
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        transfer = Transfer(account_id=account_id, from_address=self.address(),
                            to_address=to.lower(),
                            amount=amount, fee=fee,
                            nonce=nonce,
                            valid_from=valid_from,
                            valid_until=valid_until,
                            token=token)
        eth_signature = self.eth_signer.sign_tx(transfer)
        zk_signature = self.zk_signer.sign_tx(transfer)
        transfer.signature = zk_signature
        return transfer, eth_signature

    async def transfer(self, to: str, amount: Decimal, token: TokenLike,
                       fee: Optional[Decimal] = None,
                       valid_from=DEFAULT_VALID_FROM, valid_until=DEFAULT_VALID_UNTIL) -> Transaction:
        """Transfer `amount` of `token` to another L2 address."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        token_obj = await self.resolve_token(token)
        if fee is None:
            fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.transfer, to, token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = token_obj.from_decimal(fee)
        amount_int = token_obj.from_decimal(amount)
        transfer, eth_signature = await self.build_transfer(to, amount_int, token_obj, fee_int, nonce, valid_from, valid_until)
        return await self.send_signed_transaction(transfer, eth_signature)

    async def transfer_nft(self, to: str, nft: NFT, fee_token: TokenLike,
                           fee: Optional[Decimal] = None,
                           valid_from=DEFAULT_VALID_FROM,
                           valid_until=DEFAULT_VALID_UNTIL
                           ) -> List[Transaction]:
        """Transfer an NFT as an atomic batch: one tx moves the NFT (amount 1,
        zero fee) and a second zero-amount self-transfer carries the fee."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        fee_token_obj = await self.resolve_token(fee_token)
        if fee is None:
            fee_int = await self.zk_provider.get_transactions_batch_fee(
                [FeeTxType.transfer, FeeTxType.transfer],
                [to, self.address()],
                fee_token_obj.symbol
            )
        else:
            fee_int = fee_token_obj.from_decimal(fee)
        nft_tx = await self.build_transfer(to, 1, nft, 0, nonce, valid_from, valid_until)
        fee_tx = await self.build_transfer(self.address(), 0, fee_token_obj, fee_int, nonce + 1, valid_from, valid_until)
        batch = [
            TransactionWithSignature(nft_tx[0], nft_tx[1]),
            TransactionWithSignature(fee_tx[0], fee_tx[1])
        ]
        return await self.send_txs_batch(batch)

    async def get_order(self, token_sell: TokenLike, token_buy: TokenLike,
                        ratio: Fraction, ratio_type: RatioType, amount: Decimal,
                        recipient: Optional[str] = None,
                        nonce: Optional[int] = None,
                        valid_from=DEFAULT_VALID_FROM,
                        valid_until=DEFAULT_VALID_UNTIL) -> Order:
        """Build and doubly-sign a swap Order selling `amount` of `token_sell`."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        token_sell_obj = await self.resolve_token(token_sell)
        token_buy_obj = await self.resolve_token(token_buy)
        recipient = recipient or self.address()
        if ratio_type == RatioType.token:
            ratio = token_ratio_to_wei_ratio(ratio, token_sell_obj, token_buy_obj)
        account_id = await self.get_account_id()
        order = Order(account_id=account_id, recipient=recipient,
                      token_sell=token_sell_obj,
                      token_buy=token_buy_obj,
                      ratio=ratio,
                      amount=token_sell_obj.from_decimal(amount),
                      nonce=nonce,
                      valid_from=valid_from,
                      valid_until=valid_until)
        order.eth_signature = self.eth_signer.sign_tx(order)
        order.signature = self.zk_signer.sign_tx(order)
        return order

    async def get_limit_order(self, token_sell: TokenLike, token_buy: TokenLike,
                              ratio: Fraction, ratio_type: RatioType,
                              recipient: Optional[str] = None,
                              valid_from=DEFAULT_VALID_FROM,
                              valid_until=DEFAULT_VALID_UNTIL):
        """A zero-amount order; the swap submitter supplies the amounts.

        Fix: the validity window is passed by keyword. Positionally,
        `valid_from` used to bind to get_order's `nonce` parameter and
        `valid_until` to its `valid_from`.
        """
        return await self.get_order(token_sell, token_buy, ratio, ratio_type, Decimal(0), recipient,
                                    valid_from=valid_from, valid_until=valid_until)

    # This function takes as a parameter the integer amounts/fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_swap(self, orders: Tuple[Order, Order], fee_token: Token,
                         amounts: Tuple[int, int], fee: int, nonce: Optional[int] = None):
        """Build and doubly-sign a Swap submitting two matched orders."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        swap = Swap(
            orders=orders, fee_token=fee_token, amounts=amounts, fee=fee, nonce=nonce,
            submitter_id=account_id, submitter_address=self.address()
        )
        eth_signature = self.eth_signer.sign_tx(swap)
        swap.signature = self.zk_signer.sign_tx(swap)
        return swap, eth_signature

    async def swap(self, orders: Tuple[Order, Order], fee_token: TokenLike,
                   amounts: Optional[Tuple[Decimal, Decimal]] = None, fee: Optional[Decimal] = None):
        """Submit a swap of two signed orders; amounts default to the orders'
        own amounts and must be given explicitly for limit (zero) orders."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        fee_token_obj = await self.resolve_token(fee_token)
        if fee is None:
            fee_obj = await self.zk_provider.get_transaction_fee(FeeTxType.swap, self.address(), fee_token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = fee_token_obj.from_decimal(fee)
        if amounts is None:
            amounts_int = (orders[0].amount, orders[1].amount)
            if amounts_int[0] == 0 or amounts_int[1] == 0:
                raise AmountsMissing("in this case you must specify amounts explicitly")
        else:
            amounts_int = (
                orders[0].token_sell.from_decimal(amounts[0]),
                orders[1].token_sell.from_decimal(amounts[1])
            )
        swap, eth_signature = await self.build_swap(orders, fee_token_obj, amounts_int, fee_int, nonce)
        eth_signatures = [eth_signature, swap.orders[0].eth_signature, swap.orders[1].eth_signature]
        return await self.send_signed_transaction(swap, eth_signatures)

    # This function takes as a parameter the integer amount/fee of
    # lowest token denominations (wei, satoshi, etc.)
    async def build_withdraw(self, eth_address: str, amount: int, token: Token,
                             fee: int,
                             nonce: Optional[int] = None,
                             valid_from=DEFAULT_VALID_FROM,
                             valid_until=DEFAULT_VALID_UNTIL):
        """Build and doubly-sign a Withdraw transaction."""
        if nonce is None:
            nonce = await self.zk_provider.get_account_nonce(self.address())
        account_id = await self.get_account_id()
        withdraw = Withdraw(account_id=account_id, from_address=self.address(),
                            to_address=eth_address,
                            amount=amount, fee=fee,
                            nonce=nonce,
                            valid_from=valid_from,
                            valid_until=valid_until,
                            token=token)
        eth_signature = self.eth_signer.sign_tx(withdraw)
        zk_signature = self.zk_signer.sign_tx(withdraw)
        withdraw.signature = zk_signature
        return withdraw, eth_signature

    async def withdraw(self, eth_address: str, amount: Decimal, token: TokenLike,
                       fee: Optional[Decimal] = None, fast: bool = False,
                       valid_from=DEFAULT_VALID_FROM, valid_until=DEFAULT_VALID_UNTIL) -> Transaction:
        """Withdraw `amount` of `token` to an L1 address; `fast` requests
        expedited processing (at a higher fee)."""
        nonce = await self.zk_provider.get_account_nonce(self.address())
        token_obj = await self.resolve_token(token)
        if fee is None:
            tx_type = FeeTxType.fast_withdraw if fast else FeeTxType.withdraw
            fee_obj = await self.zk_provider.get_transaction_fee(tx_type, eth_address, token_obj.id)
            fee_int = fee_obj.total_fee
        else:
            fee_int = token_obj.from_decimal(fee)
        amount_int = token_obj.from_decimal(amount)
        withdraw, eth_signature = await self.build_withdraw(eth_address, amount_int, token_obj, fee_int, nonce,
                                                            valid_from, valid_until)
        return await self.send_signed_transaction(withdraw, eth_signature, fast)

    async def get_balance(self, token: TokenLike, type: str):
        """Return the committed or verified balance of `token` (0 if absent)."""
        account_state = await self.get_account_state()
        token_obj = await self.resolve_token(token)
        if type == "committed":
            token_balance = account_state.committed.balances.get(token_obj.symbol)
        else:
            token_balance = account_state.verified.balances.get(token_obj.symbol)
        if token_balance is None:
            token_balance = 0
        return token_balance

    async def get_account_state(self):
        """Fetch the full account state from the zkSync server."""
        return await self.zk_provider.get_state(self.address())

    async def is_signing_key_set(self) -> bool:
        """True when the committed account pubkey hash matches our zk signer."""
        account_state = await self.get_account_state()
        signer_pub_key_hash = self.zk_signer.pubkey_hash_str()
        return account_state.id is not None and \
               account_state.committed.pub_key_hash == signer_pub_key_hash

    async def resolve_token(self, token: TokenLike) -> Token:
        """Resolve a token reference, refreshing the cached token list once."""
        resolved_token = self.tokens.find(token)
        if resolved_token is not None:
            return resolved_token
        self.tokens = await self.zk_provider.get_tokens()
        resolved_token = self.tokens.find(token)
        if resolved_token is None:
            raise TokenNotFoundError
        return resolved_token

    async def enable_2fa(self) -> bool:
        """Enable two-factor authentication for the account."""
        mil_seconds = int(time.time() * 1000)
        msg = get_toggle_message(True, mil_seconds)
        eth_sig = self.eth_signer.sign(msg.encode())
        account_id = await self.get_account_id()
        toggle = Toggle2FA(True,
                           account_id,
                           mil_seconds,
                           eth_sig,
                           None
                           )
        return await self.zk_provider.toggle_2fa(toggle)

    async def disable_2fa(self, pub_key_hash: Optional[str]) -> bool:
        """Disable 2FA, optionally scoping the change to one pubkey hash."""
        mil_seconds = int(time.time() * 1000)
        if pub_key_hash is None:
            msg = get_toggle_message(False, mil_seconds)
        else:
            msg = get_toggle_message_with_pub(False, mil_seconds, pub_key_hash)
        eth_sig = self.eth_signer.sign(msg.encode())
        account_id = await self.get_account_id()
        toggle = Toggle2FA(False,
                           account_id,
                           mil_seconds,
                           eth_sig,
                           pub_key_hash)
        return await self.zk_provider.toggle_2fa(toggle)

    async def disable_2fa_with_pub_key(self):
        """Disable 2FA scoped to this wallet's own zkSync pubkey hash."""
        pub_key_hash = self.zk_signer.pubkey_hash_str()
        return await self.disable_2fa(pub_key_hash)
from typing import Any, Dict, Optional
from enum import Enum
from decimal import Decimal
from zksync_sdk.types.transactions import Token
from pydantic import BaseModel
def to_camel(string: str) -> str:
    """Convert snake_case to camelCase, e.g. 'pub_key_hash' -> 'pubKeyHash'."""
    head, *tail = string.split('_')
    return head.lower() + ''.join(part.title() for part in tail)
class Balance(BaseModel):
    """A pending deposit amount and the block at which it becomes usable."""
    amount: int
    expected_accept_block: int

    class Config:
        # Server JSON uses camelCase field names.
        alias_generator = to_camel
class Depositing(BaseModel):
    """Balances currently being deposited, keyed by token symbol."""
    balances: Dict[str, Balance]
class NFT(Token):
    """An NFT as reported by the server; extends Token with minting metadata."""
    creator_id: int
    content_hash: str
    creator_address: str
    serial_id: int
    # NFTs are indivisible, so they carry no decimal places.
    decimals = 0

    def decimal_amount(self, amount: int) -> Decimal:
        # Overrides Token: with zero decimals the amount is already a whole count.
        return Decimal(amount)

    class Config:
        alias_generator = to_camel
class State(BaseModel):
    """One side (committed or verified) of an account's state."""
    nonce: int
    pub_key_hash: str
    # Token symbol -> balance in the token's lowest denomination.
    balances: Dict[str, int]
    nfts: Dict[str, NFT]
    minted_nfts: Dict[str, NFT]

    class Config:
        alias_generator = to_camel
class AccountTypes(str, Enum):
    """Kind of key management an account uses."""
    # Trailing commas removed: they made each assigned value a 1-tuple, which
    # only resolved to the intended string via Enum's tuple-argument
    # unpacking against the str mixin constructor.
    OWNED = "Owned"
    CREATE2 = "CREATE2"
    NO_2FA = "No2FA"
class AccountState(BaseModel):
    """Full account state as returned by the server's account-info query."""
    address: str
    # None until the account is created on L2 by its first transaction.
    id: Optional[int]
    account_type: Optional[AccountTypes]
    depositing: Optional[Depositing]
    committed: Optional[State]
    verified: Optional[State]

    class Config:
        alias_generator = to_camel

    def get_nonce(self) -> int:
        """Next usable nonce, read from the committed state."""
        assert self.committed is not None, "`get_nonce` needs `committed` to be set"
        return self.committed.nonce
class Fee(BaseModel):
    """Fee quote for a transaction; `total_fee` is the amount to pay."""
    fee_type: Any
    gas_tx_amount: int
    gas_price_wei: int
    gas_fee: int
    zkp_fee: int
    total_fee: int

    class Config:
        alias_generator = to_camel
class ContractAddress(BaseModel):
    """Addresses of the main zkSync and governance contracts on L1."""
    main_contract: str
    gov_contract: str

    class Config:
        alias_generator = to_camel
class BlockInfo(BaseModel):
    """Commit/verify status of an L2 block."""
    block_number: int
    committed: bool
    verified: bool

    class Config:
        alias_generator = to_camel
class EthOpInfo(BaseModel):
    """Status of a priority (L1-initiated) operation."""
    executed: bool
    block: BlockInfo
class TransactionDetails(BaseModel):
    """Execution status of an L2 transaction."""
    executed: bool
    success: bool
    # Populated only when `success` is False.
    fail_reason: Optional[str] = None
    block: BlockInfo

    class Config:
        alias_generator = to_camel
import abc
from dataclasses import dataclass
from decimal import Decimal
from fractions import Fraction
from enum import Enum, IntEnum
from typing import List, Optional, Union, Tuple
from pydantic import BaseModel
from zksync_sdk.lib import ZkSyncLibrary
from zksync_sdk.serializers import (int_to_bytes, packed_amount_checked, packed_fee_checked,
serialize_account_id,
serialize_address, serialize_content_hash,
serialize_nonce, serialize_timestamp,
serialize_token_id, serialize_ratio_part)
from zksync_sdk.types.signatures import TxEthSignature, TxSignature
from zksync_sdk.types.auth_types import ChangePubKeyCREATE2, ChangePubKeyEcdsa
# Address used for the "ETH" pseudo-token on zkSync.
DEFAULT_TOKEN_ADDRESS = "0x0000000000000000000000000000000000000000"
# A token may be referenced by numeric id (int) or by symbol/address (str).
TokenLike = Union[str, int]
# Serialization version byte prepended to signed transaction payloads.
TRANSACTION_VERSION = 0x01
class EncodedTxType(IntEnum):
    """zkSync L2 transaction type codes used in the binary serialization."""
    CHANGE_PUB_KEY = 7
    TRANSFER = 5
    WITHDRAW = 3
    FORCED_EXIT = 8
    SWAP = 11
    MINT_NFT = 9
    WITHDRAW_NFT = 10
class RatioType(Enum):
    """How the numbers in a swap ratio are interpreted."""
    # Trailing comma removed: on a plain Enum it made `wei`'s value the tuple
    # ('Wei',) while `token`'s value was the string 'Token' — inconsistent.
    # ratio that represents the lowest denominations of tokens (wei for ETH, satoshi for BTC etc.)
    wei = 'Wei'
    # ratio that represents tokens themselves
    token = 'Token'
class Token(BaseModel):
    """An ERC-20 token (or ETH) known to zkSync."""
    address: str
    id: int
    symbol: str
    decimals: int

    @classmethod
    def eth(cls):
        """The ETH pseudo-token (id 0, zero address, 18 decimals)."""
        return cls(id=0,
                   address=DEFAULT_TOKEN_ADDRESS,
                   symbol="ETH",
                   decimals=18)

    def is_eth(self) -> bool:
        return self.symbol == "ETH" and self.address == DEFAULT_TOKEN_ADDRESS

    def decimal_amount(self, amount: int) -> Decimal:
        """Convert a lowest-denomination integer amount into a decimal amount."""
        return Decimal(amount).scaleb(-self.decimals)

    def from_decimal(self, amount: Decimal) -> int:
        """Convert a decimal token amount into its lowest-denomination integer."""
        return int(amount.scaleb(self.decimals))

    def decimal_str_amount(self, amount: int) -> str:
        """Render an integer amount as a decimal string with a guaranteed '.'."""
        d = self.decimal_amount(amount)
        # Creates a string with `self.decimals` numbers after decimal point.
        # Prevents scientific notation (string values like '1E-8').
        # Prevents integral numbers having no decimal point in the string representation.
        d_str = f"{d:.{self.decimals}f}"
        d_str = d_str.rstrip("0")
        if d_str[-1] == ".":
            return d_str + "0"
        if '.' not in d_str:
            return d_str + '.0'
        return d_str
def token_ratio_to_wei_ratio(token_ratio: Fraction, token_sell: Token, token_buy: Token) -> Fraction:
    """Convert a token-denominated swap ratio into a wei-denominated one.

    Scales the numerator by the sell token's decimals and the denominator by
    the buy token's, keeping the fraction unreduced so the exact num/den pair
    is preserved for serialization.
    """
    num = token_sell.from_decimal(Decimal(token_ratio.numerator))
    den = token_buy.from_decimal(Decimal(token_ratio.denominator))
    # NOTE(review): `_normalize` is a private Fraction parameter that was
    # removed in Python 3.12 — confirm the supported interpreter range.
    return Fraction(num, den, _normalize = False)
class Tokens(BaseModel):
    """Lookup helpers over the token list returned by the zkSync provider."""
    tokens: List[Token]

    def find_by_address(self, address: str) -> Optional[Token]:
        """First token with the given L1 address, or None."""
        return next((token for token in self.tokens if token.address == address), None)

    def find_by_id(self, token_id: int) -> Optional[Token]:
        """First token with the given numeric id, or None."""
        return next((token for token in self.tokens if token.id == token_id), None)

    def find_by_symbol(self, symbol: str) -> Optional[Token]:
        """First token with the given symbol, or None."""
        return next((token for token in self.tokens if token.symbol == symbol), None)

    def find(self, token: TokenLike) -> Optional[Token]:
        """Resolve a TokenLike: ints by id; strings by address, then symbol."""
        if isinstance(token, int):
            return self.find_by_id(token)
        if isinstance(token, str):
            by_address = self.find_by_address(address=token)
            if by_address is not None:
                return by_address
            return self.find_by_symbol(symbol=token)
        return None
class EncodedTx(abc.ABC):
    """Interface for zkSync L2 transactions that can be signed and submitted."""

    @abc.abstractmethod
    def encoded_message(self) -> bytes:
        """Binary serialization that gets signed with the zkSync (L2) key."""
        pass

    @abc.abstractmethod
    def human_readable_message(self) -> str:
        """Human-readable text that gets signed with the Ethereum (L1) key."""
        pass

    @abc.abstractmethod
    def tx_type(self) -> int:
        """Numeric transaction type code (see EncodedTxType)."""
        pass

    @abc.abstractmethod
    def dict(self):
        """JSON-RPC payload representation of the transaction."""
        pass

    @abc.abstractmethod
    def batch_message_part(self) -> str:
        """This transaction's contribution to a batch signing message."""
        pass
@dataclass
class ChangePubKey(EncodedTx):
    """Sets (or rotates) the zkSync signing key registered for an account."""
    account_id: int
    account: str
    new_pk_hash: str
    token: Token
    fee: int
    nonce: int
    valid_from: int
    valid_until: int
    eth_auth_data: Union[ChangePubKeyCREATE2, ChangePubKeyEcdsa, None] = None
    eth_signature: Optional[TxEthSignature] = None
    signature: Optional[TxSignature] = None

    def human_readable_message(self) -> str:
        message = f"Set signing key: {self.new_pk_hash.replace('sync:', '').lower()}"
        if self.fee:
            # NOTE(review): renders the raw integer fee, while batch_message_part
            # below formats it with token.decimal_str_amount — confirm which
            # form the server expects before unifying.
            message += f"\nFee: {self.fee} {self.token.symbol}"
        return message

    def batch_message_part(self) -> str:
        message = f"Set signing key: {self.new_pk_hash.replace('sync:', '').lower()}\n"
        if self.fee:
            message += f"Fee: {self.token.decimal_str_amount(self.fee)} {self.token.symbol}\n"
        return message

    def encoded_message(self) -> bytes:
        """Binary payload signed with the zkSync key (type byte is 0xff - type)."""
        return b"".join([
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.account_id),
            serialize_address(self.account),
            serialize_address(self.new_pk_hash),
            serialize_token_id(self.token.id),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until)
        ])

    def get_eth_tx_bytes(self) -> bytes:
        """Bytes the Ethereum key signs to authorize the new pubkey hash."""
        data = b"".join([
            serialize_address(self.new_pk_hash),
            serialize_nonce(self.nonce),
            serialize_account_id(self.account_id),
        ])
        if self.eth_auth_data is not None:
            data += self.eth_auth_data.encode_message()
        return data

    def get_auth_data(self, signature: str):
        """Auth payload for the RPC call, shaped by the chosen auth method."""
        if self.eth_auth_data is None:
            return {"type": "Onchain"}
        elif isinstance(self.eth_auth_data, ChangePubKeyEcdsa):
            return self.eth_auth_data.dict(signature)
        elif isinstance(self.eth_auth_data, ChangePubKeyCREATE2):
            return self.eth_auth_data.dict()

    def dict(self):
        return {
            "type": "ChangePubKey",
            "accountId": self.account_id,
            "account": self.account,
            "newPkHash": self.new_pk_hash,
            # Fixed: camelCase "feeToken", consistent with every other key in
            # this payload ("fee_token" was the odd one out).
            "feeToken": self.token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "ethAuthData": self.eth_auth_data,
            "signature": self.signature.dict(),
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
        }

    @classmethod
    def tx_type(cls):
        return EncodedTxType.CHANGE_PUB_KEY
@dataclass
class Transfer(EncodedTx):
    """Transfer operation: moves funds between two zkSync (L2) accounts."""
    account_id: int
    from_address: str
    to_address: str
    token: Token
    amount: int
    fee: int
    nonce: int
    valid_from: int
    valid_until: int
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.TRANSFER

    def batch_message_part(self) -> str:
        """Batch-message fragment; zero amount/fee lines are omitted."""
        parts = []
        if self.amount != 0:
            parts.append(
                f"Transfer {self.token.decimal_str_amount(self.amount)} {self.token.symbol} to: {self.to_address.lower()}\n")
        if self.fee != 0:
            parts.append(f"Fee: {self.token.decimal_str_amount(self.fee)} {self.token.symbol}\n")
        return "".join(parts)

    def human_readable_message(self) -> str:
        """L1-signable text: the batch fragment plus a trailing nonce line."""
        return self.batch_message_part() + f"Nonce: {self.nonce}"

    def encoded_message(self) -> bytes:
        """L2-signable binary form (type tag is 0xff minus the tx type)."""
        fields = (
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.account_id),
            serialize_address(self.from_address),
            serialize_address(self.to_address),
            serialize_token_id(self.token.id),
            packed_amount_checked(self.amount),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until),
        )
        return b"".join(fields)

    def dict(self):
        """JSON-RPC wire representation."""
        return {
            "type": "Transfer",
            "accountId": self.account_id,
            "from": self.from_address,
            "to": self.to_address,
            "token": self.token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "signature": self.signature.dict(),
            "amount": str(self.amount),
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
        }
@dataclass
class Withdraw(EncodedTx):
    """Withdraw operation: moves funds from a zkSync account to an L1 address."""
    account_id: int
    from_address: str
    to_address: str
    amount: int
    fee: int
    nonce: int
    valid_from: int
    valid_until: int
    token: Token
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.WITHDRAW

    def human_readable_message(self) -> str:
        """L1-signable text; zero amount/fee lines are omitted."""
        msg = ""
        if self.amount != 0:
            msg += f"Withdraw {self.token.decimal_str_amount(self.amount)} {self.token.symbol} to: {self.to_address.lower()}\n"
        if self.fee != 0:
            msg += f"Fee: {self.token.decimal_str_amount(self.fee)} {self.token.symbol}\n"
        return msg + f"Nonce: {self.nonce}"

    def batch_message_part(self) -> str:
        """Same text as human_readable_message but without the nonce line."""
        msg = ""
        if self.amount != 0:
            msg += f"Withdraw {self.token.decimal_str_amount(self.amount)} {self.token.symbol} to: {self.to_address.lower()}\n"
        if self.fee != 0:
            msg += f"Fee: {self.token.decimal_str_amount(self.fee)} {self.token.symbol}\n"
        return msg

    def encoded_message(self) -> bytes:
        """L2-signable binary form.

        Unlike Transfer, the amount is serialized as a full 16-byte integer
        (int_to_bytes length=16) rather than a packed amount.
        """
        return b"".join([
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.account_id),
            serialize_address(self.from_address),
            serialize_address(self.to_address),
            serialize_token_id(self.token.id),
            int_to_bytes(self.amount, length=16),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until)
        ])

    def dict(self):
        """JSON-RPC wire representation."""
        return {
            "type": "Withdraw",
            "accountId": self.account_id,
            "from": self.from_address,
            "to": self.to_address,
            "token": self.token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "signature": self.signature.dict(),
            "amount": str(self.amount),
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
        }
@dataclass
class ForcedExit(EncodedTx):
    """ForcedExit operation: pushes a target account's balance back to L1."""
    initiator_account_id: int
    target: str
    token: Token
    fee: int
    nonce: int
    valid_from: int
    valid_until: int
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.FORCED_EXIT

    def encoded_message(self) -> bytes:
        """L2-signable binary form (type tag is 0xff minus the tx type)."""
        pieces = [
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.initiator_account_id),
            serialize_address(self.target),
            serialize_token_id(self.token.id),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until),
        ]
        return b"".join(pieces)

    def batch_message_part(self) -> str:
        """Batch-message fragment (no nonce line)."""
        return (f"ForcedExit {self.token.symbol} to: {self.target.lower()}\n"
                f"Fee: {self.token.decimal_str_amount(self.fee)} {self.token.symbol}\n")

    def human_readable_message(self) -> str:
        """L1-signable text: the batch fragment plus a trailing nonce line."""
        return self.batch_message_part() + f"Nonce: {self.nonce}"

    def dict(self):
        """JSON-RPC wire representation."""
        return {
            "type": "ForcedExit",
            "initiatorAccountId": self.initiator_account_id,
            "target": self.target,
            "token": self.token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "signature": self.signature.dict(),
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
        }
@dataclass
class Order(EncodedTx):
    """A swap order: sell `token_sell` for `token_buy` at `ratio`.

    Orders are not submitted standalone; two of them are embedded in a Swap.
    """
    account_id: int
    recipient: str
    nonce: int
    token_sell: Token
    token_buy: Token
    amount: int
    ratio: Fraction
    valid_from: int
    valid_until: int
    signature: Optional[TxSignature] = None
    eth_signature: Optional[TxEthSignature] = None

    @classmethod
    def from_json(cls, json: dict, tokens: Tokens):
        """Build an Order from its JSON form, resolving token ids via `tokens`.

        Raises:
            ValueError: if a token id cannot be resolved.
        """
        def from_optional(value: Optional[Token]) -> Token:
            # Narrow Optional[Token] -> Token, failing loudly on unknown ids.
            if value is None:
                raise ValueError(f"Token None value should not be used")
            return value
        token_sell_id: int = json["tokenSell"]
        token_buy_id: int = json["tokenBuy"]
        token_sell = from_optional(tokens.find_by_id(token_sell_id))
        token_buy = from_optional(tokens.find_by_id(token_buy_id))
        ratio = json["ratio"]
        # INFO: could be None
        signature = json.get("signature")
        if signature is not None:
            signature = TxSignature.from_dict(signature)
        ether_sig = json.get("ethSignature")
        if ether_sig is not None:
            ether_sig = TxEthSignature.from_dict(ether_sig)
        return cls(
            account_id=json["accountId"],
            recipient=json["recipient"],
            nonce=json["nonce"],
            token_sell=token_sell,
            token_buy=token_buy,
            amount=int(json["amount"]),
            # NOTE(review): `_normalize` is a private Fraction parameter
            # (removed in Python 3.12) used to keep the ratio unreduced.
            ratio=Fraction(int(ratio[0]), int(ratio[1]), _normalize=False),
            valid_from=json["validFrom"],
            valid_until=json["validUntil"],
            signature=signature,
            eth_signature=ether_sig
        )

    def tx_type(self) -> int:
        # Orders have no standalone tx type; they are serialized inside a Swap.
        raise NotImplementedError

    def msg_type(self) -> int:
        """Message-type byte used in place of a tx type: ASCII 'o' (0x6f)."""
        return b'o'[0]

    def encoded_message(self) -> bytes:
        """L2-signable binary form; prefixed with msg_type, not 0xff - tx_type."""
        return b"".join([
            int_to_bytes(self.msg_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.account_id),
            serialize_address(self.recipient),
            serialize_nonce(self.nonce),
            serialize_token_id(self.token_sell.id),
            serialize_token_id(self.token_buy.id),
            serialize_ratio_part(self.ratio.numerator),
            serialize_ratio_part(self.ratio.denominator),
            packed_amount_checked(self.amount),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until)
        ])

    def batch_message_part(self) -> str:
        # NOTE(review): returns None (orders do not contribute batch text) —
        # other EncodedTx types return str; confirm callers never use this.
        pass

    def human_readable_message(self) -> str:
        """L1-signable text; amount == 0 denotes a limit order."""
        if self.amount == 0:
            header = f'Limit order for {self.token_sell.symbol} -> {self.token_buy.symbol}'
        else:
            amount = self.token_sell.decimal_str_amount(self.amount)
            header = f'Order for {amount} {self.token_sell.symbol} -> {self.token_buy.symbol}'
        message = '\n'.join([
            header,
            f'Ratio: {self.ratio.numerator}:{self.ratio.denominator}',
            f'Address: {self.recipient.lower()}',
            f'Nonce: {self.nonce}'
        ])
        return message

    def dict(self):
        """JSON-RPC wire representation (ratio as a pair of decimal strings)."""
        return {
            "accountId": self.account_id,
            "recipient": self.recipient,
            "nonce": self.nonce,
            "tokenSell": self.token_sell.id,
            "tokenBuy": self.token_buy.id,
            "amount": str(self.amount),
            "ratio": (str(self.ratio.numerator), str(self.ratio.denominator)),
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
            "signature": self.signature.dict() if self.signature else None,
            "ethSignature": self.eth_signature.dict() if self.eth_signature else None,
        }

    def is_valid_eth_signature(self, signer_address: str) -> bool:
        """True if the stored eth signature was produced by `signer_address`."""
        address = self._recover_signer_address()
        return signer_address == address

    def _recover_signer_address(self) -> str:
        """
        INFO: particular case implementation with dependency from Web3 interface
        if it's needed to generelize for all Obejct type(Transfer, Swap etc) move to etherium_signer module
        with interface & implemnetation for Web3 as Validator class( Visitor pattern )
        """
        from web3.auto import w3
        from eth_account.messages import encode_defunct
        msg = self.human_readable_message().encode()
        encoded_message = encode_defunct(msg)
        def get_sig(opt_value: Optional[TxEthSignature]) -> TxEthSignature:
            # Narrow Optional -> value; raises if no eth signature is present.
            if opt_value is None:
                raise ValueError()
            return opt_value
        # INFO: remove prefix 0x
        eth_sig = get_sig(self.eth_signature)
        sig = bytes.fromhex(eth_sig.signature[2:])
        return w3.eth.account.recover_message(encoded_message, signature=sig)
@dataclass
class Swap(EncodedTx):
    """Swap operation: atomically settles two matched Orders."""
    submitter_id: int
    submitter_address: str
    amounts: Tuple[int, int]
    orders: Tuple[Order, Order]
    fee_token: Token
    fee: int
    nonce: int
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.SWAP

    def human_readable_message(self) -> str:
        """L1-signable text: optional fee line plus the nonce."""
        if self.fee != 0:
            message = f'Swap fee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\n'
        else:
            message = ''
        message += f'Nonce: {self.nonce}'
        return message

    def batch_message_part(self) -> str:
        """Batch-message fragment: fee line only (no nonce)."""
        if self.fee != 0:
            message = f'Swap fee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\n'
        else:
            message = ''
        return message

    def encoded_message(self) -> bytes:
        """L2-signable binary form.

        The two orders are serialized, concatenated, and hashed via the native
        library (hash_orders); only the hash goes into the swap message.
        """
        order_bytes = b''.join([
            self.orders[0].encoded_message(),
            self.orders[1].encoded_message(),
        ])
        return b"".join([
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.submitter_id),
            serialize_address(self.submitter_address),
            serialize_nonce(self.nonce),
            ZkSyncLibrary().hash_orders(order_bytes),
            serialize_token_id(self.fee_token.id),
            packed_fee_checked(self.fee),
            packed_amount_checked(self.amounts[0]),
            packed_amount_checked(self.amounts[1]),
        ])

    def dict(self):
        """JSON-RPC wire representation (orders serialized inline)."""
        return {
            "type": "Swap",
            "submitterId": self.submitter_id,
            "submitterAddress": self.submitter_address,
            "feeToken": self.fee_token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "signature": self.signature.dict() if self.signature else None,
            "amounts": (str(self.amounts[0]), str(self.amounts[1])),
            "orders": (self.orders[0].dict(), self.orders[1].dict())
        }
@dataclass
class MintNFT(EncodedTx):
    """MintNFT operation: mints an NFT with `content_hash` to `recipient`."""
    creator_id: int
    creator_address: str
    content_hash: str
    recipient: str
    fee: int
    fee_token: Token
    nonce: int
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.MINT_NFT

    def encoded_message(self) -> bytes:
        """L2-signable binary form (no validity-window timestamps)."""
        return b"".join([
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.creator_id),
            serialize_address(self.creator_address),
            serialize_content_hash(self.content_hash),
            serialize_address(self.recipient),
            serialize_token_id(self.fee_token.id),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
        ])

    def human_readable_message(self) -> str:
        """L1-signable text with fee and nonce lines."""
        message = f"MintNFT {self.content_hash} for: {self.recipient.lower()}\n" \
                  f"Fee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\nNonce: {self.nonce}"
        return message

    def batch_message_part(self) -> str:
        """Batch-message fragment (no nonce line)."""
        message = f"MintNFT {self.content_hash} for: {self.recipient.lower()}\n" \
                  f"Fee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\n"
        return message

    def dict(self):
        """JSON-RPC wire representation."""
        return {
            "type": "MintNFT",
            "creatorId": self.creator_id,
            "creatorAddress": self.creator_address,
            "contentHash": self.content_hash,
            "recipient": self.recipient,
            "feeToken": self.fee_token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "signature": self.signature.dict(),
        }
@dataclass
class WithdrawNFT(EncodedTx):
    """WithdrawNFT operation: moves an NFT from zkSync to an L1 address."""
    account_id: int
    from_address: str
    to_address: str
    fee_token: Token
    fee: int
    nonce: int
    valid_from: int
    valid_until: int
    token_id: int
    signature: Optional[TxSignature] = None

    def tx_type(self) -> int:
        return EncodedTxType.WITHDRAW_NFT

    def encoded_message(self) -> bytes:
        """L2-signable binary form (type tag is 0xff minus the tx type)."""
        return b"".join([
            int_to_bytes(0xff - self.tx_type(), 1),
            int_to_bytes(TRANSACTION_VERSION, 1),
            serialize_account_id(self.account_id),
            serialize_address(self.from_address),
            serialize_address(self.to_address),
            serialize_token_id(self.token_id),
            serialize_token_id(self.fee_token.id),
            packed_fee_checked(self.fee),
            serialize_nonce(self.nonce),
            serialize_timestamp(self.valid_from),
            serialize_timestamp(self.valid_until)
        ])

    def human_readable_message(self) -> str:
        """L1-signable text with fee and nonce lines."""
        message = f"WithdrawNFT {self.token_id} to: {self.to_address.lower()}\nFee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\nNonce: {self.nonce}"
        return message

    def batch_message_part(self) -> str:
        """Batch-message fragment (no nonce line)."""
        message = f"WithdrawNFT {self.token_id} to: {self.to_address.lower()}\n" \
                  f"Fee: {self.fee_token.decimal_str_amount(self.fee)} {self.fee_token.symbol}\n"
        return message

    def dict(self):
        """JSON-RPC wire representation."""
        return {
            "type": "WithdrawNFT",
            "accountId": self.account_id,
            "from": self.from_address,
            "to": self.to_address,
            "feeToken": self.fee_token.id,
            "fee": str(self.fee),
            "nonce": self.nonce,
            "validFrom": self.valid_from,
            "validUntil": self.valid_until,
            "token": self.token_id,
            "signature": self.signature.dict(),
        }
class EncodedTxValidator:
    """Verifies the zkSync (L2) signature attached to an encoded transaction."""

    def __init__(self, library: ZkSyncLibrary):
        self.library = library

    def is_valid_signature(self, tx):
        """Return True if tx carries a signature valid for its encoded message."""
        signature: TxSignature = tx.signature
        if signature is None:
            return False
        return self.library.is_valid_signature(
            tx.encoded_message(),
            bytes.fromhex(signature.public_key),
            bytes.fromhex(signature.signature),
        )
@dataclass
class TransactionWithSignature:
    """An encoded L2 transaction paired with its Ethereum signature."""
    tx: EncodedTx
    signature: TxEthSignature

    def dict(self):
        """Serialize to the JSON-RPC wire shape."""
        return dict(tx=self.tx.dict(), signature=self.signature.dict())
@dataclass()
class TransactionWithOptionalSignature:
    """An encoded L2 transaction whose Ethereum signature may be absent,
    a single signature, or a list of per-signer (possibly None) signatures.
    """
    tx: EncodedTx
    signature: Union[None, TxEthSignature, List[TxSignature]] = None

    def dict(self):
        """Serialize to the JSON-RPC wire shape.

        None signatures (both the whole field and individual list entries)
        are serialized as JSON null.
        """
        if isinstance(self.signature, list):
            signature = [sig.dict() if sig is not None else None for sig in self.signature]
        elif self.signature is not None:
            signature = self.signature.dict()
        else:
            signature = None
        return {
            'signature': signature,
            'tx': self.tx.dict()
        }
from enum import Enum
from dataclasses import dataclass
from typing import Optional
from zksync_sdk.types.signatures import TxEthSignature
class ChangePubKeyTypes(Enum):
    """Authorization flavours for a ChangePubKey operation."""
    onchain = "Onchain"
    ecdsa = "ECDSA"
    create2 = "CREATE2"
@dataclass
class ChangePubKeyEcdsa:
    """ECDSA authorization data for ChangePubKey; defaults to a zero batch hash."""
    batch_hash: bytes = b"\x00" * 32

    def encode_message(self) -> bytes:
        """Bytes appended to the ChangePubKey Ethereum message."""
        return self.batch_hash

    def dict(self, signature: str):
        """Wire representation with the Ethereum signature filled in."""
        return {
            "type": "ECDSA",
            "ethSignature": signature,
            "batchHash": f"0x{self.batch_hash.hex()}",
        }
@dataclass
class ChangePubKeyCREATE2:
    """CREATE2 authorization data for ChangePubKey (smart-contract wallets)."""
    creator_address: str
    salt_arg: bytes
    code_hash: bytes

    def encode_message(self) -> bytes:
        """Bytes appended to the ChangePubKey Ethereum message: the salt argument."""
        return self.salt_arg

    def dict(self):
        """Wire representation (creator_address is not serialized here)."""
        return {
            "type": "CREATE2",
            "saltArg": f"0x{self.salt_arg.hex()}",
            "codeHash": f"0x{self.code_hash.hex()}",
        }
@dataclass
class Toggle2FA:
    """Request payload for enabling/disabling two-factor authentication."""
    enable: bool
    account_id: int
    time_stamp_milliseconds: int
    signature: TxEthSignature
    pub_key_hash: Optional[str]

    def dict(self):
        """Wire representation; pubKeyHash is included only when present."""
        payload = {
            "enable": self.enable,
            "accountId": self.account_id,
            "timestamp": self.time_stamp_milliseconds,
            "signature": self.signature.dict(),
        }
        if self.pub_key_hash is not None:
            payload["pubKeyHash"] = self.pub_key_hash
        return payload
def get_toggle_message(require_2fa: bool, time_stamp: int) -> str:
    """Build the exact text a user signs to opt into or out of 2FA."""
    if require_2fa:
        lines = [
            "By signing this message, you are opting into Two-factor Authentication protection by the zkSync Server.",
            "Transactions now require signatures by both your L1 and L2 private key.",
        ]
    else:
        lines = [
            "You are opting out of Two-factor Authentication protection by the zkSync Server.",
            "Transactions now only require signatures by your L2 private key.",
            "BY SIGNING THIS MESSAGE, YOU ARE TRUSTING YOUR WALLET CLIENT TO KEEP YOUR L2 PRIVATE KEY SAFE!",
        ]
    lines.append(f"Timestamp: {time_stamp}")
    return "\n".join(lines)
def get_toggle_message_with_pub(require_2fa: bool, time_stamp: int, pub_key_hash: str) -> str:
    """Toggle-2FA message variant that also pins the account's pubkey hash."""
    base = get_toggle_message(require_2fa, time_stamp)
    return base + f"\nPubKeyHash: {pub_key_hash}"
from dataclasses import asdict
from decimal import Decimal
from typing import List, Optional, Union
from web3 import Web3
from zksync_sdk.types import (AccountState, ContractAddress, EncodedTx, EthOpInfo, Fee, Token,
TokenLike, Tokens, TransactionDetails, TransactionWithSignature,
TransactionWithOptionalSignature,
TxEthSignature, Toggle2FA, )
from zksync_sdk.zksync_provider.error import AccountDoesNotExist
from zksync_sdk.zksync_provider.interface import ZkSyncProviderInterface
from zksync_sdk.zksync_provider.types import FeeTxType
from zksync_sdk.zksync_provider.transaction import Transaction
__all__ = ['ZkSyncProviderV01']
class ZkSyncProviderV01(ZkSyncProviderInterface):
    """JSON-RPC (API v0.1) implementation of the zkSync provider interface.

    All methods are thin async wrappers around `self.provider.request`.
    """

    async def submit_tx(self, tx: EncodedTx, signature: Union[Optional[TxEthSignature], List[Optional[TxEthSignature]]],
                        fast_processing: bool = False) -> Transaction:
        """Submit one signed transaction; returns a pollable Transaction handle."""
        if isinstance(signature, List):
            signature = [s.dict() if s is not None else None for s in signature]
        else:
            signature = signature.dict() if signature is not None else None
        trans_id = await self.provider.request("tx_submit",
                                               [tx.dict(), signature, fast_processing])
        return Transaction.build_transaction(self, trans_id)

    async def get_tokens(self) -> Tokens:
        """Fetch the server's token registry (addresses checksummed via Web3)."""
        data = await self.provider.request("tokens", None)
        tokens = [Token(address=Web3.toChecksumAddress(token['address']),
                        id=token['id'],
                        symbol=token['symbol'],
                        decimals=token['decimals']
                        ) for token in data.values()]
        return Tokens(tokens=tokens)

    async def submit_txs_batch(self, transactions: List[TransactionWithSignature],
                               signatures: Optional[
                                   Union[List[TxEthSignature], TxEthSignature]
                               ] = None) -> List[Transaction]:
        """Submit a batch of pre-signed transactions; a single signature is
        normalized to a one-element list."""
        if signatures is None:
            signatures = []
        elif isinstance(signatures, TxEthSignature):
            signatures = [signatures]
        transactions = [tr.dict() for tr in transactions]
        signatures = [sig.dict() for sig in signatures]
        trans_ids: List[str] = await self.provider.request("submit_txs_batch", [transactions, signatures])
        return [Transaction.build_transaction(self, trans_id) for trans_id in trans_ids]

    async def submit_batch_builder_txs_batch(self, transactions: List[TransactionWithOptionalSignature],
                                             signature: TxEthSignature) -> List[Transaction]:
        """Submit a BatchBuilder-produced batch under one Ethereum signature."""
        trans = [tr.dict() for tr in transactions]
        params = [trans, signature.dict()]
        trans_ids: List[str] = await self.provider.request("submit_txs_batch", params)
        return [Transaction.build_transaction(self, trans_id) for trans_id in trans_ids]

    async def get_contract_address(self) -> ContractAddress:
        """Fetch the zkSync L1 contract addresses."""
        data = await self.provider.request("contract_address", None)
        return ContractAddress(**data)

    async def get_state(self, address: str) -> AccountState:
        """Fetch the account state for `address`.

        Normalizes the server's dict-shaped No2FA accountType to the plain
        string 'No2FA' before model construction.

        Raises:
            AccountDoesNotExist: if the server has no record of the address.
        """
        data = await self.provider.request("account_info", [address])
        if data is None:
            raise AccountDoesNotExist(address=address)
        if "accountType" in data and isinstance(data["accountType"], dict) and \
                list(data["accountType"].keys())[0] == 'No2FA':
            data["accountType"] = 'No2FA'
        return AccountState(**data)

    async def get_confirmations_for_eth_op_amount(self) -> int:
        """Number of L1 confirmations the server requires for priority ops."""
        return await self.provider.request("get_confirmations_for_eth_op_amount", None)

    async def get_account_nonce(self, address: str) -> int:
        """Committed nonce for `address` (derived from full account state)."""
        state = await self.get_state(address)
        return state.get_nonce()

    async def get_tx_receipt(self, address: str) -> TransactionDetails:
        """Fetch a transaction receipt by tx hash.

        NOTE(review): returns the raw JSON-RPC dict despite the
        TransactionDetails annotation — callers index it like a dict; confirm.
        """
        return await self.provider.request("tx_info", [address])

    async def get_eth_tx_for_withdrawal(self, withdrawal_hash: str) -> str:
        """L1 tx hash that executed the given L2 withdrawal."""
        return await self.provider.request("get_eth_tx_for_withdrawal", [withdrawal_hash])

    async def get_priority_op_status(self, serial_id: int) -> EthOpInfo:
        """Status of a priority (L1-initiated) operation."""
        data = await self.provider.request("ethop_info", [serial_id])
        return EthOpInfo(**data)

    # Please note that the batch fee returns the fee of the transaction in int and not in Fee
    # This is a server-side feature
    async def get_transactions_batch_fee(self, tx_types: List[FeeTxType], addresses: List[str],
                                         token_like) -> int:
        """Total fee (wei-level int) for a batch of transactions."""
        data = await self.provider.request('get_txs_batch_fee_in_wei',
                                           [[tx_type.value for tx_type in tx_types],
                                            addresses, token_like])
        return int(data["totalFee"])

    async def get_transaction_fee(self, tx_type: FeeTxType, address: str,
                                  token_like: TokenLike) -> Fee:
        """Fee quote for a single transaction of the given type."""
        data = await self.provider.request('get_tx_fee', [tx_type.value, address, token_like])
        return Fee(**data)

    async def get_token_price(self, token: Token) -> Decimal:
        """Current USD price of `token` as reported by the server."""
        data = await self.provider.request('get_token_price', [token.symbol])
        return Decimal(data)

    async def toggle_2fa(self, toggle2fa: Toggle2FA) -> bool:
        """Enable/disable 2FA; True only when the server reports success."""
        data = await self.provider.request('toggle_2fa', [toggle2fa.dict()])
        return 'success' in data and data['success']
import asyncio
from dataclasses import dataclass
from enum import Enum, auto
from typing import Optional
class TransactionStatus(Enum):
    """Terminal/progress states a submitted zkSync transaction can reach."""
    FAILED = auto()
    COMMITTED = auto()
    VERIFIED = auto()
@dataclass
class TransactionResult:
    """Outcome of polling a transaction: status plus failure reason (empty on success)."""
    status: TransactionStatus
    fail_reason: str
class Transaction:
    """Handle for a submitted zkSync transaction; polls the provider for status."""

    @classmethod
    def build_transaction(cls, provider, transaction_id: str):
        """Alternate constructor kept for API compatibility."""
        transaction = cls(provider, transaction_id)
        return transaction

    def __init__(self, provider, transaction_hash: str):
        self.provider = provider
        self.transaction_hash = transaction_hash

    @staticmethod
    def _timeout_result(attempts: Optional[int], attempts_timeout: Optional[int]) -> TransactionResult:
        # Returned when polling gives up before the desired status is reached.
        # Fix: the original message concatenated "...attempts {n}" and
        # "and timeout ..." without a separator, rendering e.g. "5and timeout".
        return TransactionResult(
            TransactionStatus.FAILED,
            f"Transaction has not been executed with amount of attempts {attempts} "
            f"and timeout {attempts_timeout}")

    async def await_committed(self, attempts: Optional[int] = None, attempts_timeout: Optional[int] = None) \
            -> TransactionResult:
        """Poll until the tx fails or is committed, or attempts run out.

        attempts=None polls forever; attempts_timeout is the sleep between
        polls in milliseconds (None means no sleep).
        """
        status = self._timeout_result(attempts, attempts_timeout)
        while True:
            if attempts is not None:
                if attempts <= 0:
                    return status
            transaction_details = await self.provider.get_tx_receipt(self.transaction_hash)
            if attempts is not None:
                attempts -= 1
            if "failReason" in transaction_details and transaction_details["failReason"] is not None:
                return TransactionResult(TransactionStatus.FAILED, transaction_details['failReason'])
            if "block" in transaction_details:
                block = transaction_details["block"]
                if block is not None and "committed" in block and block["committed"]:
                    return TransactionResult(TransactionStatus.COMMITTED, "")
            if attempts_timeout is not None:
                await asyncio.sleep(attempts_timeout / 1000)

    async def await_verified(self, attempts: Optional[int] = None, attempts_timeout: Optional[int] = None):
        """Poll until the tx fails or is verified, or attempts run out.

        If the tx reaches COMMITTED but attempts are then exhausted before
        verification, the COMMITTED intermediate result is returned.
        """
        intermediate_status = self._timeout_result(attempts, attempts_timeout)
        while True:
            if attempts is not None:
                if attempts <= 0:
                    return intermediate_status
            transaction_details = await self.provider.get_tx_receipt(self.transaction_hash)
            if attempts is not None:
                attempts -= 1
            if "failReason" in transaction_details and transaction_details["failReason"] is not None:
                return TransactionResult(TransactionStatus.FAILED, transaction_details['failReason'])
            if "block" in transaction_details:
                block = transaction_details["block"]
                if block is not None:
                    if "committed" in block and block["committed"]:
                        intermediate_status = TransactionResult(TransactionStatus.COMMITTED, "")
                    if "verified" in block and block["verified"]:
                        return TransactionResult(TransactionStatus.VERIFIED, "")
            if attempts_timeout is not None:
                await asyncio.sleep(attempts_timeout / 1000)
from dataclasses import dataclass
from zksync_sdk.zksync_provider import FeeTxType
from zksync_sdk.wallet import Wallet, DEFAULT_VALID_FROM, DEFAULT_VALID_UNTIL, AmountsMissing
from zksync_sdk.types import (ChangePubKey, ChangePubKeyCREATE2, ChangePubKeyEcdsa,
ChangePubKeyTypes, EncodedTx, ForcedExit, TokenLike,
TransactionWithOptionalSignature,
Transfer, TxEthSignature,
Withdraw, MintNFT, WithdrawNFT, NFT, EncodedTxType, Order, Swap)
from typing import List, Union, Tuple, Optional
from decimal import Decimal
@dataclass
class BatchResult:
    """Output of BatchBuilder.build: wire-ready txs, the batch's single
    Ethereum signature, and the aggregated fees."""
    transactions: list
    signature: TxEthSignature
    total_fees: dict
class BatchBuilder:
IS_ENCODED_TRANSACTION = "is_encoded_trx"
ENCODED_TRANSACTION_TYPE = "internal_type"
TRANSACTIONS_ENTRY = "transactions"
SIGNATURE_ENTRY = "signature"
@classmethod
def from_wallet(cls, wallet: Wallet, nonce: int, txs: Optional[List[EncodedTx]] = None):
obj = BatchBuilder(wallet, nonce, txs)
return obj
    def __init__(self, wallet: Wallet, nonce: int, txs: Optional[List[EncodedTx]] = None):
        """Start a batch for `wallet` beginning at `nonce`.

        Pre-encoded transactions passed in `txs` are stored in dict form and
        flagged so the _process_* methods re-wrap them instead of re-signing.
        """
        if txs is None:
            txs = []
        self.wallet = wallet
        self.nonce = nonce
        self.batch_nonce = nonce
        self.transactions: List[dict] = []
        for tx in txs:
            value = tx.dict()
            # Mark as already encoded/signed; processing takes the pass-through branch.
            value[self.IS_ENCODED_TRANSACTION] = True
            value[self.ENCODED_TRANSACTION_TYPE] = tx.tx_type()
            self.transactions.append(value)
    async def build(self) -> BatchResult:
        """Process every queued transaction and sign the combined batch
        message with the wallet's Ethereum key.

        Raises:
            RuntimeError: if no transactions were added to the batch.
        """
        if not self.transactions:
            raise RuntimeError("Transaction batch cannot be empty")
        res = await self._process_transactions()
        trans = res["trans"]
        signature = self.wallet.eth_signer.sign(res["msg"].encode())
        return BatchResult(trans, signature, res["total_fee"])
    def add_withdraw(self,
                     eth_address: str,
                     token: TokenLike,
                     amount: Decimal,
                     fee: Optional[Decimal] = None,
                     valid_from=DEFAULT_VALID_FROM,
                     valid_until=DEFAULT_VALID_UNTIL
                     ):
        """Queue a Withdraw to `eth_address`; fee=None means fetch a quote
        later during processing."""
        withdraw = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.WITHDRAW,
            self.IS_ENCODED_TRANSACTION: False,
            "eth_address": eth_address,
            "token": token,
            "amount": amount,
            "fee": fee,
            "valid_from": valid_from,
            "valid_until": valid_until
        }
        self.transactions.append(withdraw)
    def add_mint_nft(self,
                     content_hash: str,
                     recipient: str,
                     fee_token: TokenLike,
                     fee: Optional[Decimal] = None
                     ):
        """Queue a MintNFT of `content_hash` to `recipient`; fee=None means
        fetch a quote later during processing."""
        mint_nft = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.MINT_NFT,
            self.IS_ENCODED_TRANSACTION: False,
            "content_hash": content_hash,
            "recipient": recipient,
            "fee_token": fee_token,
            "fee": fee
        }
        self.transactions.append(mint_nft)
    def add_withdraw_nft(self,
                         to_address: str,
                         nft_token: NFT,
                         fee_token: TokenLike,
                         fee: Optional[Decimal] = None,
                         valid_from=DEFAULT_VALID_FROM,
                         valid_until=DEFAULT_VALID_UNTIL
                         ):
        """Queue a WithdrawNFT of `nft_token` to `to_address`; fee=None means
        fetch a quote later during processing."""
        withdraw_nft = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.WITHDRAW_NFT,
            self.IS_ENCODED_TRANSACTION: False,
            "to_address": to_address,
            "nft_token": nft_token,
            "fee_token": fee_token,
            "fee": fee,
            "valid_from": valid_from,
            "valid_until": valid_until
        }
        self.transactions.append(withdraw_nft)
    def add_swap(self,
                 orders: Tuple[Order, Order],
                 fee_token: TokenLike,
                 amounts: Optional[Tuple[Decimal, Decimal]] = None,
                 fee: Optional[Decimal] = None
                 ):
        """Queue a Swap of two matched orders.

        Raises:
            AmountsMissing: if `amounts` is omitted while either order is a
                limit order (amount == 0), since amounts cannot be inferred.
        """
        if amounts is None:
            if orders[0].amount == 0 or orders[1].amount == 0:
                raise AmountsMissing("in this case you must specify amounts explicitly")
        swap = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.SWAP,
            self.IS_ENCODED_TRANSACTION: False,
            "orders": orders,
            "fee_token": fee_token,
            "amounts": amounts,
            "fee": fee
        }
        self.transactions.append(swap)
    def add_transfer(self,
                     address_to: str,
                     token: TokenLike,
                     amount: Decimal,
                     fee: Optional[Decimal] = None,
                     valid_from=DEFAULT_VALID_FROM,
                     valid_until=DEFAULT_VALID_UNTIL
                     ):
        """Queue a Transfer from this wallet to `address_to`; fee=None means
        fetch a quote later during processing."""
        transfer = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.TRANSFER,
            self.IS_ENCODED_TRANSACTION: False,
            "from_address": self.wallet.address(),
            "to_address": address_to.lower(),
            "token": token,
            "amount": amount,
            "fee": fee,
            "valid_from": valid_from,
            "valid_until": valid_until
        }
        self.transactions.append(transfer)
    def add_change_pub_key(self,
                           fee_token: TokenLike,
                           eth_auth_type: Union[ChangePubKeyCREATE2, ChangePubKeyEcdsa, None],
                           fee: Optional[Decimal] = None,
                           valid_from=DEFAULT_VALID_FROM,
                           valid_until=DEFAULT_VALID_UNTIL
                           ):
        """Queue a ChangePubKey setting the wallet's zkSync signing key.

        eth_auth_type=None selects onchain authorization; fee=None means fetch
        a quote (matching the auth flavour) later during processing.
        """
        new_pubkey_hash = self.wallet.zk_signer.pubkey_hash_str()
        change_pub_key = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.CHANGE_PUB_KEY,
            self.IS_ENCODED_TRANSACTION: False,
            "account": self.wallet.address(),
            "new_pk_hash": new_pubkey_hash,
            "fee_token": fee_token,
            "fee": fee,
            "eth_auth_type": eth_auth_type,
            "valid_from": valid_from,
            "valid_until": valid_until
        }
        self.transactions.append(change_pub_key)
    def add_force_exit(self,
                       target_address: str,
                       token: TokenLike,
                       fee: Optional[Decimal] = None,
                       valid_from=DEFAULT_VALID_FROM,
                       valid_until=DEFAULT_VALID_UNTIL
                       ):
        """Queue a ForcedExit for `target_address`; fee=None means fetch a
        quote later during processing."""
        forced_exit = {
            self.ENCODED_TRANSACTION_TYPE: EncodedTxType.FORCED_EXIT,
            self.IS_ENCODED_TRANSACTION: False,
            "target": target_address,
            "token": token,
            "fee": fee,
            "valid_from": valid_from,
            "valid_until": valid_until
        }
        self.transactions.append(forced_exit)
    async def _process_change_pub_key(self, obj: dict):
        """Turn a queued change-pub-key dict into a signed ChangePubKey.

        Fresh entries get a fee quote (when fee is None), an L1 auth signature
        and an L2 signature; pre-encoded entries are re-wrapped with the next
        batch nonce. Consumes one nonce either way.
        """
        if not obj[self.IS_ENCODED_TRANSACTION]:
            account_id = await self.wallet.get_account_id()
            token = await self.wallet.resolve_token(obj["fee_token"])
            # Map the concrete auth-data object to the fee-type enum.
            eth_auth_type = obj["eth_auth_type"]
            if isinstance(eth_auth_type, ChangePubKeyEcdsa):
                eth_auth_type = ChangePubKeyTypes.ecdsa
            elif isinstance(eth_auth_type, ChangePubKeyCREATE2):
                eth_auth_type = ChangePubKeyTypes.create2
            else:
                eth_auth_type = ChangePubKeyTypes.onchain
            fee = obj["fee"]
            if fee is None:
                # Fee depends on the auth flavour; query the matching fee type.
                if eth_auth_type == ChangePubKeyTypes.ecdsa:
                    fee = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.change_pub_key_ecdsa,
                                                                            self.wallet.address(),
                                                                            token.id)
                elif eth_auth_type == ChangePubKeyTypes.onchain:
                    fee = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.change_pub_key_onchain,
                                                                            self.wallet.address(),
                                                                            token.id)
                elif eth_auth_type == ChangePubKeyTypes.create2:
                    fee = await self.wallet.zk_provider.get_transaction_fee(
                        FeeTxType.change_pub_key_create2,
                        self.wallet.address(),
                        token.id)
                fee = fee.total_fee
            else:
                # Caller gave a decimal fee; convert to wei-level integer.
                fee = token.from_decimal(fee)
            change_pub_key = ChangePubKey(
                account=obj["account"],
                account_id=account_id,
                new_pk_hash=obj["new_pk_hash"],
                token=token,
                fee=fee,
                nonce=self.nonce,
                valid_from=obj["valid_from"],
                valid_until=obj["valid_until"],
                eth_auth_data=obj["eth_auth_type"]
            )
            # Sign the L1 authorization message, then attach the L2 signature.
            eth_signature = self.wallet.eth_signer.sign(change_pub_key.get_eth_tx_bytes())
            eth_auth_data = change_pub_key.get_auth_data(eth_signature.signature)
            change_pub_key.eth_auth_data = eth_auth_data
            zk_signature = self.wallet.zk_signer.sign_tx(change_pub_key)
            change_pub_key.signature = zk_signature
        else:
            # Pre-encoded entry: rebuild from wire keys, only the nonce changes.
            change_pub_key = ChangePubKey(
                account_id=obj["accountId"],
                account=obj["account"],
                new_pk_hash=obj["newPkHash"],
                token=obj["fee_token"],
                fee=obj["fee"],
                nonce=self.nonce,
                eth_auth_data=obj["ethAuthData"],
                signature=obj["signature"],
                valid_from=obj["validFrom"],
                valid_until=obj["validUntil"]
            )
        self.nonce += 1
        return change_pub_key
async def _process_withdraw(self, obj: dict):
    """Build a signed ``Withdraw`` transaction from one queued description.

    Non-encoded inputs get their token resolved, a missing fee priced by the
    provider, and an L2 signature attached.  Pre-encoded inputs are copied
    field-by-field, with only the nonce replaced by the builder's running
    nonce.  The running nonce is advanced in either case.
    """
    if obj[self.IS_ENCODED_TRANSACTION]:
        # Pre-encoded payload: camelCase keys, signature already present.
        resolved = await self.wallet.resolve_token(obj["token"])
        withdraw = Withdraw(account_id=obj["accountId"],
                            from_address=obj["from"],
                            to_address=obj["to"],
                            amount=obj["amount"],
                            fee=obj["fee"],
                            nonce=self.nonce,
                            valid_from=obj["validFrom"],
                            valid_until=obj["validUntil"],
                            token=resolved,
                            signature=obj["signature"])
    else:
        owner_id = await self.wallet.get_account_id()
        resolved = await self.wallet.resolve_token(obj["token"])
        supplied_fee = obj["fee"]
        if supplied_fee is None:
            # Ask the provider for the current withdraw fee.
            quote = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.withdraw,
                                                                      obj["eth_address"],
                                                                      resolved.id)
            tx_fee = quote.total_fee
        else:
            tx_fee = resolved.from_decimal(supplied_fee)
        withdraw = Withdraw(account_id=owner_id,
                            from_address=self.wallet.address(),
                            to_address=obj["eth_address"],
                            amount=resolved.from_decimal(obj["amount"]),
                            fee=tx_fee,
                            nonce=self.nonce,
                            valid_from=obj["valid_from"],
                            valid_until=obj["valid_until"],
                            token=resolved)
        withdraw.signature = self.wallet.zk_signer.sign_tx(withdraw)
    self.nonce += 1
    return withdraw
async def _process_transfer(self, obj):
    """Build a signed ``Transfer`` transaction from one queued description.

    Non-encoded inputs get their token resolved, addresses normalized to
    lowercase, a missing fee priced by the provider, and an L2 signature
    attached.  Pre-encoded inputs are copied as-is except for the nonce,
    which is replaced by the builder's running nonce.
    """
    if obj[self.IS_ENCODED_TRANSACTION]:
        # Pre-encoded payload: camelCase keys, signature already present.
        resolved = await self.wallet.resolve_token(obj["token"])
        transfer = Transfer(
            account_id=obj["accountId"],
            from_address=obj["from"],
            to_address=obj["to"],
            token=resolved,
            amount=obj["amount"],
            fee=obj["fee"],
            nonce=self.nonce,
            valid_from=obj["validFrom"],
            valid_until=obj["validUntil"],
            signature=obj["signature"]
        )
    else:
        owner_id = await self.wallet.get_account_id()
        resolved = await self.wallet.resolve_token(obj["token"])
        supplied_fee = obj["fee"]
        if supplied_fee is None:
            # Ask the provider for the current transfer fee to the recipient.
            quote = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.transfer,
                                                                      obj["to_address"],
                                                                      resolved.id)
            tx_fee = quote.total_fee
        else:
            tx_fee = resolved.from_decimal(supplied_fee)
        transfer = Transfer(
            account_id=owner_id,
            from_address=obj["from_address"].lower(),
            to_address=obj["to_address"].lower(),
            token=resolved,
            amount=resolved.from_decimal(obj["amount"]),
            fee=tx_fee,
            nonce=self.nonce,
            valid_from=obj["valid_from"],
            valid_until=obj["valid_until"]
        )
        transfer.signature = self.wallet.zk_signer.sign_tx(transfer)
    self.nonce += 1
    return transfer
async def _process_forced_exit(self, obj):
    """Build a signed ``ForcedExit`` transaction from one queued description.

    Fixes two defects in the fee lookup of the non-encoded path: the fee was
    requested with ``FeeTxType.transfer`` although forced exits are priced
    like withdrawals, and the address was read from a non-existent
    ``to_address`` key (forced-exit descriptions carry ``target``), which
    raised ``KeyError`` whenever the caller left the fee unset.
    """
    if not obj[self.IS_ENCODED_TRANSACTION]:
        account_id = await self.wallet.get_account_id()
        token = await self.wallet.resolve_token(obj["token"])
        fee = obj["fee"]
        if fee is None:
            # Forced exits are charged like withdrawals; price the fee for
            # the exit target address.
            fee = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.withdraw,
                                                                    obj["target"],
                                                                    token.id)
            fee = fee.total_fee
        else:
            # Explicit fee is given in decimal units; convert to base units.
            fee = token.from_decimal(fee)
        forced_exit = ForcedExit(initiator_account_id=account_id,
                                 target=obj["target"],
                                 fee=fee,
                                 nonce=self.nonce,
                                 valid_from=obj["valid_from"],
                                 valid_until=obj["valid_until"],
                                 token=token)
        forced_exit.signature = self.wallet.zk_signer.sign_tx(forced_exit)
    else:
        token = await self.wallet.resolve_token(obj["token"])
        # NOTE(review): the other encoded branches read camelCase keys
        # ("validFrom"/"validUntil"); this one reads snake_case — confirm the
        # encoded forced-exit payload really uses snake_case here.
        forced_exit = ForcedExit(initiator_account_id=obj["initiatorAccountId"],
                                 target=obj["target"],
                                 fee=obj["fee"],
                                 nonce=self.nonce,
                                 valid_from=obj["valid_from"],
                                 valid_until=obj["valid_until"],
                                 token=token,
                                 signature=obj["signature"])
    self.nonce += 1
    return forced_exit
async def _process_swap(self, obj):
    """Build a signed ``Swap`` transaction from one queued description.

    Non-encoded inputs resolve the fee token, price a missing fee via the
    provider, derive the swapped amounts (either from the two orders or by
    converting the caller-supplied decimal amounts), and sign with the L2
    signer.  Pre-encoded inputs are copied as-is except for the nonce.
    """
    if obj[self.IS_ENCODED_TRANSACTION]:
        resolved_fee_token = await self.wallet.resolve_token(obj["feeToken"])
        swap = Swap(
            orders=obj["orders"],
            fee_token=resolved_fee_token,
            amounts=obj["amounts"],
            fee=obj["fee"],
            nonce=self.nonce,
            submitter_id=obj["submitterId"],
            submitter_address=obj["submitterAddress"],
            signature=obj["signature"]
        )
    else:
        resolved_fee_token = await self.wallet.resolve_token(obj["fee_token"])
        supplied_fee = obj["fee"]
        if supplied_fee is None:
            quote = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.swap,
                                                                      self.wallet.address(),
                                                                      resolved_fee_token.id)
            tx_fee = quote.total_fee
        else:
            tx_fee = resolved_fee_token.from_decimal(supplied_fee)
        order_pair = obj["orders"]
        raw_amounts = obj["amounts"]
        if raw_amounts is None:
            # Fall back to the amounts carried by the two orders themselves.
            swap_amounts = (order_pair[0].amount, order_pair[1].amount)
        else:
            # Caller supplied decimal amounts; convert via each order's sell token.
            swap_amounts = (
                order_pair[0].token_sell.from_decimal(raw_amounts[0]),
                order_pair[1].token_sell.from_decimal(raw_amounts[1])
            )
        submitter_id = await self.wallet.get_account_id()
        swap = Swap(
            orders=order_pair,
            fee_token=resolved_fee_token,
            amounts=swap_amounts,
            fee=tx_fee,
            nonce=self.nonce,
            submitter_id=submitter_id,
            submitter_address=self.wallet.address()
        )
        swap.signature = self.wallet.zk_signer.sign_tx(swap)
    self.nonce += 1
    return swap
async def _process_mint_nft(self, obj):
    """Build a signed ``MintNFT`` transaction from one queued description.

    Non-encoded inputs resolve the fee token, price a missing fee via the
    provider for the NFT recipient, and attach the L2 signature.  Pre-encoded
    inputs are copied as-is except for the nonce, which is replaced by the
    builder's running nonce.
    """
    if obj[self.IS_ENCODED_TRANSACTION]:
        resolved_fee_token = await self.wallet.resolve_token(obj["fee_token"])
        mint_nft = MintNFT(creator_id=obj["creatorId"],
                           creator_address=obj["creatorAddress"],
                           content_hash=obj["content_hash"],
                           recipient=obj["recipient"],
                           fee=obj["fee"],
                           fee_token=resolved_fee_token,
                           nonce=self.nonce,
                           signature=obj["signature"])
    else:
        resolved_fee_token = await self.wallet.resolve_token(obj["fee_token"])
        creator_id = await self.wallet.get_account_id()
        supplied_fee = obj["fee"]
        if supplied_fee is None:
            quote = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.mint_nft,
                                                                      obj["recipient"],
                                                                      resolved_fee_token.id)
            tx_fee = quote.total_fee
        else:
            tx_fee = resolved_fee_token.from_decimal(supplied_fee)
        mint_nft = MintNFT(creator_id=creator_id,
                           creator_address=self.wallet.address(),
                           content_hash=obj["content_hash"],
                           recipient=obj["recipient"],
                           fee=tx_fee,
                           fee_token=resolved_fee_token,
                           nonce=self.nonce)
        mint_nft.signature = self.wallet.zk_signer.sign_tx(mint_nft)
    self.nonce += 1
    return mint_nft
async def _process_withdraw_nft(self, obj):
    """Build a signed ``WithdrawNFT`` transaction from one queued description.

    Non-encoded inputs resolve the fee token, price a missing fee via the
    provider for the destination address, and attach the L2 signature.
    Pre-encoded inputs are copied field-by-field.
    """
    if obj[self.IS_ENCODED_TRANSACTION]:
        resolved_fee_token = await self.wallet.resolve_token(obj["feeToken"])
        # NOTE(review): this branch keeps the nonce supplied in the payload
        # rather than the builder's running nonce used by every other encoded
        # branch — confirm that asymmetry is intentional.
        withdraw_nft = WithdrawNFT(
            account_id=obj["accountId"],
            from_address=obj["from"],
            to_address=obj["to"],
            fee_token=resolved_fee_token,
            fee=obj["fee"],
            nonce=obj["nonce"],
            valid_from=obj["validFrom"],
            valid_until=obj["validUntil"],
            token_id=obj["nft_token"].id,
            signature=obj["signature"]
        )
    else:
        resolved_fee_token = await self.wallet.resolve_token(obj["fee_token"])
        supplied_fee = obj["fee"]
        if supplied_fee is None:
            quote = await self.wallet.zk_provider.get_transaction_fee(FeeTxType.withdraw_nft,
                                                                      obj["to_address"],
                                                                      resolved_fee_token.id
                                                                      )
            tx_fee = quote.total_fee
        else:
            tx_fee = resolved_fee_token.from_decimal(supplied_fee)
        owner_id = await self.wallet.get_account_id()
        withdraw_nft = WithdrawNFT(
            account_id=owner_id,
            from_address=self.wallet.address(),
            to_address=obj["to_address"],
            fee_token=resolved_fee_token,
            fee=tx_fee,
            nonce=self.nonce,
            valid_from=obj["valid_from"],
            valid_until=obj["valid_until"],
            token_id=obj["nft_token"].id
        )
        withdraw_nft.signature = self.wallet.zk_signer.sign_tx(withdraw_nft)
    self.nonce += 1
    return withdraw_nft
async def _process_transactions(self):
    """Materialize every queued transaction description into signed txs.

    For each entry in ``self.transactions`` the matching ``_process_*``
    coroutine is invoked, its fee is accumulated per token symbol (in
    decimal units), its batch-message fragment is appended, and the tx is
    wrapped for submission.  SWAP transactions additionally carry the two
    order signatures.  Returns ``dict(trans=..., msg=..., total_fee=...)``.

    Refactored: the seven copy-pasted dispatch branches are collapsed into
    a dispatch table plus a fee-accumulation helper; behavior is unchanged.
    """
    message = ""
    trs = []
    total_fee_map = dict()
    # Maps each encoded-tx type to (processor coroutine, name of the
    # attribute on the produced tx that holds the fee token).
    handlers = {
        EncodedTxType.CHANGE_PUB_KEY: (self._process_change_pub_key, "token"),
        EncodedTxType.TRANSFER: (self._process_transfer, "token"),
        EncodedTxType.WITHDRAW: (self._process_withdraw, "token"),
        EncodedTxType.FORCED_EXIT: (self._process_forced_exit, "token"),
        EncodedTxType.MINT_NFT: (self._process_mint_nft, "fee_token"),
        EncodedTxType.WITHDRAW_NFT: (self._process_withdraw_nft, "fee_token"),
        EncodedTxType.SWAP: (self._process_swap, "fee_token"),
    }
    for obj in self.transactions:
        tx_type = obj[self.ENCODED_TRANSACTION_TYPE]
        handler = handlers.get(tx_type)
        if handler is None:
            raise TypeError("_process_transactions is trying to process unimplemented type")
        processor, token_attr = handler
        tr = await processor(obj)
        self._accumulate_fee(total_fee_map, getattr(tr, token_attr), tr.fee)
        message += tr.batch_message_part()
        if tx_type == EncodedTxType.SWAP:
            # Swap carries the two order signatures alongside the tx itself.
            trs.append(TransactionWithOptionalSignature(tr, [None,
                                                             tr.orders[0].eth_signature,
                                                             tr.orders[1].eth_signature]
                                                        ))
        else:
            trs.append(TransactionWithOptionalSignature(tr))
    message += f"Nonce: {self.batch_nonce}"
    result = dict(trans=trs, msg=message, total_fee=total_fee_map)
    return result

@staticmethod
def _accumulate_fee(total_fee_map, token, fee):
    """Add *fee* (in *token* base units) to the per-symbol decimal totals."""
    prev_value = total_fee_map.get(token.symbol, Decimal(0))
    total_fee_map[token.symbol] = token.decimal_amount(fee) + prev_value
from abc import ABC, abstractmethod
from decimal import Decimal
from typing import List, Optional, Union
from zksync_sdk.transport import JsonRPCTransport
from zksync_sdk.types import (AccountState, ContractAddress, EncodedTx, EthOpInfo, Fee, Token,
TokenLike, Tokens, TransactionDetails, TransactionWithSignature,
TransactionWithOptionalSignature,
TxEthSignature, Toggle2FA, )
from zksync_sdk.zksync_provider.types import FeeTxType
from zksync_sdk.zksync_provider.transaction import Transaction
__all__ = ['ZkSyncProviderInterface']
class ZkSyncProviderInterface(ABC):
    """Abstract interface of a zkSync JSON-RPC provider.

    Concrete implementations wrap a :class:`JsonRPCTransport` and expose the
    zkSync server API as async methods.  Every method here raises
    ``NotImplementedError`` and must be overridden.
    """

    def __init__(self, provider: JsonRPCTransport):
        # Transport used to issue the underlying JSON-RPC calls.
        self.provider = provider

    @abstractmethod
    async def submit_tx(self, tx: EncodedTx, signature: Union[Optional[TxEthSignature], List[Optional[TxEthSignature]]],
                        fast_processing: bool = False) -> Transaction:
        """Submit a single signed transaction; returns a trackable Transaction."""
        raise NotImplementedError

    @abstractmethod
    async def get_tokens(self) -> Tokens:
        """Return the set of tokens known to the zkSync server."""
        raise NotImplementedError

    @abstractmethod
    async def submit_txs_batch(self, transactions: List[TransactionWithSignature],
                               signatures: Optional[
                                   Union[List[TxEthSignature], TxEthSignature]
                               ] = None) -> List[Transaction]:
        """Submit a batch of signed transactions, optionally with batch signature(s)."""
        raise NotImplementedError

    @abstractmethod
    async def submit_batch_builder_txs_batch(self, transactions: List[TransactionWithOptionalSignature],
                                             signature: TxEthSignature) -> List[Transaction]:
        """Submit a batch produced by the batch builder under one Ethereum signature."""
        raise NotImplementedError

    @abstractmethod
    async def get_contract_address(self) -> ContractAddress:
        """Return the addresses of the zkSync L1 contracts."""
        raise NotImplementedError

    @abstractmethod
    async def get_state(self, address: str) -> AccountState:
        """Return the account state (balances, nonce, pub key hash) for *address*."""
        raise NotImplementedError

    @abstractmethod
    async def get_confirmations_for_eth_op_amount(self) -> int:
        """Return how many L1 confirmations a priority operation needs."""
        raise NotImplementedError

    @abstractmethod
    async def get_account_nonce(self, address: str) -> int:
        """Return the committed nonce for *address*."""
        raise NotImplementedError

    @abstractmethod
    async def get_tx_receipt(self, address: str) -> TransactionDetails:
        """Return execution details for a transaction hash."""
        raise NotImplementedError

    @abstractmethod
    async def get_eth_tx_for_withdrawal(self, withdrawal_hash: str) -> str:
        """Return the L1 tx hash that executed the given withdrawal, if any."""
        raise NotImplementedError

    @abstractmethod
    async def get_priority_op_status(self, serial_id: int) -> EthOpInfo:
        """Return the status of a priority (L1-initiated) operation."""
        raise NotImplementedError

    @abstractmethod
    async def get_transactions_batch_fee(self, tx_types: List[FeeTxType], addresses: List[str],
                                         token_like: TokenLike) -> int:
        """Return the total fee for a batch of transactions paid in one token."""
        raise NotImplementedError

    @abstractmethod
    async def get_transaction_fee(self, tx_type: FeeTxType, address: str,
                                  token_like: TokenLike) -> Fee:
        """Return the fee breakdown for a single transaction type and recipient."""
        raise NotImplementedError

    @abstractmethod
    async def get_token_price(self, token: Token) -> Decimal:
        """Return the USD price of *token*."""
        raise NotImplementedError

    @abstractmethod
    async def toggle_2fa(self, toggle2fa: Toggle2FA) -> bool:
        """Enable or disable two-factor authentication; True on success."""
        raise NotImplementedError
# zkSync2 client sdk
## Contents
- [Getting started](#getting-started)
- [Provider](#provider-zksyncbuilder)
- [Account](#account)
- [Signer](#signer)
- [Transactions](#transactions)
- [Contract interfaces](#contract-interfaces)
- [Examples](#examples)
### Getting started
#### Requirements
| Tool | Required |
|-----------------|----------------|
| python | 3.8, 3.9, 3.10 |
| package manager | pip |
### how to install
```console
pip install zksync2
```
### Provider (zkSyncBuilder)
#### Design
ZkSync 2.0 is designed with the same styling as web3.<br>
It defines the zksync module based on Ethereum and extends it with zkSync-specific methods.<br>
#### How to construct
For usage, there is `ZkSyncBuilder` that returns a Web3 object with an instance of zksync module.<br>
Construction only needs the URL to the zkSync blockchain.
Example:
```python
from zksync2.module.module_builder import ZkSyncBuilder
...
web3 = ZkSyncBuilder.build("ZKSYNC_NET_URL")
```
#### Module parameters and methods
ZkSync module attributes:
| Attribute | Description |
|-----------|-----------------------------------------------------------------|
| chain_id | Returns an integer value for the currently configured "ChainId" |
| gas_price | Returns the current gas price in Wei |
ZkSync module methods:
| Method | Parameters | Return value | Description |
|------------------------------|-----------------------------------------|--------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------|
| zks_estimate_fee | zkSync Transaction | Fee structure | Gets Fee for ZkSync transaction |
| zks_main_contract | - | Address of main contract | Return address of main contract |
| zks_get_confirmed_tokens | from, limit | List[Token] | Returns all tokens in the set range by global index |
| zks_l1_chain_id | - | ChainID | Return ethereum chain ID |
| zks_get_all_account_balances | Address | Dict[str, int] | Return dictionary of token address and its value |
| zks_get_bridge_contracts | - | BridgeAddresses | Returns addresses of all bridge contracts that are interacting with L1 layer |
| eth_estimate_gas | Transaction | estimated gas | Overloaded method of eth_estimate_gas for ZkSync transaction gas estimation |
| wait_for_transaction_receipt | Tx Hash, optional timeout,poll_latency | TxReceipt | Waits for the transaction to be included into block by its hash and returns its receipt. Optional arguments are `timeout` and `poll_latency` in seconds |
| wait_finalized | Tx Hash, optional timeout, poll_latency | TxReceipt | Waits for the transaction to be finalized when finalized block occurs and it's number >= Tx block number |
### Account
An account encapsulates a private key and, derived from it, the unique user identifier in the network.<br> This unique identifier is also known as the wallet address.
#### Account construction
ZkSync2 Python SDK account is compatible with `eth_account` package
In most cases user has its private key and gets account instance by using it.
Example:
```python
from eth_account import Account
from eth_account.signers.local import LocalAccount
...
account: LocalAccount = Account.from_key("PRIVATE_KEY")
```
The base property that is used directly of account is: `Account.address`
### Signer
Signer is used to generate signature of provided transaction based on your account(your private key)<br>
This signature is added to the final EIP712 transaction for its validation
#### Signer construction
zkSync2 already has implementation of signer. For constructing the instance it needs only account and chain_id
Example:
```python
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from eth_account import Account
from zksync2.module.module_builder import ZkSyncBuilder
account = Account.from_key("PRIVATE_KEY")
zksync_web3 = ZkSyncBuilder.build("ZKSYNC_NETWORK_URL")
...
chain_id = zksync_web3.zksync.chain_id
signer = PrivateKeyEthSigner(account, chain_id)
```
#### Methods
Signer has a few methods to generate signature and verify message
| Method | Parameters | Return value | Description |
|-------------------|----------------------------------------------|-----------------------|---------------------------------------------------------------------------|
| sign_typed_data | EIP712 Structure, optional domain | Web3 py SignedMessage | Builds `SignedMessage` based on the encoded in EIP712 format Transaction |
| verify_typed_data | signature, EIP712 structure, optional domain | bool | return True if this encoded transaction is signed with provided signature |
Signer class also has the following properties:
| Attribute | Description |
|-----------|--------------------------------------------------------------------------------|
| address | Account address |
| domain    | Domain that is used to generate the signature. It depends on the chain_id of the network |
### Transactions
Basic type of ZkSync transaction is quite similar to the Web3 based one<br>
It's defined in the package: zksync2.module.request_type<br>
But for sending and signed transaction it's necessary to sign and encode it in EIP712 structure<br>
EIP712 transaction type can be found in package: zksync2.transaction.transaction712
There are transaction builders in assistance for<br>
convert ordinary transaction to EIP712 :
* TxFunctionCall
* TxCreateContract
* TxCreate2Contract
* TxWithdraw
Usage is described in the examples [section](#examples)
### Contract interfaces
There is a set of system contract that helps execute and interact with ZkSync2 network<br>
For user needs there are the following contracts:
* ZkSyncContract
* L1Bridge
* L2Bridge
* NonceHolder
* ERC20Encoder
* PrecomputeContractDeployer
* ContractEncoder
* PaymasterFlowEncoder
### ZkSyncContract
ZkSyncContract is the implementation of ZkSync main contract functionality.<br>
It's deployed on the L1 network and used like a bridge for providing functionality between L1 and L2<br>
For instance, it handles things relate to the withdrawal operation
To construct object it needs contract main address, L1 Web3 instance and L1 account<br>
Example:
```python
from web3 import Web3
from zksync2.manage_contracts.zksync_contract import ZkSyncContract
from zksync2.module.module_builder import ZkSyncBuilder
from eth_account import Account
from eth_account.signers.local import LocalAccount
zksync = ZkSyncBuilder.build('URL_TO_ZKSYNC_NETWORK')
eth_web3 = Web3(Web3.HTTPProvider('URL_TO_ETH_NETWORK'))
account: LocalAccount = Account.from_key('YOUR_PRIVATE_KEY')
zksync_contract = ZkSyncContract(zksync.zksync.zks_main_contract(),
eth_web3,
account)
```
#### NonceHolder
`NonceHolder` contract is handling the deployment nonce <br>
It's useful for precomputing the address of a contract that is going to be deployed to the network.<br>
To construct it there are need only `account` and `Web3` object with integrated zksync module
```python
from zksync2.manage_contracts.nonce_holder import NonceHolder
from eth_account import Account
from eth_account.signers.local import LocalAccount
from zksync2.module.module_builder import ZkSyncBuilder
zksync_web3 = ZkSyncBuilder.build("ZKSYNC_NETWORK_URL")
account: LocalAccount = Account.from_key("PRIVATE_KEY")
nonce_holder = NonceHolder(zksync_web3, account)
```
Methods:
| Method | Parameters | Return value | Description |
|----------------------------|------------|--------------|------------------------------------------------------------------|
| get_account_nonce | - | Nonce | returns account nonce |
| get_deployment_nonce | - | Nonce | return current deployment nonce that is going to be used |
| increment_deployment_nonce | Address | Nothing | Manually increments deployment nonce by provided account address |
#### ERC20Encoder
This is the helper for encoding ERC20 methods. It's used for transfer non-native tokens<br>
Construction needs only Web3 object with appended zksync module(ZkSyncBuilder)
It has only 1 single method: `encode_method` with arguments of function name, and it's args
Usage example you may find in [section](#examples) `Transfer funds (ERC20 tokens)`
#### PrecomputeContractDeployer
PrecomputeContractDeployer is utility contract represented as type to cover the following functionality:
* encode binary contract representation by `create` method for further deploying
* encode binary contract representation by `create2` method for further deploying
* Precompute contract address for `create` and `create2` methods
Construction: needs only web3 object with appended zksync module
Example:
```python
from zksync2.manage_contracts.precompute_contract_deployer import PrecomputeContractDeployer
from zksync2.module.module_builder import ZkSyncBuilder
zksync_web3 = ZkSyncBuilder.build("ZKSYNC_NETWORK_URL")
deployer = PrecomputeContractDeployer(zksync_web3)
```
The most functionality is hidden in the function builder helper types. See transaction [section](#transactions)
Methods:
| Method | Parameters | Return value | Description |
|----------------------------|-----------------------------------------|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| encode_create | bytecode, optional `call_data` & `salt` | HexStr | create binary representation of contract in internal deploying format.<br/> bytecode - contract binary representation, call_data is used for ctor bytecode only, salt is used to generate unique identifier of deploying contract |
| encode_create2 | bytecode, optional `call_data` & `salt` | HexStr | create binary representation of contract in internal deploying format.<br/> bytecode - contract binary representation, call_data is used for ctor bytecode only, salt is used to generate unique identifier of deploying contract |
| compute_l2_create_address | Address, Nonce | Address | Accepts address of deployer and current deploying nonce and returns address of contract that is going to be deployed by `encode_create` method |
| compute_l2_create2_address | Address, bytecode, ctor bytecode, salt | Address | Accepts address of deployer, binary representation of contract, if needed it's constructor in binary format and self. By default constructor can be b'0' value. Returns address of contract that is going to be deployed by `encode_create2` method |
### ContractEncoder
This is type that helps with encoding contract methods and constructor <br>
that are used as the data for transaction building
Example of construction:
```python
from pathlib import Path
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.module.module_builder import ZkSyncBuilder
zksync_web3 = ZkSyncBuilder.build('ZKSYNC_TEST_URL')
counter_contract = ContractEncoder.from_json(zksync_web3, Path("./Counter.json"))
```
Methods:
| Method | Parameters | Return value | Description |
|--------------------|-----------------------------------|--------------|------------------------------------------------------------------------------|
| encode_method | function name, function arguments | HexStr | encode contract function method with it's arguments in binary representation |
| encode_constructor | constructor arguments | bytes | encode constructor with arguments in binary representation |
#### PaymasterFlowEncoder
PaymasterFlowEncoder is utility contract for encoding Paymaster parameters.<br>
Constructing the contract needs only a Web3 module object. It can be Eth or ZkSync.<br>
Example:
```python
from zksync2.manage_contracts.paymaster_utils import PaymasterFlowEncoder
from zksync2.module.module_builder import ZkSyncBuilder
zksync_web3 = ZkSyncBuilder.build("ZKSYNC_NETWORK_URL")
paymaster_encoder = PaymasterFlowEncoder(zksync_web3)
```
This utility contract has 2 methods wrapped directly to python:
* encode_approval_based
* encode_general
For example and usage, please have a look into example [section](#examples)
### Examples
* [check balance](./examples/11_check_balance.py)
* [deposit funds](./examples/01_deposit.py)
* [transfer](./examples/02_transfer.py)
* [transfer erc20 tokens](./examples/03_transfer_erc20_token.py)
* [withdraw funds](./examples/09_withdrawal.py)
* [finalize withdrawal](./examples/10_finalize_withdrawal.py)
* [deploy contract, precompute address by create](./examples/04_deploy_create.py)
* [deploy contract with constructor(create method) and interact with contract](./examples/05_deploy_create_with_constructor.py)
* [deploy contract with dependent contract(create method)](./examples/06_deploy_create_with_deps.py)
* [deploy contract, precompute address by create2](./examples/07_deploy_create2.py)
* [deploy contract with dependency, precompute address by create2](./examples/08_deploy_create2_deps.py)
* [how to compile solidity contracts](./examples/README.md)
| zksync2 | /zksync2-0.5.0.tar.gz/zksync2-0.5.0/README.md | README.md |
import os
from pathlib import Path
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexAddress
from web3 import Web3
from zksync2.core.types import EthBlockParams
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.manage_contracts.nonce_holder import NonceHolder
from zksync2.manage_contracts.precompute_contract_deployer import PrecomputeContractDeployer
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxCreateContract
def generate_random_salt() -> bytes:
    """Produce a 32-byte cryptographically random salt for create2-style deployments."""
    salt = os.urandom(32)
    return salt
def deploy_contract(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path, constructor_args: dict | tuple
) -> HexAddress:
    """Deploy compiled contract with constructor on zkSync network using create() opcode

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network

    :param account:
        From which account the deployment contract tx will be made

    :param compiled_contract:
        Compiled contract source.

    :param constructor_args:
        Constructor arguments that can be provided via:
        dictionary: {"_incrementer": 2}
        tuple: tuple([2])

    :return:
        Address of deployed contract.

    """
    # Get chain id of zkSync network
    chain_id = zk_web3.zksync.chain_id

    # Signer is used to generate signature of provided transaction
    signer = PrivateKeyEthSigner(account, chain_id)

    # Get nonce of ETH address on zkSync network
    nonce = zk_web3.zksync.get_transaction_count(
        account.address, EthBlockParams.PENDING.value
    )

    # Get deployment nonce
    nonce_holder = NonceHolder(zk_web3, account)
    deployment_nonce = nonce_holder.get_deployment_nonce(account.address)

    # Precompute the address of smart contract
    # Use this if there is a case where contract address should be known before deployment
    deployer = PrecomputeContractDeployer(zk_web3)
    precomputed_address = deployer.compute_l2_create_address(account.address, deployment_nonce)

    # Get contract ABI and bytecode information
    # (ContractEncoder.from_json returns a sequence; the first entry is used here)
    incrementer_contract = ContractEncoder.from_json(zk_web3, compiled_contract)[0]

    # Encode the constructor arguments
    encoded_constructor = incrementer_contract.encode_constructor(**constructor_args)

    # Get current gas price in Wei
    gas_price = zk_web3.zksync.gas_price

    # Create deployment contract transaction
    create_contract = TxCreateContract(
        web3=zk_web3,
        chain_id=chain_id,
        nonce=nonce,
        from_=account.address,
        gas_limit=0,  # UNKNOWN AT THIS STATE,  (placeholder — replaced by the estimate below)
        gas_price=gas_price,
        bytecode=incrementer_contract.bytecode,
        call_data=encoded_constructor,
    )

    # ZkSync transaction gas estimation
    estimate_gas = zk_web3.zksync.eth_estimate_gas(create_contract.tx)
    print(f"Fee for transaction is: {Web3.from_wei(estimate_gas * gas_price, 'ether')} ETH")

    # Convert transaction to EIP-712 format
    tx_712 = create_contract.tx712(estimate_gas)

    # Sign message
    signed_message = signer.sign_typed_data(tx_712.to_eip712_struct())

    # Encode signed message
    msg = tx_712.encode(signed_message)

    # Deploy contract
    tx_hash = zk_web3.zksync.send_raw_transaction(msg)

    # Wait for deployment contract transaction to be included in a block
    tx_receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )

    print(f"Tx status: {tx_receipt['status']}")
    contract_address = tx_receipt["contractAddress"]

    print(f"contract address: {contract_address}")

    # Check does precompute address match with deployed address
    # (sanity check that the create-nonce bookkeeping was consistent)
    if precomputed_address.lower() != contract_address.lower():
        raise RuntimeError("Precomputed contract address does now match with deployed contract address")

    return contract_address
def execute(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path, contract_address: HexAddress
):
    """Interact with deployed smart contract on zkSync network

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network

    :param account:
        From which account the deployment contract tx will be made

    :param compiled_contract:
        Compiled contract source.

    :param contract_address:
        Contract address on zkSync network

    """
    # Get contract ABI and bytecode information
    incrementer_contract = ContractEncoder.from_json(zk_web3, compiled_contract)[0]

    # Execute Get method on smart contract (read-only call, no tx sent)
    value = incrementer_contract.contract.functions.get().call(
        {
            "from": account.address,
            "to": contract_address
        })
    print(f"Value: {value}")

    gas_price = zk_web3.zksync.gas_price

    # Get nonce of ETH address on zkSync network
    nonce = zk_web3.zksync.get_transaction_count(account.address, EthBlockParams.LATEST.value)

    # Execute increment method on smart contract (state-changing tx)
    tx = incrementer_contract.contract.functions.increment().build_transaction({
        "nonce": nonce,
        "from": account.address,
        "maxPriorityFeePerGas": 1_000_000,
        "maxFeePerGas": gas_price,
        "to": contract_address
    })

    # Sign transaction
    signed = account.sign_transaction(tx)

    # Send transaction to zkSync network
    tx_hash = zk_web3.zksync.send_raw_transaction(signed.rawTransaction)

    # Wait for transaction to be finalized
    zk_web3.zksync.wait_for_transaction_receipt(tx_hash)
    print(f"Increment transaction: {tx_hash.hex()}")

    # Execute Get method on smart contract to observe the incremented value
    value = incrementer_contract.contract.functions.get().call(
        {
            "from": account.address,
            "to": contract_address
        })
    print(f"Value after increment: {value}")
if __name__ == "__main__":
    # Set a provider
    PROVIDER = "https://testnet.era.zksync.dev"

    # Byte-format private key
    # NOTE(review): bytes.fromhex(None) raises TypeError when the PRIVATE_KEY
    # environment variable is unset — export it before running this example.
    PRIVATE_KEY = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # Connect to zkSync network
    zk_web3 = ZkSyncBuilder.build(PROVIDER)

    # Get account object by providing from private key
    account: LocalAccount = Account.from_key(PRIVATE_KEY)

    # Provide a compiled JSON source contract
    contract_path = Path("solidity/incrementer/build/combined.json")

    # Set constructor arguments
    constructor_arguments = {"_incrementer": 2}

    # Perform contract deployment
    contract_address = deploy_contract(zk_web3, account, contract_path, constructor_arguments)

    # alternative: specifying constructor arguments using args instead of kwargs
    # constructor_arguments = tuple([2])
    # change 72 line of code with following:
    # encoded_constructor = contract_encoder.encode_constructor(*constructor_args)

    # Interact with the freshly deployed contract
    execute(zk_web3, account, contract_path, contract_address)
import os
from pathlib import Path
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexAddress
from web3 import Web3
from zksync2.core.types import EthBlockParams
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxCreateContract
def deploy_contract(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path
) -> HexAddress:
    """Deploy a compiled contract on the zkSync network using the create() opcode.

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param account:
        From which account the deployment contract tx will be made
    :param compiled_contract:
        Compiled contract source.
    :return:
        Address of deployed contract.
    """
    # Network id, also used for the EIP-712 signing domain
    chain_id = zk_web3.zksync.chain_id
    # EIP-712 signer bound to the deployer key and chain
    eip712_signer = PrivateKeyEthSigner(account, chain_id)

    # Account nonce on zkSync (pending state, so queued txs are counted)
    tx_nonce = zk_web3.zksync.get_transaction_count(
        account.address, EthBlockParams.PENDING.value
    )

    # First contract in the combined JSON artifact (ABI + bytecode)
    contract_meta = ContractEncoder.from_json(zk_web3, compiled_contract)[0]

    # Current network gas price in Wei
    current_gas_price = zk_web3.zksync.gas_price

    # Build the CREATE deployment transaction; the gas limit is estimated below
    deployment_tx = TxCreateContract(
        web3=zk_web3,
        chain_id=chain_id,
        nonce=tx_nonce,
        from_=account.address,
        gas_limit=0,  # filled in after estimation
        gas_price=current_gas_price,
        bytecode=contract_meta.bytecode,
    )

    # Ask the node how much gas the deployment needs
    gas_estimate = zk_web3.zksync.eth_estimate_gas(deployment_tx.tx)
    print(f"Fee for transaction is: {Web3.from_wei(gas_estimate * current_gas_price, 'ether')} ETH")

    # Promote to an EIP-712 typed transaction carrying the estimated gas
    typed_tx = deployment_tx.tx712(gas_estimate)
    # Sign the typed-data struct and serialize the signed transaction
    signature = eip712_signer.sign_typed_data(typed_tx.to_eip712_struct())
    raw_tx = typed_tx.encode(signature)

    # Broadcast the deployment
    tx_hash = zk_web3.zksync.send_raw_transaction(raw_tx)
    # Block until the deployment transaction is sealed
    tx_receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )

    print(f"Tx status: {tx_receipt['status']}")
    contract_address = tx_receipt["contractAddress"]
    print(f"Deployed contract address: {contract_address}")

    return contract_address
if __name__ == "__main__":
    # zkSync Era testnet RPC endpoint
    provider_url = "https://testnet.era.zksync.dev"
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # Build a Web3 instance wired to the zkSync network
    w3 = ZkSyncBuilder.build(provider_url)
    # Derive the deployer account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    # Compiled contract artifact (ABI + bytecode) produced by zksolc
    contract_path = Path("solidity/storage/build/combined.json")

    # Deploy the Storage contract
    deploy_contract(w3, account, contract_path)
from eth_account import Account
from eth_account.signers.local import LocalAccount
from web3 import Web3
from examples.utils import EnvPrivateKey
from zksync2.core.types import ZkBlockParams, ADDRESS_DEFAULT, Token
from zksync2.manage_contracts.erc20_contract import ERC20Contract, ERC20Encoder
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxFunctionCall
# zkSync Era testnet RPC endpoint
ZKSYNC_TEST_URL = "https://testnet.era.zksync.dev"
# Goerli (Ethereum L1 testnet) RPC endpoint
ETH_TEST_URL = "https://rpc.ankr.com/eth_goerli"
# L2 (zkSync testnet) address of the SERC20 test token, checksummed
SERC20_Address = Web3.to_checksum_address("0xd782e03F4818A7eDb0bc5f70748F67B4e59CdB33")
class Colors:
    """ANSI escape codes used to colorize terminal output."""

    HEADER = "\x1b[95m"
    OKBLUE = "\x1b[94m"
    OKCYAN = "\x1b[96m"
    OKGREEN = "\x1b[92m"
    WARNING = "\x1b[93m"
    FAIL = "\x1b[91m"
    ENDC = "\x1b[0m"  # reset all attributes
    BOLD = "\x1b[1m"
    UNDERLINE = "\x1b[4m"
def transfer_erc20(amount: float):
    """Transfer *amount* of the SERC20 test token from Alice to Bob on zkSync.

    Reads both private keys from the environment (ZKSYNC_TEST_KEY and
    ZKSYNC_TEST_KEY2), sends an EIP-712 signed ERC20 ``transfer`` call and
    verifies the balances afterwards.

    :param amount:
        Token amount to transfer, in whole-token units (not Wei).
    """
    env1 = EnvPrivateKey("ZKSYNC_TEST_KEY")
    env2 = EnvPrivateKey("ZKSYNC_TEST_KEY2")
    web3 = ZkSyncBuilder.build(ZKSYNC_TEST_URL)
    alice: LocalAccount = Account.from_key(env1.key)
    bob: LocalAccount = Account.from_key(env2.key)
    chain_id = web3.zksync.chain_id
    signer = PrivateKeyEthSigner(alice, chain_id)

    # Describe the SERC20 token (18 decimals; only the L2 address matters here)
    erc20_token = Token(l1_address=ADDRESS_DEFAULT,
                        l2_address=SERC20_Address,
                        symbol="SERC20",
                        decimals=18)
    erc20 = ERC20Contract(web3=web3.zksync,
                          contract_address=erc20_token.l2_address,
                          account=alice)

    alice_balance_before = erc20.balance_of(alice.address)
    bob_balance_before = erc20.balance_of(bob.address)
    print(f"Alice {erc20_token.symbol} balance before : {erc20_token.format_token(alice_balance_before)}")
    print(f"Bob {erc20_token.symbol} balance before : {erc20_token.format_token(bob_balance_before)}")

    # ABI-encode the ERC20 transfer(to, value) call
    erc20_encoder = ERC20Encoder(web3)
    transfer_params = (bob.address, erc20_token.to_int(amount))
    call_data = erc20_encoder.encode_method("transfer", args=transfer_params)

    nonce = web3.zksync.get_transaction_count(alice.address, ZkBlockParams.COMMITTED.value)
    gas_price = web3.zksync.gas_price
    func_call = TxFunctionCall(chain_id=chain_id,
                               nonce=nonce,
                               from_=alice.address,
                               to=erc20_token.l2_address,
                               data=call_data,
                               gas_limit=0,  # UNKNOWN AT THIS STATE
                               gas_price=gas_price,
                               max_priority_fee_per_gas=100000000)
    estimate_gas = web3.zksync.eth_estimate_gas(func_call.tx)
    print(f"Fee for transaction is: {estimate_gas * gas_price}")

    # Sign the EIP-712 typed transaction and broadcast it
    tx_712 = func_call.tx712(estimated_gas=estimate_gas)
    signed_message = signer.sign_typed_data(tx_712.to_eip712_struct())  # fixed "singed_message" typo
    msg = tx_712.encode(signed_message)
    tx_hash = web3.zksync.send_raw_transaction(msg)
    tx_receipt = web3.zksync.wait_for_transaction_receipt(tx_hash, timeout=240, poll_latency=0.5)
    print(f"Tx status: {tx_receipt['status']}")
    print(f"Tx hash: {tx_receipt['transactionHash'].hex()}")

    alice_balance_after = erc20.balance_of(alice.address)
    bob_balance_after = erc20.balance_of(bob.address)
    # Bug fix: these are post-transfer balances, but the original labels said "before"
    print(f"Alice {erc20_token.symbol} balance after : {erc20_token.format_token(alice_balance_after)}")
    print(f"Bob {erc20_token.symbol} balance after : {erc20_token.format_token(bob_balance_after)}")

    # Success check: Bob gained exactly `amount` and Alice lost exactly `amount`
    if bob_balance_after == bob_balance_before + erc20_token.to_int(amount) and \
            alice_balance_after == alice_balance_before - erc20_token.to_int(amount):
        print(f"{Colors.OKGREEN}{amount} of {erc20_token.symbol} tokens have been transferred{Colors.ENDC}")
    else:
        print(f"{Colors.FAIL}{erc20_token.symbol} transfer has failed{Colors.ENDC}")
if __name__ == "__main__":
    # Move a single SERC20 token from Alice to Bob
    transfer_erc20(1)
import os
from pathlib import Path
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexAddress
from web3 import Web3
from zksync2.core.types import EthBlockParams
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.manage_contracts.precompute_contract_deployer import PrecomputeContractDeployer
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxCreate2Contract
def generate_random_salt() -> bytes:
    """Produce a fresh 32-byte cryptographically secure CREATE2 salt."""
    salt_length = 32
    return os.urandom(salt_length)
def deploy_contract(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path
) -> HexAddress:
    """Deploy compiled contract on zkSync network using create2() opcode

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param account:
        From which account the deployment contract tx will be made
    :param compiled_contract:
        Compiled contract source.
    :return:
        Address of deployed contract.
    :raises RuntimeError:
        If the precomputed CREATE2 address does not match the deployed one.
    """
    # Get chain id of zkSync network
    chain_id = zk_web3.zksync.chain_id

    # Signer is used to generate signature of provided transaction
    signer = PrivateKeyEthSigner(account, chain_id)

    # Get nonce of ETH address on zkSync network (pending state)
    nonce = zk_web3.zksync.get_transaction_count(
        account.address, EthBlockParams.PENDING.value
    )

    # Deployment of same smart contract (same bytecode) without salt cannot be done twice
    # Remove salt if you want to deploy contract only once
    random_salt = generate_random_salt()

    # Precompute the address of smart contract
    # Use this if there is a case where contract address should be known before deployment
    deployer = PrecomputeContractDeployer(zk_web3)

    # Get contract ABI and bytecode information
    storage_contract = ContractEncoder.from_json(zk_web3, compiled_contract)[0]

    # CREATE2 addresses are deterministic in (sender, bytecode, constructor, salt)
    precomputed_address = deployer.compute_l2_create2_address(sender=account.address,
                                                             bytecode=storage_contract.bytecode,
                                                             constructor=b'',
                                                             salt=random_salt)

    # Get current gas price in Wei
    gas_price = zk_web3.zksync.gas_price

    # Create2 deployment contract transaction; gas limit is estimated below
    create2_contract = TxCreate2Contract(web3=zk_web3,
                                         chain_id=chain_id,
                                         nonce=nonce,
                                         from_=account.address,
                                         gas_limit=0,
                                         gas_price=gas_price,
                                         bytecode=storage_contract.bytecode,
                                         salt=random_salt)

    # ZkSync transaction gas estimation
    estimate_gas = zk_web3.zksync.eth_estimate_gas(create2_contract.tx)
    print(f"Fee for transaction is: {Web3.from_wei(estimate_gas * gas_price, 'ether')} ETH")

    # Convert transaction to EIP-712 format, sign and serialize it
    tx_712 = create2_contract.tx712(estimate_gas)
    signed_message = signer.sign_typed_data(tx_712.to_eip712_struct())
    msg = tx_712.encode(signed_message)

    # Deploy contract
    tx_hash = zk_web3.zksync.send_raw_transaction(msg)

    # Wait for deployment contract transaction to be included in a block
    tx_receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )

    print(f"Tx status: {tx_receipt['status']}")
    contract_address = tx_receipt["contractAddress"]
    print(f"contract address: {contract_address}")

    # Sanity check: the deterministic CREATE2 address must equal the receipt's address
    if precomputed_address.lower() != contract_address.lower():
        # Fixed typo in the original message ("does now match" -> "does not match")
        raise RuntimeError("Precomputed contract address does not match with deployed contract address")

    return contract_address
if __name__ == "__main__":
    # zkSync Era testnet RPC endpoint
    provider_url = "https://testnet.era.zksync.dev"
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # Build a Web3 instance wired to the zkSync network
    w3 = ZkSyncBuilder.build(provider_url)
    # Derive the deployer account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    # Compiled contract artifact (ABI + bytecode) produced by zksolc
    contract_path = Path("solidity/storage/build/combined.json")

    # Deploy the Storage contract using CREATE2 with a random salt
    deploy_contract(w3, account, contract_path)
import os
from pathlib import Path
from eth_account import Account
from eth_account.signers.local import LocalAccount
from web3 import Web3
from zksync2.core.types import EthBlockParams
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.manage_contracts.nonce_holder import NonceHolder
from zksync2.manage_contracts.precompute_contract_deployer import PrecomputeContractDeployer
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxCreateContract
def generate_random_salt() -> bytes:
    """Return 32 cryptographically random bytes suitable as a deployment salt."""
    random_bytes = os.urandom(32)
    return random_bytes
def deploy_contract(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path
):
    """Deploy compiled contract with dependency on zkSync network using create() opcode

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param account:
        From which account the deployment contract tx will be made
    :param compiled_contract:
        Compiled contract source.
    :return:
        Address of deployed contract.
    :raises RuntimeError:
        If the precomputed address does not match the deployed one.
    """
    # Get chain id of zkSync network
    chain_id = zk_web3.zksync.chain_id

    # Signer is used to generate signature of provided transaction
    signer = PrivateKeyEthSigner(account, chain_id)

    # Get nonce of ETH address on zkSync network (pending state)
    nonce = zk_web3.zksync.get_transaction_count(
        account.address, EthBlockParams.PENDING.value
    )

    # Get deployment nonce (zkSync tracks it separately from the tx nonce)
    nonce_holder = NonceHolder(zk_web3, account)
    deployment_nonce = nonce_holder.get_deployment_nonce(account.address)

    # Precompute the address of smart contract
    # Use this if there is a case where contract address should be known before deployment
    deployer = PrecomputeContractDeployer(zk_web3)
    precomputed_address = deployer.compute_l2_create_address(account.address, deployment_nonce)

    # Get ABI and bytecode of demo and foo contracts
    demo_contract, foo_contract = ContractEncoder.from_json(zk_web3, compiled_contract)

    # Get current gas price in Wei
    gas_price = zk_web3.zksync.gas_price

    # Create deployment transaction; Foo bytecode is shipped as a dependency
    create_contract = TxCreateContract(web3=zk_web3,
                                       chain_id=chain_id,
                                       nonce=nonce,
                                       from_=account.address,
                                       gas_limit=0,
                                       gas_price=gas_price,
                                       bytecode=demo_contract.bytecode,
                                       deps=[foo_contract.bytecode])

    # ZkSync transaction gas estimation
    estimate_gas = zk_web3.zksync.eth_estimate_gas(create_contract.tx)
    print(f"Fee for transaction is: {Web3.from_wei(estimate_gas * gas_price, 'ether')} ETH")

    # Convert transaction to EIP-712 format, sign and serialize it
    tx_712 = create_contract.tx712(estimate_gas)
    signed_message = signer.sign_typed_data(tx_712.to_eip712_struct())
    msg = tx_712.encode(signed_message)

    # Deploy contract
    tx_hash = zk_web3.zksync.send_raw_transaction(msg)

    # Wait for deployment contract transaction to be included in a block
    tx_receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )

    print(f"Tx status: {tx_receipt['status']}")
    contract_address = tx_receipt["contractAddress"]
    print(f"contract address: {contract_address}")

    # Sanity check that the precomputed address matches the deployed one
    if precomputed_address.lower() != contract_address.lower():
        # Fixed typo in the original message ("does now match" -> "does not match")
        raise RuntimeError("Precomputed contract address does not match with deployed contract address")

    # Return the deployed address; the docstring documented a return value
    # but the original implementation fell off the end and returned None
    return contract_address
if __name__ == "__main__":
    # zkSync Era testnet RPC endpoint
    provider_url = "https://testnet.era.zksync.dev"
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # Build a Web3 instance wired to the zkSync network
    w3 = ZkSyncBuilder.build(provider_url)
    # Derive the deployer account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    # Compiled artifact containing both Demo and Foo (ABI + bytecode)
    contract_path = Path("solidity/demo/build/combined.json")

    # Deploy the Demo contract together with its Foo dependency
    deploy_contract(w3, account, contract_path)
# How to Compile Solidity Smart Contracts
Use `zksolc` compiler to compile Solidity smart contracts.
The `zksolc` compiler requires `solc` to be installed. A specific version of
`zksolc` is compatible only with specific versions of `solc`, so make
sure to use matching versions of both compilers.
There are 3 solidity smart contracts:
- `Storage`: contract without constructor.
- `Incrementer`: contract with constructor.
- `Demo`: contract that has a dependency on the `Foo` contract.
In the following examples, Docker is used to create containers with
`solc` already installed.
## Compile Smart Contracts
Run a container that has the `solc` tool already installed:
```shell
# create container with installed solc tool
SOLC_VERSION="0.8.19-alpine"
docker create -it --name zksolc --entrypoint ash ethereum/solc:${SOLC_VERSION}
# copy smart contracts source files to container
docker cp examples/solidity zksolc:/solidity
# run and attach to the container
docker start -i zksolc
```
Run commands in container:
```shell
# download zksolc
ZKSOLC_VERSION="v1.3.9"
wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-${ZKSOLC_VERSION} -O /bin/zksolc; chmod +x /bin/zksolc
```
**Compile Storage Smart Contract**
```shell
# create combined-json with abi and binary
zksolc -O3 -o solidity/storage/build \
--combined-json abi,bin \
solidity/storage/Storage.sol
```
**Compile Incrementer Smart Contract**
```shell
# create combined-json with abi and binary
zksolc -O3 -o solidity/incrementer/build \
--combined-json abi,bin \
solidity/incrementer/Incrementer.sol
```
**Compile Demo Smart Contract**
```shell
# create combined-json with abi and binary
zksolc -O3 -o solidity/demo/build \
--combined-json abi,bin \
solidity/demo/Demo.sol \
solidity/demo/Foo.sol
```
Exit from container
```shell
exit
```
Copy generated files from container to host machine
```shell
# copy generated files from container to host
docker cp zksolc:/solidity ./examples/
# remove container
docker rm zksolc
```
On the host machine, each smart contract now has a `build/combined.json` file
(e.g. `solidity/storage/build/combined.json`) that can be used in a program for
deploying and interacting with the smart contract.
import os
from pathlib import Path
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexAddress
from web3 import Web3
from zksync2.core.types import EthBlockParams
from zksync2.manage_contracts.contract_encoder_base import ContractEncoder
from zksync2.manage_contracts.precompute_contract_deployer import PrecomputeContractDeployer
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxCreate2Contract
def generate_random_salt() -> bytes:
    """Generate a 32-byte cryptographically random CREATE2 salt."""
    return bytes(os.urandom(32))
def deploy_contract(
    zk_web3: Web3, account: LocalAccount, compiled_contract: Path
) -> HexAddress:
    """Deploy compiled contract with dependency on zkSync network using create2() opcode

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param account:
        From which account the deployment contract tx will be made
    :param compiled_contract:
        Compiled contract source.
    :return:
        Address of deployed contract.
    :raises RuntimeError:
        If the precomputed CREATE2 address does not match the deployed one.
    """
    # Get chain id of zkSync network
    chain_id = zk_web3.zksync.chain_id

    # Signer is used to generate signature of provided transaction
    signer = PrivateKeyEthSigner(account, chain_id)

    # Get nonce of ETH address on zkSync network (pending state)
    nonce = zk_web3.zksync.get_transaction_count(
        account.address, EthBlockParams.PENDING.value
    )

    # Deployment of same smart contract (same bytecode) without salt cannot be done twice
    # Remove salt if you want to deploy contract only once
    random_salt = generate_random_salt()

    # Precompute the address of smart contract
    # Use this if there is a case where contract address should be known before deployment
    deployer = PrecomputeContractDeployer(zk_web3)

    # Get ABI and bytecode of demo and foo contracts
    demo_contract, foo_contract = ContractEncoder.from_json(zk_web3, compiled_contract)

    # CREATE2 addresses are deterministic in (sender, bytecode, constructor, salt)
    precomputed_address = deployer.compute_l2_create2_address(sender=account.address,
                                                              bytecode=demo_contract.bytecode,
                                                              constructor=b'',
                                                              salt=random_salt)

    # Get current gas price in Wei
    gas_price = zk_web3.zksync.gas_price

    # Create2 deployment transaction; Foo bytecode is shipped as a dependency
    create2_contract = TxCreate2Contract(web3=zk_web3,
                                         chain_id=chain_id,
                                         nonce=nonce,
                                         from_=account.address,
                                         gas_limit=0,
                                         gas_price=gas_price,
                                         bytecode=demo_contract.bytecode,
                                         deps=[foo_contract.bytecode],
                                         salt=random_salt)

    # ZkSync transaction gas estimation
    estimate_gas = zk_web3.zksync.eth_estimate_gas(create2_contract.tx)
    print(f"Fee for transaction is: {Web3.from_wei(estimate_gas * gas_price, 'ether')} ETH")

    # Convert transaction to EIP-712 format, sign and serialize it
    tx_712 = create2_contract.tx712(estimate_gas)
    signed_message = signer.sign_typed_data(tx_712.to_eip712_struct())
    msg = tx_712.encode(signed_message)

    # Deploy contract
    tx_hash = zk_web3.zksync.send_raw_transaction(msg)

    # Wait for deployment contract transaction to be included in a block
    tx_receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )

    print(f"Tx status: {tx_receipt['status']}")
    contract_address = tx_receipt["contractAddress"]
    print(f"contract address: {contract_address}")

    # Sanity check: the deterministic CREATE2 address must equal the receipt's address
    if precomputed_address.lower() != contract_address.lower():
        # Fixed typo in the original message ("does now match" -> "does not match")
        raise RuntimeError("Precomputed contract address does not match with deployed contract address")

    return contract_address
if __name__ == "__main__":
    # zkSync Era testnet RPC endpoint
    provider_url = "https://testnet.era.zksync.dev"
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # Build a Web3 instance wired to the zkSync network
    w3 = ZkSyncBuilder.build(provider_url)
    # Derive the deployer account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    # Compiled artifact containing both Demo and Foo (ABI + bytecode)
    contract_path = Path("solidity/demo/build/combined.json")

    # Deploy the Demo contract (with its Foo dependency) using CREATE2
    deploy_contract(w3, account, contract_path)
import os
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexStr, HexAddress
from eth_utils import to_checksum_address
from web3 import Web3
from zksync2.core.types import ZkBlockParams, EthBlockParams
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.signer.eth_signer import PrivateKeyEthSigner
from zksync2.transaction.transaction_builders import TxFunctionCall
def get_eth_balance(zk_web3: Web3, address: HexAddress) -> float:
    """Return the ETH balance of *address* on the zkSync network.

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param address:
        ETH address whose balance is queried.
    :return:
        Balance denominated in ether (converted from Wei).
    """
    # The node reports the balance in Wei against the latest block
    wei_balance = zk_web3.zksync.get_balance(address, EthBlockParams.LATEST.value)
    # Callers expect ether, so convert before returning
    return Web3.from_wei(wei_balance, "ether")
def transfer_eth(
    zk_web3: Web3,
    account: LocalAccount,
    address: HexAddress,
    amount: float
) -> bytes:
    """Send *amount* ETH from *account* to *address* on the zkSync network.

    :param zk_web3:
        Instance of ZkSyncBuilder that interacts with zkSync network
    :param account:
        From which account the transfer will be made
    :param address:
        Destination ETH address of the transfer.
    :param amount:
        Amount to transfer, in ether.
    :return:
        The transaction hash of the transfer transaction.
    """
    # Network id, also used for the EIP-712 signing domain
    chain_id = zk_web3.zksync.chain_id
    # EIP-712 signer bound to the sender's key and the network id
    typed_signer = PrivateKeyEthSigner(account, chain_id)

    # Sender nonce taken from the committed L2 state
    sender_nonce = zk_web3.zksync.get_transaction_count(
        account.address, ZkBlockParams.COMMITTED.value
    )
    current_gas_price = zk_web3.zksync.gas_price

    # Plain value transfer: empty calldata, gas limit estimated below
    transfer_tx = TxFunctionCall(
        chain_id=chain_id,
        nonce=sender_nonce,
        from_=account.address,
        to=to_checksum_address(address),
        value=zk_web3.to_wei(amount, "ether"),
        data=HexStr("0x"),
        gas_limit=0,  # UNKNOWN AT THIS STATE
        gas_price=current_gas_price,
        max_priority_fee_per_gas=100_000_000,
    )

    # Ask the node how much gas the transfer needs
    gas_estimate = zk_web3.zksync.eth_estimate_gas(transfer_tx.tx)
    print(f"Fee for transaction is: {gas_estimate * current_gas_price}")

    # Sign the EIP-712 typed transaction and serialize it
    tx_712 = transfer_tx.tx712(gas_estimate)
    raw_tx = tx_712.encode(typed_signer.sign_typed_data(tx_712.to_eip712_struct()))

    # Broadcast and wait for inclusion in a block
    tx_hash = zk_web3.zksync.send_raw_transaction(raw_tx)
    print(f"Transaction hash is : {tx_hash.hex()}")

    receipt = zk_web3.zksync.wait_for_transaction_receipt(
        tx_hash, timeout=240, poll_latency=0.5
    )
    print(f"Tx status: {receipt['status']}")

    return tx_hash
if __name__ == "__main__":
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))
    # zkSync Era testnet RPC endpoint
    provider_url = "https://testnet.era.zksync.dev"

    # Build a Web3 instance wired to the zkSync network
    w3 = ZkSyncBuilder.build(provider_url)
    # Derive the sending account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    print(f"Balance before transfer : {get_eth_balance(w3, account.address)} ETH")

    # Move 0.001 ETH to the (checksummed) recipient address
    transfer_eth(
        w3,
        account,
        to_checksum_address("0x81E9D85b65E9CC8618D85A1110e4b1DF63fA30d9"),
        0.001
    )

    print(f"Balance after transfer : {get_eth_balance(w3, account.address)} ETH")
import os
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import HexStr
from web3 import Web3
from zksync2.core.types import Token
from zksync2.manage_contracts.zksync_contract import ZkSyncContract
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.provider.eth_provider import EthereumProvider
def deposit(
    zksync_provider: Web3,
    eth_web3: Web3,
    eth_provider: EthereumProvider,
    account: LocalAccount,
    amount: float
) -> tuple[HexStr, HexStr]:
    """Deposit *amount* ETH from the L1 network into the L2 (zkSync) network.

    :param zksync_provider:
        Instance of ZkSync provider
    :param eth_web3:
        Instance of Ethereum Web3 provider
    :param eth_provider:
        Instance of Ethereum provider
    :param account:
        From which ETH account the deposit will be made
    :param amount:
        Amount to deposit, in ether.
    :return:
        Pair of deposit transaction hashes: (L1 hash, L2 hash).
    :raises RuntimeError:
        If the L1 deposit transaction failed.
    """
    # Submit the deposit on the L1 network first
    print("Executing deposit transaction on L1 network")
    l1_tx_receipt = eth_provider.deposit(token=Token.create_eth(),
                                         amount=Web3.to_wei(amount, 'ether'),
                                         gas_price=eth_web3.eth.gas_price)
    if not l1_tx_receipt["status"]:
        raise RuntimeError("Deposit transaction on L1 network failed")

    # The zkSync bridge contract on L1 is needed to derive the L2 tx hash
    zksync_contract = ZkSyncContract(zksync_provider.zksync.main_contract_address, eth_web3, account)
    l2_hash = zksync_provider.zksync.get_l2_hash_from_priority_op(l1_tx_receipt, zksync_contract)

    # Finalization on L2 typically takes several minutes
    print("Waiting for deposit transaction on L2 network to be finalized (5-7 minutes)")
    l2_tx_receipt = zksync_provider.zksync.wait_for_transaction_receipt(transaction_hash=l2_hash,
                                                                        timeout=360,
                                                                        poll_latency=10)

    # Hand back the tx hashes from both networks
    return l1_tx_receipt['transactionHash'].hex(), l2_tx_receipt['transactionHash'].hex()
if __name__ == "__main__":
    # Hex-encoded private key taken from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))

    # RPC endpoints for both networks
    zksync_url = "https://testnet.era.zksync.dev"
    eth_url = "https://rpc.ankr.com/eth_goerli"

    # Build providers for L2 (zkSync) and L1 (Goerli)
    l2_w3 = ZkSyncBuilder.build(zksync_url)
    l1_w3 = Web3(Web3.HTTPProvider(eth_url))

    # Derive the depositing account from the raw key
    account: LocalAccount = Account.from_key(private_key)

    # Helper wrapping the L1-side bridge interactions
    bridge = EthereumProvider(l2_w3, l1_w3, account)

    # Deposit 0.01 ETH from L1 to L2
    amount = 0.01  # ETH
    l1_tx_hash, l2_tx_hash = deposit(l2_w3, l1_w3, bridge, account, amount)

    print(f"L1 transaction: {l1_tx_hash}")
    print(f"L2 transaction: {l2_tx_hash}")
import os
from eth_account import Account
from eth_account.signers.local import LocalAccount
from hexbytes import HexBytes
from web3 import Web3
from web3.middleware import geth_poa_middleware
from web3.types import TxReceipt
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.provider.eth_provider import EthereumProvider
def finalize_withdraw(
    zksync_provider: Web3, ethereum_provider: EthereumProvider, withdraw_tx_hash: HexBytes
) -> TxReceipt:
    """Execute the finalize-withdraw transaction on the L1 network.

    :param zksync_provider:
        Instance of ZkSync provider
    :param ethereum_provider:
        Instance of EthereumProvider
    :param withdraw_tx_hash:
        Hash of the withdraw transaction on the L2 network
    :return:
        TxReceipt of the finalize-withdraw transaction on the L1 network
    :raises RuntimeError:
        If either the L2 withdraw or the L1 finalize transaction failed.
    """
    # Block until the L2 withdraw transaction is finalized
    l2_receipt = zksync_provider.zksync.wait_finalized(withdraw_tx_hash)
    if not l2_receipt["status"]:
        raise RuntimeError("Withdraw transaction on L2 network failed")

    # Claim the withdrawn funds on L1
    l1_receipt = ethereum_provider.finalize_withdrawal(l2_receipt["transactionHash"])
    if not l1_receipt["status"]:
        raise RuntimeError("Finalize withdraw transaction L1 network failed")

    return l1_receipt
if __name__ == "__main__":
    # Hex-encoded private key and L2 withdraw tx hash, both from the environment
    private_key = bytes.fromhex(os.environ.get("PRIVATE_KEY"))
    withdraw_tx_hash = HexBytes.fromhex(os.environ.get("WITHDRAW_TX_HASH"))

    # RPC endpoints for both networks
    zksync_url = "https://testnet.era.zksync.dev"
    eth_url = "https://rpc.ankr.com/eth_goerli"

    # Build providers for L2 (zkSync) and L1 (Goerli)
    l2_w3 = ZkSyncBuilder.build(zksync_url)
    l1_w3 = Web3(Web3.HTTPProvider(eth_url))
    # Goerli is a PoA chain, so inject the PoA middleware
    l1_w3.middleware_onion.inject(geth_poa_middleware, layer=0)

    # Derive the account from the raw key and wrap the L1-side bridge helper
    account: LocalAccount = Account.from_key(private_key)
    bridge = EthereumProvider(l2_w3, l1_w3, account)

    # Finalize the earlier successful withdraw transaction on L1
    eth_tx_receipt = finalize_withdraw(l2_w3, bridge, withdraw_tx_hash)

    fee = eth_tx_receipt["gasUsed"] * eth_tx_receipt["effectiveGasPrice"]
    amount = 0.01
    print(f"Finalize withdraw transaction: {eth_tx_receipt['transactionHash'].hex()}")
    print(f"Effective ETH withdraw (paid fee): {Web3.from_wei(Web3.to_wei(amount, 'ether') - fee, 'ether')}")
import os
from eth_account import Account
from eth_account.signers.local import LocalAccount
from hexbytes import HexBytes
from web3 import Web3
from web3.middleware import geth_poa_middleware
from web3.types import TxReceipt
from zksync2.core.types import Token
from zksync2.module.module_builder import ZkSyncBuilder
from zksync2.provider.eth_provider import EthereumProvider
from zksync2.transaction.transaction_builders import TxWithdraw
def withdraw(
    zksync_provider: Web3, account: LocalAccount, amount: float
) -> HexBytes:
    """Withdraw *amount* ETH from Layer 2 back to Layer 1 on zkSync.

    :param zksync_provider:
        Instance of ZkSync provider
    :param account:
        From which ETH account the withdrawal will be made
    :param amount:
        Amount to withdraw, in ether.
    :return:
        Hash of the withdraw transaction on the L2 network
    """
    # Build the withdrawal transaction; the gas limit is estimated below
    withdrawal = TxWithdraw(
        web3=zksync_provider,
        token=Token.create_eth(),
        amount=Web3.to_wei(amount, "ether"),
        gas_limit=0,  # unknown
        account=account,
    )

    # Fill in the gas limit from the node's estimate
    gas_estimate = zksync_provider.zksync.eth_estimate_gas(withdrawal.tx)
    tx = withdrawal.estimated_gas(gas_estimate)

    # Sign locally and broadcast the raw transaction
    signed_tx = account.sign_transaction(tx)
    return zksync_provider.zksync.send_raw_transaction(signed_tx.rawTransaction)
def finalize_withdraw(
    zksync_provider: Web3, ethereum_provider: EthereumProvider, withdraw_tx_hash: HexBytes
) -> TxReceipt:
    """
    Execute the finalize-withdraw transaction on the L1 network.

    :param zksync_provider:
        Instance of ZkSync provider
    :param ethereum_provider:
        Instance of EthereumProvider used to finalize on L1
    :param withdraw_tx_hash:
        Hash of the withdraw transaction on the L2 network
    :return:
        TxReceipt of the finalize-withdraw transaction on the L1 network
    :raises RuntimeError:
        If either the L2 withdraw or the L1 finalize transaction failed
    """
    # Wait for the withdraw transaction to be finalized on L2; per this
    # script's own notes this can take on the order of 11-24 hours
    zks_receipt = zksync_provider.zksync.wait_finalized(withdraw_tx_hash)
    # Check if withdraw transaction was successful (status == 0 means reverted)
    if not zks_receipt["status"]:
        raise RuntimeError("Withdraw transaction on L2 network failed")
    # Execute finalize withdraw on L1, referencing the L2 transaction hash
    tx_receipt = ethereum_provider.finalize_withdrawal(zks_receipt["transactionHash"])
    # Check if finalize withdraw transaction was successful
    if not tx_receipt["status"]:
        raise RuntimeError("Finalize withdraw transaction L1 network failed")
    return tx_receipt
if __name__ == "__main__":
    # Get the private key from OS environment variables.
    # Expects a hex string WITHOUT a "0x" prefix (bytes.fromhex rejects one).
    PRIVATE_KEY: bytes = bytes.fromhex(os.environ.get("PRIVATE_KEY"))
    # Endpoint URLs for the two networks (zkSync Era testnet / Goerli)
    ZKSYNC_PROVIDER = "https://testnet.era.zksync.dev"
    ETH_PROVIDER = "https://rpc.ankr.com/eth_goerli"
    # Connect to zkSync network
    zk_web3 = ZkSyncBuilder.build(ZKSYNC_PROVIDER)
    # Connect to Ethereum network
    eth_web3 = Web3(Web3.HTTPProvider(ETH_PROVIDER))
    # Inject the geth proof-of-authority middleware (presumably required for
    # this PoA testnet — confirm against the target network)
    eth_web3.middleware_onion.inject(geth_poa_middleware, layer=0)
    # Derive the account object from the private key
    account: LocalAccount = Account.from_key(PRIVATE_KEY)
    # Create Ethereum provider (used later by 10_finalize_withdrawal.py to
    # finalize this withdrawal on L1)
    eth_provider = EthereumProvider(zk_web3, eth_web3, account)
    # Amount to withdraw, in ether
    amount: float = 0.01
    # Perform the withdrawal on L2
    withdraw_tx_hash = withdraw(zk_web3, account, amount)
    print(f"Withdraw transaction hash: {withdraw_tx_hash.hex()}")
    print("Wait for withdraw transaction to be finalized on L2 network (11-24 hours)")
    print("Read more about withdrawal delay: https://era.zksync.io/docs/dev/troubleshooting/withdrawal-delay.html")
    print("When withdraw transaction is finalized, execute 10_finalize_withdrawal.py script "
          "with WITHDRAW_TX_HASH environment variable set")
Changelog
=========
0.2.1 (02/16/2012)
------------------
- Fixed packaging bug.
0.2 (02/03/2012)
----------------
Changes
*******
- Added context manager return to lock to allow use of the 'with'
statement.
- Refactored to use zc.zk ZooKeeper library for higher level Zookeeper
abstraction with automatic watch re-establishment.
Features
********
- Node object to retrieve ZNode data from Zookeeper and keep it up
to date.
- Node objects can have data and children subscribers.
- NodeDict object that maps a shallow tree (one level of children)
into a dict-like object.
Backward Incompatibilities
**************************
- SharedZkLock has been refactored into ZkWriteLock and ZkReadLock.
- ``revoked`` is a property of Locks, not a method.
- ZkConnection is gone, lock objects, ZkNode, and ZkNodeDict all expect
zc.zk ZooKeeper instances.
0.1 (11/22/2011)
----------------
Features
********
- Lock implementation, with revokable shared locks.
- Zookeeper connection object with automatic reconnect.
| zktools | /zktools-0.2.1.tar.gz/zktools-0.2.1/CHANGES.rst | CHANGES.rst |
===============
Zookeeper Tools
===============
``zktools`` is a package of tools implementing higher level constructs using
`Apache Zookeeper`_.
It currently provides:
* ``Configuration`` - Zookeeper Configuration Helpers
to store and load configuration information stored
in Zookeeper nodes.
* ``Locks`` - Zookeeper locks with support for
  non-blocking acquire, modeled on Python's Lock objects, including the
  `Revocable Shared Locks with Freaking Laser Beams` described in the
  `Zookeeper Recipes
  <http://zookeeper.apache.org/doc/current/recipes.html#sc_recoverableSharedLocks>`_.
See `the full docs`_ for more information.
License
=======
``zktools`` is offered under the MPL license.
Authors
=======
``zktools`` is made available by the Mozilla Foundation.
.. _Apache Zookeeper: http://zookeeper.apache.org/
.. _the full docs: http://zktools.rtfd.org/
| zktools | /zktools-0.2.1.tar.gz/zktools-0.2.1/README.rst | README.rst |
===============
Zookeeper Tools
===============
A Python collection of higher-level API's built on top of `Apache Zookeeper`_,
a distributed, highly reliable coordination service with no single point of
failure.
Features:
* :mod:`Locks <zktools.locking>` - Locks modeled on Python's Lock objects, with
  support for non-blocking acquire and shared read/write locks, including the
  `Revocable Shared Locks with Freaking
  Laser Beams` described in the `Zookeeper Recipes`_.
* :mod:`Nodes <zktools.node>` - Objects to track values in Zookeeper
Nodes (zNodes) automatically.
Reference Material
==================
Reference material includes documentation for every `zktools` API.
.. toctree::
:maxdepth: 1
api
Changelog <changelog>
Source Code
===========
All source code is available on `github under zktools <https://github.com/mozilla-services/zktools>`_.
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`glossary`
License
=======
``zktools`` is offered under the MPL license.
Authors
=======
``zktools`` is made available by the Mozilla Foundation.
.. _Apache Zookeeper: http://zookeeper.apache.org/
.. _Zookeeper Recipes: http://zookeeper.apache.org/doc/current/recipes.html#sc_recoverableSharedLocks
| zktools | /zktools-0.2.1.tar.gz/zktools-0.2.1/docs/index.rst | index.rst |
.. _locking_module:
:mod:`zktools.locking`
======================
.. automodule:: zktools.locking
Constants
---------
.. data:: IMMEDIATE
Flag used to declare that revocation should occur immediately. Other
lock-holders will not be given time to release their lock.
Lock Class
----------
.. autoclass:: ZkLock
:members: __init__, acquire, release, revoked, revoke_all, has_lock, clear
Shared Read/Write Lock Classes
------------------------------
.. autoclass:: ZkReadLock
:members: __init__, acquire, revoked, has_lock, revoke_all, release, clear
.. autoclass:: ZkWriteLock
:members: __init__, acquire, revoked, has_lock, revoke_all, release, clear
Private Lock Base Class
-----------------------
.. autoclass:: _LockBase
:members: __init__, _acquire_lock, release, revoked, has_lock, clear
Internal Utility Functions
--------------------------
.. autofunction:: has_read_lock
.. autofunction:: has_write_lock
| zktools | /zktools-0.2.1.tar.gz/zktools-0.2.1/docs/api/locking.rst | locking.rst |
Search.setIndex({objects:{"zktools.locking.ZkReadLock":{revoked:[5,3,1,""],clear:[5,1,1,""],acquire:[5,1,1,""],revoke_all:[5,1,1,""],has_lock:[5,1,1,""],release:[5,1,1,""],"__init__":[5,1,1,""]},zktools:{node:[6,0,1,""],connection:[1,0,1,""],locking:[5,0,1,""]},"zktools.locking._LockBase":{revoked:[5,3,1,""],clear:[5,1,1,""],"_acquire_lock":[5,1,1,""],has_lock:[5,1,1,""],release:[5,1,1,""],"__init__":[5,1,1,""]},"zktools.locking.ZkLock":{revoked:[5,3,1,""],clear:[5,1,1,""],acquire:[5,1,1,""],revoke_all:[5,1,1,""],"__init__":[5,1,1,""],release:[5,1,1,""],has_lock:[5,1,1,""]},"zktools.node.ZkNode":{connected:[6,3,1,""],value:[6,3,1,""],"__init__":[6,1,1,""]},"zktools.connection.ZkConnection":{connect:[1,1,1,""],"__init__":[1,1,1,""],"__getattr__":[1,1,1,""]},"zktools.locking":{"_LockBase":[5,4,1,""],ZkReadLock:[5,4,1,""],has_write_lock:[5,2,1,""],IMMEDIATE:[5,5,1,""],ZkWriteLock:[5,4,1,""],has_read_lock:[5,2,1,""],ZkLock:[5,4,1,""]},"zktools.node":{ZkNode:[6,4,1,""]},"zktools.connection":{ZkConnection:[1,4,1,""]},"zktools.locking.ZkWriteLock":{revoked:[5,3,1,""],clear:[5,1,1,""],acquire:[5,1,1,""],revoke_all:[5,1,1,""],has_lock:[5,1,1,""],release:[5,1,1,""],"__init__":[5,1,1,""]}},terms:{"default":6,all:[5,1],code:[0,6],prefix:5,"__getattr__":1,use_json:6,children:5,zookeep:[0,1,2,3,5,6],privat:[5,4],decim:6,specif:5,under:[0,5],lock_nam:5,sourc:[0,6,1,5],everi:[0,4],string:[5,1,6],fals:[5,1,6],mpl:0,has_write_lock:5,util:[5,4],failur:0,zklocktest:5,recip:[0,5],level:0,list:[5,6],second:[5,1],pass:1,index:0,current:[5,1,6],delet:[5,6],version:[5,1],zktool:[0,4,6,1,5],"public":4,avers:5,full:5,path:[5,6],modifi:5,valu:[0,1,6],wait:5,search:0,convers:6,prior:5,extrem:6,implement:[5,2],created_ago:5,revoc:[0,5],revok:[5,2],activ:5,modul:[0,4,6,5],foundat:0,releas:5,api:[0,4],establish:0,highli:0,from:[5,6],describ:0,perm:1,call:[0,5],cversion:5,type:5,until:1,zklock:5,more:5,desir:5,mozilla:0,warn:[5,6],flag:5,zkwritelock:[5,2],examin:5,known:5,obj:5,must:5,none:[5,6],ca
n:[0,6,5],purpos:[],root:5,stream:5,give:1,lock:[5,0,4,2],share:[5,0,4,2],indic:[0,6,5],liter:[5,1,6],serial:6,occur:5,declar:5,alwai:6,end:6,rather:5,write:[0,4,5],how:[5,1],anyon:[5,1],conn:[5,1,6],config:6,connect_timeout:1,updat:6,zkreadlock:[5,2],reflect:6,befor:1,lax:6,date:6,modifed_ago:5,data:[5,2],pzxid:5,github:0,attempt:[5,1,6],author:0,"_acquire_lock":5,inform:3,maintain:3,allow:2,reconnect:[6,1,2],revoke_al:5,paramet:[5,1,6],group:3,cli:5,comprehens:4,non:[0,5],"return":[5,6,1,2],handl:[0,1,6],initi:5,name:[3,5,4,1],changelog:[0,2],easili:5,timeout:[5,1],each:5,debug:1,found:[0,6],connect:[0,1,2,4,5,6],our:5,event:[1,6],out:[5,6],mzxid:5,print:6,correct:0,proxi:0,situat:[],free:5,fred:5,base:[5,4],ask:5,zknode:6,thread:5,synchron:3,iso:6,licens:0,first:1,onc:6,number:6,unlik:5,blank:6,given:5,intention:6,top:0,forcabl:5,statement:2,scheme:1,store:2,option:[5,6,2],tool:0,provid:[3,5,6],holder:5,than:5,zkconnect:[5,6,1,2],remov:5,session_timeout:1,str:[5,6],were:5,pre:[5,1,6],ephemeralown:5,argument:1,expir:[1,6],have:5,tabl:0,need:1,incompat:2,gentli:5,built:0,destroi:5,latter:1,note:[1,6],also:[0,6,1,5],exampl:[5,1,6],take:5,which:[5,6],singl:[0,6],sure:5,distribut:[3,0],normal:5,track:[0,6],object:[0,5,6,1,2],reach:1,"class":[5,4,1,6],clear:5,request:5,doe:1,bracket:6,determin:[5,6],show:5,curlei:6,session:[1,6],access:6,onli:5,coerc:6,acquir:[0,5],configur:[3,6,2],apach:[3,0],should:[5,6],dict:6,datalength:5,get:5,familiar:5,numchildren:5,beam:[0,5],requir:5,my_lock:5,organ:4,method:2,reload:6,set:[5,1,6],my_lock_nam:5,"86927055090548768l":5,see:5,fail:6,has_lock:5,statu:5,someth:5,"import":[5,6],czxid:5,attribut:6,node_nam:5,lock_root:5,come:5,addit:5,both:6,instanc:[5,6],context:2,mani:[5,1],load:[6,2],point:[0,5],period:1,freak:[0,5],assum:6,been:[5,2],mark:6,json:6,immedi:5,ani:5,coordin:0,present:5,zooki:5,laser:[0,5],look:[5,6],servic:[3,0],properti:2,"while":5,behavior:[5,6],glossari:[3,0],"152321l":5,succes:5,them:5,itself:6,zktoolslock:5,"__i
nit__":[5,1,6],datetim:6,alphabet:4,same:[5,1],keynam:5,python:[0,6,5],complex:6,"0x1f":1,document:[0,4],higher:0,capabl:1,rais:1,logfil:5,chang:[6,2],appropri:6,scenario:6,without:[5,6],command:[5,1],thi:[5,4,6],model:0,tip:2,expos:4,except:1,other:5,save:6,applic:6,read:[0,4,5],sharedzklock:2,"1326417195365l":5,world:1,has_read_lock:5,like:6,loss:0,ctime:5,lost:1,docutil:[5,1,6],collect:0,manag:2,node:[0,4,6,1,5],deal:[],some:6,back:6,dead:5,intern:[5,4],forcibl:5,coercion:6,avoid:1,subclass:5,condit:6,overli:6,localhost:1,refer:[0,4],usag:5,host:1,repositori:0,chapter:4,about:1,central:3,materi:[0,4],page:0,idl:5,block:[0,1,5],within:4,automat:[0,6,1,2],empti:5,ensur:1,span:[5,1,6],log:5,support:[0,6,5],"long":5,avail:[0,4,1],start:6,interfac:5,includ:0,"function":[5,4,1],offer:0,line:5,"true":[5,6],info:5,made:0,possibl:6,whether:[5,6],otherwis:5,reconnect_timeout:1,featur:[0,2],constant:[5,4],creat:[5,1,6],"int":[5,1,6],dure:0,exist:[5,6],file:5,mtime:5,check:5,again:1,"_lockbas":5,when:[5,1,6],detail:5,refactor:2,field:5,valid:[5,1],bool:[5,6],you:5,deseri:6,consid:5,ago:5,longer:[5,2],reliabl:0,time:5,backward:2},objtypes:{"0":"py:module","1":"py:method","2":"py:function","3":"py:attribute","4":"py:class","5":"py:data"},titles:["Zookeeper Tools","<tt class=\"docutils literal docutils literal\"><span class=\"pre\">zktools.connection</span></tt>","Changelog","Glossary","API Documentation","<tt class=\"docutils literal docutils literal\"><span class=\"pre\">zktools.locking</span></tt>","<tt class=\"docutils literal docutils literal\"><span class=\"pre\">zktools.node</span></tt>"],objnames:{"0":["py","module","Python module"],"1":["py","method","Python method"],"2":["py","function","Python function"],"3":["py","attribute","Python attribute"],"4":["py","class","Python class"],"5":["py","data","Python data"]},filenames:["index","api/connection","changelog","glossary","api","api/locking","api/node"]}) | zktools | 
/zktools-0.2.1.tar.gz/zktools-0.2.1/docs/_build/html/searchindex.js | searchindex.js |
(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
// DOM-ready wiring: addEventListener("DOMContentLoaded") with an attachEvent
// ("onreadystatechange") fallback for old IE, then the c.support feature-detection
// IIFE — it probes a detached <div> of known markup for browser quirks.
true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
// Probes read back from the test markup: leadingWhitespace, auto-inserted tbody,
// link serialization, style attribute, href normalization, opacity, cssFloat,
// checkbox default value ("on"), optSelected.
var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
// More probes: parentNode after removeChild, deleteExpando (fails on IE host objects),
// scriptEval via an injected inline <script> that sets a window flag, noCloneEvent
// via attachEvent + cloneNode + fireEvent (IE copies handlers on clone).
parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
// checkClone: does cloning a fragment preserve a radio's checked state; boxModel is
// measured on document ready with a 1px-padded div (offsetWidth===2 means W3C box model).
false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
// submitBubbles/changeBubbles probed via the "on"+event property trick; c.props maps
// HTML attribute names to DOM property names; G is the per-page expando cache key.
s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
// c.data / c.removeData: per-element cache in c.cache keyed by the expando G;
// window (A) is aliased to the za stub object; element types in c.noData
// (embed/object/applet — plugin hosts) are refused to avoid IE crashes.
applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
// fn.data supports namespaced keys ("name.ns") and fires "getData"/"setData"
// trigger hooks before reading/writing the underlying c.data store.
else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
// c.queue/c.dequeue: named function queues (default "fx") stored via c.data;
// dequeue shifts and runs the next function, using the "inprogress" sentinel for fx.
a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
// fn.queue/dequeue/delay/clearQueue wrappers; the regex literals declared at the end
// back the attribute/class helpers that follow (whitespace split, \r strip, etc.).
w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
// Attribute & class manipulation: fn.attr/removeAttr/addClass/removeClass/
// toggleClass/hasClass/val, then the low-level c.attr with IE-specific
// workarounds. addClass/removeClass accept a function or a space-separated list.
cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
// toggleClass: a string toggles each named class individually; boolean/undefined
// argument saves and restores the whole className via "__className__" data.
" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
// val(): read path handles <option> value-vs-text, multi-select (returns an array),
// and the radio/checkbox "on" fallback when support.checkOn is false; the setter
// checks checkable inputs against an array and selects matching <option>s.
e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
// c.attr: low-level get/set. Prefers DOM properties for normalized names ($a =
// href|src|style are exceptions), guards the "type" property on button/input,
// special-cases form attribute nodes, tabIndex, style.cssText, and IE's
// getAttribute(name, 2) for unnormalized hrefs.
c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
// c.event: the core event system. add() normalizes namespaced types ("click.ns"),
// stores handler objects per type in the element's data cache, and binds one
// dispatching closure o() per element via addEventListener/attachEvent.
function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
// remove(): unbinds by type, ".namespace", or specific handler guid; namespace
// matching uses a RegExp built from the sorted namespace parts; when a type's
// handler list empties, the special-event teardown (or Ca) detaches the native listener.
C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
// trigger(): synthesizes a c.Event, handles exclusive ("!") events, bubbles up the
// parentNode chain, invokes inline "on"+type handlers, and finally fires the
// native default action (e.g. form.submit(), a.click suppressed) unless prevented.
e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
// handle(): per-element dispatcher — filters the stored handler list by namespace
// and honors preventDefault/stopPropagation; props lists the native event fields
// that fix() copies onto the jQuery event object.
if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
// fix(): wraps the native event, then normalizes target, relatedTarget,
// pageX/pageY (from clientX/Y + scroll offsets), which (keyCode/charCode and
// mouse buttons), and metaKey across browsers.
fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
// special-event hooks (ready/live/beforeunload); Ca detaches a native listener
// (removeEventListener vs detachEvent); c.Event is the event wrapper constructor —
// its prototype flips isDefaultPrevented/isPropagationStopped flags via Y/Z.
"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
// mouseenter/mouseleave emulation on top of mouseover/mouseout: Da walks
// relatedTarget's ancestors to detect a real boundary crossing; Ea is the
// delegated (.live) variant. Then submit-bubbling emulation for browsers where
// submit does not bubble (IE): delegated click/keypress under ".specialSubmit".
isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
// change-bubbling emulation: Fa snapshots a form control's effective value
// (checked state for radio/checkbox, selectedIndex for selects), fa fires a
// synthetic "change" when the stored "_change_data" snapshot differs.
if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
// focusin/focusout emulation via capturing focus/blur listeners (only where
// addEventListener exists); then the generated .bind()/.one() methods.
"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
// unbind/delegate/undelegate/trigger/triggerHandler, then toggle (cycles through
// the given handlers per click) and hover (mouseenter+mouseleave shorthand).
!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
// live/die: delegated event (un)registration — maps hover/focus/blur to their
// bubbling equivalents (Ga) and stores handlers under pa(type, selector) keys.
u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
// Shortcut methods (.click(), .focus(), ...) generated for every standard event
// name; IE-only onunload handler clears the event cache to avoid leaks; then the
// embedded Sizzle selector engine IIFE begins (runs through the })() below).
function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
// a(): recursive text extraction; b()/d(): directional "dirCheck" walkers along a
// relation (parentNode/previousSibling) with sizcache/sizset memoization; f is the
// master selector-chunking regex.
if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
// k(): the main Sizzle entry point — chunks the selector into parts, picks a seed
// set (right-to-left by default, ID fast path when possible), applies relative
// combinators, then filters candidates into the result list.
e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
// uniqueSort de-dupes in document order (when a sort comparator B exists);
// k.find tries the ID/NAME/TAG fast paths before falling back to all elements.
g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
// k.filter consumes the expression one token type at a time, running
// preFilter/filter passes; k.error throws on unparsable input. n = k.selectors:
// the match/leftMatch regex tables and all per-type handler tables.
1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
// relative combinators: "+" (adjacent sibling), ">" (child), "" (descendant via
// b/d parentNode walk), "~" (general sibling via previousSibling walk).
relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
// preFilter table: normalizes each token's captures before matching (CLASS adds
// padding spaces, CHILD parses the an+b expression, PSEUDO pre-compiles :not).
h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
// filters: the :enabled/:checked/:parent/:header/... pseudo predicates and the
// input-type pseudos (:text, :radio, :checkbox, ...); setFilters: positional
// pseudos (:first/:last/:even/:odd/:lt/:gt/:nth/:eq).
g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
// filter table: the per-token-type match predicates — PSEUDO dispatches to
// filters/:contains/:not, CHILD implements nth-child math with nodeIndex caching,
// ATTR implements all attribute comparison operators.
setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
// leftMatch regexes are derived from match by anchoring to the left; z()
// array-ifies NodeLists (with an IE-safe manual-copy fallback).
"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
// B: document-order sort comparator — picks compareDocumentPosition, sourceIndex
// (IE), or a createRange comparison depending on browser capability; sets the
// shared i flag when duplicates are detected.
!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
// Capability fixups done by probing live DOM: IE's getElementById-matches-name
// quirk, comment nodes leaking into getElementsByTagName("*"), the href
// attrHandle, a querySelectorAll fast path, and getElementsByClassName support.
h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
// E: contains check (compareDocumentPosition or .contains); x: is-XML-document
// test; ga: posProcess for positional selectors applied per context. The engine
// is exported onto jQuery as c.find / c.expr / c.unique / c.text / c.contains.
(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
// DOM traversal layer. Ia filters a set against a function, a node, or a selector
// string (with POS-selector fallback); then fn.find/has/not/filter/is/closest/
// index/add/andSelf. NOTE(review): the trailing c.extend({filter,dir,...}) call on
// the last line continues PAST this chunk — all lines are preserved byte-for-byte.
gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
// find(): runs Sizzle per element and de-dupes results across multiple contexts.
c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
// closest(): accepts an array of selectors (returns {selector,elem} records) or a
// single selector/POS expression, walking up parentNode until a match or the
// context boundary b.
{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
// index/add/andSelf, then the generated traversal family (parent/parents/next/
// prev/nextAll/prevAll/*Until/siblings/children/contents) built from this map of
// direction functions via the c.each wrapper that follows.
"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); | zktools | /zktools-0.2.1.tar.gz/zktools-0.2.1/docs/_build/html/_static/jquery.js | jquery.js |
* select a different prefix for underscore
*/
/* Keep underscore.js available as $u, releasing the bare "_" name so it
   can be rebound as a gettext shorthand (e.g. _ = Documentation.gettext). */
$u = _.noConflict();
/**
* make the code below compatible with browsers without
 * an installed Firebug-like debugger
if (!window.console || !console.firebug) {
var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
"dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
"profile", "profileEnd"];
window.console = {};
for (var i = 0; i < names.length; ++i)
window.console[names[i]] = function() {};
}
*/
/**
 * Small helper to URL-decode a string.  Also converts "+" to a space,
 * which decodeURIComponent alone does not do (query strings encode
 * spaces as "+").
 */
jQuery.urldecode = function(x) {
  return decodeURIComponent(x).replace(/\+/g, ' ');
};
/**
 * Small helper to URL-encode a string; a direct alias for the native
 * encodeURIComponent (note: encodes a space as "%20", not "+").
 */
jQuery.urlencode = encodeURIComponent;
/**
* This function returns the parsed url parameters of the
* current request. Multiple values per key are supported,
* it will always return arrays of strings for the value parts.
*/
jQuery.getQueryParameters = function(s) {
  // Parse the query string of the current request (or of *s*, when
  // given) into an object mapping each key to an array of string
  // values, so repeated keys are supported.
  if (typeof s == 'undefined')
    s = document.location.search;
  var pairs = s.substr(s.indexOf('?') + 1).split('&');
  var params = {};
  for (var idx = 0, count = pairs.length; idx < count; idx++) {
    var pieces = pairs[idx].split('=', 2);
    var name = jQuery.urldecode(pieces[0]);
    var val = jQuery.urldecode(pieces[1]);
    if (!(name in params))
      params[name] = [];
    params[name].push(val);
  }
  return params;
};
/**
* small function to check if an array contains
* a given item.
*/
jQuery.contains = function(arr, item) {
  // Linear membership scan; uses loose equality on purpose, matching
  // the original pre-ES5 indexOf-style check.
  var len = arr.length;
  for (var idx = 0; idx < len; idx++)
    if (arr[idx] == item)
      return true;
  return false;
};
/**
* highlight a given string on a jquery object by wrapping it in
* span elements with the given class name.
*/
jQuery.fn.highlightText = function(text, className) {
  // Recursively walk *node*, wrapping the first occurrence of *text*
  // (case-insensitive) inside each text node in a
  // <span class="className"> element.
  function highlight(node) {
    if (node.nodeType == 3) {  // text node
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      // Skip if not found, or if the parent already is a highlight span
      // (prevents re-wrapping on repeated calls).
      if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
        // Span holding just the matched substring.
        var span = document.createElement("span");
        span.className = className;
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        // Insert a text node with the trailing remainder after the
        // current node, then insert the span before it; the current
        // node is finally trimmed to the text preceding the match.
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        node.nodeValue = val.substr(0, pos);
      }
    }
    else if (!jQuery(node).is("button, select, textarea")) {
      // Recurse into children, but never into form controls, whose
      // text content must not be altered.
      jQuery.each(node.childNodes, function() {
        highlight(this);
      });
    }
  }
  return this.each(function() {
    highlight(this);
  });
};
/**
 * Small JavaScript module for the documentation.
 */
var Documentation = {

  init : function() {
    this.fixFirefoxAnchorBug();
    this.highlightSearchWords();
    this.initIndexTable();
  },

  /**
   * i18n support
   */
  TRANSLATIONS : {},
  PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
  LOCALE : 'unknown',

  // gettext and ngettext don't access this so that the functions
  // can safely bound to a different name (_ = Documentation.gettext)
  gettext : function(string) {
    var translated = Documentation.TRANSLATIONS[string];
    if (typeof translated == 'undefined')
      return string;
    return (typeof translated == 'string') ? translated : translated[0];
  },

  ngettext : function(singular, plural, n) {
    var translated = Documentation.TRANSLATIONS[singular];
    if (typeof translated == 'undefined')
      return (n == 1) ? singular : plural;
    // BUG FIX: this previously read Documentation.PLURALEXPR (no
    // underscore), which is undefined and made every pluralized
    // catalog entry throw a TypeError.
    return translated[Documentation.PLURAL_EXPR(n)];
  },

  addTranslations : function(catalog) {
    for (var key in catalog.messages)
      this.TRANSLATIONS[key] = catalog.messages[key];
    // compile the catalog's plural expression into a function of n
    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
    this.LOCALE = catalog.locale;
  },

  /**
   * add context elements like header anchor links
   */
  addContextElements : function() {
    $('div[id] > :header:first').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this headline')).
      appendTo(this);
    });
    $('dt[id]').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this definition')).
      appendTo(this);
    });
  },

  /**
   * workaround a firefox stupidity
   */
  fixFirefoxAnchorBug : function() {
    if (document.location.hash && $.browser.mozilla)
      window.setTimeout(function() {
        // re-assigning the href forces Firefox to re-scroll to the anchor
        document.location.href += '';
      }, 10);
  },

  /**
   * highlight the search words provided in the url in the text
   */
  highlightSearchWords : function() {
    var params = $.getQueryParameters();
    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
    if (terms.length) {
      var body = $('div.body');
      window.setTimeout(function() {
        $.each(terms, function() {
          body.highlightText(this.toLowerCase(), 'highlighted');
        });
      }, 10);
      $('<p class="highlight-link"><a href="javascript:Documentation.' +
        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
        .appendTo($('#searchbox'));
    }
  },

  /**
   * init the domain index toggle buttons
   */
  initIndexTable : function() {
    var togglers = $('img.toggler').click(function() {
      var src = $(this).attr('src');
      var idnum = $(this).attr('id').substr(7);
      $('tr.cg-' + idnum).toggle();
      // swap the plus/minus icon depending on the current state
      if (src.substr(-9) == 'minus.png')
        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
      else
        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
    }).css('display', '');
    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
      togglers.click();
    }
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords : function() {
    $('#searchbox .highlight-link').fadeOut(300);
    $('span.highlighted').removeClass('highlighted');
  },

  /**
   * make the url absolute
   */
  makeURL : function(relativeURL) {
    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
  },

  /**
   * get the current relative url
   */
  getCurrentURL : function() {
    var path = document.location.pathname;
    var parts = path.split(/\//);
    // pop one path component per '..' in URL_ROOT
    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
      if (this == '..')
        parts.pop();
    });
    var url = parts.join('/');
    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
  }
};
// quick alias for translations
_ = Documentation.gettext;

// initialize the module once the DOM is ready
$(document).ready(function() {
  Documentation.init();
});
// Collapsible-sidebar support: adds a toggle button to the Sphinx
// sidebar and persists the collapsed/expanded state in a cookie.
$(function() {
  // global elements used by the functions.
  // the 'sidebarbutton' element is defined as global after its
  // creation, in the add_sidebar_button function
  var bodywrapper = $('.bodywrapper');
  var sidebar = $('.sphinxsidebar');
  var sidebarwrapper = $('.sphinxsidebarwrapper');

  // for some reason, the document has no sidebar; do not run into errors
  if (!sidebar.length) return;

  // original margin-left of the bodywrapper and width of the sidebar
  // with the sidebar expanded
  var bw_margin_expanded = bodywrapper.css('margin-left');
  var ssb_width_expanded = sidebar.width();

  // margin-left of the bodywrapper and width of the sidebar
  // with the sidebar collapsed
  var bw_margin_collapsed = '.8em';
  var ssb_width_collapsed = '.8em';

  // colors used by the current theme
  var dark_color = $('.related').css('background-color');
  var light_color = $('.document').css('background-color');

  // the sidebar counts as collapsed when its content wrapper is hidden
  function sidebar_is_collapsed() {
    return sidebarwrapper.is(':not(:visible)');
  }

  // flip between the collapsed and expanded states
  function toggle_sidebar() {
    if (sidebar_is_collapsed())
      expand_sidebar();
    else
      collapse_sidebar();
  }

  // shrink the sidebar to a thin strip and remember the state in a cookie
  function collapse_sidebar() {
    sidebarwrapper.hide();
    sidebar.css('width', ssb_width_collapsed);
    bodywrapper.css('margin-left', bw_margin_collapsed);
    sidebarbutton.css({
      'margin-left': '0',
      'height': bodywrapper.height()
    });
    sidebarbutton.find('span').text('»');
    sidebarbutton.attr('title', _('Expand sidebar'));
    document.cookie = 'sidebar=collapsed';
  }

  // restore the sidebar to its original width and remember the state
  function expand_sidebar() {
    bodywrapper.css('margin-left', bw_margin_expanded);
    sidebar.css('width', ssb_width_expanded);
    sidebarwrapper.show();
    sidebarbutton.css({
      'margin-left': ssb_width_expanded-12,
      'height': bodywrapper.height()
    });
    sidebarbutton.find('span').text('«');
    sidebarbutton.attr('title', _('Collapse sidebar'));
    document.cookie = 'sidebar=expanded';
  }

  // create and style the clickable toggle strip next to the sidebar
  function add_sidebar_button() {
    sidebarwrapper.css({
      'float': 'left',
      'margin-right': '0',
      'width': ssb_width_expanded - 28
    });
    // create the button
    sidebar.append(
      '<div id="sidebarbutton"><span>«</span></div>'
    );
    var sidebarbutton = $('#sidebarbutton');
    light_color = sidebarbutton.css('background-color');
    // find the height of the viewport to center the '<<' in the page
    var viewport_height;
    if (window.innerHeight)
      viewport_height = window.innerHeight;
    else
      viewport_height = $(window).height();
    sidebarbutton.find('span').css({
      'display': 'block',
      'margin-top': (viewport_height - sidebar.position().top - 20) / 2
    });

    sidebarbutton.click(toggle_sidebar);
    sidebarbutton.attr('title', _('Collapse sidebar'));
    sidebarbutton.css({
      'color': '#FFFFFF',
      'border-left': '1px solid ' + dark_color,
      'font-size': '1.2em',
      'cursor': 'pointer',
      'height': bodywrapper.height(),
      'padding-top': '1px',
      'margin-left': ssb_width_expanded - 12
    });

    sidebarbutton.hover(
      function () {
        $(this).css('background-color', dark_color);
      },
      function () {
        $(this).css('background-color', light_color);
      }
    );
  }

  // re-apply the collapsed/expanded state saved in the 'sidebar' cookie
  function set_position_from_cookie() {
    if (!document.cookie)
      return;
    var items = document.cookie.split(';');
    for(var k=0; k<items.length; k++) {
      var key_val = items[k].split('=');
      var key = key_val[0];
      if (key == 'sidebar') {
        var value = key_val[1];
        if ((value == 'collapsed') && (!sidebar_is_collapsed()))
          collapse_sidebar();
        else if ((value == 'expanded') && (sidebar_is_collapsed()))
          expand_sidebar();
      }
    }
  }

  add_sidebar_button();
  var sidebarbutton = $('#sidebarbutton');
  set_position_from_cookie();
});
// underscore.js 0.5.5 — vendored, MINIFIED third-party library.
// Do not hand-edit; replace wholesale with a new upstream build to upgrade.
(function(){var j=this,n=j._,i=function(a){this._wrapped=a},m=typeof StopIteration!=="undefined"?StopIteration:"__break__",b=j._=function(a){return new i(a)};if(typeof exports!=="undefined")exports._=b;var k=Array.prototype.slice,o=Array.prototype.unshift,p=Object.prototype.toString,q=Object.prototype.hasOwnProperty,r=Object.prototype.propertyIsEnumerable;b.VERSION="0.5.5";b.each=function(a,c,d){try{if(a.forEach)a.forEach(c,d);else if(b.isArray(a)||b.isArguments(a))for(var e=0,f=a.length;e<f;e++)c.call(d,
a[e],e,a);else{var g=b.keys(a);f=g.length;for(e=0;e<f;e++)c.call(d,a[g[e]],g[e],a)}}catch(h){if(h!=m)throw h;}return a};b.map=function(a,c,d){if(a&&b.isFunction(a.map))return a.map(c,d);var e=[];b.each(a,function(f,g,h){e.push(c.call(d,f,g,h))});return e};b.reduce=function(a,c,d,e){if(a&&b.isFunction(a.reduce))return a.reduce(b.bind(d,e),c);b.each(a,function(f,g,h){c=d.call(e,c,f,g,h)});return c};b.reduceRight=function(a,c,d,e){if(a&&b.isFunction(a.reduceRight))return a.reduceRight(b.bind(d,e),c);
var f=b.clone(b.toArray(a)).reverse();b.each(f,function(g,h){c=d.call(e,c,g,h,a)});return c};b.detect=function(a,c,d){var e;b.each(a,function(f,g,h){if(c.call(d,f,g,h)){e=f;b.breakLoop()}});return e};b.select=function(a,c,d){if(a&&b.isFunction(a.filter))return a.filter(c,d);var e=[];b.each(a,function(f,g,h){c.call(d,f,g,h)&&e.push(f)});return e};b.reject=function(a,c,d){var e=[];b.each(a,function(f,g,h){!c.call(d,f,g,h)&&e.push(f)});return e};b.all=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.every))return a.every(c,
d);var e=true;b.each(a,function(f,g,h){(e=e&&c.call(d,f,g,h))||b.breakLoop()});return e};b.any=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.some))return a.some(c,d);var e=false;b.each(a,function(f,g,h){if(e=c.call(d,f,g,h))b.breakLoop()});return e};b.include=function(a,c){if(b.isArray(a))return b.indexOf(a,c)!=-1;var d=false;b.each(a,function(e){if(d=e===c)b.breakLoop()});return d};b.invoke=function(a,c){var d=b.rest(arguments,2);return b.map(a,function(e){return(c?e[c]:e).apply(e,d)})};b.pluck=
function(a,c){return b.map(a,function(d){return d[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);var e={computed:-Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g>=e.computed&&(e={value:f,computed:g})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);var e={computed:Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g<e.computed&&(e={value:f,computed:g})});return e.value};b.sortBy=function(a,c,d){return b.pluck(b.map(a,
function(e,f,g){return{value:e,criteria:c.call(d,e,f,g)}}).sort(function(e,f){e=e.criteria;f=f.criteria;return e<f?-1:e>f?1:0}),"value")};b.sortedIndex=function(a,c,d){d=d||b.identity;for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?(e=g+1):(f=g)}return e};b.toArray=function(a){if(!a)return[];if(a.toArray)return a.toArray();if(b.isArray(a))return a;if(b.isArguments(a))return k.call(a);return b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=function(a,c,d){return c&&!d?k.call(a,
0,c):a[0]};b.rest=function(a,c,d){return k.call(a,b.isUndefined(c)||d?1:c)};b.last=function(a){return a[a.length-1]};b.compact=function(a){return b.select(a,function(c){return!!c})};b.flatten=function(a){return b.reduce(a,[],function(c,d){if(b.isArray(d))return c.concat(b.flatten(d));c.push(d);return c})};b.without=function(a){var c=b.rest(arguments);return b.select(a,function(d){return!b.include(c,d)})};b.uniq=function(a,c){return b.reduce(a,[],function(d,e,f){if(0==f||(c===true?b.last(d)!=e:!b.include(d,
e)))d.push(e);return d})};b.intersect=function(a){var c=b.rest(arguments);return b.select(b.uniq(a),function(d){return b.all(c,function(e){return b.indexOf(e,d)>=0})})};b.zip=function(){for(var a=b.toArray(arguments),c=b.max(b.pluck(a,"length")),d=new Array(c),e=0;e<c;e++)d[e]=b.pluck(a,String(e));return d};b.indexOf=function(a,c){if(a.indexOf)return a.indexOf(c);for(var d=0,e=a.length;d<e;d++)if(a[d]===c)return d;return-1};b.lastIndexOf=function(a,c){if(a.lastIndexOf)return a.lastIndexOf(c);for(var d=
a.length;d--;)if(a[d]===c)return d;return-1};b.range=function(a,c,d){var e=b.toArray(arguments),f=e.length<=1;a=f?0:e[0];c=f?e[0]:e[1];d=e[2]||1;e=Math.ceil((c-a)/d);if(e<=0)return[];e=new Array(e);f=a;for(var g=0;1;f+=d){if((d>0?f-c:c-f)>=0)return e;e[g++]=f}};b.bind=function(a,c){var d=b.rest(arguments,2);return function(){return a.apply(c||j,d.concat(b.toArray(arguments)))}};b.bindAll=function(a){var c=b.rest(arguments);if(c.length==0)c=b.functions(a);b.each(c,function(d){a[d]=b.bind(a[d],a)});
return a};b.delay=function(a,c){var d=b.rest(arguments,2);return setTimeout(function(){return a.apply(a,d)},c)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(b.rest(arguments)))};b.wrap=function(a,c){return function(){var d=[a].concat(b.toArray(arguments));return c.apply(c,d)}};b.compose=function(){var a=b.toArray(arguments);return function(){for(var c=b.toArray(arguments),d=a.length-1;d>=0;d--)c=[a[d].apply(this,c)];return c[0]}};b.keys=function(a){if(b.isArray(a))return b.range(0,a.length);
var c=[];for(var d in a)q.call(a,d)&&c.push(d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=function(a){return b.select(b.keys(a),function(c){return b.isFunction(a[c])}).sort()};b.extend=function(a,c){for(var d in c)a[d]=c[d];return a};b.clone=function(a){if(b.isArray(a))return a.slice(0);return b.extend({},a)};b.tap=function(a,c){c(a);return a};b.isEqual=function(a,c){if(a===c)return true;var d=typeof a;if(d!=typeof c)return false;if(a==c)return true;if(!a&&c||a&&!c)return false;
if(a.isEqual)return a.isEqual(c);if(b.isDate(a)&&b.isDate(c))return a.getTime()===c.getTime();if(b.isNaN(a)&&b.isNaN(c))return true;if(b.isRegExp(a)&&b.isRegExp(c))return a.source===c.source&&a.global===c.global&&a.ignoreCase===c.ignoreCase&&a.multiline===c.multiline;if(d!=="object")return false;if(a.length&&a.length!==c.length)return false;d=b.keys(a);var e=b.keys(c);if(d.length!=e.length)return false;for(var f in a)if(!b.isEqual(a[f],c[f]))return false;return true};b.isEmpty=function(a){return b.keys(a).length==
0};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=function(a){return!!(a&&a.concat&&a.unshift)};b.isArguments=function(a){return a&&b.isNumber(a.length)&&!b.isArray(a)&&!r.call(a,"length")};b.isFunction=function(a){return!!(a&&a.constructor&&a.call&&a.apply)};b.isString=function(a){return!!(a===""||a&&a.charCodeAt&&a.substr)};b.isNumber=function(a){return p.call(a)==="[object Number]"};b.isDate=function(a){return!!(a&&a.getTimezoneOffset&&a.setUTCFullYear)};b.isRegExp=function(a){return!!(a&&
a.test&&a.exec&&(a.ignoreCase||a.ignoreCase===false))};b.isNaN=function(a){return b.isNumber(a)&&isNaN(a)};b.isNull=function(a){return a===null};b.isUndefined=function(a){return typeof a=="undefined"};b.noConflict=function(){j._=n;return this};b.identity=function(a){return a};b.breakLoop=function(){throw m;};var s=0;b.uniqueId=function(a){var c=s++;return a?a+c:c};b.template=function(a,c){a=new Function("obj","var p=[],print=function(){p.push.apply(p,arguments);};with(obj){p.push('"+a.replace(/[\r\t\n]/g,
" ").replace(/'(?=[^%]*%>)/g,"\t").split("'").join("\\'").split("\t").join("'").replace(/<%=(.+?)%>/g,"',$1,'").split("<%").join("');").split("%>").join("p.push('")+"');}return p.join('');");return c?a(c):a};b.forEach=b.each;b.foldl=b.inject=b.reduce;b.foldr=b.reduceRight;b.filter=b.select;b.every=b.all;b.some=b.any;b.head=b.first;b.tail=b.rest;b.methods=b.functions;var l=function(a,c){return c?b(a).chain():a};b.each(b.functions(b),function(a){var c=b[a];i.prototype[a]=function(){var d=b.toArray(arguments);
o.call(d,this._wrapped);return l(c.apply(b,d),this._chain)}});b.each(["pop","push","reverse","shift","sort","splice","unshift"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){c.apply(this._wrapped,arguments);return l(this._wrapped,this._chain)}});b.each(["concat","join","slice"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){return l(c.apply(this._wrapped,arguments),this._chain)}});i.prototype.chain=function(){this._chain=true;return this};i.prototype.value=function(){return this._wrapped}})();
// jQuery "autogrow" plugin: while a textarea has focus, poll it every
// 500 ms and grow its height to fit the wrapped content.
(function($) {
  $.fn.autogrow = function() {
    return this.each(function() {
      var textarea = this;

      // size once up front, then keep resizing while focused
      $.fn.autogrow.resize(textarea);
      $(textarea)
        .focus(function() {
          textarea.interval = setInterval(function() {
            $.fn.autogrow.resize(textarea);
          }, 500);
        })
        .blur(function() {
          clearInterval(textarea.interval);
        });
    });
  };

  // Recompute the height of `textarea` from its wrapped line count.
  $.fn.autogrow.resize = function(textarea) {
    var lineHeight = parseInt($(textarea).css('line-height'), 10);
    var lines = textarea.value.split('\n');
    var columns = textarea.cols;
    var lineCount = 0;
    $.each(lines, function() {
      // each logical line occupies at least one visual row
      lineCount += Math.ceil(this.length / columns) || 1;
    });
    // one extra row of slack so the caret never scrolls out of view
    var height = lineHeight * (lineCount + 1);
    $(textarea).css('height', height);
  };
})(jQuery);
// Sphinx websupport: inline commenting/voting UI (closure continues below).
(function($) {
  // comp: comparator used to sort comments; by: current sort-key name.
  var comp, by;
function init() {
initEvents();
initComparator();
}
function initEvents() {
$('a.comment-close').live("click", function(event) {
event.preventDefault();
hide($(this).attr('id').substring(2));
});
$('a.vote').live("click", function(event) {
event.preventDefault();
handleVote($(this));
});
$('a.reply').live("click", function(event) {
event.preventDefault();
openReply($(this).attr('id').substring(2));
});
$('a.close-reply').live("click", function(event) {
event.preventDefault();
closeReply($(this).attr('id').substring(2));
});
$('a.sort-option').live("click", function(event) {
event.preventDefault();
handleReSort($(this));
});
$('a.show-proposal').live("click", function(event) {
event.preventDefault();
showProposal($(this).attr('id').substring(2));
});
$('a.hide-proposal').live("click", function(event) {
event.preventDefault();
hideProposal($(this).attr('id').substring(2));
});
$('a.show-propose-change').live("click", function(event) {
event.preventDefault();
showProposeChange($(this).attr('id').substring(2));
});
$('a.hide-propose-change').live("click", function(event) {
event.preventDefault();
hideProposeChange($(this).attr('id').substring(2));
});
$('a.accept-comment').live("click", function(event) {
event.preventDefault();
acceptComment($(this).attr('id').substring(2));
});
$('a.delete-comment').live("click", function(event) {
event.preventDefault();
deleteComment($(this).attr('id').substring(2));
});
$('a.comment-markup').live("click", function(event) {
event.preventDefault();
toggleCommentMarkupBox($(this).attr('id').substring(2));
});
}
/**
* Set comp, which is a comparator function used for sorting and
* inserting comments into the list.
*/
function setComparator() {
// If the first three letters are "asc", sort in ascending order
// and remove the prefix.
if (by.substring(0,3) == 'asc') {
var i = by.substring(3);
comp = function(a, b) { return a[i] - b[i]; };
} else {
// Otherwise sort in descending order.
comp = function(a, b) { return b[by] - a[by]; };
}
// Reset link styles and format the selected sort option.
$('a.sel').attr('href', '#').removeClass('sel');
$('a.by' + by).removeAttr('href').addClass('sel');
}
/**
* Create a comp function. If the user has preferences stored in
* the sortBy cookie, use those, otherwise use the default.
*/
function initComparator() {
by = 'rating'; // Default to sort by rating.
// If the sortBy cookie is set, use that instead.
if (document.cookie.length > 0) {
var start = document.cookie.indexOf('sortBy=');
if (start != -1) {
start = start + 7;
var end = document.cookie.indexOf(";", start);
if (end == -1) {
end = document.cookie.length;
by = unescape(document.cookie.substring(start, end));
}
}
}
setComparator();
}
/**
* Show a comment div.
*/
function show(id) {
$('#ao' + id).hide();
$('#ah' + id).show();
var context = $.extend({id: id}, opts);
var popup = $(renderTemplate(popupTemplate, context)).hide();
popup.find('textarea[name="proposal"]').hide();
popup.find('a.by' + by).addClass('sel');
var form = popup.find('#cf' + id);
form.submit(function(event) {
event.preventDefault();
addComment(form);
});
$('#s' + id).after(popup);
popup.slideDown('fast', function() {
getComments(id);
});
}
/**
* Hide a comment div.
*/
function hide(id) {
$('#ah' + id).hide();
$('#ao' + id).show();
var div = $('#sc' + id);
div.slideUp('fast', function() {
div.remove();
});
}
/**
* Perform an ajax request to get comments for a node
* and insert the comments into the comments tree.
*/
function getComments(id) {
$.ajax({
type: 'GET',
url: opts.getCommentsURL,
data: {node: id},
success: function(data, textStatus, request) {
var ul = $('#cl' + id);
var speed = 100;
$('#cf' + id)
.find('textarea[name="proposal"]')
.data('source', data.source);
if (data.comments.length === 0) {
ul.html('<li>No comments yet.</li>');
ul.data('empty', true);
} else {
// If there are comments, sort them and put them in the list.
var comments = sortComments(data.comments);
speed = data.comments.length * 100;
appendComments(comments, ul);
ul.data('empty', false);
}
$('#cn' + id).slideUp(speed + 200);
ul.slideDown(speed);
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem retrieving the comments.');
},
dataType: 'json'
});
}
/**
* Add a comment via ajax and insert the comment into the comment tree.
*/
function addComment(form) {
var node_id = form.find('input[name="node"]').val();
var parent_id = form.find('input[name="parent"]').val();
var text = form.find('textarea[name="comment"]').val();
var proposal = form.find('textarea[name="proposal"]').val();
if (text == '') {
showError('Please enter a comment.');
return;
}
// Disable the form that is being submitted.
form.find('textarea,input').attr('disabled', 'disabled');
// Send the comment to the server.
$.ajax({
type: "POST",
url: opts.addCommentURL,
dataType: 'json',
data: {
node: node_id,
parent: parent_id,
text: text,
proposal: proposal
},
success: function(data, textStatus, error) {
// Reset the form.
if (node_id) {
hideProposeChange(node_id);
}
form.find('textarea')
.val('')
.add(form.find('input'))
.removeAttr('disabled');
var ul = $('#cl' + (node_id || parent_id));
if (ul.data('empty')) {
$(ul).empty();
ul.data('empty', false);
}
insertComment(data.comment);
var ao = $('#ao' + node_id);
ao.find('img').attr({'src': opts.commentBrightImage});
if (node_id) {
// if this was a "root" comment, remove the commenting box
// (the user can get it back by reopening the comment popup)
$('#ca' + node_id).slideUp();
}
},
error: function(request, textStatus, error) {
form.find('textarea,input').removeAttr('disabled');
showError('Oops, there was a problem adding the comment.');
}
});
}
/**
* Recursively append comments to the main comment list and children
* lists, creating the comment tree.
*/
function appendComments(comments, ul) {
$.each(comments, function() {
var div = createCommentDiv(this);
ul.append($(document.createElement('li')).html(div));
appendComments(this.children, div.find('ul.comment-children'));
// To avoid stagnating data, don't store the comments children in data.
this.children = null;
div.data('comment', this);
});
}
/**
* After adding a new comment, it must be inserted in the correct
* location in the comment tree.
*/
function insertComment(comment) {
var div = createCommentDiv(comment);
// To avoid stagnating data, don't store the comments children in data.
comment.children = null;
div.data('comment', comment);
var ul = $('#cl' + (comment.node || comment.parent));
var siblings = getChildren(ul);
var li = $(document.createElement('li'));
li.hide();
// Determine where in the parents children list to insert this comment.
for(i=0; i < siblings.length; i++) {
if (comp(comment, siblings[i]) <= 0) {
$('#cd' + siblings[i].id)
.parent()
.before(li.html(div));
li.slideDown('fast');
return;
}
}
// If we get here, this comment rates lower than all the others,
// or it is the only comment in the list.
ul.append(li.html(div));
li.slideDown('fast');
}
function acceptComment(id) {
$.ajax({
type: 'POST',
url: opts.acceptCommentURL,
data: {id: id},
success: function(data, textStatus, request) {
$('#cm' + id).fadeOut('fast');
$('#cd' + id).removeClass('moderate');
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem accepting the comment.');
}
});
}
function deleteComment(id) {
$.ajax({
type: 'POST',
url: opts.deleteCommentURL,
data: {id: id},
success: function(data, textStatus, request) {
var div = $('#cd' + id);
if (data == 'delete') {
// Moderator mode: remove the comment and all children immediately
div.slideUp('fast', function() {
div.remove();
});
return;
}
// User mode: only mark the comment as deleted
div
.find('span.user-id:first')
.text('[deleted]').end()
.find('div.comment-text:first')
.text('[deleted]').end()
.find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
.remove();
var comment = div.data('comment');
comment.username = '[deleted]';
comment.text = '[deleted]';
div.data('comment', comment);
},
error: function(request, textStatus, error) {
showError('Oops, there was a problem deleting the comment.');
}
});
}
function showProposal(id) {
$('#sp' + id).hide();
$('#hp' + id).show();
$('#pr' + id).slideDown('fast');
}
function hideProposal(id) {
$('#hp' + id).hide();
$('#sp' + id).show();
$('#pr' + id).slideUp('fast');
}
function showProposeChange(id) {
$('#pc' + id).hide();
$('#hc' + id).show();
var textarea = $('#pt' + id);
textarea.val(textarea.data('source'));
$.fn.autogrow.resize(textarea[0]);
textarea.slideDown('fast');
}
function hideProposeChange(id) {
$('#hc' + id).hide();
$('#pc' + id).show();
var textarea = $('#pt' + id);
textarea.val('').removeAttr('disabled');
textarea.slideUp('fast');
}
function toggleCommentMarkupBox(id) {
$('#mb' + id).toggle();
}
/** Handle when the user clicks on a sort by link. */
function handleReSort(link) {
var classes = link.attr('class').split(/\s+/);
for (var i=0; i<classes.length; i++) {
if (classes[i] != 'sort-option') {
by = classes[i].substring(2);
}
}
setComparator();
// Save/update the sortBy cookie.
var expiration = new Date();
expiration.setDate(expiration.getDate() + 365);
document.cookie= 'sortBy=' + escape(by) +
';expires=' + expiration.toUTCString();
$('ul.comment-ul').each(function(index, ul) {
var comments = getChildren($(ul), true);
comments = sortComments(comments);
appendComments(comments, $(ul).empty());
});
}
/**
* Function to process a vote when a user clicks an arrow.
*/
function handleVote(link) {
if (!opts.voting) {
showError("You'll need to login to vote.");
return;
}
var id = link.attr('id');
if (!id) {
// Didn't click on one of the voting arrows.
return;
}
// If it is an unvote, the new vote value is 0,
// Otherwise it's 1 for an upvote, or -1 for a downvote.
var value = 0;
if (id.charAt(1) != 'u') {
value = id.charAt(0) == 'u' ? 1 : -1;
}
// The data to be sent to the server.
var d = {
comment_id: id.substring(2),
value: value
};
// Swap the vote and unvote links.
link.hide();
$('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
.show();
// The div the comment is displayed in.
var div = $('div#cd' + d.comment_id);
var data = div.data('comment');
// If this is not an unvote, and the other vote arrow has
// already been pressed, unpress it.
if ((d.value !== 0) && (data.vote === d.value * -1)) {
$('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
$('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
}
// Update the comments rating in the local data.
data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
data.vote = d.value;
div.data('comment', data);
// Change the rating text.
div.find('.rating:first')
.text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));
// Send the vote information to the server.
$.ajax({
type: "POST",
url: opts.processVoteURL,
data: d,
error: function(request, textStatus, error) {
showError('Oops, there was a problem casting that vote.');
}
});
}
/**
* Open a reply form used to reply to an existing comment.
*/
function openReply(id) {
// Swap out the reply link for the hide link
$('#rl' + id).hide();
$('#cr' + id).show();
// Add the reply li to the children ul.
var div = $(renderTemplate(replyTemplate, {id: id})).hide();
$('#cl' + id)
.prepend(div)
// Setup the submit handler for the reply form.
.find('#rf' + id)
.submit(function(event) {
event.preventDefault();
addComment($('#rf' + id));
closeReply(id);
})
.find('input[type=button]')
.click(function() {
closeReply(id);
});
div.slideDown('fast', function() {
$('#rf' + id).find('textarea').focus();
});
}
/**
* Close the reply form opened with openReply.
*/
function closeReply(id) {
// Remove the reply div from the DOM.
$('#rd' + id).slideUp('fast', function() {
$(this).remove();
});
// Swap out the hide link for the reply link
$('#cr' + id).hide();
$('#rl' + id).show();
}
/**
* Recursively sort a tree of comments using the comp comparator.
*/
function sortComments(comments) {
comments.sort(comp);
$.each(comments, function() {
this.children = sortComments(this.children);
});
return comments;
}
/**
* Get the children comments from a ul. If recursive is true,
* recursively include childrens' children.
*/
function getChildren(ul, recursive) {
var children = [];
ul.children().children("[id^='cd']")
.each(function() {
var comment = $(this).data('comment');
if (recursive)
comment.children = getChildren($(this).find('#cl' + comment.id), true);
children.push(comment);
});
return children;
}
/** Create a div to display a comment in. */
function createCommentDiv(comment) {
if (!comment.displayed && !opts.moderator) {
return $('<div class="moderate">Thank you! Your comment will show up '
+ 'once it is has been approved by a moderator.</div>');
}
// Prettify the comment rating.
comment.pretty_rating = comment.rating + ' point' +
(comment.rating == 1 ? '' : 's');
// Make a class (for displaying not yet moderated comments differently)
comment.css_class = comment.displayed ? '' : ' moderate';
// Create a div for this comment.
var context = $.extend({}, opts, comment);
var div = $(renderTemplate(commentTemplate, context));
// If the user has voted on this comment, highlight the correct arrow.
if (comment.vote) {
var direction = (comment.vote == 1) ? 'u' : 'd';
div.find('#' + direction + 'v' + comment.id).hide();
div.find('#' + direction + 'u' + comment.id).show();
}
if (opts.moderator || comment.text != '[deleted]') {
div.find('a.reply').show();
if (comment.proposal_diff)
div.find('#sp' + comment.id).show();
if (opts.moderator && !comment.displayed)
div.find('#cm' + comment.id).show();
if (opts.moderator || (opts.username == comment.username))
div.find('#dc' + comment.id).show();
}
return div;
}
/**
* A simple template renderer. Placeholders such as <%id%> are replaced
* by context['id'] with items being escaped. Placeholders such as <#id#>
* are not escaped.
*/
function renderTemplate(template, context) {
var esc = $(document.createElement('div'));
function handle(ph, escape) {
var cur = context;
$.each(ph.split('.'), function() {
cur = cur[this];
});
return escape ? esc.text(cur || "").html() : cur;
}
return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
return handle(arguments[2], arguments[1] == '%' ? true : false);
});
}
/** Flash an error message briefly. */
function showError(message) {
$(document.createElement('div')).attr({'class': 'popup-error'})
.append($(document.createElement('div'))
.attr({'class': 'error-message'}).text(message))
.appendTo('body')
.fadeIn("slow")
.delay(2000)
.fadeOut("slow");
}
/** Add a link the user uses to open the comments popup. */
$.fn.comment = function() {
  return this.each(function() {
    // Element ids are of the form "s<id>"; strip the leading letter.
    var id = $(this).attr('id').substring(1);
    var count = COMMENT_METADATA[id];
    var title = count + ' comment' + (count == 1 ? '' : 's');
    // Use the bright icon when the node already has comments.
    var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
    var addcls = count == 0 ? ' nocomment' : '';

    $(this)
      .append(
        // "ao<id>": anchor that opens the comment popup for this node.
        $(document.createElement('a')).attr({
          href: '#',
          'class': 'sphinx-comment-open' + addcls,
          id: 'ao' + id
        })
          .append($(document.createElement('img')).attr({
            src: image,
            alt: 'comment',
            title: title
          }))
          .click(function(event) {
            event.preventDefault();
            show($(this).attr('id').substring(2));
          })
      )
      .append(
        // "ah<id>": anchor that closes the popup; hidden until opened.
        $(document.createElement('a')).attr({
          href: '#',
          'class': 'sphinx-comment-close hidden',
          id: 'ah' + id
        })
          .append($(document.createElement('img')).attr({
            src: opts.closeCommentImage,
            alt: 'close',
            title: 'close'
          }))
          .click(function(event) {
            event.preventDefault();
            hide($(this).attr('id').substring(2));
          })
      );
  });
};
// Default configuration for the comment subsystem: service endpoints,
// static image locations and feature flags.  Pages can override any entry
// by defining COMMENT_OPTIONS before this script is evaluated.
var opts = {
  processVoteURL: '/_process_vote',
  addCommentURL: '/_add_comment',
  getCommentsURL: '/_get_comments',
  acceptCommentURL: '/_accept_comment',
  deleteCommentURL: '/_delete_comment',
  commentImage: '/static/_static/comment.png',
  closeCommentImage: '/static/_static/comment-close.png',
  loadingImage: '/static/_static/ajax-loader.gif',
  commentBrightImage: '/static/_static/comment-bright.png',
  upArrow: '/static/_static/up.png',
  downArrow: '/static/_static/down.png',
  upArrowPressed: '/static/_static/up-pressed.png',
  downArrowPressed: '/static/_static/down-pressed.png',
  voting: false,
  moderator: false
};

// Merge page-provided overrides (if any) into the defaults.
if (typeof COMMENT_OPTIONS != "undefined") {
  opts = jQuery.extend(opts, COMMENT_OPTIONS);
}
// Markup for the whole comments popup attached to a paragraph; placeholders
// like <%id%> are substituted by renderTemplate().
var popupTemplate = '\
  <div class="sphinx-comments" id="sc<%id%>">\
    <p class="sort-options">\
      Sort by:\
      <a href="#" class="sort-option byrating">best rated</a>\
      <a href="#" class="sort-option byascage">newest</a>\
      <a href="#" class="sort-option byage">oldest</a>\
    </p>\
    <div class="comment-header">Comments</div>\
    <div class="comment-loading" id="cn<%id%>">\
      loading comments... <img src="<%loadingImage%>" alt="" /></div>\
    <ul id="cl<%id%>" class="comment-ul"></ul>\
    <div id="ca<%id%>">\
    <p class="add-a-comment">Add a comment\
      (<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
    <div class="comment-markup-box" id="mb<%id%>">\
      reStructured text markup: <i>*emph*</i>, <b>**strong**</b>, \
      <tt>``code``</tt>, \
      code blocks: <tt>::</tt> and an indented block after blank line</div>\
    <form method="post" id="cf<%id%>" class="comment-form" action="">\
      <textarea name="comment" cols="80"></textarea>\
      <p class="propose-button">\
        <a href="#" id="pc<%id%>" class="show-propose-change">\
          Propose a change ▹\
        </a>\
        <a href="#" id="hc<%id%>" class="hide-propose-change">\
          Propose a change ▿\
        </a>\
      </p>\
      <textarea name="proposal" id="pt<%id%>" cols="80"\
                spellcheck="false"></textarea>\
      <input type="submit" value="Add comment" />\
      <input type="hidden" name="node" value="<%id%>" />\
      <input type="hidden" name="parent" value="" />\
    </form>\
  </div>\
</div>';

// Markup for one rendered comment: vote arrows, tagline, body, actions.
// NOTE(review): this template appears to contain one more closing </div>
// than opening <div> tags (browsers tolerate it) -- confirm before cleaning.
var commentTemplate = '\
  <div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
    <div class="vote">\
      <div class="arrow">\
        <a href="#" id="uv<%id%>" class="vote" title="vote up">\
          <img src="<%upArrow%>" />\
        </a>\
        <a href="#" id="uu<%id%>" class="un vote" title="vote up">\
          <img src="<%upArrowPressed%>" />\
        </a>\
      </div>\
      <div class="arrow">\
        <a href="#" id="dv<%id%>" class="vote" title="vote down">\
          <img src="<%downArrow%>" id="da<%id%>" />\
        </a>\
        <a href="#" id="du<%id%>" class="un vote" title="vote down">\
          <img src="<%downArrowPressed%>" />\
        </a>\
      </div>\
    </div>\
    <div class="comment-content">\
      <p class="tagline comment">\
        <span class="user-id"><%username%></span>\
        <span class="rating"><%pretty_rating%></span>\
        <span class="delta"><%time.delta%></span>\
      </p>\
      <div class="comment-text comment"><#text#></div>\
      <p class="comment-opts comment">\
        <a href="#" class="reply hidden" id="rl<%id%>">reply ▹</a>\
        <a href="#" class="close-reply" id="cr<%id%>">reply ▿</a>\
        <a href="#" id="sp<%id%>" class="show-proposal">proposal ▹</a>\
        <a href="#" id="hp<%id%>" class="hide-proposal">proposal ▿</a>\
        <a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
        <span id="cm<%id%>" class="moderation hidden">\
          <a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
        </span>\
      </p>\
      <pre class="proposal" id="pr<%id%>">\
        <#proposal_diff#>\
      </pre>\
      <ul class="comment-children" id="cl<%id%>"></ul>\
    </div>\
    <div class="clearleft"></div>\
  </div>\
</div>';

// Markup for the inline reply form inserted under a comment.
var replyTemplate = '\
  <li>\
    <div class="reply-div" id="rd<%id%>">\
      <form id="rf<%id%>">\
        <textarea name="comment" cols="80"></textarea>\
        <input type="submit" value="Add reply" />\
        <input type="button" value="Cancel" />\
        <input type="hidden" name="parent" value="<%id%>" />\
        <input type="hidden" name="node" value="" />\
      </form>\
    </div>\
  </li>';
// Wire up the comment UI once the DOM is ready.
$(document).ready(function() {
  init();
});
})(jQuery);
// Page-level hooks: comment anchors, search-term highlighting, and
// deep-linking straight to a comment popup via the URL fragment.
$(document).ready(function() {
  // add comment anchors for all paragraphs that are commentable
  $('.sphinx-has-comment').comment();

  // highlight search words in search results
  $("div.context").each(function() {
    var params = $.getQueryParameters();
    var terms = (params.q) ? params.q[0].split(/\s+/) : [];
    var result = $(this);
    $.each(terms, function() {
      result.highlightText(this.toLowerCase(), 'highlighted');
    });
  });

  // directly open comment window if requested via a "#comment-<id>" anchor
  var anchor = document.location.hash;
  if (anchor.substring(0, 9) == '#comment-') {
    $('#ao' + anchor.substring(9)).click();
    document.location.hash = '#s' + anchor.substring(9);
  }
});
* helper function to return a node containing the
* search summary for a given text. keywords is a list
* of stemmed words, hlwords is the list of normal, unstemmed
 * words. the first one is used to find the occurrence, the
* latter for highlighting it.
*/
jQuery.makeSearchSummary = function(text, keywords, hlwords) {
  var textLower = text.toLowerCase();
  var start = 0;
  // Centre the excerpt on the last keyword occurrence found.
  $.each(keywords, function() {
    var i = textLower.indexOf(this.toLowerCase());
    if (i > -1)
      start = i;
  });
  // Show some context before the match, clamped to the start of the text.
  start = Math.max(start - 120, 0);
  // Bug fix: only append a trailing ellipsis when the 240-char excerpt
  // actually stops before the end of the text.  The old expression used
  // `(start + 240 - text.length)` as a truthiness test, which added '...'
  // whenever the difference was nonzero -- including when the excerpt
  // already extended past the end of the text.
  var excerpt = ((start > 0) ? '...' : '') +
    $.trim(text.substr(start, 240)) +
    ((start + 240 < text.length) ? '...' : '');
  var rv = $('<div class="context"></div>').text(excerpt);
  $.each(hlwords, function() {
    rv = rv.highlightText(this, 'highlighted');
  });
  return rv;
}
/**
* Porter Stemmer
*/
// Porter stemming algorithm: reduces English words to their stems so that,
// e.g., "searching" and "searched" resolve to the same index entry.
var Stemmer = function() {

  // Step 2 suffix -> replacement mappings.
  var step2list = {
    ational: 'ate',
    tional: 'tion',
    enci: 'ence',
    anci: 'ance',
    izer: 'ize',
    bli: 'ble',
    alli: 'al',
    entli: 'ent',
    eli: 'e',
    ousli: 'ous',
    ization: 'ize',
    ation: 'ate',
    ator: 'ate',
    alism: 'al',
    iveness: 'ive',
    fulness: 'ful',
    ousness: 'ous',
    aliti: 'al',
    iviti: 'ive',
    biliti: 'ble',
    logi: 'log'
  };

  // Step 3 suffix -> replacement mappings.
  var step3list = {
    icate: 'ic',
    ative: '',
    alize: 'al',
    iciti: 'ic',
    ical: 'ic',
    ful: '',
    ness: ''
  };

  // Regex fragments describing consonant/vowel sequences used to compute
  // the Porter "measure" m of a stem.
  var c = "[^aeiou]";          // consonant
  var v = "[aeiouy]";          // vowel
  var C = c + "[^aeiouy]*";    // consonant sequence
  var V = v + "[aeiou]*";      // vowel sequence

  var mgr0 = "^(" + C + ")?" + V + C;                      // [C]VC... is m>0
  var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$";    // [C]VC[V] is m=1
  var mgr1 = "^(" + C + ")?" + V + C + V + C;              // [C]VCVC... is m>1
  var s_v = "^(" + C + ")?" + v;                           // vowel in stem

  this.stemWord = function (w) {
    var stem;
    var suffix;
    var firstch;
    var origword = w;

    // Words shorter than three letters are returned unchanged.
    if (w.length < 3)
      return w;

    var re;
    var re2;
    var re3;
    var re4;

    // Temporarily capitalize a leading "y" so it is treated as a consonant.
    firstch = w.substr(0,1);
    if (firstch == "y")
      w = firstch.toUpperCase() + w.substr(1);

    // Step 1a: plurals (sses -> ss, ies -> i, trailing s removed).
    re = /^(.+?)(ss|i)es$/;
    re2 = /^(.+?)([^s])s$/;

    if (re.test(w))
      w = w.replace(re,"$1$2");
    else if (re2.test(w))
      w = w.replace(re2,"$1$2");

    // Step 1b: -eed, -ed and -ing endings.
    re = /^(.+?)eed$/;
    re2 = /^(.+?)(ed|ing)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      re = new RegExp(mgr0);
      if (re.test(fp[1])) {
        re = /.$/;
        w = w.replace(re,"");
      }
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1];
      re2 = new RegExp(s_v);
      if (re2.test(stem)) {
        w = stem;
        re2 = /(at|bl|iz)$/;
        re3 = new RegExp("([^aeiouylsz])\\1$");
        re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
        if (re2.test(w))
          w = w + "e";
        else if (re3.test(w)) {
          re = /.$/;
          w = w.replace(re,"");
        }
        else if (re4.test(w))
          w = w + "e";
      }
    }

    // Step 1c: terminal y -> i when the stem contains a vowel.
    re = /^(.+?)y$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(s_v);
      if (re.test(stem))
        w = stem + "i";
    }

    // Step 2: map double suffixes to single ones (see step2list).
    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step2list[suffix];
    }

    // Step 3: -icate, -ful, -ness etc. (see step3list).
    re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step3list[suffix];
    }

    // Step 4: strip remaining standard suffixes when the measure m > 1.
    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
    re2 = /^(.+?)(s|t)(ion)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      if (re.test(stem))
        w = stem;
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1] + fp[2];
      re2 = new RegExp(mgr1);
      if (re2.test(stem))
        w = stem;
    }

    // Step 5: drop a final -e / reduce -ll when the measure allows it.
    re = /^(.+?)e$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      re2 = new RegExp(meq1);
      re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
      if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
        w = stem;
    }
    re = /ll$/;
    re2 = new RegExp(mgr1);
    if (re.test(w) && re2.test(w)) {
      re = /.$/;
      w = w.replace(re,"");
    }

    // and turn initial Y back to y
    if (firstch == "y")
      w = firstch.toLowerCase() + w.substr(1);
    return w;
  }
}
/**
* Search Module
*/
// Client-side full-text search over the generated searchindex.
var Search = {

  _index : null,          // loaded search index (set via setIndex)
  _queued_query : null,   // query deferred until the index has loaded
  _pulse_status : -1,     // -1: idle; 0..3: number of progress dots shown

  // Kick off a search when the page URL carries a ?q= parameter.
  init : function() {
    var params = $.getQueryParameters();
    if (params.q) {
      var query = params.q[0];
      $('input[name="q"]')[0].value = query;
      this.performSearch(query);
    }
  },

  // Fetch the index script; evaluating it calls Search.setIndex().
  loadIndex : function(url) {
    $.ajax({type: "GET", url: url, data: null, success: null,
            dataType: "script", cache: true});
  },

  setIndex : function(index) {
    var q;
    this._index = index;
    // Run any query that arrived before the index finished loading.
    if ((q = this._queued_query) !== null) {
      this._queued_query = null;
      Search.query(q);
    }
  },

  hasIndex : function() {
    return this._index !== null;
  },

  deferQuery : function(query) {
    this._queued_query = query;
  },

  stopPulse : function() {
    this._pulse_status = 0;
  },

  // Animate "Searching..." dots until stopPulse() is called.
  startPulse : function() {
    if (this._pulse_status >= 0)
      return;
    function pulse() {
      Search._pulse_status = (Search._pulse_status + 1) % 4;
      var dotString = '';
      for (var i = 0; i < Search._pulse_status; i++)
        dotString += '.';
      Search.dots.text(dotString);
      if (Search._pulse_status > -1)
        window.setTimeout(pulse, 500);
    };
    pulse();
  },

  /**
   * perform a search for something: builds the result UI skeleton and
   * either runs the query immediately or defers it until the index loads.
   */
  performSearch : function(query) {
    // create the required interface elements
    this.out = $('#search-results');
    this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
    this.dots = $('<span></span>').appendTo(this.title);
    this.status = $('<p style="display: none"></p>').appendTo(this.out);
    this.output = $('<ul class="search"/>').appendTo(this.out);

    $('#search-progress').text(_('Preparing search...'));
    this.startPulse();

    // index already loaded, the browser was quick!
    if (this.hasIndex())
      this.query(query);
    else
      this.deferQuery(query);
  },

  // Execute `query` against the loaded index and render the results.
  query : function(query) {
    var stopwords = ["and","then","into","it","as","are","in","if","for","no","there","their","was","is","be","to","that","but","they","not","such","with","by","a","on","these","of","will","this","near","the","or","at"];

    // Stem the searchterms and add them to the correct list
    var stemmer = new Stemmer();
    var searchterms = [];
    var excluded = [];
    var hlterms = [];
    var tmp = query.split(/\s+/);
    var objectterms = [];
    for (var i = 0; i < tmp.length; i++) {
      if (tmp[i] != "") {
        objectterms.push(tmp[i].toLowerCase());
      }

      if ($u.indexOf(stopwords, tmp[i]) != -1 || tmp[i].match(/^\d+$/) ||
          tmp[i] == "") {
        // skip this "word"
        continue;
      }
      // stem the word
      var word = stemmer.stemWord(tmp[i]).toLowerCase();
      // select the correct list: a leading '-' marks an excluded term
      if (word[0] == '-') {
        var toAppend = excluded;
        word = word.substr(1);
      }
      else {
        var toAppend = searchterms;
        hlterms.push(tmp[i].toLowerCase());
      }
      // only add if not already in the list
      if (!$.contains(toAppend, word))
        toAppend.push(word);
    };
    var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));

    // console.debug('SEARCH: searching for:');
    // console.info('required: ', searchterms);
    // console.info('excluded: ', excluded);

    // prepare search
    var filenames = this._index.filenames;
    var titles = this._index.titles;
    var terms = this._index.terms;
    var fileMap = {};
    var files = null;
    // different result priorities
    var importantResults = [];
    var objectResults = [];
    var regularResults = [];
    var unimportantResults = [];
    $('#search-progress').empty();

    // lookup as object
    for (var i = 0; i < objectterms.length; i++) {
      var others = [].concat(objectterms.slice(0,i),
                             objectterms.slice(i+1, objectterms.length))
      var results = this.performObjectSearch(objectterms[i], others);
      // Assume first word is most likely to be the object,
      // other words more likely to be in description.
      // Therefore put matches for earlier words first.
      // (Results are eventually used in reverse order).
      objectResults = results[0].concat(objectResults);
      importantResults = results[1].concat(importantResults);
      unimportantResults = results[2].concat(unimportantResults);
    }

    // perform the search on the required terms
    for (var i = 0; i < searchterms.length; i++) {
      var word = searchterms[i];
      // no match but word was a required one
      if ((files = terms[word]) == null)
        break;
      if (files.length == undefined) {
        files = [files];
      }
      // create the mapping of file -> matched words
      for (var j = 0; j < files.length; j++) {
        var file = files[j];
        if (file in fileMap)
          fileMap[file].push(word);
        else
          fileMap[file] = [word];
      }
    }

    // now check if the files don't contain excluded terms
    for (var file in fileMap) {
      var valid = true;
      // check if all requirements are matched
      if (fileMap[file].length != searchterms.length)
        continue;
      // ensure that none of the excluded terms is in the
      // search result.
      for (var i = 0; i < excluded.length; i++) {
        if (terms[excluded[i]] == file ||
            $.contains(terms[excluded[i]] || [], file)) {
          valid = false;
          break;
        }
      }
      // if we have still a valid result we can add it
      // to the result list
      if (valid)
        regularResults.push([filenames[file], titles[file], '', null]);
    }

    // delete unused variables in order to not waste
    // memory until list is retrieved completely
    // NOTE(review): `delete` on plain variables is a no-op in JavaScript,
    // and the commas make this a sequence expression -- kept for fidelity.
    delete filenames, titles, terms;

    // now sort the regular results descending by title
    regularResults.sort(function(a, b) {
      var left = a[1].toLowerCase();
      var right = b[1].toLowerCase();
      return (left > right) ? -1 : ((left < right) ? 1 : 0);
    });

    // combine all results
    var results = unimportantResults.concat(regularResults)
      .concat(objectResults).concat(importantResults);

    // print the results, popping from the end (highest priority last)
    var resultCount = results.length;
    function displayNextItem() {
      // results left, load the summary and display it
      if (results.length) {
        var item = results.pop();
        var listItem = $('<li style="display:none"></li>');
        if (DOCUMENTATION_OPTIONS.FILE_SUFFIX == '') {
          // dirhtml builder
          var dirname = item[0] + '/';
          if (dirname.match(/\/index\/$/)) {
            dirname = dirname.substring(0, dirname.length-6);
          } else if (dirname == 'index/') {
            dirname = '';
          }
          listItem.append($('<a/>').attr('href',
            DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
            highlightstring + item[2]).html(item[1]));
        } else {
          // normal html builders
          listItem.append($('<a/>').attr('href',
            item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
            highlightstring + item[2]).html(item[1]));
        }
        if (item[3]) {
          listItem.append($('<span> (' + item[3] + ')</span>'));
          Search.output.append(listItem);
          listItem.slideDown(5, function() {
            displayNextItem();
          });
        } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
          // fetch the page source to build an excerpt around the hits
          $.get(DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' +
                item[0] + '.txt', function(data) {
            if (data != '') {
              listItem.append($.makeSearchSummary(data, searchterms, hlterms));
              Search.output.append(listItem);
            }
            listItem.slideDown(5, function() {
              displayNextItem();
            });
          }, "text");
        } else {
          // no source available, just display title
          Search.output.append(listItem);
          listItem.slideDown(5, function() {
            displayNextItem();
          });
        }
      }
      // search finished, update title and status message
      else {
        Search.stopPulse();
        Search.title.text(_('Search Results'));
        if (!resultCount)
          Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
        else
          Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
        Search.status.fadeIn(500);
      }
    }
    displayNextItem();
  },

  // Match `object` against indexed object names; returns three arrays of
  // results ordered by priority: [important, object, unimportant].
  performObjectSearch : function(object, otherterms) {
    var filenames = this._index.filenames;
    var objects = this._index.objects;
    var objnames = this._index.objnames;
    var titles = this._index.titles;

    var importantResults = [];
    var objectResults = [];
    var unimportantResults = [];

    for (var prefix in objects) {
      for (var name in objects[prefix]) {
        var fullname = (prefix ? prefix + '.' : '') + name;
        if (fullname.toLowerCase().indexOf(object) > -1) {
          var match = objects[prefix][name];
          var objname = objnames[match[1]][2];
          var title = titles[match[0]];
          // If more than one term searched for, we require other words to be
          // found in the name/title/description
          if (otherterms.length > 0) {
            var haystack = (prefix + ' ' + name + ' ' +
                            objname + ' ' + title).toLowerCase();
            var allfound = true;
            for (var i = 0; i < otherterms.length; i++) {
              if (haystack.indexOf(otherterms[i]) == -1) {
                allfound = false;
                break;
              }
            }
            if (!allfound) {
              continue;
            }
          }
          var descr = objname + _(', in ') + title;
          anchor = match[3];
          if (anchor == '')
            anchor = fullname;
          else if (anchor == '-')
            anchor = objnames[match[1]][1] + '-' + fullname;
          result = [filenames[match[0]], fullname, '#'+anchor, descr];
          // match[2] encodes the priority bucket for this object type.
          switch (match[2]) {
            case 1: objectResults.push(result); break;
            case 0: importantResults.push(result); break;
            case 2: unimportantResults.push(result); break;
          }
        }
      }
    }

    // sort results descending
    objectResults.sort(function(a, b) {
      return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
    });

    importantResults.sort(function(a, b) {
      return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
    });

    unimportantResults.sort(function(a, b) {
      return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
    });

    return [importantResults, objectResults, unimportantResults]
  }
}
// Start the search machinery once the DOM is ready.
$(document).ready(function() {
  Search.init();
});
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser # TODO use argparse instead
import threading
import sys
# Python 2/3 compatibility shims: the queue module name, StringIO location
# and the long/int split all differ between the two major versions.
IS_PYTHON2 = sys.version_info[0] == 2
if IS_PYTHON2:
    import Queue
    from StringIO import StringIO
    strToLong = lambda str, base = 10: long(str, base)
else: # Python 3
    import queue as Queue
    from io import StringIO
    strToLong = lambda str, base = 10: int(str, base)

import socket
import signal
import re
import logging as LOG

# curses is unavailable on native Windows; bail out early with a hint.
if sys.platform == 'win32':
    print("Cannot run natively on Windows due to missing curses package. Try running in a cygwin shell instead!"); sys.exit(1)

import curses
# Port assumed for any server given without an explicit ":port".
ZK_DEFAULT_PORT = 2181

# Command-line interface.
usage = "usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("", "--servers",
                  dest="servers", default="localhost:%s" % ZK_DEFAULT_PORT,
                  help="comma separated list of host:port (default localhost:%d)" % ZK_DEFAULT_PORT)
parser.add_option("-n", "--names",
                  action="store_true", dest="names", default=False,
                  help="resolve session name from ip (default False)")
parser.add_option("", "--fix_330",
                  action="store_true", dest="fix_330", default=False,
                  help="workaround for a bug in ZK 3.3.0")
parser.add_option("-v", "--verbosity",
                  dest="verbosity", default="DEBUG",
                  help="log level verbosity (DEBUG, INFO, WARN(ING), ERROR, CRITICAL/FATAL)")
parser.add_option("-l", "--logfile",
                  dest="logfile", default=None,
                  help="directory in which to place log file, or empty for none")
parser.add_option("-c", "--config",
                  dest="configfile", default=None,
                  help="zookeeper configuration file to lookup servers from")
parser.add_option("-t", "--timeout",
                  dest="timeout", default=None,
                  help="connection timeout to zookeeper instance")

(options, args) = parser.parse_args()

# Log to a file when requested, otherwise silence logging entirely
# (log output would corrupt the curses display).
if options.logfile:
    LOG.basicConfig(filename=options.logfile, level=getattr(LOG, options.verbosity))
else:
    LOG.disable(LOG.CRITICAL)

# Set by the SIGWINCH handler; the main loop reacts by resizing the UI.
resized_sig = False
# threads to get server data
# UI class
# track current data and historical
class Session(object):
    """One client session parsed from a line of a server's ``stat`` response.

    A session line looks like::

        /10.0.0.1:55303[1](queued=0,recved=5,sent=6)

    Every ``key=value`` pair inside the parentheses becomes a string
    attribute on the instance; ``host``, ``port``, ``interest_ops`` and
    ``server_id`` are always present.
    """

    # Precompiled, raw-string pattern.  The original used a plain string
    # literal containing '\d', '\[' etc., which are invalid escape
    # sequences (SyntaxWarning on modern Python); raw strings avoid that.
    # The character class allows both IPv4 and IPv6 addresses.
    SESSION_RE = re.compile(r'/([\da-fA-F:\.]+):(\d+)\[(\d+)\]\((.*)\)')

    def __init__(self, session, server_id):
        m = self.SESSION_RE.search(session)
        self.host = m.group(1)
        self.port = m.group(2)
        self.server_id = server_id
        self.interest_ops = m.group(3)
        # Expose every key=value stat (queued, recved, sent, ...) as an attr.
        for d in m.group(4).split(","):
            k, v = d.split("=")
            setattr(self, k, v)
class ZKServer(object):
    """Snapshot of one ZooKeeper server's state, taken via the 'stat' command.

    On any failure (unreachable host, empty or unparsable response) the
    instance is marked ``unavailable`` instead of raising.
    """
    def __init__(self, server, server_id):
        self.server_id = server_id
        if ':' in server:
            self.host, self.port = server.split(':')[0], int(server.split(':')[1])
        else: # fallback to default if user doesn't specify port number
            self.host, self.port = server, ZK_DEFAULT_PORT
        try:
            stat = send_cmd(self.host, self.port, b'stat\n')
            sio = StringIO(stat)
            line = sio.readline()
            # First line: "Zookeeper version: x.y.z-...".
            m = re.search('.*: (\d+\.\d+\.\d+)-.*', line)
            self.version = m.group(1) # raises when the stat response is empty
            sio.readline()
            # One session line per connected client, up to the first blank line.
            self.sessions = []
            for line in sio:
                if not line.strip():
                    break
                self.sessions.append(Session(line.strip(), server_id))
            # Remaining "Key: value" stats become lower-cased snake_case attrs
            # (e.g. "Node count" -> self.node_count).
            for line in sio:
                attr, value = line.split(':')
                attr = attr.strip().replace(" ", "_").replace("/", "_").lower()
                setattr(self, attr, value.strip())
            # "min/avg/max" combined latency field, split into three attrs.
            self.min_latency, self.avg_latency, self.max_latency = self.latency_min_avg_max.split("/")
            self.unavailable = False
        except: # e.g., when server responds with '' (not reachable)
            self.unavailable = True
            self.mode = "Unavailable"
            self.sessions = []
            self.version = "Unknown"
            return
def send_cmd(host, port, cmd):
    """Send a four-letter command to host:port and return the full reply.

    Honours the --timeout option and the --fix_330 workaround flag; the
    socket is always closed, even on error.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if options.timeout:
        sock.settimeout(float(options.timeout))
    sock.connect((host, port))
    chunks = []
    try:
        sock.sendall(cmd)

        # shutting down the socket write side helps ensure that we don't end
        # up with TIME_WAIT sockets
        if not options.fix_330:
            sock.shutdown(socket.SHUT_WR)

        while True:
            buf = sock.recv(4096)
            if not buf:
                break
            chunks.append(buf.decode())
    finally:
        sock.close()

    return "".join(chunks)
# Completed ZKServer snapshots flow from poller threads to the UI via here.
q_stats = Queue.Queue()
# Pollers sleep on this condition; notifying it forces an immediate refresh.
p_wakeup = threading.Condition()
def wakeup_poller():
    """Wake every StatPoller thread so stats refresh immediately.

    Uses the condition as a context manager so the lock is released even if
    notification raises, and the non-deprecated ``notify_all()`` spelling
    (``notifyAll`` is a deprecated alias).
    """
    with p_wakeup:
        p_wakeup.notify_all()
def reset_server_stats(server):
    """Ask one server ("host" or "host:port") to reset its statistics.

    Fixes two issues in the original: a server given without an explicit
    port no longer raises ValueError on unpacking, and the port is
    converted to int (socket.connect rejects a string port for AF_INET).
    """
    if ':' in server:
        host, port = server.split(':', 1)
        port = int(port)
    else:
        host, port = server, ZK_DEFAULT_PORT
    send_cmd(host, port, b'srst\n')
# Monotonically increasing id handed to each poller thread at creation.
server_id = 0

class StatPoller(threading.Thread):
    """Daemon thread that snapshots one server every ~3s onto q_stats."""
    def __init__(self, server):
        self.server = server
        global server_id
        self.server_id = server_id
        server_id += 1
        threading.Thread.__init__(self)

    def run(self):
        p_wakeup.acquire()
        while True:
            s = ZKServer(self.server, self.server_id)
            q_stats.put(s)
            # Sleep until the 3s timeout elapses or the main loop notifies us.
            p_wakeup.wait(3.0)
        # no need - never hit here except exit - "p_wakeup.release()"
        # also, causes error on console
class BaseUI(object):
    """Common plumbing for the curses sub-windows (size tracking, safe write)."""
    def __init__(self, win):
        self.win = win
        global mainwin
        # Size everything relative to the top-level screen.
        self.maxy, self.maxx = mainwin.getmaxyx()
        self.resize(self.maxy, self.maxx)

    def resize(self, maxy, maxx):
        LOG.debug("resize called y %d x %d" % (maxy, maxx))
        self.maxy = maxy
        self.maxx = maxx

    def addstr(self, y, x, line, flags = 0):
        """Write `line` clipped to the window width, then queue a refresh."""
        LOG.debug("addstr with maxx %d" % (self.maxx))
        self.win.addstr(y, x, line[:self.maxx-1], flags)
        self.win.clrtoeol()
        self.win.noutrefresh()
class SummaryUI(BaseUI):
    """Top line: ensemble-wide aggregates across all polled servers."""
    def __init__(self, height, width, server_count):
        BaseUI.__init__(self, curses.newwin(1, width, 0, 0))
        # Per-server values, indexed by server_id.
        self.session_counts = [0 for i in range(server_count)]
        self.node_counts = [0 for i in range(server_count)]
        self.zxids = [0 for i in range(server_count)]

    def update(self, s):
        self.win.erase()
        if s.unavailable:
            self.session_counts[s.server_id] = 0
            self.node_counts[s.server_id] = 0
            self.zxids[s.server_id] = 0
        else:
            self.session_counts[s.server_id] = len(s.sessions)
            self.node_counts[s.server_id] = int(s.node_count)
            # zxid is reported in hex (e.g. "0x1a2b").
            self.zxids[s.server_id] = strToLong(s.zxid, 16)
        # Node count / zxid: take the max across servers; sessions: sum.
        nc = max(self.node_counts)
        zxid = max(self.zxids)
        sc = sum(self.session_counts)
        self.addstr(0, 0, "Ensemble -- nodecount:%d zxid:0x%x sessions:%d" %
                    (nc, zxid, sc))
class ServerUI(BaseUI):
    """Middle pane: one row of stats per ZooKeeper server."""
    def __init__(self, height, width, server_count):
        BaseUI.__init__(self, curses.newwin(server_count + 2, width, 1, 0))

    def resize(self, maxy, maxx):
        BaseUI.resize(self, maxy, maxx)
        # Column header, rendered in reverse video.
        self.addstr(1, 0, "ID SERVER PORT M OUTST RECVD SENT CONNS MINLAT AVGLAT MAXLAT", curses.A_REVERSE)

    def update(self, s):
        if s.unavailable:
            # Only identity columns when the server could not be reached.
            self.addstr(s.server_id + 2, 0, "%-2s %-15s %5s %s" %
                        (s.server_id, s.host[:15], s.port, s.mode[:1].upper()))
        else:
            self.addstr(s.server_id + 2, 0, "%-2s %-15s %5s %s %8s %8s %8s %5d %6s %6s %6s" %
                        (s.server_id, s.host[:15], s.port, s.mode[:1].upper(),
                         s.outstanding, s.received, s.sent, len(s.sessions),
                         s.min_latency, s.avg_latency, s.max_latency))
class SessionUI(BaseUI):
    """Bottom pane: every client session across all servers, busiest first."""
    def __init__(self, height, width, server_count):
        BaseUI.__init__(self, curses.newwin(height - server_count - 3, width, server_count + 3, 0))
        # Last-seen session list per server, indexed by server_id.
        self.sessions = [[] for i in range(server_count)]
        # Cache of reverse-DNS lookups (getnameinfo is a blocking call).
        self.ip_to_hostname = {}

    def hostname(self, session):
        if session.host not in self.ip_to_hostname:
            hostname = socket.getnameinfo((session.host, int(session.port)), 0)[0]
            self.ip_to_hostname[session.host] = hostname
        return self.ip_to_hostname[session.host]

    def update(self, s):
        self.win.erase()
        self.addstr(1, 0, "CLIENT PORT S I QUEUED RECVD SENT", curses.A_REVERSE)
        self.sessions[s.server_id] = s.sessions
        items = []
        for l in self.sessions:
            items.extend(l)
        # Busiest sessions (largest request queue) first.
        items.sort(key=lambda x: int(x.queued), reverse=True)
        for i, session in enumerate(items):
            try:
                #ugh, need to handle if slow - thread for async resolver?
                host = self.hostname(session) if options.names else session.host
                self.addstr(i + 2, 0, "%-15s %5s %1s %1s %8s %8s %8s" %
                            (host[:15], session.port, session.server_id, session.interest_ops,
                             session.queued, session.recved, session.sent))
            except:
                # NOTE(review): bare except presumably guards against addstr
                # failing once rows run past the window bottom -- confirm.
                break
# The top-level curses window; set by Main.show_ui and read by BaseUI.
mainwin = None

class Main(object):
    """Owns the main loop: spawns pollers, drains q_stats, handles keys."""
    def __init__(self, servers):
        self.servers = servers.split(",")

    def show_ui(self, stdscr):
        """Body run under curses.wrapper(); stdscr is the whole terminal."""
        global mainwin
        mainwin = stdscr
        curses.use_default_colors()
        # w/o this for some reason takes 1 cycle to draw wins
        stdscr.refresh()

        signal.signal(signal.SIGWINCH, sigwinch_handler)

        # getch() timeout in ms; doubles as the UI tick interval.
        TIMEOUT = 250
        stdscr.timeout(TIMEOUT)

        server_count = len(self.servers)

        maxy, maxx = stdscr.getmaxyx()
        uis = (SummaryUI(maxy, maxx, server_count),
               ServerUI(maxy, maxx, server_count),
               SessionUI(maxy, maxx, server_count))

        # start the polling threads
        pollers = [StatPoller(server) for server in self.servers]
        for poller in pollers:
            poller.setName("PollerThread:" + poller.server)
            poller.setDaemon(True)
            poller.start()

        LOG.debug("starting main loop")

        global resized_sig
        flash = None
        while True:
            try:
                # Handle a pending terminal resize flagged by SIGWINCH.
                if resized_sig:
                    resized_sig = False
                    self.resize(uis)
                    wakeup_poller()

                # Drain all pending snapshots and repaint each pane.
                while not q_stats.empty():
                    zkserver = q_stats.get_nowait()
                    for ui in uis:
                        ui.update(zkserver)

                # Keyboard handling: q quit, h help, r reset, space refresh.
                ch = stdscr.getch()
                if 0 < ch <= 255:
                    if ch == ord('q'):
                        return
                    elif ch == ord('h'):
                        flash = "Help: q:quit r:reset stats spc:refresh"
                        flash_count = 1000/TIMEOUT * 5
                    elif ch == ord('r'):
                        for server in self.servers:
                            try:
                                reset_server_stats(server)
                            except:
                                # best effort: skip servers that are down
                                pass
                        flash = "Server stats reset"
                        flash_count = 1000/TIMEOUT * 5
                        wakeup_poller()
                    elif ch == ord(' '):
                        wakeup_poller()

                # Show (and age out) the transient flash message on row 1.
                stdscr.move(1, 0)
                if flash:
                    stdscr.addstr(1, 0, flash)
                    flash_count -= 1
                    if flash_count == 0:
                        flash = None
                stdscr.clrtoeol()

                curses.doupdate()
            except KeyboardInterrupt:
                break

    def resize(self, uis):
        """Re-query the terminal size and propagate it to every pane."""
        curses.endwin()
        curses.doupdate()

        global mainwin
        mainwin.refresh()
        maxy, maxx = mainwin.getmaxyx()

        for ui in uis:
            ui.resize(maxy, maxx)
def sigwinch_handler(*nada):
    """SIGWINCH handler: just flag the resize; the main loop does the work."""
    LOG.debug("sigwinch called")
    global resized_sig
    resized_sig = True
def read_zk_config(filename):
    """Parse a ZooKeeper config file into a dict of {key: value} strings.

    Blank lines and '#' comments are skipped; all spaces are stripped from
    each line before splitting on the first '=' (matching the original
    behavior).  On an I/O error a message is printed and the (possibly
    partial) dict is returned.

    Bug fix: the original called open() *outside* the try block, so its
    "Unable to open" IOError handler could never fire for a missing or
    unreadable file.  The with-statement also guarantees the file is closed.
    """
    config = {}
    try:
        with open(filename, 'r') as f:
            for line in f:
                if line.rstrip() and not line.startswith('#'):
                    k, v = tuple(line.replace(' ', '').strip().split('=', 1))
                    config[k] = v
    except IOError as e:
        print("Unable to open `{0}': I/O error({1}): {2}".format(filename, e.errno, e.strerror))
    return config
def get_zk_servers(filename):
    """Return the comma-separated "host:port,..." list of servers to poll.

    If `filename` is given, servers come from the ZooKeeper config file
    (its server.N entries, all using the file's clientPort); otherwise the
    --servers option is used, with ZK_DEFAULT_PORT applied to any entry
    lacking an explicit port.
    """
    if filename:
        # Consistency fix: use the `filename` parameter rather than reading
        # options.configfile again (the caller passes the same value).
        config = read_zk_config(filename)
        client_port = config['clientPort']
        return ','.join("%s:%s" % (v.split(':', 1)[0], client_port)
                        for k, v in config.items() if k.startswith('server.'))
    else:
        # Bug fix: the original used split(',', 1), which only split off the
        # first server, so the default port was never applied beyond the
        # second entry (e.g. "a,b,c" became "a:2181,b,c:2181").
        return ','.join("%s:%s" % (s.strip(), ZK_DEFAULT_PORT) if not ':' in s else "%s" % s
                        for s in options.servers.split(','))
def main_func():
    """Entry point: resolve the server list and run the curses UI."""
    LOG.debug("startup")
    ui = Main(get_zk_servers(options.configfile))
    # curses.wrapper restores the terminal even if the UI raises.
    curses.wrapper(ui.show_ui)

if __name__ == '__main__':
    main_func()
# ZKTraffic [](https://travis-ci.org/twitter/zktraffic) [](https://coveralls.io/r/twitter/zktraffic) [](http://badge.fury.io/py/zktraffic)
**Table of Contents**
- [tl;dr](#tldr)
- [Installing](#installing)
- [What is ZKTraffic?](#what-is-zktraffic)
- [Contributing and Testing](#contributing-and-testing)
- [More tools!](#more-tools)
- [OS X](#os-x)
- [Dependencies](#dependencies)
### tl;dr ###
ZooKeeper protocol analyzer and stats gathering daemon
### Installing ###
You can install ZKTraffic via pip:
.. code-block:: bash
$ pip install zktraffic
Or run it from source (if you have the dependencies installed, see below):
.. code-block:: bash
$ git clone https://github.com/twitter/zktraffic.git
$ cd zktraffic
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --iface=eth0
To get a quick count of requests by path:
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --iface=eth0 --count-requests 10000 --sort-by path
/ 1749
/services/prod/search 846
/configs/teleportation/features 843
Or by type:
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --iface=eth0 --count-requests 10000 --sort-by type
GetChildrenRequest 9044
ExistsRequest 958
You can also measure latencies by path (avg, p95 and p99):
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --measure-latency 1000 --group-by path --aggregation-depth 2 --sort-by p99
path avg p95 p99
--------------- ----------- ---------- ----------
/party/services 0.000199077 0.00048846 0.00267805
/party 0.000349498 0.00136839 0.00201204
/party/configs 0.000157728 0.00036664 0.00122663
Or by type:
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --measure-latency 1000 --group-by type --sort-by p99
type avg p95 p99
---------------------- ----------- ----------- -----------
CreateEphemeralRequest 0.000735009 0.000978041 0.0032404
GetChildrenRequest 0.000182547 0.000453258 0.00220628
ExistsRequest 0.000162728 0.000430155 0.000862937
Or by client:
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-dump --measure-latency 1000 --group-by client --sort-by p99
client avg p95 p99
---------------------- ----------- ----------- -----------
10.0.1.3:44308 0.000735009 0.000978041 0.0032404
10.0.1.6:34305 0.000182547 0.000453258 0.00220628
10.0.1.9:36110 0.000162728 0.000430155 0.000862937
Or use the stats gathering daemon:
.. code-block:: bash
$ sudo ZKTRAFFIC_SOURCE=1 bin/zk-stats-daemon --iface=eth0 --http-port=9090
Or you can build PEX files — from the source — for any of the available tools:
.. code-block:: bash
$ pip install pex
# zk-dump
$ pex -v -e zktraffic.cli.zk -o zk-dump.pex .
# zk-stats-daemon
$ pex -v -e zktraffic.cli.stats_daemon -o stats-daemon.pex .
# zab-dump
$ pex -v -e zktraffic.cli.zab -o zab-dump.pex .
# fle-dump
$ pex -v -e zktraffic.cli.fle -o fle-dump.pex .
More info about PEX [here](https://pex.readthedocs.org "PEX").
### What is ZKTraffic? ###
An {iptraf,top}-esque traffic monitor for ZooKeeper. Right now it exports
per-path (and global) stats. Eventually it'll be made to export per-user
stats too.
It has a front-end, zk-dump, that can be used in interactive mode to dump traffic:
```
# need root or CAP_NET_ADMIN & CAP_NET_RAW
$ sudo zk-dump --iface eth0
21:08:05:991542 ConnectRequest(ver=0, zxid=0, timeout=10000, session=0x0, readonly=False, client=127.0.0.1:50049)
————————►21:08:06:013513 ConnectReply(ver=0, timeout=10000, session=0x148cf0aedc60000, readonly=False, client=127.0.0.1:50049)
21:08:07:432361 ExistsRequest(xid=1, path=/, watch=False, size=14, client=127.0.0.1:50049)
————————►21:08:07:447353 ExistsReply(xid=1, zxid=31, error=0, client=127.0.0.1:50049)
21:08:07:448033 GetChildrenRequest(xid=2, path=/, watch=False, size=14, client=127.0.0.1:50049)
————————►21:08:07:456169 GetChildrenReply(xid=2, zxid=31, error=0, count=1, client=127.0.0.1:50049)
...
```
Or, it can work in daemon mode from which it exposes HTTP/JSON endpoints with
stats that can be fed into your favourite data collection system:
.. code-block:: bash
$ sudo zk-stats-daemon.pex --app_daemonize --aggregation-depth=5
# Wait for 1 min and:
$ sleep 60 && curl http://localhost:7070/json/paths | python -mjson.tool
{
"ConnectRequest": 2,
"ConnectRequestBytes": 90,
"CreateRequest/configs": 2,
"CreateRequest/configs/server": 2,
"CreateRequest/discovery": 2,
"CreateRequest/discovery/hosts": 2,
"CreateRequest/discovery/services": 2,
"CreateRequestBytes/configs": 110,
"CreateRequestBytes/configs/server": 124,
"CreateRequestBytes/discovery": 114,
"CreateRequestBytes/discovery/hosts": 126,
"CreateRequestBytes/discovery/services": 132,
"ExistsRequest/": 1574,
"ExistsRequest/configs": 3,
"ExistsRequest/configs/server": 2,
"ExistsRequest/discovery": 4,
"ExistsRequest/discovery/hosts": 2,
"ExistsRequest/discovery/services": 2,
"ExistsRequestBytes/": 22036,
"ExistsRequestBytes/configs": 63,
"ExistsRequestBytes/configs/server": 56,
"ExistsRequestBytes/discovery": 92,
"ExistsRequestBytes/discovery/hosts": 58,
"ExistsRequestBytes/discovery/services": 64,
"GetChildrenRequest/configs": 1285,
"GetChildrenRequest/configs/server": 1242,
"GetChildrenRequest/discovery": 1223,
"GetChildrenRequest/discovery/hosts": 1250,
"GetChildrenRequest/discovery/services": 1222,
"GetChildrenRequest/zookeeper/config": 1285,
"GetChildrenRequest/zookeeper/quota/limits": 1228,
"GetChildrenRequest/zookeeper/quota/limits/by-path": 1269,
"GetChildrenRequest/zookeeper/quota/limits/global": 1230,
"GetChildrenRequest/zookeeper/quota/stats/by-path": 1222,
"GetChildrenRequestBytes/discovery/hosts": 36250,
"GetChildrenRequestBytes/discovery/services": 39104,
"GetChildrenRequestBytes/zookeeper/config": 38550,
"GetChildrenRequestBytes/zookeeper/quota/limits": 44208,
"GetChildrenRequestBytes/zookeeper/quota/limits/by-path": 55836,
"GetChildrenRequestBytes/zookeeper/quota/limits/global": 52890,
"GetChildrenRequestBytes/zookeeper/quota/limits/slices": 51815,
"GetChildrenRequestBytes/zookeeper/quota/stats": 42630,
"GetChildrenRequestBytes/zookeeper/quota/stats/by-path": 52546,
"GetChildrenRequestBytes/zookeeper/quota/stats/global": 50568,
"reads/": 2761,
"reads/configs": 1288,
"reads/configs/server": 1244,
"reads/discovery": 1227,
"reads/discovery/hosts": 1252,
"reads/discovery/services": 1224,
"reads/zookeeper/config": 1285,
"reads/zookeeper/quota/limits": 1228,
"reads/zookeeper/quota/limits/by-path": 1269,
"reads/zookeeper/quota/limits/global": 1230,
"readsBytes/": 38654,
"readsBytes/discovery/services": 39168,
"readsBytes/zookeeper/config": 38550,
"readsBytes/zookeeper/quota/limits": 44208,
"readsBytes/zookeeper/quota/limits/by-path": 55836,
"readsBytes/zookeeper/quota/limits/global": 52890,
"readsBytes/zookeeper/quota/limits/slices": 51815,
"readsBytes/zookeeper/quota/stats": 42630,
"readsBytes/zookeeper/quota/stats/by-path": 52546,
"readsBytes/zookeeper/quota/stats/global": 50568,
"total/readBytes": 655586,
"total/reads": 21251,
"total/writeBytes": 606,
"total/writes": 10,
"writes/": 0,
"writes/configs": 2,
"writes/configs/server": 2,
"writes/discovery": 2,
"writes/discovery/hosts": 2,
"writes/discovery/services": 2,
"writesBytes/": 0,
"writesBytes/configs": 110,
"writesBytes/configs/server": 124,
"writesBytes/discovery": 114,
"writesBytes/discovery/hosts": 126,
"writesBytes/discovery/services": 132
}
Other relevant endpoints for stats are:
* /json/ips: top-N per-ip stats
* /json/auths: per-auth stats
* /json/auths-dump: a full dump of known auths
* /json/info: process uptime and introspection info
* /threads: stacks for all threads
### Contributing and Testing ###
Please see [CONTRIBUTING.md](CONTRIBUTING.md).
### More tools! ###
Along with zk-dump and zk-stats-daemon, you can find fle-dump which allows you
to inspect FastLeaderElection traffic (i.e.: the protocol by which ZooKeeper decides
who will lead and the mechanism by which the leader is subsequently discovered):
.. code-block:: bash
$ sudo fle-dump --iface eth0 -c
Notification(
timestamp=00:57:12:593254,
src=10.0.0.1:32938,
dst=10.0.0.2:3888,
state=following,
leader=3,
zxid=0,
election_epoch=0,
peer_epoch=0,
config=
server.0=10.0.0.1:2889:3888:participant;0.0.0.0:2181
server.1=10.0.0.2:2889:3888:participant;0.0.0.0:2181
server.2=10.0.0.3:2889:3888:participant;0.0.0.0:2181
server.3=10.0.0.4:2889:3888:participant;0.0.0.0:2181
server.4=10.0.0.5:2889:3888:participant;0.0.0.0:2181
version=10010d4d6
)
Notification(
timestamp=00:57:12:595525,
src=10.0.0.2:3888,
dst=10.0.0.1:32938,
state=looking,
leader=1,
zxid=4296326153,
election_epoch=1,
peer_epoch=1,
config=
server.0=10.0.0.1:2889:3888:participant;0.0.0.0:2181
server.1=10.0.0.2:2889:3888:participant;0.0.0.0:2181
server.2=10.0.0.3:2889:3888:participant;0.0.0.0:2181
server.3=10.0.0.4:2889:3888:participant;0.0.0.0:2181
server.4=10.0.0.5:2889:3888:participant;0.0.0.0:2181
version=10010d4d6
)
...
Note: for initial messages to be visible you'll need the patch available
at [ZOOKEEPER-2098](https://issues.apache.org/jira/browse/ZOOKEEPER-2098 "ZOOKEEPER-2098"),
if you are using ZooKeeper prior to ZooKeeper 3.5.1-rc2.
Note: if you are using Linux 3.14 or later, you'll need to disable [TCP Auto Corking](http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=f54b311142a92ea2e42598e347b84e1655caf8e3) by running `echo 0 > /proc/sys/net/ipv4/tcp_autocorking`.
If you are interested in debugging ZAB (ZooKeeper Atomic Broadcast protocol), you can use
zab-dump:
.. code-block:: bash
$ sudo zab-dump --iface eth0
Request(
cxid=6,
dst=10.0.0.1:2889,
length=112,
req_type=CreateRequest,
session_id=0x34e4d23b0d70001,
src=10.0.0.2:48604,
timestr=22:54:31:995353,
zxid=-1,
)
Proposal(
cxid=6,
dst=10.0.0.2:48603,
length=110,
session_id=0x34e4d23b0d70001,
src=10.0.0.1:2889,
timestr=22:54:31:995753,
txn_time=1435816471995,
txn_type=CreateRequest,
txn_zxid=8589934619,
zxid=8589934619,
)
Proposal(
cxid=6,
dst=10.0.0.1:48604,
length=110,
session_id=0x34e4d23b0d70001,
src=10.0.0.1:2889,
timestr=22:54:31:995755,
txn_time=1435816471995,
txn_type=CreateRequest,
txn_zxid=8589934619,
zxid=8589934619,
)
Proposal(
cxid=6,
dst=10.0.0.3:48605,
length=110,
session_id=0x34e4d23b0d70001,
src=10.0.0.1:2889,
timestr=22:54:31:995770,
txn_time=1435816471995,
txn_type=CreateRequest,
txn_zxid=8589934619,
zxid=8589934619,
)
Ack(
dst=10.0.0.1:2889,
length=20,
src=10.0.0.1:48603,
timestr=22:54:31:996068,
zxid=8589934619,
)
Ack(
dst=10.0.0.1:2889,
length=20,
src=10.0.0.1:48604,
timestr=22:54:31:996316,
zxid=8589934619,
)
Ack(
dst=10.0.0.1:2889,
length=20,
src=10.0.0.1:48604,
timestr=22:54:31:996318,
zxid=8589934619,
)
Commit(
dst=10.0.0.1:48603,
length=20,
src=10.0.0.1:2889,
timestr=22:54:31:996193,
zxid=8589934619,
)
Commit(
dst=10.0.0.2:48604,
length=20,
src=10.0.0.1:2889,
timestr=22:54:31:996195,
zxid=8589934619,
)
Commit(
dst=10.0.0.2:48605,
length=20,
src=10.0.0.1:2889,
timestr=22:54:31:996442,
zxid=8589934619,
)
### OS X ###
Although no one has tried running this on OS X in production, it can be used for some parts of development and unit testing. If you are running on OS X, please run the following to install the correct dependencies:
.. code-block:: bash
$ pip install -r ./osx_requirements.txt
### Dependencies ###
* Python 2.7 (Py3K soon)
* ansicolors
* dpkt-fix
* hexdump
* psutil>=2.1.0
* scapy==2.4.2
* six
* twitter.common.app
* twitter.common.collections
* twitter.common.exceptions
* twitter.common.http
* twitter.common.log
| zktraffic | /zktraffic-0.2.0.tar.gz/zktraffic-0.2.0/README.md | README.md |
import json
import os
import sh
import shlex
from . import vault
from .exceptions import CommandError
from .utils import Map, get_random
class Filesystem(object):
"""
An encrypted filesystem object
"""
def __init__(self, base, name):
self.base = base
self.name = name
self.config = FilesystemConfig(self)
self._secret_key = None
@property
def root(self):
"""
Returns (str): full path to the filesystem's base directory
"""
return os.path.join(self.base, self.name)
@property
def loopback_path(self):
return os.path.join(self.root, "fs")
@property
def key_path(self):
return os.path.join(self.root, "key")
@property
def mapper_path(self):
return "/dev/mapper/{}".format(self.name)
@property
def secret_key(self):
if self._secret_key:
return self._secret_key
self._secret_key = vault.unlock(self.key_path).decode("utf8")
return self._secret_key
def create(self, mountpoint, size, automount=True):
"""
creates a new encrypted filesystem
Args:
mountpoint (str): path to mount the filesystem
size (int): filesystem size in MB
automount (bool): whether to mount at boot
"""
if os.path.exists(self.root):
raise CommandError("filesystem {} already exists".format(self.name))
if not os.path.exists(self.root):
os.makedirs(self.root)
self._secret_key = get_random()
# print('secret_key={}'.format(secret_key))
vault.lock(self.key_path, self.secret_key)
dd_command = shlex.split(
"dd if=/dev/zero of={} bs=1M seek={} count=1".format(
self.loopback_path, size - 1
)
)
dd = sh.Command(dd_command[0])
dd(*dd_command[1:])
luks_format_command = shlex.split(
"cryptsetup -q -v luksFormat {} -".format(self.loopback_path)
)
command = sh.Command(luks_format_command[0])
command(*luks_format_command[1:], _in=self.secret_key)
self.unlock()
mkfs = sh.Command("mkfs.ext4")
mkfs(self.mapper_path)
self.lock()
self.config.write(mountpoint, automount)
self.enable_service()
def enable_service(self):
systemctl_command = shlex.split(
"systemctl enable zymkey-filesystem@{}".format(self.name)
)
command = sh.Command(systemctl_command[0])
command(*systemctl_command[1:])
def mount(self, automount=True):
"""
Mounts the filesystem
Args:
automount (bool): when True, check the filesystem's config for automount to be True
"""
# when automount=True and the config's automount is False, do nothing
if automount and not self.config.automount:
return
self.unlock()
mountpoint = self.config.mountpoint
if not os.path.exists(mountpoint):
os.makedirs(mountpoint)
mount_command = shlex.split("mount {} {}".format(self.mapper_path, mountpoint))
command = sh.Command(mount_command[0])
command(*mount_command[1:])
def unmount(self):
unmount_command = shlex.split("umount {}".format(self.config.mountpoint))
command = sh.Command(unmount_command[0])
# try to unmount, but continue if error
# error 32 is raised when the filesystem is not mounted
try:
command(*unmount_command[1:])
except sh.ErrorReturnCode_32:
pass
try:
self.lock()
except sh.ErrorReturnCode_4:
pass
def lock(self):
luks_close_command = shlex.split("cryptsetup luksClose {}".format(self.name))
command = sh.Command(luks_close_command[0])
command(*luks_close_command[1:])
def unlock(self):
luks_open_command = shlex.split(
"cryptsetup luksOpen {} {} -".format(self.loopback_path, self.name)
)
command = sh.Command(luks_open_command[0])
command(*luks_open_command[1:], _in=self.secret_key)
class FilesystemConfig(Map):
def __init__(self, filesystem):
super(FilesystemConfig, self).__init__()
self.filesystem = filesystem
self.check_config()
@property
def config_path(self):
return os.path.join(self.filesystem.root, "config.json")
def check_config(self):
"""
checks to see if a configuration file exists
when a configuration file is found, read its contents
Returns (dict): the configuration file contents
"""
if os.path.exists(self.config_path):
with open(self.config_path, "r") as fh:
data = json.load(fh)
self.update(data)
def write(self, mountpoint, automount):
self.mountpoint = mountpoint
self.automount = automount
# do not attempt to serialize the filesystem attribute
data = self.copy()
data.pop("filesystem")
with open(self.config_path, "w") as fh:
json.dump(data, fh) | zku | /zku-1.0.33-py3-none-any.whl/zymkey/filesystem.py | filesystem.py |
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# .. codeauthor:: Scott Miller <[email protected]>
"""Loader for the Zymkey App Library.
This module loads the C library `libzk_app_utils.so` for importation into the
main module of the Zymkey App Utils Library.
Notes
-----
The `zkalib` object must be set in this separate module and imported into the
main module in order for Sphinx autodoc to be able to mock the import. See
the relevant configuration in docs/conf.py:
autodoc_mock_imports = [
"zymkey.zka",
]
Attributes
----------
zkalib : CDLL
The loaded Zymkey library path that will be imported by the main module.
"""
import os
import errno
from ctypes import cdll, CDLL
import distutils.sysconfig
from .exceptions import ZymkeyLibraryError
from .settings import ZYMKEY_LIBRARY_PATH
zkalib: CDLL
loaded_lib = None
_prefixes = []
for prefix in (distutils.sysconfig.get_python_lib(), ""):
_zymkey_library_path = "{}{}".format(prefix, ZYMKEY_LIBRARY_PATH)
if os.path.exists(_zymkey_library_path):
loaded_lib = cdll.LoadLibrary(_zymkey_library_path)
break
else:
_prefixes.append(os.path.dirname(_zymkey_library_path))
else:
raise ZymkeyLibraryError(
"unable to find {}, checked {}".format(
os.path.basename(ZYMKEY_LIBRARY_PATH), _prefixes
)
)
zkalib = loaded_lib
__all__ = [
"zkalib",
] | zku | /zku-1.0.33-py3-none-any.whl/zymkey/zka.py | zka.py |
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# .. codeauthor:: Scott Miller <[email protected]>
# .. version:: 3.0
# .. date:: 2021-03-02
"""Python interface module to Zymkey Application Utilities Library.
This file contains a Python class which interfaces to the the Zymkey
Application Utilities library. This class facilitates writing user
space applications which use Zymkey to perform cryptographic
operations, such as:
1. Signing of payloads using ECDSA
2. Verification of payloads that were signed using Zymkey
3. Exporting the public key that matches Zymkey's private key
4. "Locking" and "unlocking" data objects
5. Generating random data
Additionally, there are methods for changing the i2c address (i2c units
only), setting tap sensitivity, and controlling the LED.
"""
import distutils.sysconfig
import errno
import hashlib
import os
import sys
import typing as t
from ctypes import *
from .exceptions import (VerificationError, ZymkeyLibraryError,
ZymkeyTimeoutError)
from .settings import ZYMKEY_LIBRARY_PATH
from .utils import is_string
from .zka import zkalib
__all__ = [
    "Zymkey",
    "client",
    "RecoveryStrategy",
    "RecoveryStrategyBIP39",
    "RecoveryStrategySLIP39",
]
# NOTE(review): "client" is exported above but is not defined in this part of
# the module — presumably a module-level Zymkey instance created further down;
# verify it exists.
CLOUD_ENCRYPTION_KEY: str = "cloud"
"""The constant used to refer to the cloud encryption key."""
ZYMKEY_ENCRYPTION_KEY: str = "zymkey"
"""The constant used to refer to the zymkey encryption key."""
ENCRYPTION_KEYS: t.Tuple[str, ...] = (CLOUD_ENCRYPTION_KEY, ZYMKEY_ENCRYPTION_KEY)
"""A tuple containing the various encryption key constants."""
# Maps key-type names to integer ids. "secp256r1" and "nistp256" are aliases:
# both map to id 0. (The ids are presumably passed to the C library — usage
# is not visible in this chunk.)
keyTypes: t.Mapping[str, int] = {
    "secp256r1": 0,
    "nistp256": 0,
    "secp256k1": 1,
    "ed25519": 2,
    "x25519": 3,
}
"""A dict containing the available key types."""
# Maps KDF (key derivation function) names to integer ids (same caveat as
# keyTypes above).
kdfFuncTypes: t.Mapping[str, int] = {
    "none": 0,
    "rfc5869-sha256": 1,
    "rfc5869-sha512": 2,
    "pbkdf2-sha256": 3,
    "pbkdf2-sha512": 4,
}
"""A dict containing the available KDF function types."""
class RecoveryStrategy:
    """Base recovery strategy for wallet generation.

    The base strategy performs no recovery at all: the strategy name and
    passphrase are both left empty.
    """

    def __init__(self, variant = ""):
        """Create a no-op recovery strategy.

        Parameters
        ----------
        variant
            Variant of the key type. Currently only "cardano" for ed25519
            is supported.
        """
        self.variant = variant
        self.recovery_strategy = ""
        self.passphrase = ""
class RecoveryStrategyBIP39(RecoveryStrategy):
    """The RecoveryStrategyBIP39 class definition.

    This class specifies the BIP39 recovery strategy used for wallet
    generation within Python. Derived from RecoveryStrategy.
    """

    def __init__(self, variant = "", passphrase = ""):
        """Initialize an instance of RecoveryStrategyBIP39.

        Parameters
        ----------
        variant
            Variant of the key type. Currently only "cardano" for ed25519
            is supported.
        passphrase
            Passphrase used for BIP39 generation. Can be an empty string.
            Must be b64 encoded.
        """
        # Initialize base-class attributes first so that any future base
        # fields are not silently skipped, then override for BIP39.
        super().__init__(variant)
        self.recovery_strategy = "BIP39"
        self.passphrase = passphrase
class RecoveryStrategySLIP39(RecoveryStrategy):
    """The RecoveryStrategySLIP39 class definition.

    This class specifies the SLIP39 recovery strategy used for wallet
    generation within Python. Derived from RecoveryStrategy.
    """

    def __init__(self, group_count, group_threshold, iteration_exponent, variant = "", passphrase = ""):
        """Initialize an instance of RecoveryStrategySLIP39.

        Parameters
        ----------
        group_count
            Total number of group shares to generate [Max: 14 Groups].
        group_threshold
            Number of groups needed to restore a master seed
            [threshold <= group_count].
        iteration_exponent
            The higher the exponent the more PBKDF2 hashing is done.
            [Exponent: 0-5]
        variant
            Variant of the key type. Currently only "cardano" for ed25519
            is supported.
        passphrase
            Passphrase used for generation. Can be an empty string. Must
            be b64 encoded.
        """
        # Initialize base-class attributes first so that any future base
        # fields are not silently skipped, then override for SLIP39.
        super().__init__(variant)
        self.recovery_strategy = "SLIP39"
        self.passphrase = passphrase
        self.group_count = group_count
        self.group_threshold = group_threshold
        self.iteration_exponent = iteration_exponent
class DigestObject():
    """Normalize input data into a string plus an associated hash object.

    Accepts str, bytes, or bytearray input, decodes it to text with the
    given encoding, and feeds it into a hashlib-style digest object
    (SHA-256 by default).
    """

    def __init__(self, src, encoding = "utf-8", digest = None):
        """Build a digest object over `src`.

        Parameters
        ----------
        src
            The data to hash (str, bytes, or bytearray).
        encoding
            Text encoding used to decode/encode `src`.
        digest
            Optional hashlib-style hash object; a fresh SHA-256 instance
            is created when None.

        Raises
        ------
        TypeError
            If `src` is not a str, bytes, or bytearray. (Previously this
            surfaced later as an opaque AttributeError on a None value.)
        """
        self.src = src
        self.encoding = encoding
        self.hashlib = digest if digest is not None else hashlib.sha256()
        if isinstance(src, str):
            self.str = src
        elif isinstance(src, (bytes, bytearray)):
            self.str = src.decode(encoding)
        else:
            raise TypeError(
                "src must be str, bytes, or bytearray, not %s" % type(src).__name__
            )
        # Fold the normalized text into the hash immediately.
        self.hashlib.update(self.str.encode(encoding))

    def digest(self, update_hash = False):
        """Return the current digest bytes.

        Parameters
        ----------
        update_hash
            When True, feed the source text into the hash one more time
            before producing the digest (preserves original behavior).
        """
        if update_hash:
            self.hashlib.update(self.str.encode(self.encoding))
        return self.hashlib.digest()
class Zymkey(object):
"""The Zymkey class definition.
This class provides access to the Zymkey within Python.
"""
EPHEMERAL_KEY_SLOT = -1
## @name Zymkey Context
###@{
    def __init__(self):
        """Initialize an instance of a Zymkey context.

        Opens a handle to the Zymkey via the underlying C library and
        stores it in ``self._zk_ctx``.

        Raises
        ------
        AssertionError
            If the library reports a negative return code while opening
            the context.
        """
        self._zk_ctx = c_void_p()
        ret = self._zkOpen(byref(self._zk_ctx))
        # To be able to import this module and class for parsing of inline
        # documentation by Sphinx autodoc, the import of the zkalib must be
        # mocked by Sphinx. Because the comparison will fail when the mocked
        # value of `ret` is compared with an integer, check for a TypeError and
        # pass on that exception.
        #
        # To be extra safe, it also checks the exception
        # text to ensure that the exception is a result of a mocked value
        # being compared.
        try:
            if ret < 0:
                raise AssertionError("bad return code {!r}".format(ret))
        except TypeError as e:
            if "instances of 'zkOpen' and 'int'" not in sys.exc_info()[1].args[0]:
                raise e
            pass
## @brief The class destructor closes a Zymkey context
def __del__(self):
if self._zk_ctx != None:
ret = self._zkClose(self._zk_ctx)
# See the explanation for the `try` block above in `__init__()`. This
# try block exists for the same reason.
try:
if ret < 0:
raise AssertionError("bad return code %d" % ret)
except TypeError as e:
if "instances of 'zkClose' and 'int'" not in sys.exc_info()[1].args[0]:
raise e
pass
self._zk_ctx = None
###@}
## @name LED Control
###@{
def led_on(self) -> None:
"""Turn the LED on.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
ret = self._zkLEDOn(self._zk_ctx)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def led_off(self) -> None:
"""Turn the LED off.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
ret = self._zkLEDOff(self._zk_ctx)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def led_flash(self, on_ms: int, off_ms: int = 0, num_flashes: int = 0) -> None:
"""Flash the LED.
Parameters
----------
on_ms
The amount of time in milliseconds that the LED will be on for.
off_ms
The amount of time in milliseconds that the LED will be off for. If
this parameter is set to 0 (default), the off time is the same as
the on time.
num_flashes
The number of on/off cycles to execute. If this parameter is set
to 0 (default), the LED flashes indefinitely.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
if off_ms == 0:
off_ms = on_ms
ret = self._zkLEDFlash(self._zk_ctx, on_ms, off_ms, num_flashes)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
###@}
## @name Random Number Generation
###@{
def get_random(self, num_bytes: int) -> bytearray:
"""Get some random bytes.
Parameters
----------
num_bytes
The number of random bytes to get.
Returns
-------
bytearray
An array of bytes returned by the random number generator.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
rdata = c_void_p()
ret = self._zkGetRandBytes(self._zk_ctx, byref(rdata), num_bytes)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
rc = (c_ubyte * num_bytes).from_address(rdata.value)
rd_array = bytearray(rc)
return rd_array
def create_random_file(self, file_path: str, num_bytes: int) -> None:
"""Deposit random data in a file.
Parameters
----------
file_path
The absolute path name for the destination file.
num_bytes
The number of random bytes to get.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
ret = self._zkCreateRandDataFile(
self._zk_ctx, file_path.encode("utf-8"), num_bytes
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
    def lock(
        self,
        src: t.Union[str, bytes],
        dst: t.Optional[str] = None,
        encryption_key: str = ZYMKEY_ENCRYPTION_KEY,
    ) -> t.Union[None, bytearray]:
        """Lock up source (plaintext) data.

        This method encrypts and signs a block of data.

        The `encryption_key` parameter selects the key used to lock the
        data:

        1. ``'zymkey'`` (default): the one-way key, meant to lock up data
           only on the local host computer. Data encrypted using this key
           cannot be exported and deciphered anywhere else.
        2. ``'cloud'``: the shared key. (Its exact semantics are not
           visible in this module; see the Zymkey library documentation.)

        Parameters
        ----------
        src
            The source (plaintext) data to lock.
            If a `str` is passed to this method, the value is assumed to be
            the absolute path to the location of the source file. If `bytes`
            or `bytearray` is passed, it is assumed to contain binary data.
        dst
            The destination (ciphertext) of the locked data.
            If a `str` is passed to this method, the value is assumed to be
            the absolute path to the location of the file where the
            destination data is meant to be written. Otherwise, if `None` is
            passed to the method (the default), the locked data is returned
            from the method as a bytearray.
        encryption_key
            This specifies which key will be used to lock the data: either
            ``'zymkey'`` (default, one-way key) or ``'cloud'`` (shared key).

        Returns
        -------
        bytearray or None
            The locked data is returned as a bytearray if no destination is
            specified when this method is called. Otherwise, `None` is
            returned.

        Raises
        ------
        AssertionError
            If `encryption_key` is not a recognized key name, or if the
            underlying Zymkey library call fails.
        """
        assert encryption_key in ENCRYPTION_KEYS
        use_shared_key = encryption_key == CLOUD_ENCRYPTION_KEY
        dst_path: t.Optional[bytes] = None
        dst_data: t.Optional[t.Union[c_void_p, int]] = None
        # A string destination is a file path; anything else means "return
        # the ciphertext as bytes".
        if isinstance(dst, str):
            dst_path = dst.encode("utf-8")
        else:
            dst_data = c_void_p()
            dst_data_sz = c_int()
        # If the `src` is a file path
        if isinstance(src, str):
            src = src.encode("utf-8")
            # If the `src` is a file path and `dst` is a file path
            if dst_path:
                ret = self._zkLockDataF2F(self._zk_ctx, src, dst_path, use_shared_key)
                if ret < 0:
                    raise AssertionError("bad return code %d" % ret)
                return None
            # If the `src` is a file path and `dst` is not a file path
            else:
                dst_data = c_void_p()
                dst_data_sz = c_int()
                ret = self._zkLockDataF2B(
                    self._zk_ctx,
                    src,
                    byref(dst_data),
                    byref(dst_data_sz),
                    use_shared_key,
                )
                if ret < 0:
                    raise AssertionError("bad return code %d" % ret)
                # Copy the library-filled ciphertext buffer into a
                # Python-owned bytearray.
                dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)  # type: ignore
                data_array = bytearray(dc)
                return data_array
        # If the `src` is not a file path
        else:
            src_sz = len(src)
            src_c_ubyte = (c_ubyte * src_sz)(*src)
            # If the `src` is not a file path and `dst` is a file path
            if dst_path:
                ret = self._zkLockDataB2F(
                    self._zk_ctx, byref(src_c_ubyte), len(src), dst_path, use_shared_key
                )
                if ret < 0:
                    raise AssertionError("bad return code %d" % ret)
                return None
            # If the `src` is not a file path and `dst` is not a file path
            else:
                dst_data = c_void_p()
                dst_data_sz = c_int()
                ret = self._zkLockDataB2B(
                    self._zk_ctx,
                    byref(src_c_ubyte),
                    len(src),
                    byref(dst_data),
                    byref(dst_data_sz),
                    use_shared_key,
                )
                if ret < 0:
                    raise AssertionError("bad return code %d" % ret)
                # Copy the library-filled ciphertext buffer into a
                # Python-owned bytearray.
                dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)  # type: ignore
                data_array = bytearray(dc)
                return data_array
###@}
## @name Unlock Data
###@{
    def unlock(
        self,
        src: t.Union[str, bytes],
        dst: t.Optional[str] = None,
        encryption_key: str = ZYMKEY_ENCRYPTION_KEY,
        raise_exception: bool = True,
    ) -> t.Union[None, bytearray, t.NoReturn]:
        """Unlock source (ciphertext) data.

        This method verifies a locked object's signature and decrypts the
        associated ciphertext data.

        The `encryption_key` parameter selects the key used to unlock the
        data:

        1. ``'zymkey'`` (default): the one-way key, meant to lock up data
           only on the local host computer. Data encrypted using this key
           cannot be exported and deciphered anywhere else.
        2. ``'cloud'``: the shared key. (Its exact semantics are not
           visible in this module; see the Zymkey library documentation.)

        Parameters
        ----------
        src
            The source (ciphertext) data to verify and decrypt.
            If a `str` is passed to this method, the value is assumed to be
            the absolute path to the location of the source file. If `bytes`
            or `bytearray` is passed, it is assumed to contain binary data.
        dst
            The destination of the decrypted data (plaintext).
            If a `str` is passed to this method, the value is assumed to be
            the absolute path to the location of the file where the
            destination data is meant to be written. Otherwise, if `None` is
            passed to the method (the default), the unlocked data is
            returned from the method as a bytearray.
        encryption_key
            This specifies which key will be used to unlock the data: either
            ``'zymkey'`` (default, one-way key) or ``'cloud'`` (shared key).
        raise_exception
            Specifies if an exception should be raised if the signature
            verification of the locked object fails.
            NOTE(review): as written, this only takes effect on the
            bytes-to-bytes path; the file-destination paths below never
            check for a verification failure (ret == 0).

        Returns
        -------
        bytearray or None
            The unlocked data is returned as a bytearray if no destination
            is specified when this method is called. Otherwise, `None` is
            returned.

        Raises
        ------
        AssertionError
            If `encryption_key` is not a recognized key name, or if the
            underlying Zymkey library call fails.
        VerificationError
            If signature verification fails on the bytes-to-bytes path and
            `raise_exception` is True.
        """
        # Determine if source and destination are strings. If so, they must be
        # filenames
        src_is_file = is_string(src)
        dst_is_file = is_string(dst)
        assert encryption_key in ENCRYPTION_KEYS
        use_shared_key = encryption_key == CLOUD_ENCRYPTION_KEY
        # Prepare src if it is not specifying a filename
        if not src_is_file:
            src_sz = len(src)
            src_c_ubyte = (c_ubyte * src_sz)(*src)
        else:
            src = src.encode("utf-8")
        # Prepare dst if it is not specifying a filename
        if not dst_is_file:
            dst_data = c_void_p()
            dst_data_sz = c_int()
        else:
            dst = dst.encode("utf-8")
        if src_is_file and dst_is_file:
            ret = self._zkUnlockDataF2F(self._zk_ctx, src, dst, use_shared_key)
            if ret < 0:
                raise AssertionError("bad return code %d" % ret)
        if not src_is_file and dst_is_file:
            ret = self._zkUnlockDataB2F(
                self._zk_ctx, byref(src_c_ubyte), len(src), dst, use_shared_key
            )
            if ret < 0:
                raise AssertionError("bad return code %d" % ret)
        if src_is_file and not dst_is_file:
            ret = self._zkUnlockDataF2B(
                self._zk_ctx, src, byref(dst_data), byref(dst_data_sz), use_shared_key
            )
            if ret < 0:
                raise AssertionError("bad return code %d" % ret)
            # Copy the library-filled plaintext buffer into a Python-owned
            # bytearray.
            dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)
            data_array = bytearray(dc)
            return data_array
        if not src_is_file and not dst_is_file:
            ret = self._zkUnlockDataB2B(
                self._zk_ctx,
                byref(src_c_ubyte),
                len(src),
                byref(dst_data),
                byref(dst_data_sz),
                use_shared_key,
            )
            if ret < 0:
                raise AssertionError("bad return code %d" % ret)
            # On this path, ret == 0 signals a signature verification failure.
            if ret == 0:
                if raise_exception:
                    raise VerificationError()
                return None
            if ret == 1:
                dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)
                data_array = bytearray(dc)
                return data_array
        return None
###@}
## @name ECDSA
###@{
def sign(self, src: t.Union[str, bytes, bytearray], slot: int = 0, return_recid: bool = False,
encoding: str = "utf-8", digest: t.Any = None) -> bytearray:
"""Generate a signature using the Zymkey's ECDSA private key.
Parameters
----------
src
The SHA256 digest of the data that will be used to generate the signature.
slot
The key slot used for signing. [HSM6]Slot can't contain a X25519 key pair
return_recid : bool
This parameter asks for the y parity to be returned.
encoding : str
This parameter asks for the encoding for the string source.
digest : _hashlib.HASH
This parameter asks for the type of hash. Can be None. Defaults to sha256.
Returns
-------
bytearray
A bytearray of the signature.
int
If return_recid = True, then return the y parity of the signature (either a 1 or 0).
Todo
----
Allow for overloading of source parameter in similar fashion to lock/unlock.
"""
if (digest is None):
digest = hashlib.sha256()
digest_obj = DigestObject(src, encoding, digest)
return self.sign_digest(digest_obj, slot=slot, return_recid=return_recid)
def sign_digest(
self, digest: t.Any, slot: int = 0, return_recid: bool = False
) -> bytearray:
"""Generate a signature using the Zymkey's ECDSA private key.
Parameters
----------
digest : _hashlib.HASH
A encoded str instance representing the digest to be signed.
slot : int
This parameter specifies the key slot used for signing. [HSM6]Slot can't contain a X25519 key pair
return_recid : bool
This parameter asks for the y parity to be returned.
Returns
-------
bytearray
The signature of the SHA-256 digest passed to this method.
int
If return_recid = True, then return the y parity of the signature (either a 1 or 0).
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
Todo
----
Allow for overloading of source parameter in similar fashion to lock/unlockData.
"""
digest_bytes = bytearray(digest.digest())
src_sz = len(digest_bytes)
src_c_ubyte = (c_ubyte * src_sz)(*digest_bytes)
dst_data = c_void_p()
dst_data_sz = c_int()
recovery_id = c_uint()
ret = self._zkGenECDSASigFromDigestWithRecID(
self._zk_ctx,
src_c_ubyte,
slot,
byref(dst_data),
byref(dst_data_sz),
byref(recovery_id),
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)
data_array = bytearray(dc)
if return_recid:
return data_array, recovery_id
else:
return data_array
def verify(
self,
src: t.Union[str, bytes, bytearray],
sig: bytearray,
raise_exception: bool = True,
pubkey_slot: int = 0,
foreign: bool = False,
encoding: str = "utf-8",
digest: t.Any = None,
) -> bool:
"""Verify data against a signature.
The public key is not specified in the parameter list to ensure
that the public key that matches the Zymkey's ECDSA private key
is used.
Parameters
----------
src : TYPE
The buffer to verify.
sig : TYPE
The signature to verify against.
raise_exception : bool
By default, when verification fails a `VerificationError` will be
raised, unless this is set to `False`.
pubkey_slot : int
The key slot to use to verify the signature against. Defaults to the
first key slot.
foreign : bool
If false, the normal key store is referenced. Otherwise, the foreign
public key store is referenced.
**Note:** This parameter is only applicable for Supported Devices: HSM6, Secure Compute Module.
encoding : str
This parameter asks for the encoding for the string source.
digest : _hashlib.HASH
This parameter asks for the type of hash. Can be None. Defaults to sha256.
Returns
-------
bool
Returns `True` for a good verification or `False` for a bad
verification when the `raise_exception` parameters is `False`.
"""
if (digest is None):
digest = hashlib.sha256()
digest_obj = DigestObject(src, encoding, digest)
return self.verify_digest(
digest_obj,
sig,
raise_exception=raise_exception,
pubkey_slot=pubkey_slot,
foreign=foreign,
)
def verify_digest(
self,
digest: t.Any,
sig: bytearray,
raise_exception: bool = True,
pubkey_slot: int = 0,
foreign: bool = False,
) -> bool:
"""Verify a signature using the Zymkey's ECDSA public key.
The public key is not specified in the parameter list to ensure
that the public key that matches the Zymkey's ECDSA private key
is used.
Parameters
----------
digest : TYPE
A hashlib instance that will be used to generate the signature.
sig : TYPE
The signature to verify.
raise_exception : bool
By default, when verification fails, a `VerificationError` will be
raised, unless this is set to `False`.
pubkey_slot : int
The key slot to use to verify the signature against. Defaults to
the first key slot.
foreign : bool
If false, the normal key store is referenced. Otherwise, the foreign
public key store is referenced.
**Note:** This parameter is only applicable for Supported Devices: HSM6, Secure Compute Module.
Returns
-------
bool
Returns `True` for a good verification or `False` for a bad
verification when `raise_exception` is `False`.
"""
digest_bytes = bytearray(digest.digest())
src_sz = len(digest_bytes)
sig_sz = len(sig)
src_c_ubyte = (c_ubyte * src_sz)(*digest_bytes)
sig_c_ubyte = (c_ubyte * sig_sz)(*sig)
if not foreign:
ret = self._zkVerifyECDSASigFromDigest(
self._zk_ctx, src_c_ubyte, pubkey_slot, sig_c_ubyte, sig_sz
)
else:
ret = self._zkVerifyECDSASigFromDigestWithForeignKeySlot(
self._zk_ctx, src_c_ubyte, pubkey_slot, sig_c_ubyte, sig_sz
)
if ret == 0:
if raise_exception:
raise VerificationError()
return False
if ret == 1:
return True
else:
raise AssertionError("bad return code %d" % ret)
###@}
## @name ECDH and KDF
###@{
def ecdh(
self,
local_slot: int,
peer_pubkey: t.Union[t.List[bytes], int],
kdf_func_type: str = "none",
salt: t.Optional[t.List[bytes]] = [],
info: t.Optional[t.List[bytes]] = [],
num_iterations: int = 1,
peer_pubkey_slot_is_foreign: bool = True,
derived_key_size: bool = 32,
) -> bytearray:
"""Derive a key or a pre-master secret from an ECDH operation. (Supported Devices: HSM6, Secure Compute Module).
Parameters
----------
local_slot : int
The local key slot to use.
peer_pubkey : t.Union[t.List[bytes], int]
The public key of the peer used to generate the pre-master secret
against the private key located in `local_slot`. This parameter can
be a list of `bytes` if the key is provided explicitly or an `int`
if it refers to a key slot.
kdf_func_type : str
Specifies the KDF (Key Derivation Function) to use
for the returned derived key. Valid values are:
* `"none"`: just return the pre-master secret. NOTE: The raw pre-master
secret should not be used as a derived key should be put through a
suitable KDF. Use 'none' when it is desired to use a different KDF
than what is offered by this method.
* `"rfc5869-sha256"`: RFC5869 with SHA256
* `"rfc5869-sha512"`: RFC5869 with SHA512
* `"pbkdf2-sha256"`: PBKDF2 with SHA256
* `"pbkdf2-sha512"`: PBKDF2 with SHA512
salt : t.Optional[t.List[bytes]]
A unique identifier for KDF. Ignored for `kdf_func_type='none'`.
info : t.Optional[t.List[bytes]]
A unique field for rfc5869. Ignore for other KDF types.
num_iterations : int
The number of iterations that the KDF should complete.
peer_pubkey_slot_is_foreign : bool
TODO_DESCRIPTION
derived_key_size : bool
TODO_DESCRIPTION
Returns
-------
bytearray
The computed signature.
Todo
----
Allow for overloading of source parameter in similar fashion to lock/unlockData.
"""
derived_key = c_void_p()
salt_sz = len(salt)
salt_c_ubyte = (c_ubyte * salt_sz)(*salt)
info_sz = len(info)
info_c_ubyte = (c_ubyte * info_sz)(*info)
# Get the kdf_func_type
kdf_func = kdfFuncTypes[kdf_func_type]
# Get the type of the peer public key. If the type is 'int', peer_pubkey
# refers to a slot internal to the zymkey. Otherwise, a list with the
# contents of the public key is expected.
if type(peer_pubkey) == "int" or type(peer_pubkey) is int:
peer_pubkey = c_int(peer_pubkey)
peer_pubkey_slot_is_foreign = c_bool(peer_pubkey_slot_is_foreign)
if kdf_func_type == "none":
self._zkDoRawECDHWithIntPeerPubkey(
self._zk_ctx,
local_slot,
peer_pubkey,
peer_pubkey_slot_is_foreign,
byref(derived_key),
)
dst_data_sz = c_int(32)
else:
self._zkDoECDHAndKDFWithIntPeerPubkey(
self._zk_ctx,
kdf_func - 1,
local_slot,
peer_pubkey,
peer_pubkey_slot_is_foreign,
salt_c_ubyte,
salt_sz,
info_c_ubyte,
info_sz,
num_iterations,
derived_key_size,
byref(derived_key),
)
else:
peer_pubkey_sz = len(peer_pubkey)
peer_pubkey_c_ubyte = (c_ubyte * peer_pubkey_sz)(*peer_pubkey)
if kdf_func_type == "none":
self._zkDoRawECDH(
self._zk_ctx,
local_slot,
peer_pubkey_c_ubyte,
peer_pubkey_sz,
byref(derived_key),
)
dst_data_sz = c_int(32)
else:
self._zkDoECDHAndKDF(
self._zk_ctx,
kdf_func - 1,
local_slot,
peer_pubkey_c_ubyte,
peer_pubkey_sz,
salt_c_ubyte,
salt_sz,
info_c_ubyte,
info_sz,
num_iterations,
derived_key_size,
byref(derived_key),
)
dc = (c_ubyte * derived_key_size).from_address(derived_key.value)
data_array = bytearray(dc)
return data_array
###@}
## @name Key Management
###@{
def create_ecdsa_public_key_file(self, filename: str, slot: int = 0) -> None:
"""Create a file with the PEM-formatted ECDSA public key.
**[DEPRECATED]:** Use `create_public_key_file` instead.
This method is useful for generating a Certificate Signing Request.
Parameters
----------
filename : str
The absolute file path where the public key will be stored in PEM format.
slot : int
The key slot for the public key.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
ret = self._zkSaveECDSAPubKey2File(self._zk_ctx, filename.encode("utf-8"), slot)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def create_public_key_file(
self, filename: str, slot: int = 0, foreign: bool = False
) -> None:
"""Create a file with the PEM-formatted public key.
This method is useful for generating a Certificate Signing Request.
Parameters
----------
filename : str
The absolute file path where the public key will be stored in PEM format.
slot : int
The key slot for the public key.
foreign : bool
If `True`, designates the pubkey slot to come from the foreign keystore (Supported Devices: HSM6, Secure Compute Module).
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
ret = self._zkExportPubKey2File(
self._zk_ctx, filename.encode("utf-8"), slot, foreign
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def get_ecdsa_public_key(self, slot: int = 0) -> bytearray:
"""Retrieves the ECDSA public key as a binary bytearray.
**[DEPRECATED]:** Use `get_public_key` instead.
This method is used to retrieve the public key in binary form.
Parameters
----------
slot : int
The key slot for the public key.
Returns
-------
bytearray
The public key in binary form.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
dst_data = c_void_p()
dst_data_sz = c_int()
ret = self._zkGetECDSAPubKey(
self._zk_ctx, byref(dst_data), byref(dst_data_sz), slot
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)
data_array = bytearray(dc)
return data_array
def get_public_key(self, slot: int = 0, foreign: bool = False):
"""Retrieves a public key as a binary bytearray.
This method is used to retrieve the public key in binary form.
Parameters
----------
slot : int
The key slot for the public key. Zymkey and HSM4 have slots 0, 1, and 2.
foreign : bool
If `True`, designates the pubkey slot to come from the foreign keystore (Supported Devices: HSM6, Secure Compute Module).
Returns
-------
bytearray
The public key in binary form.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
dst_data = c_void_p()
dst_data_sz = c_int()
ret = self._zkExportPubKey(
self._zk_ctx, byref(dst_data), byref(dst_data_sz), slot, foreign
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
dc = (c_ubyte * dst_data_sz.value).from_address(dst_data.value)
data_array = bytearray(dc)
return data_array
def get_slot_alloc_list(self, foreign: bool = False) -> t.Tuple[list, int]:
"""Get a list of the allocated slots in the key store (Supported Devices: HSM6, Secure Compute Module).
This method gets a list of the allocated slots in the key store.
Parameters
----------
foreign : bool
If `True`, designates the pubkey slot to come from the foreign keystore (Supported Devices: HSM6, Secure Compute Module).
Returns
-------
t.Tuple[list, int]
The allocation list and the maximum number of keys
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
alloc_key_slot_list = c_void_p()
alloc_key_slot_list_sz = c_int()
max_num_keys = c_int()
ret = self._zkGetAllocSlotsList(
self._zk_ctx,
foreign,
byref(max_num_keys),
byref(alloc_key_slot_list),
byref(alloc_key_slot_list_sz),
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
dc = (c_int * alloc_key_slot_list_sz.value).from_address(
alloc_key_slot_list.value
)
alloc_keys = list(dc)
return alloc_keys, max_num_keys.value
def store_foreign_public_key(self, key_type: str, pubkey: bytearray) -> int:
"""Stores a foreign public key on the Zymkey foreign keyring (Supported Devices: HSM6, Secure Compute Module).
This method stores a foreign public key onto the Zymkey foreign public keyring.
Parameters
----------
key_type : TYPE
The EC curve type that should be associated with the public key.
pubkey : TYPE
The public key binary data.
Returns
-------
int
The slot allocated to the key, or less than one for failure.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
"""
pubkey_sz = len(pubkey)
pubkey_c_ubyte = (c_ubyte * pubkey_sz)(*pubkey)
kt = keyTypes[key_type]
ret = self._zkStoreForeignPubKey(self._zk_ctx, kt, pubkey_c_ubyte, pubkey_sz)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
return ret
def disable_public_key_export(self, slot=0, foreign=False):
"""Disable exporting of a public key at a given slot (Supported Devices: HSM6, Secure Compute Module).
This method permanently disables exporting a public key from a
given slot.
Parameters
----------
slot
This parameter specifies the key slot for the public key.
foreign
If true, the slot refers to the foreign public keyring.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
ret = self._zkDisablePubKeyExport(self._zk_ctx, slot, foreign)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def gen_key_pair(self, key_type):
"""Generate a new key pair (Supported Devices: HSM6, Secure Compute Module).
This method generates a new key pair of the specified type.
Parameters
----------
key_type
This parameter indicates the EC curve type that should be
associated with the new key pair.
Returns
-------
TYPE
the slot allocated to the key or less than one for failure.
"""
kt = keyTypes[key_type]
ret = self._zkGenKeyPair(self._zk_ctx, kt)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
return ret
def gen_ephemeral_key_pair(self, key_type):
"""Generate a new ephemeral key pair (Supported Devices: HSM6, Secure Compute Module).
This method generates a new ephemeral key pair of the specified
type, overwriting the previous ephemeral key pair.
Parameters
----------
key_type
This parameter indicates the EC curve type that should be
associated with the new key pair.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
kt = keyTypes[key_type]
ret = self._zkGenEphemeralKeyPair(self._zk_ctx, kt)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def remove_key(self, slot, foreign=False):
"""Remove a key at the designated slot (Supported Devices: HSM6, Secure Compute Module).
This method removes a key at the designated slot in either the
standard key store or the foreign public keyring.
Parameters
----------
slot
This parameter specifies the key slot for the key.
foreign
If true, a public key in the foreign keyring will be deleted.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
ret = self._zkRemoveKey(self._zk_ctx, slot, foreign)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def invalidate_ephemeral_key(self) -> int:
"""Invalidate the ephemeral key (Supported Devices: HSM6, Secure Compute Module).
This method invalidates the ephemeral key, effectively removing
it from service until a new key is generated.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
ret = self._zkInvalidateEphemeralKey(self._zk_ctx)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
return ret
###@}
## @name Digital Wallet (BIP 32/39/44)
###@{
    def gen_wallet_master_seed(
        self,
        key_type,
        master_gen_key,
        wallet_name,
        recovery_strategy=RecoveryStrategy(),
    ):
        """Generates a new master seed for creating a new BIP32 wallet (Supported Devices: HSM6, Secure Compute Module).

        This method generates a new master seed for creating a new BIP32
        wallet.

        Parameters
        ----------
        key_type
            This parameter indicates the EC curve type that should be
            associated with the new key pair.
        master_gen_key
            The master generator key (bytearray) used in the
            derivation of the child key.
        wallet_name
            The name of the wallet (string) that this master seed
            is attached to.
        recovery_strategy
            RecoveryStrategy() class that defines what strategy to be used
            {None, BIP39, SLIP39} are currently supported.
            RecoveryStrategy->passphrase must be b64 encoded.

        Returns
        -------
        TYPE
            the slot the master seed was generated in. 0 for starting SLIP39 sessions.
            For BIP39, a (slot, mnemonic sentence) tuple is returned instead.
        """
        master_gen_key_sz = len(master_gen_key)
        master_gen_key_c_ubyte = (c_ubyte * master_gen_key_sz)(*master_gen_key)
        # Out-pointer the library fills with the BIP39 mnemonic string (only
        # used for the BIP39 strategy).
        BIP39_mnemonic = c_void_p()
        # NOTE(review): the default argument is a single shared
        # RecoveryStrategy instance (mutable default); passing None explicitly
        # yields a fresh instance instead.
        if recovery_strategy is None:
            recovery_strategy = RecoveryStrategy()
        if recovery_strategy.recovery_strategy == "BIP39":
            mnemonic_ptr = byref(BIP39_mnemonic)
        else:
            # NULL pointer: no mnemonic output requested from the library.
            mnemonic_ptr = POINTER(c_void_p)()
        kt = keyTypes[key_type]
        if recovery_strategy.recovery_strategy != "SLIP39":
            # None/BIP39: generate the master seed in one call.
            ret = self._zkGenWalletMasterSeed(
                self._zk_ctx,
                kt,
                recovery_strategy.variant.encode("utf-8"),
                wallet_name.encode("utf-8"),
                master_gen_key_c_ubyte,
                master_gen_key_sz,
                recovery_strategy.passphrase.encode("utf-8"),
                mnemonic_ptr,
            )
        else:
            # SLIP39: open a share-generation session; shares are added later
            # via set_gen_SLIP39_group_info()/add_gen_SLIP39_member_pwd().
            ret = self._zkOpenGenSLIP39Session(
                self._zk_ctx,
                kt,
                recovery_strategy.variant.encode("utf-8"),
                wallet_name.encode("utf-8"),
                master_gen_key_c_ubyte,
                master_gen_key_sz,
                recovery_strategy.group_count,
                recovery_strategy.group_threshold,
                recovery_strategy.iteration_exponent,
                recovery_strategy.passphrase.encode("utf-8"),
            )
        if ret < 0:
            raise AssertionError("bad return code %d" % ret)
        if recovery_strategy.recovery_strategy == "BIP39":
            # Convert the returned C string into a Python str.
            mnemonic = cast(BIP39_mnemonic, c_char_p)
            return ret, mnemonic.value.decode("utf-8")
        elif recovery_strategy.recovery_strategy == "SLIP39":
            return ret
        else:
            return ret
def set_gen_SLIP39_group_info(
self,
group_index,
member_count,
member_threshold,
):
"""Configures the number of members and threshold for the group shares (Supported Devices: HSM6, Secure Compute Module).
This method sets the number of members required for a group share once a SLIP39 session
was opened via gen_wallet_master_seed().
Parameters
----------
group_index
This parameter indicates the index of the group share
to set the amount of member count/threshold for.
member_count
The total number of members (mnemonics) in this group share.
member_threshold
The number of members (mnemonics) needed to reconstruct the group share.
Returns
-------
TYPE
0 on successful configuration. non-zero for error.
"""
ret = self._zkSetSLIP39GroupInfo(self._zk_ctx, group_index, member_count, member_threshold)
if (ret < 0):
raise AssertionError("bad return code %d" % ret)
def add_gen_SLIP39_member_pwd(
self,
passphrase = "",
):
"""Generates a new mnemonic_str tied to a SLIP39 member (Supported Devices: HSM6, Secure Compute Module).
This method generates a new member of a group share. Members can also be
passphrase protected. Passphrases are not required to be unique. This
function is meant to be called after configuring a group via set_gen_SLIP39_group_info().
Parameters
----------
passphrase
This parameter indicates the passphrase of the SLIP39 member
and is associated with the mnemonic string generated. Can
be empty string for no passphrase.
Returns
-------
TYPE
A 24-word recovery phrase known as a mnemonic sentence. non-zero for error.
"""
mnemonic_sentence = c_void_p()
mnemonic_ptr = byref(mnemonic_sentence)
ret = self._zkAddSLIP39Member(self._zk_ctx, passphrase.encode("utf-8"), mnemonic_ptr)
mnemonic = cast(mnemonic_sentence, c_char_p)
if mnemonic is None:
raise AssertionError("Mnemonic returned nothing")
return ret, mnemonic.value.decode("utf-8")
def cancel_SLIP39_session(self):
"""Cancels an active SLIP39 session (Supported Devices: HSM6, Secure Compute Module).
This method cancels an ongoing SLIP39 session for both master seed generation and recovery.
Returns
-------
TYPE
0 on success. non-zero for error.
"""
ret = self._zkCancelSLIP39Session(self._zk_ctx)
if (ret < 0):
raise AssertionError("bad return code %d" % ret)
def gen_oversight_wallet(
self,
key_type,
pub_key,
chain_code,
node_addr,
wallet_name,
variant = ""
):
"""Generates a supervisory bip32 wallet. (Supported Devices: HSM6, Secure Compute Module).
This method generates a new supervisory Bip32 wallet. Meant for
read-only transactions and supervising history.
Parameters
----------
key_type
This parameter indicates the EC curve type that should be
associated with the new key pair.
pub_key
The public key (bytearray) of the last
hardened node of the node address.
chain_code
The chain code (bytearray) of the last
hardened node of the node address.
node_addr
The bip32 node address used. (EX: "m/1852'/1815'/0'").
wallet_name
The name of the wallet (string) that this master seed
is attached to.
variant
Key type variant to generate from. Currently only "cardano" is
supported for "ed25519".
Returns
-------
TYPE
the slot the oversight wallet was generated in.
"""
kt = keyTypes[key_type]
pubkey_sz = len(pub_key)
pubkey_c_ubyte = (c_ubyte * pubkey_sz)(*pub_key)
chaincode_sz = len(chain_code)
chaincode_c_ubyte = (c_ubyte * chaincode_sz)(*chain_code)
ret = self._zkGenOversightWallet(
self._zk_ctx,
kt,
variant.encode("utf-8"),
pubkey_c_ubyte,
chaincode_c_ubyte,
node_addr.encode("utf-8"),
wallet_name.encode("utf-8")
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
return ret
def gen_wallet_child_key(self, parent_key_slot, index, hardened, return_chain_code = False):
"""Generates a child key based on a parent key that is in a wallet (Supported Devices: HSM6, Secure Compute Module).
This method generates a child key based on a parent key that is
in a wallet.
Parameters
----------
parent_key_slot
This parameter specifies the parent key slot. This
key must already be part of a wallet.
index
This parameter represents the index for the child key
derivation which becomes part of the node address.
hardened
If true, the key is a hardened key.
return_chain_code
If true, returns the chain code for the key as well.
(Must be from a hardened key).
Returns
-------
TYPE
the allocated slot on success, or a tuple containing the chain code as well.
"""
chain_code_data = c_void_p()
if return_chain_code:
chain_ptr = byref(chain_code_data)
else:
chain_ptr = POINTER(c_void_p)()
ret = self._zkGenWalletChildKey(self._zk_ctx, parent_key_slot, index, hardened, return_chain_code, chain_ptr)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
if return_chain_code:
dc = (c_ubyte * 32).from_address(chain_code_data.value)
chain_code = bytearray(dc)
return ret, chain_code
else:
return ret
def restore_wallet_master_seed(
self, key_type, master_gen_key, wallet_name, recovery_strategy, mnemonics = None
):
"""Restore a wallet's master seed based on the recovery strategy object (Supported Devices: HSM6, Secure Compute Module).
This method restores a wallet's master seed based on a
mnemonic string and a master generator key. This method can be
used in the process of wallet duplication.
Parameters
----------
key_type
This parameter indicates the EC curve type that should be
associated with the new key pair.
master_gen_key
The master generator key used in the derivation of
the child key.
wallet_name
Name of the new wallet to be generated.
recovery_strategy
RecoveryStategy class object that provides the type of recovery
and key variant required for restoration.
mnemonics
Mnemonic sentences required for restoration, number of mnemonics dependant
on recovery strategy used. This field is not used for SLIP39.
Returns
-------
TYPE
the allocated slot on success
"""
master_gen_key_sz = len(master_gen_key)
master_gen_key_c_ubyte = (c_ubyte * master_gen_key_sz)(*master_gen_key)
kt = keyTypes[key_type]
if recovery_strategy.recovery_strategy == "BIP39":
if mnemonics is None:
raise AssertionError("BIP39 requires a mnemonic sentence")
ret = self._zkRestoreWalletMasterSeedFromBIP39Mnemonic(
self._zk_ctx,
kt,
recovery_strategy.variant.encode("utf-8"),
wallet_name.encode("utf-8"),
master_gen_key_c_ubyte,
master_gen_key_sz,
recovery_strategy.passphrase.encode("utf-8"),
mnemonics.encode("utf-8"),
)
elif recovery_strategy.recovery_strategy == "SLIP39":
ret = self._zkOpenRestoreSLIP39Session(
self._zk_ctx,
kt,
recovery_strategy.variant.encode("utf-8"),
wallet_name.encode("utf-8"),
master_gen_key_c_ubyte,
master_gen_key_sz,
recovery_strategy.passphrase.encode("utf-8"),
)
else:
raise AssertionError("Not a supported recovery strategy")
if ret < 0 and recovery_strategy.recovery_strategy != "SLIP39":
raise AssertionError("bad return code %d" % ret)
return ret
def add_restore_SLIP39_mnemonic(
self,
mnemonic_sentence,
passphrase = "",
):
"""Feed a mnemonic string and the passphrase associated with it (Supported Devices: HSM6, Secure Compute Module).
This method feeds in mnemonic sentences (shards) into the module.
Meant to be called after starting a restore_wallet_master_seed() SLIP39 session.
Will return -1 until the master seed is reconstructed properly.
Parameters
----------
mnemonic_sentence
24-word recovery phrase associated with the SLIP39 member.
passphrase
This parameter indicates the passphrase of the SLIP39 member
and is associated with the mnemonic string generated. Can
be empty string for no passphrase.
Returns
-------
TYPE
A -1 for no change in status. Otherwise returns the slot of the master seed successfully
reconstructed from the last shard passed in.
"""
ret = self._zkAddRestoreSLIP39Mnemonic(self._zk_ctx, passphrase.encode("utf-8"), mnemonic_sentence.encode("utf-8"))
return ret
def get_wallet_node_addr(self, slot):
"""Get a wallet node address from a key slot (Supported Devices: HSM6, Secure Compute Module)
This method gets a wallet entry's node address from its key slot
assignment. The wallet name and master seed slot are also
returned.
Parameters
----------
slot
The key slot assignment.
Returns
-------
TYPE
the node address, wallet name and master seed key slot.
"""
node_addr = c_void_p()
wallet_name = c_void_p()
master_seed_slot = c_int()
ret = self._zkGetWalletNodeAddrFromKeySlot(
self._zk_ctx,
slot,
byref(node_addr),
byref(wallet_name),
byref(master_seed_slot),
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
na = cast(node_addr, c_char_p)
wn = cast(wallet_name, c_char_p)
return (
na.value.decode("utf-8"),
wn.value.decode("utf-8"),
master_seed_slot.value,
)
def get_wallet_key_slot(self, node_addr, wallet_name=None, master_seed_slot=None):
"""Look up a wallet key slot number from a node address (Supported Devices: HSM6, Secure Compute Module)
This method gets a wallet key slot number from its node address
and wallet name or master seed key slot. Either the wallet name
or the master seed slot must be present.
Parameters
----------
node_addr
The desired node address to look up
wallet_name
The name of the wallet that the node address belongs
to. Either this parameter or master_seed_slot must be
specified or this function will fail.
master_seed_slot
The master seed slot that the node address belongs
to. Either this parameter or wallet_name must be
specified or this function will fail.
Returns
-------
TYPE
the key slot.
"""
if wallet_name:
wallet_name = wallet_name.encode("utf-8")
master_seed_slot = 0
key_slot = c_int()
ret = self._zkGetWalletKeySlotFromNodeAddr(
self._zk_ctx,
node_addr.encode("utf-8"),
wallet_name,
master_seed_slot,
byref(key_slot),
)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
return key_slot.value
###@}
## @name Adminstration
###@{
def set_i2c_address(self, address: int) -> None:
"""Set the i2c address of the Zymkey.
**Note:** This is only applicable to versions of the Zymkey with i2c.
This method should be called if the i2c address of the
Zymkey is shared with another i2c device on the same i2c bus.
The default i2c address for Zymkey units is 0x30. Currently,
the address may be set in the ranges of 0x30 - 0x37 and 0x60 - 0x67.
After successful completion of this command, the Zymkey will
reboot itself.
Parameters
----------
address
The i2c address that the Zymkey will set itself to.
Raises
------
AssertionError
If `ret` is a bad return code from the Zymkey library function.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
addr_c_int = c_int(address)
ret = self._zkSetI2CAddr(self._zk_ctx, addr_c_int)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
###@}
## @name Accelerometer
###@{
def set_tap_sensitivity(self, axis="all", pct=50.0):
"""Set the sensitivity of tap operations.
This method permits setting the sensitivity of the tap
detection feature. Each axis may be individually
configured or all at once.
Parameters
----------
axis
The axis to configure. Valid values include:
1. 'all': Configure all axes with the specified sensitivity value.
2. 'x' or 'X': Configure only the x-axis
3. 'y' or 'Y': Configure only the y-axis
4. 'z' or 'Z': Configure only the z-axis
pct
The sensitivity expressed as percentage.
1. 0% = Shut down: Tap detection should not occur along the axis.
2. 100% = Maximum sensitivity.
Returns
-------
TYPE
0 for success, less than 0 for failure.
"""
axis = axis.lower()
axis_c_int = c_int()
if axis == "x":
axis_c_int = 0
elif axis == "y":
axis_c_int = 1
elif axis == "z":
axis_c_int = 2
elif axis == "all":
axis_c_int = 3
else:
raise AssertionError("invalid input value " + axis)
ret = self._zkSetTapSensitivity(self._zk_ctx, axis_c_int, pct)
if ret < 0:
raise AssertionError("bad return code %d" % ret)
def wait_for_tap(self, timeout_ms=-1):
"""Wait for tap event
This function is called in order to wait for a tap event to occur.
This function blocks the calling thread unless called with a
timeout of zero.
Parameters
----------
timeout_ms
The maximum amount of time in milliseconds to wait for a tap
event to arrive.
"""
ret = self._zkWaitForTap(self._zk_ctx, timeout_ms)
if ret == -errno.ETIMEDOUT:
raise ZymkeyTimeoutError("wait timed out")
if ret < 0:
raise AssertionError("bad return code %d" % ret)
## @brief Return class for Zymkey.get_accelerometer_data
# @details This class is the return type for Zymkey.get_accelerometer_data. It
# contains the instantaneous reading of an axis along with the
# direction of force that caused the latest tap event.
    class ZymkeyAccelAxisData(object):
        """Single-axis accelerometer reading returned by `get_accelerometer_data`."""

        def __init__(self, g_force, tap_dir):
            # Instantaneous acceleration along this axis, in units of g-force.
            self.g_force = g_force
            # Direction of the force behind the latest tap event on this axis:
            # -1 = negative direction, +1 = positive direction, 0 = stationary.
            self.tap_dir = tap_dir
def get_accelerometer_data(self):
"""Get current accelerometer data and tap info.
This function gets the most recent accelerometer data in units of g
forces plus the tap direction per axis.
Returns
-------
An array of accelerometer readings in units of g-force.
array index 0 = x axis
1 = y axis
2 = z axis
A value of -1 indicates that the tap event was detected in a
negative direction for the axis, +1 for a positive direction
and 0 for stationary.
"""
class _zkAccelAxisDataType(Structure):
_fields_ = [("g", c_double), ("tapDirection", c_int)]
x = _zkAccelAxisDataType()
y = _zkAccelAxisDataType()
z = _zkAccelAxisDataType()
ret = self._zkGetAccelerometerData(self._zk_ctx, byref(x), byref(y), byref(z))
if ret < 0:
raise AssertionError("bad return code %d" % ret)
xret = self.ZymkeyAccelAxisData(x.g, x.tapDirection)
yret = self.ZymkeyAccelAxisData(y.g, y.tapDirection)
zret = self.ZymkeyAccelAxisData(z.g, z.tapDirection)
return xret, yret, zret
###@}
## @name Time
###@{
def get_time(self, precise=False):
    """Read the current GMT time from the Zymkey's Real Time Clock (RTC).

    Parameters
    ----------
    precise
        When True, return only after the next second boundary falls,
        which may block the caller for up to one second. When False,
        return immediately with the current reading.

    Returns
    -------
    int
        The time in epoch seconds.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    seconds = c_int()
    rc = self._zkGetTime(self._zk_ctx, byref(seconds), precise)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return seconds.value
###@}
## @name Binding Management
###@{
def lock_binding(self):
    """Set the soft binding lock for this HSM.

    Locks the binding for a specific HSM. Only valid for HSM series
    products.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkLockBinding(self._zk_ctx)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def get_current_binding_info(self):
    """Return the current binding lock state and bind state.

    Only valid for devices in the HSM family.

    Returns
    -------
    tuple of (bool, bool)
        ``(binding_is_locked, is_bound)``.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    lock_state = c_bool()
    bind_state = c_bool()
    rc = self._zkGetCurrentBindingInfo(
        self._zk_ctx, byref(lock_state), byref(bind_state)
    )
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return lock_state.value, bind_state.value
###@}
## @name Perimeter Breach
###@{
def set_perimeter_event_actions(
    self, channel, action_notify=True, action_self_destruct=False
):
    """Configure the action taken on a perimeter breach event.

    The possible actions are any combination of notifying the host and
    Zymkey self-destruct.

    Parameters
    ----------
    channel
        The channel (0 or 1) that the action flags will be applied to.
    action_notify
        Notify the host on a perimeter breach (default True).
    action_self_destruct
        Self-destruct on a perimeter breach (default False).

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    # Bit 0 = notify, bit 1 = self-destruct.
    flags = (1 if action_notify else 0) | (2 if action_self_destruct else 0)
    rc = self._zkSetPerimeterEventAction(self._zk_ctx, channel, flags)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_digital_perimeter_lp_period(self, lp_period):
    """Set the digital perimeter detect low power period in microseconds
    (Supported Devices: HSM6, Secure Compute Module).

    Parameters
    ----------
    lp_period
        The perimeter detect low power period in microseconds.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetDigitalPerimeterDetectLPPeriod(self._zk_ctx, lp_period)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_digital_perimeter_lp_max_bits(self, max_num_bits):
    """Set the digital perimeter detect low power max number of bits
    (Supported Devices: HSM6, Secure Compute Module).

    Parameters
    ----------
    max_num_bits
        The perimeter detect low power max number of bits.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetDigitalPerimeterDetectLPMaxBits(self._zk_ctx, max_num_bits)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_digital_perimeter_delays(self, min_delay_ns, max_delay_ns):
    """Set the digital perimeter detect delay values
    (Supported Devices: HSM6, Secure Compute Module).

    Parameters
    ----------
    min_delay_ns
        The minimum delay in nanoseconds.
    max_delay_ns
        The maximum delay in nanoseconds.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetDigitalPerimeterDetectDelays(
        self._zk_ctx, min_delay_ns, max_delay_ns
    )
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def wait_for_perimeter_event(self, timeout_ms=-1):
    """Block until a perimeter breach event occurs or the timeout expires.

    Parameters
    ----------
    timeout_ms
        Maximum time to wait for a breach event, in milliseconds. A value
        of 0 returns immediately; the default of -1 waits indefinitely.

    Raises
    ------
    ZymkeyTimeoutError
        If no breach event arrived before the timeout elapsed.
    AssertionError
        If the underlying library call reports any other failure.
    """
    rc = self._zkWaitForPerimeterEvent(self._zk_ctx, timeout_ms)
    if rc == -errno.ETIMEDOUT:
        raise ZymkeyTimeoutError("wait timed out")
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def get_perimeter_detect_info(self):
    """Return first-event timestamps for each perimeter detect channel.

    The list index corresponds to the channel specified in
    ``set_perimeter_event_actions``.

    Returns
    -------
    list of int
        Epoch-second timestamps of the first detected event per channel.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    buf = c_void_p()
    count = c_int()
    rc = self._zkGetPerimeterDetectInfo(self._zk_ctx, byref(buf), byref(count))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    # The library hands back a C array of uint32 timestamps; copy it out.
    raw = (c_uint32 * count.value).from_address(buf.value)
    return [raw[i] for i in range(count.value)]
def clear_perimeter_detect_info(self):
    """Clear all perimeter detect info and rearm all detect channels.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkClearPerimeterDetectEvents(self._zk_ctx)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
###@}
## @name Module Info
###@{
def get_cpu_temp(self):
    """Return the current HSM CPU temperature
    (Supported Devices: HSM6, Secure Compute Module).

    Returns
    -------
    float
        The CPU temperature in celsius.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    reading = c_float()
    rc = self._zkGetCPUTemp(self._zk_ctx, byref(reading))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return reading.value
def get_aux_temp(self, index=0):
    """Return the current aux temperature (Secure Compute Modules only).

    THIS FUNCTION IS FOR INTERNAL ZYMBIT USE ONLY.

    Parameters
    ----------
    index
        The index id of the processor (defaults to 0).

    Returns
    -------
    float
        The temperature in celsius.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    reading = c_float()
    rc = self._zkGetAUXTemp(self._zk_ctx, index, byref(reading))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return reading.value
def get_rtc_drift(self):
    """Return the current RTC drift
    (Supported Devices: HSM6, Secure Compute Module).

    Returns
    -------
    float
        The RTC drift.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    reading = c_float()
    rc = self._zkGetRTCDrift(self._zk_ctx, byref(reading))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return reading.value
def get_batt_volt(self):
    """Return the current battery voltage
    (Supported Devices: HSM6, Secure Compute Module).

    Returns
    -------
    float
        The battery voltage.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    reading = c_float()
    rc = self._zkGetBatteryVoltage(self._zk_ctx, byref(reading))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    return reading.value
###@}
## @name Model Information
###@{
def get_model_number(self):
    """Return the Zymkey model number.

    Returns
    -------
    str
        The model number.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    ptr = c_void_p()
    rc = self._zkGetModelNumberString(self._zk_ctx, byref(ptr))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    # The library returns a C string pointer; reinterpret and decode it.
    return cast(ptr, c_char_p).value.decode("utf-8")
def get_firmware_version(self):
    """Return the Zymkey firmware version.

    Returns
    -------
    str
        The firmware version.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    ptr = c_void_p()
    rc = self._zkGetFirmwareVersionString(self._zk_ctx, byref(ptr))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    # The library returns a C string pointer; reinterpret and decode it.
    return cast(ptr, c_char_p).value.decode("utf-8")
def get_serial_number(self):
    """Return the Zymkey serial number.

    Returns
    -------
    str
        The serial number.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    ptr = c_void_p()
    rc = self._zkGetSerialNumberString(self._zk_ctx, byref(ptr))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    # The library returns a C string pointer; reinterpret and decode it.
    return cast(ptr, c_char_p).value.decode("utf-8")
###@}
## @name Battery Voltage Monitor
###@{
def set_battery_voltage_action(self, sleep=False, self_destruct=False):
    """Set the low-battery-voltage action
    (Supported Devices: HSM6, Secure Compute Module).

    Specifies the action to take when the battery voltage falls below
    the threshold set by ``set_battery_voltage_threshold``. If this
    function is never called, "do nothing" is the default. The three
    actions are: do nothing, sleep until the battery is replaced, or
    self-destruct. Calling with both flags False removes a previously
    set sleep or self-destruct action.

    Parameters
    ----------
    sleep
        Set the sleep action.
    self_destruct
        Set the self_destruct action.

    Raises
    ------
    AssertionError
        If both flags are True, or the underlying library call fails.
    """
    if sleep and self_destruct:
        raise AssertionError("Sleep and self-destruct cannot both be True")
    # Action codes understood by the library: 0 = none, 1 = self-destruct,
    # 2 = sleep.
    if self_destruct:
        action = 1
    elif sleep:
        action = 2
    else:
        action = 0
    rc = self._zkSetBatteryVoltageAction(self._zk_ctx, action)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_battery_voltage_threshold(self, threshold):
    """Set the battery voltage threshold
    (Supported Devices: HSM6, Secure Compute Module).

    When the battery voltage falls below this threshold, the action set
    by ``set_battery_voltage_action`` is carried out. The recommended
    threshold of 2.3V is assumed by default; the threshold must be below
    2.5V.

    Parameters
    ----------
    threshold
        The threshold in Volts.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetBatteryVoltageThreshold(self._zk_ctx, threshold)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
###@}
## @name HSM CPU Temperature Monitor
###@{
def set_cpu_temp_action(self, self_destruct=False):
    """Set the HSM CPU temperature threshold action
    (Supported Devices: HSM6, Secure Compute Module).

    Specifies the action to take when the HSM CPU temperature falls
    below the threshold set by ``set_cpu_low_temp_threshold`` or rises
    above the one set by ``set_cpu_high_temp_threshold``: either do
    nothing or self-destruct. Call with ``self_destruct=False`` to
    remove a previously set self-destruct action.

    Parameters
    ----------
    self_destruct
        Set the self_destruct action.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    # Action codes: 0 = do nothing, 1 = self-destruct.
    rc = self._zkSetCPUTempAction(self._zk_ctx, 1 if self_destruct else 0)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_cpu_low_temp_threshold(self, threshold):
    """Set the HSM CPU low temperature threshold
    (Supported Devices: HSM6, Secure Compute Module).

    When the on-board HSM CPU's temperature falls below this threshold,
    the action set by ``set_cpu_temp_action`` is carried out.

    WARNING: You can lock yourself out in dev mode if you set a
    threshold above the CPU's ambient temperature. The recommended
    setting is no more than 20C. If this function is never called,
    -10 degrees celsius is assumed.

    Parameters
    ----------
    threshold
        The threshold in celsius.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetCPULowTempThreshold(self._zk_ctx, threshold)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_cpu_high_temp_threshold(self, threshold):
    """Set the HSM CPU high temperature threshold
    (Supported Devices: HSM6, Secure Compute Module).

    When the on-board HSM CPU's temperature rises above this threshold,
    the action set by ``set_cpu_temp_action`` is carried out.

    WARNING: You can lock yourself out in dev mode if you set a
    threshold below the CPU's ambient temperature. The recommended
    setting is no less than 40C. If this function is never called,
    65 degrees celsius is assumed.

    Parameters
    ----------
    threshold
        The threshold in celsius.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetCPUHighTempThreshold(self._zk_ctx, threshold)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def set_supervised_boot_policy(self, policy_id: int = 0):
    """Set the Supervised boot policy
    (Supported Devices: Secure Compute Module).

    Sets the action policy to take when Supervised boot detects a file
    change during the boot process.

    Parameters
    ----------
    policy_id
        The action to apply to the Supervised boot process:
        0 = Do Nothing, 1 = Self-Destruct, 2 = Hold Chip in Reset.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkSetSupervisedBootPolicy(self._zk_ctx, policy_id)
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def add_or_update_supervised_boot_file(self, filepath: str = "", slot: int = 15):
    """Add or update a file in the Supervised boot manifest
    (Supported Devices: Secure Compute Module).

    The file will be signed and then checked by Supervised boot during
    the boot process.

    Parameters
    ----------
    filepath
        The file to be signed and checked by Supervised boot.
    slot
        The slot to sign the file with.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkAddOrUpdateSupervisedBootFile(
        self._zk_ctx, filepath.encode("utf-8"), slot
    )
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def remove_supervised_boot_file(self, filepath: str = ""):
    """Remove a file from the Supervised boot manifest
    (Supported Devices: Secure Compute Module).

    The removed file will no longer be checked by Supervised boot during
    the boot process.

    Parameters
    ----------
    filepath
        The file to be removed from the manifest.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    rc = self._zkRemoveSupervisedBootFile(self._zk_ctx, filepath.encode("utf-8"))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
def get_supervised_boot_file_manifest(self):
    """Return the file manifest checked by Supervised boot
    (Supported Devices: Secure Compute Module).

    Returns
    -------
    str
        The manifest of files checked during the boot process.

    Raises
    ------
    AssertionError
        If the underlying library call fails.
    """
    ptr = c_void_p()
    rc = self._zkGetSupervisedBootFileManifest(self._zk_ctx, byref(ptr))
    if rc < 0:
        raise AssertionError("bad return code %d" % rc)
    # The library returns a C string pointer; reinterpret and decode it.
    return cast(ptr, c_char_p).value.decode("utf-8")
###@}
# Interfaces to the C library
# Each _zkX attribute binds one zkalib entry point; restype declares the C
# return code type and argtypes the C parameter list.
# --- context open/close and LED control ---
_zkOpen = zkalib.zkOpen
_zkOpen.restype = c_int
_zkOpen.argtypes = [POINTER(c_void_p)]
_zkClose = zkalib.zkClose
_zkClose.restype = c_int
_zkClose.argtypes = [c_void_p]
_zkLEDOn = zkalib.zkLEDOn
_zkLEDOn.restype = c_int
_zkLEDOn.argtypes = [c_void_p]
_zkLEDOff = zkalib.zkLEDOff
_zkLEDOff.restype = c_int
_zkLEDOff.argtypes = [c_void_p]
_zkLEDFlash = zkalib.zkLEDFlash
_zkLEDFlash.restype = c_int
_zkLEDFlash.argtypes = [c_void_p, c_ulong, c_ulong, c_ulong]
# --- random data generation ---
_zkGetRandBytes = zkalib.zkGetRandBytes
_zkGetRandBytes.restype = c_int
_zkGetRandBytes.argtypes = [c_void_p, POINTER(c_void_p), c_int]
_zkCreateRandDataFile = zkalib.zkCreateRandDataFile
_zkCreateRandDataFile.restype = c_int
_zkCreateRandDataFile.argtypes = [c_void_p, c_char_p, c_int]
# --- data lock/unlock (F = file, B = in-memory buffer) ---
_zkLockDataF2F = zkalib.zkLockDataF2F
_zkLockDataF2F.restype = c_int
_zkLockDataF2F.argtypes = [c_void_p, c_char_p, c_char_p, c_bool]
_zkLockDataB2F = zkalib.zkLockDataB2F
_zkLockDataB2F.restype = c_int
_zkLockDataB2F.argtypes = [c_void_p, c_void_p, c_int, c_char_p, c_bool]
_zkLockDataF2B = zkalib.zkLockDataF2B
_zkLockDataF2B.restype = c_int
_zkLockDataF2B.argtypes = [
    c_void_p,
    c_char_p,
    POINTER(c_void_p),
    POINTER(c_int),
    c_bool,
]
_zkLockDataB2B = zkalib.zkLockDataB2B
_zkLockDataB2B.restype = c_int
_zkLockDataB2B.argtypes = [
    c_void_p,
    c_void_p,
    c_int,
    POINTER(c_void_p),
    POINTER(c_int),
    c_bool,
]
_zkUnlockDataF2F = zkalib.zkUnlockDataF2F
_zkUnlockDataF2F.restype = c_int
_zkUnlockDataF2F.argtypes = [c_void_p, c_char_p, c_char_p, c_bool]
_zkUnlockDataB2F = zkalib.zkUnlockDataB2F
_zkUnlockDataB2F.restype = c_int
_zkUnlockDataB2F.argtypes = [c_void_p, c_void_p, c_int, c_char_p, c_bool]
_zkUnlockDataF2B = zkalib.zkUnlockDataF2B
_zkUnlockDataF2B.restype = c_int
_zkUnlockDataF2B.argtypes = [
    c_void_p,
    c_char_p,
    POINTER(c_void_p),
    POINTER(c_int),
    c_bool,
]
_zkUnlockDataB2B = zkalib.zkUnlockDataB2B
_zkUnlockDataB2B.restype = c_int
_zkUnlockDataB2B.argtypes = [
    c_void_p,
    c_void_p,
    c_int,
    POINTER(c_void_p),
    POINTER(c_int),
    c_bool,
]
# --- ECDSA signing ---
_zkGenECDSASigFromDigest = zkalib.zkGenECDSASigFromDigest
_zkGenECDSASigFromDigest.restype = c_int
_zkGenECDSASigFromDigest.argtypes = [
    c_void_p,
    c_void_p,
    c_int,
    POINTER(c_void_p),
    POINTER(c_int),
]
_zkGenECDSASigFromDigestWithRecID = zkalib.zkGenECDSASigFromDigestWithRecID
_zkGenECDSASigFromDigestWithRecID.restype = c_int
_zkGenECDSASigFromDigestWithRecID.argtypes = [
    c_void_p,
    c_void_p,
    c_int,
    POINTER(c_void_p),
    POINTER(c_int),
    POINTER(c_uint),
]
_zkVerifyECDSASigFromDigest = zkalib.zkVerifyECDSASigFromDigest
# Bug fix: this previously assigned ".rettype", which is not a ctypes
# attribute -- it set a meaningless Python attribute and left restype at the
# ctypes default (c_int) only by coincidence.
_zkVerifyECDSASigFromDigest.restype = c_int
_zkVerifyECDSASigFromDigest.argtypes = [c_void_p, c_void_p, c_int, c_void_p, c_int]
# Optional bindings: these symbols exist only in newer versions of the
# zkalib shared library, so missing attributes are tolerated.
try:
    _zkVerifyECDSASigFromDigestWithForeignKeySlot = (
        zkalib.zkVerifyECDSASigFromDigestWithForeignKeySlot
    )
    # Bug fix: was ".rettype" (a meaningless attribute); restype worked only
    # because the ctypes default return type happens to be c_int.
    _zkVerifyECDSASigFromDigestWithForeignKeySlot.restype = c_int
    _zkVerifyECDSASigFromDigestWithForeignKeySlot.argtypes = [
        c_void_p,
        c_void_p,
        c_int,
        c_void_p,
        c_int,
    ]
    _zkStoreForeignPubKey = zkalib.zkStoreForeignPubKey
    _zkStoreForeignPubKey.restype = c_int
    _zkStoreForeignPubKey.argtypes = [c_void_p, c_int, c_void_p, c_int]
    _zkDisablePubKeyExport = zkalib.zkDisablePubKeyExport
    _zkDisablePubKeyExport.restype = c_int
    _zkDisablePubKeyExport.argtypes = [c_void_p, c_int, c_bool]
    _zkGenKeyPair = zkalib.zkGenKeyPair
    _zkGenKeyPair.restype = c_int
    _zkGenKeyPair.argtypes = [c_void_p, c_int]
    _zkGenEphemeralKeyPair = zkalib.zkGenEphemeralKeyPair
    _zkGenEphemeralKeyPair.restype = c_int
    _zkGenEphemeralKeyPair.argtypes = [c_void_p, c_int]
    _zkRemoveKey = zkalib.zkRemoveKey
    _zkRemoveKey.restype = c_int
    _zkRemoveKey.argtypes = [c_void_p, c_int, c_bool]
    _zkInvalidateEphemeralKey = zkalib.zkInvalidateEphemeralKey
    _zkInvalidateEphemeralKey.restype = c_int
    _zkInvalidateEphemeralKey.argtypes = [c_void_p]
    _zkDoRawECDH = zkalib.zkDoRawECDH
    _zkDoRawECDH.restype = c_int
    _zkDoRawECDH.argtypes = [c_void_p, c_int, c_void_p, c_int, POINTER(c_void_p)]
    _zkDoRawECDHWithIntPeerPubkey = zkalib.zkDoRawECDHWithIntPeerPubkey
    _zkDoRawECDHWithIntPeerPubkey.restype = c_int
    _zkDoRawECDHWithIntPeerPubkey.argtypes = [
        c_void_p,
        c_int,
        c_int,
        c_bool,
        POINTER(c_void_p),
    ]
    _zkDoECDHAndKDF = zkalib.zkDoECDHAndKDF
    _zkDoECDHAndKDF.restype = c_int
    _zkDoECDHAndKDF.argtypes = [
        c_void_p,
        c_int,
        c_int,
        c_void_p,
        c_int,
        c_void_p,
        c_int,
        c_void_p,
        c_int,
        c_int,
        c_int,
        POINTER(c_void_p),
    ]
    _zkDoECDHAndKDFWithIntPeerPubkey = zkalib.zkDoECDHAndKDFWithIntPeerPubkey
    _zkDoECDHAndKDFWithIntPeerPubkey.restype = c_int
    _zkDoECDHAndKDFWithIntPeerPubkey.argtypes = [
        c_void_p,
        c_int,
        c_int,
        c_int,
        c_bool,
        c_void_p,
        c_int,
        c_void_p,
        c_int,
        c_int,
        c_int,
        POINTER(c_void_p),
    ]
    # --- wallet / BIP39 / SLIP39 ---
    _zkGenWalletMasterSeed = zkalib.zkGenWalletMasterSeedWithBIP39
    _zkGenWalletMasterSeed.restype = c_int
    _zkGenWalletMasterSeed.argtypes = [
        c_void_p,
        c_int,
        c_char_p,
        c_char_p,
        c_void_p,
        c_int,
        c_char_p,
        POINTER(c_void_p),
    ]
    _zkOpenGenSLIP39Session = zkalib.zkGenWalletMasterSeedWithSLIP39
    _zkOpenGenSLIP39Session.restype = c_int
    _zkOpenGenSLIP39Session.argtypes = [
        c_void_p,
        c_int,
        c_char_p,
        c_char_p,
        c_void_p,
        c_int,
        c_int,
        c_int,
        c_int,
        c_char_p,
    ]
    _zkSetSLIP39GroupInfo = zkalib.zkSetSLIP39GroupInfo
    _zkSetSLIP39GroupInfo.restype = c_int
    _zkSetSLIP39GroupInfo.argtypes = [
        c_void_p,
        c_int,
        c_int,
        c_int,
    ]
    _zkAddSLIP39Member = zkalib.zkAddSLIP39MemberPassword
    _zkAddSLIP39Member.restype = c_int
    _zkAddSLIP39Member.argtypes = [
        c_void_p,
        c_char_p,
        POINTER(c_void_p),
    ]
    _zkCancelSLIP39Session = zkalib.zkCancelSLIP39Session
    _zkCancelSLIP39Session.restype = c_int
    _zkCancelSLIP39Session.argtypes = [
        c_void_p,
    ]
    _zkGenOversightWallet = zkalib.zkGenOversightWallet
    _zkGenOversightWallet.restype = c_int
    _zkGenOversightWallet.argtypes = [
        c_void_p,
        c_int,
        c_char_p,
        c_void_p,
        c_void_p,
        c_char_p,
        c_char_p,
    ]
    _zkRestoreWalletMasterSeedFromBIP39Mnemonic = (
        zkalib.zkRestoreWalletMasterSeedFromBIP39Mnemonic
    )
    _zkRestoreWalletMasterSeedFromBIP39Mnemonic.restype = c_int
    _zkRestoreWalletMasterSeedFromBIP39Mnemonic.argtypes = [
        c_void_p,
        c_int,
        c_char_p,
        c_char_p,
        c_void_p,
        c_int,
        c_char_p,
        c_char_p,
    ]
    _zkAddRestoreSLIP39Mnemonic = zkalib.zkAddRestoreSLIP39Mnemonic
    _zkAddRestoreSLIP39Mnemonic.restype = c_int
    _zkAddRestoreSLIP39Mnemonic.argtypes = [
        c_void_p,
        c_char_p,
        c_char_p,
    ]
    _zkOpenRestoreSLIP39Session = (
        zkalib.zkRestoreWalletMasterSeedFromSLIP39
    )
    _zkOpenRestoreSLIP39Session.restype = c_int
    _zkOpenRestoreSLIP39Session.argtypes = [
        c_void_p,
        c_int,
        c_char_p,
        c_char_p,
        c_void_p,
        c_int,
        c_char_p,
    ]
    _zkGenWalletChildKey = zkalib.zkGenWalletChildKey
    _zkGenWalletChildKey.restype = c_int
    _zkGenWalletChildKey.argtypes = [c_void_p, c_int, c_uint, c_bool, c_bool, POINTER(c_void_p)]
    _zkGetWalletNodeAddrFromKeySlot = zkalib.zkGetWalletNodeAddrFromKeySlot
    _zkGetWalletNodeAddrFromKeySlot.restype = c_int
    _zkGetWalletNodeAddrFromKeySlot.argtypes = [
        c_void_p,
        c_int,
        POINTER(c_void_p),
        POINTER(c_void_p),
        POINTER(c_int),
    ]
    _zkGetWalletKeySlotFromNodeAddr = zkalib.zkGetWalletKeySlotFromNodeAddr
    _zkGetWalletKeySlotFromNodeAddr.restype = c_int
    _zkGetWalletKeySlotFromNodeAddr.argtypes = [
        c_void_p,
        c_char_p,
        c_char_p,
        c_int,
        POINTER(c_int),
    ]
    _zkGetAllocSlotsList = zkalib.zkGetAllocSlotsList
    _zkGetAllocSlotsList.restype = c_int
    _zkGetAllocSlotsList.argtypes = [
        c_void_p,
        c_bool,
        POINTER(c_int),
        POINTER(c_void_p),
        POINTER(c_int),
    ]
    _zkExportPubKey2File = zkalib.zkExportPubKey2File
    _zkExportPubKey2File.restype = c_int
    _zkExportPubKey2File.argtypes = [c_void_p, c_char_p, c_int, c_bool]
    _zkExportPubKey = zkalib.zkExportPubKey
    _zkExportPubKey.restype = c_int
    _zkExportPubKey.argtypes = [
        c_void_p,
        POINTER(c_void_p),
        POINTER(c_int),
        c_int,
        c_bool,
    ]
    # --- binding / module health / perimeter tuning ---
    _zkLockBinding = zkalib.zkLockBinding
    _zkLockBinding.restype = c_int
    _zkLockBinding.argtypes = [c_void_p]
    _zkGetCurrentBindingInfo = zkalib.zkGetCurrentBindingInfo
    _zkGetCurrentBindingInfo.restype = c_int
    _zkGetCurrentBindingInfo.argtypes = [c_void_p, POINTER(c_bool), POINTER(c_bool)]
    _zkGetCPUTemp = zkalib.zkGetCPUTemp
    _zkGetCPUTemp.restype = c_int
    _zkGetCPUTemp.argtypes = [c_void_p, POINTER(c_float)]
    _zkGetAUXTemp = zkalib.zkGetAUXTemp
    _zkGetAUXTemp.restype = c_int
    _zkGetAUXTemp.argtypes = [c_void_p, c_int, POINTER(c_float)]
    _zkGetRTCDrift = zkalib.zkGetRTCDrift
    _zkGetRTCDrift.restype = c_int
    _zkGetRTCDrift.argtypes = [c_void_p, POINTER(c_float)]
    _zkGetBatteryVoltage = zkalib.zkGetBatteryVoltage
    _zkGetBatteryVoltage.restype = c_int
    _zkGetBatteryVoltage.argtypes = [c_void_p, POINTER(c_float)]
    _zkSetDigitalPerimeterDetectLPPeriod = (
        zkalib.zkSetDigitalPerimeterDetectLPPeriod
    )
    _zkSetDigitalPerimeterDetectLPPeriod.restype = c_int
    _zkSetDigitalPerimeterDetectLPPeriod.argtypes = [c_void_p, c_int]
    _zkSetDigitalPerimeterDetectLPMaxBits = (
        zkalib.zkSetDigitalPerimeterDetectLPMaxBits
    )
    _zkSetDigitalPerimeterDetectLPMaxBits.restype = c_int
    _zkSetDigitalPerimeterDetectLPMaxBits.argtypes = [c_void_p, c_int]
    _zkSetDigitalPerimeterDetectDelays = zkalib.zkSetDigitalPerimeterDetectDelays
    _zkSetDigitalPerimeterDetectDelays.restype = c_int
    _zkSetDigitalPerimeterDetectDelays.argtypes = [c_void_p, c_int, c_int]
    _zkSetBatteryVoltageAction = zkalib.zkSetBatteryVoltageAction
    _zkSetBatteryVoltageAction.restype = c_int
    _zkSetBatteryVoltageAction.argtypes = [c_void_p, c_int]
    _zkSetBatteryVoltageThreshold = zkalib.zkSetBatteryVoltageThreshold
    _zkSetBatteryVoltageThreshold.restype = c_int
    _zkSetBatteryVoltageThreshold.argtypes = [c_void_p, c_float]
    _zkSetCPUTempAction = zkalib.zkSetCPUTempAction
    _zkSetCPUTempAction.restype = c_int
    _zkSetCPUTempAction.argtypes = [c_void_p, c_int]
    _zkSetCPULowTempThreshold = zkalib.zkSetCPULowTempThreshold
    _zkSetCPULowTempThreshold.restype = c_int
    _zkSetCPULowTempThreshold.argtypes = [c_void_p, c_float]
    _zkSetCPUHighTempThreshold = zkalib.zkSetCPUHighTempThreshold
    _zkSetCPUHighTempThreshold.restype = c_int
    _zkSetCPUHighTempThreshold.argtypes = [c_void_p, c_float]
    # --- Supervised boot ---
    _zkSetSupervisedBootPolicy = zkalib.zkSetSupervisedBootPolicy
    _zkSetSupervisedBootPolicy.restype = c_int
    _zkSetSupervisedBootPolicy.argtypes = [c_void_p, c_int]
    _zkAddOrUpdateSupervisedBootFile = zkalib.zkAddOrUpdateSupervisedBootFile
    _zkAddOrUpdateSupervisedBootFile.restype = c_int
    _zkAddOrUpdateSupervisedBootFile.argtypes = [c_void_p, c_char_p, c_int]
    _zkRemoveSupervisedBootFile = zkalib.zkRemoveSupervisedBootFile
    _zkRemoveSupervisedBootFile.restype = c_int
    _zkRemoveSupervisedBootFile.argtypes = [c_void_p, c_char_p]
    _zkGetSupervisedBootFileManifest = zkalib.zkGetSupervisedBootFileManifest
    _zkGetSupervisedBootFileManifest.restype = c_int
    _zkGetSupervisedBootFileManifest.argtypes = [c_void_p, POINTER(c_void_p)]
except AttributeError:
    # Bug fix: was a bare "except:", which also swallowed KeyboardInterrupt
    # and SystemExit. Only a missing symbol (AttributeError on zkalib) is an
    # expected, tolerable condition here.
    pass
# --- public key export, device config, time, motion, perimeter, identity ---
_zkGetECDSAPubKey = zkalib.zkGetECDSAPubKey
_zkGetECDSAPubKey.restype = c_int
_zkGetECDSAPubKey.argtypes = [c_void_p, POINTER(c_void_p), POINTER(c_int), c_int]
_zkSaveECDSAPubKey2File = zkalib.zkSaveECDSAPubKey2File
_zkSaveECDSAPubKey2File.restype = c_int
_zkSaveECDSAPubKey2File.argtypes = [c_void_p, c_char_p, c_int]
_zkSetI2CAddr = zkalib.zkSetI2CAddr
_zkSetI2CAddr.restype = c_int
_zkSetI2CAddr.argtypes = [c_void_p, c_int]
_zkSetTapSensitivity = zkalib.zkSetTapSensitivity
_zkSetTapSensitivity.restype = c_int
_zkSetTapSensitivity.argtypes = [c_void_p, c_int, c_float]
_zkGetTime = zkalib.zkGetTime
_zkGetTime.restype = c_int
_zkGetTime.argtypes = [c_void_p, POINTER(c_int), c_bool]
_zkWaitForTap = zkalib.zkWaitForTap
_zkWaitForTap.restype = c_int
_zkWaitForTap.argtypes = [c_void_p, c_int]
_zkGetAccelerometerData = zkalib.zkGetAccelerometerData
_zkGetAccelerometerData.restype = c_int
_zkGetAccelerometerData.argtypes = [c_void_p, c_void_p, c_void_p, c_void_p]
_zkWaitForPerimeterEvent = zkalib.zkWaitForPerimeterEvent
_zkWaitForPerimeterEvent.restype = c_int
_zkWaitForPerimeterEvent.argtypes = [c_void_p, c_int]
_zkGetPerimeterDetectInfo = zkalib.zkGetPerimeterDetectInfo
_zkGetPerimeterDetectInfo.restype = c_int
_zkGetPerimeterDetectInfo.argtypes = [c_void_p, POINTER(c_void_p), POINTER(c_int)]
_zkClearPerimeterDetectEvents = zkalib.zkClearPerimeterDetectEvents
_zkClearPerimeterDetectEvents.restype = c_int
_zkClearPerimeterDetectEvents.argtypes = [c_void_p]
_zkSetPerimeterEventAction = zkalib.zkSetPerimeterEventAction
_zkSetPerimeterEventAction.restype = c_int
_zkSetPerimeterEventAction.argtypes = [c_void_p, c_int, c_int]
_zkGetModelNumberString = zkalib.zkGetModelNumberString
_zkGetModelNumberString.restype = c_int
_zkGetModelNumberString.argtypes = [c_void_p, POINTER(c_void_p)]
_zkGetFirmwareVersionString = zkalib.zkGetFirmwareVersionString
_zkGetFirmwareVersionString.restype = c_int
_zkGetFirmwareVersionString.argtypes = [c_void_p, POINTER(c_void_p)]
_zkGetSerialNumberString = zkalib.zkGetSerialNumberString
_zkGetSerialNumberString.restype = c_int
_zkGetSerialNumberString.argtypes = [c_void_p, POINTER(c_void_p)]
# Module-level default client; None when no Zymkey device is present or the
# open call failed (Zymkey raises AssertionError on a bad return code).
client: t.Optional[Zymkey]
try:
    client = Zymkey()
except AssertionError:
    # Removed a dead "pass" statement that followed this assignment.
    client = None
def create_new_client():
    """Return a fresh Zymkey client, or None if opening the device fails."""
    try:
        fresh = Zymkey()
    except AssertionError:
        fresh = None
    return fresh
# zkviz: Visualize Link Network Between Zettels (Notes)


Produce an interactive overview of all your notes and their connections.

## Installing
I recommend using Python 3 and an environment specifically for zkviz.
Assuming that you're using macOS or Linux, to create the environment, open
a Terminal window and type the following to create the standalone environment
and activate it.
```sh
python3 -m venv ~/envs/zkviz
source ~/envs/zkviz/bin/activate
```
Then install zkviz with:
```sh
pip install zkviz
```
If [Graphviz](https://graphviz.org/download/) is installed on your computer,
zkviz can use it to draw the network. It is not a Python package so it needs to
be installed independently. If you're on a Mac and have
[Homebrew](https://brew.sh) installed, you can install Graphviz from a Terminal
with:
```sh
brew install graphviz
```
## Usage
To execute zkviz from the Terminal, you either need to add the zkviz
environment path to your `PATH` environment variable or specify the path to the
zkviz executable directly. Below, I use the explicit path.
Executing zkviz without any argument will build the visualization based on all
the `*.md` files found in the current directory.
```sh
~/envs/zkviz/bin/zkviz
```
You can also point zkviz to the folder containing your notes. For example:
```sh
~/envs/zkviz/bin/zkviz --notes-dir ~/Notes
```
By default zkviz will look for files with the `.md` extension, but you can override
the default with the `--pattern` option:
```sh
~/envs/zkviz/bin/zkviz --pattern '*.mkdown'
```
You can also specify multiple patterns separately. With the following, zkviz
will find all txt and md files. I recommend wrapping the pattern in quotes.
```sh
~/envs/zkviz/bin/zkviz --pattern '*.md' --pattern '*.txt'
```
You can also pass a list of files to zkviz:
```sh
~/envs/zkviz/bin/zkviz "~/Notes/201906021303 the state of affairs.md" "~/Notes/201901021232 Journey to the center of the earth.md"
```
To use Graphviz to generate the visualization, add the `--use-graphviz` option:
```sh
~/envs/zkviz/bin/zkviz --notes-dir ~/Notes --use-graphviz
```
By default, zkviz will draw a node for every reference found in the files
provided, even if the referenced zettel does not exist, and even if a zettel
refers to itself. You can change that behavior in two ways. The `--only-list`
option tells zkviz to draw links only to zettels that have been provided to it.
In the example below, only links between the two zettels will be shown:
```sh
~/envs/zkviz/bin/zkviz --only-list "20190810190224 Note 1.md" "20190810190230 Note 2.md"
```
The other way to change the behavior is to disable self-reference links using
the `--no-self-ref` option.
## Using zkviz with Keyboard Maestro
The `keyboard-maestro` folder includes a [Keyboard Maestro](https://www.keyboardmaestro.com)
macro to automatically create a visualization based on the list of files
currently selected in [The Archive](https://zettelkasten.de/the-archive/). To
use this macro, download it and import it into Keyboard Maestro. The follow the
README comment within the macro to set the necessary variables.
## Making a Release
1. Bump the version in `zkviz/__init__.py`
2. Update the changelog, link the versions.
3. Commit and tag with version number
4. Build a source dist with `python setup.py clean && rm dist/* && python setup.py sdist`
5. Test upload to PyPI test with `twine upload --repository-url https://test.pypi.org/legacy/ dist/*`
6. Create a temporary environment `mktmpenv` and test install with `pip install --index-url https://test.pypi.org/simple/ zkviz`
7. If everything looks good, upload for real with `twine upload dist/*`
| zkviz | /zkviz-1.3.0.tar.gz/zkviz-1.3.0/README.md | README.md |
import argparse
import collections
import functools
import json
import logging
import os
import pathlib
import platform
import subprocess
import tempfile
import urllib.parse
from typing import Any, Dict, FrozenSet, List, Union
import requests
import tqdm
import vvm
import vvm.install
from appdirs import user_cache_dir, user_log_dir
from semantic_version import SimpleSpec, Version
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
class PlatformError(Exception):
    """Raised when the host OS/CPU combination has no prebuilt zkVyper binary."""

    ...
class Config(collections.UserDict):
    """Configuration container with layered lookup.

    Precedence (highest first): keyword arguments, ``ZKVVM_*`` environment
    variables, then ``DEFAULTS``.  User-supplied values are normalized via
    the matching ``CONVERTERS`` callable.
    """

    DEFAULTS = {
        "zk_version": SimpleSpec(">=1.1.0"),
        "cache_dir": pathlib.Path(user_cache_dir(__name__)),
        "log_file": pathlib.Path(user_log_dir(__name__)).joinpath(__name__ + ".log"),
        "verbosity": logging.WARNING,
        "vyper_version": Version("0.3.3"),
    }
    CONVERTERS = {
        "zk_version": SimpleSpec,
        "cache_dir": lambda x: pathlib.Path(x).absolute(),
        "log_file": lambda x: pathlib.Path(x).absolute(),
        "verbosity": int,
        "vyper_version": Version,
    }

    def __init__(self, **kwargs: Any) -> None:
        env, prefix = {}, __name__ + "_"
        for k, v in os.environ.items():
            if not k.startswith(prefix.upper()):
                continue
            key = k.lower()[len(prefix) :]
            # Fix: skip unrecognized ZKVVM_* variables instead of crashing
            # with a KeyError — the environment may legitimately contain
            # unrelated variables sharing the prefix.
            if key not in self.CONVERTERS:
                continue
            env[key] = self.CONVERTERS[key](v)  # type: ignore
        user = {
            k: self.CONVERTERS[k](v)  # type: ignore
            for k, v in kwargs.items()
            if v is not None
        }
        self.data = collections.ChainMap(user, env, self.DEFAULTS)  # type: ignore
class BinaryVersion(Version):
    """A semantic version annotated with the location of its binary.

    ``location`` is a download URL for remote versions, or a ``file://`` URI
    for locally installed ones.
    """

    def __init__(self, *args, location: str, **kwargs):
        super().__init__(*args, **kwargs)
        self.location = location  # where the binary can be fetched or found
class VersionManager:
    """zkVyper Version Manager.

    Discovers remote zkVyper binaries (matter-labs/zkvyper-bin on GitHub),
    installs them into the configured cache directory, and runs them together
    with a vvm-managed vyper compiler.
    """

    _AMD64 = ("amd64", "x86_64", "i386", "i586", "i686")
    # "arm64" added: platform.machine() reports "arm64" on Apple-Silicon macOS.
    _ARM64 = ("aarch64_be", "aarch64", "armv8b", "armv8l", "arm64")
    _REMOTE_BASE_URL = "https://api.github.com/repos/matter-labs/zkvyper-bin/contents/"

    def __init__(self, config: Config) -> None:
        self._session = requests.Session()
        self._config = config
        log_file: pathlib.Path = config["log_file"]
        if not log_file.exists():
            # Fix: exist_ok — the parent directory may already exist even
            # though the log file itself does not (plain mkdir would raise
            # FileExistsError in that case).
            log_file.parent.mkdir(parents=True, exist_ok=True)
        self._logger = self._get_logger()

    def compile(self, files: List[Union[str, pathlib.Path]]):
        """Compile *files* with zkVyper and return the parsed combined-json.

        Installs the configured vyper version and a constraint-satisfying
        zkVyper binary on demand; raises if no matching zkVyper exists.
        """
        needs_vyper = (
            self._config["vyper_version"] not in vvm.get_installed_vyper_versions()
        )
        if needs_vyper:
            vvm.install_vyper(self._config["vyper_version"])

        zkvyper = self._config["zk_version"].select(self.local_versions)
        if not zkvyper:
            selected = self._config["zk_version"].select(self.remote_versions)
            if not selected:
                version = self._config["zk_version"]
                self._logger.error(
                    f"zkVyper version meeting constraints not available: {version!s}"
                )
                raise Exception()
            self.install(selected)
            zkvyper = self._config["zk_version"].select(self.local_versions)

        # Local versions carry a file:// URI; extract the filesystem path.
        zkvyper = pathlib.Path(urllib.parse.urlparse(zkvyper.location).path)
        vyper = vvm.install.get_executable(self._config["vyper_version"])
        ret = subprocess.run(
            [zkvyper, "--vyper", vyper, "-f", "combined_json", *files],
            capture_output=True,
        )
        return json.loads(ret.stdout.decode().strip())

    def install(
        self,
        version: BinaryVersion,
        overwrite: bool = False,
        show_progress: bool = False,
    ):
        """Download *version* into the cache directory and verify it runs.

        :param version: remote version (``location`` must be a download URL)
        :param overwrite: re-download even if already installed locally
        :param show_progress: show a tqdm progress bar (implied by verbosity)
        """
        show_progress = show_progress or self._config["verbosity"] <= logging.INFO

        vyper_version = self._config["vyper_version"]
        if vyper_version not in vvm.get_installed_vyper_versions():
            self._logger.info(f"Attempting to install vyper version {vyper_version!s}")
            vvm.install_vyper(vyper_version, show_progress)
            self._logger.info(f"Vyper version v{vyper_version!s} installed.")

        if version in self.local_versions and not overwrite:
            return

        self._logger.debug(
            f"Installing zkVyper v{version!s} from {version.location!r}."
        )
        resp = self._session.get(version.location, stream=show_progress)
        cache_dir: pathlib.Path = self._config["cache_dir"]
        # Fix: on a fresh machine the cache directory does not exist yet and
        # fp.open("wb") would fail.
        cache_dir.mkdir(parents=True, exist_ok=True)
        fp = cache_dir / ("zkvyper-" + str(version))
        f = fp.open("wb")
        try:
            if show_progress:
                with tqdm.tqdm(
                    total=int(resp.headers["content-length"]),
                    unit="b",
                    unit_scale=True,
                    desc=f"zkVyper v{version!s}",
                ) as prog:
                    # 64 KiB chunks: iter_content() without a size yields
                    # 1-byte chunks, which is pathologically slow.
                    for chunk in resp.iter_content(chunk_size=65536):
                        f.write(chunk)
                        prog.update(len(chunk))
            else:
                f.write(resp.content)
        except BaseException as exc:
            # Remove the partial download so a broken binary is never selected.
            f.close()
            fp.unlink()
            self._logger.error(f"Installation of v{version!s} failed.")
            self._logger.debug("", exc_info=exc)
            raise
        f.close()
        fp.chmod(0o755)  # make the downloaded file executable

        # Sanity check: the binary must run and report the expected version.
        ret = subprocess.run([fp.as_posix(), "--version"], capture_output=True)
        if ret.returncode != 0:
            logger.error(
                "Downloaded binary would not execute, or returned unexpected output."
            )
            fp.unlink()
            raise Exception()
        elif str(version) not in ret.stdout.decode():
            logger.error(
                f"Attempted to install zkVyper v{version}, received something else."
            )
            fp.unlink()
            raise Exception()

        self._logger.debug(f"Installation of v{version!s} finished.")

    def uninstall(self, version: BinaryVersion):
        """Delete the local binary for *version*; warn if it is not present."""
        try:
            pathlib.Path(urllib.parse.urlparse(version.location).path).unlink()
        except FileNotFoundError:
            self._logger.warning(
                f"zkVyper v{version!s} not found at {version.location!r}."
            )
        else:
            self._logger.info(
                f"Uninstalling zkVyper v{version!s} found at {version.location!r}."
            )

    @functools.cached_property
    def remote_versions(self) -> FrozenSet[BinaryVersion]:
        """Remote zkVyper binary versions compatible with the host system."""
        remote_url = self._REMOTE_BASE_URL + self._platform_id
        self._logger.debug(f"Fetching remote zkVyper versions from {remote_url!r}.")
        resp = self._session.get(remote_url)
        resp.raise_for_status()

        versions = set()
        for file in resp.json():
            if file["type"] != "file":
                continue
            # File names end with "-v<semver>"; strip the leading "v".
            version_string = file["name"].split("-")[-1][1:]
            versions.add(BinaryVersion(version_string, location=file["download_url"]))
        self._logger.debug(f"Found {len(versions)} zkVyper versions.")
        return frozenset(versions)

    @property
    def local_versions(self) -> FrozenSet[BinaryVersion]:
        """Local zkVyper binary versions."""
        versions = set()
        cache_dir: pathlib.Path = self._config["cache_dir"]
        if not cache_dir.exists():
            # Fix: nothing installed yet — iterdir() would raise FileNotFoundError.
            return frozenset(versions)
        for fp in cache_dir.iterdir():
            if not fp.is_file():
                continue
            versions.add(BinaryVersion(fp.name.split("-")[-1], location=fp.as_uri()))
        return frozenset(versions)

    def _get_logger(self):
        """Child logger: DEBUG file handler plus verbosity-limited console handler."""
        _logger = logger.getChild(self.__class__.__name__)
        _logger.setLevel(logging.DEBUG)
        if not _logger.hasHandlers():
            fh = logging.FileHandler(self._config["log_file"])
            fh.setFormatter(logging.Formatter("%(levelname)s:%(name)s:%(message)s"))
            _logger.addHandler(fh)

            ch = logging.StreamHandler()
            ch.setLevel(self._config["verbosity"])
            _logger.addHandler(ch)
        return _logger

    @functools.cached_property
    def _platform_id(self) -> str:
        """Platform identifier.

        See `Stack Overflow <https://stackoverflow.com/a/45125525>`_.
        """
        system, machine = platform.system(), platform.machine().lower()
        if system == "Linux" and machine in self._AMD64:
            return "linux-amd64"
        elif system == "Darwin" and machine in self._AMD64:
            return "macosx-amd64"
        # Fix: this branch previously re-tested _AMD64, making the arm64
        # return value unreachable on Apple-Silicon Macs.
        elif system == "Darwin" and machine in self._ARM64:
            return "macosx-arm64"
        raise PlatformError()
def compile(srcfiles: List[Union[str, pathlib.Path]], **kwargs: Any) -> Dict[str, Dict]:
    """Compile *srcfiles* using a VersionManager built from *kwargs* config."""
    manager = VersionManager(Config(**kwargs))
    return manager.compile(srcfiles)
def compile_source(src: str, **kwargs) -> Dict:
    """Compile a single Vyper source string via a temporary .vy file."""
    with tempfile.NamedTemporaryFile(suffix=".vy") as tmp:
        tmp.write(src.encode())
        tmp.flush()  # make the bytes visible to the zkvyper subprocess
        return compile([tmp.name], **kwargs)
def main():
    """Command-line entry point for the zkvvm tool."""
    config = Config()

    # top-level parser
    parser = argparse.ArgumentParser("zkvvm", description="zkVyper Version Manager")
    parser.add_argument(
        "--cache-dir",
        type=pathlib.Path,
        default=config["cache_dir"],
        help=f"Default: {config['cache_dir']!s}",
    )
    parser.add_argument(
        "--log-file",
        type=pathlib.Path,
        default=config["log_file"],
        help=f"Default: {config['log_file']!s}",
    )
    parser.add_argument("-v", action="count", default=0, help="Verbosity")
    parser.add_argument(
        "--vyper-version",
        type=Version,
        default=config["vyper_version"],
        help=f"Default: {config['vyper_version']!s}",
    )
    parser.add_argument(
        "--zk-version",
        help="zkVyper compiler version to use",
        default=config["zk_version"],
        # Fix: must be a SimpleSpec (like the default), not a Version — the
        # version manager calls .select() on this value, which only
        # SimpleSpec provides.
        type=SimpleSpec,
    )
    parser.add_argument("--version", action="version", version="%(prog)s 0.1.0")

    subparsers = parser.add_subparsers(title="commands", dest="command")
    subparsers.add_parser("ls", help="List available local versions")
    subparsers.add_parser("ls-remote", help="List available remote versions")

    install = subparsers.add_parser("install", help="Install a remote version")
    install.add_argument("version", help="Version to install", type=SimpleSpec)
    install.add_argument("--overwrite", action="store_const", const=True, default=False)

    uninstall = subparsers.add_parser("uninstall", help="Uninstall a local version")
    uninstall.add_argument("version", help="Version to uninstall", type=Version)
    uninstall.add_argument("-y", action="store_const", const=True, default=False)

    # renamed local so the module-level compile() helper is not shadowed
    compile_parser = subparsers.add_parser("compile", help="Compile contract(s)")
    compile_parser.add_argument("files", action="append", type=pathlib.Path)

    args = parser.parse_args()
    config["cache_dir"] = args.cache_dir
    config["log_file"] = args.log_file
    config["verbosity"] -= args.v * 10  # each -v lowers the console threshold
    config["vyper_version"] = args.vyper_version
    config["zk_version"] = args.zk_version

    vm = VersionManager(config)
    if args.command is None:
        parser.print_help()
    elif args.command == "ls":
        if vm.local_versions:
            print(*[str(v) for v in sorted(vm.local_versions, reverse=True)], sep="\n")
        else:
            print("No local versions found.")
    elif args.command == "ls-remote":
        print(*map(str, sorted(vm.remote_versions, reverse=True)), sep="\n")
    elif args.command == "install":
        version = args.version.select(vm.remote_versions)
        if version:
            vm.install(version, args.overwrite)
        else:
            print("Version not available")
    elif args.command == "uninstall":
        version = next(
            (version for version in vm.local_versions if version == args.version), None
        )
        if version and (args.y or input("Confirm [y/N]: ").lower().strip() == "y"):
            vm.uninstall(version)
        elif version is None:
            print("Version not found locally")
    elif args.command == "compile":
        print(json.dumps(vm.compile(args.files)))
if __name__ == "__main__":
    main()  # CLI entry point
import argparse
import yaml
import time
from zkw_db.my_db import Mysql
import logging
import json
import cv2
def read_yaml(yaml_dir: str):
    """Load and return the YAML document at *yaml_dir*.

    Fix: use a context manager so the file handle is closed even when
    parsing fails (the original left it open indefinitely).
    """
    with open(yaml_dir, 'r') as f:
        return yaml.load(f.read(), yaml.FullLoader)
def read_json():
    """Load and return 'laugh.json' from the current working directory.

    Fix: use a context manager so the file handle is closed (the original
    left it open indefinitely).
    """
    # NOTE(review): the path is hard-coded and resolved relative to the CWD.
    json_dir = 'laugh.json'
    with open(json_dir, 'r') as f:
        return json.load(f)
def parse_opt():
    """Build and parse the command-line options for this script.

    Fix: corrected the user-facing help typo 'save lod dir path'.
    """
    parse = argparse.ArgumentParser()
    parse.add_argument('--yaml_dir', type=str, help='config data_dir path', default='')
    parse.add_argument('--log_dir', type=str, help='save log dir path', default='')
    args = parse.parse_args()
    return args
def main():
    """Script entry point: read paths from laugh.json, load the YAML config,
    set up file logging, connect to MySQL, and insert one demo row."""
    # star_time = time.time()
    star_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
    print('start_time:', star_time)
    opts = parse_opt()
    json_data = read_json()
    # Command-line defaults are overwritten by the paths stored in laugh.json.
    yaml_dir = opts.yaml_dir = json_data['yaml_dir']
    log_dir = opts.log_dir = json_data['log_dir']
    config = read_yaml(yaml_dir=yaml_dir)

    # Configure file logging: DEBUG logger, INFO-and-above written to log_dir.
    logger = logging.getLogger('results')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(log_dir)
    fh.setLevel(logging.INFO)
    formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
    print(formatter)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    '''
    数据库连接
    '''
    # Open the database connection using the credentials from the YAML config.
    my_db = Mysql(
        host=config['host'],
        user=config['user'],
        password=config['password'],
        database=config['database'],
        port=config['port'],
        charset=config['charset']
    )
    '''
    字典(dict)方式传参
    '''
    # Row values passed around as a dict; datetime records the insert time.
    data = {
        'host': config['host'],
        'user': config['user'],
        'password': config['password'],
        'port': config['port'],
        'datetime': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    }

    # Insert one row into the first configured table.
    # NOTE(review): the SQL is built with %-interpolation — vulnerable to SQL
    # injection if config values are untrusted; parameterized queries preferred.
    sql = "INSERT INTO " + config['first_table'][0] + \
          "(host,user,password,port,datetime) VALUES ('%s','%s','%s','%s','%s') " \
          % (data['host'], data['user'], data['password'], data['port'], data['datetime'])
    logger.info(sql)
    my_db.insert(sql=sql)

    # #查询一条信息
    # sql = "select * from "+config['first_table']+" where age='17';"
    # logger.info(sql)
    # res = my_db.fetchone(sql)
    # print(res)
    #
    #
    # #改
    # sql = "update "+config['first_table']+" set age='18' where age='17';"
    # logger.info(sql)
    # my_db.updata(sql)
    #
    # #删
    # sql = "delete from "+config['first_table']+" where age='18';"
    # logger.info(sql)
    # my_db.updata(sql)
    #
    # #查询全部信息
    # sql = "select * from "+config['first_table']+" ;"
    # logger.info(sql)
    # for i in my_db.fetchall(sql):
    #     print(i)
if __name__ == '__main__':
    main()  # run the demo when executed as a script
import pymysql
class Mysql:
    """Minimal pymysql wrapper exposing fetch/insert/update helpers.

    NOTE(review): SQL strings are executed verbatim — callers are responsible
    for escaping; prefer parameterized queries for untrusted input.
    """

    def __init__(self, host, user, password, database, port, charset):
        """Open the connection; on failure, print the error and exit the process."""
        try:
            self.db = pymysql.connect(
                host=host,
                user=user,
                password=password,
                database=database,
                port=port,
                charset=charset,
            )
        except pymysql.Error as e:
            print('数据库连接失败', e)
            exit()
        self.cursor = self.db.cursor()  # shared cursor used by all helpers

    def fetchone(self, sql):
        """Run *sql* and return the first result row (None on error or no rows).

        Example: sql = "select * from " + table + " where age='17';"
        """
        try:
            self.cursor.execute(sql)
            data = self.cursor.fetchone()
            return data
        except pymysql.Error as e:
            print('fetchone Error', e)
            print('sql:', sql)

    def fetchall(self, sql):
        """Run *sql* and return all result rows (None on error).

        Example: sql = "select * from " + table + " ;"
        """
        try:
            self.cursor.execute(sql)
            data = self.cursor.fetchall()
            return data
        except pymysql.Error as e:
            print('fetchall Error:', e)
            print('sql :', sql)

    def insert(self, sql):
        """Run an INSERT and commit.

        Fix: roll back on error so the failed transaction does not leave the
        connection in an aborted state for subsequent statements.
        """
        try:
            self.cursor.execute(sql)
            self.db.commit()
        except pymysql.Error as e:
            self.db.rollback()
            print('insert Error:', e)
            print('sql:', sql)

    def updata(self, sql):
        """Run an UPDATE/DELETE and commit; roll back on error.

        (Name kept as ``updata`` for backward compatibility with callers.)
        """
        try:
            self.cursor.execute(sql)
            self.db.commit()
        except pymysql.Error as e:
            self.db.rollback()
            print('updata Error:', e)
            print('sql:', sql)

    def close(self):
        """Close the cursor and the underlying connection."""
        self.cursor.close()
        self.db.close()
from datetime import datetime
from zla_utilities import \
utility as ut
def get_sql_1(partnolist):
    """Build the partno IN-clauses for the retail (rsdet) and service (servdet) tables."""
    in_clause = ut.sel_insql(partnolist)
    sel_rs = 'rsdet.partno in {}'.format(in_clause)
    sel_serv = 'servdet.partno in {}'.format(in_clause)
    return sel_rs, sel_serv
def get_sql_2(customerlist):
    """Build the customer sub-select filters for the rs and serv tables.

    Returns (None, None) when *customerlist* is None, meaning no filtering.
    """
    if customerlist is None:
        return None, None
    in_clause = ut.sel_insql(customerlist)
    rscus_data = """rs.customerid in (select idcustomerlistkad from customerlistkad where idcustomersap in {})""".format(in_clause)
    servcus_data = """serv.customerid in (select idcustomerlistkad from customerlistkad where idcustomersap in {})""".format(in_clause)
    return rscus_data, servcus_data
def get_sql_3(mlist):
    """Build date-range predicates on rs.rsdate and serv.docdate.

    *mlist* is [start, end]; when both are None a rolling 48-month window is
    used, when both are strings an explicit BETWEEN range, otherwise a
    single-sided predicate.
    """
    if all(x is None for x in mlist):
        suffix = '>= DATE_SUB(now(), INTERVAL 48 MONTH)'
    elif all(type(x) is str for x in mlist):
        suffix = convertor_bothdate(mlist)
    else:
        suffix = convertor_singledate(mlist)
    return 'rs.rsdate ' + suffix, 'serv.docdate ' + suffix
def convertor_bothdate(dlist):
    """Return a SQL 'between "start" and "end"' fragment for two ISO dates.

    Fix: raise ValueError when start is after end — the original merely
    printed a warning and returned the literal placeholder
    'between "x" and "y"', silently producing an invalid query.

    :param dlist: [start, end] as 'YYYY-MM-DD' strings
    :raises ValueError: if start is after end, or a date fails to parse
    """
    _start = datetime.strptime(dlist[0], '%Y-%m-%d')
    _end = datetime.strptime(dlist[1], '%Y-%m-%d')
    if _start > _end:
        raise ValueError('Not allow start_date less than end_date !!')
    return 'between "{}" and "{}"'.format(dlist[0], dlist[1])
def convertor_singledate(dlist):
    """Build a date predicate when only one of [start, end] is given.

    With a start date: everything from that date on; with only an end date:
    the 48 months leading up to it.
    """
    start, end = dlist[0], dlist[1]
    if start is not None:
        return '>= "{}"'.format(start)
    return 'between DATE_SUB("{}", INTERVAL 48 MONTH) and "{}"'.format(end, end)
def merge_wherecondition(sql_sentense):
    """Join the non-None SQL fragments, each wrapped in ' (...) ', with 'and'."""
    wrapped = []
    for fragment in sql_sentense:
        if fragment is not None:
            wrapped.append(' (' + fragment + ') ')
    return "and".join(wrapped)
def finishing_sql(_rswhere, _servwhere):
    """Assemble the final retail and service SELECT statements.

    Each WHERE fragment is spliced between the fixed SELECT/JOIN prefix and
    the matching GROUP BY clause; returns (rs_sql, service_sql).
    """
    _sqlrs = """select rs.rsdate as date,kad.idcustomersap as customerid, rsdet.qty
    from rsorder as rs inner join rsorderdetails as rsdet on rs.idrsorder = rsdet.idrsorder
    inner join customerlistkad as kad on kad.idcustomerlistkad = rs.customerid
    where"""
    _sqlserv = """select serv.docdate as date,kad.idcustomersap as customerid, servdet.qty
    from servicedata as serv inner join servicedetails as servdet on serv.idservicedata = servdet.idservicedata
    inner join customerlistkad as kad on kad.idcustomerlistkad = serv.customerid
    where"""
    return (
        _sqlrs + _rswhere + " group by rs.rsdate,kad.idcustomersap",
        _sqlserv + _servwhere + " group by serv.docdate,kad.idcustomersap",
    )
from zla_material_class.zla_class.common_tools.master_sql import *
from zla_material_class.zla_class.rs_tools.rs_data_sql import *
from zla_material_class.zla_class.rs_tools.rs_method import *
from .common_tools import general_func as gfunc
from zla_general import \
vendor_finding_bymat as vmat, \
header_ecnbysap as header, \
movingcost_bymat as mov
class get_rs(object):
    """Retail-sales (RS) data accessor for a single part number.

    Gathers master/characteristic/purchasing data for the part, then queries
    front-counter (rs) and service sales and exposes them as History wrappers
    (``frntdata``, ``servdata``, combined ``rsdata``).
    """

    def __init__(self, partno, start=None, end=None, customer=None, parent=False):
        """
        Fetch SKC retail data for *partno*.

        :param partno: part number (required)
        :param start: start date (optional)
        :param end: end date (optional)
        :param customer: customer code by SAP only (optional)
        :param parent: if True, data for all branches of the customer's
            parent is included (default is False)
        """
        # Master / characteristic data for the part.
        self.partno = partno
        self.allcustomer = customer_parentable(customer, parent)
        self.dictchain = header._parent_ecnchainbymat(partno)
        self.master = get_partsmaster(partno)
        self.character = get_partscharacter(partno)
        self.purchasing = get_partspurchasing(partno)
        self.movingcost = mov.get_movingcost(partno)
        self.vendor, self.cost, self.mapping = vmat.get_vendor_mat(partno, self.purchasing.get('spt'))
        # Build the SQL: restrict to this partno and its ECN successors
        # (entries of the chain from *partno* onward).
        lst_partno = list(self.dictchain.keys())
        select_partno = lst_partno[lst_partno.index(partno):]
        sql_frntpartno, sql_servpartno = get_sql_1(select_partno)
        sql_frntcustomer, sql_servcustomer = get_sql_2(self.allcustomer)
        sql_frnttimerange, sql_servtimerange = get_sql_3([start, end])
        list_frntsql = [sql_frntpartno, sql_frntcustomer, sql_frnttimerange]
        list_servsql = [sql_servpartno, sql_servcustomer, sql_servtimerange]
        where_frntsql = gfunc.merge_wherecondition(list_frntsql)
        where_servsql = gfunc.merge_wherecondition(list_servsql)
        frnt_msql,serv_msql = finishing_sql(where_frntsql,where_servsql)
        # Run both queries and tag each row with the requested header partno.
        frnt_data = gfunc.get_data(frnt_msql)
        serv_data = gfunc.get_data(serv_msql)
        frnt_data['partno_header'] = partno
        serv_data['partno_header'] = partno
        self.frntraw = frnt_data.copy()
        self.servraw = serv_data.copy()
        self.rsraw = pd.concat([self.frntraw,self.servraw])
        self.frntdata = History(frnt_data)
        self.servdata = History(serv_data)
        self.rsdata = History(merge_data(self.frntdata.raw,self.servdata.raw))
class History(pd.DataFrame):
    """Holds raw retail rows on ``.raw`` and aggregates them via ``history()``.

    NOTE(review): inherits from pd.DataFrame but initializes itself empty and
    keeps the data only on the ``raw`` attribute — the instance's own frame
    stays empty; confirm this is intentional before calling DataFrame methods
    directly on a History object.
    """

    def __init__(self,_data):
        super(History, self).__init__()
        self.raw = _data  # unaggregated rows from the retail query

    def history(self,_by='days'):
        """
        Generate SKC retail sales data at the requested granularity.

        :param _by:
            - 'days' (default): daily data
            - 'weeks': weekly data
            - 'months': monthly data
            - 'years': annual data
        :return:
            Dataframe with date index
        """
        return gfunc.to_number(self.raw,_by,_datecol='date')
from zla_material_class.zla_class.rs_tools.rs_method import *
from .common_tools import general_func as gfunc
from .gi_tools import gi_data_sql as gisql,\
multiple_gi_sql as multigisql
import zla_material_class.zla_class.ws_tools.multiple_ws_sql as multiwssql
from zla_utilities import utility as ut
from zla_general import header_ecnbysap as header
import pandas as pd
from zla_general import \
get_newdeldate_bydf as newdel
class get_multiple_gi(object):
    """Goods-issue (GI) data accessor for several part numbers at once."""

    def __init__(self, list_partno=None, start=None, end=None, list_customer=None, parent=False):
        """
        Fetch SKC wholesale goods-issue data for multiple materials.

        :param list_partno: part numbers (required, must be a list)
        :param start: start date (optional)
        :param end: end date (optional)
        :param list_customer: customer codes by SAP only (optional, list)
        :param parent: if True, data for all branches of each customer's
            parent is included (default is False)
        """
        self.lstpartno = list_partno
        self.allcustomer = invert_tocustomerlv1(list_customer, parent)
        # Build the SQL: expand each partno to its ECN successor chain first.
        _dictlist = header._parent_ecnchainbylist(self.lstpartno)
        selecteditem = selected_partno(_dictlist)
        sql_sentence = multigisql.get_multiplesql(selecteditem, self.allcustomer)
        sql_timerange = gisql.get_sql_3([start, end])
        list_sql = [sql_sentence, sql_timerange]
        where_sql = gfunc.merge_wherecondition(list_sql)
        msql = multigisql.finishing_multisql(where_sql)
        _original = gfunc.get_data(msql)
        # Map each row's partno back to its ECN-chain header.
        _original = header._parent_ecnchainbydf(_original, 'partno')
        self.raw_data = _original.copy()
        self.gidata = History(_original)
def selected_partno(_dict):
    """Flatten a {partno: chain} mapping into its list of keys; None passes through."""
    if _dict is None:
        return None
    return list(ut.merge_dict_inlist(_dict).keys())
def invert_tocustomerlv1(cust_item, parent):
    """Expand *cust_item* to every customer sharing the same parent when *parent* is True.

    With parent False (or any non-True value), *cust_item* is returned unchanged.
    """
    if parent != True:
        return cust_item
    db = 'partsdatabase'
    msql = """select customerid,customername,parent
    from customerlistsap where parent in (select parent from customerlistsap where customerid in {})""".format(ut.sel_insql(cust_item))
    return dbcon.opendata(msql, db)['customerid'].unique()
class History(pd.DataFrame):
    """Holds raw goods-issue rows on ``.raw`` and aggregates them via ``history()``.

    NOTE(review): inherits from pd.DataFrame but initializes itself empty and
    keeps the data only on the ``raw`` attribute; confirm before using
    DataFrame methods directly on a History instance.
    """

    def __init__(self,_data):
        super(History, self).__init__()
        self.raw = _data  # unaggregated rows from the GI query

    def history(self,_by='days'):
        """
        Generate SKC goods-issue data at the requested granularity.

        :param _by:
            - 'days' (default): daily data
            - 'weeks': weekly data
            - 'months': monthly data
            - 'years': annual data
        :return:
            Dataframe with date index
        """
        return gfunc.to_number(self.raw,_by,_datecol='billdate')
import zla_material_class.zla_class.ws_tools.ws_data_sql as wssql
from .common_tools import general_func as gfunc
import zla_material_class.zla_class.ws_tools.multiple_ws_sql as multiwssql
from zla_utilities import utility as ut, \
db_connector as dbcon
from zla_general import header_ecnbysap as header
import pandas as pd
from zla_general import \
get_newdeldate_bydf as newdel
class get_multiple_ws(object):
    """Wholesale order data accessor for several part numbers at once."""

    def __init__(self, list_partno=None, start=None, end=None, list_customer=None, parent=False):
        """
        Fetch SKC wholesale data for multiple materials.

        :param list_partno: part numbers (required, must be a list)
        :param start: start date (optional)
        :param end: end date (optional)
        :param list_customer: customer codes by SAP only (optional, list)
        :param parent: if True, data for all branches of each customer's
            parent is included (default is False)
        """
        self.lstpartno = list_partno
        self.allcustomer = invert_tocustomerlv1(list_customer, parent)
        # Build the SQL: expand each partno to its ECN successor chain.
        _dictlist = header._parent_ecnchainbylist(self.lstpartno)
        selecteditem = selected_partno(_dictlist)
        sql_nontapa = "orders.itemcat <> 'TAPA'"  # exclude TAPA line items
        sql_sentence = multiwssql.get_multiplesql(selecteditem, self.allcustomer)
        sql_timerange = wssql.get_sql_3([start, end])
        list_sql = [sql_nontapa, sql_sentence, sql_timerange]
        where_sql = gfunc.merge_wherecondition(list_sql)
        msql = multiwssql.finishing_multisql(where_sql)
        _original = gfunc.get_data(msql)
        # Delivery date defaults to the first requested delivery date.
        _original['deldate'] = _original['del1stdate']
        _original = header._parent_ecnchainbydf(_original, 'partno')
        self.raw_data = _original.copy()
        self.raw_newdeldata = newdel.generate_deldate(_original).copy()
        self.original_data = History(_original)
        self.newdel_data = History(newdel.generate_deldate(_original))
def selected_partno(_dict):
    """Flatten a {partno: chain} mapping into its list of keys; None passes through."""
    if _dict is None:
        return None
    return list(ut.merge_dict_inlist(_dict).keys())
def invert_tocustomerlv1(cust_item, parent):
    """Expand *cust_item* to every customer sharing the same parent when *parent* is True.

    With parent False (or any non-True value), *cust_item* is returned unchanged.
    """
    if parent != True:
        return cust_item
    db = 'partsdatabase'
    msql = """select customerid,customername,parent
    from customerlistsap where parent in (select parent from customerlistsap where customerid in {})""".format(ut.sel_insql(cust_item))
    return dbcon.opendata(msql, db)['customerid'].unique()
class History(pd.DataFrame):
    """Holds raw wholesale rows on ``.raw`` and aggregates them via ``history()``.

    NOTE(review): inherits from pd.DataFrame but initializes itself empty and
    keeps the data only on the ``raw`` attribute; confirm before using
    DataFrame methods directly on a History instance.
    """

    def __init__(self,_data):
        super(History, self).__init__()
        self.raw = _data  # unaggregated rows from the wholesale query

    def history(self,_by='days'):
        """
        Generate SKC wholesale data at the requested granularity.

        :param _by:
            - 'days' (default): daily data
            - 'weeks': weekly data
            - 'months': monthly data
            - 'years': annual data
        :return:
            Dataframe with date index
        """
        return gfunc.to_number(self.raw,_by,_datecol='deldate')
import zla_material_class.zla_class.ws_tools.ws_data_sql as wssql
import zla_material_class.zla_class.common_tools.master_sql as mastersql
from .common_tools import general_func as gfunc
import pandas as pd
from zla_general import \
vendor_finding_bymat as vmat, \
header_ecnbysap as header, \
get_newdeldate_bydf as newdel, \
movingcost_bymat as mov
class get_ws(object):
    """Wholesale order data accessor for a single part number.

    Gathers master/characteristic/purchasing data for the part, then queries
    wholesale orders (excluding TAPA items) and exposes both the original and
    recomputed-delivery-date views as History wrappers.
    """

    def __init__(self, partno, start=None, end=None, customer=None, parent=False):
        """
        Fetch SKC wholesale data for *partno*.

        :param partno: part number (required)
        :param start: start date (optional)
        :param end: end date (optional)
        :param customer: customer code by SAP only (optional)
        :param parent: if True, data for all branches of the customer's
            parent is included (default is False)
        """
        # Master / characteristic data for the part.
        self.partno = partno
        self.allcustomer = mastersql.customer_parentable(customer, parent)
        self.dictchain = header._parent_ecnchainbymat(partno)
        self.master = mastersql.get_partsmaster(partno)
        self.character = mastersql.get_partscharacter(partno)
        self.purchasing = mastersql.get_partspurchasing(partno)
        self.movingcost = mov.get_movingcost(partno)
        self.vendor, self.cost, self.mapping = vmat.get_vendor_mat(partno, self.purchasing.get('spt'))
        # Build the SQL: exclude TAPA items and restrict to this partno and
        # its ECN successors (chain entries from *partno* onward).
        sql_nontapa = "orders.itemcat <> 'TAPA'"
        lst_partno = list(self.dictchain.keys())
        select_partno = lst_partno[lst_partno.index(partno):]
        sql_partno = wssql.get_sql_1(select_partno)
        sql_customer = wssql.get_sql_2(self.allcustomer)
        sql_timerange = wssql.get_sql_3([start, end])
        list_sql = [sql_nontapa, sql_partno, sql_customer, sql_timerange]
        where_sql = gfunc.merge_wherecondition(list_sql)
        msql = wssql.finishing_sql(where_sql)
        _original = gfunc.get_data(msql)
        # Delivery date defaults to the first requested delivery date.
        _original['deldate'] = _original['del1stdate']
        _original['partno_header'] = partno
        self.raw_data = _original.copy()
        self.new_deldata = newdel.generate_deldate(_original).copy()
        self.original_data = History(_original)
        self.newdel_data = History(newdel.generate_deldate(_original))
class History(pd.DataFrame):
    """Holds raw wholesale rows on ``.raw``; ``history()`` filters and aggregates them."""

    # filter name -> (sorg value, distch value, distch-must-equal flag);
    # a None entry means "no constraint on that column".
    _FILTERS = {
        'all': (None, None, True),
        'dom_all': ('DY10', None, True),
        'dom_ndist': ('DY10', '20', False),
        'dom_dist': ('DY10', '20', True),
        'exp_all': ('DY20', None, True),
        'exp_nkbt': ('DY20', '30', False),
        'exp_kbt': ('DY20', '30', True),
    }

    def __init__(self, _data):
        super(History, self).__init__()
        self.raw = _data  # unaggregated rows from the wholesale query

    def history(self, _filter='all', _by='days'):
        """
        Generate SKC wholesale data as your requirement.

        :param _filter:
            - 'all' (default): overall volume
            - 'dom_all': domestic, all channels
            - 'dom_ndist': domestic, non-distributor
            - 'dom_dist': domestic, distributor only
            - 'exp_all': export, all channels
            - 'exp_nkbt': export, SKC territory only
            - 'exp_kbt': export, non-territory
        :param _by:
            - 'days' (default): daily data
            - 'weeks': weekly data
            - 'months': monthly data
            - 'years': annual data
        :return:
            Dataframe with date index
        """
        _his = self.raw.copy()
        if _filter not in self._FILTERS:
            raise Exception("Sorry, your filter was wrong (allow only : ['all','dom_all','dom_ndist','dom_dist','exp_all','exp_nkbt','exp_kbt'])")
        sorg, distch, must_equal = self._FILTERS[_filter]
        if sorg is not None:
            _his = _his[_his['sorg'] == sorg]
        if distch is not None:
            if must_equal:
                _his = _his[_his['distch'] == distch]
            else:
                _his = _his[_his['distch'] != distch]
        return gfunc.to_number(_his, _by, _datecol='deldate')
from zla_material_class.zla_class.rs_tools.rs_method import *
import zla_material_class.zla_class.rs_tools.rs_data_sql as rssql
import zla_material_class.zla_class.rs_tools.multiple_rs_sql as multirssql
from zla_material_class.zla_class.common_tools import general_func as gfunc
from zla_utilities import utility as ut
from zla_general import header_ecnbysap as header
import pandas as pd
class get_multiple_rs(object):
    """Retail-sales (RS) data accessor for several part numbers at once."""

    def __init__(self, list_partno, start=None, end=None, list_customer=None, parent=False):
        """
        Fetch SKC retail data for multiple materials.

        :param list_partno: part numbers (required, must be a list)
        :param start: start date (optional)
        :param end: end date (optional)
        :param list_customer: customer codes by SAP only (optional, list)
        :param parent: if True, data for all branches of each customer's
            parent is included (default is False)
        """
        self.lstpartno = list_partno
        self.allcustomer = invert_tocustomerlv1(list_customer, parent)
        # Build the SQL: expand each partno to its ECN successor chain.
        _dictlist = header._parent_ecnchainbylist(self.lstpartno)
        select_partno = list(ut.merge_dict_inlist(_dictlist).keys())
        sql_frnt, sql_serv = multirssql.get_multiplesql(select_partno,self.allcustomer)
        sql_frnttimerange, sql_servtimerange = rssql.get_sql_3([start, end])
        list_frntsql = [sql_frnt, sql_frnttimerange]
        list_servsql = [sql_serv, sql_servtimerange]
        where_frntsql = gfunc.merge_wherecondition(list_frntsql)
        where_servsql = gfunc.merge_wherecondition(list_servsql)
        frnt_msql, serv_msql = multirssql.finishing_multisql(where_frntsql, where_servsql)
        # Run both queries and map rows back to their ECN-chain headers.
        frnt_data = gfunc.get_data(frnt_msql)
        serv_data = gfunc.get_data(serv_msql)
        frnt_data = header._parent_ecnchainbydf(frnt_data, 'partno')
        serv_data = header._parent_ecnchainbydf(serv_data, 'partno')
        self.frntraw = frnt_data.copy()
        self.servraw = serv_data.copy()
        self.rsraw = pd.concat([self.frntraw, self.servraw])
        self.frntdata = History(frnt_data)
        self.servdata= History(serv_data)
        self.rsdata= History(merge_data(self.frntdata.raw, self.servdata.raw))
def invert_tocustomerlv1(cust_item, parent):
    """Expand *cust_item* to every customer sharing the same parent when *parent* is True.

    With parent False (or any non-True value), *cust_item* is returned unchanged.
    """
    if parent != True:
        return cust_item
    db = 'partsdatabase'
    msql = """select customerid,customername,parent
    from customerlistsap where parent in (select parent from customerlistsap where customerid in {})""".format(ut.sel_insql(cust_item))
    return dbcon.opendata(msql, db)['customerid'].unique()
class History(pd.DataFrame):
    """Holds raw retail rows on ``.raw`` and aggregates them via ``history()``.

    NOTE(review): inherits from pd.DataFrame but initializes itself empty and
    keeps the data only on the ``raw`` attribute; confirm before using
    DataFrame methods directly on a History instance.
    """

    def __init__(self,_data):
        super(History, self).__init__()
        self.raw = _data  # unaggregated rows from the retail query

    def history(self,_by='days'):
        """
        Generate SKC retail sales data at the requested granularity.

        :param _by:
            - 'days' (default): daily data
            - 'weeks': weekly data
            - 'months': monthly data
            - 'years': annual data
        :return:
            Dataframe with date index
        """
        return gfunc.to_number(self.raw,_by,_datecol='date')
if __name__ == '__main__':
    # Ad-hoc manual test: fetch retail history for two part numbers.
    _list = ['W9516-54172','W9516-54162']
    _x = get_multiple_rs(list_partno=_list)
from datetime import datetime
from zla_utilities import \
utility as ut
def get_sql_1(partnolist):
    """IN-clause filter on orders.partno for the given part numbers."""
    return 'orders.partno in {}'.format(ut.sel_insql(partnolist))
def get_sql_2(customerlist):
    """IN-clause filter on ws.customerid; None when no customer filter is requested."""
    if customerlist is None:
        return None
    return 'ws.customerid in {}'.format(ut.sel_insql(customerlist))
def get_sql_3(mlist):
    """Date-range predicate on orders.del1stdate.

    *mlist* is [start, end]; when both are None a rolling 48-month window is
    used, when both are strings an explicit BETWEEN range, otherwise a
    single-sided predicate.
    """
    if all(x is None for x in mlist):
        suffix = '>= DATE_SUB(now(), INTERVAL 48 MONTH)'
    elif all(type(x) is str for x in mlist):
        suffix = convertor_bothdate(mlist)
    else:
        suffix = convertor_singledate(mlist)
    return 'orders.del1stdate ' + suffix
def convertor_bothdate(dlist):
    """Build a SQL ``between`` clause from a [start, end] date-string pair.

    :param dlist: two 'YYYY-MM-DD' strings, start date first.
    :return: the clause 'between "<start>" and "<end>"'.
    :raises ValueError: if a date does not parse or start is after end.
        (Previously an inverted range only printed a warning and returned the
        literal placeholder 'between "x" and "y"', which produced silently
        broken SQL downstream.)
    """
    _start = datetime.strptime(dlist[0], '%Y-%m-%d')
    _end = datetime.strptime(dlist[1], '%Y-%m-%d')
    if _start > _end:
        raise ValueError(
            'start_date {} must not be later than end_date {}'.format(
                dlist[0], dlist[1]))
    return 'between "{}" and "{}"'.format(dlist[0], dlist[1])
def convertor_singledate(dlist):
    """Open-ended date clause: from start onwards, or the 48 months up to end."""
    start, end = dlist[0], dlist[1]
    if start is not None:
        return '>= "{}"'.format(start)
    return 'between DATE_SUB("{}", INTERVAL 48 MONTH) and "{}"'.format(end, end)
def merge_wherecondition(sql_sentense):
    """Join the non-None condition fragments with SQL AND, padding each one."""
    wrapped = []
    for fragment in sql_sentense:
        if fragment is not None:
            wrapped.append(' (' + fragment + ') ')
    return "and".join(wrapped)
def finishing_sql(_where):
    """Assemble the full order-history query around the given WHERE body."""
    select_clause = """select orders.partno, ws.customerid, ws.shipto ,ws.sotype,ws.sorg,ws.distch, ws.orderdate, orders.del1stdate,sum(orders.qty) as qty
    from partssaleorder as ws inner join orderdetails as orders on ws.saleorder = orders.saleorder
    where"""
    group_clause = " group by orders.partno, ws.customerid, ws.shipto , ws.sotype, ws.sorg, ws.distch, ws.orderdate,orders.del1stdate"
    return select_clause + _where + group_clause
from datetime import datetime
from zla_material_class.zla_class.common_tools import general_func as gfunc
from zla_utilities import \
utility as ut
def get_sql_1(partnolist):
    """Part-number WHERE fragments for the retail-sale and service tables."""
    clause = ut.sel_insql(partnolist)
    sel_rs = 'rsdet.partno in {}'.format(clause)
    sel_serv = 'servdet.partno in {}'.format(clause)
    return sel_rs, sel_serv
def get_sql_2(customerlist):
    """Customer WHERE fragments (rs and service), or (None, None) if no filter."""
    if customerlist is None:
        return None, None
    clause = ut.sel_insql(customerlist)
    rscus_data = """rs.customerid in (select idcustomerlistkad from customerlistkad where idcustomersap in {})""".format(clause)
    servcus_data = """serv.customerid in (select idcustomerlistkad from customerlistkad where idcustomersap in {})""".format(clause)
    return rscus_data, servcus_data
def get_sql_3(mlist):
    """Date WHERE fragments for rs.rsdate and serv.docdate from [start, end]."""
    rs_prefix = 'rs.rsdate '
    serv_prefix = 'serv.docdate '
    if all(x is None for x in mlist):
        suffix = '>= DATE_SUB(now(), INTERVAL 48 MONTH)'
    elif all(type(x) is str for x in mlist):
        suffix = gfunc.convertor_bothdate(mlist)
    else:
        suffix = gfunc.convertor_singledate(mlist)
    return rs_prefix + suffix, serv_prefix + suffix
def finishing_sql(_rswhere,_servwhere):
    """Assemble the retail-sale and service aggregation queries."""
    rs_select = """select rs.rsdate as date,kad.idcustomersap as customerid, sum(rsdet.qty) as qty,rs.typeoforder
    from rsorder as rs inner join rsorderdetails as rsdet on rs.idrsorder = rsdet.idrsorder
    inner join customerlistkad as kad on kad.idcustomerlistkad = rs.customerid
    where"""
    serv_select = """select serv.docdate as date,kad.idcustomersap as customerid, sum(servdet.qty) as qty
    from servicedata as serv inner join servicedetails as servdet on serv.idservicedata = servdet.idservicedata
    inner join customerlistkad as kad on kad.idcustomerlistkad = serv.customerid
    where"""
    rs_group = " group by rs.rsdate,kad.idcustomersap,rs.typeoforder"
    serv_group = " group by serv.docdate,kad.idcustomersap"
    return rs_select + _rswhere + rs_group, serv_select + _servwhere + serv_group
from datetime import datetime
from zla_utilities import \
utility as ut, \
db_connector as dbcon
def get_multiplesql(itemlist,customerlist):
    """Part(+customer) WHERE fragments for the rs and service detail tables.

    With a customer filter, the clause matches (partno, kad-customer) pairs;
    without one, only part numbers are constrained.
    """
    if customerlist is None:
        clause = ut.sel_insql(itemlist)
        sel_rs = 'rsdet.partno in {}'.format(clause)
        sel_serv = 'servdet.partno in {}'.format(clause)
    else:
        cust_kad = convertcustomersap_kad(customerlist)
        pairs = tuple((i, j) for i in itemlist for j in cust_kad)
        clause = ut.sel_insql(pairs)
        sel_rs = '(rsdet.partno,rs.customerid) in {}'.format(clause)
        sel_serv = '(servdet.partno,serv.customerid) in {}'.format(clause)
    return sel_rs, sel_serv
def convertcustomersap_kad(customerlist):
    """Map SAP customer ids to the distinct KAD ids stored in customerlistkad."""
    query = 'select idcustomerlistkad from customerlistkad where idcustomersap in {}'.format(ut.sel_insql(customerlist))
    return dbcon.opendata(query, 'partsdatabase')['idcustomerlistkad'].unique()
def finishing_multisql(_rswhere,_servwhere):
    """Assemble the per-part retail-sale and service aggregation queries."""
    rs_select = """select rs.rsdate as date,kad.idcustomersap as customerid,rsdet.partno, sum(rsdet.qty) as qty,rs.typeoforder
    from rsorder as rs inner join rsorderdetails as rsdet on rs.idrsorder = rsdet.idrsorder
    inner join customerlistkad as kad on kad.idcustomerlistkad = rs.customerid
    where"""
    serv_select = """select serv.docdate as date,kad.idcustomersap as customerid,servdet.partno, sum(servdet.qty) as qty
    from servicedata as serv inner join servicedetails as servdet on serv.idservicedata = servdet.idservicedata
    inner join customerlistkad as kad on kad.idcustomerlistkad = serv.customerid
    where"""
    rs_group = " group by rs.rsdate,kad.idcustomersap,rsdet.partno,rs.typeoforder"
    serv_group = " group by serv.docdate,kad.idcustomersap,servdet.partno"
    return rs_select + _rswhere + rs_group, serv_select + _servwhere + serv_group
import datetime, time
import calendar
# Snapshot taken at import time: every default argument of get_time() below
# is frozen to the moment this module was first imported, not the call time.
now = datetime.datetime.now()
def get_time(year=now.year,
             month=now.month,
             day=now.day,
             hour=now.hour,
             minute=now.minute,
             second=now.second,
             week=-1,
             last_day_of_month=False,
             type="time",
             detail=True):
    """Build a datetime/date (or its string form) from the given parts.

    :param year: year (defaults to the import-time value)
    :param month: month (defaults to the import-time value)
    :param day: day (defaults to the import-time value)
    :param hour: hour (defaults to the import-time value)
    :param minute: minute (defaults to the import-time value)
    :param second: second (defaults to the import-time value)
    :param week: weekday number (default -1; if != -1, return that day --
        1 = Monday -- of the week containing (year, month, day), and the
        ``day`` argument is only used to locate the week)
    :param last_day_of_month: use the last day of the month (default False)
    :param type: output type -- "time" returns a datetime/date object,
        anything else returns str(...)
    :param detail: include hour/minute/second? (default True; False yields
        a date-only result)
    :return: datetime.datetime / datetime.date or str
    """
    if week != -1:
        weekday = datetime.datetime(year, month, day, hour, minute, second)
        one_day = datetime.timedelta(days=1)
        # Walk back to Monday of the containing week, then step forward.
        while weekday.weekday() != 0:
            weekday -= one_day
        date = weekday + datetime.timedelta(days=week - 1)
        if not detail:
            # Bug fix: this branch previously ignored ``detail`` and ``type``
            # and always returned a full datetime object.
            date = date.date()
    else:
        if last_day_of_month:  # last day of the month
            day = calendar.monthrange(year, month)[1]
        if not detail:
            date = datetime.date(year, month, day)
        else:
            date = datetime.datetime(year, month, day, hour, minute, second)
    return date if type == "time" else str(date)
def get_timestamp(detail=True):
    """Return the current Unix timestamp.

    :param detail: True -> float with fractional seconds;
                   False -> truncated to the integer part (first 10 digits).
    :return: the timestamp
    """
    stamp = time.time()
    return stamp if detail else int(stamp)
def timestamp_to_str(timestamp, strformat):
    """Format a Unix timestamp as a local-time string.

    :param timestamp: Unix timestamp
    :param strformat: strftime format, e.g. "%Y-%m-%d %H:%M:%S"
    :return: the formatted time string
    """
    local = time.localtime(timestamp)
    return time.strftime(strformat, local)
def str_to_timestamp(timestr, strformat):
    """Parse a local-time string and return its Unix timestamp.

    :param timestr: time string
    :param strformat: strptime format, e.g. "%Y-%m-%d %H:%M:%S"
    :return: integer Unix timestamp (seconds)
    """
    parsed = time.strptime(timestr, strformat)
    return int(time.mktime(parsed))
# lib-name
```
step to commit:
step rename:
rename:
./
|__src/
            |__package_name/
to your package name
step edit:
edit:
./
|__setup.cfg
name, version, author, author_email
step build:
step-1:
make a pypi account
step-2:
run code on command line:
Unix/macOS:
python3 -m pip install --upgrade build
python3 -m pip install --upgrade twine
python3 -m build
twine upload dist/*
Windows:
py -m pip install --upgrade build
py -m pip install --upgrade twine
py -m build
twine upload dist/*
```
Uploading distributions to `https://test.pypi.org/legacy/`
Enter your username: `[your username]`
Enter your password: `[your password, this is hidden]`
Uploading ```package-name-version-py3-none-any.whl```
100%|█████████████████████| 4.65k/4.65k [00:01<00:00, 2.88kB/s]
Uploading ```package-name-version.tar.gz```
100%|█████████████████████| 4.25k/4.25k [00:01<00:00, 3.05kB/s] | zlc | /zlc-0.0.1.tar.gz/zlc-0.0.1/README.md | README.md |
# This file is part of python-zlib-ng which is distributed under the
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2.
# This file uses code from CPython's Lib/gzip.py after backported changes from
# python-isal were merged into CPython.
# Changes compared to CPython:
# - Subclassed GzipFile to GzipNGFile. Methods that included calls to zlib have
# been overwritten with the same methods, but now calling to zlib_ng.
# - _GzipReader._add_read_data uses zlib_ng.crc32 instead of zlib.crc32.
# - compress, decompress use zlib_ng methods rather than zlib.
# - The main() function's gzip utility supports many more options for easier
# use. This was ported from the python-isal module
"""Similar to the stdlib gzip module. But using zlib-ng to speed up its
methods."""
import argparse
import gzip
import io
import os
import struct
import sys
import time
import _compression # noqa: I201 # Not third-party
from . import zlib_ng
__all__ = ["GzipFile", "open", "compress", "decompress", "BadGzipFile",
"READ_BUFFER_SIZE"]
# Compression level shortcuts mapped onto the zlib-ng constants.
_COMPRESS_LEVEL_FAST = zlib_ng.Z_BEST_SPEED
_COMPRESS_LEVEL_TRADEOFF = zlib_ng.Z_DEFAULT_COMPRESSION
_COMPRESS_LEVEL_BEST = zlib_ng.Z_BEST_COMPRESSION
#: The amount of data that is read in at once when decompressing a file.
#: Increasing this value may increase performance.
#: 128K is also the size used by pigz and cat to read files from the
# filesystem.
READ_BUFFER_SIZE = 128 * 1024
# Gzip member header flag bits taken from the FLG byte (RFC 1952).
FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
# Internal file-mode markers mirroring the values used by gzip.GzipFile.
READ, WRITE = 1, 2
try:
    BadGzipFile = gzip.BadGzipFile  # type: ignore
except AttributeError:  # Versions lower than 3.8 do not have BadGzipFile
    BadGzipFile = OSError  # type: ignore
# The open method was copied from the CPython source with minor adjustments.
def open(filename, mode="rb", compresslevel=_COMPRESS_LEVEL_TRADEOFF,
         encoding=None, errors=None, newline=None):
    """Open a gzip-compressed file in binary or text mode, backed by zlib-ng.

    The filename argument can be an actual filename (a str or bytes object),
    or an existing file object to read from or write to.

    The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for
    binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode
    is "rb".

    For binary mode, this function is equivalent to the GzipNGFile
    constructor: GzipNGFile(filename, mode, compresslevel). In this case the
    encoding, errors and newline arguments must not be provided.

    For text mode, a GzipNGFile object is created and wrapped in an
    io.TextIOWrapper instance with the specified encoding, error handling
    behavior, and line ending(s).
    """
    text_mode = "t" in mode
    if text_mode:
        if "b" in mode:
            raise ValueError("Invalid mode: %r" % (mode,))
    else:
        # The text-handling arguments are meaningless in binary mode.
        if encoding is not None:
            raise ValueError(
                "Argument 'encoding' not supported in binary mode")
        if errors is not None:
            raise ValueError("Argument 'errors' not supported in binary mode")
        if newline is not None:
            raise ValueError("Argument 'newline' not supported in binary mode")
    gz_mode = mode.replace("t", "")
    # __fspath__ method is os.PathLike
    if isinstance(filename, (str, bytes)) or hasattr(filename, "__fspath__"):
        binary_file = GzipNGFile(filename, gz_mode, compresslevel)
    elif hasattr(filename, "read") or hasattr(filename, "write"):
        binary_file = GzipNGFile(None, gz_mode, compresslevel, filename)
    else:
        raise TypeError("filename must be a str or bytes object, or a file")
    if not text_mode:
        return binary_file
    return io.TextIOWrapper(binary_file, encoding, errors, newline)
class GzipNGFile(gzip.GzipFile):
    """The GzipNGFile class simulates most of the methods of a file object with
    the exception of the truncate() method.

    This class only supports opening files in binary mode. If you need to open
    a compressed file in text mode, use the gzip.open() function.
    """
    def __init__(self, filename=None, mode=None,
                 compresslevel=_COMPRESS_LEVEL_BEST,
                 fileobj=None, mtime=None):
        """Constructor for the GzipNGFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, an io.BytesIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may include the original
        filename of the uncompressed file. It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x',
        or 'xb' depending on whether the file will be read or written.
        The default is the mode of fileobj if discernible; otherwise, the
        default is 'rb'. A mode of 'r' is equivalent to one of 'rb', and
        similarly for 'w' and 'wb', 'a' and 'ab', and 'x' and 'xb'.

        The compresslevel argument is an integer from 0 to 9 controlling the
        level of compression; 1 is fastest and produces the least
        compression, and 9 is slowest and produces the most compression. 0
        is no compression. The default is 9 (_COMPRESS_LEVEL_BEST).

        The mtime argument is an optional numeric timestamp to be written
        to the last modification time field in the stream when compressing.
        If omitted or None, the current time is used.
        """
        super().__init__(filename, mode, compresslevel, fileobj, mtime)
        if self.mode == WRITE:
            # Negative wbits -> raw deflate stream: the gzip header/trailer
            # are written by the parent GzipFile machinery, not by zlib-ng.
            self.compress = zlib_ng.compressobj(compresslevel,
                                                zlib_ng.DEFLATED,
                                                -zlib_ng.MAX_WBITS,
                                                zlib_ng.DEF_MEM_LEVEL,
                                                0)
        if self.mode == READ:
            # Replace the stdlib reader with the zlib-ng backed one.
            raw = _GzipNGReader(self.fileobj)
            self._buffer = io.BufferedReader(raw)
    def __repr__(self):
        s = repr(self.fileobj)
        return '<gzip_ng ' + s[1:-1] + ' ' + hex(id(self)) + '>'
    def write(self, data):
        """Compress ``data`` and write it; returns the uncompressed length."""
        self._check_not_closed()
        if self.mode != WRITE:
            import errno
            raise OSError(errno.EBADF, "write() on read-only GzipNGFile object")
        if self.fileobj is None:
            raise ValueError("write() on closed GzipNGFile object")
        if isinstance(data, bytes):
            length = len(data)
        else:
            # accept any data that supports the buffer protocol
            data = memoryview(data)
            length = data.nbytes
        if length > 0:
            self.fileobj.write(self.compress.compress(data))
            # Track uncompressed size and running CRC for the gzip trailer.
            self.size += length
            self.crc = zlib_ng.crc32(data, self.crc)
            self.offset += length
        return length
class _GzipNGReader(gzip._GzipReader):
    """Decompressing reader for (possibly multi-member) gzip streams,
    backed by zlib-ng's raw-deflate decompressor."""
    def __init__(self, fp):
        # Call the init method of gzip._GzipReader's parent here.
        # It is not very invasive and allows us to override _PaddedFile
        _compression.DecompressReader.__init__(
            self, gzip._PaddedFile(fp), zlib_ng._ZlibDecompressor,
            wbits=-zlib_ng.MAX_WBITS)
        # Set flag indicating start of a new member
        self._new_member = True
        self._last_mtime = None
    def read(self, size=-1):
        """Return up to ``size`` decompressed bytes (everything if size < 0)."""
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
            # Read a chunk of data from the file
            # NOTE(review): assumes the first loop pass takes the needs_input
            # branch so ``buf`` is bound before the check below -- this is
            # the same structure as CPython's gzip module.
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
            if uncompress != b"":
                break
            if buf == b"":
                raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
        # Maintain the running CRC and stream positions for trailer checks.
        self._crc = zlib_ng.crc32(uncompress, self._crc)
        self._stream_size += len(uncompress)
        self._pos += len(uncompress)
        return uncompress
# Aliases for improved compatibility with CPython gzip module, so gzip_ng
# can be used as a drop-in replacement (gzip.GzipFile -> gzip_ng.GzipFile).
GzipFile = GzipNGFile
_GzipReader = _GzipNGReader
def _read_exact(fp, n):
'''Read exactly *n* bytes from `fp`
This method is required because fp may be unbuffered,
i.e. return short reads.
'''
data = fp.read(n)
while len(data) < n:
b = fp.read(n - len(data))
if not b:
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
data += b
return data
def _read_gzip_header(fp):
    '''Read a gzip header from `fp` and progress to the end of the header.

    Returns last mtime if header was present or None otherwise.
    '''
    def _skip_cstring():
        # Consume bytes up to and including a NUL terminator (or EOF).
        while True:
            ch = fp.read(1)
            if not ch or ch == b'\000':
                return

    magic = fp.read(2)
    if magic == b'':
        return None
    if magic != b'\037\213':
        raise BadGzipFile('Not a gzipped file (%r)' % magic)
    method, flag, last_mtime = struct.unpack("<BBIxx", _read_exact(fp, 8))
    if method != 8:
        raise BadGzipFile('Unknown compression method')
    if flag & FEXTRA:
        # Skip the variable-length extra field, if present.
        extra_len, = struct.unpack("<H", _read_exact(fp, 2))
        _read_exact(fp, extra_len)
    if flag & FNAME:
        # Skip the null-terminated original filename.
        _skip_cstring()
    if flag & FCOMMENT:
        # Skip the null-terminated comment.
        _skip_cstring()
    if flag & FHCRC:
        _read_exact(fp, 2)  # Read & discard the 16-bit header CRC
    return last_mtime
def _create_simple_gzip_header(compresslevel: int,
                               mtime=None) -> bytes:
    """
    Write a simple gzip header with no extra fields.

    :param compresslevel: Compresslevel used to determine the xfl bytes.
    :param mtime: The mtime (must support conversion to a 32-bit integer).
    :return: A bytes object representing the gzip header.
    """
    if mtime is None:
        mtime = time.time()
    # XFL: 2 = maximum compression, 4 = fastest, 0 otherwise.
    xfl = {_COMPRESS_LEVEL_BEST: 2, _COMPRESS_LEVEL_FAST: 4}.get(compresslevel, 0)
    # Pack ID1 and ID2 magic bytes, method (8=deflate), header flags (no extra
    # fields added to header), mtime, xfl and os (255 for unknown OS).
    return struct.pack("<BBBBLBB", 0x1f, 0x8b, 8, 0, int(mtime), xfl, 255)
def compress(data, compresslevel=_COMPRESS_LEVEL_BEST, *, mtime=None):
    """Compress data in one shot and return the compressed string.

    compresslevel sets the compression level in range of 0-9.
    mtime can be used to set the modification time. The modification time is
    set to the current time by default.
    """
    if mtime == 0:
        # zlib-ng itself emits a gzip header with 0 mtime when wbits=31;
        # delegating is faster and avoids building the header by hand.
        return zlib_ng.compress(data, level=compresslevel, wbits=31)
    header = _create_simple_gzip_header(compresslevel, mtime)
    # wbits=-15 produces a raw deflate body, framed by our header and trailer.
    body = zlib_ng.compress(data, level=compresslevel, wbits=-15)
    trailer = struct.pack("<LL", zlib_ng.crc32(data), (len(data) & 0xffffffff))
    return header + body + trailer
def decompress(data):
    """Decompress a gzip compressed string in one shot.
    Return the decompressed string.

    Handles multi-member gzip files: members are decompressed in sequence
    and their payloads concatenated.
    """
    decompressed_members = []
    while True:
        fp = io.BytesIO(data)
        if _read_gzip_header(fp) is None:
            # No further member header: we're done.
            return b"".join(decompressed_members)
        # Use a zlib raw deflate compressor
        do = zlib_ng.decompressobj(wbits=-zlib_ng.MAX_WBITS)
        # Read all the data except the header
        decompressed = do.decompress(data[fp.tell():])
        if not do.eof or len(do.unused_data) < 8:
            raise EOFError("Compressed file ended before the end-of-stream "
                           "marker was reached")
        # The 8-byte trailer carries the CRC32 and length of the member.
        crc, length = struct.unpack("<II", do.unused_data[:8])
        if crc != zlib_ng.crc32(decompressed):
            raise BadGzipFile("CRC check failed")
        if length != (len(decompressed) & 0xffffffff):
            raise BadGzipFile("Incorrect length of data produced")
        decompressed_members.append(decompressed)
        # Members may be padded with zero bytes; strip before the next header.
        data = do.unused_data[8:].lstrip(b"\x00")
def _argument_parser():
    """Build the argparse parser for the gzip-like command line interface."""
    parser = argparse.ArgumentParser()
    parser.description = (
        "A simple command line interface for the gzip_ng module. "
        "Acts like gzip.")
    parser.add_argument("file", nargs="?")
    compress_group = parser.add_mutually_exclusive_group()
    # -1/--fast ... -9/--best select the compression level, like gzip.
    level_aliases = {1: "--fast", 9: "--best"}
    for level in range(1, 10):
        flags = [f"-{level}"]
        alias = level_aliases.get(level)
        if alias is not None:
            flags.append(alias)
        compress_group.add_argument(
            *flags, action="store_const", dest="compresslevel",
            const=level,
            help=f"use compression level {level}"
        )
    compress_group.set_defaults(compress=True)
    compress_group.add_argument(
        "-d", "--decompress", action="store_const",
        dest="compress",
        const=False,
        help="Decompress the file instead of compressing.")
    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument("-c", "--stdout", action="store_true",
                              help="write on standard output")
    output_group.add_argument("-o", "--output",
                              help="Write to this output file")
    parser.add_argument("-n", "--no-name", action="store_true",
                        dest="reproducible",
                        help="do not save or restore the original name and "
                             "timestamp")
    parser.add_argument("-f", "--force", action="store_true",
                        help="Overwrite output without prompting")
    # -b flag not taken by gzip. Hidden attribute.
    parser.add_argument("-b", "--buffer-size",
                        default=READ_BUFFER_SIZE, type=int,
                        help=argparse.SUPPRESS)
    return parser
def main():
    """Entry point for the gzip-like command line utility."""
    args = _argument_parser().parse_args()
    compresslevel = args.compresslevel or _COMPRESS_LEVEL_TRADEOFF
    # Work out where output goes: explicit -o, stdout, or a derived filename.
    if args.output:
        out_filepath = args.output
    elif args.stdout:
        out_filepath = None  # to stdout
    elif args.file is None:
        out_filepath = None  # to stdout
    else:
        if args.compress:
            out_filepath = args.file + ".gz"
        else:
            out_filepath, extension = os.path.splitext(args.file)
            if extension != ".gz" and not args.stdout:
                sys.exit(f"filename doesn't end in .gz: {args.file!r}. "
                         f"Cannot determine output filename.")
    if out_filepath is not None and not args.force:
        # Like gzip: ask before clobbering an existing file unless -f.
        if os.path.exists(out_filepath):
            yes_or_no = input(f"{out_filepath} already exists; "
                              f"do you wish to overwrite (y/n)?")
            if yes_or_no not in {"y", "Y", "yes"}:
                sys.exit("not overwritten")
    if args.compress:
        if args.file is None:
            in_file = sys.stdin.buffer
        else:
            in_file = io.open(args.file, mode="rb")
        if out_filepath is not None:
            out_buffer = io.open(out_filepath, "wb")
        else:
            out_buffer = sys.stdout.buffer
        if args.reproducible:
            # -n/--no-name: fixed mtime and empty filename for stable output.
            gzip_file_kwargs = {"mtime": 0, "filename": b""}
        else:
            gzip_file_kwargs = {"filename": out_filepath}
        out_file = GzipNGFile(mode="wb", fileobj=out_buffer,
                              compresslevel=compresslevel, **gzip_file_kwargs)
    else:
        if args.file:
            # Module-level open(): yields a decompressing GzipNGFile.
            in_file = open(args.file, mode="rb")
        else:
            in_file = GzipNGFile(mode="rb", fileobj=sys.stdin.buffer)
        if out_filepath is not None:
            out_file = io.open(out_filepath, mode="wb")
        else:
            out_file = sys.stdout.buffer
    # Hidden -b flag tunes the module-wide read chunk size.
    global READ_BUFFER_SIZE
    READ_BUFFER_SIZE = args.buffer_size
    try:
        while True:
            block = in_file.read(args.buffer_size)
            if block == b"":
                break
            out_file.write(block)
    finally:
        if in_file is not sys.stdin.buffer:
            in_file.close()
        if out_file is not sys.stdout.buffer:
            out_file.close()
if __name__ == "__main__":  # pragma: no cover
    main()
import io
import os
# The compiled _zlib_state extension links against zlib; on Windows the zlib
# DLL directory must be on the search path before the extension is imported.
if hasattr(os, 'add_dll_directory'):
    # On windows, need to be sure that zlib is in the dll directory path before loading ext module
    # In particular, needed for python 3.8+ on Windows
    with os.add_dll_directory(os.environ.get("ZLIB_HOME", "C:/Program Files/zlib/bin")):
        from _zlib_state import Decompressor
else:
    from _zlib_state import Decompressor
class GzipStateFileBase(io.RawIOBase):
    """Raw gzip reader built on the _zlib_state.Decompressor extension.

    Optionally records the decompressor state at deflate block boundaries
    so that a later zseek() can resume decompression mid-file.
    """
    def __init__(self, path, keep_last_state=False, on_block_boundary=None):
        # ``path`` may be a filename or an already-open binary file object.
        if isinstance(path, str):
            self.file = open(path, 'rb')
        else:
            self.file = path
        # 32 + 15: presumably the zlib-style "auto-detect gzip/zlib header,
        # 32K window" wbits value -- confirm against _zlib_state docs.
        self.decomp = Decompressor(32 + 15)
        self.last_state = None
        self.last_state_pos = None
        self.keep_last_state = keep_last_state
        self.on_block_boundary = on_block_boundary
    def zseek(self, pos, state):
        # Jump to a compressed offset and resume from a previously captured
        # decompressor state (raw deflate stream, hence the negative wbits).
        self.file.seek(pos)
        self.decomp = Decompressor(-15)
        self.decomp.set_state(state[0], state[1], state[2])
    def readinto(self, buf):
        # NOTE(review): 1-byte reads are rejected -- the reason is not
        # evident from this file; confirm against the extension's contract.
        if len(buf) == 1:
            raise ValueError()
        count = 0
        while count == 0 and not self.decomp.eof():
            needed_bytes = self.decomp.needs_input()
            if needed_bytes > 0:
                # decompressor needs more input
                self.decomp.feed_input(self.file.read(needed_bytes))
            count += self.decomp.read(outbytes=buf)
            if self.keep_last_state and self.decomp.block_boundary():
                # Capture resumable state at this deflate block boundary.
                self.last_state = self.decomp.get_state()
                self.last_state_pos = self.decomp.total_in()
                if self.on_block_boundary:
                    self.on_block_boundary(count)
        return count
    def read(self, size=-1):
        # NOTE(review): single-byte reads are rejected here as well.
        if size == 1:
            raise ValueError()
        if size == -1:
            return super().read(size)  # reads entire file
        result = None
        while not result and not self.decomp.eof():
            needed_bytes = self.decomp.needs_input()
            if needed_bytes > 0:
                # decompressor needs more input
                self.decomp.feed_input(self.file.read(needed_bytes))
            result = self.decomp.read(outsize=size)
            # NOTE(review): unlike readinto(), the state capture below is not
            # guarded by keep_last_state -- confirm this asymmetry is intended.
            if self.decomp.block_boundary():
                self.last_state = self.decomp.get_state()
                self.last_state_pos = self.decomp.total_in()
                if self.on_block_boundary:
                    self.on_block_boundary(len(result))
        return result
    def eof(self):
        return self.decomp.eof()
    def __bool__(self):
        # Truthy while there is still data to decompress.
        return not self.decomp.eof()
    def readable(self):
        return not self.decomp.eof()
    def fileno(self):
        return self.file.fileno()
    def seek(self):
        # Plain byte seeking is unsupported; use zseek() with a saved state.
        raise NotImplementedError()
    def truncate(self):
        raise NotImplementedError()
    def close(self):
        super().close()
        self.file.close()
class GzipStateFile(io.BufferedIOBase):
    """Buffered wrapper around :class:`GzipStateFileBase`.

    Adds an internal read buffer plus readline()/peek() support, and tracks
    ``output_pos`` (position in the decompressed stream) alongside the
    resumable decompressor state exposed by the raw layer.
    """
    def __init__(self, path, keep_last_state=False, buffer_size=io.DEFAULT_BUFFER_SIZE):
        # ``path`` may be a filename or an open binary file object.
        self.raw = GzipStateFileBase(path, keep_last_state, on_block_boundary=self.on_block_boundary)
        self.buffer_size = buffer_size
        self.buffer = bytearray(buffer_size)
        self.buffer_start = 0   # first unread byte in self.buffer
        self.buffer_stop = 0    # one past the last valid byte in self.buffer
        self.output_pos = 0     # position in the decompressed output stream
        self.last_state_output_pos = 0
    def detach(self):
        """Return the underlying raw stream."""
        return self.raw
    def read(self, count=-1):
        """Read up to ``count`` decompressed bytes (all remaining if -1)."""
        if count == -1:
            return self._readall()
        result = b''
        while len(result) < count and not self.raw.eof():
            result += self.read1(count - len(result))
        return result
    def _readall(self):
        result = b''
        while not self.raw.eof():
            result += self.read(self.buffer_size)
        return result
    def read1(self, count=-1):
        """Read from the buffer, refilling it at most once from raw."""
        start, stop = self.buffer_start, self.buffer_stop
        if start == stop:
            # buffer empty -- refill with a single raw read
            size = self.raw.readinto(self.buffer)
            self.buffer_start, self.buffer_stop = start, stop = 0, size
        size = stop - start
        if count != -1:  # -1 means return as much as is buffered
            size = min(size, count)
        result = self.buffer[start:start+size]
        self.buffer_start += size
        self.output_pos += size
        return result
    def readinto(self, buf):
        """Fill ``buf`` as far as possible; returns the number of bytes."""
        # Bug fix: slicing a bytearray (buf[count:]) creates a copy, so data
        # written by readinto1 never reached the caller's buffer. A
        # memoryview slice shares the underlying memory instead.
        view = memoryview(buf)
        count = 0
        while count < len(buf) and not self.raw.eof():
            count += self.readinto1(view[count:])
        return count
    def readinto1(self, buf):
        """Fill ``buf`` with at most one raw refill; returns bytes copied."""
        start, stop = self.buffer_start, self.buffer_stop
        if start == stop:
            # buffer empty -- refill with a single raw read
            size = self.raw.readinto(self.buffer)
            self.buffer_start, self.buffer_stop = start, stop = 0, size
        # Bug fix: this previously computed min(stop - start, count) with
        # ``count`` undefined (NameError); the limit is the caller's buffer.
        size = min(stop - start, len(buf))
        buf[:size] = self.buffer[start:start+size]
        self.buffer_start += size
        self.output_pos += size
        return size
    def readline(self):
        """Read one line, terminated by ``\\n`` or end of stream."""
        if self.buffer_start != self.buffer_stop:
            try:
                idx = self.buffer[self.buffer_start:self.buffer_stop].index(b'\n')
                # line found in buffer. Use that and advance buffer pointers
                line = self.buffer[self.buffer_start:self.buffer_start+idx+1]
                self.buffer_start += idx + 1
                self.output_pos += idx + 1
                return line
            except ValueError:
                # line break not found in remaining buffer. Start with the
                # remaining buffer and carry on
                line = self.buffer[self.buffer_start:self.buffer_stop]
                self.buffer_start, self.buffer_stop = 0, 0  # buffer consumed
                self.output_pos += len(line)
        else:
            line = b''
        while not line.endswith(b'\n') and not self.raw.eof():
            chunk = self.read1(self.buffer_size)
            try:
                idx = chunk.index(b'\n')
                # rewind the cursor to just past the end of the found line
                self.buffer_start -= (len(chunk) - idx - 1)
                self.output_pos -= (len(chunk) - idx - 1)
                chunk = chunk[:idx+1]
            except ValueError:
                pass  # \n not found, that's OK
            line += chunk
        return line
    def peek(self, count=-1):
        """Like read1() but without consuming the returned bytes."""
        start, stop = self.buffer_start, self.buffer_stop
        if start == stop:
            # buffer empty -- refill with a single raw read
            size = self.raw.readinto(self.buffer)
            self.buffer_start, self.buffer_stop = start, stop = 0, size
        size = stop - start
        if count != -1:
            size = min(size, count)
        # do NOT advance the start pointer
        return self.buffer[start:start+size]
    def zseek(self, pos, state):
        """Resume decompression at compressed offset ``pos`` with ``state``."""
        self.raw.zseek(pos, state)
        self.buffer_start, self.buffer_stop = 0, 0  # drop stale buffered data
    def eof(self):
        return self.raw.eof()
    def __getattr__(self, key):
        # Forward the state bookkeeping attributes to the raw layer.
        if key in ('last_state', 'last_state_pos'):
            return getattr(self.raw, key)
        return getattr(super(), key)
    def close(self):
        super().close()
        self.raw.close()
        self.buffer = None
    def on_block_boundary(self, new_data_count):
        # Remember the decompressed-output position matching raw's last_state.
        self.last_state_output_pos = self.output_pos + new_data_count
.. image:: https://travis-ci.org/killswitch-GUI/zlib_wrapper.svg?branch=master
:target: https://travis-ci.org/killswitch-GUI/zlib_wrapper
.. image:: https://coveralls.io/repos/github/killswitch-GUI/zlib_wrapper/badge.svg?branch=master :target: https://coveralls.io/github/killswitch-GUI/zlib_wrapper?branch=master
zlib_wrapper
--------
A very small library for building crc32 header on top of zlib (in 2.7 standard library). Was built to learn pip packaging, and implement compression before encryption for EmPyre https://github.com/adaptivethreat/EmPyre.
To get up and running:
>>> from zlib_wrapper import compress
>>> from zlib_wrapper import decompress
zlib_wrapper compression step-by-step
--------
To get you crc32 for supplied data or string:
>>> data = "Killswitc-gui is a sick handle"
>>> c = compress.compress()
>>> start_crc32 = ac.crc32_data(data)
To get your compressed data for the supplied data or string:
>>> comp_data = a.comp_data(data)
To build your custom zlib header with crc32:
>>> final_comp_data = a.build_header(comp_data, start_crc32)
zlib_wrapper decompression step-by-step
--------
Decompression goes through crc32 checks and returns a custom dictionary object.
To decompress your compressed data:
>>> dec_data = b.dec_data(final_comp_data)
{'data': 'Killswitc-gui is a sick handle', 'header_crc32': 2727504892, 'crc32_check': True, 'dec_crc32': 2727504892}
| zlib_wrapper | /zlib_wrapper-0.1.3.tar.gz/zlib_wrapper-0.1.3/README.rst | README.rst |
import sqlite3
import zlib
class ZlibDB:
    """A minimal key/value store backed by SQLite with zlib-compressed values."""
    def __init__(self, db_path, encoding='utf-8', level=9):
        # encoding: used to encode str values before compression.
        # level: default zlib compression level for put().
        self.db_path = db_path
        self.encoding = encoding
        self.level = level
        self._connect()
    def _connect(self):
        # Open the connection and make sure the single kv table exists.
        self.conn = sqlite3.connect(self.db_path)
        self.conn.execute(
            'CREATE TABLE IF NOT EXISTS kv (key TEXT UNIQUE, value BLOB)')
    def close(self):
        """Commit pending writes and close; reopen via the ``with`` protocol."""
        self.conn.commit()
        self.conn.close()
        self.conn = None
    def commit(self):
        self.conn.commit()
    def get(self, key):
        """Returns the (decompressed) value for the specific key, or None."""
        cursor = self.conn.execute('SELECT value FROM kv WHERE key = ?', (key,))
        row = cursor.fetchone()
        if row is None:
            return None
        return zlib.decompress(row[0])
    def put(self, key, value, level=None):
        """Saves the value for the specific key.

        The `value` is compressed using zlib at the given compression
        `level` (defaulting to the level set in the constructor) before
        being written to the db.
        """
        effective_level = self.level if level is None else level
        if isinstance(value, str):
            value = value.encode(self.encoding)
        if not isinstance(value, bytes):
            raise TypeError('value must be bytes or string')
        compressed = zlib.compress(value, level=effective_level)
        self.conn.execute(
            'REPLACE INTO kv (key, value) VALUES (?,?)', (key, compressed))
    def delete(self, key):
        """Deletes the item for the specific key (no error if absent)."""
        self.conn.execute('DELETE FROM kv WHERE key = ?', (key,))
    def size(self):
        """Returns the number of items in the db."""
        return self.conn.execute('SELECT COUNT(1) FROM kv').fetchone()[0]
    def keys(self):
        """Yield every key."""
        for (key,) in self.conn.cursor().execute('SELECT key FROM kv'):
            yield key
    def values(self):
        """Yield every decompressed value."""
        for (blob,) in self.conn.cursor().execute('SELECT value FROM kv'):
            yield zlib.decompress(blob)
    def items(self):
        """Yield (key, decompressed value) tuples."""
        for key, blob in self.conn.cursor().execute('SELECT key, value FROM kv'):
            yield (key, zlib.decompress(blob))
    def range(self, start, end):
        """Yield (key, value) tuples for keys in the range [start, end)."""
        rows = self.conn.cursor().execute(
            'SELECT key, value FROM kv WHERE key >= ? AND key < ? ORDER BY key ASC',
            (start, end)
        )
        for key, blob in rows:
            yield (key, zlib.decompress(blob))
    def __contains__(self, key):
        """Returns True if the key exists in the db; False otherwise."""
        found = self.conn.execute(
            'SELECT 1 FROM kv WHERE key = ?', (key,)).fetchone()
        return found is not None
    def __getitem__(self, key):
        value = self.get(key)
        if value is None:
            raise KeyError(key)
        return value
    def __setitem__(self, key, value):
        self.put(key, value)
    def __delitem__(self, key):
        if key not in self:
            raise KeyError(key)
        self.delete(key)
    def __iter__(self):
        return self.keys()
    def __len__(self):
        return self.size()
    def __enter__(self):
        if self.conn is None:
            self._connect()
        return self
    def __exit__(self, *exc_info):
        self.close()
def open(*args, **kwargs):
    """Open a zlib-compressed key-value database.

    All positional and keyword arguments are forwarded unchanged to
    the :class:`ZlibDB` constructor; the new instance is returned.
    """
    db = ZlibDB(*args, **kwargs)
    return db
# __zlibrary_module__
Python module for searching books and fetching book links from Zlib
## __How this works__
Scrapes the information from the HTML of the page using `beautifulsoup4`.
## __Requirements__
BeautifulSoup4 `pip install bs4`<br/>
Requests `pip install requests`
## __Installation__
`pip install zlibrary_module`
## __Usage__
```python
from zlibrary_module import zlib
zlib.search("The Alchemist")
zlib.select("The Alchemist", 1)
```
Output
```python
"""
1.The Alchemist 1 The Alchemist (PDF)
2.Saint Germain: Master Alchemist
3.The Red Lion- The Elixir of Eternal Life (An Alchemist Novel) (PDF)
4.The Alchemist
5.Parusavedi, Alchemist in Telugu, పరుసవేది. (PDF)
6.Rappers best friend : an instrumental series
7.Herbal Alchemists Handbook, The: A Grimoire of Philtres. Elixirs, Oils, Incense, and Formulas for Ritual Use (PDF)
8.Alchemist Ph.C-HD LIVE Operators Manual Manual (PDF)
9.The Alchemist by Ben Jonson (PDF)
10.The Neutronium Alchemist
"""
https://zlibrary.to/pdfs/the-alchemist-1-the-alchemist-pdf
```
## __Disclaimer__
I'm in no way related to that website / person behind it / the kind of information hosted on it. The author of this library isn't responsible for what you do with this library.
| zlibrary-module | /zlibrary_module-0.0.3.tar.gz/zlibrary_module-0.0.3/README.md | README.md |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.