# NOTE: a garbled dataset-export table header ("code stringlengths ... |---|---|")
# preceded this file; it was not part of the original source and has been removed.
import argparse
import ast
import _ast
import imp
import inspect
try:
import meta #meta is optional, and used for parameter detection if the source code is not found.
except ImportError:
pass
import sys
import re
from collections import OrderedDict, namedtuple
import commentjson as json
from .modular_runner import *
from .params import get_params
param_tuple = namedtuple('param_tuple', ['param', 'is_optional'])
class ParamFinder(ast.NodeVisitor):
    '''
    AST graph walker designed to find all uses of the params namespace in the ast graph.
    There is some magic in handling the 'self' keyword, which is used to denote class-specific params.
    For example, params.self.output_dir in class RSEMRunner will be mapped to the tuple ('RSEMRunner', 'output_dir')

    Callers must set `current_class` (the stage name) before visiting a class's
    methods, since 'self' components are rewritten to that name.
    '''
    def __init__(self):
        #ordered list of param_tuple entries, in the order first seen
        self.params_found = []
    def filter_private(self):
        '''
        Removes params whose final component starts with an underscore
        (private/internal params that should not appear in the output json).
        '''
        #bug fix: removed leftover debug print of params_found
        self.params_found = [x for x in self.params_found if x.param[-1][0]!='_']
    def register_param(self, raw_tuple):
        '''
        Creates a list of params that keeps the order they were seen, but removes duplicates.
        '''
        if self.is_valid_param(raw_tuple):
            self.params_found.append(self.format_param(raw_tuple))
    def is_valid_param(self, x):
        '''
        Given a raw tuple formed from an AST path, we see if it's well formed. A well formed
        tuple is currently defined as any param that is not wholly containing self and optional
        tags. More sophisticated validation here is possible.
        '''
        if len(x) == 1 and x[0] in ['self', 'optional']:
            return False
        if len(x) == 2 and x[0] in ['self', 'optional'] and x[1] in ['self', 'optional']:
            return False
        return True
    def format_param(self, raw_tuple):
        '''
        Converts a raw ast trace from the ast graph into a param_tuple named tuple. It:
        -strips out optional and puts it in its own field
        -converts self into the current class name.
        Raises ValueError if more than two components remain after rewriting.
        '''
        try:
            raw_tuple.remove('optional')
            is_optional = True
        except ValueError: #optional is not in the param.
            is_optional = False
        try:
            while True:
                self_index = raw_tuple.index('self')
                raw_tuple[self_index] = self.current_class
        except ValueError: #no 'self' components remain
            pass
        if len(raw_tuple) > 2:
            raise ValueError('Malformed parameter tuple: {}'.format(raw_tuple))
        return param_tuple(tuple(raw_tuple), is_optional)
    def uniqueify(self):
        '''
        Removes duplicate params in-place, keeping the first occurrence of each.
        '''
        seen = set()
        deduped = []
        for x in self.params_found:
            if x not in seen:
                seen.add(x)
                deduped.append(x)
        self.params_found = deduped
    def old_visit_Attribute(self, node):
        '''
        Deprecated predecessor of visit_Attribute; kept for reference only.
        TODO: check that it's read and not written
        '''
        if isinstance(node.value, _ast.Attribute) and node.value.attr == 'params' and node.attr != 'self':
            self.params_found.append(node.attr)
        #This next bit identifies lines two steps removed from params. In that case, we know that the middle attribute is 'self' and we
        #replace it with the relevant class name.
        elif isinstance(node.value, _ast.Attribute) and isinstance(node.value.value, _ast.Attribute) and node.value.value.attr == 'params':
            self.params_found.append((self.current_class, node.attr))
        self.generic_visit(node)
    def visit_Attribute(self, node):
        '''
        Walks an attribute chain (e.g. x.params.self.foo) looking for a 'params'
        component; everything to its right is registered as a param path.
        TODO: check that it's read and not written
        '''
        if isinstance(node, _ast.Attribute):
            current_node = node
            param_builder = [node.attr]
            while isinstance(current_node.value, _ast.Attribute):
                if current_node.value.attr == 'params':
                    self.register_param(param_builder[::-1])
                    break
                param_builder.append(current_node.value.attr)
                current_node = current_node.value
        self.generic_visit(node)
def case_insensitive_list_match(query, l):
    """
    Returns the first element of l equal to query ignoring case, or None if
    no element matches.
    """
    target = query.lower()
    for item in l:
        if item.lower() == target:
            return item
    return None
def walk_all_methods(pf, class_object):
    '''
    Runs the ParamFinder visitor (pf) over the AST of every attribute of
    class_object that can be analyzed. Methods are parsed from source where
    possible; when source is unavailable we fall back to decompiling bytecode
    with the optional 'meta' package. Attributes that cannot be analyzed at
    all (builtins, non-functions) are skipped.
    '''
    for func in dir(class_object):
        try:
            try:
                #we get each method from source, deindent it, and then parse it
                source_lines = inspect.getsourcelines(getattr(class_object,func))[0]
                indent = len(source_lines[0]) - len(source_lines[0].lstrip())
                source_lines = [line[indent:] for line in source_lines]
                ast_tree = ast.parse(''.join(source_lines))
            except IOError:
                print('Module source code not found: ({}, {}). Decompiling instead.'.format(class_object, func))
                try:
                    ast_tree = meta.decompile(getattr(class_object,func).__code__)
                except AssertionError:
                    print('meta failed to decompile function {} in class {}. Some parameters may be missing from the generated file.'.format(func, class_object.__name__))
                    continue
                except NameError:
                    print('meta is not installed. Parameters may be missing from the generated file.')
                    #bug fix: without this continue we would fall through and visit an
                    #unbound (or stale, from a previous iteration) ast_tree
                    continue
            except TypeError: #non-function attributes (e.g. builtins) land here
                continue
            pf.visit(ast_tree)
        except AttributeError:
            continue
class JsonBuilderMain():
    """
    Compiles a params proto file into a template that must be filled out for running ZIPPY.
    """
    def __init__(self, input_args):
        self.input_args = input_args
        if self.input_args.out:
            self.output_file = self.input_args.out
        else:
            #no explicit output path: foo.json -> foo.compiled.json
            arg_split = self.input_args.proto.split('.')
            self.output_file = '.'.join(arg_split[:-1]+['compiled']+ [arg_split[-1]])
        self.params = get_params(self.input_args.proto, proto=True)
        self.modules_loaded = 0
        for x in getattr(self.params, 'imports', []):
            self.load_external_modules(x)
    def load_external_modules(self, module_path):
        '''
        Imports a user python file and injects every *Runner class it defines
        into this module's global namespace so stage names can resolve to them.
        '''
        modules_to_add = {}
        m = imp.load_source('user_module_import_{}'.format(self.modules_loaded), module_path)
        self.modules_loaded += 1
        for k in vars(m).keys():
            if 'Runner' in k:
                modules_to_add[k] = vars(m)[k]
        globals().update(modules_to_add)
    def make_params_file(self):
        '''
        Given the params.stages list in the input params file, finds all the needed params to run the pipeline.
        Each stage will have an identifier, and may have a previous stage. We also add a 'scratch_path' for the pyflow workspace.
        '''
        pf = ParamFinder()
        for stage in self.params.stages:
            class_name = case_insensitive_list_match('{}Runner'.format(stage), globals().keys())
            try:
                class_object = globals()[class_name]
            except KeyError:
                raise KeyError("One of your workflow stages, {}, is not found. If it is defined in a custom file, make sure that file is imported in your params file. If it is built-in, make sure you are spelling it correctly!".format(stage))
            pf.current_class = stage
            walk_all_methods(pf, class_object)
        pf.uniqueify()
        pf.filter_private()
        try:
            defaults = get_params(self.input_args.defaults, proto=True)
        except IOError:
            print('Warning: No defaults file found.')
            defaults = None
        output_map = OrderedDict()
        if hasattr(self.params, 'imports'):
            output_map["imports"] = self.params.imports
        identifiers = set()
        #list comprehensions (not filter()) so each sequence can be iterated once per stage
        optional_params = [x for x in pf.params_found if x.is_optional]
        required_params = [x for x in pf.params_found if not x.is_optional]
        output_map["stages"] = []
        #stage params
        for (i,stage) in enumerate(self.params.stages):
            identifier = get_identifier(stage, identifiers)
            stage_map = {"stage": stage, "identifier": identifier}
            if i > 0:
                #chain stages linearly: each stage's input is the previous stage's output
                stage_map['previous_stage'] = output_map["stages"][i-1]["identifier"]
            #bug fix: removed leftover debug prints of output_map and param
            for param in required_params:
                param = param.param
                if param[0] == stage and len(param)>1:
                    if hasattr(defaults, 'stages') and stage in defaults.stages and param[1] in defaults.stages[stage]:
                        stage_map[param[1]] = defaults.stages[stage][param[1]]
                    else:
                        stage_map[param[1]] = ''
            for param in optional_params:
                if self.input_args.default_behavior == 'ignore':
                    continue
                param = param.param
                if param[0] == stage:
                    if self.input_args.default_behavior == 'include':
                        if stage in defaults.stages and param[1] in defaults.stages[stage]:
                            stage_map[param[1]] = defaults.stages[stage][param[1]]
                        else:
                            stage_map[param[1]] = ''
                    elif self.input_args.default_behavior == 'warn':
                        if not hasattr(defaults, 'stages') or stage not in defaults.stages or param[1] not in defaults.stages[stage]:
                            print("Warning: parameter {} is not included in stage {} defaults".format(param[1], stage))
            output_map["stages"].append(stage_map)
        #global params
        for param in required_params:
            if len(param.param) > 1:
                continue
            param = param.param[0]
            if hasattr(defaults, param):
                output_map[param] = getattr(defaults, param)
            else:
                output_map[param] = ''
        for param in optional_params:
            if len(param.param) > 1:
                continue
            if self.input_args.default_behavior == 'ignore':
                continue
            param = param.param[0]
            if self.input_args.default_behavior == 'include':
                if hasattr(defaults, param):
                    output_map[param] = getattr(defaults, param)
                else:
                    output_map[param] = ''
            elif self.input_args.default_behavior == 'warn':
                if not hasattr(defaults, param):
                    print("Warning: global parameter {} is not included".format(param))
        output_map["scratch_path"] = ''
        with open(self.output_file, 'w') as f:
            json.dump(output_map, f, indent=4)
def get_identifier(stage, identifiers):
    '''
    Called when beginning the output for a new pipeline stage. Returns a unique id for that pipeline stage.
    Appends or increments a numeric suffix (_1, _2, ...) until the name is
    unused, then records it in the identifiers set.
    '''
    while stage in identifiers:
        match = re.search('(.+_)(\d+)$', stage)
        if match is None: #plain stage name: start the numeric suffix at 1
            stage = stage + '_1'
        else: #already suffixed: bump the trailing number
            stage = match.group(1) + str(int(match.group(2)) + 1)
    identifiers.add(stage)
    return stage
def get_argparser():
    '''
    Builds the command-line parser for compiling a proto-workflow into a
    ZIPPY-useable workflow json.
    '''
    arg_parser = argparse.ArgumentParser(
        usage='make_params is used to compile a proto-workflow into a form useable by ZIPPY',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument(
        'proto',
        help='The proto-workflow to compile.')
    arg_parser.add_argument(
        'out', nargs='?',
        help='The name of the compiled workflow to create. If none, prepends compiled to the filetype as output (e.g., foo.compiled.json)')
    arg_parser.add_argument(
        '--defaults', default='defaults.json',
        help='File with default parameter values (in json format)')
    arg_parser.add_argument(
        '--default_behavior', default='warn', choices=['include', 'warn', 'ignore'],
        help='How to treat optional parameters. <Include> them all in your file, <warn> you if they are not in the specified defaults file, or <ignore> them.')
    return arg_parser
if __name__ == '__main__':
    #command-line entry point: compile the given proto-workflow into a json template
    parser = get_argparser()
    input_params = parser.parse_args()
    jsb = JsonBuilderMain(input_params)
    jsb.make_params_file()
import fnmatch
import glob
import itertools
import os
import random
import resource
import string
import sys
from abc import ABCMeta, abstractmethod
from collections import defaultdict, namedtuple
from functools import wraps

import numpy

from pyflow import WorkflowRunner

from .utils import sub_check_wilds
from star import SingleStarFlow
from bwa import BWAWorkflow
from samplesheet import SampleSheet, check_valid_samplename
from bcl2fastq import Bcl2Fastq
SampleTuple = namedtuple('SampleTuple', ['id', 'name'])
zippy_dir = os.path.dirname(__file__)
def passthrough(*all_pass):
    """
    Decorator factory for get_output methods. After the wrapped method builds
    its output dict, each listed file type is fetched from the previous stages
    (via collect_input) and forwarded into the dict when anything was found.
    """
    def passthrough_decorator(get_output):
        @wraps(get_output)
        def func_wrapper(self, sample):
            outputs = get_output(self, sample)
            for file_type in all_pass:
                forwarded = self.collect_input(sample, file_type)
                if forwarded is not None and len(forwarded) > 0:
                    outputs[file_type] = forwarded
            return outputs
        return func_wrapper
    return passthrough_decorator
def organize_fastqs(fastq_files, is_paired_end=True):
    '''
    Helper used to separate R1/R2: for paired-end data with both reads present,
    returns a tuple (r1_list, r2_list); otherwise returns the input list as-is.
    '''
    r1 = [f for f in fastq_files if '_R1_' in f]
    r2 = [f for f in fastq_files if '_R2_' in f]
    if not (is_paired_end and r1 and r2):
        return fastq_files
    #no file may appear in both read groups
    assert not set(r1).intersection(set(r2))
    return (r1, r2)
class ModularRunner():
    '''
    Naming convention: a collect method is called at the front end of a stage to take in data.
    A get method is called at the back end of a stage to give data to the next stage.
    typically, collect methods should never need to be overridden. Get methods must be changed when
    the stage does something unusual (such as merging or creating samples).
    '''
    #python 2 style abstract-base-class declaration
    __metaclass__ = ABCMeta
    def __init__(self, identifier, params, previous_stages):
        #identifier: unique stage instance name (from the compiled workflow json)
        #params: the zippy params namespace; params.self holds this stage's own params
        #previous_stages: stage objects whose outputs feed this stage
        self.identifier = identifier
        self.params = params
        self.previous_stages = previous_stages
        self.set_up_optional_params()
        try:
            self.sample_sheet = SampleSheet(self.params.sample_sheet, fix_dup_sample_names=self.params.optional.zippy_fix_duplicate_sample_names)
        except AttributeError:
            #no sample_sheet param configured for this workflow
            self.sample_sheet = None
    def set_up_optional_params(self):
        '''
        Merges stage-defined defaults into params.self, and zippy's built-in
        defaults into the global params namespace.
        '''
        overrides = self.define_optionals()
        self.params.self._update_from_dict(overrides)
        overrides = self.define_zippy_optionals()
        self.params._update_from_dict(overrides)
    def define_optionals(self):
        '''
        Overrideable method that can be used to return a map of default values. If these parameters
        are not set in the json file, they will be added here
        '''
        return {}
    def define_zippy_optionals(self):
        '''
        Zippy's built-in parameters have their defaults given here.
        '''
        return {'zippy_fix_duplicate_sample_names': False}
    @abstractmethod
    def get_output(self, sample):
        '''
        This function is called to return the output generated by this stage.
        'sample' is the sample named_tuple for that sample
        The form of the output is a dictionary from zippy type to file path. See the
        wiki for more details.
        '''
        pass
    def get_dependencies(self, sample):
        '''
        This function is called for a stage to return its own outgoing tasks related to a sample.
        I.e., this is the list of workflow tasks that must be run before the stage is complete for
        that sample. By default, we assume that self.task is a
        dict (or, often, a defaultdict of lists). This can be overloaded for more complicated tasks.
        '''
        return self.task[sample]
    @abstractmethod
    def workflow(self, workflowRunner):
        '''
        Add all of this stage's tasks to the workflow graph using the input workflowRunner.
        '''
        pass
    def get_memory_count(self, count):
        '''
        Checks the default core count through the filter of a global max memory, and a in-stage set parameter. Memory is defined in MB
        '''
        #NOTE(review): existence is checked on params.self.memory but the value is read
        #from params.self.optional.memory -- confirm both resolve to the same setting
        if hasattr(self.params.self, 'memory'):
            count = self.params.self.optional.memory
        if hasattr(self.params, 'max_memory'):
            count = min(count, self.params.optional.max_memory)
        return count
    def get_core_count(self, count):
        '''
        Checks the default core count through the filter of a global max cores count, and a in-stage set parameter
        '''
        if hasattr(self.params.self, 'cores'):
            count = self.params.self.optional.cores
        if hasattr(self.params, 'max_cores'):
            count = min(count, self.params.optional.max_cores)
        return count
    def get_samples(self):
        '''
        Returns a list of all sample tuples that this stage returns AS OUTPUT. By default, stages do not change the
        sample list, and so we just chain our samples along our previous stages by calling collect samples.
        '''
        return self.collect_samples()
    def collect_samples(self):
        '''
        Returns a list of all sample tuples that this stage must process by taking the union of all previous stages.
        '''
        samples = set()
        for previous_stage in self.previous_stages:
            if previous_stage is not None:
                samples |= set(previous_stage.get_samples())
        return samples
    def setup_workflow(self, workflowRunner):
        '''
        If the skip param for this stage is set to true, we don't add its workflow.
        '''
        try:
            skip = self.params.self.optional.skip
        except AttributeError:
            #no skip param set: build the workflow normally
            self.workflow(workflowRunner)
        else:
            if skip:
                #skipped stage: report no dependencies so downstream stages can proceed
                self.get_dependencies=lambda x: []
            else:
                self.workflow(workflowRunner)
    def collect_input(self, sample, file_type, as_list=False):
        '''
        For input, we transparently return either a single instance or a list of 2 or more instances, depending on what we have.
        Returns None when no previous stage produced files of file_type for this sample.
        '''
        results = []
        for previous_stage in self.previous_stages:
            try:
                stage_results = previous_stage.get_output(sample)[file_type]
                if isinstance(stage_results, list):
                    results.extend(stage_results)
                elif isinstance(stage_results, str):
                    results.append(stage_results)
                elif isinstance(stage_results, unicode): #python 2: normalize unicode to str
                    results.append(str(stage_results))
                else:
                    #NOTE(review): this TypeError is caught by the except clause below,
                    #so unexpected output types are silently skipped rather than raised
                    raise TypeError('Input for {} received neither a string or list: {}'.format(self.identifier, stage_results))
            except (KeyError,TypeError):
                #stage produced no output (None) or not this file type; skip it
                continue
        if len(results) > 1:
            results = list(set(results)) #we want to remove duplicates that got to this point through multiple paths (e.g., passthroughs)
            if len(results) > 1 or as_list:
                return results
            else:
                return results[0]
        if len(results) == 1:
            if as_list:
                return results
            else:
                return results[0]
        else:
            return None
    def collect_dependencies(self, sample):
        '''
        Gathers the workflow task ids of all previous stages for this sample,
        flattened into a single list.
        '''
        dependencies = []
        for previous_stage in self.previous_stages:
            if previous_stage is None:
                continue
            new_dependencies = previous_stage.get_dependencies(sample)
            if isinstance(new_dependencies, list):
                dependencies.extend(new_dependencies)
            elif isinstance(new_dependencies, str):
                dependencies.append(new_dependencies)
            elif isinstance(new_dependencies, unicode): #python 2 = derp
                dependencies.append(str(new_dependencies))
            else:
                raise TypeError('Dependencies for {} received neither a string or list: {}'.format(self.identifier, new_dependencies))
        return dependencies
class Bcl2FastQRunner(ModularRunner):
    """
    Produces fastqs for the samples in your samplesheet. Built-in parameter turns off lane splitting, as the framework makes the assumption
    one sample = one set of fastqs.
    Input: none explicitly!
    Output: fastq
    """
    def get_samples(self):
        '''
        Derives the sample list from the sample sheet, since this stage has no
        previous stages to chain from.
        '''
        sample_id_map = {}
        samples = []
        for line in self.sample_sheet.get("Data"):
            if line.get("Sample_ID") == '' and line.get("Sample_Name") == '':
                continue
            sample = line.get("Sample_ID")
            if sample not in sample_id_map:
                #samples.append(SampleTuple(sample, self.sample_sheet.sample_id_to_sample_name(sample)))
                samples.append(SampleTuple(sample, self.sample_sheet.unique_sample_name_map[sample]))
                sample_id_map[sample] = True
        return samples
    def get_output(self, sample):
        """
        Limitations: bcl2fastq2 has insanely varied output given the input. Currently, we assume that sample_index is consecutively numeric.
        #TODO: detect paired end automatically
        """
        sample_first_instance = {}
        samples_to_return = []
        for line in self.sample_sheet.get("Data"):
            if line.get("Sample_ID") == '' and line.get("Sample_Name") == '':
                continue
            if sample.id == line.get("Sample_ID"):
                if not sample in sample_first_instance:
                    sample_first_instance[sample] = line.data_i
                else: #if no_lane_splitting=True this is needed
                    continue
                #if line.has("Lane"): #this is not necessary when no_lane_splitting=True
                #    samples_to_return.append("{path}/{sample_name}_S{sample_index}_L{lane:03d}_R1_001.fastq.gz".format(
                #        path=self.params.self.output_dir, sample_name=line.get("Sample_Name"), sample_index=sample_first_instance[sample], lane=int(line.get("Lane"))))
                #    samples_to_return.append("{path}/{sample_name}_S{sample_index}_L{lane:03d}_R2_001.fastq.gz".format(
                #        path=self.params.self.output_dir, sample_name=line.get("Sample_Name"), sample_index=sample_first_instance[sample], lane=int(line.get("Lane"))))
                #else: #no lane information
                samples_to_return.append("{path}/{sample_name}_S{sample_index}_R1_001.fastq.gz".format(
                    path=self.params.self.output_dir, sample_name=line.get("Sample_Name"), sample_index=sample_first_instance[sample]))
                samples_to_return.append("{path}/{sample_name}_S{sample_index}_R2_001.fastq.gz".format(
                    path=self.params.self.output_dir, sample_name=line.get("Sample_Name"), sample_index=sample_first_instance[sample]))
        #bug fix: removed leftover debug prints ('z' markers and samples_to_return)
        return {'fastq': samples_to_return}
    def get_dependencies(self, sample):
        #a single bcl2fastq run covers every sample, so all samples share one task
        return self.bcl2fastq_task
    def workflow(self, workflowRunner):
        '''
        Adds one Bcl2Fastq sub-workflow covering all samples, forcing
        --no-lane-splitting unless the user's args already set it.
        '''
        dependencies = []
        for sample in self.collect_samples():
            dependencies.extend(self.collect_dependencies(sample))
        if hasattr(self.params.self, 'args'):
            args = " " + self.params.self.optional.args
            if "no-lane-splitting" not in args:
                args+=" --no-lane-splitting"
        else:
            args = '--no-lane-splitting'
        bcl2fastq_wf = Bcl2Fastq(self.params.bcl2fastq_path, self.params.sample_path, self.params.self.output_dir, self.params.sample_sheet, args=args, max_job_cores=self.get_core_count(16))
        self.bcl2fastq_task = workflowRunner.addWorkflowTask(self.identifier, bcl2fastq_wf, dependencies=dependencies)
class RSEMRunner(ModularRunner):
    """
    Currently, the RSEM runner only goes from fastq to quants.
    TODOs:
    -Add support for chaining based on the RSEM bam file
    -Add support for running from bams
    Input: fastqs. The fastqs are assumed to contain r1/r2 information.
    Output: .gene.results files.
    """
    def get_output(self, sample):
        #rsem: per-gene quantification; rsem_model: rsem's fitted model file;
        #transcript_bam: transcript-space alignments
        return {'rsem': os.path.join(self.params.self.output_dir,sample.name+".genes.results"),
            'rsem_model': os.path.join(self.params.self.output_dir,sample.name+".stat",sample.name+".model"),
            'transcript_bam': os.path.join(self.params.self.output_dir,sample.name+".transcript.bam")}
    def define_optionals(self):
        #args: extra command-line arguments appended to the rsem call
        return {'args': ''}
    def workflow(self, workflowRunner):
        '''
        Builds one rsem-with-STAR task per sample, paired- or single-end
        depending on the sample sheet.
        '''
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(16)
        for sample in self.collect_samples():
            sample_name = sample.name
            fastq_files = self.collect_input(sample, 'fastq', as_list=True)
            fastq_files = organize_fastqs(fastq_files, self.sample_sheet.is_paired_end())
            if self.sample_sheet.is_paired_end():
                rsem_command = "{rsem_path} --paired-end {r1} {r2} {reference} {sample_name} --star -p {cores} --star-path {star_path} --star-gzipped-read-file --temporary-folder {temp_folder}".format( #I have no idea why we need to specify a directory below reference. So weird.
                    rsem_path=self.params.rsem_path,
                    r1=",".join(fastq_files[0]),
                    r2=",".join(fastq_files[1]),
                    reference=self.params.rsem_annotation,
                    sample_name=os.path.join(self.params.self.output_dir,sample_name),
                    star_path=self.params.star_path[:-5], #-5 due to the star_path variable having the /STAR suffix, which rsem doesn't want
                    genome_dir=self.params.genome, #NOTE(review): genome_dir is not referenced in the format string above
                    cores=cores,
                    temp_folder=os.path.join(self.params.scratch_path,sample.id))
            else:
                rsem_command = "{rsem_path} {r1} {reference} {sample_name} --star -p {cores} --star-path {star_path} --star-gzipped-read-file --temporary-folder {temp_folder}".format( #I have no idea why we need to specify a directory below reference. So weird.
                    rsem_path=self.params.rsem_path,
                    r1=",".join(fastq_files),
                    reference=self.params.rsem_annotation,
                    sample_name=os.path.join(self.params.self.output_dir,sample_name),
                    star_path=self.params.star_path[:-5], #-5 due to the star_path variable having the /STAR suffix, which rsem doesn't want
                    genome_dir=self.params.genome, #NOTE(review): genome_dir is not referenced in the format string above
                    cores=cores,
                    temp_folder=os.path.join(self.params.scratch_path,sample.id))
            rsem_command += ' ' + self.params.self.optional.args
            workflowRunner.flowLog(rsem_command)
            workflowRunner.flowLog("output: {}".format(self.get_output(sample)))
            dependencies = self.collect_dependencies(sample)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id), rsem_command, dependencies=dependencies, nCores=cores, memMb=self.get_memory_count(1024*100)))
class BWARunner(ModularRunner):
    '''
    Runs BWA.
    Input: fastq
    Output: bam
    '''
    def get_output(self, sample):
        #final indexed bam produced by the mv/index tasks in workflow()
        return {'bam': os.path.join(self.params.self.output_dir, sample.name, sample.name+".raw.bam")}
    def define_optionals(self):
        #args: extra flags passed to bwa; genome_filename: fasta name inside params.genome
        return {'args': '', 'genome_filename': 'genome.fa'}
    def workflow(self, workflowRunner):
        '''
        Per sample: run the BWA sub-workflow, rename its out.sorted.bam to
        <sample>.raw.bam, then index the result with samtools.
        '''
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(20)
        mem = self.get_memory_count(1024 * 127)
        args = self.params.self.optional.args
        for sample in self.collect_samples():
            sample_name = sample.name
            dependencies = self.collect_dependencies(sample)
            fastq_files = self.collect_input(sample, 'fastq', as_list=True)
            fastq_files = organize_fastqs(fastq_files, self.sample_sheet.is_paired_end())
            if self.sample_sheet.is_paired_end():
                if len(fastq_files[0]) != 1 or len(fastq_files[1]) != 1:
                    raise NotImplementedError("bwa only supports one fastq per sample")
                #put R1, then R2 in a list
                fastq_files = [x for x in itertools.chain(*fastq_files)]
                bwa_wf = BWAWorkflow(os.path.join(self.params.self.output_dir, sample_name),
                    self.params.bwa_path, self.params.samtools_path, os.path.join(self.params.genome, self.params.self.optional.genome_filename), cores, mem, fastq_files, sample=sample.id, args=args)
            else:
                if len(fastq_files) != 1:
                    raise NotImplementedError("bwa only supports one fastq per sample: {}".format(fastq_files))
                bwa_wf = BWAWorkflow(os.path.join(self.params.self.output_dir, sample_name),
                    self.params.bwa_path, self.params.samtools_path, os.path.join(self.params.genome, self.params.self.optional.genome_filename), cores, mem, fastq_files, sample=sample.id, args=args)
            bwa_task = workflowRunner.addWorkflowTask('bwa_{}_{}'.format(self.identifier, sample.id), bwa_wf, dependencies=dependencies)
            #the rename is cheap, so run it locally; indexing waits on the rename
            mv_task = workflowRunner.addTask('mv_{}_{}'.format(self.identifier, sample.id), "mv {} {}".format(os.path.join(self.params.self.output_dir, sample_name, "out.sorted.bam"), os.path.join(self.params.self.output_dir, sample_name, sample_name+".raw.bam")), dependencies=bwa_task, isForceLocal=True)
            self.task[sample].append(workflowRunner.addTask('index_{}_{}'.format(self.identifier, sample.id), "{} index {}".format(self.params.samtools_path, os.path.join(self.params.self.output_dir,sample_name, sample_name+".raw.bam")), dependencies=mv_task))
class CommandLineRunner(ModularRunner):
    '''
    The CommandLineRunner allows the execution of code which is not modularized for zippy. It uses a simple templating system to create
    sample specific commands. To run command line runner, you need to specify:
    -input_format and output_format: the filetypes you will require and hand off
    -output from the stage path, how to get to the file outputs. This command can be templated to use the sample id (sample.id) or sample name (sample.name)
    -command The general command you wish to run. This command can be templated to use the sample id (sample.id) or sample name (sample.name) for
    per-sample command execution. It can also be templated with 'self.output' to put the output string into the command
    -input_delimiter: in the case where there is more than one input sample
    TODO: handle cases of non-per-sample execution (merge stages), or initial stages.
    TODO: handle input
    '''
    def get_output(self, sample):
        return {self.params.self.output_format : os.path.join(self.params.self.output_dir, self.create_output_string(sample))}
    def create_output_string(self, sample):
        '''
        Expands {sample.id}/{sample.name} wildcards in the configured output template.
        '''
        sample_dict = {"sample.id": sample.id,
                       "sample.name": sample.name}
        return sub_check_wilds(sample_dict, self.params.self.output)
    def create_command_string(self, sample, input_files):
        '''
        Expands sample and self.output wildcards in the configured command template.
        '''
        sample_dict = {"sample.id": sample.id,
                       "sample.name": sample.name,
                       #bug fix: was a bare create_output_string() call (NameError at runtime)
                       "self.output": self.create_output_string(sample)}
        return sub_check_wilds(sample_dict, self.params.self.command)
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(4)
        mem = self.get_memory_count(1024 * 32)
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            input_files = self.collect_input(sample, self.params.self.input_format)
            #bug fix: was a bare create_command_string() call (NameError at runtime)
            custom_command = self.create_command_string(sample, input_files)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                custom_command, dependencies=dependencies, nCores=cores, memMb=mem))
class SubsampleBAMRunner(ModularRunner):
    '''
    Uses samtools to sample a percentage of reads from an input bam.
    Input: bam
    Output: bam
    '''
    def get_output(self, sample):
        return {'bam': os.path.join(self.params.self.output_dir, sample.name+".sub.bam")}
    def workflow(self, workflowRunner):
        '''
        Builds one 'samtools view -s' subsampling task per sample.
        '''
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            bam_file = self.collect_input(sample, 'bam')
            new_bam_file = os.path.join(self.params.self.output_dir, sample.name+".sub.bam")
            #samtools -s takes SEED.FRACTION; the random seed and subsample_fraction are
            #concatenated, so subsample_fraction is assumed to include the leading dot
            #(e.g. '.5') -- TODO confirm against existing params files.
            #bug fix: 'random' was used here but never imported at module level (NameError).
            subsample_command = '{4} view -s {3}{0} -b {1} > {2}'.format(
                self.params.self.subsample_fraction, bam_file, new_bam_file, random.randint(0,100000), self.params.samtools_path)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                subsample_command, dependencies=dependencies))
class BAMtoFASTQRunner(ModularRunner):
    '''
    Uses samtools to convert a bam to a fastq.gz
    TODO: currently paired-end only
    Input: bam
    Output: fastq
    '''
    def get_output(self, sample):
        #the gzipped R1/R2 fastqs produced by the gzip tasks in workflow()
        return {'fastq': [os.path.join(self.params.self.output_dir, sample.name+"_R1_.fastq.gz"), os.path.join(self.params.self.output_dir, sample.name+"_R2_.fastq.gz")]}
    def workflow(self, workflowRunner):
        """
        TODO: might the aligner return more than 1 BAM?
        """
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            bam_file = self.collect_input(sample, 'bam')
            new_r1 = os.path.join(self.params.self.output_dir, sample.name+"_R1_.fastq")
            new_r2 = os.path.join(self.params.self.output_dir, sample.name+"_R2_.fastq")
            #split the bam into uncompressed R1/R2 fastqs...
            command = '{samtools} fastq -1 {r1} -2 {r2} {input_bam}'.format(
                samtools=self.params.samtools_path, r1=new_r1, r2=new_r2, input_bam=bam_file)
            fastq_task = workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                command, dependencies=dependencies)
            #...then gzip each read file once the conversion finishes
            self.task[sample].append(workflowRunner.addTask('gzip1_{}_{}'.format(self.identifier, sample.id),
                'gzip -f {}'.format(new_r1), dependencies=fastq_task))
            self.task[sample].append(workflowRunner.addTask('gzip2_{}_{}'.format(self.identifier, sample.id),
                'gzip -f {}'.format(new_r2), dependencies=fastq_task))
class PicardAlignStatsRunner(ModularRunner):
    """
    Produces a bunch of picard stats from bwa output. Currently includes:
    -CollectAlignmentSummaryMetrics
    -CollectInsertSizeMetrics
    -CollectHsMetrics (only when both target_intervals and bait_intervals are set)
    Input: bam
    Output: align_stats/insert_stats/insert_plot files
    """
    def get_output(self, sample):
        return {'align_stats': os.path.join(self.params.self.output_dir,"{}.stats.txt".format(sample.name)),
            'insert_stats': os.path.join(self.params.self.output_dir,"{}.insert.txt".format(sample.name)),
            'insert_plot': os.path.join(self.params.self.output_dir,"{}.insert.plot.pdf".format(sample.name))}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        mem = self.get_memory_count(24 * 1024)
        for sample in self.collect_samples():
            sample_scratch = os.path.join(self.params.scratch_path, sample.name+self.identifier)
            if not os.path.exists(sample_scratch):
                os.makedirs(sample_scratch)
            sample_name = sample.name
            bam_file = self.collect_input(sample, 'bam')
            dependencies = self.collect_dependencies(sample)
            picard_command = "java -Xmx4G -Djava.io.tmpdir={} -jar {} CollectAlignmentSummaryMetrics R={}/genome.fa I={} O={} TMP_DIR={}".format(
                sample_scratch, self.params.picard, self.params.genome, bam_file, os.path.join(self.params.self.output_dir,"{}.stats.txt".format(sample_name)), sample_scratch)
            stats_task = workflowRunner.addTask('alignstats_{}_{}'.format(self.identifier, sample.id), picard_command, memMb=mem, dependencies=dependencies)
            self.task[sample].append(stats_task)
            picard_command = "java -Xmx4G -Djava.io.tmpdir={} -jar {} CollectInsertSizeMetrics I={} O={} H={} M=0.1 TMP_DIR={}".format(
                sample_scratch, self.params.picard, bam_file, os.path.join(self.params.self.output_dir,"{}.insert.txt".format(sample_name)), os.path.join(self.params.self.output_dir,"{}.insert.plot.pdf".format(sample_name)), sample_scratch)
            stats_task = workflowRunner.addTask('insertstats_{}_{}'.format(self.identifier, sample.id), picard_command, memMb=mem, dependencies=dependencies)
            self.task[sample].append(stats_task)
            if hasattr(self.params.self, 'target_intervals') and hasattr(self.params.self, 'bait_intervals'):
                #bug fix: CollectHsMetrics previously wrote to {}.stats.txt, clobbering (and
                #racing with) the CollectAlignmentSummaryMetrics output above, since both
                #tasks share the same dependencies; it now writes to {}.hs.txt
                picard_command = "java -Xmx4G -Djava.io.tmpdir={} -jar {} CollectHsMetrics R={}/genome.fa I={} O={} TMP_DIR={} BAIT_INTERVALS={} TARGET_INTERVALS={}".format(
                    sample_scratch, self.params.picard, self.params.genome, bam_file, os.path.join(self.params.self.output_dir,"{}.hs.txt".format(sample_name)), sample_scratch, self.params.self.optional.bait_intervals, self.params.self.optional.target_intervals)
                stats_task = workflowRunner.addTask('hsstats_{}_{}'.format(self.identifier, sample.id), picard_command, memMb=mem, dependencies=dependencies)
                self.task[sample].append(stats_task)
class MarkDuplicatesRunner(ModularRunner):
    """
    Runs picard markduplicates. Currently fixed java heap size to 8GB, as it seems to fail with smaller heap sizes
    Input/output are both bams.
    """
    @passthrough('starlog')
    def get_output(self, sample):
        '''
        Passes through starlog if available.
        Returns the deduplicated bam written to this stage's output_dir.
        '''
        return {'bam': os.path.join(self.params.self.output_dir,"{}.dedup.bam".format(sample.name))}
    def workflow(self, workflowRunner):
        # One dedup task followed by a samtools index task per sample;
        # self.task maps sample -> [index task] so downstream stages wait on the index.
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        mem = self.get_memory_count(50 * 1024)  # scheduler memory request (MB)
        for sample in self.collect_samples():
            sample_name = sample.name
            bam_file = self.collect_input(sample, 'bam')
            dependencies = self.collect_dependencies(sample)
            sample_scratch = os.path.join(self.params.scratch_path, sample.name+self.identifier)
            # With use_mate_cigar set, the mate-cigar-aware picard tool is used instead.
            if self.params.self.optional.use_mate_cigar:
                function = 'MarkDuplicatesWithMateCigar'
            else:
                function = 'MarkDuplicates'
            picard_command = "java -Xmx8G -Djava.io.tmpdir={} -jar {} {} I={} O={} M={} TMP_DIR={}".format(
                sample_scratch, self.params.picard, function, bam_file, os.path.join(self.params.self.output_dir,"{}.dedup.bam".format(sample_name)),
                os.path.join(self.params.self.output_dir,"{}.dedup.stats.txt".format(sample_name)), sample_scratch)
            dedup_task = workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id), picard_command, memMb=mem, dependencies=dependencies)
            # Index the deduped bam; it is the task downstream stages depend on.
            self.task[sample].append(workflowRunner.addTask('index_{}_{}'.format(self.identifier, sample.id), "{} index {}".format(self.params.samtools_path, os.path.join(self.params.self.output_dir, "{}.dedup.bam".format(sample_name))), dependencies=dedup_task))
class MACSRunner(ModularRunner):
    '''
    Calls macs for peak detection. Defaults to --format BAMPE unless arguments are specified.
    Input: bams
    Output: none consumable by ZIPPY
    '''
    def get_output(self, sample):
        # MACS peak files are terminal outputs; no downstream ZIPPY stage consumes them.
        return None
    def define_optionals(self):
        return {'args': '-g hs --format BAMPE'}
    def workflow(self, workflowRunner):
        # One macs callpeak task per sample.
        cores = self.get_core_count(6)
        mem = self.get_memory_count(32 * 1024)
        out_dir = self.params.self.output_dir
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        self.task = defaultdict(list)
        for sample in self.collect_samples():
            input_bam = self.collect_input(sample, 'bam')
            upstream = self.collect_dependencies(sample)
            # Base invocation plus the (possibly defaulted) passthrough arguments.
            command = ' '.join([
                "{} callpeak --name {} --treatment {} --outdir {}".format(
                    self.params.macs_path, sample.name, input_bam, out_dir),
                self.params.self.optional.args])
            workflowRunner.flowLog(command)
            self.task[sample].append(workflowRunner.addTask(
                '{}_{}'.format(self.identifier, sample.id), command,
                nCores=cores, memMb=mem, dependencies=upstream))
class DataRunner(ModularRunner):
    '''
    Datarunner loads all the files in its given directory, and provides it as output. It assumes that sample names will be in the name of relevant files.
    It performs no workflow.
    To get its samples, you must either define self.params.self.samples, a list of samples to load, or self.params.self.sample_sheet, a csv sample sheet that has the
    information about what samples to load.
    For file types that need to be mapped to ZIPPY types, you can use self.params.self.optional.type_map, which is a map from raw file types to types that zippy expects.
    For example, for rsem files going to edger, rsem produces '.genes.results' files, but the proper ZIPPY type is 'rsem'.
    TODO: it should have an option to look recursively through a directory
    '''
    def get_samples(self):
        # NOTE(review): presence is probed on self.params.self but the value is read
        # through self.params.self.optional -- presumably both resolve to the same
        # param; confirm against the params framework.
        if hasattr(self.params.self, 'sample_sheet'):
            sample_sheet = SampleSheet(self.params.self.optional.sample_sheet)
            sample_id_map = {}
            samples = []
            # Deduplicate by Sample_ID while preserving sample-sheet order.
            for line in sample_sheet.get("Data"):
                if line.get("Sample_ID") == '' and line.get("Sample_Name") == '':
                    continue
                sample = line.get("Sample_ID")
                if sample not in sample_id_map:
                    samples.append(SampleTuple(sample, sample_sheet.sample_id_to_sample_name(sample)))
                    sample_id_map[sample] = True
            return samples
        else:
            for sample_name in self.params.self.samples:
                check_valid_samplename(sample_name)
            # Without a sample sheet, ids are synthesized from list position.
            return [SampleTuple(i,x) for (i,x) in enumerate(self.params.self.samples)]
    def type_map_match(self, fname):
        # Map a raw filename suffix to its ZIPPY type; False when no entry matches.
        type_map = self.params.self.optional.type_map
        for (raw_type, zippy_type) in type_map.iteritems():
            if fname.endswith(raw_type):
                return zippy_type
        return False
    def get_output(self, sample):
        # Scan output_dir (non-recursively) for files whose name contains the sample name,
        # and bucket them by inferred file type.
        output_map = {}
        #print os.path.join(self.params.self.output_dir, '*')
        #print glob.glob(os.path.join(self.params.self.output_dir, '*'))
        for fname in glob.glob(os.path.join(self.params.self.output_dir, '*')):
            sample_name = sample.name
            if sample_name in fname:
                file_split = fname.split('.')
                if hasattr(self.params.self, 'type_map') and self.type_map_match(fname) != False:
                    file_type = self.type_map_match(fname)
                elif file_split[-1] == 'gz':
                    # For gzipped files the type is the extension before .gz (e.g. 'fastq' in x.fastq.gz).
                    file_type = file_split[-2]
                else:
                    file_type = file_split[-1]
                # Multiple hits for one type become a list; a single hit stays a bare path.
                if file_type in output_map:
                    if not isinstance(output_map[file_type], list):
                        output_map[file_type] = [output_map[file_type]]
                    output_map[file_type].append(fname)
                else:
                    output_map[file_type] = fname
        return output_map
    def get_dependencies(self, sample):
        # Nothing is scheduled by this stage, so there is nothing to wait on.
        return []
    def workflow(self, workflowRunner):
        pass
class IndelRealignmentRunner(ModularRunner):
    '''
    Runs indel realignment over the input bams.
    Needs to be tested!
    Input: bam
    Output: bam (same basename as the input bam, written to output_dir), indexed.
    '''
    def get_output(self, sample):
        # Bug fix: collect_input takes the sample tuple, not sample.name
        # (every other stage in this module passes the sample itself).
        base_file_name = os.path.basename(self.collect_input(sample, 'bam'))
        return {'bam': os.path.join(self.params.self.output_dir,base_file_name)}
    def workflow(self, workflowRunner):
        # One realignment task followed by a samtools index task per sample.
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(6)
        mem = self.get_memory_count(12 * 1024)
        for sample in self.collect_samples():
            bam_file = self.collect_input(sample, 'bam')
            base_file_name = os.path.basename(bam_file)
            dependencies = self.collect_dependencies(sample)
            # The original command string embedded a backslash continuation, leaving a
            # run of spaces before {outFolder}; the shell is whitespace-insensitive,
            # so this clean single-space form is equivalent.
            realign_command = ("source {mono_source}; {mono} {RI} -bamFiles {bam} "
                               "-genomeFolders {genome} -outFolder {outFolder}").format(
                mono_source=self.params.mono_source, mono=self.params.mono, RI=self.params.indelRealign,
                bam=bam_file, genome=self.params.genome, outFolder=self.params.self.output_dir)
            ri_task = workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id), realign_command, nCores=cores, memMb=mem, dependencies=dependencies)
            # Index the realigned bam; it is the task downstream stages depend on.
            self.task[sample].append(workflowRunner.addTask('index_{}_{}'.format(self.identifier, sample.id), "{} index {}".format(self.params.samtools_path, os.path.join(self.params.self.output_dir, base_file_name)), dependencies=ri_task))
class PiscesRunner(ModularRunner):
    '''
    Runs the pisces variant caller.
    Note: Pisces takes as input a directory, so all the samples are processed at once.
    TODO: gvcf False support, better cores support, per-sample mode.
    Input: bam
    Output: vcf
    '''
    def get_output(self, sample):
        base_file_name = os.path.basename(self.collect_input(sample, 'bam'))
        # Pisces names its output after the input bam, swapping the extension for .genome.vcf.
        out_file_name = '.'.join(base_file_name.split('.')[:-1])+'.genome.vcf'
        return {'vcf': os.path.join(self.params.self.output_dir,out_file_name)}
    def get_dependencies(self, sample):
        # All samples share the single folder-level pisces task.
        return self.task
    def define_optionals(self):
        return {'args': ''}
    def workflow(self, workflowRunner):
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        mem = self.get_memory_count(32 * 1024)
        cores = self.get_core_count(8)
        args = self.params.self.optional.args
        dependencies = []
        # NOTE(review): a task is added per sample, but every command runs pisces over
        # the whole bam folder, and self.task keeps only the last task added.
        # Presumably related to the folder-level TODO above -- confirm before relying
        # on per-sample behavior.
        for sample in self.collect_samples():
            bam_file = self.collect_input(sample, 'bam')
            dependencies.extend(self.collect_dependencies(sample))
            bam_dir = os.path.normpath(os.path.join(bam_file, '..'))
            command = "{dotnet} {pisces} -BAMFolder {input_path} -G {genome} -OutFolder {output_path} {args}".format(
                dotnet=self.params.dotnet, pisces=self.params.pisces_path, input_path=bam_dir, genome=self.params.genome, output_path=self.params.self.output_dir, args=args)
            workflowRunner.flowLog(command)
            self.task = workflowRunner.addTask('pisces_{}_{}'.format(self.identifier, sample.id), command, dependencies=dependencies, nCores=cores, memMb=mem)
class StarRunner(ModularRunner):
    '''
    Runs the STAR aligner. Uses a minimally useful set of default parameters. Further parameters can be
    passed via the command line using the 'args' param.
    Input: fastq
    Output: a sorted, indexed bam.
    '''
    def get_output(self, sample):
        # Per-sample STAR outputs: sorted bam, final alignment log, splice junction table.
        return {'bam': os.path.join(self.params.self.output_dir,'{}.raw.bam'.format(sample.name)),
            'starlog': os.path.join(self.params.self.output_dir,'{}Log.final.out'.format(sample.name)),
            'sjout': os.path.join(self.params.self.output_dir, '{}SJ.out.tab'.format(sample.name))}
    def define_optionals(self):
        return {'args': '--outSAMtype BAM SortedByCoordinate'}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(16)
        mem = self.get_memory_count(12 * 1024)
        args = self.params.self.optional.args
        for sample in self.collect_samples():
            fastq = self.collect_input(sample, 'fastq')
            dependencies = self.collect_dependencies(sample)
            # STAR runs as a pyflow sub-workflow (SingleStarFlow) rather than a single task.
            star_wf = SingleStarFlow(self.params.star_path, self.params.star_index, sample.name, fastq, self.params.self.output_dir,
                max_job_cores=cores, tmp_path=os.path.join(self.params.scratch_path, 'star{}'.format(sample.name)), command_args=args)
            self.task[sample].append(workflowRunner.addWorkflowTask('star_{}_{}'.format(self.identifier, sample.id), star_wf, dependencies=dependencies))
class FastQCRunner(ModularRunner):
    '''
    Runs fastqc.
    Input: fastq
    Output: none consumable by ZIPPY
    '''
    def get_output(self, sample):
        # FastQC reports are terminal; nothing downstream consumes them.
        pass
    def workflow(self, workflowRunner):
        # Schedule one fastqc invocation per sample over all of its fastqs.
        out_dir = self.params.self.output_dir
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        self.task = defaultdict(list)
        for sample in self.collect_samples():
            fastqs = self.collect_input(sample, 'fastq')
            upstream = self.collect_dependencies(sample)
            command = "{} {} -o {}".format(self.params.fastqc_path, ' '.join(fastqs), out_dir)
            self.task[sample].append(workflowRunner.addTask(
                'fastqc_{}_{}'.format(self.identifier, sample.id), command,
                dependencies=upstream, memMb=self.get_memory_count(8*1024)))
class MergeBamRunner(ModularRunner):
    '''
    Uses samtools merge to combine a set of bams. This is our first merge stage, and hence is currently considered experimental.
    Currently, it takes as input a list of sample names, so it does not explicitly depend on the samplesheet.
    TODO: make a merge be many-to-many, instead of many-to-one. This would involve taking in a list of lists as input.
    TODO: currently we merge to 1 by only returning get_output for our first sample name. The right way to do this is to modify
    the sample information downstream stages see. So we must decouple our pipeline from the samplesheet. Which is probably a good idea anyway. No offense, Isas.
    '''
    def get_samples(self):
        # Merge stage: exposes a single synthetic sample named after the stage identifier.
        return [SampleTuple('1', self.identifier)]
    def get_output(self, sample):
        return {'bam': os.path.join(self.params.self.output_dir,'{}.merged.bam'.format(self.identifier))}
    def get_dependencies(self, sample):
        return self.task
    def workflow(self, workflowRunner):
        bams = []
        dependencies = []
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        # Only upstream samples listed in params.self.samples take part in the merge.
        for sample in self.collect_samples():
            if sample.name not in self.params.self.samples:
                continue
            bams.append(self.collect_input(sample, 'bam'))
            dependencies.extend(self.collect_dependencies(sample))
        output_file_path = os.path.join(self.params.self.output_dir,'{}.merged.bam'.format(self.identifier))
        # -f overwrites a pre-existing merged bam from an earlier run.
        merge_command = '{} merge -f {} {}'.format(self.params.samtools_path, output_file_path, " ".join(bams))
        merge_task = workflowRunner.addTask('mergebam_{}'.format(self.identifier), merge_command, dependencies=dependencies)
        index_command = '{} index {}'.format(self.params.samtools_path, output_file_path)
        # Downstream stages wait on the index task (which itself waits on the merge).
        self.task = [workflowRunner.addTask('indexbam_{}'.format(self.identifier), index_command, dependencies=merge_task)]
class RNAQCRunner(ModularRunner):
    '''
    @TODO: make literally everything optional
    Produces RNA-seq stats for your run. Tested with star.
    Input: bam
    '''
    def get_output(self, sample):
        # Summary stats are terminal; nothing downstream consumes them.
        pass
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        for sample in self.collect_samples():
            workflowRunner.flowLog('sample {}'.format(sample))
            dependencies = self.collect_dependencies(sample)
            bam_path = self.collect_input(sample, 'bam')
            rsem_model_path = self.collect_input(sample, 'rsem_model')
            sample_scratch = os.path.join(self.params.scratch_path, sample.name+self.identifier)
            if not os.path.exists(sample_scratch):
                os.makedirs(sample_scratch)
            starlog_path = self.collect_input(sample, 'starlog')
            # rna_stats.py ships with zippy and aggregates the per-sample statistics.
            script_path = os.path.join(zippy_dir, 'rna_stats.py')
            if starlog_path is None: #we did not use star, so we can't get the star stats
                command = "{python} {script} {bam_path} {stat_path}/{sample_name}.summary.txt --ribosome_bed {ribosome_bed} --intron_bed {intron_bed} --temp_path {temp} ".format(
                    python=self.params.python, script=script_path, bam_path=bam_path, stat_path=self.params.self.output_dir, sample_name=sample.name, ribosome_bed=self.params.ribosome_bed, intron_bed=self.params.intron_bed, temp=sample_scratch+'a.out')
            else:
                command = "{python} {script} {bam_path} {stat_path}/{sample_name}.summary.txt --ribosome_bed {ribosome_bed} --intron_bed {intron_bed} --starlog_path {starlog_path} --temp_path {temp}".format(
                    python=self.params.python, script=script_path, bam_path=bam_path, stat_path=self.params.self.output_dir, sample_name=sample.name, ribosome_bed=self.params.ribosome_bed, intron_bed=self.params.intron_bed, starlog_path=starlog_path, temp=sample_scratch+'a.out')
            # NOTE(review): this probes the global params namespace but reads
            # params.optional, unlike the stage-scoped params.self.optional pattern
            # used elsewhere -- confirm manifest_bed is meant to be a global param.
            if hasattr(self.params, 'manifest_bed'):
                command += " --manifest_bed {}".format(self.params.optional.manifest_bed)
            if hasattr(self.params.self, 'dup_stats') and self.params.self.optional.dup_stats:
                command += " --dup_stats"
            if rsem_model_path is not None:
                command += " --rsem_model {}".format(rsem_model_path)
            self.task[sample].append(workflowRunner.addTask('stats_combine_{}'.format(sample.id), command, memMb=40*1024, dependencies=dependencies))
            workflowRunner.flowLog(command)
class SalmonRunner(ModularRunner):
    """
    Runs salmon quantification.
    Input: fastqs. The fastqs are assumed to contain r1/r2 information.
    Output: .gene.results files.
    """
    def get_output(self, sample):
        #TODO
        return {'rsem': os.path.join(self.params.self.output_dir,sample.name+".genes.results")}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(16)
        mem = self.get_memory_count(100 * 1024)
        for sample in self.collect_samples():
            sample_name = sample.name
            fastq_files = self.collect_input(sample, 'fastq')
            if self.sample_sheet.is_paired_end():
                # Split the fastqs into read-1 / read-2 sets by Illumina naming convention.
                r1_files = [x for x in fastq_files if '_R1_' in x]
                r2_files = [x for x in fastq_files if '_R2_' in x]
                # Bug fix: the format string references {salmon_index} and {cores}, but
                # the original call passed 'reference=' and omitted 'cores', so building
                # the command raised KeyError at runtime.
                salmon_command = "{salmon_path} quant -i {salmon_index} -l A -1 {r1} -2 {r2} -o {output_path} --numThreads {cores}".format(
                    salmon_path=self.params.salmon_path,
                    r1=" ".join(r1_files),
                    r2=" ".join(r2_files),
                    salmon_index=self.params.salmon_index,
                    output_path=self.params.self.output_dir,
                    cores=cores)
            else:
                # Bug fix: single-end reads are passed to salmon with -r, not -1
                # (per the salmon quant command-line documentation).
                salmon_command = "{salmon_path} quant -i {salmon_index} -l A -r {r1} -o {output_path} --numThreads {cores}".format(
                    salmon_path=self.params.salmon_path,
                    r1=" ".join(fastq_files),
                    salmon_index=self.params.salmon_index,
                    output_path=self.params.self.output_dir,
                    cores=cores)
            if hasattr(self.params.self, 'args'):
                salmon_command += ' ' + self.params.self.optional.args
            workflowRunner.flowLog(salmon_command)
            dependencies = self.collect_dependencies(sample)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id), salmon_command, dependencies=dependencies, nCores=cores, memMb=mem))
class StrelkaRunner(ModularRunner):
    """
    Strelka call variants from either tumor/normal or germline samples. To select the mode of strelka, you must specify is_somatic as true or false.
    In somatic mode, the two input bams are told apart with the 'normal_pattern' or 'tumor_pattern'
    param: a substring found only in the normal (respectively tumor) bam path.
    Input: bam
    Output: vcf
    """
    def get_output(self, sample):
        if self.params.self.is_somatic:
            return {'vcf': os.path.join(self.params.self.output_dir, sample.name+".somatic.vcf.gz")}
        else:
            return {'vcf': os.path.join(self.params.self.output_dir, sample.name+".germline.vcf.gz")}
    def define_optionals(self):
        return {'args': ''}
    def workflow(self, workflowRunner):
        # Per sample: configure strelka in a fresh scratch dir, run it, move/merge the
        # result into output_dir, then delete the scratch dir.
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(16)
        mem = self.get_memory_count(1024 * 32)
        args = self.params.self.optional.args
        for sample in self.collect_samples():
            #we do this because strelka gets very upset if your directory already exists. So if you stop your pipeline between strelka configure
            #and strelka run, you would get stuck with an error.
            random_id = ''.join(numpy.random.choice(list(string.digits+string.ascii_lowercase), 6))
            scratch_path = os.path.join(self.params.scratch_path, sample.name+random_id)
            if os.path.exists(os.path.join(scratch_path, 'runWorkflow.py')):
                os.remove(os.path.join(scratch_path, 'runWorkflow.py'))
            if not os.path.exists(scratch_path):
                os.makedirs(scratch_path)
            dependencies = self.collect_dependencies(sample)
            bam_file = self.collect_input(sample, 'bam')
            workflowRunner.flowLog('Strelka bams for sample {}: {}'.format(sample.name, bam_file))
            if self.params.self.is_somatic:
                strelka_config = '{} {}/bin/configureStrelkaSomaticWorkflow.py'.format(self.params.python, self.params.strelka_path)
                # Somatic mode requires exactly one normal and one tumor bam.
                assert len(bam_file) == 2
                if hasattr(self.params.self, 'normal_pattern'):
                    normal_matches = [x for x in bam_file if self.params.self.optional.normal_pattern in x]
                    tumor_matches = [x for x in bam_file if not self.params.self.optional.normal_pattern in x]
                elif hasattr(self.params.self, 'tumor_pattern'):
                    normal_matches = [x for x in bam_file if not self.params.self.optional.tumor_pattern in x]
                    tumor_matches = [x for x in bam_file if self.params.self.optional.tumor_pattern in x]
                else:
                    raise AttributeError('For somatic strelka, either "normal_pattern" or "tumor_pattern" must be defined. This argument contains a string that identifies the normal/tumor sample respectively.')
                # NOTE(review): this error message reads normal_pattern even when the
                # tumor_pattern branch was taken -- it may itself raise if only
                # tumor_pattern is defined. Confirm and fix separately.
                if len(normal_matches)!=1 or len(tumor_matches)!=1:
                    raise AttributeError('Pattern {} was unable to differentiate bam files: {}'.format(self.params.self.optional.normal_pattern, bam_file))
                bam_string = "--normalBam {} --tumorBam {}".format(normal_matches[0], tumor_matches[0])
            else:
                strelka_config = '{} {}/bin/configureStrelkaGermlineWorkflow.py'.format(self.params.python, self.params.strelka_path)
                bam_string = '--bam {}'.format(bam_file)
            configure_command = '{strelka_config} {bam_string} \
            --referenceFasta={genome}/genome.fa --runDir={scratch_path} --callMemMb={mem} {args}'.format(
                strelka_config=strelka_config, bam_string=bam_string, genome=self.params.genome,
                scratch_path=scratch_path, mem=mem, args=args)
            # Configuration is cheap; run it locally rather than on the grid.
            sub_task = workflowRunner.addTask('configure_{}_{}'.format(self.identifier, sample.id),
                configure_command, dependencies=dependencies, isForceLocal=True)
            strelka_command = '{python} {scratch_path}/runWorkflow.py -m local -j {core_count}'.format(
                python=self.params.python, scratch_path=scratch_path, core_count=cores)
            strelka_task = workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                strelka_command, dependencies=sub_task, nCores=cores)
            if self.params.self.is_somatic:
                # Somatic strelka emits separate snv/indel vcfs; merge them into one output vcf.
                module_dir = os.path.abspath(os.path.dirname(__file__))
                merge_path = os.path.join(module_dir, 'vcf_merge.py')
                merge_command = "{python} {merge_path} {scratch_path}/somatic.snvs.vcf.gz {scratch_path}/somatic.indels.vcf.gz {output_dir}/{sample_name}.somatic.vcf.gz".format(
                    python=self.params.python, scratch_path=os.path.join(scratch_path, 'results', 'variants'), output_dir=self.params.self.output_dir, sample_name=sample.name,
                    merge_path=merge_path)
                move_task = workflowRunner.addTask('merge_{}_{}'.format(self.identifier, sample.id),
                    merge_command, dependencies=strelka_task, nCores=1, memMb=4*1024)
            else:
                move_command = "cp {scratch_path}/variants.vcf.gz {output_dir}/{sample_name}.germline.vcf.gz".format(
                    scratch_path=os.path.join(scratch_path, 'results', 'variants'), output_dir=self.params.self.output_dir, sample_name=sample.name)
                move_task = workflowRunner.addTask('move_{}_{}'.format(self.identifier, sample.id),
                    move_command, dependencies=strelka_task, nCores=1, memMb=4*1024, isForceLocal=True)
            # The scratch directory is removed once the vcf has been merged/copied out.
            self.task[sample].append(workflowRunner.addTask('clean_temp_{}_{}'.format(self.identifier, sample.id),
                "rm -r {}".format(scratch_path), dependencies=move_task, isForceLocal=True))
class EdgerRunner(ModularRunner):
    '''
    This is a merge stage that runs edger to perform differential expression analysis.
    Requires sample_groups, a two element list. Each element of the list is a list of sample names of one group of the analysis. The sample
    names can also be wildcards matching unix filename wildcard syntax.
    '''
    def get_samples(self):
        # Merge stage: exposes a single synthetic sample named after the stage identifier.
        return [SampleTuple('1', self.identifier)]
    def get_output(self, sample):
        pass
        #return {'rsem': os.path.join(self.params.self.output_dir,sample.name+".genes.results")}
    def get_dependencies(self, sample):
        return self.task
    def sample_in_group(self, sample_name, sample_group):
        # True when the sample name matches any unix-style wildcard pattern in the group.
        for group_pattern in sample_group:
            if fnmatch.fnmatch(sample_name, group_pattern):
                return True
        return False
    def workflow(self, workflowRunner):
        rsems_by_group = [ [] , [] ] #it's... an owl?
        dependencies = []
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        temp_path = os.path.join(self.params.scratch_path, self.identifier)
        if not os.path.exists(temp_path):
            os.makedirs(temp_path)
        # Bucket each upstream sample's rsem output into its analysis group.
        for sample in self.collect_samples():
            for (i,sample_group) in enumerate(self.params.self.sample_groups):
                if not self.sample_in_group(sample.name, sample_group):
                    continue
                rsems_by_group[i].append(self.collect_input(sample, 'rsem'))
                dependencies.extend(self.collect_dependencies(sample))
        workflowRunner.flowLog('Edger groups: {}'.format(rsems_by_group))
        workflowRunner.flowLog('dependencies: {}'.format(dependencies))
        # run_edger.py ships with zippy and performs the actual edgeR analysis.
        script_path = os.path.join(zippy_dir, 'run_edger.py')
        output_file_path = os.path.join(self.params.self.output_dir,'{}.edger.out'.format(self.identifier))
        command = '{python} {script_path} --group1 {group1} --group2 {group2} --out_file {out_file} --temp_folder {temp_folder} --r_path {r_path}'.format(
            group1=' '.join(rsems_by_group[0]),
            group2=' '.join(rsems_by_group[1]),
            out_file=output_file_path,
            temp_folder=temp_path,
            r_path=self.params.r_path,
            python=self.params.python,
            script_path=script_path)
        self.task = [workflowRunner.addTask('edger_{}'.format(self.identifier), command, dependencies=dependencies)]
class DeleteRunner(ModularRunner):
    '''
    Schedules removal of upstream output files (one rm task per sample).
    Produces no output consumable by ZIPPY.
    '''
    def get_output(self, sample):
        pass
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        for sample in self.collect_samples():
            upstream = self.collect_dependencies(sample)
            # Gather every upstream file of each configured type for this sample.
            doomed = []
            for file_type in self.params.self.file_types:
                hit = self.collect_input(sample, file_type)
                doomed.extend(hit if isinstance(hit, list) else [hit])
            command = 'rm -f {}'.format(' '.join(doomed))
            workflowRunner.flowLog(command)
            self.task[sample].append(workflowRunner.addTask(
                'delete_{}_{}'.format(self.identifier, sample.id), command,
                dependencies=upstream))
class BloomSubsampleBAMRunner(ModularRunner):
    '''
    Uses a bloom filter to sample a NUMBER of reads from an input bam. Bloom filter requires pybloomfilter-mmap package.
    Input: bam
    Output: bam
    '''
    def get_output(self, sample):
        return {'bam': os.path.join(self.params.self.output_dir, sample.name+".sub.bam")}
    def define_optionals(self):
        # 'args' is an optional passthrough appended verbatim to the subsample command.
        return {'args': None}
    def workflow(self, workflowRunner):
        """
        TODO: might the aligner return more than 1 BAM?
        """
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(16)
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            bam_file = self.collect_input(sample, 'bam')
            new_bam_file = os.path.join(self.params.self.output_dir, sample.name+".sub.bam")
            # downsampling_bloom.py ships with zippy and performs the subsampling.
            script_path = os.path.join(zippy_dir, 'downsampling_bloom.py')
            subsample_command = '{python} {script_path} --bam {bam} --downsampled_pairs {count} --output {output_path} --threads {threads}'.format(
                python=self.params.python, script_path=script_path, bam=bam_file, count=self.params.self.reads, output_path=new_bam_file, threads=cores)
            if self.params.self.optional.args:
                subsample_command += ' ' + self.params.self.optional.args
            mem = self.get_memory_count(1024 * 32)
            sub_task = workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                subsample_command, dependencies=dependencies, nCores=cores, memMb=mem)
            # Index the downsampled bam; it is the task downstream stages depend on.
            self.task[sample].append(workflowRunner.addTask('index_{}_{}'.format(self.identifier, sample.id),
                "{} index {}".format(self.params.samtools_path, os.path.join(self.params.self.output_dir, sample.name+".sub.bam")), dependencies=sub_task))
class Minimap2Runner(ModularRunner):
    '''
    Runs miniMap2. Experimental!
    Limitations: single char arg chaining (i.e., -ax instead of -a -x) is not supported.
    args:
        genome_filename (optional): override this to change the name of the genome (default: genome.fa)
    Input: fastq
    Output: if '-a' is in args, it produces sorted, indexed bams. Otherwise it produces pafs
    '''
    def get_output(self, sample):
        # self.make_bams is assigned in workflow(); get_output is only meaningful afterwards.
        if self.make_bams:
            return {'bam': os.path.join(self.params.self.output_dir, sample.name+".raw.bam")}
        else:
            return {'paf': os.path.join(self.params.self.output_dir, sample.name+".paf")}
    def define_optionals(self):
        return {'genome_filename': 'genome.fa', 'args': ''}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        cores = self.get_core_count(20)
        mem = self.get_memory_count(1024 * 127)
        genome_suffix = self.params.self.optional.genome_filename
        args = self.params.self.optional.args
        # Bug fix: make_bams was only assigned when '-a' was present, so paf mode
        # crashed with AttributeError on the very next use of self.make_bams.
        # ('-a' is a substring test; chained flags like -ax are documented unsupported.)
        self.make_bams = '-a' in args
        for sample in self.collect_samples():
            sample_name = sample.name
            dependencies = self.collect_dependencies(sample)
            fastq_files = self.collect_input(sample, 'fastq', as_list=True)
            fastq_files = organize_fastqs(fastq_files, self.sample_sheet.is_paired_end())
            if self.sample_sheet.is_paired_end():
                #if r1/r2 are separate, we wish to interleave them:
                #r1_1, r2_1, r1_2, r2_2 etc. This does that.
                fastq_files = [x for x in itertools.chain(*zip(*fastq_files))]
            if self.make_bams:
                #we pipe to bam and sort
                unsorted_file = os.path.join(self.params.self.output_dir, sample.name+'.unsorted.bam')
                sorted_file = os.path.join(self.params.self.output_dir, sample.name+'.raw.bam')
                output_command = '| {samtools} view -u > '.format(samtools=self.params.samtools_path)
                output_command += unsorted_file
                output_command += ' && {samtools} sort -@ 32 -m 16G {unsorted} > {sorted}'.format(
                    samtools=self.params.samtools_path, unsorted=unsorted_file, sorted=sorted_file)
                output_command += ' && rm {unsorted}'.format(unsorted=unsorted_file)
            else:
                output_command = ' > '+os.path.join(self.params.self.output_dir, sample.name+'.paf')
            command = '{minimap2} {args} {ref} {fastq_files} {output_command}'.format(
                minimap2=self.params.minimap2_path, args=args,
                ref=os.path.join(self.params.genome, genome_suffix),
                fastq_files=" ".join(fastq_files), output_command=output_command)
            minimap_task = workflowRunner.addTask('minimap2_{}_{}'.format(self.identifier, sample.id), command, nCores=cores, memMb=mem, dependencies=dependencies)
            if self.make_bams:
                # bams additionally need a samtools index; pafs are final as-is.
                self.task[sample].append(workflowRunner.addTask('index_{}_{}'.format(self.identifier, sample.id), "{} index {}".format(self.params.samtools_path, os.path.join(self.params.self.output_dir,sample_name+".raw.bam")), dependencies=minimap_task))
            else:
                self.task[sample].append(minimap_task)
class NirvanaRunner(ModularRunner):
    '''
    Uses Nirvana to annotate variants in a vcf file
    Input: vcf
    Output: annotated json (with .jsi index) and vcf
    '''
    def get_output(self, sample):
        return {'json': os.path.join(self.params.self.output_dir, sample.name + ".json.gz"),
                'json_index': os.path.join(self.params.self.output_dir, sample.name + ".json.gz.jsi"),
                'vcf': os.path.join(self.params.self.output_dir, sample.name + ".vcf.gz")}
    def define_optionals(self):
        # Consistency: declare the 'args' default the way the other runners do
        # (e.g. StrelkaRunner/PiscesRunner) instead of probing with hasattr in workflow().
        return {'args': ''}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        args = self.params.self.optional.args
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            input_vcf = self.collect_input(sample, 'vcf')
            output_path = os.path.join(self.params.self.output_dir, sample.name)
            command = "{dotnet} {nirvana_path} -i {input_vcf} -c {nirvana_cache} -sd {nirvana_supplement} -r {nirvana_ref} -o {output_path} {args}".format(
                dotnet=self.params.dotnet,
                nirvana_path=self.params.nirvana_path,
                input_vcf=input_vcf,
                nirvana_cache=self.params.nirvana_cache,
                nirvana_supplement=self.params.nirvana_supplement,
                nirvana_ref=self.params.nirvana_ref,
                output_path=output_path,
                args=args)
            mem = self.get_memory_count(1024 * 32)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                command, dependencies=dependencies, memMb=mem))
class PrimerDimerMinerRunner(ModularRunner):
    '''
    Runs the included primer dimer miner script.
    Detects reads with more than one of a provided list of primers.
    Input: fastq
    Output: None exposed in ZIPPY. Produces a file containing pairs of primers that seem to be dimerizing
    '''
    def get_output(self, sample):
        return {}
    def define_optionals(self):
        # k-mer length used when searching reads for primer sequences.
        return {'kmer': 15}
    def workflow(self, workflowRunner):
        self.task = defaultdict(list)
        if not os.path.exists(self.params.self.output_dir):
            os.makedirs(self.params.self.output_dir)
        for sample in self.collect_samples():
            dependencies = self.collect_dependencies(sample)
            # Bug fix: this stage previously pointed at downsampling_bloom.py (a
            # copy-paste from BloomSubsampleBAMRunner); run the primer dimer miner
            # script this stage documents itself as running.
            script_path = os.path.join(zippy_dir, 'primer_dimer_miner.py')
            fastq_files = self.collect_input(sample, 'fastq', as_list=True)
            pdm_command = '{python} {script_path} --probe_list {probe_list} --kmer {kmer}'.format(
                python=self.params.python, script_path=script_path, probe_list=self.params.self.probe_list,
                kmer=self.params.self.optional.kmer)
            if len(fastq_files) > 2 or len(fastq_files) < 1:
                raise NotImplementedError('PrimerDimerMiner must take 1 or 2 fastqs as input')
            else:
                # --input_file1 / --input_file2 for single- or paired-end input.
                for (i,x) in enumerate(fastq_files):
                    pdm_command+=' --input_file{} {}'.format(i+1, x)
            self.task[sample].append(workflowRunner.addTask('{}_{}'.format(self.identifier, sample.id),
                pdm_command, dependencies=dependencies))
class CopyFolderRunner(ModularRunner):
    '''
    Copies a folder
    args:
        input_dir: the input folder path
        output_dir: the output folder path
    '''
    def get_output(self, sample):
        return {'folder': os.path.join(self.params.self.output_dir)}
    def get_samples(self):
        # Pass upstream samples through; synthesize one sample when run as a source stage.
        samples_in = self.collect_samples()
        return samples_in if len(samples_in) > 0 else [SampleTuple('1', self.identifier)]
    def get_dependencies(self, sample):
        return self.task
    def workflow(self, workflowRunner):
        # (The dead `self.task = defaultdict(list)` initializer was removed: the
        # attribute is unconditionally reassigned to the single copy task below.)
        dependencies = []
        for sample in self.collect_samples():
            # Bug fix: collect_dependencies returns a list, so extend it (as every
            # other stage here does); append nested lists inside dependencies.
            dependencies.extend(self.collect_dependencies(sample))
        # cp -rT copies the directory contents onto out_path itself (no extra nesting).
        command = 'cp -rT {in_path} {out_path}'.format(
            in_path=self.params.self.input_dir, out_path=self.params.self.output_dir)
        self.task = workflowRunner.addTask('{}'.format(self.identifier),
            command, dependencies=dependencies)
import re
import logging
import json
from pathlib import Path
from datetime import datetime
from .downloader import AsyncFastFileDownloader, AsyncFileDownloader, FileDownloader
log = logging.getLogger(__name__)
__all__ = (
'File',
)
class File:
    def __init__(self, data) -> None:
        # Raw metadata mapping parsed from the zippyshare page; keys read by this
        # class: 'name_file', 'size', 'date_upload', 'url', 'download_url'.
        self._data = data
def __repr__(self) -> str:
return '<Zippyshare File name="%s" size="%s">' % (
self.name,
self.size_fmt
)
    @property
    def name(self) -> str:
        """:class:`str`: Return the name of the file."""
        return self._data['name_file']
@property
def size(self) -> float:
""":class:`float`: Return size of the file, in bytes."""
re_num = re.compile(r'[0-9.]{1,}')
return float(re_num.match(self._data['size']).group()) * 1000 * 1000
@property
def size_fmt(self) -> str:
""":class:`str`: Return formatted size of the file"""
return self._data['size']
@property
def date_uploaded(self) -> datetime:
""":class:`datetime.datetime`: Return date that this file uploaded."""
date_format = '%d-%m-%Y %H:%M'
return datetime.strptime(self._data['date_upload'], date_format)
@property
def date_uploaded_fmt(self) -> str:
""":class:`str`: Return formatted date that this file uploaded."""
return self._data['date_upload']
@property
def url(self):
""":class:`str`: Return origin url"""
return self._data['url']
@property
def download_url(self) -> str:
""":class:`download_url`: Return downloadable url"""
return self._data['download_url']
def download(
self,
progress_bar: bool=True,
replace: bool=False,
folder: str=None,
filename: str=None
) -> Path:
"""
Download this file
Parameters
------------
progress_bar: :class:`bool`
Enable/Disable progress bar,
default to `True`
replace: :class:`bool`
Replace file if exist,
default to `False`
folder: :class:`str`
Set a folder where to store downloaded file,
default to `None`.
filename: :class:`str`
Set a replacement filename, default to `None`.
Returns
--------
:class:`pathlib.Path`
Zippyshare file downloaded
"""
if filename:
_filename = filename
extra_word = 'as "%s"' % _filename
else:
_filename = self.name
extra_word = ''
log.info('Downloading "%s" %s' % (self.name, extra_word))
file_path = (Path('.') / (folder if folder else '') / _filename)
file_path.parent.mkdir(exist_ok=True, parents=True)
downloader = FileDownloader(
self.download_url,
str(file_path),
progress_bar=progress_bar,
replace=replace
)
downloader.download()
downloader.cleanup()
log.info('Successfully downloaded "%s" %s' % (self.name, extra_word))
return file_path
async def download_coro(
self,
progress_bar: bool=True,
replace: bool=False,
folder: str=None,
filename: str=None,
fast: bool=False
) -> Path:
"""Same like :meth:`File.download()` but for asynchronous process
Parameters
------------
progress_bar: :class:`bool`
Enable/Disable progress bar,
default to `True`
replace: :class:`bool`
Replace file if exist,
default to `False`
folder: :class:`str`
Set a folder where to store downloaded file,
default to `None`.
filename: :class:`str`
Set a replacement filename, default to `None`.
fast: :class:`bool`
Enable Fast download, default to ``False``
Returns
--------
:class:`pathlib.Path`
Zippyshare file downloaded
"""
if filename:
_filename = filename
extra_word = 'as "%s"' % _filename
else:
_filename = self.name
extra_word = ''
log.info('%s "%s" %s' % (
'Fast Downloading' if fast else 'Downloading',
self.name,
extra_word
))
file_path = (Path('.') / (folder if folder else '') / _filename)
file_path.parent.mkdir(exist_ok=True, parents=True)
args = (
self.download_url,
str(file_path),
progress_bar,
replace
)
if fast:
downloader = AsyncFastFileDownloader(*args)
else:
downloader = AsyncFileDownloader(*args)
await downloader.download()
await downloader.cleanup()
log.info('Successfully downloaded "%s" %s' % (self.name, extra_word))
return file_path
def to_JSON(self) -> str:
"""Return all zippyshare informations in JSON"""
return json.dumps(self._data.copy())
def to_dict(self) -> dict:
"""Return all zippyshare informations in dict"""
return self._data.copy() | zippyshare-downloader | /zippyshare_downloader-0.3.4-py3-none-any.whl/zippyshare_downloader/file.py | file.py |
import re
import os
import math
import tarfile
import zipfile
import logging
from pathlib import Path
from .errors import InvalidURL
log = logging.getLogger(__name__)
# Every public name from the math module is allowed inside expressions.
ALLOWED_NAMES = {
    name: value
    for name, value in vars(math).items()
    if not name.startswith("__")
}

# Credit for the evaluate() method: Leodanis Pozo Ramos https://realpython.com/python-eval-function/
def evaluate(expression):
    """Safely evaluate a math-only expression string and return its value.

    Only names exported by :mod:`math` may appear in *expression*;
    any other identifier raises :class:`NameError` before evaluation.
    """
    compiled = compile(expression, "<string>", "eval")
    # Reject the expression if it references anything outside math's namespace.
    unknown = [n for n in compiled.co_names if n not in ALLOWED_NAMES]
    if unknown:
        raise NameError("The use of '%s' is not allowed. Expression used: %s" % (unknown[0], expression))
    # Builtins are stripped so eval() only sees the math namespace.
    return eval(compiled, {"__builtins__": {}}, ALLOWED_NAMES)
# Patterns covering every known Zippyshare url layout.
REGEXS_ZIPPYSHARE_URL = [
    # View zippyshare url
    r'https:\/\/www[0-9]{1,3}\.zippyshare\.com\/v\/[0-9A-Za-z]{8}\/file\.html',
    r'https:\/\/www\.zippyshare\.com\/v\/[0-9A-Za-z]{8}\/file\.html',
    # Download Zippyshare url
    r'https:\/\/www[0-9]{1,3}\.zippyshare\.com\/d\/[0-9A-Za-z]{8}\/',
    r'https:\/\/www\.zippyshare\.com\/d\/[0-9A-Za-z]{8}\/',
]

def check_valid_zippyshare_url(url):
    """Check if given url is valid Zippyshare url"""
    if any(re.match(pattern, url) is not None for pattern in REGEXS_ZIPPYSHARE_URL):
        return url
    raise InvalidURL('"%s" is not a zippyshare url' % (url))
def getStartandEndvalue(value: str, sub: str, second_sub=None):
    """Return the slice of *value* between delimiters.

    The slice starts right after the first occurrence of *sub* and ends
    at the next occurrence of *second_sub* (or of *sub* again when
    *second_sub* is ``None``).
    """
    after_first = value[value.find(sub) + 1:]
    closer = sub if second_sub is None else second_sub
    return after_first[:after_first.find(closer)]
def extract_archived_file(file) -> None:
    """Extract all files from supported archive file (zip and tar).

    Tries tar first, then zip; silently returns when *file* is neither.
    Files are extracted next to the archive.
    """
    # First attempt: treat the file as a tar archive.
    log.debug('Opening "%s" in tar archive format' % file)
    try:
        tar = tarfile.open(file, 'r')
    except tarfile.ReadError as e:
        log.debug('Failed to open "%s" in tar format, %s: %s' % (
            file,
            e.__class__.__name__,
            str(e)
        ))
    else:
        log.info('Extracting all files in "%s"' % file)
        tar.extractall(Path(file).parent)
        tar.close()
        return
    # Second attempt: treat the file as a zip archive.
    log.debug('Opening "%s" in zip archive format' % file)
    if not zipfile.is_zipfile(file):
        log.debug('File "%s" is not zip format' % file)
        return
    try:
        zip_file = zipfile.ZipFile(file)
    except zipfile.BadZipFile as e:
        log.debug('Failed to open "%s" in zip format, %s: %s' % (
            file,
            e.__class__.__name__,
            str(e)
        ))
    else:
        log.info('Extracting all files in "%s"' % file)
        zip_file.extractall(Path(file).parent)
        zip_file.close()
def archive_zip(downloaded_files, name):
    """Bundle every downloaded file into one zip archive named *name*.

    The archive is created next to the first downloaded file; each
    source file is deleted after being written into the archive.
    """
    first_path = list(downloaded_files.values())[0]
    zip_path = (first_path.parent / name)
    with zipfile.ZipFile(zip_path, 'w') as zip_writer:
        for file, path in downloaded_files.items():
            log.debug('Writing "%s" to "%s"' % (
                path,
                zip_path
            ))
            zip_writer.write(path)
            # The original file is no longer needed once archived.
            os.remove(path)
def build_pretty_list_log(iterable, word, spacing=4):
    """Render *iterable* as an indented, bracketed multi-line list.

    Produces ``word = [`` followed by one quoted entry per line
    (indented by *spacing* spaces) and a closing ``]``.
    """
    indent = ' ' * spacing
    lines = ['%s = [' % word]
    for context in iterable:
        lines.append('%s"%s",' % (indent, context))
    lines.append(']')
    return '\n'.join(lines)
import asyncio
import aiohttp
import requests
import asyncio
import logging
from typing import List, Dict
from .utils import extract_archived_file, build_pretty_list_log, archive_zip
from .errors import FileExpired
from .parser import finalization_info, parse_info
from .file import File
from .downloader import StdoutDownloader
from .network import Net
__all__ = (
'download', 'extract_info',
'download_coro', 'extract_info_coro',
'get_info', 'get_info_coro',
'download_stdout'
)
log = logging.getLogger(__name__)
def get_info(url) -> Dict[str, str]:
    """
    Get informations in Zippyshare url.

    This function will return raw information from given zippyshare url
    which in :class:`dict` data type. Normally you don't use this
    , this will be used in :meth:`extract_info` and :meth:`download`.
    """
    log.info('Grabbing required informations in %s' % url)
    log.debug('Establishing connection to Zippyshare.')
    resp = Net.requests.get(url)
    try:
        resp.raise_for_status()
    except requests.HTTPError as e:
        log.exception('Zippyshare send %s code' % resp.status_code)
        raise e from None
    log.debug('Successfully established connection to Zippyshare.')
    page = resp.text
    # Detect the two known "file gone" pages before parsing.
    log.debug('Checking if file is not expired')
    if 'File has expired and does not exist anymore on this server' in page:
        log.exception('File has expired and does not exist anymore')
        raise FileExpired('File has expired and does not exist anymore')
    log.debug('Checking if file is exist')
    if 'File does not exist on this server' in page:
        log.exception('File does not exist on this server')
        raise FileNotFoundError('File does not exist on this server')
    return finalization_info(parse_info(url, page))
async def get_info_coro(url) -> Dict[str, str]:
    """
    "Coroutine function"

    Get informations in Zippyshare url.

    This function will return raw information from given zippyshare url
    which in :class:`dict` data type. Normally you don't use this
    , this will be used in :meth:`extract_info_coro` and :meth:`download_coro`.
    """
    log.info('Grabbing required informations in %s' % url)
    log.debug('Establishing connection to Zippyshare.')
    resp = await Net.aiohttp.get(url)
    try:
        resp.raise_for_status()
    except aiohttp.ClientResponseError as e:
        log.exception('Zippyshare send %s code' % resp.status)
        raise e from None
    page = await resp.text()
    log.debug('Successfully established connection to Zippyshare.')
    # Detect the two known "file gone" pages before parsing.
    log.debug('Checking if file is not expired')
    if 'File has expired and does not exist anymore on this server' in page:
        log.exception('File has expired and does not exist anymore')
        raise FileExpired('File has expired and does not exist anymore')
    log.debug('Checking if file is exist')
    if 'File does not exist on this server' in page:
        log.exception('File does not exist on this server')
        raise FileNotFoundError('File does not exist on this server')
    return await finalization_info(parse_info(url, page), True)
def download(*urls, zip: str=None, unzip: bool=False, **kwargs) -> List[File]:
    """
    Download multiple zippyshare urls

    Parameters
    -----------
    *urls
        Zippyshare urls.
    zip: :class:`str`
        Zip all downloaded files once finished,
        using ``zip`` as the archive filename. Default to ``None``.
        NOTE: ``zip`` and ``unzip`` cannot be used together.
    unzip: :class:`bool`
        Unzip all downloaded files once finished
        (files that are not archives are ignored), default to ``False``.
        NOTE: ``zip`` and ``unzip`` cannot be used together.
    **kwargs
        These parameters will be passed to :meth:`File.download()`,
        except for parameter ``filename``.

    Returns
    -------
    List[:class:`File`]
        a list of Zippyshare files
    """
    if unzip and zip:
        raise ValueError("unzip and zip paramaters cannot be set together")
    # ``filename`` would apply to every url, so it is discarded.
    if kwargs.get('filename') is not None:
        kwargs.pop('filename')
    downloaded_files = {}
    files = []
    for url in urls:
        file = File(get_info(url))
        files.append(file)
        file_path = file.download(**kwargs)
        downloaded_files[file] = file_path
        if unzip:
            extract_archived_file(str(file_path))
    if zip:
        log.info(build_pretty_list_log(downloaded_files, 'Zipping all downloaded files to "%s"' % zip))
        archive_zip(downloaded_files, zip)
        log.info(build_pretty_list_log(downloaded_files, 'Successfully zip all downloaded files to "%s"' % zip))
    return files
def extract_info(url: str, download: bool=True, unzip: bool=False, **kwargs) -> File:
    """
    Extract all informations in Zippyshare url.

    Parameters
    ------------
    url: :class:`str`
        Zippyshare url.
    download: :class:`bool`
        Download given zippyshare url if ``True``,
        default to ``True``.
    unzip: :class:`bool`
        Unzip downloaded file once finished
        (if given file is zip or tar format extract it, otherwise ignore it),
        default to ``False``.
    **kwargs
        These parameters will be passed to :meth:`File.download()`

    Returns
    -------
    :class:`File`
        Zippyshare file
    """
    file = File(get_info(url))
    if not download:
        return file
    file_path = file.download(**kwargs)
    if unzip:
        extract_archived_file(str(file_path))
    return file
async def extract_info_coro(url: str, download: bool=True, unzip: bool=False, **kwargs) -> File:
    """
    "Coroutine Function"

    Extract all informations in Zippyshare url.

    Parameters
    ------------
    url: :class:`str`
        Zippyshare url.
    download: :class:`bool`
        Download given zippyshare url if ``True``,
        default to ``True``.
    unzip: :class:`bool`
        Unzip downloaded file once finished
        (if given file is zip or tar format extract it, otherwise ignore it),
        default to ``False``.
    **kwargs
        These parameters will be passed to :meth:`File.download_coro()`

    Returns
    -------
    :class:`File`
        Zippyshare file
    """
    info = await get_info_coro(url)
    file = File(info)
    if download:
        file_path = await file.download_coro(**kwargs)
        if unzip:
            # get_running_loop() is the supported way to get the loop from
            # inside a coroutine (get_event_loop() here is deprecated since
            # Python 3.10); it is also only needed on this branch.
            loop = asyncio.get_running_loop()
            # Archive extraction is blocking, so run it in a thread.
            await loop.run_in_executor(None, lambda: extract_archived_file(str(file_path)))
    return file
async def download_coro(*urls, zip: str=None, unzip: bool=False, **kwargs) -> List[File]:
    """
    "Coroutine Function"

    Download multiple zippyshare urls

    Parameters
    -----------
    *urls: :class:`str`
        Zippyshare urls.
    zip: :class:`str`
        Zip all downloaded files once finished.
        Zip filename will be taken from ``zip``,
        default to ``None``.
        NOTE: You can't mix ``zip`` and ``unzip`` options together
        with value ``True``, it will raise error.
    unzip: :class:`bool`
        Unzip all downloaded files once finished
        (if given file is zip format extract it, otherwise ignore it),
        default to ``False``.
        NOTE: You can't mix ``zip`` and ``unzip`` options together
        with value ``True``, it will raise error.
    **kwargs
        These parameters will be passed to :meth:`File.download_coro()`,
        except for parameter ``filename``.

    Returns
    -------
    List[:class:`File`]
        a list of Zippyshare files
    """
    if unzip and zip:
        raise ValueError("unzip and zip paramaters cannot be set together")
    # get_running_loop() is the supported way to get the loop from inside
    # a coroutine (get_event_loop() here is deprecated since Python 3.10).
    loop = asyncio.get_running_loop()
    downloaded_files = {}
    files = []
    # ``filename`` would apply to every url, so it is discarded.
    if kwargs.get('filename') is not None:
        kwargs.pop('filename')
    for url in urls:
        info = await get_info_coro(url)
        file = File(info)
        files.append(file)
        file_path = await file.download_coro(**kwargs)
        downloaded_files[file] = file_path
        if unzip:
            # Archive extraction is blocking, so run it in a thread.
            await loop.run_in_executor(None, lambda: extract_archived_file(str(file_path)))
    if zip:
        log.info(build_pretty_list_log(downloaded_files, 'Zipping all downloaded files to "%s"' % zip))
        await loop.run_in_executor(None, lambda: archive_zip(downloaded_files, zip))
        log.info(build_pretty_list_log(downloaded_files, 'Successfully zip all downloaded files to "%s"' % zip))
    return files
def download_stdout(url):
    """Extract zippyshare download url and then download its content to stdout

    Warning
    --------
    This will print all its content to stdout,
    if you are not intend to use this for piping the content to media player (like vlc),
    then DO NOT DO THIS.

    Example usage (Command-line)

    .. code-block:: shell

        # Let's say you want watching videos with vlc from zippyshare
        # this can be done with piping the stdout from zippyshare-dl
        $ zippyshare-dl "insert zippyshare url here" -pipe | vlc -

        # or (for Linux / Mac OS)
        $ python3 -m zippyshare_downloader "insert zippyshare url here" -pipe | vlc -

        # or (for Windows)
        $ py -3 -m zippyshar_downloader "insert zippyshare url here" -pipe | vlc -
    """
    # Resolve the direct download url without downloading to disk,
    # then stream the payload straight to stdout.
    file = extract_info(url, download=False)
    StdoutDownloader(file.download_url).download()
import requests
import aiohttp
import asyncio
__all__ = (
'Net', 'NetworkObject',
'set_proxy', 'clear_proxy'
)
# Modified requests session class with __del__ handler
# so the session will be closed properly
class requestsProxiedSession(requests.Session):
    """A ``requests.Session`` that closes itself when garbage-collected."""
    def __init__(self, trust_env=True) -> None:
        super().__init__()
        # When True, proxy settings may also be read from environment variables.
        self.trust_env = trust_env

    def __del__(self):
        # Best-effort cleanup: release the connection pool on GC.
        self.close()
# Because aiohttp doesn't support proxy from session
# we need to subclass it to proxy each requests without
# add "proxy" parameter to each requests
class aiohttpProxiedSession(aiohttp.ClientSession):
    """An ``aiohttp.ClientSession`` that applies one proxy to every request.

    aiohttp has no session-wide proxy setting, so ``_request`` is overridden
    to inject the ``proxy`` keyword into each outgoing request.
    """
    def __init__(self, proxy, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Proxy url applied to every request; ``None`` means no proxy.
        self.proxy = proxy

    def set_proxy(self, proxy):
        self.proxy = proxy

    def remove_proxy(self):
        self.proxy = None

    async def _request(self, *args, **kwargs):
        # Note: this overwrites any per-request ``proxy`` argument the
        # caller may have supplied.
        kwargs.update(proxy=self.proxy)
        return await super()._request(*args, **kwargs)
# This improvement comes from https://github.com/mansuf/mangadex-downloader/blob/v0.3.0/mangadex_downloader/network.py#L259-L372
# soon this will be separated module
class NetworkObject:
    """Lazily-constructed holder for shared ``requests`` and ``aiohttp``
    sessions with a single proxy configuration applied to both.
    """
    def __init__(self, proxy=None, trust_env=False) -> None:
        self._proxy = proxy
        self._aiohttp = None # type: aiohttpProxiedSession
        self._trust_env = trust_env

        # Created on first access via the ``requests`` property.
        self._requests = None

    @property
    def proxy(self):
        """Return HTTP/SOCKS proxy, return ``None`` if not configured"""
        return self._proxy

    @proxy.setter
    def proxy(self, proxy):
        self.set_proxy(proxy)

    @property
    def trust_env(self):
        """Return ``True`` if http/socks proxy are grabbed from env"""
        return self._trust_env

    @trust_env.setter
    def trust_env(self, yes):
        self._trust_env = yes
        if self._aiohttp:
            self._aiohttp._trust_env = yes
        if self._requests:
            self._requests.trust_env = yes

    def is_proxied(self):
        """Return ``True`` if requests/aiohttp from :class:`NetworkObject`
        are configured using proxy.
        """
        return self.proxy is not None

    def set_proxy(self, proxy):
        """Setup HTTP/SOCKS proxy for aiohttp/requests"""
        if not proxy:
            # A falsy proxy means "remove the proxy". Return here so the
            # falsy value is not immediately re-applied below (previously
            # this left requests with {'http': None, 'https': None}).
            self.clear_proxy()
            return

        self._proxy = proxy
        if self._requests:
            self._update_requests_proxy(proxy)
        if self._aiohttp:
            self._update_aiohttp_proxy(proxy)

    def clear_proxy(self):
        """Remove all proxy from aiohttp/request and disable environments proxy"""
        self._proxy = None
        self._trust_env = False
        if self._requests:
            self._requests.proxies.clear()
            self._requests.trust_env = False
        if self._aiohttp:
            self._aiohttp.remove_proxy()
            self._aiohttp._trust_env = False

    def _update_aiohttp_proxy(self, proxy):
        if self._aiohttp:
            self._aiohttp.set_proxy(proxy)
            self._aiohttp._trust_env = self._trust_env

    @property
    def aiohttp(self):
        """Return proxied aiohttp (if configured)"""
        self._create_aiohttp()
        return self._aiohttp

    def _update_requests_proxy(self, proxy):
        if self._requests:
            pr = {
                'http': proxy,
                'https': proxy
            }
            self._requests.proxies.update(pr)
            self._requests.trust_env = self._trust_env

    def _create_requests(self):
        if self._requests is None:
            self._requests = requestsProxiedSession(self._trust_env)
            self._update_requests_proxy(self.proxy)

    @property
    def requests(self):
        """Return proxied requests (if configured)"""
        self._create_requests()
        return self._requests

    def _create_aiohttp(self):
        # The aiohttp session is bound to the loop it was created on;
        # only create it when an event loop context is available.
        loop = asyncio.get_event_loop()

        # Raise error if using in another thread
        if self._aiohttp and self._aiohttp._loop != loop:
            raise RuntimeError('created aiohttp session cannot be used in different thread')

        if self._aiohttp is None:
            self._aiohttp = aiohttpProxiedSession(self.proxy)
            self._update_aiohttp_proxy(self.proxy)

    def close(self):
        """Close requests session only"""
        # Guard: the session may never have been created.
        if self._requests:
            self._requests.close()
        self._requests = None

    async def close_async(self):
        """Close aiohttp & requests session"""
        self.close()
        # Guard: previously this raised AttributeError when no aiohttp
        # session had been created yet.
        if self._aiohttp and not self._aiohttp.closed:
            await self._aiohttp.close()
        self._aiohttp = None
# Shared module-level session holder used throughout the package.
Net = NetworkObject()

def set_proxy(proxy):
    """Setup HTTP/SOCKS proxy for aiohttp/requests

    This is shortcut for :meth:`NetworkObject.set_proxy`.
    """
    Net.set_proxy(proxy)

def clear_proxy():
    """Remove all proxy from aiohttp/requests

    This is shortcut for :meth:`NetworkObject.clear_proxy`.
    """
    Net.clear_proxy()
import logging
import urllib.parse
from bs4 import BeautifulSoup
from typing import Dict
from .patterns import PATTERNS
from .errors import ParserError
from .network import Net
log = logging.getLogger(__name__)
def parse_info(url, body_html) -> Dict[str, str]:
    """
    Parse required informations from request Zippyshare url.
    """
    parser = BeautifulSoup(body_html, 'html.parser')
    list_infos = []
    log.debug('Getting Name file, size, date upload.')
    # The info <font> tags are identified by their inline style attributes.
    interesting_prefixes = (
        '<font style="line-height:18px; font-size: 13px;">',  # size / date uploaded
        '<font style="line-height:22px; font-size: 14px;">',  # name
        '<font style="line-height:20px; font-size: 14px;">',  # name
    )
    for element in parser.find_all('font'):
        if str(element).startswith(interesting_prefixes):
            list_infos.append(element)
    log.debug('Getting download url.')
    # Try each parser variant until one succeeds.
    for pattern in PATTERNS:
        try:
            download_url = pattern(body_html, url)
        except Exception as e:
            log.debug('%s failed to get download url, %s: %s' % (
                pattern.__name__,
                e.__class__.__name__,
                str(e)
            ))
        else:
            log.debug('%s success to get download url' % pattern.__name__)
            return {
                "name_file": list_infos[0].decode_contents(),
                "size": list_infos[1].decode_contents(),
                "date_upload": list_infos[2].decode_contents(),
                'url': url,
                'download_url': download_url
            }
    log.exception('all patterns parser failed to get required informations')
    raise ParserError('all patterns parser is failed to get required informations')
def _get_absolute_filename(info):
    """Fetch the real filename from the Content-Disposition header and
    store it back into *info* (synchronous variant)."""
    resp = Net.requests.get(info['download_url'], stream=True)
    header = resp.headers['Content-Disposition']
    encoded_name = header.replace('attachment; filename*=UTF-8\'\'', '')
    info['name_file'] = urllib.parse.unquote(encoded_name)
    resp.close()
    return info
async def _get_absolute_filename_coro(info):
    """Fetch the real filename from the Content-Disposition header and
    store it back into *info* (asynchronous variant)."""
    resp = await Net.aiohttp.get(info['download_url'])
    header = resp.headers['Content-Disposition']
    encoded_name = header.replace('attachment; filename*=UTF-8\'\'', '')
    info['name_file'] = urllib.parse.unquote(encoded_name)
    resp.close()
    return info
async def __dummy_return(info):
    # Awaitable identity function: lets finalization_info() always hand back
    # an awaitable in async mode, even when no extra fetch is needed.
    return info
def finalization_info(info, _async=False) -> Dict[str, str]:
    """
    Fix if required informations contains invalid info.

    When the parsed filename is unusable (rendered as an image, or
    truncated because it is too long), an additional request is made to
    recover the real filename. With ``_async=True`` the return value is
    an awaitable.
    """
    name = info['name_file']
    needs_refetch = False
    if '<img alt="file name" src="/fileName?key' in name:
        # Fix https://github.com/mansuf/zippyshare-downloader/issues/4
        log.warning('Filename is in image not in text, running additional fetch...')
        needs_refetch = True
    elif len(name) > 70:
        # Fix https://github.com/mansuf/zippyshare-downloader/issues/5
        log.warning('Filename is too long, running additional fetch...')
        needs_refetch = True
    if needs_refetch:
        return _get_absolute_filename_coro(info) if _async else _get_absolute_filename(info)
    return __dummy_return(info) if _async else info
import math
import io
import re
from bs4 import BeautifulSoup
from .errors import *
from .utils import evaluate, getStartandEndvalue
__all__ = (
'pattern1', 'pattern2', 'pattern3',
'pattern4', 'pattern5',
'PATTERNS'
)
# Determine html parser
# use lxml if installed for speed
try:
import lxml
except ImportError:
bs4_parser = 'html.parser'
else:
bs4_parser = 'lxml'
def pattern1(body_string, url):
    """Derive the direct download url from the 'dlbutton' script (variant 1).

    Handles pages where the random path segment is computed from an ``omg``
    attribute on the download button plus two numeric script variables
    ``a`` and ``b``. Raises :class:`ParserError` when any required piece
    cannot be located in the page.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            scrapped_script = script.decode_contents()
            break
    else:
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    # Finding omg attribute value in dlbutton element
    elements = io.StringIO(scrapped_script).readlines()
    omg_element = 'document.getElementById(\'dlbutton\').omg = '
    for element in elements:
        e = element.strip()
        if e.startswith(omg_element):
            omg = e.replace(omg_element, '').replace('"', '').replace(';', '')
            break
    else:
        omg = None
    if omg is None:
        raise ParserError('omg attribute in download button javascript cannot be found')
    # Finding uncompiled Random Number between FileID and Filename
    # http://www.zippyshare.com/d/{FileID}/uncompiled_number/{Filename}
    startpos_init = scrapped_script.find('document.getElementById(\'dlbutton\').href')
    scrapped_init = scrapped_script[startpos_init:]
    endpos_init = scrapped_init.find(';')
    scrapped = scrapped_init[:endpos_init]
    element_value = scrapped.replace('document.getElementById(\'dlbutton\').href = ', '')
    # url_download_init: path prefix before the computed number;
    # uncompiled_number: the "a ... b" arithmetic expression still in JS form.
    url_download_init = getStartandEndvalue(element_value, '"')
    uncompiled_number = getStartandEndvalue(element_value, '(', ')')
    # Finding Random Number variable a in scrapped_script
    variables = io.StringIO(scrapped_script).readlines()
    for var in variables:
        if var.strip().startswith('var a = '):
            a = var.strip().replace('var a = ', '').replace(';', '')
            break
    else:
        a = None
    if a is None:
        raise ParserError('variable a in download button javascript cannot be found')
    # Finding Random Number variable b in scrapped_script
    variables = io.StringIO(scrapped_script).readlines()
    for var in variables:
        if var.strip().startswith('var b = '):
            b = var.strip().replace('var b = ', '').replace(';', '')
            break
    else:
        b = None
    if b is None:
        raise ParserError('variable b in download button javascript cannot be found')
    # The omg flag selects which rounding the page's own JS would apply.
    if omg != 'f':
        random_number = uncompiled_number.replace('a', str(math.ceil(int(a)/3))).replace('b', b)
    else:
        random_number = uncompiled_number.replace('a', str(math.floor(int(a)/3))).replace('b', b)
    # Now using self.evaluate() to safely do math calculations
    url_number = str(evaluate(random_number))
    continuation_download_url_init = getStartandEndvalue(element_value, '(')
    continuation_download_url = continuation_download_url_init[continuation_download_url_init.find('"')+1:]
    return url[:url.find('.')] + '.zippyshare.com' + url_download_init + url_number + continuation_download_url
def pattern2(body_string, url):
    """Derive the direct download url from the 'dlbutton' script (variant 2).

    Handles pages where the random path segment is a plain arithmetic
    expression with no external variables. Bails out with
    :class:`ParserError` when more than one dlbutton script is present
    (a known decoy layout) or required pieces are missing.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    # Make sure we don't find the fake one
    duplicates = []
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            duplicates.append(1)
    if len(duplicates) > 1:
        raise ParserError("found duplicate script tag, pattern2 can't handle it")
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            scrapped_script = script.decode_contents()
            break
    else:
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    # Finding uncompiled Random Number between FileID and Filename
    # http://www.zippyshare.com/d/{FileID}/uncompiled_number/{Filename}
    startpos_init = scrapped_script.find('document.getElementById(\'dlbutton\').href')
    scrapped_init = scrapped_script[startpos_init:]
    endpos_init = scrapped_init.find(';')
    scrapped = scrapped_init[:endpos_init]
    element_value = scrapped.replace('document.getElementById(\'dlbutton\').href = ', '')
    url_download_init = getStartandEndvalue(element_value, '"')
    random_number = getStartandEndvalue(element_value, '(', ')')
    # Now using self.evaluate() to safely do math calculations
    url_number = str(evaluate(random_number))
    continuation_download_url_init = getStartandEndvalue(element_value, '(')
    continuation_download_url = continuation_download_url_init[continuation_download_url_init.find('"')+1:]
    return url[:url.find('.')] + '.zippyshare.com' + url_download_init + url_number + continuation_download_url
def pattern3(body_string, url):
    """Derive the direct download url from the 'dlbutton' script (variant 3).

    Handles pages where the random path segment is an expression over
    single-letter script variables (``a + b + c - N``); the variables are
    substituted with their numeric values before safe evaluation.
    Raises :class:`ParserError` when required pieces are missing.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            scrapped_script = script.decode_contents()
            break
    else:
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    scripts = io.StringIO(scrapped_script).readlines()
    _vars = {}
    init_url = None
    numbers_pattern = None
    file_url = None
    for script in scripts:
        # Finding variables that contain numbers
        re_var = re.compile(r'(var ([a-zA-Z]) = )([0-9%]{1,})(;)')
        found = re_var.search(script)
        if found:
            _name = found.group(2)
            _value = found.group(3)
            _vars[_name] = _value
        # Finding url download button
        if script.strip().startswith('document.getElementById(\'dlbutton\').href'):
            string_re_dlbutton = r'(document\.getElementById\(\'dlbutton\'\)\.href = \")' \
                '(\/[a-zA-Z]\/[a-zA-Z0-9]{1,}\/)\"\+' \
                '(\([a-zA-Z] \+ [a-zA-Z] \+ [a-zA-Z] - [0-9]\))\+\"(\/.{1,})\";'
            re_dlbutton = re.compile(string_re_dlbutton)
            result = re_dlbutton.search(script)
            if result:
                init_url = result.group(2)
                numbers_pattern = result.group(3)
                file_url = result.group(4)
            else:
                raise ParserError('Invalid regex pattern when finding url dlbutton')
    if not _vars:
        raise ParserError('Cannot find required variables in dlbutton script')
    else:
        # Substitute each single-letter variable with its numeric value.
        for var_name, var_value in _vars.items():
            numbers_pattern = numbers_pattern.replace(var_name, var_value)
    final_numbers = str(evaluate(numbers_pattern))
    return url[:url.find('.')] + '.zippyshare.com' + init_url + final_numbers + file_url
def pattern4(body_string, url):
    """Derive the direct download url from the 'dlbutton' script (variant 4).

    Handles pages where the random path segment is
    ``Math.pow(x, y) + b`` with ``b`` being the length of a substring of
    the button's ``omg`` attribute. Raises :class:`ParserError` when any
    required piece cannot be located.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            scrapped_script = script.decode_contents()
            break
    else:
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    # Finding omg attribute value in dlbutton element
    elements = io.StringIO(scrapped_script).readlines()
    omg_element = 'document.getElementById(\'dlbutton\').omg = '
    for element in elements:
        e = element.strip()
        if e.startswith(omg_element):
            omg = e.replace(omg_element, '').replace('"', '').replace(';', '')
            break
    else:
        omg = None
    if omg is None:
        raise ParserError('omg attribute in download button javascript cannot be found')
    # Emulate .substr() function
    substr_re = r'.substr\((?P<start>[0-9]), (?P<length>[0-9])\)'
    substr = re.search(substr_re, omg)
    if not substr:
        raise ParserError(".substr() function cannot be found")
    substr_start = substr.group('start')
    substr_length = substr.group('length')
    # NOTE(review): JS .substr(start, length) is value[start:start+length];
    # this slice uses [start:length] and is only equivalent when start == 0 —
    # confirm against real pages before changing.
    substr_value = re.sub(substr_re, '', omg)[int(substr_start):int(substr_length)]
    scripts = io.StringIO(scrapped_script).readlines()
    _vars = {}
    init_url = None
    math_func = None
    file_url = None
    for script in scripts:
        # Finding variables that contain numbers
        re_var = re.compile(r'(var ([a-zA-Z]) = )([0-9]{1,})(;)')
        found = re_var.search(script)
        if found:
            _name = found.group(2)
            _value = found.group(3)
            # NOTE(review): _value matched [0-9]+ above, so this branch
            # looks unreachable; kept as-is pending confirmation.
            if _value.startswith('document'):
                continue
            _vars[_name] = _value
        # Finding url download button
        if script.strip().startswith('document.getElementById(\'dlbutton\').href'):
            string_re_dlbutton = r'(document\.getElementById\(\'dlbutton\'\)\.href = \")(\/[a-zA-Z]\/[a-zA-Z0-9]{1,}\/)\"\+(\(Math\.pow\([a-zA-Z], [0-9]\)\+[a-zA-Z]\))\+\"(\/.{1,})\";'
            re_dlbutton = re.compile(string_re_dlbutton)
            result = re_dlbutton.search(script)
            if result:
                init_url = result.group(2)
                math_func = result.group(3)
                file_url = result.group(4)
            else:
                raise ParserError('Invalid regex pattern when finding url dlbutton')
    re_math_pow = r'\(Math\.pow\((?P<x>[a-zA-Z]), (?P<y>[0-9]{1,})\)\+[a-zA-Z]\)'
    x_y_math_pow = re.search(re_math_pow, math_func)
    if not x_y_math_pow:
        raise ParserError("Math.pow() cannot be found")
    # Resolve the base variable to its numeric value, keep the exponent.
    x = x_y_math_pow.group('x')
    x = x.replace(x, _vars[x])
    y = x_y_math_pow.group('y')
    b = len(substr_value)
    final_numbers = int(math.pow(int(x), int(y)) + b)
    return url[:url.find('.')] + '.zippyshare.com' + init_url + str(final_numbers) + file_url
def pattern5(body_string, url):
    """Resolve the direct download URL for zippyshare page layout variant 5.

    This variant hides the numeric segment of the download URL behind a
    chain of small javascript functions and a hidden ``<span id="omg">``
    element whose first CSS class carries an obfuscated value.

    body_string -- raw HTML of the zippyshare file page
    url         -- the original page URL (used to rebuild the host part)

    Returns the reconstructed direct download URL.
    Raises ParserError when any expected javascript fragment is missing.
    NOTE(review): relies on module-level names (BeautifulSoup, bs4_parser,
    ParserError, evaluate, io, re) defined elsewhere in this file.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    for script in parser.find_all('script'):
        if 'document.getElementById(\'dlbutton\').href' in script.decode_contents():
            scrapped_script = script.decode_contents()
            break
    else:
        # for/else: no <script> tag mentioned the download button
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    omg_element = parser.find('span', {'id': 'omg'})
    if omg_element is None:
        raise ParserError("Cannot find span element with id='omg'")
    # First CSS class of the hidden span is the obfuscated value used below.
    omg_value = omg_element.attrs['class'][0]
    scripts = io.StringIO(scrapped_script).readlines()
    init_url = None
    numbers_pattern = None
    file_url = None
    for script in scripts:
        # Finding url download button
        if script.strip().startswith('document.getElementById(\'dlbutton\').href'):
            # Groups: 1=assignment prefix, 2=initial path, 3=numeric
            # expression, 4=trailing file path.
            string_re_dlbutton = r'(document\.getElementById\(\'dlbutton\'\)\.href = \")' \
                '(\/[a-zA-Z]\/[a-zA-Z0-9]{1,}\/)\"\+' \
                '(\([0-9a-zA-Z%+()\/ ]{1,}\))\+\"(\/.{1,})\";'
            re_dlbutton = re.compile(string_re_dlbutton)
            result = re_dlbutton.search(script)
            if result:
                init_url = result.group(2)
                numbers_pattern = result.group(3)
                file_url = result.group(4)
            else:
                raise ParserError('Invalid regex pattern when finding url dlbutton')
    _vars = {}
    re_init_func = r'var (?P<var_name>[a-zA-Z]) = function\(\) \{return (?P<var_value>[0-9]{1,})\};'
    # Find a function with return statement only
    for script in scripts:
        init_func = re.search(re_init_func, script)
        if init_func is None:
            continue
        init_func_value = init_func.group('var_value')
        init_func_name = init_func.group('var_name')
        _vars[init_func_name] = init_func_value
    if not _vars:
        raise ParserError("Cannot find function with return statement only")
    # Find functions with return value "some_func() + some_numbers"
    found_eval_func = False
    re_eval_func = r'var (?P<var_name>[a-zA-Z]) = function\(\) \{return (?P<var_func_name>[a-zA-Z])\(\) (?P<operator>[+-/*]) (?P<var_const_value>[0-9]{1,})\};'
    for script in scripts:
        eval_func = re.search(re_eval_func, script)
        if eval_func is None:
            continue
        var_name = eval_func.group('var_name')
        func_name = eval_func.group('var_func_name')
        const_value = eval_func.group('var_const_value')
        operator = eval_func.group('operator')
        try:
            eval_func_value = _vars[func_name]
        except KeyError:
            # Failed here
            raise ParserError('Failed to find function with return value "some_func() + some_numbers"') from None
        _vars[var_name] = str(evaluate("{0} {1} {2}".format(
            eval_func_value,
            operator,
            const_value
        )))
        found_eval_func = True
    if not found_eval_func:
        # Fail here, do not continue
        raise ParserError('Failed to find function with return value "some_func() + some_numbers"')
    # Find the next var containing omg element
    re_var_omg = r"var (?P<var_name>[a-zA-Z]) = document\.getElementById\('omg'\)"
    found_omg_var = False
    for script in scripts:
        var_omg = re.search(re_var_omg, script)
        if var_omg is None:
            continue
        var_name = var_omg.group('var_name')
        _vars[var_name] = omg_value
        found_omg_var = True
        break
    if not found_omg_var:
        raise ParserError("Failed to find omg variable")
    # Final
    re_eval_var = r'if \((?P<boolean>true|false)\) { (?P<var_name>[a-zA-Z]) = (?P<expression>[a-zA-Z0-9*+/-]{1,});}'
    found_eval_var = False
    for script in scripts:
        eval_var = re.search(re_eval_var, script)
        if eval_var is None:
            continue
        init_bool_value = eval_var.group('boolean')
        var_name = eval_var.group('var_name')
        expression = eval_var.group('expression')
        bool_value = True if init_bool_value == "true" else False
        found_eval_var = True
        if bool_value:
            # Substitute the variable's current value into the expression
            # before evaluating it.
            final_expr = expression.replace(var_name, _vars[var_name])
            _vars[var_name] = str(evaluate(final_expr))
    if not found_eval_var:
        raise ParserError("Failed to find if (true||false) statement")
    # Substitute every collected variable into the numeric expression,
    # strip parentheses and evaluate it to get the URL's number segment.
    for var_name, var_value in _vars.items():
        numbers_pattern = numbers_pattern.replace(var_name, var_value)
    numbers_pattern = re.sub(r'[()]', '', numbers_pattern)
    final_numbers = str(int(evaluate(numbers_pattern)))
    # Rebuild: <wwwNN>.zippyshare.com + /<letter>/<id>/ + numbers + /<filename>
    return url[:url.find('.')] + '.zippyshare.com' + init_url + final_numbers + file_url
def pattern6(body_string, url):
    """Resolve the direct download URL for zippyshare page layout variant 6.

    This variant seeds a plain numeric variable, assigns an ``omg``
    attribute on the download button, re-evaluates it with parseInt and an
    arithmetic expression, and finally selects a branch of an
    ``if (true|false)`` javascript function.

    body_string -- raw HTML of the zippyshare file page
    url         -- the original page URL (used to rebuild the host part)

    Returns the reconstructed direct download URL.
    Raises ParserError when any expected javascript fragment is missing.
    NOTE(review): relies on module-level names (BeautifulSoup, bs4_parser,
    ParserError, evaluate, io, re) defined elsewhere in this file.
    """
    # Getting download button javascript code
    parser = BeautifulSoup(body_string, bs4_parser)
    for script in parser.find_all('script'):
        decoded = script.decode_contents()
        first_re_match = re.search(r"var ([a-zA-Z]{1,}) = ([0-9]{1,});", decoded)
        if 'document.getElementById(\'dlbutton\').href' in decoded and first_re_match:
            scrapped_script = script.decode_contents()
            break
    else:
        # for/else: no matching <script> tag found
        scrapped_script = None
    if scrapped_script is None:
        raise ParserError('download button javascript cannot be found')
    _vars = {}
    # Find variable with numbers value
    re_var_number = re.search(r"var ([a-zA-Z]{1,}) = ([0-9]{1,});", scrapped_script)
    if re_var_number is None:
        raise ParserError("variable with numbers only cannot be found")
    var, value = re_var_number.group(1), re_var_number.group(2)
    _vars[var] = value
    # omg element
    re_omg = r"document\.getElementById\('dlbutton'\)\.omg = (?P<expr>.{1,});"
    result = re.search(re_omg, scrapped_script)
    if result is None:
        raise ParserError("omg element in scrapped script cannot be found")
    omg = result.group("expr")
    # Evaluate the omg element
    re_eval_omg = r"var (?P<var>[a-zA-Z]) = parseInt\(document\.getElementById\('dlbutton'\)\.omg\) " \
        r"(?P<operator>[%*+-/]{1}) (?P<value>\([0-9%*+-/]{1,}\));"
    result = re.search(re_eval_omg, scrapped_script)
    if result is None:
        raise ParserError("evaluate omg element in scrapped script cannot be found")
    var, value = result.group("var"), result.group("value")
    _vars[var] = evaluate("{0} {1} {2}".format(
        omg,
        result.group("operator"),
        value
    ))
    # Some function with conditional if
    re_func = r"var (?P<var>[a-zA-Z]) = function\(\) \{if \((?P<bool>false|true)\) " \
        r"\{return (?P<bool_true>.{1,})\} else \{return (?P<bool_false>.{1,})\}\};"
    result = re.search(re_func, scrapped_script)
    if result is None:
        raise ParserError("cannot find function with if <bool> statement")
    var, bool_value = result.group("var"), result.group("bool")
    # Pick the branch that the literal true/false selects.
    expr = result.group("bool_{0}".format(bool_value))
    for var_name, var_value in _vars.items():
        expr = expr.replace(var_name, str(var_value))
    try:
        _vars[var] = evaluate(expr)
    except ZeroDivisionError:
        # wtf, ZeroDivisionError ?
        pass
    scripts = io.StringIO(scrapped_script).readlines()
    init_url = None
    numbers_pattern = None
    file_url = None
    for script in scripts:
        # Finding url download button
        if script.strip().startswith('document.getElementById(\'dlbutton\').href'):
            # Named groups: init_url (path prefix), num_expr (numeric
            # expression), file (trailing file path).
            string_re_dlbutton = r"(?P<element>document\.getElementById\(\'dlbutton\'\)\.href = \")" \
                r"(?P<init_url>\/[a-zA-Z]\/[a-zA-Z0-9]{1,}\/)\"\+" \
                r"(?P<num_expr>\([0-9a-zA-Z%*+-/()\/ ]{1,}\))\+\"(?P<file>\/.{1,})\";"
            re_dlbutton = re.compile(string_re_dlbutton)
            result = re_dlbutton.search(script)
            if result:
                init_url = result.group("init_url")
                numbers_pattern = result.group("num_expr")
                file_url = result.group("file")
            else:
                raise ParserError('Invalid regex pattern when finding url dlbutton')
    # Substitute every collected variable into the numeric expression and
    # evaluate it to produce the number segment of the download URL.
    for var_name, var_value in _vars.items():
        numbers_pattern = numbers_pattern.replace(var_name, str(var_value))
    final_numbers = str(evaluate(numbers_pattern))
    # Rebuild: <wwwNN>.zippyshare.com + /<letter>/<id>/ + numbers + /<filename>
    return url[:url.find('.')] + '.zippyshare.com' + init_url + final_numbers + file_url
# Ordered list of parser strategies; callers try each pattern in turn until
# one succeeds in extracting a direct download URL from the page HTML.
PATTERNS = [
    pattern1,
    pattern2,
    pattern3,
    pattern4,
    pattern5,
    pattern6
]
import asyncio
import tqdm
import os
import sys
import time
import logging
from .network import Net
log = logging.getLogger(__name__)
# re.compile('bytes=([0-9]{1,}|)-([0-9]{1,}|)', re.IGNORECASE)
class BaseDownloader:
    """Abstract interface that every downloader implementation must satisfy."""

    def download(self):
        """Fetch the remote file. Subclasses must override this method."""
        raise NotImplementedError

    def cleanup(self):
        """Release held resources (e.g. sessions or progress bars)."""
        raise NotImplementedError
class FileDownloader(BaseDownloader):
    """Synchronous downloader (requests-based) with resume support.

    Data is staged in "<file>.temp" and renamed to the real filename when
    the download finishes; an existing .temp file is resumed via an HTTP
    Range request.
    """
    def __init__(self, url, file, progress_bar=True, replace=False, **headers) -> None:
        self.url = url
        # Download target; data is staged in a ".temp" file first.
        self.file = str(file) + '.temp'
        self.real_file = file
        self.progress_bar = progress_bar
        self.replace = replace
        self.headers_request = headers
        # A caller-supplied Range header would clash with the Range header
        # that the resume logic computes from the partial file size.
        if headers.get('Range') is not None and self._get_file_size(self.file):
            raise ValueError('"Range" header is not supported while in resume state')
        self._tqdm = None
    def _build_progres_bar(self, initial_size, file_sizes, desc='file_sizes'):
        """Create the tqdm progress bar (no-op when progress_bar is False)."""
        if self.progress_bar:
            kwargs = {
                'initial': initial_size or 0,
                'total': file_sizes,
                'unit': 'B',
                'unit_scale': True
            }
            # Determine ncols progress bar
            length = len(desc)
            if length < 20:
                kwargs.setdefault('ncols', 80)
            elif length > 20 and length < 50:
                kwargs.setdefault('dynamic_ncols', True)
            # Length desc is more than 40 or 50
            elif length >= 50:
                desc = desc[:20] + '...'
                kwargs.setdefault('ncols', 90)
            kwargs.setdefault('desc', desc)
            self._tqdm = tqdm.tqdm(**kwargs)
    def _update_progress_bar(self, n):
        # Advance the bar by n bytes, if a bar exists.
        if self._tqdm:
            self._tqdm.update(n)
    def _get_file_size(self, file):
        """Return the size of ``file`` in bytes, or None when it is absent."""
        if os.path.exists(file):
            return os.path.getsize(file)
        else:
            return None
    def _parse_headers(self, initial_sizes):
        """Build request headers, adding a Range header when resuming."""
        headers = self.headers_request or {}
        if initial_sizes:
            headers['Range'] = 'bytes=%s-' % initial_sizes
        return headers
    def download(self):
        """Download self.url into the staging file, then rename it.

        Resumes from an existing .temp file; skips the download entirely
        when the final file already exists with the expected size and
        ``replace`` is False.
        """
        initial_file_sizes = self._get_file_size(self.file)
        # Parse headers
        headers = self._parse_headers(initial_file_sizes)
        # Initiate request
        resp = Net.requests.get(self.url, headers=headers, stream=True)
        # Grab the file sizes
        # NOTE(review): float(None) raises TypeError if the server omits the
        # Content-Length header — TODO confirm this is acceptable upstream.
        file_sizes = float(resp.headers.get('Content-Length'))
        # If "Range" header request is present
        # Content-Length header response is not same as full size
        if initial_file_sizes:
            file_sizes += initial_file_sizes
        real_file_sizes = self._get_file_size(self.real_file)
        if real_file_sizes:
            if file_sizes == real_file_sizes and not self.replace:
                log.info('File exist and replace is False, cancelling download...')
                return
        # Build the progress bar
        self._build_progres_bar(initial_file_sizes, float(file_sizes))
        # Heavily adapted from https://github.com/choldgraf/download/blob/master/download/download.py#L377-L390
        chunk_size = 2 ** 16
        with open(self.file, 'ab' if initial_file_sizes else 'wb') as writer:
            while True:
                t0 = time.time()
                chunk = resp.raw.read(chunk_size)
                dt = time.time() - t0
                # Adaptive chunking: grow on fast reads, shrink on slow ones.
                if dt < 0.005:
                    chunk_size *= 2
                elif dt > 0.1 and chunk_size > 2 ** 16:
                    chunk_size = chunk_size // 2
                if not chunk:
                    break
                writer.write(chunk)
                self._update_progress_bar(len(chunk))
        # Delete original file if replace is True and real file is exist
        if real_file_sizes and self.replace:
            os.remove(self.real_file)
        os.rename(self.file, self.real_file)
    def cleanup(self):
        # Close the progress bar
        if self._tqdm:
            self._tqdm.close()
class StdoutDownloader(BaseDownloader):
    """Downloader that streams the response body straight to stdout.

    Useful for piping media content into a player such as vlc.
    """

    def __init__(self, url) -> None:
        self.url = url

    def download(self):
        """Stream the remote file to standard output in 1 KiB chunks."""
        response = Net.requests.get(self.url, stream=True)
        out = open(sys.stdout.fileno(), 'wb')
        for chunk in response.iter_content(1024):
            out.write(chunk)

    def cleanup(self):
        # Nothing to release; stdout is deliberately left open.
        pass
class AsyncFileDownloader(BaseDownloader):
    """FileDownloader for async process using aiohttp with resumeable support"""
    # Async twin of FileDownloader: same staging/resume scheme, but the
    # request and the chunk reads go through aiohttp and are awaited.
    def __init__(self, url, file, progress_bar=True, replace=False, **headers) -> None:
        self.url = url
        # Download target; data is staged in a ".temp" file first.
        self.file = str(file) + '.temp'
        self.real_file = file
        self.progress_bar = progress_bar
        self.replace = replace
        self.headers_request = headers
        # A caller-supplied Range header would clash with the Range header
        # that the resume logic computes from the partial file size.
        if headers.get('Range') is not None and self._get_file_size(self.file):
            raise ValueError('"Range" header is not supported while in resume state')
        self._tqdm = None
    def _build_progres_bar(self, initial_size, file_sizes, desc='file_sizes'):
        """Create the tqdm progress bar (no-op when progress_bar is False)."""
        if self.progress_bar:
            kwargs = {
                'initial': initial_size or 0,
                'total': file_sizes,
                'unit': 'B',
                'unit_scale': True
            }
            # Determine ncols progress bar
            length = len(desc)
            if length < 20:
                kwargs.setdefault('ncols', 80)
            elif length > 20 and length < 50:
                kwargs.setdefault('dynamic_ncols', True)
            # Length desc is more than 40 or 50
            elif length >= 50:
                desc = desc[:20] + '...'
                kwargs.setdefault('ncols', 90)
            kwargs.setdefault('desc', desc)
            self._tqdm = tqdm.tqdm(**kwargs)
    def _update_progress_bar(self, n):
        # Advance the bar by n bytes, if a bar exists.
        if self._tqdm:
            self._tqdm.update(n)
    def _get_file_size(self, file):
        """Return the size of ``file`` in bytes, or None when it is absent."""
        if os.path.exists(file):
            return os.path.getsize(file)
        else:
            return None
    def _parse_headers(self, initial_sizes):
        """Build request headers, adding a Range header when resuming."""
        headers = self.headers_request or {}
        if initial_sizes:
            headers['Range'] = 'bytes=%s-' % initial_sizes
        return headers
    async def download(self):
        """Asynchronously download self.url into the staging file and rename.

        Resumes from an existing .temp file; skips the download entirely
        when the final file already exists with the expected size and
        ``replace`` is False.
        """
        initial_file_sizes = self._get_file_size(self.file)
        # Parse headers
        headers = self._parse_headers(initial_file_sizes)
        # Initiate request
        resp = await Net.aiohttp.get(self.url, headers=headers)
        # Grab the file sizes
        # NOTE(review): float(None) raises TypeError if the server omits the
        # Content-Length header — TODO confirm this is acceptable upstream.
        file_sizes = float(resp.headers.get('Content-Length'))
        # If "Range" header request is present
        # Content-Length header response is not same as full size
        if initial_file_sizes:
            file_sizes += initial_file_sizes
        real_file_sizes = self._get_file_size(self.real_file)
        if real_file_sizes:
            if file_sizes == real_file_sizes and not self.replace:
                log.info('File exist and replace is False, cancelling download...')
                return
        # Build the progress bar
        self._build_progres_bar(initial_file_sizes, float(file_sizes))
        # Heavily adapted from https://github.com/choldgraf/download/blob/master/download/download.py#L377-L390
        chunk_size = 2 ** 16
        with open(self.file, 'ab' if initial_file_sizes else 'wb') as writer:
            while True:
                t0 = time.time()
                chunk = await resp.content.read(chunk_size)
                dt = time.time() - t0
                # Adaptive chunking: grow on fast reads, shrink on slow ones.
                if dt < 0.005:
                    chunk_size *= 2
                elif dt > 0.1 and chunk_size > 2 ** 16:
                    chunk_size = chunk_size // 2
                if not chunk:
                    break
                writer.write(chunk)
                self._update_progress_bar(len(chunk))
        # Delete original file if replace is True and real file is exist
        if real_file_sizes and self.replace:
            os.remove(self.real_file)
        os.rename(self.file, self.real_file)
    async def cleanup(self):
        # Close the progress bar
        if self._tqdm:
            self._tqdm.close()
class AsyncFastFileDownloader(BaseDownloader):
    """FAST FileDownloader with 2 connections simultaneously for async process using aiohttp with resumeable support"""
    # Splits the file into two byte ranges, downloads them concurrently
    # into "<file>.temp.1" / "<file>.temp.2", then merges the parts into
    # the real file and removes the temp parts.
    def __init__(self, url, file, progress_bar=True, replace=False, **headers) -> None:
        self.url = url
        self.real_file = file
        self.progress_bar = progress_bar
        self.replace = replace
        self.headers_request = headers
        # Fast mode owns the Range header entirely (one per connection), so
        # a caller-supplied Range header is rejected outright.
        if headers.get('Range') is not None:
            raise ValueError('"Range" header is not supported in fast download')
        self._tqdm = None
    def _build_progres_bar(self, initial_size, file_sizes, desc='file_sizes'):
        """Create the tqdm progress bar (no-op when progress_bar is False)."""
        if self.progress_bar:
            kwargs = {
                'initial': initial_size or 0,
                'total': file_sizes,
                'unit': 'B',
                'unit_scale': True
            }
            # Determine ncols progress bar
            length = len(desc)
            if length < 20:
                kwargs.setdefault('ncols', 80)
            elif length > 20 and length < 50:
                kwargs.setdefault('dynamic_ncols', True)
            # Length desc is more than 40 or 50
            elif length >= 50:
                desc = desc[:20] + '...'
                kwargs.setdefault('ncols', 90)
            kwargs.setdefault('desc', desc)
            self._tqdm = tqdm.tqdm(**kwargs)
    def _update_progress_bar(self, n):
        # Advance the bar by n bytes, if a bar exists.
        if self._tqdm:
            self._tqdm.update(n)
    def _close_progress_bar(self):
        # Close the bar so a fresh one can be built (e.g. for merging).
        if self._tqdm:
            self._tqdm.close()
    def _get_file_size(self, file):
        """Return the size of ``file`` in bytes, or None when it is absent."""
        if os.path.exists(file):
            return os.path.getsize(file)
        else:
            return None
    def _parse_headers(self, initial_sizes, end_sizes):
        """Build request headers with an explicit start-end Range header."""
        headers = self.headers_request or {}
        headers['Range'] = 'bytes=%s-%s' % (int(initial_sizes), int(end_sizes))
        return headers
    def _get_temp_file(self, part):
        # Per-part staging file, e.g. "<real_file>.temp.1".
        return str(self.real_file) + '.temp.' + str(part)
    async def _prepare_download(self, part, start_size, end_size):
        """Open the ranged request for one part, resuming a partial temp file.

        Returns (bytes already on disk for this part, response, temp file
        path, whether the temp file already existed).
        """
        file = self._get_temp_file(part)
        initial_file_sizes = self._get_file_size(file) or 0
        pure_temp_file_sizes = initial_file_sizes
        exist = True if initial_file_sizes else False
        # If temp part file exist
        # addition it with start_size
        initial_file_sizes += start_size
        # Parse headers
        headers = self._parse_headers(initial_file_sizes, end_size)
        # initiate request
        resp = await Net.aiohttp.get(self.url, headers=headers)
        return pure_temp_file_sizes, resp, file, exist
    async def _download(self, file, resp, exist):
        """Stream one part's response body into its temp file."""
        # Heavily adapted from https://github.com/choldgraf/download/blob/master/download/download.py#L377-L390
        chunk_size = 2 ** 16
        with open(file, 'ab' if exist else 'wb') as writer:
            while True:
                t0 = time.time()
                chunk = await resp.content.read(chunk_size)
                dt = time.time() - t0
                # Adaptive chunking: grow on fast reads, shrink on slow ones.
                if dt < 0.005:
                    chunk_size *= 2
                elif dt > 0.1 and chunk_size > 2 ** 16:
                    chunk_size = chunk_size // 2
                if not chunk:
                    break
                writer.write(chunk)
                self._update_progress_bar(len(chunk))
    def _get_parts_size(self, length: int):
        """Split ``length`` bytes into two [start, end] byte-range pairs.

        Returns [start1, end1, start2, end2]; the odd-length branch uses
        half-offsets so both boundaries land on whole bytes.
        """
        divided = length / 2
        if not divided.is_integer():
            parts_size = [0, divided - 0.5, divided + 0.5, length]
        else:
            parts_size = [0, divided - 1, divided, length]
        return parts_size
    def _merge_files(self, parts, file_sizes):
        """Concatenate the temp part files into the real file, with a bar."""
        self._close_progress_bar()
        with open(self.real_file, 'wb') as writer:
            self._build_progres_bar(0, file_sizes, 'merging_files')
            for part in parts:
                chunks_size = 2 ** 16
                file = self._get_temp_file(part)
                with open(file, 'rb') as read:
                    while True:
                        chunks = read.read(chunks_size)
                        if not chunks:
                            break
                        writer.write(chunks)
                        self._update_progress_bar(len(chunks))
        self._close_progress_bar()
    async def download(self):
        """Download the file over two concurrent ranged connections."""
        # Grab the file sizes
        resp = await Net.aiohttp.get(self.url)
        # NOTE(review): float(None) raises TypeError if the server omits the
        # Content-Length header — TODO confirm this is acceptable upstream.
        file_sizes = float(resp.headers.get('Content-Length'))
        resp.close()
        # TODO: Add explanation below this.
        # Split the total size into two byte ranges, one per connection.
        parts_size = self._get_parts_size(file_sizes)
        part1_kwargs = {
            'part': 1,
            'start_size': parts_size[0],
            'end_size': parts_size[1]
        }
        part2_kwargs = {
            'part': 2,
            'start_size': parts_size[2],
            'end_size': parts_size[3]
        }
        ifs_p1, resp_p1, f1, e1 = await self._prepare_download(**part1_kwargs)
        ifs_p2, resp_p2, f2, e2 = await self._prepare_download(**part2_kwargs)
        # Bar starts at the number of bytes already staged for both parts.
        self._build_progres_bar(ifs_p1 + ifs_p2, file_sizes)
        fut1 = asyncio.ensure_future(self._download(
            f1,
            resp_p1,
            e1
        ))
        fut2 = asyncio.ensure_future(self._download(
            f2,
            resp_p2,
            e2
        ))
        await asyncio.gather(fut1, fut2)
        # Merge in a thread executor to avoid blocking the event loop on
        # the file I/O.
        loop = asyncio.get_event_loop()
        await loop.run_in_executor(None, lambda: self._merge_files([1, 2], file_sizes))
        for part in [1,2]:
            os.remove(self._get_temp_file(part))
    async def cleanup(self):
        # Close the progress bar
        if self._tqdm:
            self._tqdm.close()
import argparse
import logging
import os
from zippyshare_downloader.utils import check_valid_zippyshare_url
from zippyshare_downloader import (
__version__,
__description__,
__author__,
__repository__,
__license__
)
class InvalidParameter(Exception):
    """Signals an invalid or disallowed combination of CLI parameters."""
def _check_urls(url):
    """Validate a single zippyshare URL, or every URL listed in a file.

    When ``url`` names an existing file, each line of that file is
    validated and a list of validated urls is returned; otherwise the
    single validated url is returned.
    """
    if not os.path.exists(url):
        return check_valid_zippyshare_url(url)
    with open(url, 'r') as fp:
        return [check_valid_zippyshare_url(line) for line in fp.read().splitlines()]
def _build_argparse_description():
    """Compose the description text shown in the CLI --help output."""
    template = "{description}, created by {author}. Repository url: {repository}"
    return template.format(
        description=__description__,
        author=__author__,
        repository=__repository__,
    )
def setup_args():
    """Build the CLI argument parser, parse sys.argv, and return the result.

    The positional argument accepts either a zippyshare URL or a path to a
    file listing urls; it is validated/expanded by _check_urls.  The parsed
    namespace additionally gets an ``urls`` attribute mirroring that
    positional value (its real dest contains spaces and is awkward to
    access directly).
    """
    parser = argparse.ArgumentParser(description=_build_argparse_description())
    # URL or File location
    parser.add_argument(
        'ZIPPYSHARE_URL or FILE',
        type=_check_urls,
        help='Zippyshare URL or file containing zippyshare urls',
    )
    # No download
    parser.add_argument(
        '--no-download',
        action='store_true',
        help='No download file'
    )
    # Verbose output
    parser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        help='Enable verbose output'
    )
    # Replace downloaded file (if exist)
    parser.add_argument(
        '--replace',
        '-r',
        action='store_true',
        help='Replace file if exist'
    )
    # No Output
    parser.add_argument(
        '--silent',
        '-s',
        action='store_true',
        help='No output'
    )
    # Store folder
    parser.add_argument(
        '--folder',
        help='Store downloaded file in given folder',
        metavar='FOLDER'
    )
    # Change filename
    parser.add_argument(
        '--filename',
        help='Set a replacement filename. will be ignored if using multiple zippyshare urls',
        metavar='FILENAME'
    )
    # Zip
    parser.add_argument(
        '--zip',
        '-z',
        help='Zip all downloaded files (if using multiple zippyshare urls) once finished' \
            'the zip filename will be taken from this option. NOTE: you can\'t combine' \
            '--zip and --unzip options, it will throw error.',
        metavar='FILENAME',
    )
    # Unzip each downloaded file
    parser.add_argument(
        '--unzip',
        '-uz',
        help='Unzip all downloaded files, one by one. NOTE: You can\'t combine' \
            '--zip and --unzip options, it will throw error.',
        action='store_true'
    )
    # Async process
    parser.add_argument(
        '--async',
        help='Run zippyshare-downloader in asynchronous process',
        action='store_true',
        dest='async_process'
    )
    # Fast Download
    parser.add_argument(
        '--fast',
        help='Enable Fast Download (Only available with --async option).',
        action='store_true',
    )
    # Stdout download
    parser.add_argument(
        '-pipe',
        help='Download to stdout, useful for piping media content to media player (like vlc)',
        action='store_true'
    )
    # JSON output format
    parser.add_argument(
        '--json',
        help='Print out file informations to JSON format. NOTE: logging will be disabled.',
        action='store_true'
    )
    parser.add_argument(
        '--version',
        '-V',
        help='Print zippyshare-downloader version',
        action='version',
        version=__version__
    )
    parser.add_argument(
        '--proxy',
        help='Set http/socks proxy'
    )
    parser.add_argument(
        '--proxy-trust-env',
        help='use http/socks proxy from environments',
        action='store_true'
    )
    args = parser.parse_args()
    # Mirror the awkwardly-named positional dest onto args.urls.
    urls = args.__dict__['ZIPPYSHARE_URL or FILE']
    args.urls = urls
    return args
def setup_logging(name_module, verbose=False):
    """Return a logger with a stream handler and level-tagged formatting.

    name_module -- logger name (usually the package name)
    verbose     -- when True the level is DEBUG, otherwise INFO
    """
    logger = logging.getLogger(name_module)
    stream = logging.StreamHandler()
    stream.setFormatter(logging.Formatter('[%(levelname)s] %(message)s'))
    logger.addHandler(stream)
    logger.setLevel(logging.DEBUG if verbose else logging.INFO)
    return logger
def build_kwargs(args, urls):
    """Translate parsed CLI options into keyword arguments for the fetcher.

    args -- the namespace produced by setup_args()
    urls -- a single url string or a list of urls
    """
    # -pipe option will not enable progress bar because of downloader
    # --json disables the bar unconditionally; otherwise it follows --silent.
    show_progress = (not args.silent) if not args.json else False
    return {
        'urls': urls,
        'download': not args.no_download,
        'unzip': args.unzip,
        'zip': args.zip,
        'progress_bar': show_progress,
        'replace': args.replace,
        'folder': args.folder,
        'filename': args.filename,
        'silent': args.silent,
        'async': args.async_process,
        'fast': args.fast,
        'pipe': args.pipe,
        'json': args.json,
    }
def pretty_print_result(file):
    """Print a human-readable report for a fetched zippyshare file object.

    The underline is sized to the header line: 16 base characters plus one
    per URL character plus a closing '='.
    """
    underline = "================" + "=" * len(file.url) + "="
    report = [
        "",
        "Result from url %s" % file.url,
        underline,
        "Name          : %s" % file.name,
        "Size          : %s" % file.size_fmt,
        "Date uploaded : %s" % file.date_uploaded_fmt,
        "Download URL  : %s" % file.download_url,
    ]
    print("\n".join(report))
import asyncio
import logging
from json import dumps
from zippyshare_downloader.cli.utils import (
setup_args,
setup_logging,
build_kwargs,
pretty_print_result,
InvalidParameter
)
from zippyshare_downloader.fetcher import (
download_stdout,
extract_info_coro,
download_coro,
extract_info,
download
)
from zippyshare_downloader.network import Net
# Public API of this module: only ``main`` is exported via star-imports.
__all__ = (
    'main',
)
def process(**kwargs):
    """Synchronous CLI flow: fetch info and/or download the given url(s).

    ``kwargs`` comes from build_kwargs(); keys consumed here are popped so
    the remainder can be forwarded to the fetcher functions.  Uses the
    module-level ``log`` configured in main().  Raises InvalidParameter
    for option combinations that only make sense in async mode.
    """
    silent = kwargs.pop('silent')
    json = kwargs.pop('json')
    # We don't do fast download in non-async here
    fast = kwargs.pop('fast')
    if fast:
        log.error('--fast option must be used with --async option')
        raise InvalidParameter('--fast option must be used with --async option')
    urls = kwargs.pop('urls')
    # If urls is grabbed from file
    if isinstance(urls, list):
        # We don't do stdout download here if given urls is grabbed from file
        if kwargs.pop('pipe'):
            raise InvalidParameter('-pipe are not supported with multiple zippyshare urls')
        # If --no-download is specified
        if not kwargs.get('download'):
            # Warn the users if --unzip is specified
            # while --no-download is exist
            if kwargs.get('unzip'):
                log.warning('--unzip is set, while --no-download is also set. Ignoring --unzip')
            # Begin the fetching !!!
            files = []
            for url in urls:
                files.append(extract_info(url, **kwargs))
        # If download is yes
        else:
            kwargs.pop('download')
            files = download(*urls, **kwargs)
        # Warn the users if --filename is specified
        # while using multiple zippyshare urls
        if kwargs.get('filename'):
            log.warning('Using multi zippyshare urls and --filename is set. Ignoring --filename option')
        # Print all file informations
        if json:
            print(dumps({'urls': [file.to_JSON() for file in files]}))
        elif not silent:
            for file in files:
                pretty_print_result(file)
    # If urls is single url
    else:
        # download to stdout
        if kwargs.pop('pipe'):
            download_stdout(urls)
            return
        kwargs.pop('zip')
        file = extract_info(urls, **kwargs)
        if json:
            print(file.to_JSON())
        elif not silent:
            pretty_print_result(file)
async def process_async(**kwargs):
    """Asynchronous CLI flow: fetch info and/or download the given url(s).

    Mirrors process() but awaits the coroutine variants of the fetcher
    functions.  ``-pipe`` is rejected here because stdout streaming is not
    implemented for the async path.
    """
    silent = kwargs.pop('silent')
    json = kwargs.pop('json')
    # Check if "-pipe" used with --async
    if kwargs.pop('pipe'):
        # if yes, throw errror.
        # Unsupported
        raise InvalidParameter('-pipe cannot be used with --async option')
    urls = kwargs.pop('urls')
    # If urls is grabbed from file
    if isinstance(urls, list):
        # If --no-download is specified
        if not kwargs.get('download'):
            # Warn the users if --unzip is specified
            # while --no-download is exist
            if kwargs.get('unzip'):
                log.warning('--unzip is set and --no-download is also set. Ignoring --unzip option')
            # Begin the fetching !!!
            files = []
            for url in urls:
                files.append(await extract_info_coro(url, **kwargs))
        # If download is yes
        else:
            # Delete download parameter
            kwargs.pop('download')
            # Warn the users if --filename is specified
            # while using multiple zippyshare urls
            if kwargs.get('filename'):
                log.warning('Using multi zippyshare urls and --filename is set. Ignoring --filename option')
            files = await download_coro(*urls, **kwargs)
        # Print all files informations
        if json:
            print(dumps({'urls': [file.to_JSON() for file in files]}))
        elif not silent:
            for file in files:
                pretty_print_result(file)
    # If urls is single url
    else:
        kwargs.pop('zip')
        file = await extract_info_coro(urls, **kwargs)
        if json:
            print(file.to_JSON())
        elif not silent:
            pretty_print_result(file)
def main():
    """CLI entry point: parse arguments and dispatch to sync or async flow."""
    global log
    # Default logger carrying a NullHandler so that module-level log calls
    # (e.g. log.warning in process()) are safe no-ops while logging is
    # disabled (--silent / --json / -pipe).
    # Fix: the previous code assigned the return value of addHandler()
    # (which is None) to ``log``, so any later log.<level>() call raised
    # AttributeError when logging output was disabled.
    log = logging.getLogger('zippyshare_downloader_null_logging')
    log.addHandler(logging.NullHandler())
    # Parse parameters
    args = setup_args()
    kwargs = build_kwargs(args, args.urls)
    # Disable logging if "-pipe" or "--json" or "--silent" is present
    if kwargs.get('pipe') or kwargs.get('json'):
        pass
    else:
        if not args.silent:
            log = setup_logging('zippyshare_downloader', args.verbose)
    # Throw error if "-pipe" and "--no-download" are present
    if kwargs.get('pipe') and not kwargs.get('download'):
        raise InvalidParameter('-pipe cannot be used with --no-download option')
    Net.trust_env = args.proxy_trust_env
    async_process = kwargs.pop('async')
    if not async_process:
        Net.set_proxy(args.proxy)
        process(**kwargs)
        Net.close()
    else:
        # Little helper
        async def run_async():
            if args.proxy:
                Net.set_proxy(args.proxy)
            await process_async(**kwargs)
            await Net.close_async()
        # Using uvloop if installed
        # for faster operations
        try:
            import uvloop  # type: ignore
        except ImportError:
            pass
        else:
            uvloop.install()
        loop = asyncio.get_event_loop()
        loop.run_until_complete(run_async())
zippyshare_generator
=======================
Generator Links
Installing
-------------
Install and update using `pip`_:
.. code-block:: text
$ pip install zippyshare_generator
zippyshare supports Python 2 and newer.
.. _pip: https://pip.pypa.io/en/stable/quickstart/
Example
----------------
What does it look like? Here is an example of a simple generate link:
.. code-block:: batch
$ zippyshare.py -d /root/Downloads "https://www110.zippyshare.com/v/0CtTucxG/file.html" -n "myMovies.mp4"
And it will download automatically with "Internet Download Manager (IDM)" for Windows or the built-in download manager.
You can also use it from the Python interpreter:
.. code-block:: python
>>> from zippyshare import zippyshare
>>> generator = zippyshare()
>>> url_download = generator.generate("https://www110.zippyshare.com/v/0CtTucxG/file.html")
>>> generator.download(url_download, ".", "myMovies.mp4", False)
>>> #it will download it automatically
For more options use '-h' or '--help'
.. code-block:: python
$ zippyshare.py --help
or
$ zippyshare --help
Support
---------
* Download With 'wget' (linux/windows) or 'Internet Download Manager (IDM) (Windows) (pip install idm)'
* Python 2.7 + (only)
* Windows, Linux
Links
-------
* License: `BSD <https://bitbucket.org/licface/zippyshare/src/default/LICENSE.rst>`_
* Code: https://bitbucket.org/licface/zippyshare
* Issue tracker: https://bitbucket.org/licface/zippyshare/issues | zippyshare-generator | /zippyshare_generator-0.29.tar.gz/zippyshare_generator-0.29/README.rst | README.rst |
Copyright © 2019 by the cumulus13 team.
Some rights reserved.
Redistribution and use in source and binary forms of the software as
well as documentation, with or without modification, are permitted
provided that the following conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
----
zippyshare uses parts of optparse written by Hadi Cahyadi, LD. Ward and maintained
by the Python Software Foundation.
Copyright © 2017-2019 Hadi Cahyadi, LD. All rights reserved.
Copyright © 2002-2006 Python Software Foundation. All rights reserved.
| zippyshare-generator | /zippyshare_generator-0.29.tar.gz/zippyshare_generator-0.29/LICENSE.rst | LICENSE.rst |
import os
import sys
import traceback
# sys.excepthook = traceback.format_exc
import argparse
import clipboard
from configset import configset
from pydebugger.debug import debug
from make_colors import make_colors
import requests
from bs4 import BeautifulSoup as bs
from parserheader import parserheader
import re
from pywget import wget
from pause import pause
try:
from . import js_exe
except:
import js_exe
# Compatibility shim: on Python 3 the stdlib provides urllib.request /
# urllib.parse / urllib.error; on Python 2 those submodules do not exist,
# so a stand-in ``urllib`` class is built whose attributes all point at the
# legacy ``urllib`` module.
if sys.version_info.major == 3:
    import urllib.request, urllib.parse, urllib.error
else:
    import urllib as urllibx
    class urllib:
        # Placeholder methods; the class is only used as an attribute
        # namespace and these bodies are never meaningfully called.
        def request(self):
            pass
        def parse(self):
            pass
        def error(self):
            pass
    # Point the expected submodule names at the legacy urllib module.
    urllib.request = urllibx
    urllib.parse = urllibx
    urllib.error = urllibx
class zippyshare(object):
    def __init__(self, url = None, download_path = os.getcwd(), altname = None):
        """Initialize the generator; if ``url`` is given, download it immediately.

        url           -- zippyshare page URL to resolve and download
        download_path -- destination directory for the download
        altname       -- replacement filename; overridden by the name
                         detected by generate() when available
        NOTE(review): ``os.getcwd()`` as a default is evaluated once at
        import time, not per call — TODO confirm this is intended.
        """
        # NOTE(review): super() object is created but __init__ is never
        # called on it; this line currently has no effect.
        super(zippyshare, self)
        self.debug = False
        # Both branches build the exact same config path; the if/else is
        # redundant as written.
        if not os.path.isfile(os.path.join(os.path.dirname(__file__), 'zippyshare.ini')):
            configname = os.path.join(os.path.dirname(__file__), 'zippyshare.ini')
        else:
            configname = os.path.join(os.path.dirname(__file__), 'zippyshare.ini')
        self.config = configset(configname)
        # Eager mode: resolve the direct link and download right away.
        if url:
            self.url = url
            url_download, name = self.generate(self.url)
            if name:
                altname = name
            self.download(url_download, download_path, altname)
def generate(self, url):
#header = self.parseheader()
debug(url = url)
#print("url =", url)
try:
www = re.findall('https:/.(.*?).zippyshare', url)[0]
print(make_colors("URL", 'lw' 'bl') + " : " + make_colors(url, 'b', 'lg'))
except:
if len(url) > 80:
print(make_colors("URL", 'lw' 'bl') + " : " + make_colors(url[:51], 'lw', 'm'))
else:
print(make_colors("URL", 'lw' 'bl') + " : " + make_colors(url, 'lw', 'm'))
print(make_colors("Invalid Link !", 'lw', 'lr', ['blink']))
#print("www =", www)
debug(www = www, debug = self.debug)
header = {}
while 1:
try:
a = requests.get(url, headers = header)
break
except:
pass
b = bs(a.content, 'lxml')
name = ''
name = b.find('table', {'class':'folderlogo'}).find('div', {'class':'center'}).find('font', text=re.compile("\.mp4"))
if name:
name = name.text
debug(name = name)
#
try:
js_script = b.find("div", {'class': 'center',}).find_all("script")[1]
except:
js_script = b.find("div", {'class': 'right',}).find_all("script")[0]
#clipboard.copy(str(js_script))
debug(js_script = js_script)
js_content = ""
# a_script = re.findall("\+\((.*?) \+ a", str(js_script))[0]
# debug(a_script = a_script)
var_a = re.findall("var a = (.*?)\n", str(js_script))
debug(var_a = var_a)
# omg = b.find('span', {'id': 'omg',}).get('class')[0]
# debug(omg = omg)
#b_script = re.split("\(|\)|%|\+", str(a_script[1]))
#debug(b_script = b_script)
#js_content1 = int(b_script[1].strip())
#debug(js_content1 = js_content1)
#js_content2 = int(b_script[2].strip())
#debug(js_content2 = js_content2)
#js_content3 = int(b_script[3].strip())
#debug(js_content3 = js_content3)
#js_content4 = int(b_script[4].strip())
#debug(js_content4 = js_content4)
#js_content = ((js_content1 % js_content2) + (js_content3 + js_content4))
#debug(js_content = js_content)
#js_content = """
#var a = %d %% %d;
#var b = %d %% %d;
#var x = a + b;
#"""%(js_content1, js_content2, js_content3, js_content4)
# js_content = """
# var a = function() {return 1};
# var b = function() {return a() + 1};
# var c = function() {return b() + 1};
# var d = %s*2
# var x = %s + a() + b() + c() + d + 5/5
# """%(omg, a_script)
js_content = """
var a = %s;
omg = "asdasd".substr(0, 3);
b = omg.length;
var x = (Math.pow(a, 3)+b)
"""%(var_a[0][:-1])
#print "js_content =", js_content
js_content = js_exe.generator(js_content, "x")
debug(js_content = js_content)
meta_file = b.find('meta', {'name': 'twitter:title',}).get('content').strip()
meta_file = urllib.parse.quote(meta_file)
debug(meta_file = meta_file, debug = self.debug)
code_download_html = b.find('div', {'id': 'lrbox',}).find_all('script')[2].text
debug(code_download_html = code_download_html, debug = self.debug)
code_download = re.findall('document.getElementById\(\'dlbutton\'\).href = "/d/(.*?)/+', code_download_html)
debug(code_download = code_download)
code_download = code_download[0]
debug(code_download = code_download)
# https://www114.zippyshare.com/d/9VTPobOj/830587/Zippynime.id%20Quan_Zhi_Gao_Shou_S2_6.%5b480p%5d.mp4
url_download = 'https://' + str(www) + ".zippyshare.com/d/" + str(code_download) + '/' + str(js_content) + '/' + str(meta_file)
debug(url_download = url_download, debug = self.debug)
debug(code_download = code_download)
debug(js_content = js_content)
debug()
return url_download, name
def download(self, url, download_path = os.getcwd(), altname = None, prompt = False):
debug(url = url)
try:
import idm
dm = idm.IDMan()
dm.download(url, download_path, altname, confirm= prompt)
except:
if os.getenv('debug'):
traceback.format_exc()
if altname:
download_path = os.path.join(download_path, altname)
wget.download(url, download_path)
def parseheader(self, header_text = None):
default = """
HTTP/1.1 200 Connection established
Server: nginx
Date: Thu, 12 Sep 2019 10:03:26 GMT
Content-Type: text/html;charset=UTF-8
Transfer-Encoding: chunked
Connection: keep-alive
Set-Cookie: JSESSIONID=8FFFF1380195C68BA0E0C2C960AD8B32; Path=/; HttpOnly
Set-Cookie: zippop=1; Domain=.zippyshare.com; Expires=Thu, 12-Sep-2019 22:03:26 GMT; Path=/
Content-Language: en
Expires: Thu, 12 Sep 2019 10:03:25 GMT
Cache-Control: no-cache
Strict-Transport-Security: max-age=31536000; includeSubDomains; preload
Content-Encoding: gzip
"""
if not header_text:
header_text = self.config.read_config('header', 'text', value= default)
debug(header_text = header_text, debug = self.debug)
p = parserheader()
header = p.parserHeader(header_text)
debug(header = header, debug = self.debug)
def usage(self):
parser = argparse.ArgumentParser(formatter_class= argparse.RawTextHelpFormatter)
parser.add_argument('URL', action = 'store', help = 'Zippyshare url, example: "https://www48.zippyshare.com/v/pedPCo05/file.html", type "c" for get url from clipboard')
parser.add_argument('-p', '--download-path', action = 'store', help = 'Download path to save file')
parser.add_argument('-n', '--name', action = 'store', help = 'Alternative Save as name')
parser.add_argument('-P', '--prompt', action = 'store_true', help = 'Prompt Before download')
parser.add_argument('-d', '--debug', action = 'store_true', help = 'Debugger process')
parser.add_argument('-c', '--clipboard', action = 'store_true', help = 'Copy generated link to clipboard')
if len(sys.argv) == 1:
parser.print_help()
else:
args = parser.parse_args()
debug(debugger = args.debug)
if self.config.read_config('debug', 'debug', value= False):
self.debug = eval(self.config.read_config('debug', 'debug', value= False))
debug(self_debug = self.debug)
self.debug = args.debug
debug(self_debug = self.debug)
if args.URL == 'c':
args.URL = clipboard.paste()
url_download, name = self.generate(args.URL)
if name:
args.name = name
if args.download_path:
self.download(url_download, args.download_path, args.name, args.prompt)
else:
print(make_colors("GENERATED:", 'w', 'r') + " " + make_colors(url_download, 'b', 'ly', attrs= ['blink']))
if args.clipboard:
clipboard.copy(url_download)
if __name__ == '__main__':
    # Run the interactive CLI when executed as a script.
    runner = zippyshare()
    runner.usage()
# zippyshare
<badges>[](https://pypi.org/project/zippyshare/)
[](https://pypi.org/project/zippyshare/)
[](https://pypi.org/project/zippyshare/)
[](https://paypal.me/foxe6)
[](https://paypal.me/foxe6)
[](https://paypal.me/foxe6)
</badges>
<i>zippyshare API for batch remote upload.</i>
# Hierarchy
```
zippyshare
```
# Example
## python
See `test`. | zippyshare | /zippyshare-0.0.7.tar.gz/zippyshare-0.0.7/README.md | README.md |
from dataclasses import dataclass, fields
import inspect
import struct
@dataclass(frozen=True)
class EOCD:
    """End Of Central Directory (EOCD) record of a ZIP archive.

    Layout (little-endian) after the 4-byte signature ``PK\\x05\\x06``:
    four uint16 fields, two uint32 fields, a uint16 comment length, then the
    variable-length comment.
    """
    disk_index: int          # number of this disk
    cd_start_disk: int       # disk on which the central directory starts
    cd_records_on_disk: int  # CD records on this disk
    cd_records_total: int    # CD records in the whole archive
    cd_size: int             # size of the central directory in bytes
    cd_offset: int           # offset of the CD from the start of the archive
    comment_len: int         # declared length of the archive comment
    comment: str

    @classmethod
    def from_bytes(cls, b: bytes):
        """Locate and parse the EOCD record in *b*.

        Scans backwards starting from the smallest possible EOCD position
        (a record with no comment), so the signature closest to the end wins.

        :param b: raw archive bytes (or a tail slice containing the EOCD)
        :return: an EOCD instance, or None when no signature is found
        """
        for i in range(len(b) - 22, -1, -1):
            if b[i:i + 4] == b'PK\x05\x06':
                fixed_data = struct.unpack(r'<4H2LH', b[i + 4:i + 22])
                comment_last_byte = i + 22 + fixed_data[6]
                # If the buffer is too short for the declared comment length,
                # fall back to an empty comment instead of raising.
                comment = '' if len(b) < comment_last_byte else b[i + 22:comment_last_byte].decode()
                return cls(**dict(zip(map(lambda field: field.name, fields(EOCD)), fixed_data + (comment,))))
        return None

    def to_bytes(self) -> bytes:
        """Serialize the record back to its on-disk byte representation."""
        def d(value: int, size: int) -> bytes:
            return int.to_bytes(value, length=size, byteorder='little')
        return b'PK\x05\x06' \
            + d(self.disk_index, 2) \
            + d(self.cd_start_disk, 2) \
            + d(self.cd_records_on_disk, 2) \
            + d(self.cd_records_total, 2) \
            + d(self.cd_size, 4) \
            + d(self.cd_offset, 4) \
            + d(self.comment_len, 2) \
            + self.comment.encode()
@dataclass(frozen=True)
class CDFileHeader:
    """Central directory file header of a ZIP archive (signature ``PK\\x01\\x02``).

    Fix: ``bit_flags`` is now unpacked as 2 raw bytes (``2s``) instead of a
    uint16 (``H``); the field is declared ``bytes`` and ``to_bytes`` concatenates
    it raw, so the old integer value made round-tripping raise TypeError.
    """
    made_by_version: int
    min_version_needed: int
    bit_flags: bytes          # general purpose bit flags, 2 raw bytes
    compression_method: int
    last_mod_time: int
    last_mod_date: int
    uncompressed_crc: bytes   # CRC-32 of the uncompressed data, 4 raw bytes
    compressed_size: int
    uncompressed_size: int
    filename_len: int
    extra_field_len: int
    file_comment_len: int
    disk_of_file_start: int
    internal_file_attr: bytes
    external_file_attr: bytes
    file_header_offset: int   # offset of the corresponding local file header
    filename: str
    extra_field: str
    comment: str

    @classmethod
    def from_bytes(cls, b: bytes):
        """Parse the first central-directory header found in *b*.

        :param b: raw bytes to scan for a ``PK\\x01\\x02`` signature
        :return: a CDFileHeader instance, or None when no signature is found
        """
        for i in range(0, len(b) - 4):
            if b[i:i + 4] == b'PK\x01\x02':
                # 42 fixed bytes follow the signature (see APPNOTE 4.3.12).
                fixed_data = struct.unpack(r'<2H2s3H4s2L4H2s4sL', b[i + 4:i + 46])
                variable_length = fixed_data[9] + fixed_data[10] + fixed_data[11]
                variable_data = struct.unpack(
                    f"{fixed_data[9]}s{fixed_data[10]}s{fixed_data[11]}s",
                    b[i + 46:i + 46 + variable_length],
                )
                payload = fixed_data + tuple(map(bytes.decode, variable_data))
                return cls(**dict(zip(map(lambda field: field.name, fields(CDFileHeader)), payload)))
        return None

    @classmethod
    def gen_from_bytes(cls, b: bytes):
        """Yield every central-directory header in *b*, in order.

        Stops at the EOCD signature; unknown bytes are skipped one at a time.
        """
        start_byte = 0
        while start_byte < len(b):
            signature = b[start_byte:start_byte + 4]
            if signature == b'PK\x05\x06':  # Found EOCD
                break
            elif signature == b'PK\x01\x02':  # Found CDFileHeader
                cd_meta = cls.from_bytes(b[start_byte:])
                if cd_meta:
                    yield cd_meta
                    # Advance past the fixed part plus all variable fields.
                    start_byte += 46 + cd_meta.filename_len + cd_meta.file_comment_len + cd_meta.extra_field_len
                else:
                    start_byte += 1
            else:
                start_byte += 1

    def to_bytes(self) -> bytes:
        """Serialize the record back to its on-disk byte representation."""
        def d(value: int, size: int) -> bytes:
            return int.to_bytes(value, length=size, byteorder='little')
        return b'PK\x01\x02' \
            + d(self.made_by_version, 2) \
            + d(self.min_version_needed, 2) \
            + self.bit_flags \
            + d(self.compression_method, 2) \
            + d(self.last_mod_time, 2) \
            + d(self.last_mod_date, 2) \
            + self.uncompressed_crc \
            + d(self.compressed_size, 4) \
            + d(self.uncompressed_size, 4) \
            + d(self.filename_len, 2) \
            + d(self.extra_field_len, 2) \
            + d(self.file_comment_len, 2) \
            + d(self.disk_of_file_start, 2) \
            + self.internal_file_attr \
            + self.external_file_attr \
            + d(self.file_header_offset, 4) \
            + self.filename.encode() \
            + self.extra_field.encode() \
            + self.comment.encode()
@dataclass(frozen=True)
class FileHeader:
    """Local file header of a ZIP archive (signature ``PK\\x03\\x04``).

    Fixes:
    - ``from_bytes`` sliced ``b[i+4:i:30]`` (an empty slice — start > stop with
      step 30), so ``struct.unpack`` always raised; corrected to ``b[i+4:i+30]``.
    - ``bit_flags`` / ``uncompressed_crc`` are now unpacked as raw bytes
      (``2s`` / ``4s``) to match their declared types and ``to_bytes``, which
      concatenates them raw (consistent with CDFileHeader).
    """
    min_version_needed: int
    bit_flags: bytes          # general purpose bit flags, 2 raw bytes
    compression_method: int
    last_mod_time: int
    last_mod_date: int
    uncompressed_crc: bytes   # CRC-32 of the uncompressed data, 4 raw bytes
    compressed_size: int
    uncompressed_size: int
    filename_len: int
    extra_field_len: int
    filename: str
    extra_field: str

    @classmethod
    def from_bytes(cls, b: bytes):
        """Parse the first local file header found in *b*.

        :param b: raw bytes to scan for a ``PK\\x03\\x04`` signature
        :return: a FileHeader instance, or None when no signature is found
        """
        for i in range(0, len(b) - 4):
            if b[i:i + 4] == b'PK\x03\x04':
                # 26 fixed bytes follow the signature (see APPNOTE 4.3.7).
                fixed_data = struct.unpack(r'<H2s3H4s2L2H', b[i + 4:i + 30])
                variable_length = fixed_data[8] + fixed_data[9]
                variable_data = struct.unpack(
                    f"{fixed_data[8]}s{fixed_data[9]}s",
                    b[i + 30:i + 30 + variable_length],
                )
                payload = fixed_data + tuple(map(bytes.decode, variable_data))
                return cls(**dict(zip(map(lambda field: field.name, fields(FileHeader)), payload)))
        return None

    @classmethod
    def from_central_directory(cls, cd_meta: 'CDFileHeader'):
        """Build a local header from the matching central-directory entry,
        copying only the fields this dataclass shares with it."""
        return cls(**{
            i: cd_meta.__getattribute__(i) for i in map(lambda field: field.name, fields(cd_meta))
            if i in inspect.signature(cls).parameters
        })

    def to_bytes(self) -> bytes:
        """Serialize the record back to its on-disk byte representation."""
        def d(value: int, size: int) -> bytes:
            return int.to_bytes(value, length=size, byteorder='little')
        return b'PK\x03\x04' \
            + d(self.min_version_needed, 2) \
            + self.bit_flags \
            + d(self.compression_method, 2) \
            + d(self.last_mod_time, 2) \
            + d(self.last_mod_date, 2) \
            + self.uncompressed_crc \
            + d(self.compressed_size, 4) \
            + d(self.uncompressed_size, 4) \
            + d(self.filename_len, 2) \
            + d(self.extra_field_len, 2) \
            + self.filename.encode() \
            + self.extra_field.encode()
# ZipReport
[](https://github.com/zipreport/zipreport/actions)
[](https://zipreport.readthedocs.io/en/latest/)
[](https://pypi.org/project/zipreport-lib/)
[](https://github.com/zipreport/zipreport/blob/master/LICENSE)
Transform HTML templates into beautiful PDF or MIME reports, with full CSS and client Javascript support, under a
permissive license.
Want to see it in action? Check this [example](docs/samples/pagedjs.pdf)!
**Highlights**:
- Create your reports using Jinja templates;
- Dynamic image support (embedding of runtime-generated images);
- Reports are packed in a single file for easy distribution or bundling;
- Optional MIME processor to embed resources in a single email message;
- Support for generated JS content (with zipreport-server or zipreport-cli);
- Support for headers, page numbers and ToC (via PagedJS, see below);
**Requirements**:
- Python >= 3.6
- Jinja2 >= 3.1
- Compatible backend for pdf generation (zipreport-server, zipreport-cli, xhtmltopdf, or WeasyPrint);
Note: For previous Jinja2 versions, zipreport-lib 0.9.5 is functionally similar.
### Installation
Installing via pip:
```shell script
$ pip install zipreport-lib
```
##### Quick example
Using zipreport-cli backend to render a report file:
```python
from zipreport import ZipReportCli
from zipreport.report import ReportFileLoader
# path to zipreport-cli binary
cli_path = "/opt/zpt-cli/zpt-cli"
# output file
output_file = "result.pdf"
# template variables to be used for rendering
report_data = {
'title': "Example report using Jinja templating",
'color_list': ['red', 'blue', 'green'],
'description': 'a long text field with some filler description so the page isn\'t that empty',
}
# load zpt report file
zpt = ReportFileLoader.load("reports/simple.zpt")
# render the report with default job options
result = ZipReportCli(cli_path).render_defaults(zpt, report_data)
if result.success:
# write output file
with open(output_file, 'wb') as rpt:
rpt.write(result.report.read())
```
### Paged.js
[Paged.js](https://www.pagedjs.org/) is an amazing javascript library that performs pagination of HTML documents for print,
under the MIT license. It acts as a polyfill for the W3C specification for print, and allows the creation of headers, footers,
page numbers, table of contents, etc. in the browser.
### Available backends
#### zipreport-server/zipreport-cli
This is the recommended backend to use, that enables full usage of client-side JavaScript and leveraging the Paged.js
capabilities.
[zipreport-cli](https://github.com/zipreport/zipreport-cli) is an electron-based command-line utility used to convert
webpages to PDF.
[zipreport-server](https://github.com/zipreport/zipreport-server) is a daemon that allows the usage of zipreport-cli via API.
#### WeasyPrint
This backend is provided for compatibility. For new projects, please use zipreport-cli or zipreport-server.
[WeasyPrint](https://weasyprint.org/) is a popular Python library to generate PDFs from HTML. It doesn't support JavaScript,
and its CSS support is limited.
#### wkhtmltopdf
This backend is provided for compatibility. While it supports some JavaScript, it's not able to run Paged.js.
[Wkhtmltopdf](https://wkhtmltopdf.org/) is a binary utility based on QtWebKit to generate PDF files from HTML pages.
While it features some JavaScript and CSS support, the underlying library is obsolete.
### Documentation
Detailed documentation on usage and report building is available on the [project documentation](https://zipreport.readthedocs.io/en/latest/).
| zipreport-lib | /zipreport-lib-1.0.0.tar.gz/zipreport-lib-1.0.0/README.md | README.md |
from zipreport.processors.interface import ProcessorInterface
from zipreport.processors import ZipReportProcessor, ZipReportClient, ZipReportCliProcessor, MIMEProcessor, \
WkHtml2PdfProcessor
from zipreport.report import ReportFile
from zipreport.report.job import ReportJob, JobResult
from zipreport.template import JinjaRender
class BaseReport:
    """
    Common base class for simple report generation
    """

    def __init__(self, processor: ProcessorInterface):
        """
        Constructor
        :param processor: processor backend used to convert rendered reports
        :raises RuntimeError: if processor is not a ProcessorInterface
        """
        if not isinstance(processor, ProcessorInterface):
            raise RuntimeError("Invalid processor")
        self._processor = processor

    @staticmethod
    def create_job(zpt: ReportFile) -> ReportJob:
        """
        Creates a ReportJob from a ReportFile
        :param zpt: ReportFile to use
        :return: newly created ReportJob
        :raises RuntimeError: if zpt is not a ReportFile
        """
        if not isinstance(zpt, ReportFile):
            raise RuntimeError("Invalid report file")
        return ReportJob(zpt)

    def render(self, job: ReportJob, data: dict = None) -> JobResult:
        """
        Render a report job, using Jinja
        :param job: ReportJob to render
        :param data: dict of variables for the report
        :return: JobResult
        :raises RuntimeError: if job is not a ReportJob
        """
        if not isinstance(job, ReportJob):
            # Fixed message: previously said "Invalid report file" (copy-paste
            # from create_job), which misreported the actual problem.
            raise RuntimeError("Invalid report job")
        JinjaRender(job.get_report()).render(data)
        return self._processor.process(job)

    def render_defaults(self, zpt: ReportFile, data: dict = None) -> JobResult:
        """
        Render a report file, using a ReportJob with default configuration
        :param zpt: ReportFile to use
        :param data: dict of variables for the report
        :return: JobResult
        """
        return self.render(self.create_job(zpt), data)
class ZipReport(BaseReport):
    """
    zipreport-server API based report generation
    """

    def __init__(self, url: str, api_key: str, api_version: int = 1, secure_ssl: bool = False):
        """
        Build a report generator backed by the zipreport-server HTTP API.

        :param url: zipreport-server url
        :param api_key: zipreport-server api key
        :param api_version: api version (default 1)
        :param secure_ssl: if true, verifies CA validity for SSL certificates (default false)
        """
        client = ZipReportClient(url, api_key, api_version, secure_ssl)
        super().__init__(ZipReportProcessor(client))
class ZipReportCli(BaseReport):
    """
    zipreport-cli local report generation
    """

    def __init__(self, cli_path: str):
        """
        Build a report generator that shells out to the zipreport-cli binary.

        :param cli_path: full path to zipreport-cli binary
        """
        super().__init__(ZipReportCliProcessor(cli_path))
class WkHtml2PdfReport(BaseReport):
    """
    wkhtmltopdf report generation
    """

    def __init__(self, cli_path: str):
        """
        Build a report generator that shells out to the wkhtmltopdf binary.

        :param cli_path: full path to wkhtmltopdf binary
        """
        super().__init__(WkHtml2PdfProcessor(cli_path))
class MIMEReport(BaseReport):
    """
    MIME email generation
    """

    def __init__(self):
        """
        Build a report generator that produces MIME email messages.
        """
        super().__init__(MIMEProcessor())
import os
from pathlib import Path
import sys
from zipreport.cli.debug.server import DebugServer
from zipreport.report import ReportFileBuilder, ReportFileLoader, const
from zipreport.version import get_version
class Commands:
    """Console command dispatcher for the zipreport CLI.

    Each public method named after a COMMANDS key implements that command;
    run() dispatches by name. Command methods return False on failure.
    """

    EXT = '.zpt'
    HELP_LINE = " %-30s %s\n"
    LIST_LINE = "%-20s %s"
    COMMANDS = {
        'help': ['', 'Show usage information'],
        'version': ['[-m]', 'Show version (or version number only, if -m)'],
        'list': ['<path>', 'List reports on the given path'],
        'info': ['<file> [<file>...]', 'Show report details'],
        'build': ['<directory> [output_file]', 'Build zpt file bundle'],
        'debug': ['<directory|file> [[host]:<port>]', 'Run debug server using the directory or specified file'],
    }

    def run(self, args: list):
        """
        Dispatch *args* to the matching command method.
        :param args: command-line arguments (without the program name)
        :return: 0 on success, 1 on error
        """
        if len(args) == 0:
            self.help([])
            return 0
        cmd = args[0]
        # Only dispatch declared commands, so arbitrary attribute names
        # (e.g. 'error' or 'run') cannot be invoked from the command line.
        method = getattr(self, cmd, None) if cmd in self.COMMANDS else None
        if not callable(method):
            self.error("Error: invalid command {}".format(cmd))
            return 1
        if method(args[1:]) is False:
            return 1
        return 0

    def version(self, args=None):
        """Print the version; '-m' prints the bare version number only."""
        args = args or []  # tolerate being called with None
        minimal = len(args) == 1 and args[0] == '-m'
        if minimal:
            vstr = "{}"
        else:
            vstr = "\nVersion: {}\n"
        print(vstr.format(get_version()))

    def help(self, args=None):
        """Print global usage, or detailed usage for a single command."""
        args = args or []  # tolerate being called with None
        if len(args) == 1:
            cmd = args[0]
            if cmd in self.COMMANDS.keys():
                print("\n{cmd} usage:".format(cmd=cmd))
                usage = cmd + " " + self.COMMANDS[cmd][0]
                print(self.HELP_LINE % (usage, self.COMMANDS[cmd][1]))
            else:
                # this shouldn't actually happen, as commands are pre-checked
                self.error("Error: invalid command {}".format(cmd))
            return
        # renamed from 'help' to avoid shadowing the builtin
        usage_lines = ""
        for k, v in self.COMMANDS.items():
            usage = " ".join([k, v[0]])
            usage_lines += self.HELP_LINE % (usage, v[1])
        print("\nUsage:\n{}\n\n".format(usage_lines))

    def build(self, args) -> bool:
        """Build a .zpt bundle from a source directory.

        :param args: [source_dir] or [source_dir, output_file]
        :return: True on success, False on error
        """
        if len(args) == 0 or len(args) > 2:
            self.error("Error: Invalid command syntax")
            return False
        src = Path(args[0]).resolve()
        if not src.exists() or not src.is_dir():
            self.error("Error: {path} is not a valid folder".format(path=str(src)))
            return False
        if len(args) == 2:
            dest = Path(args[1]).resolve()
        else:
            dest = Path(src.name)
        if dest.suffix == '':
            dest = dest.with_suffix(self.EXT)
        result = ReportFileBuilder.build_file(src, dest)
        if not result.success():
            self.error(" ".join(result.get_errors()))
        return result.success()

    def debug(self, args) -> bool:
        """Run the debug HTTP server on a report directory or .zpt file.

        :param args: [source] or [source, port] or [source, host, port]
        :return: True on success, False on error
        """
        if len(args) == 0 or len(args) > 3:
            self.error("Error: Invalid command syntax")
            return False
        source = Path(args[0])
        if not source.exists():
            self.error("Error: Specified path not found")
            return False
        host = DebugServer.DEFAULT_ADDR
        port = DebugServer.DEFAULT_PORT
        if len(args) > 2:
            host = args[1]
            port = args[2]
        elif len(args) == 2:
            port = args[1]
        DebugServer(host, port).run(source)

    def list(self, args) -> bool:
        """List .zpt reports found (recursively) under a path.

        :param args: [] (current dir) or [path]
        :return: True on success, False on error
        """
        if len(args) > 1:
            self.error("Error: Invalid command syntax")
            return False
        if len(args) == 0:
            path = Path(os.getcwd())
        else:
            path = Path(args[0])
        if not path.exists():
            self.error("Error: Invalid path")
            return False
        if not path.is_dir():
            self.error("Error: Path is not a valid directory")
            return False
        for (dirpath, dirnames, filenames) in os.walk(path):
            for f in filenames:
                if f.endswith('.zpt'):
                    try:
                        # Fixed: load using the full path; the bare filename is
                        # only valid when cwd happens to equal dirpath.
                        zpt = ReportFileLoader.load_file(os.path.join(dirpath, f))
                        print(self.LIST_LINE % (f, zpt.get_param(const.MANIFEST_TITLE, "")))
                    except Exception:
                        # ignore file
                        pass
        return True

    def info(self, args) -> bool:
        """Show the title of each given .zpt file.

        :param args: one or more .zpt file paths
        :return: True on success, False on error
        """
        if len(args) == 0:
            self.error("Error: Invalid command syntax")
            return False
        for fn in args:
            path = Path(fn)
            if not path.exists():
                self.error("Error: Invalid path")
                return False
            if not path.is_file():
                self.error("Error: Path {} is not a valid file".format(path))
                return False
            try:
                zpt = ReportFileLoader.load_file(path)
                print(self.LIST_LINE % (path, zpt.get_param(const.MANIFEST_TITLE, "")))
            except Exception:
                self.error("Error: {} is not a valid zipreport file".format(path))
        return True

    def error(self, message):
        """Write *message* to stderr, framed by blank lines."""
        sys.stderr.write("\n{msg}\n\n".format(msg=message))
def main(args=None):
    """Console entry point: dispatch CLI arguments and exit with the result code.

    :param args: argument list; defaults to sys.argv[1:]
    """
    if args is None:
        args = sys.argv[1:]
    cli = Commands()
    # sys.exit instead of the interactive-only exit() builtin, which is
    # injected by the site module and may be absent (e.g. under python -S).
    sys.exit(cli.run(args))
import io
import mimetypes
import posixpath
import shutil
import sys
from functools import partial
from urllib.parse import urlparse, urlsplit, unquote
from pathlib import Path
from typing import Union, Tuple
from http import HTTPStatus
from http.server import HTTPServer, BaseHTTPRequestHandler
from io import BytesIO
from zipreport.report import ReportFile, ReportFileLoader, ReportFileBuilder, const
from zipreport.template import JinjaRender
# shared ReportFile Object to be reused between requests
# (module-level cache; reset/rebuilt by ReportFileHandler.build_report — not thread-safe)
_zpt = None
class ReportFileHandler(BaseHTTPRequestHandler):
    """Simple HTTP request handler with GET and HEAD commands.

    This class is based on the SimpleHTTPRequestHandler in http.server
    Due to its simplified implementation, the code is not thread-safe!!!
    It serves files out of the in-memory report (module-level _zpt cache),
    rebuilding the report on first request and whenever the root is requested.
    """

    server_version = "ZipReport HTTP Server"
    # Request paths that are rewritten to the generated report document.
    root_paths = ['/', '/index.html', '/index.htm', '/report.html', '/report.htm', ]
    # Fallback mime map; merged with the system table in bootstrap_mime_types().
    extensions_map = {
        '': 'application/octet-stream',
    }

    def __init__(self, *args, report_path=None, extra_mime_types=None, **kwargs):
        """
        Constructor
        :param args:
        :param report_path: path of the directory or zpt file to process
        :param extra_mime_types: optional extra mime types to register
        :param kwargs:
        """
        self.bootstrap_mime_types(extra_mime_types)
        if report_path is None:
            raise RuntimeError("Debug HTTP Server must work with a generated report")
        self.report_path = Path(report_path)
        # A file path means a pre-built .zpt bundle; a directory is rebuilt per request.
        self.is_report_file = self.report_path.is_file()
        self.enc = sys.getfilesystemencoding()
        # BaseHTTPRequestHandler.__init__ processes the request, so it must run last.
        super().__init__(*args, **kwargs)

    def bootstrap_mime_types(self, extra_mime_types: dict = None):
        """
        Bootstrap mime types
        :param extra_mime_types: dictionary of entries to add/replace
        :return:
        """
        # try to use system mimetypes
        if not mimetypes.inited:
            mimetypes.init()
        self.extensions_map.update(mimetypes.types_map)
        # add extra mimetypes if necessary
        if extra_mime_types is not None:
            self.extensions_map.update(extra_mime_types)

    def build_report(self) -> Tuple[bool, Union[io.BytesIO, None]]:
        """
        Assembles, builds and renders the report file
        In case of errors, response is (False, io.BytesIO(response_message)), with headers already processed
        In case of success, response is (True, io.BytesIO()); response should be processed from the zpt path for the report file
        :return: (bool, io.BytesIO)
        """
        global _zpt
        _zpt = None
        try:
            # load file or build file, according to path
            if self.is_report_file:
                sys.stdout.write("Reloading report file...\n")
                _zpt = ReportFileLoader.load_file(self.report_path)
            else:
                sys.stdout.write("Rebuilding report from path...\n")
                bresult, zpt = ReportFileBuilder.build_zipfs(self.report_path)
                if not bresult.success():
                    return False, self.error_500(";".join(bresult.get_errors()))
                # create ReportFile from zipfs
                _zpt = ReportFileLoader.load_zipfs(zpt)
            # render template to REPORT_FILE_NAME
            # returns a dummy BytesIO object
            JinjaRender(_zpt).render()
            return True, io.BytesIO()
        except Exception as e:
            return False, self.error_500(str(e))

    def do_GET(self):
        """
        Process a GET request
        :return:
        """
        response = self.process_request()
        if response:
            try:
                shutil.copyfileobj(response, self.wfile)
            finally:
                response.close()

    def do_HEAD(self):
        """
        Process a HEAD request
        :return:
        """
        # Headers are generated by process_request(); the body is discarded.
        response = self.process_request()
        if response:
            response.close()

    def process_request(self):
        """
        Common request processing logic
        :return: io.BytesIO()
        """
        global _zpt
        path = self.clean_path(self.path)
        response = None
        # Lazily build the report on the very first request.
        if _zpt is None:
            success, response = self.build_report()
            if not success:
                return response
        if path in self.root_paths:
            # Root requests always trigger a rebuild (unless one just happened above).
            if response is None:
                success, response = self.build_report()
                if not success:
                    return response
            # rewrite path to point to report file
            path = '/' + const.REPORT_FILE_NAME
        if _zpt.exists(path):
            return self.handle_file(Path(path).name, _zpt.get(path))
        # path not found
        return self.error_404(path)

    def clean_path(self, path: str) -> str:
        """
        Cleans up the request path: strips query string and fragment,
        percent-decodes, and normalizes.
        :param path: raw request path
        :return: normalized path string
        """
        # remove ignored parameters
        path = path.split('?', 1)[0]
        path = path.split('#', 1)[0]
        try:
            path = unquote(path, errors='surrogatepass')
        except UnicodeDecodeError:
            path = unquote(path)
        return posixpath.normpath(path)

    def guess_type(self, fname: str):
        """
        Tries to determine a filename mime type
        :param fname: file name
        :return: mime type string (octet-stream fallback)
        """
        base, ext = posixpath.splitext(fname)
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        ext = ext.lower()
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        return self.extensions_map['']

    def handle_file(self, fname: str, contents: io.BytesIO) -> io.BytesIO:
        """
        Generates response headers for a given file
        :param fname: file name (used only for mime-type detection)
        :param contents: file contents
        :return: io.BytesIO
        """
        self.send_response(HTTPStatus.OK)
        self.send_header("Content-type", self.guess_type(fname))
        self.send_header("Content-Length", str(contents.getbuffer().nbytes))
        self.end_headers()
        contents.seek(0)
        return contents

    def error_500(self, item: str):
        """
        Generates a customized 500 response
        :param item: optional error message
        :return: io.BytesIO
        """
        if item:
            response = "<html><body><h3>Internal Server Error: {}</h3></body></html>".format(item)
        else:
            # NOTE(review): fallback text says "File not found" for a 500
            # response (looks copy-pasted from error_404) — confirm wording.
            response = "<html><body><h3>File not found</h3></body></html>"
        return self._error(HTTPStatus.INTERNAL_SERVER_ERROR, response)

    def error_404(self, item: str = None) -> io.BytesIO:
        """
        Generates a customized 404 response
        :param item: optional error message
        :return: io.BytesIO
        """
        if item:
            response = "<html><body><h3>File not found: {}</h3></body></html>".format(item)
        else:
            response = "<html><body><h3>File not found</h3></body></html>"
        return self._error(HTTPStatus.NOT_FOUND, response)

    def _error(self, code: int, contents: str) -> io.BytesIO:
        """
        Common error response logic
        :param code: HTTP status code
        :param contents: optional HTML response
        :return: io.BytesIO
        """
        size = len(contents)
        response = io.BytesIO(bytes(contents, encoding='utf-8'))
        self.send_response(code)
        self.send_header("Content-type", "text/html; charset=%s" % sys.getfilesystemencoding())
        self.send_header("Content-Length", str(size))
        self.end_headers()
        response.seek(0)
        return response
class DebugServer:
    """Tiny development HTTP server that serves a report through
    ReportFileHandler until interrupted with Ctrl+C."""

    DEFAULT_ADDR = 'localhost'
    DEFAULT_PORT = 8001

    def __init__(self, addr: str = DEFAULT_ADDR, port: int = DEFAULT_PORT):
        """
        :param addr: address to bind to
        :param port: port to listen on
        """
        self._addr = addr
        self._port = port

    def set_addr(self, addr: str):
        """Change the bind address."""
        self._addr = addr

    def set_port(self, port: int):
        """Change the listen port."""
        self._port = port

    def run(self, report_path: str):
        """Serve *report_path* (directory or .zpt file) until KeyboardInterrupt."""
        bind_address = (self._addr, int(self._port))
        handler_factory = partial(ReportFileHandler, report_path=report_path)
        sys.stdout.write(
            "\nStarted debug server at http://{addr}:{port}\nServing from: {path}\nUse Ctrl+C to stop...\n\n".format(
                addr=self._addr, port=int(self._port), path=Path(report_path).absolute()))
        server = HTTPServer(bind_address, handler_factory)
        try:
            server.serve_forever()
        except KeyboardInterrupt:
            pass
        return
import io
import os
from pathlib import Path
from .interface import FsError, FsInterface
class DiskFs(FsInterface):
    """
    Disk-based file operations
    Note: due to the way path separators are handled, one should assume compatibility with unix-os only
    """

    def __init__(self, path):
        """
        Constructor
        :param path: filesystem path to use as root (str or pathlib.Path)
        """
        # Both str and Path inputs are normalized to str.
        self._basepath = str(path)

    def get(self, name: str) -> io.BytesIO:
        """
        Retrieve contents of a file
        :param name: filename with full path
        :return: io.BytesIO
        :raises FsError: if the path does not exist or is not a file
        """
        path = self._build_path(name)
        if not os.path.exists(path):
            raise FsError("Path '{}' does not exist".format(path))
        if os.path.isfile(path):
            with open(path, "rb", buffering=0) as f:
                return io.BytesIO(f.read())
        raise FsError("Path '{}' is not a file".format(path))

    def add(self, name: str, content):
        """
        Add a new file
        :param name: filename with full path
        :param content: file contents (bytes-like)
        :raises FsError: if the parent dir is invalid or the file already exists
        """
        name = self._build_path(name)
        if not self._can_create(os.path.dirname(name), name):
            raise FsError("Cannot add file '{}'; Invalid path or already existing file".format(name))
        with open(name, "wb", buffering=0) as f:
            f.write(content)

    def mkdir(self, name: str):
        """
        Creates a directory
        :param name: full directory path
        :raises FsError: if the parent dir is invalid or the dir already exists
        """
        name = self._build_path(name)
        if not self._can_create(os.path.dirname(name), name):
            # Fixed message: previously said "Cannot add file" (copy-paste from add()).
            raise FsError("Cannot create dir '{}'; Invalid path or already existing dir".format(name))
        os.mkdir(name)

    def exists(self, path: str) -> bool:
        """
        Check if a given path (file or dir) exists
        :param path: path to check
        :return: bool
        """
        return os.path.exists(self._build_path(path))

    def is_dir(self, path: str) -> bool:
        """
        Check if path is a valid directory
        :param path: path to check
        :return: bool
        """
        path = self._build_path(path)
        return os.path.exists(path) and os.path.isdir(path)

    def list_files(self, path: str) -> list:
        """
        List existing files on the given path (non-recursive)
        :param path: path
        :return: list
        :raises FsError: if path is not a valid directory
        """
        path = self._build_path(path)
        if os.path.exists(path) and os.path.isdir(path):
            # os.walk yields (dirpath, dirnames, filenames); returning on the
            # first iteration limits the listing to the top level.
            for _, _, filenames in os.walk(path):
                return filenames
        else:
            raise FsError("Cannot stat '{}'; Invalid path".format(path))

    def list(self, path: str) -> list:
        """
        List all contents (files and dirs) on the specified path and subpaths
        directories are listed with trailing slash (/)
        :param path: root path to start listing
        :return: list
        """
        path = Path(self._build_path(path))
        result = []
        for dirname, dirs, files in os.walk(path):
            dirname = Path(dirname)
            for f in dirs:
                f = dirname / Path(f)
                # directories always have trailing slash
                result.append(str(f.relative_to(path)) + os.sep)
            for f in files:
                f = dirname / Path(f)
                result.append(str(f.relative_to(path)))
        return result

    def list_dirs(self, path: str) -> list:
        """
        List all directories in the specified path (recursive)
        :param path: path to check
        :return: list
        :raises FsError: if path is not a valid directory
        """
        path = self._build_path(path)
        result = []
        if os.path.exists(path) and os.path.isdir(path):
            for _, dirnames, _ in os.walk(path):
                for dir in dirnames:
                    result.append(dir + os.sep)
            return result
        else:
            raise FsError("Cannot stat '{}'; Invalid path".format(path))

    def get_backend(self) -> any:
        """
        Retrieve fs backend
        For disk-based is None
        :return: None
        """
        return None

    def _can_create(self, path, name: str) -> bool:
        """
        Verify if a file/dir can be created on a given path
        :param path: parent path to check
        :param name: full name to check
        :return: bool
        """
        return os.path.exists(path) and os.path.isdir(path) and not os.path.exists(name)

    def _build_path(self, path):
        """
        Cleans and build absolute path for internal use
        :param path: relative path
        :return: absolute path
        """
        return os.path.join(self._basepath, path.lstrip(os.sep))
import io
from pathlib import Path
from zipfile import ZIP_DEFLATED
from zipreport.fileutils.backend.zip import InMemoryZip
from .interface import FsError, FsInterface
from .pathcache import PathCache
class ZipFs(FsInterface):
    """
    Implement FsInterface operations on zipfiles

    Note: the list_*() operations and is_dir() can be quite slow, due to implementation restrictions
    """

    def __init__(self, zip: InMemoryZip):
        """
        Constructor
        :param zip: Zip Backend
        """
        self._sep = '/'
        self._zip = zip
        self._cache = PathCache(self._sep)
        self._build_cache()

    def get(self, name: str) -> io.BytesIO:
        """
        Read a file
        :param name: file path
        :return: file stream
        :raises FsError: if the file does not exist or cannot be read
        """
        archive = self._zip.zip()
        try:
            # getinfo() raises KeyError when the entry is missing;
            # ValueError signals a closed/invalid archive
            info = archive.getinfo(self._clean_path(name))
            with archive.open(info) as zf:
                return io.BytesIO(zf.read())
        except (KeyError, ValueError):
            raise FsError("Error reading file '{}'. Maybe it doesn't exist?".format(name))

    def add(self, name: str, content):
        """
        Add a file
        :param name: filename to create
        :param content: file contents (bytes or io.BytesIO)
        :raises FsError: if the file already exists or cannot be written
        """
        zfile = self._zip.zip()
        name = self._clean_path(name)
        # convert BytesIO to bytes
        if isinstance(content, io.BytesIO):
            content.seek(0)
            content = content.read()
        try:
            zfile.getinfo(name)
            raise FsError("File '{}' already exists".format(name))
        except KeyError:
            # entry not found - safe to add
            pass
        try:
            zfile.writestr(name, content, compress_type=ZIP_DEFLATED)
            self._cache.add(name)
        except Exception as e:
            raise FsError("Error adding '{}' to Zip: {}".format(name, e))

    def mkdir(self, name: str):
        """
        Not supported on zip files
        :raises FsError: always
        """
        raise FsError("ZipFs does not support creation of explicit directories")

    def exists(self, path: str) -> bool:
        """
        Check if a given path (file or dir) exists
        :param path: path to check
        :return: bool
        """
        self._zip.zip()  # raises if the zip is already closed
        path = self._clean_path(path)
        # check if file exists first, then dir
        if not self._cache.file_exists(path):
            return self._cache.path_exists(path)
        return True

    def is_dir(self, path: str) -> bool:
        """
        Check if a given path is a directory
        :param path: path to check
        :return: bool
        """
        # check if file is still opened
        self._zip.zip()
        path = self._clean_path(path)
        return self._cache.path_exists(path)

    def list_dirs(self, path: str) -> list:
        """
        List dirs on a given path
        :param path: path to list
        :return: list
        """
        # check if file is still opened
        self._zip.zip()
        return self._cache.list_dirs(self._clean_path(path))

    def list_files(self, path: str) -> list:
        """
        List files on a given path
        :param path: path to list
        :return: list
        """
        # check if file is still opened
        self._zip.zip()
        return self._cache.list_files(self._clean_path(path))

    def list(self, path: str) -> list:
        """
        List all contents (files and dirs) on the specified path and subpaths
        directories are listed with trailing slash (/)
        :param path: root path to start listing
        :return: list
        """
        # check if file is still opened
        self._zip.zip()
        return self._cache.list(self._clean_path(path))

    def get_backend(self) -> any:
        """
        Retrieve the zip backend
        :return: InMemoryZip
        """
        return self._zip

    def _build_cache(self):
        """
        Build path cache from the archive entry list
        """
        archive = self._zip.zip()
        for item in archive.namelist():
            self._cache.add(item)

    def _clean_path(self, path):
        """
        Remove self._sep from starting of path, if exists
        :param path: path to clean
        :return: str
        """
        return str(path).lstrip(self._sep)
from pathlib import Path
class PathCache:
    """
    ZipFs content cache

    Maintains a nested dict tree: directories are dict nodes, files are None leaves.
    """
    # sentinel distinguishing "path not present" from a file leaf (None)
    _MISSING = object()

    def __init__(self, trailing='/'):
        """
        Constructor
        :param trailing: separator appended to directory names in listings
        """
        self._sep = trailing
        self._cache = {}

    def add(self, item: str):
        """
        Add an entry to the path cache
        Entries ending with the separator (eg. zip directory entries) are registered
        as directories; anything else is registered as a file
        :param item: entry path
        """
        item = str(item)
        is_dir = item.endswith(self._sep)
        parts = list(Path(item).parts)
        # a directory entry has no file leaf
        leaf = None if is_dir else parts.pop()
        node = self._cache
        for part in parts:
            if part not in node:
                node[part] = {}
            node = node[part]
        if leaf is not None:
            node[leaf] = None

    def path_exists(self, path: str) -> bool:
        """
        Check if a dir path exists
        :param path: path to check
        :return: bool
        """
        return isinstance(self._node_at(path), dict)

    def file_exists(self, path: str) -> bool:
        """
        Check if a file path exists
        :param path: path to check
        :return: bool
        """
        return self._node_at(path) is None

    def list_files(self, path: str) -> list:
        """
        List files on a given path
        :param path: path to list
        :return: list (empty when path is invalid or a file)
        """
        node = self._node_at(path)
        if not isinstance(node, dict):
            return []
        return [name for name, child in node.items() if child is None]

    def list_dirs(self, path: str) -> list:
        """
        List dirs on a given path, with trailing separator
        :param path: path to list
        :return: list (empty when path is invalid or a file)
        """
        node = self._node_at(path)
        if not isinstance(node, dict):
            return []
        return [name + self._sep for name, child in node.items() if child is not None]

    def list(self, path) -> list:
        """
        List all entries starting from a given path, recursively
        Directories carry a trailing separator
        :param path: root path
        :return: list (empty when path is invalid or a file)
        """
        node = self._node_at(path)
        if not isinstance(node, dict):
            return []
        return self._path_traversal(node, Path(''))

    def _node_at(self, path):
        """
        Resolve a path to its tree node
        :param path: path to resolve
        :return: dict (directory), None (file) or _MISSING (invalid path)
        """
        node = self._cache
        for part in Path(path).parts:
            # guard against descending through a file leaf (None)
            if not isinstance(node, dict) or part not in node:
                return self._MISSING
            node = node[part]
        return node

    def _path_traversal(self, node: dict, base: Path) -> list:
        """
        Internal recursive traversal routine
        :param node: directory node to walk
        :param base: path prefix accumulated so far
        :return: list
        """
        result = []
        for name, child in node.items():
            if child is None:
                result.append(str(base / name))
            else:
                subdir = base / name
                result.append(str(subdir) + self._sep)
                result.extend(self._path_traversal(child, subdir))
        return result

    def clear(self):
        """
        Clear path cache
        """
        self._cache = {}
import io
import os
import zipfile
from typing import Union
class InMemoryZipError(Exception):
    """Raised for any failure while creating, loading or saving an in-memory zip"""
    pass


class InMemoryZip:
    """
    Manages a zip file in memory
    """

    def __init__(self, source: Union[str, io.BytesIO] = None):
        """
        Constructor
        :param source: optional buffer or file path with zip contents to use
        """
        self._zip = None
        self._buffer = None
        if isinstance(source, io.BytesIO) or source is None:
            # create new zip or from buffer
            self.new(source)
        else:
            # source may be str or Path
            self.load(source)

    def new(self, buffer: io.BytesIO = None):
        """
        Initialize new zip file in memory
        :param buffer: optional buffer with existing zip contents
        """
        flags = 'w'
        if isinstance(buffer, io.BytesIO):
            # reuse existing contents: open in append mode
            self._buffer = buffer
            flags = 'a'
        else:
            self._buffer = io.BytesIO()
        self._zip = zipfile.ZipFile(self._buffer, flags, zipfile.ZIP_DEFLATED)

    def load(self, disk_file: str):
        """
        Load zip from disk
        :param disk_file: path to zip file
        :raises InMemoryZipError: if the file is missing or unreadable
        """
        if not os.path.exists(disk_file) or not os.path.isfile(disk_file):
            raise InMemoryZipError("Zip file '{}' does not exist or is not a valid file".format(disk_file))
        try:
            with open(disk_file, 'rb', buffering=0) as f:
                self._buffer = io.BytesIO(f.read())
            self._zip = zipfile.ZipFile(self._buffer, mode='a')
        except Exception as e:
            raise InMemoryZipError("Error reading Zip file: {}".format(e))

    def get_buffer(self) -> io.BytesIO:
        """
        Get a copy of the internal buffer
        Note: this forces a close on the internal zip file; the zip is reopened
        afterwards, so further operations remain possible
        :return: io.BytesIO
        :raises InMemoryZipError: if the zip is already closed
        """
        if not self.is_open():
            raise InMemoryZipError("Cannot get_buffer(); Zip is already closed.")
        # flush file, clone stream, reopen file
        self._zip.close()
        self._buffer.seek(0)
        result = io.BytesIO(self._buffer.read())
        self.new(self._buffer)
        return result

    def save_stream(self) -> io.BytesIO:
        """
        Flush internal buffer and clean state
        Note: this will force a close on the internal zip file; no other operations can be done afterwards
        :return: io.BytesIO
        :raises InMemoryZipError: if the zip is already closed
        """
        if not self.is_open():
            raise InMemoryZipError("Cannot save_stream(); Zip is already closed.")
        self._zip.close()
        self._buffer.seek(0)
        result = io.BytesIO(self._buffer.read())
        self._buffer = None
        self._zip = None
        return result

    def save(self, dest_file: str):
        """
        Save internal buffer to a file
        Note: this will force a close on the internal zip file; no other operations can be done afterwards
        :param dest_file: path to destination file
        :raises InMemoryZipError: if the zip is already closed or cannot be written
        """
        if not self.is_open():
            raise InMemoryZipError("Cannot save(); Zip is already closed.")
        try:
            self._zip.close()
            self._buffer.seek(0)
            with open(dest_file, 'wb', buffering=0) as f:
                f.write(self._buffer.read())
            self._buffer = None
            self._zip = None
        except Exception as e:
            raise InMemoryZipError("Error saving Zip file: {}".format(e))

    def zip(self) -> zipfile.ZipFile:
        """
        Retrieve internal ZipFile object
        :return: ZipFile
        :raises InMemoryZipError: if the zip is already closed
        """
        if not self.is_open():
            raise InMemoryZipError("Cannot zip(); Zip is already closed.")
        return self._zip

    def is_open(self):
        """
        Check if ZipFile is opened
        :return: bool
        """
        return self._zip is not None

    def __del__(self):
        """
        Destructor reliability fix
        if zipfile.__del__ is called with a closed buffer, will generate exception because of the ZipFile dependencies
        Instead, we initiate an orderly shutdown if the buffer still exists (eg. when the file modified, but not saved)
        """
        if self._buffer is not None:
            self._zip.close()
import io
import subprocess
import tempfile
from pathlib import Path
from shutil import rmtree
from zipreport.processors import ProcessorInterface
import zipreport.report.const as const
from zipreport.report.job import ReportJob, JobResult
class WkHtml2PdfProcessor(ProcessorInterface):
    """
    wkhtmltopdf report processor

    Extracts the report into a temporary directory and shells out to the
    wkhtmltopdf binary to produce the PDF.
    """
    # minimum margin (millimeters) applied when PDF_MARGIN_MINIMUM is requested
    MARGIN_MINIMUM_MM = '5'

    def __init__(self, cli_path: str):
        """
        Constructor
        :param cli_path: full path to wkhtmltopdf binary
        """
        self._cli = cli_path

    def process(self, job: ReportJob) -> JobResult:
        """
        Execute a ReportJob by calling the wkhtmltopdf binary
        :param job: ReportJob
        :return: JobResult
        """
        report = None
        success = False
        error = ""
        path = None
        try:
            # extract the whole report into a scratch dir so wkhtmltopdf can
            # resolve relative resources (images, css, js) as local files
            path = Path(tempfile.mkdtemp())
            cmd = self.build_cmd(job, path)
            job.get_report().get_fs().get_backend().zip().extractall(path)
            subprocess.run(cmd, cwd=path, check=True)
            report_file = path / const.PDF_FILE_NAME
            if report_file.exists():
                with open(report_file, 'rb') as f:
                    report = io.BytesIO(f.read())
                success = True
        except (subprocess.CalledProcessError, FileNotFoundError, PermissionError, FileExistsError) as e:
            error = str(e)
        # NOTE(review): cleanup is skipped when an unlisted exception propagates - confirm intended
        if path:
            rmtree(path)
        return JobResult(report, success, error)

    def build_cmd(self, job: ReportJob, path: Path, dest_file: str = const.PDF_FILE_NAME):
        """
        Parse ReportJob options and generate command-line arguments for wkhtmltopdf
        :param job: ReportJob
        :param path: full path for the report root
        :param dest_file: full path for PDF file to be generated
        :return: list
        """
        opts = job.get_options()
        args = [
            str(Path(self._cli)),
            '--enable-local-file-access',
            '--allow', str(path),
            '--no-stop-slow-scripts',
            '--page-size', opts[ReportJob.OPT_PAGE_SIZE],
            '--javascript-delay', str(opts[ReportJob.OPT_SETTLING_TIME]),
        ]
        # non-default margins
        if opts[ReportJob.OPT_MARGINS] == const.PDF_MARGIN_NONE:
            args.extend([
                '--margin-bottom', "0",
                '--margin-left', "0",
                '--margin-right', "0",
                '--margin-top', "0"
            ])
        if opts[ReportJob.OPT_MARGINS] == const.PDF_MARGIN_MINIMUM:
            args.extend([
                '--margin-bottom', self.MARGIN_MINIMUM_MM,
                '--margin-left', self.MARGIN_MINIMUM_MM,
                '--margin-right', self.MARGIN_MINIMUM_MM,
                '--margin-top', self.MARGIN_MINIMUM_MM,
            ])
        # page orientation
        if opts[ReportJob.OPT_LANDSCAPE]:
            args.append('--orientation')
            args.append('Landscape')
        # input document and output file are positional, at the end
        args.extend([opts[ReportJob.OPT_MAIN_SCRIPT], dest_file])
        return args
import mimetypes
from email.message import EmailMessage
from email.utils import make_msgid
from html.parser import HTMLParser
from urllib.parse import urlsplit

from zipreport.processors.interface import ProcessorInterface
from zipreport.report import JobResult, ReportJob
class ResourceParser(HTMLParser):
    """
    Custom HTML parser

    Generates a list of all local (relative) src and href resources
    """

    def __init__(self, convert_charrefs=True):
        super().__init__(convert_charrefs=convert_charrefs)
        self._href = []
        self._src = []

    def reset(self):
        """
        Reset parser state, discarding collected resources
        """
        super().reset()
        self._src = []
        self._href = []

    def handle_starttag(self, tag, attrs):
        """
        Collect local src attributes (any tag) and href attributes (link tags only)
        """
        if not attrs:
            return
        attrs = dict(attrs)
        if 'src' in attrs:
            src = attrs['src']
            if self.is_local(src):
                self._src.append(src)
        elif 'href' in attrs and tag.lower() == 'link':
            href = attrs['href']
            if self.is_local(href):
                self._href.append(href)

    def get_src_list(self) -> list:
        """Return collected src resources, deduplicated, insertion order kept"""
        return list(dict.fromkeys(self._src))

    def get_href_list(self) -> list:
        """Return collected href resources, deduplicated, insertion order kept"""
        return list(dict.fromkeys(self._href))

    def get_resource_list(self) -> list:
        """Return all collected resources (src first, then href), deduplicated"""
        a = self.get_src_list()
        a.extend(self.get_href_list())
        return list(dict.fromkeys(a))

    def is_local(self, url: str) -> bool:
        """
        Check if a url refers to a local (relative) resource
        Any url with a scheme (http:, https:, data:, mailto:, cid:, ...) or a
        protocol-relative url (//host/...) is considered non-local
        :param url: url to check
        :return: bool
        """
        url = url.lower()
        if url.startswith('//'):
            # protocol-relative urls have no scheme but are remote
            return False
        return urlsplit(url).scheme == ''
class MIMEProcessor(ProcessorInterface):
    """
    Report processor that renders a report as a MIME email message,
    embedding local resources (images, stylesheets) as related parts.
    """

    def process(self, job: ReportJob) -> JobResult:
        """
        Executes a rendering job to a MIME message
        Local resources such as images are embedded in the message
        :param job: ReportJob
        :return: JobResult
        """
        opts = job.get_options()
        rpt = job.get_report()
        html = str(rpt.get(opts[job.OPT_MAIN_SCRIPT]).read(), encoding='utf-8')
        mime_msg = EmailMessage()
        parser = ResourceParser()
        parser.feed(html)
        resources = {}
        # replace html references with cid
        for src in parser.get_resource_list():
            # make_msgid() returns an id wrapped in angle brackets ('<...>');
            # cid[1:-1] strips them for use inside the cid: url
            cid = make_msgid()
            resources[cid] = src
            html = html.replace('="{}"'.format(src), '="cid:{}"'.format(cid[1:-1]))
            html = html.replace("='{}'".format(src), "='cid:{}'".format(cid[1:-1]))
        mime_msg.add_alternative(html, subtype='html')
        # add related resources
        # the html alternative added above is the first (and only) payload part
        payload = mime_msg.get_payload()[0]
        for cid, fname in resources.items():
            res = rpt.get(fname)
            ctype, encoding = mimetypes.guess_type(fname)
            if ctype is None or encoding is not None:
                # unknown or compressed content: fall back to generic binary type
                ctype = 'application/octet-stream'
            maintype, subtype = ctype.split('/', 1)
            payload.add_related(res.read(), maintype, subtype, cid=cid)
        return JobResult(mime_msg, True, "")
import io
import subprocess
import tempfile
from pathlib import Path
from shutil import rmtree
import requests
from zipreport.processors import ProcessorInterface
import zipreport.report.const as const
from zipreport.report.job import ReportJob, JobResult
class ZipReportClient:
    """
    zipreport-server API Client
    """

    def __init__(self, url: str, api_key: str, api_version: int = 1, secure_ssl: bool = False):
        """
        Constructor
        :param url: zipreport-server API url
        :param api_key: API key
        :param api_version: API version (default 1)
        :param secure_ssl: check SSL CA (default False)
        """
        self._url = url
        self._api_key = api_key
        self._api_version = api_version
        self._secure_ssl = secure_ssl

    def exec(self, job: ReportJob) -> JobResult:
        """
        Execute a ReportJob using the API
        :param job: ReportJob
        :return: JobResult
        """
        endpoint = "{}/v{}/render".format(self._url, self._api_version)
        # multipart body: the serialized report plus one form field per job option
        multipart = {
            'report': ('report.zpt', job.get_report().save()),
        }
        for opt_name, opt_value in job.get_options().items():
            multipart[opt_name] = (None, opt_value)
        try:
            session = requests.sessions.session()
            session.headers['X-Auth-Key'] = self._api_key
            response = session.post(endpoint, verify=self._secure_ssl, files=multipart)
            if response.status_code == 200:
                if response.headers.get('Content-Type') == "application/pdf":
                    return JobResult(io.BytesIO(response.content), True, "")
        except Exception as e:
            return JobResult(None, False, str(e))
        return JobResult(None, False, "HTTP Code {}".format(response.status_code))
class ZipReportProcessor(ProcessorInterface):
    """
    Zipreport-server API report processor
    """

    def __init__(self, client: ZipReportClient):
        """
        Constructor
        :param client: API Client to use for rendering
        """
        self._client = client

    def process(self, job: ReportJob) -> JobResult:
        """
        Execute a ReportJob using the API client
        :param job: ReportJob
        :return: JobResult
        """
        # honor the js-event flag when the report manifest requests it
        if job.get_report().get_param(const.MANIFEST_JS_EVENT, False):
            job.set_jsevent(True)
        return self._client.exec(job)
class ZipReportCliProcessor(ProcessorInterface):
    """
    Local zipreport-cli report processor

    Extracts the report into a temporary directory and shells out to the
    zipreport-cli binary to produce the PDF.
    """

    def __init__(self, cli_path: str):
        """
        Constructor
        :param cli_path: full path to zipreport-cli binary
        """
        self._cli = cli_path

    def process(self, job: ReportJob) -> JobResult:
        """
        Execute a ReportJob by calling the zipreport-cli binary
        :param job: ReportJob
        :return: JobResult
        """
        cmd = self.build_cmd(job)
        report = None
        success = False
        error = ""
        path = None
        try:
            # extract the whole report into a scratch dir so the cli tool can
            # resolve relative resources (images, css, js) as local files
            path = Path(tempfile.mkdtemp())
            job.get_report().get_fs().get_backend().zip().extractall(path)
            subprocess.run(cmd, cwd=path, check=True)
            report_file = path / const.PDF_FILE_NAME
            if report_file.exists():
                with open(report_file, 'rb') as f:
                    report = io.BytesIO(f.read())
                success = True
        except (subprocess.CalledProcessError, FileNotFoundError, PermissionError, FileExistsError) as e:
            error = str(e)
        # NOTE(review): cleanup is skipped when an unlisted exception propagates - confirm intended
        if path:
            rmtree(path)
        return JobResult(report, success, error)

    def build_cmd(self, job: ReportJob, dest_file: str = const.PDF_FILE_NAME):
        """
        Parse ReportJob options and generate command-line arguments for zipreport-cli
        :param job: ReportJob
        :param dest_file: full path for PDF file to be generated
        :return: list
        """
        opts = job.get_options()
        args = [
            Path(self._cli),
            '--pagesize={}'.format(opts[ReportJob.OPT_PAGE_SIZE]),
            '--margins={}'.format(opts[ReportJob.OPT_MARGINS]),
            '--timeout={}'.format(opts[ReportJob.OPT_RENDER_TIMEOUT]),
            '--delay={}'.format(opts[ReportJob.OPT_SETTLING_TIME]),
        ]
        if opts[ReportJob.OPT_LANDSCAPE]:
            args.append('--no-portrait')
        if opts[ReportJob.OPT_JS_EVENT]:
            # wait for the zpt-view-ready js event before rendering
            args.append('--js-event')
            args.append('--js-timeout={}'.format(opts[ReportJob.OPT_JS_TIMEOUT]))
        if opts[ReportJob.OPT_IGNORE_SSL_ERRORS]:
            args.append('--ignore-certificate-errors')
        if opts[ReportJob.OPT_NO_INSECURE_CONTENT]:
            args.append('--no-insecure')
        # input document and output file are positional, at the end
        args.extend([opts[ReportJob.OPT_MAIN_SCRIPT], dest_file])
        return args
import io
from weasyprint import default_url_fetcher, HTML
from zipreport.processors.interface import ProcessorInterface
from zipreport.report import ReportFile, JobResult, ReportJob
from zipreport.report.const import REPORT_FILE_NAME
class WeasyPrintProcessor(ProcessorInterface):
    """
    WeasyPrint report processor
    """

    def __init__(self):
        """
        Constructor
        """
        super(WeasyPrintProcessor, self).__init__()
        self._css = None
        self._fconfig = None

    def add_css(self, css):
        """
        Append a CSS entry to the WeasyPrint stylesheet list
        :param css: stylesheet to add
        :return: self
        """
        if self._css is None:
            self._css = []
        self._css.append(css)
        return self

    def set_font_config(self, font_config):
        """
        Define the WeasyPrint font_config to use
        :param font_config: font configuration
        :return: self
        """
        self._fconfig = font_config
        return self

    def process(self, job: ReportJob) -> JobResult:
        """
        Render a job using WeasyPrint
        Note: all ReportJob options are ignored
        :param job: ReportJob
        :return: JobResult
        """
        zpt = job.get_report()

        def local_fetcher(url):
            # route resource requests through the report file
            return self.fetcher(zpt, url)

        html_source = io.TextIOWrapper(zpt.get(REPORT_FILE_NAME), encoding='utf-8').read()
        document = HTML(base_url="/", string=html_source, url_fetcher=local_fetcher)
        pdf = document.write_pdf(None, stylesheets=self._css, font_config=self._fconfig)
        return JobResult(io.BytesIO(pdf), True, "")

    def fetcher(self, zpt, url):
        """
        Resource fetcher used by WeasyPrint to resolve in-report resources
        such as images, css and js
        :param zpt: ReportFile
        :param url: url of the resource to fetch
        :return: fetcher result dict
        """
        if url.startswith("http"):
            return default_url_fetcher(url)
        original_url = url
        # accept both file:// urls and plain relative paths
        if url.startswith('file://'):
            url = url[7:]
        if not zpt.exists(url):
            return default_url_fetcher(original_url)
        return {'string': zpt.get(url).read()}
import json
from io import StringIO
from pathlib import Path
from zipfile import BadZipFile
from zipreport.fileutils import ZipFs, FsError
from zipreport.fileutils.backend.zip import InMemoryZip, InMemoryZipError
from zipreport.report.builder import ReportFileBuilder
from zipreport.report.const import MANIFEST_FILE_NAME
from zipreport.report.reportfile import ReportFile
class ReportFileLoaderError(Exception):
    """Raised when a report file or report directory cannot be loaded"""
    pass
class ReportFileLoader:
    """
    Helpers to load a ReportFile from disk (zpt file or template directory)
    """

    @staticmethod
    def load(source: str) -> ReportFile:
        """
        Load ReportFile from a source (either directory or a ZPT)
        :param source: path to load
        :return: ReportFile
        """
        source = Path(source)
        if source.is_dir():
            return ReportFileLoader.load_dir(source)
        return ReportFileLoader.load_file(source)

    @staticmethod
    def load_dir(path: str) -> ReportFile:
        """
        Generate ReportFile from a directory with a valid report template
        :param path: template path
        :return: ReportFile
        :raises ReportFileLoaderError: on build or manifest errors
        """
        # StringIO() discards the builder's console output
        zstatus, zfs = ReportFileBuilder.build_zipfs(path, StringIO())
        if not zstatus.success():
            error_msg = "; ".join(zstatus.get_errors())
            raise ReportFileLoaderError("Error loading report from path '{}': '{}'".format(path, error_msg))
        try:
            manifest = json.loads(bytes(zfs.get(MANIFEST_FILE_NAME).getbuffer()))
        except Exception as e:
            raise ReportFileLoaderError("Error: {}".format(e))
        return ReportFile(zfs, manifest)

    @staticmethod
    def load_file(file: str) -> ReportFile:
        """
        Load ReportFile from a zpt file
        :param file: zpt file path
        :return: ReportFile
        :raises ReportFileLoaderError: if the file is missing or invalid
        """
        file = Path(file)
        if not file.exists() or not file.is_file():
            # fixed: error message previously had an unbalanced quote
            raise ReportFileLoaderError("Cannot find file '{}'".format(file))
        try:
            zfs = ZipFs(InMemoryZip(file))
        except (FsError, InMemoryZipError, BadZipFile, ValueError) as e:
            raise ReportFileLoaderError("Error: {}".format(e))
        # load manifest & assemble report
        return ReportFileLoader.load_zipfs(zfs)

    @staticmethod
    def load_zipfs(zfs: ZipFs) -> ReportFile:
        """
        Generates a ReportFile from a ZipFs
        :param zfs: ZipFs
        :return: ReportFile
        :raises ReportFileLoaderError: if the zip is not a valid report
        """
        # load & validate manifest
        status, manifest = ReportFileBuilder.valid_zpt(zfs)
        if not status.success():
            raise ReportFileLoaderError("Error: {}".format("; ".join(status.get_errors())))
        return ReportFile(zfs, manifest)
import collections
from copy import deepcopy
from .const import *
from .reportfile import ReportFile
# Result type used by Processors
# report: io.BytesIO with the generated output (None on failure)
# success: bool; error: str with failure details (empty on success)
JobResult = collections.namedtuple('JobResult', ['report', 'success', 'error'])
class ReportJob:
    """
    Rendering job for a ReportFile

    Wraps a ReportFile together with all rendering options (page size,
    margins, timeouts, js-event handling) consumed by the processors.
    """
    # Available options
    OPT_PAGE_SIZE = 'page_size'
    OPT_MAIN_SCRIPT = 'script'
    OPT_MARGINS = 'margins'
    OPT_LANDSCAPE = 'landscape'
    OPT_SETTLING_TIME = 'settling_time'
    OPT_RENDER_TIMEOUT = 'timeout_render'
    OPT_JS_TIMEOUT = 'timeout_js'
    OPT_PROCESS_TIMEOUT = 'timeout_process'
    OPT_JS_EVENT = 'js_event'
    OPT_IGNORE_SSL_ERRORS = 'ignore_ssl_errors'
    OPT_NO_INSECURE_CONTENT = 'secure_only'

    # option defaults
    DEFAULT_OPTIONS = {
        OPT_PAGE_SIZE: PDF_PAGE_A4,
        OPT_MAIN_SCRIPT: REPORT_FILE_NAME,
        OPT_MARGINS: PDF_MARGIN_DEFAULT,
        OPT_LANDSCAPE: False,
        OPT_SETTLING_TIME: DEFAULT_SETTLING_TIME_MS,
        OPT_RENDER_TIMEOUT: DEFAULT_RENDER_TIMEOUT_S,
        OPT_JS_TIMEOUT: DEFAULT_JS_TIMEOUT_S,
        OPT_PROCESS_TIMEOUT: DEFAULT_PROCESS_TIMEOUT_S,
        OPT_JS_EVENT: False,
        OPT_IGNORE_SSL_ERRORS: False,
        OPT_NO_INSECURE_CONTENT: False,
    }

    def __init__(self, report: ReportFile):
        """
        Constructor
        Create a new rendering job from a ReportFile
        :param report: ReportFile object to use
        """
        self._report = report
        # deepcopy so per-job mutation never touches the class-level defaults
        self._options = deepcopy(self.DEFAULT_OPTIONS)
        # set optional report file name from manifest
        if report is not None:
            self._options[self.OPT_MAIN_SCRIPT] = report.get_param(MANIFEST_REPORT_FILE, REPORT_FILE_NAME)

    def _set_choice(self, option: str, value, choices) -> bool:
        """
        Store an option whose value must belong to a fixed set
        :param option: option key
        :param value: value to store
        :param choices: collection of valid values
        :return: True on success, False when value is not a valid choice
        """
        if value in choices:
            self._options[option] = value
            return True
        return False

    def _set_positive(self, option: str, value: int) -> bool:
        """
        Store an option whose value must be positive
        :param option: option key
        :param value: value to store
        :return: True on success, False when value is not positive
        """
        if value > 0:
            self._options[option] = value
            return True
        return False

    def get_options(self) -> dict:
        """
        Retrieve job options
        :return: dict
        """
        return self._options

    def get_report(self) -> ReportFile:
        """
        Retrieve job ReportFile
        :return: ReportFile
        """
        return self._report

    def set_page_size(self, size: str) -> bool:
        """
        Set job page size
        :param size: value in const.VALID_PAGE_SIZES
        :return: True on success, False on error
        """
        return self._set_choice(self.OPT_PAGE_SIZE, size, VALID_PAGE_SIZES)

    def set_margins(self, margins: str) -> bool:
        """
        Set job margin type
        :param margins: value in const.VALID_MARGINS
        :return: True on success, False on error
        """
        return self._set_choice(self.OPT_MARGINS, margins, VALID_MARGINS)

    def set_main_script(self, script: str) -> bool:
        """
        Set rendering output file name (default: const.REPORT_FILE_NAME)
        :param script: name to be used for the render output file
        :return: True on success, False on error
        """
        self._options[self.OPT_MAIN_SCRIPT] = script
        return True

    def set_landscape(self, landscape: bool) -> bool:
        """
        Set landscape mode
        :param landscape: bool
        :return: True on success, False on error
        """
        self._options[self.OPT_LANDSCAPE] = landscape
        return True

    def set_settling_time(self, ms: int) -> bool:
        """
        Set wait time for rendering
        Settling time is the time a render backend will wait, after loading the report, to start generating the pdf
        :param ms: milisseconds to wait
        :return: True on success, False on error
        """
        return self._set_positive(self.OPT_SETTLING_TIME, ms)

    def set_render_timeout(self, seconds: int) -> bool:
        """
        Set rendering timeout
        Render timeout is the time the render backend will wait for the whole rendering task
        :param seconds: seconds to wait
        :return: True on success, False on error
        """
        return self._set_positive(self.OPT_RENDER_TIMEOUT, seconds)

    def set_jsevent_timeout(self, seconds: int) -> bool:
        """
        Set JS Event timeout and enable waiting for the zpt-view-ready js event
        :param seconds: seconds to wait
        :return: True on success, False on error
        """
        if not self._set_positive(self.OPT_JS_TIMEOUT, seconds):
            return False
        self._options[self.OPT_JS_EVENT] = True
        return True

    def set_process_timeout(self, seconds: int) -> bool:
        """
        Set Process timeout
        Time to wait for the render backend
        :param seconds: seconds to wait
        :return: True on success, False on error
        """
        return self._set_positive(self.OPT_PROCESS_TIMEOUT, seconds)

    def set_jsevent(self, jsevent: bool) -> bool:
        """
        Set if renderer backend should wait for zpt-view-ready event
        :param jsevent: True to enable
        :return: True on success, False on error
        """
        self._options[self.OPT_JS_EVENT] = jsevent
        return True

    def set_ignore_ssl_errors(self, ignore: bool) -> bool:
        """
        Enable or disable CA SSL verification
        :param ignore: true to disable
        :return: True on success, False on error
        """
        self._options[self.OPT_IGNORE_SSL_ERRORS] = ignore
        return True

    def set_no_insecure_content(self, no_insecure: bool) -> bool:
        """
        Enable or disable rendering of insecure content
        :param no_insecure: true to disable insecure content
        :return: True on success, False on error
        """
        self._options[self.OPT_NO_INSECURE_CONTENT] = no_insecure
        return True
import json
import os
import sys
from pathlib import Path
from typing import Tuple, Union
from zipreport.fileutils import ZipFs, FsInterface, DiskFs
from zipreport.fileutils.backend.zip import InMemoryZip
from zipreport.report.const import MANIFEST_FILE_NAME, ZIPREPORT_FILE_EXTENSION, INDEX_FILE_NAME, \
MANIFEST_REQUIRED_FIELDS
class BuildResult:
    """
    Accumulates error messages generated during a build operation
    """

    def __init__(self, error=None):
        """
        Constructor
        :param error: optional initial error (str, list of str, or None)
        """
        self._err = []
        self.add_error(error)

    def add_error(self, error):
        """
        Register one or more errors; None is ignored
        :param error: error message, list of messages, or None
        :return: self, for chaining
        """
        # isinstance (instead of type comparison) also accepts list subclasses
        if isinstance(error, list):
            self._err.extend(error)
        elif error is not None:
            self._err.append(error)
        return self

    def get_errors(self) -> list:
        """
        Retrieve accumulated error messages
        :return: list
        """
        return self._err

    def success(self) -> bool:
        """
        Check if no errors were registered
        :return: bool
        """
        return len(self._err) == 0
class ReportFileBuilder:
    """
    Report building object

    Assembles a report template directory into a zpt (zip) report file and
    validates manifest/index contents.
    """

    @staticmethod
    def build_file(path: str, output_file: str, console=sys.stdout, overwrite: bool = False) -> BuildResult:
        """
        Assemble a report file from a specific path
        :param path: report dir path
        :param output_file: destination report file
        :param console: console writer
        :param overwrite: if True, overwrite destination if exists
        :return: BuildResult
        """
        status = BuildResult()
        path = Path(path)
        output_file = Path(output_file)
        # enforce the zpt extension on the output file name
        if output_file.suffix != ZIPREPORT_FILE_EXTENSION:
            output_file = output_file.parent / (output_file.name + ZIPREPORT_FILE_EXTENSION)
        console.write("\n== Building Report {} ==\n".format(output_file))
        # check paths
        if not path.exists():
            return status.add_error("Path '{}' not found".format(path))
        if not path.is_dir():
            return status.add_error("Path '{}' is not a directory".format(path))
        if output_file.exists():
            if not output_file.is_file():
                return status.add_error(
                    "Output file '{}' already exists and doesn't seem to be a file".format(output_file))
            if not overwrite:
                return status.add_error("Output file '{}' already exists".format(output_file))
        else:
            if not output_file.parent.exists():
                return status.add_error("Invalid path for output file: '{}'".format(output_file))
        # build ZipFs
        zfs_status, zfs = ReportFileBuilder.build_zipfs(path, console)
        if not zfs_status.success():
            return zfs_status
        try:
            # save zpt
            console.write("Generating {}...\n".format(output_file))
            if output_file.exists():
                console.write("Report file exists, overwriting...\n")
                output_file.unlink()
            zfs.get_backend().save(output_file)
        except Exception as e:
            return status.add_error("Error saving zpt file: {}".format(e))
        console.write("Done!\n")
        return status

    @staticmethod
    def build_zipfs(path: str, console=sys.stdout) -> Tuple[BuildResult, Union[ZipFs, None]]:
        """
        Assemble a ZipFs structure from a specific path
        :param path: report dir path
        :param console: console writer
        :return: [BuildResult, ZipFs]
        """
        status = BuildResult()
        path = Path(path)
        if not path.exists():
            return status.add_error("Path '{}' not found".format(path)), None
        if not path.is_dir():
            return status.add_error("Path '{}' is not a directory".format(path)), None
        # try to load & validate manifest
        console.write("Checking manifest & index file...\n")
        # valid_zpt() works only on FsInterface
        dfs = DiskFs(path)
        fstatus, _ = ReportFileBuilder.valid_zpt(dfs)
        if not fstatus.success():
            return fstatus, None
        # build ZPT and copy files
        console.write("Building...\n")
        zfs = ZipFs(InMemoryZip())
        # collect every file (recursively), then copy into the in-memory zip
        names = []
        for dirname, dirs, files in os.walk(path):
            dirname = Path(dirname)
            for f in files:
                names.append(dirname / Path(f))
        for name in names:
            # entries are stored with paths relative to the template root
            dest_name = name.relative_to(path)
            console.write("Copying {}...\n".format(dest_name))
            try:
                with open(name, 'rb') as f:
                    zfs.add(dest_name, f.read())
            except Exception as e:
                return status.add_error("Error copying file {}: {}".format(name, e)), None
        return status, zfs

    @staticmethod
    def valid_zpt(fs: FsInterface) -> Tuple[BuildResult, Union[dict, None]]:
        """
        Validates if a FsInterface is a valid report
        :param fs: FsInterface
        :return: (BuildResult, manifest_contents)
        """
        status = BuildResult()
        # check manifest
        try:
            manifest = json.loads(bytes(fs.get(MANIFEST_FILE_NAME).getbuffer()))
        except Exception as e:
            return status.add_error("Error processing manifest: {}".format(e)), None
        if type(manifest) is not dict:
            return status.add_error("Invalid manifest format"), None
        # every mandatory field must exist with the expected type
        for field, _type in MANIFEST_REQUIRED_FIELDS.items():
            if field not in manifest.keys():
                status.add_error("Missing mandatory field '{}' in manifest file".format(field))
            else:
                if type(manifest[field]) != _type:
                    status.add_error("Invalid type in manifest field '{}'".format(field))
        if not status.success():
            return status, None
        # check index.html
        try:
            fs.get(INDEX_FILE_NAME)
        except Exception as e:
            return status.add_error("Index file '{}' not found".format(INDEX_FILE_NAME)), None
        return status, manifest
import json
from copy import deepcopy
from jinja2 import select_autoescape, Environment
from zipreport.template.jinjaloader import JinjaReportLoader
from zipreport.report import ReportFile
from zipreport.report.const import INDEX_FILE_NAME, MANIFEST_PARAMETERS, REPORT_FILE_NAME, DATA_FILE_NAME
# register filters
from zipreport.template.jinja import filters
class JinjaRender:
    """
    Jinja-based renderer for ReportFile templates
    """
    OPT_EXTENSIONS = 'extensions'
    OPT_STRICT_PARAMS = 'strict_params'

    DEFAULT_OPTIONS = {
        OPT_EXTENSIONS: [],
        OPT_STRICT_PARAMS: True,
    }

    def __init__(self, zpt: ReportFile, options: dict = None):
        """
        jinja Renderer
        :param zpt: ReportFile to use
        :param options: extension details to be passed to jinja
        """
        self.zpt = zpt
        self.env = None
        self.options = deepcopy(self.DEFAULT_OPTIONS)
        if options is not None:
            self.options = {**self.options, **options}

    def get_env(self) -> Environment:
        """
        Build the jinja Environment backed by the report file
        :return: Environment
        """
        return Environment(
            loader=JinjaReportLoader(self.zpt),
            autoescape=select_autoescape(['html', 'xml']),
            extensions=self.options[self.OPT_EXTENSIONS])

    def check_params(self, data: dict):
        """
        Ensure every parameter declared in the manifest is present in data
        Only enforced when the strict params option is enabled
        :param data: data to validate
        """
        if not self.options[self.OPT_STRICT_PARAMS]:
            return
        for expected in self.zpt.get_param(MANIFEST_PARAMETERS):
            if expected not in data.keys():
                raise RuntimeError("Parameter '{}' missing on render() call".format(expected))

    def render(self, data: dict = None, template: str = INDEX_FILE_NAME,
               default_data_file: str = DATA_FILE_NAME) -> str:
        """
        Render the template into REPORT_FILE_NAME, inside the ReportFile
        When data is None, render() tries to load a default json data file and
        uses it as view data; zipreport dynamic filters can't be used as default data
        :param data: data to be passed to the template
        :param template: main template file
        :param default_data_file: optional json data file to be used as default
        :return: result of the rendering
        """
        if data is None:
            data = self._discover_data(default_data_file)
        self.check_params(data)
        output = self.get_env().get_template(template).render(**data)
        self.zpt.add(REPORT_FILE_NAME, output)
        return output

    def _discover_data(self, data_file: str = DATA_FILE_NAME) -> dict:
        """
        Load default data from a json file, when it exists
        :param data_file: json file path inside the report
        :return: dict
        """
        if not self.zpt.exists(data_file):
            return {}
        try:
            return dict(json.loads(self.zpt.get(data_file).read()))
        except json.JSONDecodeError:
            raise RuntimeError("Default data file {} is invalid".format(data_file))
import base64
import json
from pathlib import Path
from typing import Union
from uuid import uuid4
import markupsafe
from jinja2 import pass_environment
from jinja2.defaults import DEFAULT_FILTERS
from zipreport.template.jinjaloader import JinjaReportLoader
from zipreport.misc import html_tag
# attribute names used when assembling the generated <img> tag
ARG_DATA = 'data'
ATTR_SRC = 'src'
ATTR_ALT = 'alt'
ATTR_WIDTH = 'width'
ATTR_HEIGHT = 'height'
ATTR_CLASS = 'class'
# named parameters accepted by the image filters, in positional-consumption order
IMAGE_NAMED_PARAMS = [ARG_DATA, ATTR_ALT, ATTR_WIDTH, ATTR_HEIGHT, ATTR_CLASS]
def dynamic_image(args: list, kwargs: Union[dict, None], extension: str):
    """
    Dynamic Image tag generator

    Shared implementation behind the png/gif/jpg/svg filters. args[0] is the
    jinja Environment (injected by @pass_environment on the callers), args[1]
    is the filtered value (either a callable producing image contents, or a
    string naming an existing file), and the remaining positional/named
    arguments map to [data, alt, width, height, class].

    possible args: [Environment, generator, data, alt, width, height, class]
    :param args: argument list
    :param kwargs: named argument list (or None)
    :param extension: generated file extension (e.g. '.png')
    :return: markupsafe.Markup holding the <img> tag
    :raises RuntimeError: on bad arguments or wrong environment
    """
    al = len(args)
    if al < 2:
        raise RuntimeError("Invalid number of arguments")
    # load & validate env and callable
    loader = args[0].loader
    generator = args[1]
    callable_generator = callable(generator)
    # NOTE(review): the error messages below hard-code "png()" even when this
    # helper is reached through the gif/jpg/svg filters
    if not isinstance(loader, JinjaReportLoader):
        raise RuntimeError("Invalid environment. png() filter requires ReportLoader")
    if not callable_generator and not isinstance(generator, str):
        raise RuntimeError("png() must be applied to a callable function or a placeholder string")
    # process args and kwargs: named arguments take precedence; remaining
    # positional arguments are consumed in IMAGE_NAMED_PARAMS order
    if kwargs is None:
        kwargs = {}
    img_args = {}
    ai = 2  # positional user arguments start at args[2]
    for arg in IMAGE_NAMED_PARAMS:
        if arg in kwargs.keys():
            img_args[arg] = kwargs[arg]
        else:
            if ai < al:
                img_args[arg] = args[ai]
                ai += 1
    # callable may not always require data
    if ARG_DATA not in img_args.keys():
        img_args[ARG_DATA] = None
    # execute callable & save image under a unique name in data/
    zpt = loader.get_report()
    if callable_generator:
        result = generator(img_args[ARG_DATA])
        name = Path('data') / (uuid4().hex + extension)
        zpt.add(name, result)
    else:
        # if generator is string, skip image generation and use specified file
        name = generator
    # assemble html tag ('data' is not an <img> attribute, so drop it)
    img_args.pop(ARG_DATA)
    img_args[ATTR_SRC] = "{}".format(name)
    return markupsafe.Markup(html_tag('img', img_args))
@pass_environment
def dynamic_png(*args, **kwargs) -> markupsafe.Markup:
    """
    Dynamic PNG img tag generator

    Can be called either via positional arguments or via named arguments, or both
    Positional args:
        {{ callable | png(data_source, alt_text, width, height, css_class) }}
    Named args:
        {{ callable | png(data=data_source, alt=alt_text, width=width, height=height, class=css_class) }}
    Mixed args:
        {{ callable | png(width=128, height=128) }}
    :return: Markup holding the generated <img> tag
    """
    return dynamic_image(args, kwargs, '.png')
@pass_environment
def dynamic_gif(*args, **kwargs) -> markupsafe.Markup:
    """
    Dynamic GIF img tag generator

    Can be called either via positional arguments or via named arguments, or both
    Positional args:
        {{ callable | gif(data_source, alt_text, width, height, css_class) }}
    Named args:
        {{ callable | gif(data=data_source, alt=alt_text, width=width, height=height, class=css_class) }}
    Mixed args:
        {{ callable | gif(width=128, height=128) }}
    :return: Markup holding the generated <img> tag
    """
    return dynamic_image(args, kwargs, '.gif')
@pass_environment
def dynamic_jpg(*args, **kwargs) -> markupsafe.Markup:
    """
    Dynamic JPG img tag generator

    Can be called either via positional arguments or via named arguments, or both
    Positional args:
        {{ callable | jpg(data_source, alt_text, width, height, css_class) }}
    Named args:
        {{ callable | jpg(data=data_source, alt=alt_text, width=width, height=height, class=css_class) }}
    Mixed args:
        {{ callable | jpg(width=128, height=128) }}
    :return: Markup holding the generated <img> tag
    """
    return dynamic_image(args, kwargs, '.jpg')
@pass_environment
def dynamic_svg(*args, **kwargs) -> markupsafe.Markup:
    """
    Dynamic SVG img tag generator

    Can be called either via positional arguments or via named arguments, or both
    Positional args:
        {{ callable | svg(data_source, alt_text, width, height, css_class) }}
    Named args:
        {{ callable | svg(data=data_source, alt=alt_text, width=width, height=height, class=css_class) }}
    Mixed args:
        {{ callable | svg(width=128, height=128) }}
    :return: Markup holding the generated <img> tag
    """
    return dynamic_image(args, kwargs, '.svg')
def do_json(*args) -> markupsafe.Markup:
    """
    Jinja filter that serializes a value to a JSON string

    :param args: exactly one positional argument - the value to serialize
    :return: Markup with the JSON representation
    :raises RuntimeError: if not called with exactly one argument
    """
    # bugfix: was len(*args), which unpacked the args tuple into len() and
    # measured the single argument's length instead of counting arguments
    if len(args) != 1:
        raise RuntimeError("Invalid number of arguments. json filter requires a variable")
    # note: json.dumps raises TypeError for unserializable values; the old
    # `except ValueError: raise` wrapper was a no-op and has been removed
    return markupsafe.Markup(json.dumps(args[0]))
# Register zipreport's image/json filters on jinja's default filter table
DEFAULT_FILTERS.update({
    'png': dynamic_png,
    'gif': dynamic_gif,
    'jpg': dynamic_jpg,
    'svg': dynamic_svg,
    'json': do_json,
})
# zipreport
Very lightweight module for creating PDF reports with Python
## Motivation
This library is meant to be a drop-in replacement for report generation I was doing with Filemaker Pro and
operates under about the same principles as print layout on Filemaker Pro. You, the client, have a list of
ordered records, which can be any subscriptable thing but in my example are `dicts`, and these are fed into
a `Document` object you create and customize which establishes how fields in each record are formatted and
laid out on the page, the formatting of page headers and footers, and summary headers and footers.
## Example
In the [example](test/test_functional.py) you can see how a basic report is customized. All formatting is
contained in a `Document` object, draws `Part` objects in various parts of the document based on certain
conditions. The `page_header` and `page_footer` parts are drawn at the top and bottom of each page.
Each record to be printed is displayed in a `content_part`:
```python
content_part = Part(
elements=[Element(x=0.,
y=0.,
width=72.,
height=18.,
content=FormattedText("N:$name", font_family='Futura', font_size=9.)),
Element(x=96.,
y=0,
width=72.*4.,
height=4.*72.,
can_shrink=True,
content=FormattedText("$comment", font_family='Futura', font_size=9.)),
Element(x=72. * 6, y=0., width=36, height=18,
content=FormattedText("$rn", font_family='Futura', font_size=9.,
alignment='r'))],
minimum_height=72.
)
```
A `Part` contains a list of `Element` objects which define a rectangle (positioned relative to the origin,
the upper-left corner of the parent `Part`), and each element has a corresponding `Content`. `Content`
objects contain specific style and content. The `FormattedText` content has a format string which can
substitute values from a content object. For example above, the first element reads the 'name' key from
the content object and substitutes it into the format string.
## Under Construction
This project is still under construction but functions on a basic level.
| zipreport | /zipreport-0.1.tar.gz/zipreport-0.1/README.md | README.md |
Zip file streamer for HTTP
==========================
Similar systems/projects:
* The `Nginx zip module
<https://www.nginx.com/resources/wiki/modules/zip/>`_. Only for Nginx, so
can't be used with other webservers.
* `python-zipstream <https://github.com/allanlei/python-zipstream>`_. Does not
support calculating the file size beforehand or seeing through the file.
Usage:
.. code:: python
import zipseeker
# Create an index
fp = zipseeker.ZipSeeker()
fp.add('some/file.txt')
fp.add('another/file.txt', 'file2.txt')
# Calculate the total file size, e.g. for the Content-Length HTTP header.
contentLength = fp.size()
# Calculate the last-modified date, e.g. for the Last-Modified HTTP header.
lastModified = fp.lastModified()
# Send the ZIP file to the client
# Optionally add the start and end parameters for range requests.
# Note that the ZIP format doesn't support actually skipping parts of the file,
# as it needs to calculate the CRC-32 of every file at the end of the file.
fp.writeStream(outputFile)
Why?
----
While the file size of a ZIP file usually can't be calculated beforehand due to
compression, this is actually optional. The headers itself also have a pretty
constant size. That means that the whole file can have a predetermined file size
(and modtime).
This is useful when you want to provide ZIP downloads of large directories with
uncompressible files (e.g. images). The specific use case I created this
module for was to provide downloads of whole photo albums without such
inconveniences as requesting a download link in an e-mail, using a lot of
system resources for the creation of temporary files, and having to delete
these files afterwards.
Of course, it's possible to just stream a ZIP file, but that won't provide any
progress indication for file downloads and certainly doesn't support `Range
requests <https://developer.mozilla.org/en-US/docs/Web/HTTP/Range_requests>`_.
For more information, see the `Nginx zip module
<https://www.nginx.com/resources/wiki/modules/zip/>`_.
TODO
----
* Implement actual seeking in the file - this should be doable.
* Use a CRC-32 cache that can be shared by the calling module.
| zipseeker | /zipseeker-1.0.11.tar.gz/zipseeker-1.0.11/README.rst | README.rst |
# *ZIPSLICER* 📁✂️
[](https://github.com/kir-gadjello/zipslicer/actions/workflows/python-test.yml)
[](https://github.com/kir-gadjello/zipslicer/actions/workflows/pypi-deploy.yml)
A library for incremental loading of large PyTorch checkpoints<br>
[Read a blogpost introduction by yours truly](https://kir-gadjello.github.io/zipslicer)
## Synopsis
```python
import torch
import zipslicer
# Could be a private custom recurrent sentient transformer
# instead of a garden variety resnet
my_complicated_network = torch.hub.load(
"pytorch/vision:v0.10.0", "resnet18", pretrained=True
)
s_dict = my_complicated_network.state_dict()
torch.save(s_dict, "my_network_checkpoint_v123.pth")
del my_complicated_network
# Later, on a smaller unrelated machine you load a "LazyStateDict"
# Which is just like a regular state dict, but it loads tensors only when it has to
lazy_s_dict = zipslicer.load("my_network_checkpoint_v123.pth")
layer3_tensors = {}
for k in lazy_s_dict.keys():
if k.startswith("layer3"):
layer3_tensors[k] = lazy_s_dict[k]
# Now you have layer3's tensors and you can analyze them without breaking your RAM.
# Or you can instantiate the layers' classes in sequence and compute the whole
# network's output for a given input by threading the activations through them.
# But we will just print the tensors instead:
print(layer3_tensors)
```
Run this example and unit-tests:
`python examples/example_resnet18.py`
`pytest -o log_cli=true --capture=tee-sys -p no:asyncio`
Test your checkpoint for compatibility:
`python tests/test_checkpoint_readonly.py your_magnificent_checkpoint.pth`
If it's all green, it will work.
## Prerequisites
* Supported python and torch versions: `python-3.10 + torch-(1.11,1.12,stable)` `python-3.11 + torch:stable`
* Generally, `zipslicer` should work with modern enough install of PyTorch - use [included safe test](https://github.com/kir-gadjello/zipslicer/blob/main/tests/test_checkpoint_readonly.py) to check for compatibility of `zipslicer` with your PyTorch and your checkpoint. This is a pure Python library, so specific CPU architecture shouldn't matter.
* A checkpoint produced by saving your model's `state_dict` via vanilla torch.save(...) - default settings should suffice, as Torch doesn't use ZIP compression.
* An application that can take advantage of incrementally-loaded checkpoint - i.e. if your app just loads all `state_dict.items()` in a loop right away it doesn't make much sense to use this library. Make sure your code reads `state_dict.keys()` (and `state_dict.get_meta(k)` if necessary) and uses these intelligently to work on a subset of `state_dict[k]` tensors at a time. For general inspiration you might read [this (HF)](https://huggingface.co/docs/transformers/v4.26.0/en/main_classes/model#transformers.modeling_utils.load_sharded_checkpoint) and [this (arxiv)](https://arxiv.org/abs/2104.07857). With some additional engineering it should be possible to run Large Language Models like [BLOOM-176B](https://huggingface.co/bigscience/bloom) or [FLAN-T5-XXL](https://huggingface.co/google/flan-t5-xxl) on a single mid-range GPU at home - if you are willing to wait for a night's worth of time. In the large batch regime this might even make some practical sense, for example to process a set of documents into embeddings.
## Install
Generally, copying the `zipslicer/zipslicer` directory into your project's source tree is enough.
If you are a fan of official ceremony-driven install processes for executable modules of dubious provenance, soon there will be a possibility of installing this boutique software module via pip: `pip install zipslicer`
## Notes
* This library is only for reading pytorch tensors from checkpoints. We leave writing for future work.
* Writing to loaded `state_dict` is frowned upon, but it *will* work - though you should avoid doing this while iterating over keys for now and expecting the keys to reflect this update.
* Perhaps more importantly, **general-purpose pickles are not supported** - the design of this library doesn't allow you to load whole neural network class instances. Usually this isn't necessary, and [pytorch official documentation recommends you to use `state_dict` for model serialization](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict). We support `state_dict`'s.
* Some rare tensor types (i.e: pytorch quantized tensors - not to be confused with integer tensors which work fine) are not yet supported. If this bothers you, share your experience in issues.
* We say "Hi" to [HF `safetensors` project](https://github.com/huggingface/safetensors), but note that in comparison to theirs, our approach doesn't require checkpoint conversion which takes significant time and storage. In fact, both approaches could be complementary, as you will have to load tensors from the pytorch checkpoint somehow to convert it to `safetensors` - and the default loading mechanism is constrained by available RAM.
## Prospective features we are considering
If you are interested in some of these features, consider creating an issue:
* Effective loading of tensor slices - to implement tensor parallelism in sharded deployments
* Accessing the source checkpoint over a network
* Writing to a checkpoint in-place
* Incremental conversion to other checkpoint formats
| zipslicer | /zipslicer-0.8.1.tar.gz/zipslicer-0.8.1/README.md | README.md |
import os
import pandas as pd
import glob
from fpdf import FPDF
from pathlib import Path
def generate(invoices_path, pdfs_path, image_path, product_id, product_name,
             amount_purchased, price_per_unit, total_price):
    """
    Convert invoice Excel files into PDF invoices.

    Every ``*.xlsx`` file in *invoices_path* is expected to be named
    ``<invoice_nr>-<date>.xlsx`` and to contain a sheet called "Sheet 1"
    with one row per purchased product. One PDF per workbook is written
    into *pdfs_path* (created on demand), named after the source workbook.

    :param invoices_path: directory containing the source .xlsx invoice files
    :param pdfs_path: output directory for the generated PDF files
    :param image_path: path of the company logo embedded in each PDF
    :param product_id: column name holding the product id
    :param product_name: column name holding the product name
    :param amount_purchased: column name holding the purchased quantity
    :param price_per_unit: column name holding the unit price
    :param total_price: column name holding the per-row total price
    :return: None
    """
    filepaths = glob.glob(f"{invoices_path}/*.xlsx")

    for filepath in filepaths:
        pdf = FPDF(orientation="P", unit="mm", format="A4")
        pdf.add_page()

        # Invoice number and date are encoded in the file name
        filename = Path(filepath).stem
        invoice_nr, date = filename.split("-")

        pdf.set_font(family="Times", size=16, style="B")
        pdf.cell(w=50, h=8, txt=f"Invoice nr.{invoice_nr}", ln=1)

        pdf.set_font(family="Times", size=16, style="B")
        pdf.cell(w=50, h=8, txt=f"Date: {date}", ln=1)

        df = pd.read_excel(filepath, sheet_name="Sheet 1")

        # Header row with prettified column names
        columns = [item.replace("_", " ").title() for item in df.columns]
        pdf.set_font(family="Times", size=10, style="B")
        pdf.set_text_color(80, 80, 80)
        pdf.cell(w=30, h=8, txt=columns[0], border=1)
        pdf.cell(w=70, h=8, txt=columns[1], border=1)
        pdf.cell(w=30, h=8, txt=columns[2], border=1)
        pdf.cell(w=30, h=8, txt=columns[3], border=1)
        pdf.cell(w=30, h=8, txt=columns[4], border=1, ln=1)

        # One table row per purchased product
        for index, row in df.iterrows():
            pdf.set_font(family="Times", size=10)
            pdf.set_text_color(80, 80, 80)
            pdf.cell(w=30, h=8, txt=str(row[product_id]), border=1)
            pdf.cell(w=70, h=8, txt=str(row[product_name]), border=1)
            pdf.cell(w=30, h=8, txt=str(row[amount_purchased]), border=1)
            pdf.cell(w=30, h=8, txt=str(row[price_per_unit]), border=1)
            pdf.cell(w=30, h=8, txt=str(row[total_price]), border=1, ln=1)

        # Closing row holding only the grand total
        total_sum = df[total_price].sum()
        pdf.set_font(family="Times", size=10)
        pdf.set_text_color(80, 80, 80)
        pdf.cell(w=30, h=8, txt="", border=1)
        pdf.cell(w=70, h=8, txt="", border=1)
        pdf.cell(w=30, h=8, txt="", border=1)
        pdf.cell(w=30, h=8, txt="", border=1)
        pdf.cell(w=30, h=8, txt=str(total_sum), border=1, ln=1)

        # Add total sum sentence
        pdf.set_font(family="Times", size=10, style="B")
        pdf.cell(w=30, h=8, txt=f"The total price is {total_sum}", ln=1)

        # Add company name and logo
        pdf.set_font(family="Times", size=14, style="B")
        pdf.cell(w=25, h=8, txt=f"PythonHow")
        pdf.image(image_path, w=10)

        # exist_ok avoids the check-then-create race of the old exists()+makedirs()
        os.makedirs(pdfs_path, exist_ok=True)
        # bugfix: every invoice used to be written to the same literal
        # "(unknown).pdf" file; name each PDF after its source workbook instead
        pdf.output(f"{pdfs_path}/{filename}.pdf")
from __future__ import print_function, unicode_literals, with_statement
import os
import stat
import struct
import sys
import time
import zipfile
from zipfile import (ZIP64_LIMIT, ZIP_DEFLATED, ZIP_FILECOUNT_LIMIT,
ZIP_MAX_COMMENT, ZIP_STORED, crc32, stringCentralDir,
stringEndArchive, stringEndArchive64,
stringEndArchive64Locator, stringFileHeader,
structCentralDir, structEndArchive, structEndArchive64,
structEndArchive64Locator, structFileHeader, zlib)
from .compat import (BZIP2_VERSION, LZMA_VERSION, SEEK_CUR, SEEK_END, SEEK_SET,
ZIP64_VERSION, ZIP_BZIP2, ZIP_LZMA, bytes, str)
# package version
__version__ = '1.1.4'
# magic number (signature) for the data descriptor record
stringDataDescriptor = b'PK\x07\x08'  # magic number for data descriptor
def _get_compressor(compress_type):
    """Return a fresh compressor object for *compress_type*, or None when the
    entry is stored uncompressed (or the method is unknown)."""
    if compress_type == ZIP_BZIP2:
        from zipfile import bz2
        return bz2.BZ2Compressor()
    if compress_type == ZIP_LZMA:
        from zipfile import LZMACompressor
        return LZMACompressor()
    if compress_type == ZIP_DEFLATED:
        # raw deflate stream (negative wbits: no zlib header/trailer)
        return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
    return None
class PointerIO(object):
    """
    Minimal write-only file-like object that tracks the write position but
    stores nothing: write() returns the (encoded) bytes instead of buffering
    them, so callers can stream them out while tell() still reports correct
    offsets for the ZIP bookkeeping.
    """

    def __init__(self, mode='wb'):
        """
        :param mode: only 'wb' is supported
        :raises RuntimeError: for any other mode
        """
        if mode not in ('wb', ):
            raise RuntimeError('zipstream.ZipFile() requires mode "wb"')
        self.data_pointer = 0  # number of bytes written so far
        self.__mode = mode
        self.__closed = False

    @property
    def mode(self):
        """File mode this object was opened with (always 'wb')."""
        return self.__mode

    @property
    def closed(self):
        """True once close() has been called."""
        return self.__closed

    def close(self):
        """Mark the stream closed; subsequent writes raise ValueError."""
        self.__closed = True

    def flush(self):
        """No-op: nothing is buffered."""
        pass

    def next(self):
        raise NotImplementedError()

    # seek() is intentionally unsupported: the output is a forward-only
    # stream, which is why data descriptor records are used instead of
    # rewriting local file headers after the fact.

    def tell(self):
        """Return the number of bytes written so far."""
        return self.data_pointer

    def truncate(self, size=None):
        # bugfix: the original signature was missing 'self'
        raise NotImplementedError()

    def write(self, data):
        """
        Account for *data* and return it (encoded to UTF-8 when given str),
        so the caller can yield it downstream.
        :raises ValueError: if the stream is closed
        :raises TypeError: if data is neither str nor bytes
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')

        if isinstance(data, str):
            data = data.encode('utf-8')
        if not isinstance(data, bytes):
            raise TypeError('expected bytes')
        self.data_pointer += len(data)
        return data
class ZipInfo(zipfile.ZipInfo):
    """
    ZipInfo variant for streaming output: local file headers are written with
    bit 3 of the flag bits set, meaning CRC and sizes are zero in the header
    and are supplied afterwards in a data descriptor record.
    """

    def __init__(self, *args, **kwargs):
        zipfile.ZipInfo.__init__(self, *args, **kwargs)
        self.flag_bits = 0x08  # ZIP flag bits, bit 3 indicates presence of data descriptor

    def FileHeader(self, zip64=None):
        """Return the per-file (local) header as a string."""
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        min_version = 0
        if zip64 is None:
            zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
        if zip64:
            fmt = b'<HHQQ'
            extra = extra + struct.pack(
                fmt, 1, struct.calcsize(fmt) - 4, file_size, compress_size)
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            if not zip64:
                # bugfix: LargeZipFile was referenced unqualified but never
                # imported in this module; use the class from zipfile
                raise zipfile.LargeZipFile("Filesize would require ZIP64 extensions")
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension
            file_size = 0xffffffff
            compress_size = 0xffffffff
            min_version = ZIP64_VERSION

        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        self.extract_version = max(min_version, self.extract_version)
        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                             self.extract_version, self.reserved, flag_bits,
                             self.compress_type, dostime, dosdate, CRC,
                             compress_size, file_size,
                             len(filename), len(extra))
        return header + filename + extra

    def DataDescriptor(self):
        """
        Return the data descriptor record for this entry:
            crc-32              4 bytes
            compressed size     4 bytes (8 when either size exceeds ZIP64_LIMIT)
            uncompressed size   4 bytes (8 when either size exceeds ZIP64_LIMIT)
        """
        if self.compress_size > ZIP64_LIMIT or self.file_size > ZIP64_LIMIT:
            fmt = b'<4sLQQ'
        else:
            fmt = b'<4sLLL'
        return struct.pack(fmt, stringDataDescriptor, self.CRC,
                           self.compress_size, self.file_size)
class ZipFile(zipfile.ZipFile):
    # Streaming ZIP writer: write()/write_iter()/writestr() only queue work;
    # the actual archive bytes are produced lazily by iterating the object
    # (see __iter__), which drains the queue and then emits the central
    # directory and end-of-archive records.
    def __init__(self, fileobj=None, mode='w', compression=ZIP_STORED, allowZip64=False):
        """Open the ZIP file with mode write "w".

        :param fileobj: target file-like object; defaults to a PointerIO,
            which tracks offsets without buffering the output
        :param mode: only 'w' is supported
        :param compression: default compression method for queued entries
        :param allowZip64: enable ZIP64 extensions for large entries
        :raises RuntimeError: for any mode other than 'w'
        """
        if mode not in ('w', ):
            raise RuntimeError('zipstream.ZipFile() requires mode "w"')
        if fileobj is None:
            fileobj = PointerIO()

        self._comment = b''
        zipfile.ZipFile.__init__(self, fileobj, mode=mode, compression=compression, allowZip64=allowZip64)
        # TODO: Refactor to a write queue with args + kwargs matching write()
        self.paths_to_write = []

    def __iter__(self):
        # Drain the queue of pending entries, then emit the closing records.
        for data in self.flush():
            yield data
        for data in self.__close():
            yield data

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def flush(self):
        # Generator: write every queued entry, yielding the produced chunks.
        while self.paths_to_write:
            kwargs = self.paths_to_write.pop(0)
            for data in self.__write(**kwargs):
                yield data

    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        return self._comment

    @comment.setter
    def comment(self, comment):
        # Comment must be bytes and is silently truncated to the format limit.
        if not isinstance(comment, bytes):
            raise TypeError("comment: expected bytes, got %s" % type(comment))
        # check for valid comment length
        if len(comment) >= ZIP_MAX_COMMENT:
            if self.debug:
                print('Archive comment is too long; truncating to %d bytes'
                      % ZIP_MAX_COMMENT)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        self._didModify = True

    def write(self, filename, arcname=None, compress_type=None):
        # Queue a file from disk for writing; nothing is emitted until the
        # archive is iterated/flushed.
        # TODO: Reflect python's Zipfile.write
        #   - if filename is file, write as file
        #   - if filename is directory, write an empty directory
        kwargs = {'filename': filename, 'arcname': arcname, 'compress_type': compress_type}
        self.paths_to_write.append(kwargs)

    def write_iter(self, arcname, iterable, compress_type=None, buffer_size=None, filemode=600, dirmode=600):
        """Write the bytes iterable `iterable` to the archive under the name `arcname`."""
        kwargs = {'arcname': arcname, 'iterable': iterable, 'compress_type': compress_type, 'buffer_size': buffer_size,
                  'filemode': filemode, 'dirmode': dirmode}
        self.paths_to_write.append(kwargs)

    def writestr(self, arcname, data, compress_type=None, buffer_size=None, filemode=600, dirmode=600):
        """
        Writes a str into ZipFile by wrapping data as a generator
        """
        def _iterable():
            yield data
        return self.write_iter(arcname, _iterable(), compress_type=compress_type, buffer_size=buffer_size, filemode=filemode, dirmode=dirmode)

    def __write(self, filename=None, iterable=None, arcname=None, compress_type=None, buffer_size=None, filemode=600, dirmode=600):
        """Put the bytes from filename into the archive under the name
        `arcname`.

        Generator yielding the archive chunks for a single entry: local file
        header, (optionally compressed) file data, then a data descriptor
        carrying the CRC and sizes. Exactly one of filename/iterable must be
        given.
        """
        if not self.fp:
            raise RuntimeError(
                "Attempt to write to ZIP archive that was already closed")
        if (filename is None and iterable is None) or (filename is not None and iterable is not None):
            raise ValueError("either (exclusively) filename or iterable shall be not None")

        if filename:
            st = os.stat(filename)
            isdir = stat.S_ISDIR(st.st_mode)
            mtime = time.localtime(st.st_mtime)
            date_time = mtime[0:6]
        else:
            # iterable entries get the current time and synthetic permissions
            st, isdir, date_time = None, False, time.localtime()[0:6]

        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        if arcname[-1] == '/':
            isdir = True
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        if st:
            zinfo.external_attr = (st[0] & 0xFFFF) << 16      # Unix attributes
        else:
            # build Unix mode bits from the numeric filemode/dirmode arguments
            mode = int('100' + str(filemode), 8)
            dir_mode = int('40' + str(dirmode), 8)
            if isdir:
                zinfo.external_attr = dir_mode << 16
            else:
                zinfo.external_attr = mode << 16
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type

        if st:
            zinfo.file_size = st[6]
        else:
            zinfo.file_size = buffer_size or 0
        zinfo.flag_bits = 0x00
        zinfo.flag_bits |= 0x08                 # ZIP flag bits, bit 3 indicates presence of data descriptor
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        if zinfo.compress_type == ZIP_LZMA:
            # Compressed data includes an end-of-stream (EOS) marker
            zinfo.flag_bits |= 0x02

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            # directory entry: header only, no data or descriptor
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            yield self.fp.write(zinfo.FileHeader(False))
            return

        cmpr = _get_compressor(zinfo.compress_type)

        # Must overwrite CRC and sizes with correct data later
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        # Compressed size can be larger than uncompressed size
        zip64 = self._allowZip64 and \
            zinfo.file_size * 1.05 > ZIP64_LIMIT
        yield self.fp.write(zinfo.FileHeader(zip64))

        # stream the payload in chunks, accumulating CRC and sizes as we go
        file_size = 0
        if filename:
            with open(filename, 'rb') as fp:
                while 1:
                    buf = fp.read(1024 * 8)
                    if not buf:
                        break
                    file_size = file_size + len(buf)
                    CRC = crc32(buf, CRC) & 0xffffffff
                    if cmpr:
                        buf = cmpr.compress(buf)
                        compress_size = compress_size + len(buf)
                    yield self.fp.write(buf)
        else:  # we have an iterable
            for buf in iterable:
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                yield self.fp.write(buf)

        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            yield self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        if not zip64 and self._allowZip64:
            if file_size > ZIP64_LIMIT:
                raise RuntimeError('File size has increased during compressing')
            if compress_size > ZIP64_LIMIT:
                raise RuntimeError('Compressed size larger than uncompressed size')

        # The stream is forward-only, so instead of seeking back to patch the
        # local header (as zipfile does), a data descriptor record is emitted:
        # position = self.fp.tell()       # Preserve current position in file
        # self.fp.seek(zinfo.header_offset, 0)
        # self.fp.write(zinfo.FileHeader(zip64))
        # self.fp.seek(position, 0)
        yield self.fp.write(zinfo.DataDescriptor())
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo

    def __close(self):
        """Close the file, and for mode "w" write the ending
        records (central directory, optional ZIP64 records, end-of-archive).
        Generator yielding the produced chunks."""
        if self.fp is None:
            return

        try:
            if self.mode in ('w', 'a') and self._didModify:  # write ending records
                count = 0
                pos1 = self.fp.tell()
                for zinfo in self.filelist:         # write central directory
                    count = count + 1
                    dt = zinfo.date_time
                    dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
                    dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
                    extra = []
                    # sizes/offset that overflow 32 bits are moved into a
                    # ZIP64 extra field and replaced by 0xffffffff sentinels
                    if zinfo.file_size > ZIP64_LIMIT \
                            or zinfo.compress_size > ZIP64_LIMIT:
                        extra.append(zinfo.file_size)
                        extra.append(zinfo.compress_size)
                        file_size = 0xffffffff
                        compress_size = 0xffffffff
                    else:
                        file_size = zinfo.file_size
                        compress_size = zinfo.compress_size

                    if zinfo.header_offset > ZIP64_LIMIT:
                        extra.append(zinfo.header_offset)
                        header_offset = 0xffffffff
                    else:
                        header_offset = zinfo.header_offset

                    extra_data = zinfo.extra
                    min_version = 0
                    if extra:
                        # Append a ZIP64 field to the extra's
                        extra_data = struct.pack(
                            b'<HH' + b'Q'*len(extra),
                            1, 8*len(extra), *extra) + extra_data
                        min_version = ZIP64_VERSION

                    if zinfo.compress_type == ZIP_BZIP2:
                        min_version = max(BZIP2_VERSION, min_version)
                    elif zinfo.compress_type == ZIP_LZMA:
                        min_version = max(LZMA_VERSION, min_version)

                    extract_version = max(min_version, zinfo.extract_version)
                    create_version = max(min_version, zinfo.create_version)
                    try:
                        filename, flag_bits = zinfo._encodeFilenameFlags()
                        centdir = struct.pack(structCentralDir,
                                              stringCentralDir, create_version,
                                              zinfo.create_system, extract_version, zinfo.reserved,
                                              flag_bits, zinfo.compress_type, dostime, dosdate,
                                              zinfo.CRC, compress_size, file_size,
                                              len(filename), len(extra_data), len(zinfo.comment),
                                              0, zinfo.internal_attr, zinfo.external_attr,
                                              header_offset)
                    except DeprecationWarning:
                        print((structCentralDir, stringCentralDir, create_version,
                               zinfo.create_system, extract_version, zinfo.reserved,
                               zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                               zinfo.CRC, compress_size, file_size,
                               len(zinfo.filename), len(extra_data), len(zinfo.comment),
                               0, zinfo.internal_attr, zinfo.external_attr,
                               header_offset), file=sys.stderr)
                        raise
                    yield self.fp.write(centdir)
                    yield self.fp.write(filename)
                    yield self.fp.write(extra_data)
                    yield self.fp.write(zinfo.comment)

                pos2 = self.fp.tell()
                # Write end-of-zip-archive record
                centDirCount = count
                centDirSize = pos2 - pos1
                centDirOffset = pos1
                if (centDirCount >= ZIP_FILECOUNT_LIMIT or
                        centDirOffset > ZIP64_LIMIT or
                        centDirSize > ZIP64_LIMIT):
                    # Need to write the ZIP64 end-of-archive records
                    zip64endrec = struct.pack(
                        structEndArchive64, stringEndArchive64,
                        44, 45, 45, 0, 0, centDirCount, centDirCount,
                        centDirSize, centDirOffset)
                    yield self.fp.write(zip64endrec)

                    zip64locrec = struct.pack(
                        structEndArchive64Locator,
                        stringEndArchive64Locator, 0, pos2, 1)
                    yield self.fp.write(zip64locrec)
                    # cap the classic end record's fields at their 16/32-bit maxima
                    centDirCount = min(centDirCount, 0xFFFF)
                    centDirSize = min(centDirSize, 0xFFFFFFFF)
                    centDirOffset = min(centDirOffset, 0xFFFFFFFF)

                endrec = struct.pack(structEndArchive, stringEndArchive,
                                     0, 0, centDirCount, centDirCount,
                                     centDirSize, centDirOffset, len(self._comment))
                yield self.fp.write(endrec)
                yield self.fp.write(self._comment)
                self.fp.flush()
        finally:
            # always detach (and, if we own it, close) the underlying file object
            fp = self.fp
            self.fp = None
            if not self._filePassed:
                fp.close()
from __future__ import unicode_literals, print_function, with_statement
__version__ = '1.1.4'
import os
import sys
import stat
import struct
import time
import zipfile
from .compat import (
str, bytes,
ZIP64_VERSION,
ZIP_BZIP2, BZIP2_VERSION,
ZIP_LZMA, LZMA_VERSION,
SEEK_SET, SEEK_CUR, SEEK_END)
from zipfile import (
ZIP_STORED, ZIP64_LIMIT, ZIP_FILECOUNT_LIMIT, ZIP_MAX_COMMENT,
ZIP_DEFLATED,
structCentralDir, structEndArchive64, structEndArchive, structEndArchive64Locator,
stringCentralDir, stringEndArchive64, stringEndArchive, stringEndArchive64Locator,
structFileHeader, stringFileHeader,
zlib, crc32)
stringDataDescriptor = b'PK\x07\x08' # magic number for data descriptor
def _get_compressor(compress_type):
if compress_type == ZIP_DEFLATED:
return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
elif compress_type == ZIP_BZIP2:
from zipfile import bz2
return bz2.BZ2Compressor()
elif compress_type == ZIP_LZMA:
from zipfile import LZMACompressor
return LZMACompressor()
else:
return None
class PointerIO(object):
def __init__(self, mode='wb'):
if mode not in ('wb', ):
raise RuntimeError('zipstream.ZipFile() requires mode "wb"')
self.data_pointer = 0
self.__mode = mode
self.__closed = False
@property
def mode(self):
return self.__mode
@property
def closed(self):
return self.__closed
def close(self):
self.__closed = True
def flush(self):
pass
def next(self):
raise NotImplementedError()
# def seek(self, offset, whence=None):
# if whence == SEEK_SET:
# if offset < 0:
# raise ValueError('negative seek value -1')
# self.data_pointer = offset
# elif whence == SEEK_CUR:
# self.data_pointer = max(0, self.data_pointer + offset)
# elif whence == SEEK_END:
# self.data_pointer = max(0, offset)
# return self.data_pointer
def tell(self):
return self.data_pointer
def truncate(size=None):
raise NotImplementedError()
def write(self, data):
if self.closed:
raise ValueError('I/O operation on closed file')
if isinstance(data, str):
data = data.encode('utf-8')
if not isinstance(data, bytes):
raise TypeError('expected bytes')
self.data_pointer += len(data)
return data
class ZipInfo(zipfile.ZipInfo):
def __init__(self, *args, **kwargs):
zipfile.ZipInfo.__init__(self, *args, **kwargs)
self.flag_bits = 0x08 # ZIP flag bits, bit 3 indicates presence of data descriptor
def FileHeader(self, zip64=None):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
min_version = 0
if zip64 is None:
zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
if zip64:
fmt = b'<HHQQ'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
if not zip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
file_size = 0xffffffff
compress_size = 0xffffffff
min_version = ZIP64_VERSION
if self.compress_type == ZIP_BZIP2:
min_version = max(BZIP2_VERSION, min_version)
elif self.compress_type == ZIP_LZMA:
min_version = max(LZMA_VERSION, min_version)
self.extract_version = max(min_version, self.extract_version)
self.create_version = max(min_version, self.create_version)
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def DataDescriptor(self):
"""
crc-32 4 bytes
compressed size 4 bytes
uncompressed size 4 bytes
"""
if self.compress_size > ZIP64_LIMIT or self.file_size > ZIP64_LIMIT:
fmt = b'<4sLQQ'
else:
fmt = b'<4sLLL'
return struct.pack(fmt, stringDataDescriptor, self.CRC, self.compress_size, self.file_size)
class ZipFile(zipfile.ZipFile):
def __init__(self, fileobj=None, mode='w', compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode write "w"."""
if mode not in ('w', ):
raise RuntimeError('zipstream.ZipFile() requires mode "w"')
if fileobj is None:
fileobj = PointerIO()
self._comment = b''
zipfile.ZipFile.__init__(self, fileobj, mode=mode, compression=compression, allowZip64=allowZip64)
# TODO: Refractor to write queue with args + kwargs matching write()
self.paths_to_write = []
def __iter__(self):
for data in self.flush():
yield data
for data in self.__close():
yield data
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def flush(self):
while self.paths_to_write:
kwargs = self.paths_to_write.pop(0)
for data in self.__write(**kwargs):
yield data
@property
def comment(self):
"""The comment text associated with the ZIP file."""
return self._comment
@comment.setter
def comment(self, comment):
if not isinstance(comment, bytes):
raise TypeError("comment: expected bytes, got %s" % type(comment))
# check for valid comment length
if len(comment) >= ZIP_MAX_COMMENT:
if self.debug:
print('Archive comment is too long; truncating to %d bytes'
% ZIP_MAX_COMMENT)
comment = comment[:ZIP_MAX_COMMENT]
self._comment = comment
self._didModify = True
def write(self, filename, arcname=None, compress_type=None):
# TODO: Reflect python's Zipfile.write
# - if filename is file, write as file
# - if filename is directory, write an empty directory
kwargs = {'filename': filename, 'arcname': arcname, 'compress_type': compress_type}
self.paths_to_write.append(kwargs)
def write_iter(self, arcname, iterable, compress_type=None, buffer_size=None, date_time=None):
"""Write the bytes iterable `iterable` to the archive under the name `arcname`."""
kwargs = {'arcname': arcname, 'iterable': iterable, 'compress_type': compress_type, 'buffer_size': buffer_size, 'date_time': date_time}
self.paths_to_write.append(kwargs)
def writestr(self, arcname, data, compress_type=None, buffer_size=None, date_time=None):
"""
Writes a str into ZipFile by wrapping data as a generator
"""
def _iterable():
yield data
return self.write_iter(arcname, _iterable(), compress_type=compress_type, buffer_size=buffer_size, date_time=date_time)
def __write(self, filename=None, iterable=None, arcname=None, compress_type=None, buffer_size=None, date_time=None):
"""Put the bytes from filename into the archive under the name
`arcname`."""
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
if (filename is None and iterable is None) or (filename is not None and iterable is not None):
raise ValueError("either (exclusively) filename or iterable shall be not None")
if filename:
st = os.stat(filename)
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
else:
st, isdir = None, False
if date_time is not None and isinstance(date_time, time.struct_time):
date_time = date_time[0:6]
if date_time is None:
date_time = time.localtime()[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
if isdir:
arcname += '/'
zinfo = ZipInfo(arcname, date_time)
if st:
zinfo.external_attr = (st[0] & 0xFFFF) << 16 # Unix attributes
else:
zinfo.external_attr = 0o600 << 16 # ?rw-------
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
if st:
zinfo.file_size = st[6]
else:
zinfo.file_size = buffer_size or 0
zinfo.flag_bits = 0x00
zinfo.flag_bits |= 0x08 # ZIP flag bits, bit 3 indicates presence of data descriptor
zinfo.header_offset = self.fp.tell() # Start of header bytes
if zinfo.compress_type == ZIP_LZMA:
# Compressed data includes an end-of-stream (EOS) marker
zinfo.flag_bits |= 0x02
self._writecheck(zinfo)
self._didModify = True
if isdir:
zinfo.file_size = 0
zinfo.compress_size = 0
zinfo.CRC = 0
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
yield self.fp.write(zinfo.FileHeader(False))
return
cmpr = _get_compressor(zinfo.compress_type)
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
# Compressed size can be larger than uncompressed size
zip64 = self._allowZip64 and \
zinfo.file_size * 1.05 > ZIP64_LIMIT
yield self.fp.write(zinfo.FileHeader(zip64))
file_size = 0
if filename:
with open(filename, 'rb') as fp:
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
yield self.fp.write(buf)
else: # we have an iterable
for buf in iterable:
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
yield self.fp.write(buf)
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
yield self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
if not zip64 and self._allowZip64:
if file_size > ZIP64_LIMIT:
raise RuntimeError('File size has increased during compressing')
if compress_size > ZIP64_LIMIT:
raise RuntimeError('Compressed size larger than uncompressed size')
# Seek backwards and write file header (which will now include
# correct CRC and file sizes)
# position = self.fp.tell() # Preserve current position in file
# self.fp.seek(zinfo.header_offset, 0)
# self.fp.write(zinfo.FileHeader(zip64))
# self.fp.seek(position, 0)
yield self.fp.write(zinfo.DataDescriptor())
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def __close(self):
"""Close the file, and for mode "w" write the ending
records."""
if self.fp is None:
return
try:
if self.mode in ('w', 'a') and self._didModify: # write ending records
count = 0
pos1 = self.fp.tell()
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = 0xffffffff
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
min_version = 0
if extra:
# Append a ZIP64 field to the extra's
extra_data = struct.pack(
b'<HH' + b'Q'*len(extra),
1, 8*len(extra), *extra) + extra_data
min_version = ZIP64_VERSION
if zinfo.compress_type == ZIP_BZIP2:
min_version = max(BZIP2_VERSION, min_version)
elif zinfo.compress_type == ZIP_LZMA:
min_version = max(LZMA_VERSION, min_version)
extract_version = max(min_version, zinfo.extract_version)
create_version = max(min_version, zinfo.create_version)
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print((structCentralDir, stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset), file=sys.stderr)
raise
yield self.fp.write(centdir)
yield self.fp.write(filename)
yield self.fp.write(extra_data)
yield self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
centDirCount = count
centDirSize = pos2 - pos1
centDirOffset = pos1
if (centDirCount >= ZIP_FILECOUNT_LIMIT or
centDirOffset > ZIP64_LIMIT or
centDirSize > ZIP64_LIMIT):
# Need to write the ZIP64 end-of-archive records
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset)
yield self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
yield self.fp.write(zip64locrec)
centDirCount = min(centDirCount, 0xFFFF)
centDirSize = min(centDirSize, 0xFFFFFFFF)
centDirOffset = min(centDirOffset, 0xFFFFFFFF)
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self._comment))
yield self.fp.write(endrec)
yield self.fp.write(self._comment)
self.fp.flush()
finally:
fp = self.fp
self.fp = None
if not self._filePassed:
fp.close() | zipstream-new | /zipstream-new-1.1.8.tar.gz/zipstream-new-1.1.8/zipstream/__init__.py | __init__.py |
zipstream-ng
============
[](https://github.com/pR0Ps/zipstream-ng/actions/workflows/tests.yml)
[](https://pypi.org/project/zipstream-ng/)

A modern and easy to use streamable zip file generator. It can package and stream many files and
folders into a zip on the fly without needing temporary files or excessive memory. It can also
calculate the final size of the zip file before streaming it.
### Features:
- Generates zip data on the fly as it's requested.
- Can calculate the total size of the resulting zip file before generation even begins.
- Low memory usage: Since the zip is generated as it's requested, very little has to be kept in
memory (peak usage of less than 20MB is typical, even for TBs of files).
- Flexible API: Typical use cases are simple, complicated ones are possible.
- Supports zipping data from files, bytes, strings, and any other iterable objects.
- Keeps track of the date of the most recently modified file added to the zip file.
- Threadsafe: Won't mangle data if multiple threads concurrently add data to the same stream.
- Includes a clone of Python's `http.server` module with zip support added. Try `python -m zipstream.server`.
- Automatically uses Zip64 extensions, but only if they are required.
- No external dependencies.
### Ideal for web backends:
- Generating zip data on the fly requires very little memory, no disk usage, and starts producing
data with less latency than creating the entire zip up-front. This means faster responses, no
temporary files, and very low memory usage.
- The ability to calculate the total size of the stream before any data is actually generated
(provided no compression is used) means web backends can provide a `Content-Length` header in
their responses. This allows clients to show a progress bar as the stream is transferred.
- By keeping track of the date of the most recently modified file added to the zip, web
backends can provide a `Last-Modified` header. This allows clients to check if they have the most
up-to-date version of the zip with just a HEAD request instead of having to download the entire
thing.
Installation
------------
```
pip install zipstream-ng
```
Examples
--------
### Create a local zip file (simple example)
Make an archive named `files.zip` in the current directory that contains all files under
`/path/to/files`.
```python
from zipstream import ZipStream
zs = ZipStream.from_path("/path/to/files/")
with open("files.zip", "wb") as f:
f.writelines(zs)
```
### Create a local zip file (demos more of the API)
```python
from zipstream import ZipStream, ZIP_DEFLATED
# Create a ZipStream that uses the maximum level of Deflate compression.
zs = ZipStream(compress_type=ZIP_DEFLATED, compress_level=9)
# Set the zip file's comment.
zs.comment = "Contains compressed important files"
# Add all the files under a path.
# Will add all files under a top-level folder called "files" in the zip.
zs.add_path("/path/to/files/")
# Add another file (will be added as "data.txt" in the zip file).
zs.add_path("/path/to/file.txt", "data.txt")
# Add some random data from an iterable.
# This generator will only be run when the stream is generated.
def random_data():
import random
for _ in range(10):
yield random.randbytes(1024)
zs.add(random_data(), "random.bin")
# Add a file containing some static text.
# Will automatically be encoded to bytes before being added (uses utf-8).
zs.add("This is some text", "README.txt")
# Write out the zip file as it's being generated.
# At this point the data in the files will be read in and the generator
# will be iterated over.
with open("files.zip", "wb") as f:
f.writelines(zs)
```
### zipserver (included)
A fully-functional and useful example can be found in the included
[`zipstream.server`](zipstream/server.py) module. It's a clone of Python's built in `http.server`
with the added ability to serve multiple files and folders as a single zip file. Try it out by
installing the package and running `zipserver --help` or `python -m zipstream.server --help`.

### Integration with a Flask webapp
A very basic [Flask](https://flask.palletsprojects.com/)-based file server that streams all the
files under the requested path to the client as a zip file. It provides the total size of the stream
in the `Content-Length` header so the client can show a progress bar as the stream is downloaded. It
also provides a `Last-Modified` header so the client can check if it already has the most recent
copy of the zipped data with a `HEAD` request instead of having to download the file and check.
Note that while this example works, it's not a good idea to deploy it as-is due to the lack of input
validation and other checks.
```python
import os.path
from flask import Flask, Response
from zipstream import ZipStream
app = Flask(__name__)
@app.route("/", defaults={"path": "."})
@app.route("/<path:path>")
def stream_zip(path):
name = os.path.basename(os.path.abspath(path))
zs = ZipStream.from_path(path)
return Response(
zs,
mimetype="application/zip",
headers={
"Content-Disposition": f"attachment; filename={name}.zip",
"Content-Length": len(zs),
"Last-Modified": zs.last_modified,
}
)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000)
```
### Partial generation and last-minute file additions
It's possible to generate the zip stream, but stop before finalizing it. This enables adding
something like a file manifest or compression log after all the files have been added.
`ZipStream` provides a `get_info` method that returns information on all the files that have been
added to the stream. In this example, all that information will be added to the zip in a file named
"manifest.json" before finalizing it.
```python
from zipstream import ZipStream
import json
def gen_zipfile()
zs = ZipStream.from_path("/path/to/files")
yield from zs.all_files()
zs.add(
json.dumps(
zs.get_info(),
indent=2
),
"manifest.json"
)
yield from zs.finalize()
```
Comparison to stdlib
--------------------
Since Python 3.6 it has actually been possible to generate zip files as a stream using just the
standard library, it just hasn't been very ergonomic or efficient. Consider the typical use case of
zipping up a directory of files while streaming it over a network connection:
(note that the size of the stream is not pre-calculated in this case as this would make the stdlib
example way too long).
Using ZipStream:
```python
from zipstream import ZipStream
send_stream(
ZipStream.from_path("/path/to/files/")
)
```
<details>
<summary>The same(ish) functionality using just the stdlib:</summary>
```python
import os
import io
from zipfile import ZipFile, ZipInfo
class Stream(io.RawIOBase):
"""An unseekable stream for the ZipFile to write to"""
def __init__(self):
self._buffer = bytearray()
self._closed = False
def close(self):
self._closed = True
def write(self, b):
if self._closed:
raise ValueError("Can't write to a closed stream")
self._buffer += b
return len(b)
def readall(self):
chunk = bytes(self._buffer)
self._buffer.clear()
return chunk
def iter_files(path):
for dirpath, _, files in os.walk(path, followlinks=True):
if not files:
yield dirpath # Preserve empty directories
for f in files:
yield os.path.join(dirpath, f)
def read_file(path):
with open(path, "rb") as fp:
while True:
buf = fp.read(1024 * 64)
if not buf:
break
yield buf
def generate_zipstream(path):
stream = Stream()
with ZipFile(stream, mode="w") as zf:
toplevel = os.path.basename(os.path.normpath(path))
for f in iter_files(path):
# Use the basename of the path to set the arcname
arcname = os.path.join(toplevel, os.path.relpath(f, path))
zinfo = ZipInfo.from_file(f, arcname)
# Write data to the zip file then yield the stream content
with zf.open(zinfo, mode="w") as fp:
if zinfo.is_dir():
continue
for buf in read_file(f):
fp.write(buf)
yield stream.readall()
yield stream.readall()
send_stream(
generate_zipstream("/path/to/files/")
)
```
</details>
Tests
-----
This package contains extensive tests. To run them, install `pytest` (`pip install pytest`) and run
`py.test` in the project directory.
License
-------
Licensed under the [GNU LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.html).
| zipstream-ng | /zipstream-ng-1.6.0.tar.gz/zipstream-ng-1.6.0/README.md | README.md |
import collections
import datetime
import errno
import functools
import logging
import os
import stat
import struct
import sys
import time
import threading
from zipfile import (
# Classes
ZipInfo,
# Constants
ZIP_STORED, ZIP64_LIMIT, ZIP_FILECOUNT_LIMIT, ZIP_MAX_COMMENT,
ZIP64_VERSION, BZIP2_VERSION, ZIP_BZIP2, LZMA_VERSION, ZIP_LZMA,
ZIP_DEFLATED,
# Byte sequence constants
structFileHeader, structCentralDir, structEndArchive64, structEndArchive,
structEndArchive64Locator, stringFileHeader, stringCentralDir,
stringEndArchive64, stringEndArchive, stringEndArchive64Locator,
# Size constants
sizeFileHeader, sizeCentralDir, sizeEndCentDir, sizeEndCentDir64Locator,
sizeEndCentDir64,
# Functions
crc32, _get_compressor, _check_compression as _check_compress_type,
)
# Size of chunks to read out of files
# Note that when compressing data the compressor will operate on bigger chunks
# than this - it keeps a cache as new chunks are fed to it.
READ_BUFFER = 1024 * 64 # 64K
# Min and max dates the Zip format can support.
# DOS timestamps store the year as a 7-bit offset from 1980 (so 1980-2107)
# and the seconds with 2-second resolution (see _timestamp_to_dos).
MIN_DATE = (1980, 1, 1, 0, 0, 0)
MAX_DATE = (2107, 12, 31, 23, 59, 59)
# How much to overestimate when checking if a file will require using zip64
# extensions (1.05 = by 5%). This is used because compressed data can sometimes
# be slightly bigger than uncompressed.
ZIP64_ESTIMATE_FACTOR = 1.05
# Characters that are to be considered path separators on the current platform
# (includes "/" regardless of platform as per ZIP format specification).
# os.altsep can be None, hence the filtering.
PATH_SEPARATORS = set(x for x in (os.sep, os.altsep, "/") if x)
# Constants for compatibility modes
PY36_COMPAT = sys.version_info < (3, 7) # disable compress_level
PY35_COMPAT = sys.version_info < (3, 6) # backport ZipInfo functions, stringify path-like objects
# Public API of this module
__all__ = [
    # Defined classes
    "ZipStream", "ZipStreamInfo",
    # Compression constants (imported from zipfile)
    "ZIP_STORED", "ZIP_DEFLATED", "BZIP2_VERSION", "ZIP_BZIP2", "LZMA_VERSION", "ZIP_LZMA",
    # Helper functions
    "walk"
]
# Module-level logger (used e.g. for warnings about out-of-range dates and
# no-op compression levels)
__log__ = logging.getLogger(__name__)
def _check_compression(compress_type, compress_level):
    """Check the specified compression type and level are valid.

    Raises ValueError for invalid combinations; logs a warning when the
    level has no effect for the chosen compression type.
    """
    # compress_level can't be passed through to the compressor before 3.7
    if PY36_COMPAT and compress_level is not None:
        raise ValueError("compress_level is not supported on Python <3.7")

    # Delegate validation of the type itself to the zipfile module
    _check_compress_type(compress_type)

    if compress_level is None:
        return

    if compress_type == ZIP_STORED or compress_type == ZIP_LZMA:
        # These types ignore the level entirely - warn instead of failing
        __log__.warning(
            "compress_level has no effect when using ZIP_STORED/ZIP_LZMA"
        )
        return

    if compress_type == ZIP_DEFLATED and (compress_level < 0 or compress_level > 9):
        raise ValueError(
            "compress_level must be between 0 and 9 when using ZIP_DEFLATED"
        )
    if compress_type == ZIP_BZIP2 and (compress_level < 1 or compress_level > 9):
        raise ValueError(
            "compress_level must be between 1 and 9 when using ZIP_BZIP2"
        )
def _timestamp_to_dos(ts):
"""Takes an integer timestamp and converts it to a (dosdate, dostime) tuple"""
return (
(ts[0] - 1980) << 9 | ts[1] << 5 | ts[2],
ts[3] << 11 | ts[4] << 5 | (ts[5] // 2)
)
class ZipStreamInfo(ZipInfo):
    """A ZipInfo subclass that always uses a data descriptor to store filesize data"""

    def __init__(self, filename, date_time=None):
        """Create an entry named `filename` dated at `date_time`.

        date_time:
            A (year, month, day, hour, minute, second) tuple. Defaults to the
            current local time. Values outside the range representable by the
            zip format (MIN_DATE..MAX_DATE) are clamped with a warning.
        """
        # Default the date_time to the current local time and automatically
        # clamp it to the range that the zip format supports.
        date_time = date_time or time.localtime()[0:6]
        if not (MIN_DATE <= date_time <= MAX_DATE):
            # BUGFIX: the two adjacent string literals previously concatenated
            # without a separating space ("zip filesand was...")
            __log__.warning(
                "Date of %s is outside of the supported range for zip files "
                "and was automatically adjusted",
                date_time
            )
            date_time = min(max(MIN_DATE, date_time), MAX_DATE)

        super().__init__(filename, date_time)

    def DataDescriptor(self, zip64):
        """Return the data descriptor (CRC + sizes) for the file entry.

        `zip64` selects the 64-bit size layout and must match the value used
        when generating this entry's file header.
        """
        # Using a data descriptor is an alternate way to encode the file size
        # and CRC that can be inserted after the compressed data instead of
        # before it like normal. This is essential for making the zip data
        # streamable
        return struct.pack(
            "<4sLQQ" if zip64 else "<4sLLL",
            b"PK\x07\x08",  # Data descriptor signature
            self.CRC,
            self.compress_size,
            self.file_size
        )

    def FileHeader(self, zip64):
        """Return the per-file (local) header as bytes"""
        # Based on code in zipfile.ZipInfo.FileHeader
        # Logic for where the file sizes are listed is as follows:
        # From the zip spec:
        # - When using a data descriptor, the file sizes should be listed as 0
        #   in the file header.
        # - When using Zip64, the header size fields should always be set to
        #   0xFFFFFFFF to indicate that the size is in the Zip64 extra field.
        # - The format of the data descriptor depends on if a Zip64 extra field
        #   is present in the file header.
        # Assumption:
        # - When using both a data descriptor and Zip64 extensions, the header
        #   size fields should be set to 0xFFFFFFFF to indicate that the true
        #   sizes are in the required Zip64 extra field, which should list the
        #   sizes as 0 to defer to the data descriptor.
        dosdate, dostime = _timestamp_to_dos(self.date_time)

        if self.flag_bits & 0x08:
            # Using a data descriptor record to record the file sizes, set
            # everything to 0 since they'll be written there instead.
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        min_version = 0
        extra = self.extra
        if zip64:
            min_version = ZIP64_VERSION
            extra += struct.pack(
                "<HHQQ",
                0x01,  # Zip64 extended information extra field identifier
                16,  # length of the following "QQ" data
                file_size,
                compress_size,
            )
            # Indicate that the size is in the Zip64 extra field instead
            file_size = 0xFFFFFFFF
            compress_size = 0xFFFFFFFF

        # Bump the minimum extract/create versions if the compression method
        # requires a more modern unzipper
        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        self.extract_version = max(min_version, self.extract_version)
        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(
            structFileHeader,
            stringFileHeader,
            self.extract_version,
            self.reserved,
            flag_bits,
            self.compress_type,
            dostime,
            dosdate,
            CRC,
            compress_size,
            file_size,
            len(filename),
            len(extra)
        )
        return header + filename + extra

    def _file_data(self, iterable=None, force_zip64=False):
        """Given an iterable of file data, yield a local file header and file
        data for it.

        If `force_zip64` is True (not default), then zip64 extensions will
        always be used for storing files (not directories).

        Side effects: updates self.CRC, self.file_size, self.compress_size
        and self.flag_bits as the data is processed.

        Raises RuntimeError if the data unexpectedly requires Zip64 when it
        wasn't enabled up-front.
        """
        # Based on the code in zipfile.ZipFile.write, zipfile._ZipWriteFile.{write,close}

        if self.compress_type == ZIP_LZMA:
            # Compressed LZMA data includes an end-of-stream (EOS) marker
            self.flag_bits |= 0x02

        # Adding a folder - just need the header without any data or a data descriptor
        if self.is_dir():
            self.CRC = 0
            self.compress_size = 0
            self.file_size = 0
            self.flag_bits &= ~0x08  # Unset the data descriptor flag
            yield self.FileHeader(zip64=False)
            return

        if not iterable:  # pragma: no cover
            raise ValueError("Not a directory but no data given to encode")

        # Set the data descriptor flag so the filesizes and CRC can be added
        # after the file data
        self.flag_bits |= 0x08

        # Compressed size can be larger than uncompressed size - overestimate a bit
        zip64 = force_zip64 or self.file_size * ZIP64_ESTIMATE_FACTOR > ZIP64_LIMIT

        # Make header
        yield self.FileHeader(zip64)

        # Store/compress the data while keeping track of size and CRC
        if not PY36_COMPAT:
            cmpr = _get_compressor(self.compress_type, self._compresslevel)
        else:
            # Python <3.7's _get_compressor doesn't accept a level
            cmpr = _get_compressor(self.compress_type)
        crc = 0
        file_size = 0
        compress_size = 0
        for buf in iterable:
            file_size += len(buf)
            crc = crc32(buf, crc) & 0xFFFFFFFF
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size += len(buf)
            yield buf

        if cmpr:
            # Flush any data the compressor is still buffering
            buf = cmpr.flush()
            if buf:
                compress_size += len(buf)
                yield buf
        else:
            compress_size = file_size

        # Update the CRC and filesize info
        self.CRC = crc
        self.file_size = file_size
        self.compress_size = compress_size

        if not zip64 and max(file_size, compress_size) > ZIP64_LIMIT:
            # Didn't estimate correctly :(
            raise RuntimeError(
                "Adding file '{}' unexpectedly required using Zip64 extensions".format(
                    self.filename
                )
            )

        # Yield the data descriptor with the now-valid CRC and file size info
        yield self.DataDescriptor(zip64)

    def _central_directory_header_data(self):
        """Yield a central directory file header for this file"""
        # Based on code in zipfile.ZipFile._write_end_record
        dosdate, dostime = _timestamp_to_dos(self.date_time)
        extra = []

        # Store sizes and offsets in the extra data if they're too big
        # for the normal spot
        if max(self.file_size, self.compress_size) > ZIP64_LIMIT:
            extra.append(self.file_size)
            extra.append(self.compress_size)
            file_size = 0xFFFFFFFF
            compress_size = 0xFFFFFFFF
        else:
            file_size = self.file_size
            compress_size = self.compress_size

        if self.header_offset > ZIP64_LIMIT:
            extra.append(self.header_offset)
            header_offset = 0xFFFFFFFF
        else:
            header_offset = self.header_offset

        extra_data = self.extra
        min_version = 0
        if extra:
            # Append a Zip64 field to the extra's
            # Note that zipfile.ZipFile._write_end_record strips any existing
            # zip64 records here first - since we control the generation of
            # ZipStreamInfo records, there shouldn't ever be any so we don't
            # bother.
            extra_data = struct.pack(
                "<HH" + "Q"*len(extra), 1, 8*len(extra), *extra
            ) + extra_data
            min_version = ZIP64_VERSION

        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        extract_version = max(min_version, self.extract_version)
        create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        centdir = struct.pack(
            structCentralDir,
            stringCentralDir,
            create_version,
            self.create_system,
            extract_version,
            self.reserved,
            flag_bits,
            self.compress_type,
            dostime,
            dosdate,
            self.CRC,
            compress_size,
            file_size,
            len(filename),
            len(extra_data),
            len(self.comment),
            0,
            self.internal_attr,
            self.external_attr,
            header_offset
        )
        yield centdir
        yield filename
        yield extra_data
        yield self.comment

    if PY35_COMPAT:  # pragma: no cover
        # Backport essential functions introduced in 3.6

        @classmethod
        def from_file(cls, filename, arcname=None):
            """Construct an appropriate ZipInfo for a file on the filesystem.

            filename should be the path to a file or directory on the filesystem.

            arcname is the name which it will have within the archive (by default,
            this will be the same as filename, but without a drive letter and with
            leading path separators removed).
            """
            st = os.stat(filename)
            isdir = stat.S_ISDIR(st.st_mode)
            mtime = time.localtime(st.st_mtime)
            date_time = mtime[0:6]
            # Create ZipInfo instance to store file information
            if arcname is None:
                arcname = filename
            arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
            while arcname[0] in (os.sep, os.altsep):
                arcname = arcname[1:]
            if isdir:
                arcname += '/'
            zinfo = cls(arcname, date_time)
            zinfo.external_attr = (st.st_mode & 0xFFFF) << 16  # Unix attributes
            if isdir:
                zinfo.file_size = 0
                zinfo.external_attr |= 0x10  # MS-DOS directory flag
            else:
                zinfo.file_size = st.st_size
            return zinfo

        def is_dir(self):
            """Return True if this archive member is a directory."""
            return self.filename[-1] == '/'
def _validate_final(func):
    """Decorator: reject calls on a ZipStream that has already been finalized.

    Raises a RuntimeError instead of invoking the wrapped method when the
    stream's ``_final`` flag has been set.
    """
    @functools.wraps(func)
    def guarded(self, *args, **kwargs):
        # Once the archive footer has been generated, no further mutation
        # of the stream is allowed.
        if not self._final:
            return func(self, *args, **kwargs)
        raise RuntimeError("ZipStream has already been finalized")
    return guarded
def _validate_compression(func):
    """Decorator: verify compression options before running the wrapped method.

    If the call supplies ``compress_type`` and/or ``compress_level``, the
    effective combination (explicit values overriding the stream's defaults)
    is validated with ``_check_compression``; otherwise the call is passed
    through untouched.
    """
    @functools.wraps(func)
    def checked(self, *args, compress_type=None, compress_level=None, **kwargs):
        if not (compress_type is None and compress_level is None):
            # Fall back to the ZipStream's configured defaults for whichever
            # option was not explicitly provided.
            effective_type = self._compress_type if compress_type is None else compress_type
            effective_level = self._compress_level if compress_level is None else compress_level
            _check_compression(effective_type, effective_level)
        return func(
            self,
            *args,
            compress_type=compress_type,
            compress_level=compress_level,
            **kwargs
        )
    return checked
def _sanitize_arcname(arcname):
    """Normalize an arcname for storage in the archive.

    Truncates the name at the first null byte and converts all platform path
    separators to forward slashes. Raises a ValueError if the result (or the
    input) is empty.
    """
    # based on zipfile._sanitize_filename
    if arcname:
        # Discard everything from the first null byte onwards
        arcname = arcname.split("\x00", 1)[0]
    if not arcname:
        raise ValueError(
            "A valid arcname (name of the entry in the zip file) is required"
        )
    # The zip format mandates "/" as the directory separator regardless of
    # the local platform's convention.
    for sep in PATH_SEPARATORS:
        if sep != "/":
            arcname = arcname.replace(sep, "/")
    return arcname
def _iter_file(path):
    """Yield the contents of the file at ``path`` as a series of chunks"""
    with open(path, "rb") as fp:
        # Two-argument iter() keeps calling fp.read until it returns the
        # sentinel (b"" at EOF), yielding each non-empty chunk as-is.
        yield from iter(lambda: fp.read(READ_BUFFER), b"")
def walk(path, preserve_empty=True, followlinks=True):
    """Recursively walk ``path``, yielding the files and folders beneath it.

    preserve_empty:
        If True (the default), empty directories are included in the output.
        Their paths are yielded with a trailing path separator.
    followlinks:
        If True (the default), symlinks to folders are resolved and followed
        unless doing so would cause infinite recursion (symlinks to files are
        always resolved).
    """
    def identity(p):
        # (device, inode) uniquely identifies a filesystem object; used to
        # detect directories that were already visited via another route
        # (symlink loops) and avoid recursing into them again.
        info = os.stat(p)
        return (info.st_dev, info.st_ino)
    seen = {identity(path)}
    for dirpath, dirnames, files in os.walk(path, followlinks=followlinks):
        if followlinks:
            # os.walk honours in-place edits of dirnames: keep only the
            # directories that haven't been seen yet and record them.
            unvisited = []
            for name in dirnames:
                key = identity(os.path.join(dirpath, name))
                if key not in seen:
                    seen.add(key)
                    unvisited.append(name)
            dirnames[:] = unvisited
        if preserve_empty and not files and not dirnames:
            # Represent an empty directory as a single entry with a trailing
            # separator (os.path.join with "" appends the separator).
            files = [""]
        for name in files:
            yield os.path.join(dirpath, name)
class ZipStream:
    """A write-only zip that is generated from source files/data as it's
    iterated over.
    Ideal for situations where a zip file needs to be dynamically generated
    without using temporary files (ie: web applications).
    """
    def __init__(self, *, compress_type=ZIP_STORED, compress_level=None, sized=False):
        """Create a ZipStream
        compress_type:
            The ZIP compression method to use when writing the archive, and
            should be ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2 or ZIP_LZMA;
            unrecognized values will cause NotImplementedError to be raised. If
            ZIP_DEFLATED, ZIP_BZIP2 or ZIP_LZMA is specified but the
            corresponding module (zlib, bz2 or lzma) is not available,
            RuntimeError is raised. The default is ZIP_STORED.
        compress_level:
            Controls the compression level to use when writing files to the
            archive. When using ZIP_STORED or ZIP_LZMA it has no effect. When
            using ZIP_DEFLATED integers 0 through 9 are accepted (see zlib for
            more information). When using ZIP_BZIP2 integers 1 through 9 are
            accepted (see bz2 for more information). Raises a ValueError if the
            provided value isn't valid for the `compress_type`.
            Only available in Python 3.7+ (raises a ValueError if used on a
            lower version)
        sized:
            If `True`, will make the ZipStream able to calculate its final size
            prior to being generated, making it work with the `len()` function.
            Enabling this will enforce two restrictions:
            - No compression can be used
            - Any iterables added to the stream without also specifying their
              size (see `.add` docs) will immediately be read fully into
              memory. This is because the size of the data they will produce
              must be known prior to the stream being generated.
            If `False` (the default), no restrictions are enforced and using the
            object with the `len()` function will not work (will raise a
            TypeError)
        """
        if compress_type and sized:
            raise ValueError("Cannot use compression with a sized ZipStream")
        _check_compression(compress_type, compress_level)
        self._compress_type = compress_type
        self._compress_level = compress_level
        self._comment = b""
        self._last_modified = None
        # For adding files
        self._filelist = []
        self._queue = collections.deque()
        # For calculating the size
        self._sized = sized
        self._to_count = collections.deque()
        self._size_prog = (0, 0, 0)  # (num_files, files_size, cdfh_size)
        self._size_lock = threading.Lock()
        # For generating (_pos tracks the number of bytes emitted so far)
        self._gen_lock = threading.Lock()
        self._pos = 0
        self._final = False
    def __iter__(self):
        """Generate zipped data from the added files/data"""
        return self.finalize()
    def __bool__(self):
        """A ZipStream is considered truthy if any files have been added to it"""
        return not self.is_empty()
    def __len__(self):
        """The final size of the zip stream
        Raises a TypeError if the length is unknown
        """
        if not self._sized:
            raise TypeError("The length of this ZipStream is unknown")
        return self._get_size()
    def __bytes__(self):
        """Get the bytes of the ZipStream"""
        return b"".join(self)
    def file(self):
        """Generate data for a single file being added to the ZipStream
        Yields the stored data for a single file.
        Returns True if a file was available, False otherwise.
        """
        if self._final:
            return False
        try:
            kwargs = self._queue.popleft()
        except IndexError:
            return False
        # Since generating the file entry depends on the current number of bytes
        # generated, calling this function again without exhausting the generator
        # first will cause corrupted streams. Prevent this by adding a lock
        # around the functions that actually generate data.
        with self._gen_lock:
            yield from self._gen_file_entry(**kwargs)
        return True
    def all_files(self):
        """Generate data for all the currently added files"""
        # file() returns True (via StopIteration value) while entries remain
        while (yield from self.file()):
            pass
    def footer(self):
        """Generate the central directory record, signifying the end of the stream
        Note that this will NOT ensure all queued files are written to the zip
        stream first. For that, see `.finalize()`.
        """
        with self._gen_lock:
            if self._final:
                return
            yield from self._gen_archive_footer()
    def finalize(self):
        """Finish generating the zip stream and finalize it.
        Will finish processing all the files in the queue before writing the
        archive footer. To discard the items in the queue instead, see
        `.footer()`.
        """
        yield from self.all_files()
        yield from self.footer()
    @_validate_final
    @_validate_compression
    def add_path(self, path, arcname=None, *, recurse=True, compress_type=None, compress_level=None):
        """Queue up a path to be added to the ZipStream
        Queues the `path` up to to be written to the archive, giving it the
        name provided by `arcname`. If `arcname` is not provided, it is assumed
        to be the last component of the `path` (Ex: "/path/to/files/" -->
        "files").
        if `recurse` is `True` (the default), and the `path` is a directory,
        all contents under the `path` will also be added. By default, this is
        done using the `walk` function in this module, which will preserve
        empty directories as well as follow symlinks to files and folders
        unless this would result in infinite recursion.
        If more control over directory walking is required, a function that
        takes a `path` and returns an iterable of paths can also be passed in
        as `recurse`. Alternatively, the directory can be walked in external
        code while calling `add_path(path, arcname, recurse=False)` for each
        discovered entry.
        If recurse is `False`, only the specified path (file or directory) will
        be added.
        If given, `compress_type` and `compress_level` override the settings
        the ZipStream was initialized with.
        Raises a FileNotFoundError if the path does not exist
        Raises a ValueError if an arcname isn't provided and the assumed
        one is empty.
        Raises a RuntimeError if the ZipStream has already been finalized.
        """
        # Resolve path objects to strings on Python 3.5
        if PY35_COMPAT and hasattr(path, "__fspath__"): # pragma no cover
            path = path.__fspath__()
        if not os.path.exists(path):
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path)
        path = os.path.normpath(path)
        # special case - discover the arcname from the path
        if not arcname:
            arcname = os.path.basename(path)
            if not arcname:
                raise ValueError(
                    "No arcname for path '{}' could be assumed".format(path)
                )
        arcname = _sanitize_arcname(arcname)
        # Not recursing - just add the path
        if not recurse or not os.path.isdir(path):
            self._enqueue(
                path=path,
                arcname=arcname,
                compress_type=compress_type,
                compress_level=compress_level
            )
            return
        if recurse is True:
            recurse = walk
        for filepath in recurse(path):
            filename = os.path.relpath(filepath, path)
            filearcname = os.path.normpath(os.path.join(arcname, filename))
            # Check if adding a directory, and if so, add a trailing slash
            # (normpath will remove it). Also set the size since we're doing
            # the stat anyway
            st = os.stat(filepath)
            if stat.S_ISDIR(st.st_mode):
                filearcname += "/"
                size = 0
            else:
                size = st.st_size
            self._enqueue(
                path=filepath,
                arcname=filearcname,
                size=size,
                compress_type=compress_type,
                compress_level=compress_level
            )
    @_validate_final
    @_validate_compression
    def add(self, data, arcname, *, size=None, compress_type=None, compress_level=None):
        """Queue up data to be added to the ZipStream
        `data` can be bytes, a string (encoded to bytes using utf-8), or any
        object that supports the iterator protocol (ie. objects that provide an
        `__iter__` function). If an iterable object is provided, it must return
        bytes from its iterator or an exception will be raised when the object
        is added to the stream. `None` is also supported (will create an empty
        file or a directory)
        `arcname` (required) is the name of the file to store the data in. If
        any `data` is provided then the `arcname` cannot end with a "/" as this
        would create a directory (which can't contain content).
        `size` (optional) specifies the size of the `data` ONLY in the case
        where it is an iterator. It is ignored in all other cases.
        Note that the data provided will not be used until the file is actually
        encoded in the ZipStream. This means that strings and bytes will be held
        in memory and iterables will not be iterated over until then. For this
        reason it's a good idea to use `add_path()` wherever possible.
        If given, `compress_type` and `compress_level` override the settings the
        ZipStream was initialized with.
        Raises a ValueError if an arcname is not provided or ends with a "/"
        when data is given.
        Raises a TypeError if the data is not str, bytes, or an iterator.
        Raises a RuntimeError if the ZipStream has already been finalized.
        """
        arcname = _sanitize_arcname(arcname)
        if data is None:
            data = b""
        elif isinstance(data, str):
            data = data.encode("utf-8")
        elif isinstance(data, bytearray):
            # bytearrays are mutable - need to store a copy so it doesn't
            # change while we're iterating over it.
            data = bytes(data)
        is_directory = arcname[-1] in PATH_SEPARATORS
        if isinstance(data, bytes):
            if is_directory and data:
                raise ValueError("Can't store data as a directory")
            self._enqueue(
                data=data,
                arcname=arcname,
                compress_type=compress_type,
                compress_level=compress_level,
            )
        elif hasattr(data, "__iter__"):
            if is_directory:
                raise ValueError("Can't store an iterable as a directory")
            self._enqueue(
                iterable=data,
                size=size,
                arcname=arcname,
                compress_type=compress_type,
                compress_level=compress_level,
            )
        else:
            raise TypeError(
                "Data to add must be str, bytes, or an iterable of bytes"
            )
    def mkdir(self, arcname):
        """Create a directory inside the ZipStream"""
        arcname = _sanitize_arcname(arcname)
        if arcname[-1] not in PATH_SEPARATORS:
            arcname += "/"
        self.add(data=None, arcname=arcname)
    @property
    def sized(self):
        """True if the ZipStream's final size is known"""
        return self._sized
    @property
    def last_modified(self):
        """Return the date of the most recently modified file in the ZipStream
        Returns a `datetime.datetime` object or `None` if the ZipStream is
        empty.
        """
        return datetime.datetime(*self._last_modified) if self._last_modified else None
    @property
    def comment(self):
        """The comment associated with the ZipStream"""
        return self._comment
    @comment.setter
    @_validate_final
    def comment(self, comment):
        """Set the comment on the ZipStream
        If a string is provided it will be encoded to bytes as utf-8.
        If the comment is longer than 65,535 characters it will be truncated.
        Raises a RuntimeError if the ZipStream has already been finalized.
        """
        if comment is None:
            comment = b""
        elif isinstance(comment, str):
            comment = comment.encode("utf-8")
        elif isinstance(comment, bytearray):
            comment = bytes(comment)
        if not isinstance(comment, bytes):
            raise TypeError(
                "Expected bytes, got {}".format(type(comment).__name__)
            )
        if len(comment) > ZIP_MAX_COMMENT:
            __log__.warning(
                "Archive comment is too long; truncating to %d bytes",
                ZIP_MAX_COMMENT
            )
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
    def is_empty(self):
        """Check if any files have been added to the ZipStream"""
        return not self._queue and not self._filelist
    def num_queued(self):
        """The number of files queued up to be added to the stream"""
        return len(self._queue)
    def num_streamed(self):
        """The number of files that have already been added to the stream"""
        return len(self._filelist)
    def get_info(self):
        """Get a list of dicts containing data about each file currently in the
        ZipStream.
        Note that this ONLY includes files that have already been written to the
        ZipStream. Queued files are NOT included.
        """
        return [
            {
                "name": x.filename,
                "size": x.file_size,
                "compressed_size": x.compress_size,
                "datetime": datetime.datetime(*x.date_time).isoformat(),
                "CRC": x.CRC,
                "compress_type": x.compress_type,
                "compress_level": getattr(x, "_compresslevel", None), # <3.7 compat
                "extract_version": x.extract_version,
            }
            for x in self._filelist
        ]
    @classmethod
    def from_path(cls, path, *, compress_type=ZIP_STORED, compress_level=None, sized=None, **kwargs):
        """Convenience method that creates a ZipStream and adds the contents of
        a path to it.
        `sized` defaults to `True` if no compression is used, `False`
        otherwise. All other parameter defaults are the same as those in
        `__init__` and `add_path`.
        The `compress_type`, `compress_level`, and `sized` parameters will be
        passed to `__init__`, all other args and kwargs are passed to
        `add_path`.
        """
        if sized is None:
            sized = compress_type == ZIP_STORED
        z = cls(
            compress_type=compress_type,
            compress_level=compress_level,
            sized=sized
        )
        z.add_path(path, **kwargs)
        return z
    def _enqueue(self, **kwargs):
        """Internal method to enqueue files, data, and iterables to be streamed"""
        path = kwargs.get("path")
        data = kwargs.get("data")
        size = kwargs.get("size")
        if path:
            st = os.stat(path)
        else:
            st = None
        # Get the modified time of the added path (use current time for
        # non-paths) and use it to update the last_modified property
        mtime = time.localtime(st.st_mtime if path else None)[0:6]
        if self._last_modified is None or self._last_modified < mtime:
            self._last_modified = mtime
        # Get the expected size of the data where not specified and possible
        if size is None:
            if data is not None:
                kwargs["size"] = len(data)
            elif path is not None:
                if stat.S_ISDIR(st.st_mode):
                    kwargs["size"] = 0
                else:
                    kwargs["size"] = st.st_size
        # If the ZipStream is sized then it will look at what is being added and
        # queue up some information for _get_size to use to compute the total
        # length of the stream. It will also read any iterables fully into
        # memory so their size is known.
        if self._sized:
            if kwargs.get("compress_type"):
                raise ValueError("Cannot use compression with a sized ZipStream")
            # Iterate the iterable data to get the size and replace it with the static data
            if path is None and data is None and size is None:
                data = b"".join(kwargs.pop("iterable"))
                kwargs["size"] = len(data)
                kwargs["data"] = data
            self._to_count.append((kwargs["arcname"], kwargs["size"]))
        # Remove any default/redundant compression parameters
        if kwargs.get("compress_type") in (None, self._compress_type):
            kwargs.pop("compress_type", None)
        if kwargs.get("compress_level") in (None, self._compress_level):
            kwargs.pop("compress_level", None)
        self._queue.append(kwargs)
    def _track(self, data):
        """Data passthrough with byte counting"""
        self._pos += len(data)
        return data
    def _gen_file_entry(self, *, path=None, iterable=None, data=None, size=None, arcname, compress_type=None, compress_level=None):
        """Yield the zipped data generated by the specified path/iterator/data"""
        # Exactly one of path/iterable/data may be provided per queued entry
        assert bool(path) ^ bool(iterable) ^ bool(data is not None)
        assert not (self._sized and size is None)
        if path:
            zinfo = ZipStreamInfo.from_file(path, arcname)
        else:
            zinfo = ZipStreamInfo(arcname)
            # Set the external attributes in the same way as ZipFile.writestr
            if zinfo.is_dir():
                zinfo.external_attr = 0o40775 << 16 # drwxrwxr-x
                zinfo.external_attr |= 0x10 # MS-DOS directory flag
            else:
                zinfo.external_attr = 0o600 << 16 # ?rw-------
            if data is not None:
                zinfo.file_size = len(data)
            elif size is not None:
                zinfo.file_size = size
        zinfo.compress_type = compress_type if compress_type is not None else self._compress_type
        if not PY36_COMPAT:
            if zinfo.compress_type in (ZIP_STORED, ZIP_LZMA):
                # Make sure the zinfo properties are accurate for get_info
                zinfo._compresslevel = None
            else:
                zinfo._compresslevel = compress_level if compress_level is not None else self._compress_level
        # Store the position of the header
        zinfo.header_offset = self._pos
        # We need to force using zip64 extensions for unsized iterables since
        # we don't know how big they'll end up being.
        force_zip64 = bool(iterable) and size is None
        # Convert paths and data into iterables
        if path:
            if zinfo.is_dir():
                iterable = None
            else:
                iterable = _iter_file(path)
        elif data is not None:
            def gen():
                yield data
            iterable = gen()
        # Generate the file data
        for x in zinfo._file_data(iterable, force_zip64=force_zip64):
            yield self._track(x)
        if size is not None and size != zinfo.file_size:
            # The size of the data that was stored didn't match what was
            # expected. Note that this still produces a valid zip file, just
            # one with a different amount of data than was expected.
            # If the ZipStream is sized, this will raise an error since the
            # actual size will no longer match the calculated size.
            __log__.warning(
                "Size mismatch when adding data for '%s' (expected %d bytes, got %d)",
                arcname,
                size,
                zinfo.file_size
            )
            if self._sized:
                raise RuntimeError(
                    "Error adding '{}' to sized ZipStream - "
                    "actual size did not match the computed size".format(arcname)
                )
        self._filelist.append(zinfo)
    def _gen_archive_footer(self):
        """Yield data for the end of central directory record"""
        # Based on zipfile.ZipFile._write_end_record
        # Mark the ZipStream as finalized so no other data can be added to it
        self._final = True
        # Write central directory file headers
        centDirOffset = self._pos
        for zinfo in self._filelist:
            for x in zinfo._central_directory_header_data():
                yield self._track(x)
        # Write end of central directory record
        zip64EndRecStart = self._pos
        centDirCount = len(self._filelist)
        centDirSize = zip64EndRecStart - centDirOffset
        if (centDirCount >= ZIP_FILECOUNT_LIMIT or
                centDirOffset > ZIP64_LIMIT or
                centDirSize > ZIP64_LIMIT
        ):
            # Need to write the Zip64 end-of-archive records
            zip64EndRec = struct.pack(
                structEndArchive64,
                stringEndArchive64,
                44, 45, 45, 0, 0,
                centDirCount,
                centDirCount,
                centDirSize,
                centDirOffset
            )
            yield self._track(zip64EndRec)
            zip64LocRec = struct.pack(
                structEndArchive64Locator,
                stringEndArchive64Locator,
                0,
                zip64EndRecStart,
                1
            )
            yield self._track(zip64LocRec)
            # The classic EOCD fields are capped at their maximums; readers
            # fall back to the zip64 records written above for real values.
            centDirCount = min(centDirCount, 0xFFFF)
            centDirSize = min(centDirSize, 0xFFFFFFFF)
            centDirOffset = min(centDirOffset, 0xFFFFFFFF)
        endRec = struct.pack(
            structEndArchive,
            stringEndArchive,
            0, 0,
            centDirCount,
            centDirCount,
            centDirSize,
            centDirOffset,
            len(self._comment)
        )
        yield self._track(endRec)
        yield self._track(self._comment)
    def _get_size(self):
        """Calculate the final size of the zip stream as files are added"""
        # The aim is to store as little data as possible and avoid recalculating
        # the size of every file every time. By storing some data on how much
        # space is required for the currently-counted files, we can just add to
        # it for every new file.
        # Need to prevent multiple threads from reading _size_prog, calculating
        # independently, then all writing back conflicting progress.
        with self._size_lock:
            # These 3 metrics need to be tracked separately since the decision to
            # add a zip64 header on the end of the stream depends on any of these
            # exceeding a limit.
            (num_files, files_size, cdfh_size) = self._size_prog
            while True:
                try:
                    arcname, size = self._to_count.popleft()
                except IndexError:
                    break
                # Get the number of bytes the arcname uses by encoding it in
                # the same way that ZipStreamInfo._encodeFilenameFlags does
                try:
                    arcname_len = len(arcname.encode("ascii"))
                except UnicodeEncodeError:
                    arcname_len = len(arcname.encode("utf-8"))
                # Calculate if zip64 extensions are required in the same way that
                # ZipStreamInfo.file_data does
                uses_zip64 = size * ZIP64_ESTIMATE_FACTOR > ZIP64_LIMIT
                # Track the number of extra records in the central directory file
                # header encoding this file will require
                cdfh_extras = 0
                # Any files added after the size exceeds the zip64 limit will
                # require an extra record to encode their location.
                if files_size > ZIP64_LIMIT:
                    cdfh_extras += 1
                # FileHeader
                files_size += sizeFileHeader + arcname_len # 30 + name len
                # Folders don't have any data or require any extra records
                if arcname[-1] not in PATH_SEPARATORS:
                    # When using zip64, the size and compressed size of the file are
                    # written as an extra field in the FileHeader.
                    if uses_zip64:
                        files_size += 20 # struct.calcsize('<HHQQ')
                    # file data
                    files_size += size
                    # DataDescriptor
                    files_size += 24 if uses_zip64 else 16 # struct.calcsize('<LLQQ' if zip64 else '<LLLL')
                    # Storing the size of a large file requires 2 extra records
                    # (size and compressed size)
                    if size > ZIP64_LIMIT:
                        cdfh_extras += 2
                cdfh_size += sizeCentralDir # 46
                cdfh_size += arcname_len
                # Add space for extra data
                if cdfh_extras:
                    cdfh_size += 4 + (8 * cdfh_extras) # struct.calcsize('<HH' + 'Q' * cdfh_extras)
                num_files += 1
            # Record the current progress for next time
            self._size_prog = (num_files, files_size, cdfh_size)
        # Calculate the amount of data the end of central directory needs. This
        # is computed every time since it depends on the other metrics. Also,
        # it means that we don't have to deal with detecting if the comment
        # changes.
        eocd_size = sizeEndCentDir + len(self._comment) # 22 + comment len
        if (
            num_files > ZIP_FILECOUNT_LIMIT or
            files_size > ZIP64_LIMIT or
            cdfh_size > ZIP64_LIMIT
        ):
            eocd_size += sizeEndCentDir64 # 56
            eocd_size += sizeEndCentDir64Locator # 20
        return cdfh_size + files_size + eocd_size
import contextlib
import functools
import html
from http import HTTPStatus
import http.server
import io
import os
import sys
import urllib
from zipstream.ng import ZipStream
class ZippingHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
    def list_directory(self, path):
        """
        A clone of `http.server.SimpleHTTPRequestHandler.list_directory` method
        with slight modifications to add checkboxes beside each entry and a
        download button at the bottom that submits the checked files as a POST
        request.
        """
        # Additions to the original `list_directory` are marked with `ADDED`
        # comments.
        try:
            filelist = os.listdir(path)
        except OSError:
            self.send_error(HTTPStatus.NOT_FOUND, "No permission to list directory")
            return None
        try:
            displaypath = urllib.parse.unquote(self.path, errors='surrogatepass')
        except UnicodeDecodeError:
            displaypath = urllib.parse.unquote(path)
        displaypath = html.escape(displaypath, quote=False)
        title = 'Directory listing for %s' % displaypath
        enc = sys.getfilesystemencoding()
        r = []
        r.append('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">')
        r.append('<html>')
        r.append('<head>')
        r.append('<meta http-equiv="Content-Type" content="text/html; charset=%s">' % enc)
        r.append('<title>%s</title>' % title)
        r.append('</head>')
        r.append('<body>')
        r.append('<h1>%s</h1>' % title)
        r.append('<hr>')
        r.append('<form method="post">') # ADDED
        r.append('<ul>')
        # Case-insensitive sort of the directory entries
        for name in sorted(filelist, key=lambda x: x.lower()):
            fullname = os.path.join(path, name)
            displayname = linkname = name
            # Append / for directories or @ for symbolic links
            if os.path.isdir(fullname):
                displayname = name + "/"
                linkname = name + "/"
            if os.path.islink(fullname):
                displayname = name + "@"
                # Note: a link to a directory displays with @ and links with /
            linkname = urllib.parse.quote(linkname, errors='surrogatepass')
            displayname = html.escape(displayname, quote=False)
            r.append(
                '<li>'
                '<input type="checkbox" name="files" value="{0}"/> ' # ADDED
                '<a href="{0}">{1}</a></li>'
                ''.format(linkname, displayname)
            )
        r.append('</ul>')
        r.append('<hr>')
        r.append('<button>Download zip of checked files</button>') # ADDED
        r.append('</form>') # ADDED
        r.append('</body>')
        r.append('</html>')
        encoded = '\n'.join(r).encode(enc, 'surrogateescape')
        f = io.BytesIO()
        f.write(encoded)
        f.seek(0)
        self.send_response(HTTPStatus.OK)
        self.send_header("Content-type", "text/html; charset=%s" % enc)
        self.send_header("Content-Length", str(len(encoded)))
        self.end_headers()
        return f
    def do_POST(self):
        """Return a zip of all the files specified in the POST data as a
        stream"""
        # Get the content length so the POST data can be read
        try:
            content_length = int(self.headers.get('Content-Length'))
            if not content_length:
                raise ValueError()
        except (KeyError, ValueError, TypeError):
            self.send_error(HTTPStatus.BAD_REQUEST, "Invalid content length")
            return
        # Read and decode the POST data
        enc = sys.getfilesystemencoding()
        try:
            post_data = self.rfile.read(content_length).decode(enc)
        except UnicodeDecodeError:
            self.send_error(HTTPStatus.BAD_REQUEST, "Invalid encoding of POST data")
            return
        # Parse the filename(s) to add to the zip out of the POST data
        try:
            data = urllib.parse.parse_qs(post_data, strict_parsing=True)
        except ValueError:
            self.send_error(HTTPStatus.BAD_REQUEST, "No files selected")
            return
        # Generate the ZipStream from the POSTed filenames and send it as the
        # response body.
        # Note that since the ZipStream is sized, the total size of it can be
        # calculated before starting to stream it. This is used to set the
        # "Content-Length" header, giving the client the ability to show a
        # download progress bar, estimate the time remaining, etc.
        zs = ZipStream(sized=True)
        zs.comment = "Generated by https://github.com/pR0Ps/zipstream-ng"
        for x in data.get("files") or []:
            # Silently skip paths that don't exist or can't be added
            with contextlib.suppress(OSError, ValueError):
                zs.add_path(self.translate_path(os.path.join(self.path, x)))
        # Don't send back an empty zip
        if not zs:
            self.send_error(HTTPStatus.BAD_REQUEST, "No files to zip up")
            return
        # Send response headers
        self.send_response(HTTPStatus.OK)
        self.send_header("Content-Type", "application/zip")
        self.send_header("Content-Disposition", "attachment; filename=files.zip")
        self.send_header("Content-Length", len(zs))
        self.send_header("Last-Modified", zs.last_modified)
        self.end_headers()
        # Generate the data of the ZipStream as it's sent to the client
        self.wfile.writelines(zs)
def main():
    """Command-line entry point: parse arguments and serve the directory.

    Starts an HTTP file server (via `http.server.test`) that uses
    ZippingHTTPRequestHandler so multiple files/folders can be downloaded
    as a single zip.
    """
    import argparse
    parser = argparse.ArgumentParser(description=(
        "Simple fileserver with support for downloading multiple files and "
        "folders as a single zip file."
    ))
    parser.add_argument(
        "--bind", "-b",
        metavar="ADDRESS",
        help="Specify alternate bind address [default: all interfaces]"
    )
    parser.add_argument(
        "--directory", "-d",
        default=os.getcwd(),
        # Fixed missing space in the help text ("[default:current directory]")
        help="Specify alternative directory [default: current directory]"
    )
    parser.add_argument(
        "port",
        action="store",
        default=8000,
        type=int,
        nargs="?",
        help="Specify alternate port [default: 8000]"
    )
    args = parser.parse_args()
    # Bind the target directory into the handler class; http.server.test
    # handles socket setup, serving, and Ctrl+C shutdown.
    http.server.test(
        HandlerClass=functools.partial(
            ZippingHTTPRequestHandler,
            directory=args.directory
        ),
        ServerClass=http.server.ThreadingHTTPServer,
        port=args.port,
        bind=args.bind
    )
if __name__ == "__main__":
    main()
# ZipStreamer
ZipStreamer is a Python library for generating ZIP files on-the-fly with ZIP
file size information.
This library was implemented using logic from Python's `zipfile` library and
Golang's `archive/zip` library.
```python
z = ZipStream(files=[
ZipFile('file.txt', 4, lambda: StringIO('test'), None, None),
ZipFile('emptydir/', None, None, None, None),
ZipFile('dir/remote.txt', remote_file_size, get_remote_file, None, None),
])
size = z.size()
res = Response(z.generate(), mimetype='application/zip')
res.headers['Content-Length'] = str(size)
```
## Installation
```
pip install zipstreamer
```
## Examples
```
pip install flask requests
PYTHONPATH=. FLASK_APP=examples/flask_example.py flask run
```
## Testing
```
pipenv install --dev --skip-lock
pipenv run nosetests
```
Testing multiple versions:
```
pip install pyenv tox tox-pyenv
pyenv install 2.7.14
pyenv install 3.4.8
pyenv install 3.5.5
pyenv install 3.6.4
pyenv install 3.7-dev
pyenv local 2.7.14 3.4.8 3.5.5 3.6.4 3.7-dev
tox
```
| zipstreamer | /zipstreamer-0.1.3.tar.gz/zipstreamer-0.1.3/README.md | README.md |
[](https://ziptool.readthedocs.io/en/latest/?badge=latest)
# ZIPtool
This tool is designed to analyze microdata from the American Community Survey (ACS) on a ZIP-code level. The Census Bureau publishes microdata only on a Public Use Microdata Area (PUMA) basis, so this package converts ZIP to PUMA and returns the relevant data as either summary statistics or the raw data.
### Requirements
This project requires Python 3.8.0 or higher. Install using:

```
pip install ziptool
```
### Getting Started
You can find the project's documentation <a href = https://ziptool.readthedocs.io/>here</a>.
### Development
This project is in the early stages of development, so please email <a href = mailto:[email protected]>[email protected]</a> with any problems you encounter.
| ziptool | /ziptool-0.0.1.tar.gz/ziptool-0.0.1/README.md | README.md |
# zipwalk
A very simple walker that recursively walks through nested zipfiles
## About
This project was created because I needed a way to iterate over nested zipfiles
without unzipping them.
## Install
```sh
pip install zipwalk
```
## Usage
It has a similar interface to `os.walk`:
```py
from zipwalk import zipwalk
for root, zips, files in zipwalk('tests/1.zip'):
print('root:', root.filename)
print('zips:', zips)
print('files:', files)
# output:
# root: tests/1.zip
# zips: {'2.zip'}
# files: {'1c.txt', 'dir/d1.txt', '1b.txt', '1a.txt'}
# root: 2.zip
# zips: set()
# files: {'2c.txt', '2b.txt', '2a.txt'}
```
`root` is a [ZipFile][1] instance opened in read mode, `r`. All zip files are
opened using a `with` context manager and will be closed once the generator is
exhausted.
You can use the zip walker like the following:
```py
from pathlib import Path
from zipfile import ZipFile
from zipwalk import zipwalk
zipwalk(ZipFile('tests/1.zip'))
zipwalk(Path('tests/1.zip'))
zipwalk('tests/1.zip')
```
[1]: https://docs.python.org/3/library/zipfile.html | zipwalk | /zipwalk-0.1.3.tar.gz/zipwalk-0.1.3/README.md | README.md |
Internet Relay Chat (IRC) Protocol client library
=================================================
|Build Status| |Snippets Stats|
Quick Start
-----------
.. code:: python
import zirc, ssl
class Bot(zirc.Client):
def __init__(self):
self.connection = zirc.Socket(wrapper=ssl.wrap_socket)
self.config = zirc.IRCConfig(host="irc.freenode.net",
port=6697,
nickname="zirctest",
ident="bot",
realname="test bot",
channels=["##chat"],
caps=zirc.Caps(zirc.Sasl(username="username", password="password")))
self.connect(self.config)
self.start()
def on_privmsg(self, event, irc):
irc.reply(event, "It works!")
#Or alternatively:
#irc.privmsg(event.target, "It works!")
Bot()
This library implements the IRC protocol, it's an event-driven IRC
Protocol framework.
Installation
------------
PyPi
~~~~
::
sudo pip install zirc
sudo pip3 install zirc
Github
~~~~~~
::
sudo pip install git+https://github.com/itslukej/zirc.git
sudo pip3 install git+https://github.com/itslukej/zirc.git
**Github will contain the latest bug fixes and improvements but sometimes also "bad quality" code.**
Features
--------
- Automatic PING/PONG between the server
- IRC Message parsing
- A simple set up and connection method
- Easy installation
- Easy CTCP Set-up
IPv6
~~~~
To use IPv6 with ``zirc.Socket``, you can use the family
``socket.AF_INET6``:
.. code:: python
import socket
self.connection = zirc.Socket(family=socket.AF_INET6)
Proxy
~~~~~
Initialize ``zirc.Socket`` with argument ``socket_class``:
.. code:: python
self.connection = zirc.Socket(socket_class=zirc.Proxy(host="localhost", port=1080, protocol=zirc.SOCKS5))
Examples
--------
You can `find examples for zIRC by me and other users on
CodeBottle <https://codebottle.io/?q=%22zirc%22>`__
Ideas
-----
- Multiple connection support
TODO
----
- More documentation
Contributing
------------
Talk to us on #zirc at Freenode
Please discuss code changes that significantly affect client use of the
library before merging to the master branch. Change the version in
``setup.py`` ahead if the change should be uploaded to PyPi.
.. |Build Status| image:: https://travis-ci.org/itslukej/zirc.svg?branch=master
:target: https://travis-ci.org/itslukej/zirc
.. |Snippets Stats| image:: https://codebottle.io/embed/search-badge?keywords=zirc&language=4
:target: https://codebottle.io/?q=zirc
| zirc | /zirc-1.2.10.tar.gz/zirc-1.2.10/PyPi-README.rst | PyPi-README.rst |
# Internet Relay Chat (IRC) Protocol client library
[](https://travis-ci.org/itslukej/zirc)
[](https://codebottle.io/?q=zirc)
## Quick Start
```python
import zirc, ssl
class Bot(zirc.Client):
def __init__(self):
self.connection = zirc.Socket(wrapper=ssl.wrap_socket)
self.config = zirc.IRCConfig(host="irc.freenode.net",
port=6697,
nickname="zirctest",
ident="bot",
realname="test bot",
channels=["##chat"],
caps=zirc.Caps(zirc.Sasl(username="username", password="password")))
self.connect(self.config)
self.start()
def on_privmsg(self, event, irc):
irc.reply(event, "It works!")
#Or alternatively:
#irc.privmsg(event.target, "It works!")
Bot()
```
This library implements the IRC protocol; it's an event-driven IRC protocol framework.
## Installation
### PyPi
```
sudo pip install zirc
sudo pip3 install zirc
```
### Github
```
sudo pip install git+https://github.com/itslukej/zirc.git
sudo pip3 install git+https://github.com/itslukej/zirc.git
```
> Github will contain the latest bug fixes and improvements but sometimes also "bad quality" code.
## Features
- Automatic PING/PONG between the server
- IRC Message parsing
- A simple set up and connection method
- Easy installation
- Easy CTCP Set-up
### IPv6
To use IPv6 with `zirc.Socket`, you can use the family `socket.AF_INET6`:
```python
import socket
self.connection = zirc.Socket(family=socket.AF_INET6)
```
### Proxy
Initialize `zirc.Socket` with argument `socket_class`:
```python
self.connection = zirc.Socket(socket_class=zirc.Proxy(host="localhost", port=1080, protocol=zirc.SOCKS5))
```
## Examples
You can [find examples for zIRC by me and other users on CodeBottle](https://codebottle.io/?q=%22zirc%22)
## Ideas
- Multiple connection support
## TODO
- More documentation
## Contributing
> Talk to us on #zirc at Freenode
Please discuss code changes that significantly affect client use of the library before merging to the master branch. Change the version in `setup.py` ahead if the change should be uploaded to PyPi.
| zirc | /zirc-1.2.10.tar.gz/zirc-1.2.10/README.md | README.md |
# Zirconium
Zirconium is a powerful configuration tool for loading and using configuration in your application.
## Key Features
### Features
* Support for libraries to provide their own default configuration and/or configuration file locations
* Applications specify their own configuration with `@zirconium.configure` decorator
* Automatic replacement of ${ENVIRONMENT_VARIABLES} in strings
* Consistent type coercion for common data types: paths, ints, floats, decimals, dates, and datetimes
* Where dictionary-style declarations are not supported, instead use the dot syntax (e.g. "foo.bar")
* Supports multiple file encodings
* Extensible to other formats as needed
* Configuration is dict-like for ease-of-use in existing locations (e.g. Flask)
* Multiple files can be specified with different weights to control loading order
* Supports default vs. normal configuration file (defaults always loaded first)
* Supports thread-safe injection of the configuration into your application via autoinject
* Supports specifying default configuration for libraries in entry points `zirconium.config` and for parsers in
`zirconium.parsers`, as well as using the `@zirconium.configure` decorator.
### Supported configuration methods
* Database tables (with SQLAlchemy installed)
* YAML (with pyyaml installed)
* TOML (with toml installed or Python >= 3.11)
* JSON
* Setuptools-like CFG files
* INI files (following the defaults of the configparser module)
* Environment variables
### Priority Order
Later items in this list will override previous items
1. Files registered with `register_default_file()`, in ascending order by `weight` (or order called)
2. Files registered with `register_file()`, in ascending order by `weight`
3. Files from environment variables registered with `register_file_from_environ()`, in ascending order by `weight`
4. Values from environment variables registered with `register_environ_var()`
## Example Usage
```python
import pathlib
import zirconium
from autoinject import injector
@zirconium.configure
def add_config(config):
# Direct load configuration from dict:
config.load_from_dict({
"version": "0.0.1",
"database": {
# Load these from environment variables
"username": "${MYAPP_DATABASE_USERNAME}",
"password": "${MYAPP_DATABASE_PASSWORD}",
},
"escaped_environment_example": "$${NOT_AN_ENVIRONMENT VARIABLE",
"preceding_dollar_sign": "$$${STOCK_PRICE_ENV_VARIABLE}",
})
# Default configuration, relative to this file, will override the above dict
base_file = pathlib.Path(__file__).parent / ".myapp.defaults.toml"
config.register_default_file(base_file)
# File in user home directory, overrides the defaults
config.register_file("~/.myapp.toml")
# File in CWD, will override whatever is in home
config.register_file("./.myapp.toml")
# Load a file path from environment variable, will override ALL registered files
config.register_file_from_environ("MYAPP_CONFIG_FILE")
# Load values direct from the environment, will override ALL files including those specific in environment variables
# sets config["database"]["password"]
config.register_environ_var("MYAPP_DATABASE_PASSWORD", "database", "password")
# sets config["database"]["username"]
config.register_environ_var("MYAPP_DATABASE_USERNAME", "database", "username")
# Injection example
class NeedsConfiguration:
config: zirconium.ApplicationConfig = None
@injector.construct
def __init__(self):
# you have self.config available as of here
pass
```
## Change Log
### Version 1.1.0
- Added `as_list()` and `as_set()` which return as expected
- Type-hinting added to the `as_X()` methods to help with usage in your IDE
- Added support for `register_files()` which takes a set of directories to use and registers a set of files and default files in each.
### Version 1.0.0
- Stable release after extensive testing on my own
- Python 3.11's tomllib now supported for parsing TOML files
- Using `pymitter` to manage configuration registration was proving problematic when called from
a different thread than where the application config object was instantiated. Replaced it with a more robust solution.
- Fixed a bug for registering default files
- Added `as_dict()` to the configuration object which returns an instance of `MutableDeepDict`.
| zirconium | /zirconium-1.1.2.tar.gz/zirconium-1.1.2/README.md | README.md |
import sys
import argparse
import time
from colorama import Fore
import baseconv
def return_time_list(ts, base=10):
    """Return [yy, m, w, d, h, mn, sc] as digit strings of ts in the given base.

    The timestamp is rendered in the requested base, then sliced into
    positional chunks: two trailing digits each for seconds and minutes,
    one digit each for hours/days/weeks/months, and whatever remains for
    the leading (years) position.

    :param int ts: unix timestamp.
    :param int base: number base for output. default 10. valid 2-64.
    """
    tl = []
    bconv = baseconv.BaseConverter(baseconv.BASE64_ALPHABET[:base])
    t = bconv.encode(ts)
    # Pad to at least 10 digits so every positional slice below is populated.
    # Equivalent to the old "if len(t) < 12: t = '0' * (10 - len(t)) + t",
    # whose mismatched threshold added no padding for lengths 10-11 anyway.
    t = t.zfill(10)
    e = None
    # Peel positions off the right-hand end, widest slice (seconds) first.
    for i in [-2, -4, -5, -6, -7, -8]:
        tl.append(t[i:e])
        e = i
    rest = t[:-8]
    tl.append(rest)
    tl.reverse()
    return tl
def color_time_list(tl):
    """Wrap ANSI colour codes around the string time values in a list.

    Colour order (rainbow-ish, most- to least-significant position):
    magenta, blue, cyan, green, yellow, red, white.

    :param list tl: list of string values of time parts.
    :returns list: the same values, each bracketed by a colour code and a reset.
    """
    # NOTE: an earlier revision assigned a red-first palette here and then
    # immediately overwrote it; the dead assignment has been removed.
    colors = [
        Fore.MAGENTA,
        Fore.BLUE,
        Fore.CYAN,
        Fore.GREEN,
        Fore.YELLOW,
        Fore.RED,
        Fore.WHITE,
    ]
    colored = []
    for color, part in zip(colors, tl):
        colored.append(color + part + Fore.RESET)
    return colored
def time_string(ts, color=False, base=10):
    """Render ts as 'yy:m:w:d h:mm:ss', optionally ANSI colour coded.

    Uses base 10 as default.
    """
    parts = return_time_list(ts, base=base)
    if color:
        parts = color_time_list(parts)
    date_part = ':'.join(parts[:-3])
    clock_part = ':'.join(parts[-3:])
    return date_part + ' ' + clock_part
def main(argv=None):
    """Command-line entry point: print the given (or current) unix timestamp
    rendered as a base-N time string.

    main(sys.argv[1:])
    """
    parser = argparse.ArgumentParser(description='template cli script')
    parser.add_argument('--debug', default=False, action='store_true',
                        help="debug flag.")
    # type=int added: without it a user-supplied --ts arrived as a string
    # and broke the base conversion downstream.
    parser.add_argument('--ts', default=int(time.time()), action='store',
                        type=int, help="timestamp. default now().")
    # Help text aligned with return_time_list's documented range (2-64).
    parser.add_argument('--base', default=10, action='store', type=int,
                        help="base from 2-64. default 10.")
    parser.add_argument('--color', default=False, action='store_true',
                        help="colorize time positions.")
    if argv is not None:
        args = parser.parse_args(argv)
    else:
        args = parser.parse_args()
    a = time_string(args.ts, color=args.color, base=args.base)
    print(a)
# Script entry point: forward CLI arguments (excluding the program name).
if __name__ == '__main__':
    main(sys.argv[1:])
from skimage import io
from matplotlib import pyplot as plt
import random
import cv2
import os
import numpy as np
def pngToJpg(src):
    """Return an 8-bit, 3-channel copy of *src*, keeping only its first
    three channels (i.e. dropping the alpha plane of an RGBA/BGRA PNG)."""
    height, width = src.shape[0], src.shape[1]
    jpg = np.zeros([height, width, 3]).astype(np.uint8)
    jpg[:, :, (0, 1, 2)] = src[:, :, (0, 1, 2)]
    return jpg
def plot_one_box(x, img, color=None, label=None, line_thickness=None):
    """Draw one bounding box (and optional label banner) on *img* in place.

    :param x: box as (x1, y1, x2, y2) in pixel coordinates.
    :param img: image array, modified in place.
    :param color: colour triple; a random colour is chosen when omitted.
    :param label: optional text drawn in a filled banner above the box.
    :param line_thickness: box line thickness; derived from image size if None.
    """
    tl = line_thickness or round(0.002 * max(img.shape[0:2])) + 1  # line thickness
    # Bug fix: the caller-supplied colour used to be unconditionally
    # overwritten with a random one; now it is honoured when given.
    color = color or [random.randint(0, 255) for _ in range(3)]
    c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3]))
    cv2.rectangle(img, c1, c2, color, thickness=tl)
    if label:
        tf = max(tl - 1, 1)  # font thickness
        t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
        c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
        cv2.rectangle(img, c1, c2, color, -1)  # filled banner behind the text
        cv2.putText(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255],
                    thickness=tf, lineType=cv2.LINE_AA)
def getFiles(dir):
    """Recursively collect the paths of every file underneath *dir*."""
    return [
        os.path.join(base, name)
        for base, _subdirs, names in os.walk(dir + '/')
        for name in names
    ]
def add_roi(backimg, logoimg, logox0, logoy0):
    """Return a copy of *backimg* with *logoimg* pasted so that the logo's
    top-left corner sits at (logox0, logoy0).

    The original per-pixel Python loop is replaced by a single vectorised
    slice assignment — same result, dramatically faster on real images.
    """
    backimg = backimg.copy()
    logo_rows, logo_cols = logoimg.shape[0], logoimg.shape[1]
    backimg[logoy0:logoy0 + logo_rows, logox0:logox0 + logo_cols, :] = logoimg
    return backimg
def scaleTransfrom(tar_img, W, H, x0, x1, y0, y1):
    """Map box coordinates given for a W-by-H reference frame onto the
    actual pixel grid of *tar_img*, truncating each result to int."""
    rows, cols, _ = tar_img.shape
    x_ratio = float(cols / W)
    y_ratio = float(rows / H)
    return int(x0 * x_ratio), int(x1 * x_ratio), int(y0 * y_ratio), int(y1 * y_ratio)
def get_random_files(dir, proportion):
    """Return a uniform random sample (without replacement) containing
    int(proportion * N) of the files found under *dir*.

    The manual shuffle-an-index-list dance is replaced with random.sample,
    which draws from the same distribution in one call.
    """
    all_files = getFiles(dir)
    sample_size = int(proportion * len(all_files))
    return random.sample(all_files, sample_size)
def newMatUC3(width, height, colorR, colorG, colorB):
    """Create a solid-colour height-by-width image with three uint8 channels
    holding (colorR, colorG, colorB) in channels 0, 1, 2 respectively."""
    return np.full((height, width, 3), (colorR, colorG, colorB), dtype=np.uint8)
def roi_cutPoint(srcimg, x0, x1, y0, y1):
    """Return the rectangular region rows y0:y1, columns x0:x1 of *srcimg*
    (a numpy view, not a copy)."""
    region = srcimg[y0:y1, x0:x1]
    return region
=================
Zish Python ANTLR
=================
A Python library for the `Zish format <https://github.com/tlocke/zish>`_ format,
released under the `MIT-0 licence <https://choosealicense.com/licenses/mit-0/>`_.
.. image:: https://github.com/tlocke/zish_python_antlr/workflows/zish_python_antlr/badge.svg
:alt: Build Status
.. contents:: Table of Contents
:depth: 2
:local:
Installation
------------
- Create a virtual environment: ``python3 -m venv venv``
- Activate the virtual environment: ``source venv/bin/activate``
- Install: ``pip install zish_antlr``
Quickstart
----------
To go from a Python object to an Zish string use ``zish.dumps``. To go from a Zish
string to a Python object use ``zish.loads``. Eg.
>>> from zish import loads, dumps
>>> from datetime import datetime, timezone
>>> from decimal import Decimal
>>>
>>> # Take a Python object
>>> book = {
... 'title': 'A Hero of Our Time',
... 'read_date': datetime(2017, 7, 16, 14, 5, tzinfo=timezone.utc),
... 'would_recommend': True,
... 'description': None,
... 'number_of_novellas': 5,
... 'price': Decimal('7.99'),
... 'weight': 6.88,
... 'key': b'kshhgrl',
... 'tags': [
... 'russian',
... 'novel',
... '19th century',
... ],
... }
>>>
>>> # Output it as an Zish string
>>> zish_str = dumps(book)
>>> print(zish_str)
{
"description": null,
"key": 'a3NoaGdybA==',
"number_of_novellas": 5,
"price": 7.99,
"read_date": 2017-07-16T14:05:00Z,
"tags": [
"russian",
"novel",
"19th century",
],
"title": "A Hero of Our Time",
"weight": 6.88,
"would_recommend": true,
}
>>>
>>> # Load the Zish string, to give us back the Python object
>>> reloaded_book = loads(zish_str)
>>>
>>> # Print the title
>>> print(reloaded_book['title'])
A Hero of Our Time
.. table:: Python To Zish Type Mapping
+-----------------------+-----------------------------------------------------------+
| Python Type | Zish Type |
+=======================+===========================================================+
| bool | bool |
+-----------------------+-----------------------------------------------------------+
| int | integer |
+-----------------------+-----------------------------------------------------------+
| str | string |
+-----------------------+-----------------------------------------------------------+
| datetime.datetime | timestamp |
+-----------------------+-----------------------------------------------------------+
| dict | map |
+-----------------------+-----------------------------------------------------------+
| decimal.Decimal | decimal |
+-----------------------+-----------------------------------------------------------+
| float | decimal |
+-----------------------+-----------------------------------------------------------+
| bytearray | bytes |
+-----------------------+-----------------------------------------------------------+
| bytes | bytes |
+-----------------------+-----------------------------------------------------------+
| list | list |
+-----------------------+-----------------------------------------------------------+
| tuple | list |
+-----------------------+-----------------------------------------------------------+
Contributing
------------
Useful link:
* `ANTLR JavaDocs <http://www.antlr.org/api/Java/index.html?overview-summary.html>`_
To run the tests:
- Change to the ``zish_python_antlr`` directory: ``cd zish_python_antlr``
- Create a virtual environment: ``python3 -m venv venv``
- Activate the virtual environment: ``source venv/bin/activate``
- Install tox: ``pip install tox``
- Run tox: ``tox``
The core parser is created using `ANTLR <https://github.com/antlr/antlr4>`_ from the
Zish grammar. To create the parser files, go to the ``zish/antlr`` directory and
download the ANTLR jar and then run the following command:
``java -jar antlr-4.11.1-complete.jar -Dlanguage=Python3 Zish.g4``
Making A New Release
--------------------
* Run ``tox`` to make sure all tests pass
* Update the `Release Notes` section.
* Ensure ``build`` and ``twine`` are installed: ``pip install wheel twine``
Then do::
git tag -a x.y.z -m "version x.y.z"
rm -r dist
python -m build
twine upload --sign dist/*
Release Notes
-------------
Version 0.0.14 (2022-10-30)
```````````````````````````
- The U+00A0 NO-BREAK SPACE is now treated as whitespace.
Version 0.0.13 (2021-04-04)
```````````````````````````
- Trailing commas in list and maps are now allowed.
Version 0.0.12 (2017-09-07)
```````````````````````````
- Rename to `zish_antlr` to distinguish it from `zish`.
Version 0.0.11 (2017-09-07)
```````````````````````````
- Upload to PyPI failed for previous release.
Version 0.0.10 (2017-09-07)
```````````````````````````
- Allow lists and sets as keys to maps.
Version 0.0.9 (2017-08-24)
``````````````````````````
- Fix bug where ``int`` was being parsed as ``Decimal``.
- Make bytes type return a ``bytes`` rather than a ``bytearray``.
Version 0.0.8 (2017-08-24)
``````````````````````````
- Container types aren't allowed as map keys.
- Performance improvements.
Version 0.0.7 (2017-08-22)
``````````````````````````
- Fix bug with UTC timestamp formatting.
Version 0.0.6 (2017-08-22)
``````````````````````````
- Fix bug in timestamp formatting.
- Add note about comments.
Version 0.0.5 (2017-08-18)
``````````````````````````
- Fix bug where ``dumps`` fails for a ``tuple``.
Version 0.0.4 (2017-08-15)
``````````````````````````
- Simplify integer types.
Version 0.0.3 (2017-08-09)
``````````````````````````
- Fixed bug where interpreter couldn't find the ``zish.antlr`` package in eggs.
- Removed a few superfluous escape sequences.
Version 0.0.2 (2017-08-05)
``````````````````````````
- Now uses RFC3339 for timestamps.
Version 0.0.1 (2017-08-03)
``````````````````````````
- Fix bug where an EOF could cause an infinite loop.
Version 0.0.0 (2017-08-01)
``````````````````````````
- First public release. Passes all the tests.
| zish-antlr | /zish_antlr-0.0.14.tar.gz/zish_antlr-0.0.14/README.rst | README.rst |
from base64 import b64decode, b64encode
from collections.abc import Mapping
from datetime import datetime as Datetime, timezone as Timezone
from decimal import Decimal
from antlr4 import CommonTokenStream, InputStream
from antlr4.error import ErrorListener, Errors
from antlr4.tree.Tree import TerminalNodeImpl
import arrow
from zish.antlr.ZishLexer import ZishLexer
from zish.antlr.ZishParser import ZishParser
# Delimiter wrapped around Zish string values during serialization.
QUOTE = '"'
# strftime layout used for timestamps whose offset equals UTC (RFC 3339 "Z" form).
UTC_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
class ZishException(Exception):
    """Raised for any error met while parsing or serializing Zish data."""
class ThrowingErrorListener(ErrorListener.ErrorListener):
    """ANTLR error listener that raises instead of printing to stderr, so
    lexer/parser problems abort immediately and surface to the caller."""

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        message = f"line {line}: {column} {msg}"
        raise Errors.ParseCancellationException(message)
def load(file_like):
    """Read a Zish document from the readable *file_like* and return the
    corresponding Python value."""
    contents = file_like.read()
    return loads(contents)
def dump(obj, file_like):
    """Serialize *obj* to Zish text and write it to the writable *file_like*."""
    text = dumps(obj)
    file_like.write(text)
def loads(zish_str):
    """Parse the Zish document in *zish_str* and return the Python value.

    Any lexing or parsing failure is re-raised as ZishException.
    """
    lexer = ZishLexer(InputStream(zish_str))
    # Swap ANTLR's default console reporters for listeners that raise.
    lexer.removeErrorListeners()
    lexer.addErrorListener(ThrowingErrorListener())
    stream = CommonTokenStream(lexer)
    parser = ZishParser(stream)
    parser.removeErrorListeners()
    parser.addErrorListener(ThrowingErrorListener())
    try:
        tree = parser.start()
    except Errors.ParseCancellationException as e:
        raise ZishException(str(e)) from e
    return parse(tree)
def parse(node):
    """Recursively convert an ANTLR parse-tree *node* into the equivalent
    Python value (dict, tuple, str, int, Decimal, bytes, datetime, bool or
    None).

    :raises ZishException: for malformed timestamps or unrecognised nodes.
    """
    if isinstance(node, ZishParser.Map_typeContext):
        # A map: gather each key/value pair child into a dict.
        val = {}
        for child in node.getChildren():
            if isinstance(child, ZishParser.PairContext):
                k, v = [
                    parse(c)
                    for c in child.getChildren()
                    if isinstance(c, (ZishParser.ElementContext, ZishParser.KeyContext))
                ]
                val[k] = v
        return val
    elif isinstance(node, ZishParser.List_typeContext):
        # A list: parse the element children; returned as an immutable tuple.
        val = []
        for child in node.getChildren():
            if isinstance(child, ZishParser.ElementContext):
                val.append(parse(child))
        return tuple(val)
    elif isinstance(
        node,
        (ZishParser.StartContext, ZishParser.ElementContext, ZishParser.KeyContext),
    ):
        # Wrapper rules: descend into the first meaningful child, skipping EOF.
        for c in node.getChildren():
            if (
                isinstance(c, TerminalNodeImpl)
                and c.getPayload().type == ZishParser.EOF
            ):
                continue
            return parse(c)
    elif isinstance(node, TerminalNodeImpl):
        # A leaf token: map it onto the corresponding Python scalar.
        token = node.getPayload()
        token_type = token.type
        token_text = token.text
        if token_type == ZishParser.TIMESTAMP:
            try:
                return arrow.get(token_text).datetime
            except arrow.parser.ParserError as e:
                raise ZishException(f"Can't parse the timestamp '{token.text}'.") from e
        elif token_type == ZishParser.NULL:
            return None
        elif token_type == ZishParser.BOOL:
            return token.text == "true"
        elif token_type == ZishParser.INTEGER:
            return int(token.text)
        elif token_type == ZishParser.DECIMAL:
            return Decimal(token.text)
        elif token_type == ZishParser.STRING:
            # Strip the surrounding quotes, then resolve escape sequences.
            return unescape(token.text[1:-1])
        elif token_type == ZishParser.BLOB:
            return b64decode(token.text)
        else:
            raise ZishException(f"Don't recognize the token type: {token_type}.")
    else:
        raise ZishException(
            f"Don't know what to do with type {type(node)} with value {node}."
        )
# Single-character escape sequences (C-style escapes plus escaped line
# terminators, which act as line continuations and produce no output).
ESCAPES = {
    "0": "\u0000",  # NUL
    "a": "\u0007",  # alert BEL
    "b": "\u0008",  # backspace BS
    "t": "\u0009",  # horizontal tab HT
    "n": "\u000A",  # linefeed LF
    "f": "\u000C",  # form feed FF
    "r": "\u000D",  # carriage return CR
    "v": "\u000B",  # vertical tab VT
    '"': "\u0022",  # double quote
    "'": "\u0027",  # single quote
    "?": "\u003F",  # question mark
    "\\": "\u005C",  # backslash
    "/": "\u002F",  # forward slash
    "\u000D\u000A": "",  # escaped CRLF: line continuation
    "\u000D": "",  # escaped CR: line continuation
    "\u000A": "",  # escaped LF: line continuation
}


def unescape(escaped_str):
    """Resolve backslash escape sequences in *escaped_str* and return the
    resulting text.

    Handles the single-character escapes in ESCAPES plus \\xHH, \\uHHHH and
    \\UHHHHHHHH hex escapes. Rewritten iteratively: the previous
    one-recursive-call-per-escape version could exhaust the interpreter's
    recursion limit on strings containing many escapes.

    :raises ZishException: if a backslash is not followed by a recognised
        escape sequence.
    """
    out = []
    rest = escaped_str
    while True:
        i = rest.find("\\")
        if i == -1:
            out.append(rest)
            return "".join(out)
        out.append(rest[:i])
        tail_str = rest[i + 1 :]
        for k, v in ESCAPES.items():
            if tail_str.startswith(k):
                out.append(v)
                rest = tail_str[len(k) :]
                break
        else:
            for prefix, digits in (("x", 2), ("u", 4), ("U", 8)):
                if tail_str.startswith(prefix):
                    hex_str = tail_str[1 : 1 + digits]
                    out.append(chr(int(hex_str, 16)))
                    rest = tail_str[1 + digits :]
                    break
            else:
                raise ZishException(
                    f"Can't find a valid string following the first backslash of "
                    f"'{rest}'."
                )
def dumps(obj):
    """Serialize *obj* into a Zish-formatted string."""
    top_level_indent = ""
    return _dump(obj, top_level_indent)
def _dump(obj, indent):
    """Recursively serialize *obj* to Zish text; *indent* is the leading
    whitespace of the current nesting level.

    :raises ZishException: for types with no Zish representation.
    """
    if isinstance(obj, Mapping):
        new_indent = f"{indent}  "
        # Keys are emitted sorted so output is deterministic.
        b = "".join(
            f"\n{new_indent}{_dump(k, new_indent)}: {_dump(v, new_indent)},"
            for k, v in sorted(obj.items())
        )
        # NOTE(review): unlike the list branch below, the closing brace is not
        # prefixed with *indent*, so nested maps close at column 0 — confirm
        # whether this asymmetry is intentional before changing the format.
        return "{}" if len(b) == 0 else "{" + b + "\n}"
    elif isinstance(obj, bool):
        # Checked before int: bool is a subclass of int in Python.
        return "true" if obj else "false"
    elif isinstance(obj, (list, tuple)):
        new_indent = f"{indent}  "
        b = "".join(f"\n{new_indent}{_dump(v, new_indent)}," for v in obj)
        return "[]" if len(b) == 0 else f"[{b}\n{indent}]"
    elif isinstance(obj, (int, float, Decimal)):
        return str(obj)
    elif obj is None:
        return "null"
    elif isinstance(obj, str):
        return QUOTE + obj + QUOTE
    elif isinstance(obj, (bytes, bytearray)):
        # Binary data is emitted as single-quoted base 64.
        return f"'{b64encode(obj).decode()}'"
    elif isinstance(obj, Datetime):
        tzinfo = obj.tzinfo
        if tzinfo is None:
            # Naive datetimes get the RFC 3339 "unknown offset" form -00:00.
            return f"{obj.isoformat()}-00:00"
        elif tzinfo.utcoffset(obj) == Timezone.utc.utcoffset(obj):
            # UTC-equivalent offsets use the compact trailing-Z form.
            return obj.strftime(UTC_FORMAT)
        else:
            return obj.isoformat()
    else:
        raise ZishException(f"Type {type(obj)} not recognised.")
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
    # Serialized ATN (augmented transition network) tables emitted by the
    # ANTLR 4 code generator for the Zish grammar. Do not edit by hand;
    # regenerate from Zish.g4 instead.
    return [
        4,1,14,64,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,1,0,1,
        0,1,0,1,1,1,1,1,1,1,1,3,1,20,8,1,1,2,1,2,1,3,1,3,1,3,1,3,5,3,28,
        8,3,10,3,12,3,31,9,3,1,3,3,3,34,8,3,1,3,1,3,1,3,1,3,3,3,40,8,3,1,
        4,1,4,1,4,1,4,5,4,46,8,4,10,4,12,4,49,9,4,1,4,3,4,52,8,4,1,4,1,4,
        1,4,1,4,3,4,58,8,4,1,5,1,5,1,5,1,5,1,5,0,0,6,0,2,4,6,8,10,0,1,1,
        0,9,14,66,0,12,1,0,0,0,2,19,1,0,0,0,4,21,1,0,0,0,6,39,1,0,0,0,8,
        57,1,0,0,0,10,59,1,0,0,0,12,13,3,2,1,0,13,14,5,0,0,1,14,1,1,0,0,
        0,15,20,3,4,2,0,16,20,3,6,3,0,17,20,3,8,4,0,18,20,5,8,0,0,19,15,
        1,0,0,0,19,16,1,0,0,0,19,17,1,0,0,0,19,18,1,0,0,0,20,3,1,0,0,0,21,
        22,7,0,0,0,22,5,1,0,0,0,23,24,5,1,0,0,24,29,3,2,1,0,25,26,5,3,0,
        0,26,28,3,2,1,0,27,25,1,0,0,0,28,31,1,0,0,0,29,27,1,0,0,0,29,30,
        1,0,0,0,30,33,1,0,0,0,31,29,1,0,0,0,32,34,5,3,0,0,33,32,1,0,0,0,
        33,34,1,0,0,0,34,35,1,0,0,0,35,36,5,2,0,0,36,40,1,0,0,0,37,38,5,
        1,0,0,38,40,5,2,0,0,39,23,1,0,0,0,39,37,1,0,0,0,40,7,1,0,0,0,41,
        42,5,4,0,0,42,47,3,10,5,0,43,44,5,3,0,0,44,46,3,10,5,0,45,43,1,0,
        0,0,46,49,1,0,0,0,47,45,1,0,0,0,47,48,1,0,0,0,48,51,1,0,0,0,49,47,
        1,0,0,0,50,52,5,3,0,0,51,50,1,0,0,0,51,52,1,0,0,0,52,53,1,0,0,0,
        53,54,5,5,0,0,54,58,1,0,0,0,55,56,5,4,0,0,56,58,5,5,0,0,57,41,1,
        0,0,0,57,55,1,0,0,0,58,9,1,0,0,0,59,60,3,4,2,0,60,61,5,6,0,0,61,
        62,3,2,1,0,62,11,1,0,0,0,7,19,29,33,39,47,51,57
    ]
class ZishParser ( Parser ):
    # --- ANTLR-generated parser tables; do not edit by hand. ---

    # Grammar this parser was generated from.
    grammarFileName = "Zish.g4"

    # Deserialized ATN and prediction caches shared by all parser instances.
    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    sharedContextCache = PredictionContextCache()

    literalNames = [ "<INVALID>", "'['", "']'", "','", "'{'", "'}'", "':'",
                     "<INVALID>", "'null'" ]

    symbolicNames = [ "<INVALID>", "LIST_START", "LIST_FINISH", "COMMA",
                      "MAP_START", "MAP_FINISH", "COLON", "WS", "NULL",
                      "BOOL", "TIMESTAMP", "INTEGER", "DECIMAL", "STRING",
                      "BLOB" ]

    # Grammar rule indices.
    RULE_start = 0
    RULE_element = 1
    RULE_key = 2
    RULE_list_type = 3
    RULE_map_type = 4
    RULE_pair = 5

    ruleNames = [ "start", "element", "key", "list_type", "map_type", "pair" ]

    # Token type constants (match the lexer's token numbering).
    EOF = Token.EOF
    LIST_START=1
    LIST_FINISH=2
    COMMA=3
    MAP_START=4
    MAP_FINISH=5
    COLON=6
    WS=7
    NULL=8
    BOOL=9
    TIMESTAMP=10
    INTEGER=11
    DECIMAL=12
    STRING=13
    BLOB=14

    def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
        # Standard ANTLR parser setup: version check plus ATN simulator.
        super().__init__(input, output)
        self.checkVersion("4.11.1")
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None
    # Parse-tree context for the "start" rule: element EOF.  (ANTLR-generated.)
    class StartContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def element(self):
            return self.getTypedRuleContext(ZishParser.ElementContext,0)

        def EOF(self):
            return self.getToken(ZishParser.EOF, 0)

        def getRuleIndex(self):
            return ZishParser.RULE_start

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterStart" ):
                listener.enterStart(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitStart" ):
                listener.exitStart(self)

    # Entry rule: parse exactly one element followed by end-of-file.
    def start(self):
        localctx = ZishParser.StartContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_start)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 12
            self.element()
            self.state = 13
            self.match(ZishParser.EOF)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context for "element": key | list_type | map_type | NULL.
    # (ANTLR-generated.)
    class ElementContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def key(self):
            return self.getTypedRuleContext(ZishParser.KeyContext,0)

        def list_type(self):
            return self.getTypedRuleContext(ZishParser.List_typeContext,0)

        def map_type(self):
            return self.getTypedRuleContext(ZishParser.Map_typeContext,0)

        def NULL(self):
            return self.getToken(ZishParser.NULL, 0)

        def getRuleIndex(self):
            return ZishParser.RULE_element

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterElement" ):
                listener.enterElement(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitElement" ):
                listener.exitElement(self)

    # element: dispatch on the lookahead token type to the matching alternative.
    def element(self):
        localctx = ZishParser.ElementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_element)
        try:
            self.state = 19
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [9, 10, 11, 12, 13, 14]:
                # Scalar token types (BOOL..BLOB) parse as a key.
                self.enterOuterAlt(localctx, 1)
                self.state = 15
                self.key()
                pass
            elif token in [1]:
                self.enterOuterAlt(localctx, 2)
                self.state = 16
                self.list_type()
                pass
            elif token in [4]:
                self.enterOuterAlt(localctx, 3)
                self.state = 17
                self.map_type()
                pass
            elif token in [8]:
                self.enterOuterAlt(localctx, 4)
                self.state = 18
                self.match(ZishParser.NULL)
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context for "key": any one scalar token
    # (BOOL | TIMESTAMP | INTEGER | DECIMAL | STRING | BLOB).  (ANTLR-generated.)
    class KeyContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def BOOL(self):
            return self.getToken(ZishParser.BOOL, 0)

        def TIMESTAMP(self):
            return self.getToken(ZishParser.TIMESTAMP, 0)

        def INTEGER(self):
            return self.getToken(ZishParser.INTEGER, 0)

        def DECIMAL(self):
            return self.getToken(ZishParser.DECIMAL, 0)

        def STRING(self):
            return self.getToken(ZishParser.STRING, 0)

        def BLOB(self):
            return self.getToken(ZishParser.BLOB, 0)

        def getRuleIndex(self):
            return ZishParser.RULE_key

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterKey" ):
                listener.enterKey(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitKey" ):
                listener.exitKey(self)

    # key: accept one token whose type is in the scalar token set
    # (bitmask 32256 == bits 9..14 set, i.e. BOOL..BLOB).
    def key(self):
        localctx = ZishParser.KeyContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_key)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 21
            _la = self._input.LA(1)
            if not(((_la) & ~0x3f) == 0 and ((1 << _la) & 32256) != 0):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context for "list_type":
    # '[' element (',' element)* ','? ']'  |  '[' ']'.  (ANTLR-generated.)
    class List_typeContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def LIST_START(self):
            return self.getToken(ZishParser.LIST_START, 0)

        def element(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(ZishParser.ElementContext)
            else:
                return self.getTypedRuleContext(ZishParser.ElementContext,i)

        def LIST_FINISH(self):
            return self.getToken(ZishParser.LIST_FINISH, 0)

        def COMMA(self, i:int=None):
            if i is None:
                return self.getTokens(ZishParser.COMMA)
            else:
                return self.getToken(ZishParser.COMMA, i)

        def getRuleIndex(self):
            return ZishParser.RULE_list_type

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterList_type" ):
                listener.enterList_type(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitList_type" ):
                listener.exitList_type(self)

    # list_type: non-empty list (alt 1, trailing comma allowed) or empty list
    # (alt 2), chosen by adaptive prediction.
    def list_type(self):
        localctx = ZishParser.List_typeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 6, self.RULE_list_type)
        self._la = 0 # Token type
        try:
            self.state = 39
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 23
                self.match(ZishParser.LIST_START)
                self.state = 24
                self.element()
                self.state = 29
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,1,self._ctx)
                # Zero or more ", element" repetitions.
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 25
                        self.match(ZishParser.COMMA)
                        self.state = 26
                        self.element()
                    self.state = 31
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,1,self._ctx)

                self.state = 33
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional trailing comma before the closing bracket.
                if _la==3:
                    self.state = 32
                    self.match(ZishParser.COMMA)

                self.state = 35
                self.match(ZishParser.LIST_FINISH)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 37
                self.match(ZishParser.LIST_START)
                self.state = 38
                self.match(ZishParser.LIST_FINISH)
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Map_typeContext(ParserRuleContext):
        # Parse-tree context for the 'map_type' rule (ANTLR-generated --
        # do not edit by hand; regenerate from Zish.g4).
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def MAP_START(self):
            # Token accessor for the '{' that opens the map.
            return self.getToken(ZishParser.MAP_START, 0)

        def pair(self, i:int=None):
            # Child-context accessor: with no index, return the list of all
            # 'pair' sub-contexts; with an index i, return only the i-th.
            if i is None:
                return self.getTypedRuleContexts(ZishParser.PairContext)
            else:
                return self.getTypedRuleContext(ZishParser.PairContext,i)

        def MAP_FINISH(self):
            # Token accessor for the '}' that closes the map.
            return self.getToken(ZishParser.MAP_FINISH, 0)

        def COMMA(self, i:int=None):
            # Token accessor: all COMMA tokens, or only the i-th one.
            if i is None:
                return self.getTokens(ZishParser.COMMA)
            else:
                return self.getToken(ZishParser.COMMA, i)

        def getRuleIndex(self):
            # Index of the grammar rule this context represents.
            return ZishParser.RULE_map_type

        def enterRule(self, listener:ParseTreeListener):
            # Notify the listener of rule entry, if it implements the callback.
            if hasattr( listener, "enterMap_type" ):
                listener.enterMap_type(self)

        def exitRule(self, listener:ParseTreeListener):
            # Notify the listener of rule exit, if it implements the callback.
            if hasattr( listener, "exitMap_type" ):
                listener.exitMap_type(self)
    def map_type(self):
        # Parse rule (ANTLR-generated -- do not edit; regenerate from Zish.g4):
        #   map_type : MAP_START pair (COMMA pair)* COMMA? MAP_FINISH
        #            | MAP_START MAP_FINISH ;
        # i.e. a possibly empty '{'...'}' map with an optional trailing comma.
        localctx = ZishParser.Map_typeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 8, self.RULE_map_type)
        self._la = 0 # Token type
        try:
            self.state = 57
            self._errHandler.sync(self)
            # Adaptive prediction chooses between the two alternatives.
            la_ = self._interp.adaptivePredict(self._input,6,self._ctx)
            if la_ == 1:
                # Alternative 1: non-empty map.
                self.enterOuterAlt(localctx, 1)
                self.state = 41
                self.match(ZishParser.MAP_START)
                self.state = 42
                self.pair()
                self.state = 47
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,4,self._ctx)
                # Zero or more ', pair' repetitions.
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 43
                        self.match(ZishParser.COMMA)
                        self.state = 44
                        self.pair()
                    self.state = 49
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,4,self._ctx)
                self.state = 51
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==3:
                    # Optional trailing comma before the closing '}'.
                    self.state = 50
                    self.match(ZishParser.COMMA)
                self.state = 53
                self.match(ZishParser.MAP_FINISH)
                pass
            elif la_ == 2:
                # Alternative 2: empty map '{}'.
                self.enterOuterAlt(localctx, 2)
                self.state = 55
                self.match(ZishParser.MAP_START)
                self.state = 56
                self.match(ZishParser.MAP_FINISH)
                pass
        except RecognitionException as re:
            # Record the error on the context and let the handler recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PairContext(ParserRuleContext):
        # Parse-tree context for the 'pair' rule (key ':' element),
        # ANTLR-generated -- do not edit by hand; regenerate from Zish.g4.
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def key(self):
            # Context accessor for the map key.
            return self.getTypedRuleContext(ZishParser.KeyContext,0)

        def COLON(self):
            # Token accessor for the ':' separating key from value.
            return self.getToken(ZishParser.COLON, 0)

        def element(self):
            # Context accessor for the map value.
            return self.getTypedRuleContext(ZishParser.ElementContext,0)

        def getRuleIndex(self):
            # Index of the grammar rule this context represents.
            return ZishParser.RULE_pair

        def enterRule(self, listener:ParseTreeListener):
            # Notify the listener of rule entry, if it implements the callback.
            if hasattr( listener, "enterPair" ):
                listener.enterPair(self)

        def exitRule(self, listener:ParseTreeListener):
            # Notify the listener of rule exit, if it implements the callback.
            if hasattr( listener, "exitPair" ):
                listener.exitPair(self)
    def pair(self):
        # Parse rule (ANTLR-generated -- do not edit; regenerate from Zish.g4):
        #   pair : key COLON element ;
        # i.e. one  key : value  entry inside a map.
        localctx = ZishParser.PairContext(self, self._ctx, self.state)
        self.enterRule(localctx, 10, self.RULE_pair)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 59
            self.key()
            self.state = 60
            self.match(ZishParser.COLON)
            self.state = 61
            self.element()
        except RecognitionException as re:
            # Record the error on the context and let the handler recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
    # Serialized ATN (Augmented Transition Network) for the Zish lexer,
    # machine-generated by the ANTLR tool from Zish.g4.  The integer array is
    # deserialized at import time by ATNDeserializer; do not edit by hand --
    # regenerate from the grammar instead.
    return [
        4,0,14,396,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,
        2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,
        13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,
        19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,
        26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7,
        32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,1,
        0,1,0,1,1,1,1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,6,4,6,93,8,6,11,
        6,12,6,94,1,6,3,6,98,8,6,1,6,1,6,1,7,1,7,1,7,1,7,5,7,106,8,7,10,
        7,12,7,109,9,7,1,7,1,7,1,7,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,
        9,1,9,1,9,1,9,1,9,3,9,128,8,9,1,10,1,10,1,10,1,10,1,10,1,10,1,10,
        1,10,1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,12,1,12,
        1,12,1,12,3,12,152,8,12,1,13,1,13,1,13,1,13,1,13,1,13,3,13,160,8,
        13,1,14,1,14,1,14,1,14,1,14,1,14,3,14,168,8,14,1,15,1,15,1,15,1,
        15,3,15,174,8,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,4,17,183,8,17,
        11,17,12,17,184,3,17,187,8,17,1,18,1,18,1,19,1,19,3,19,193,8,19,
        1,19,3,19,196,8,19,1,19,1,19,1,19,1,19,3,19,202,8,19,1,19,1,19,1,
        19,1,19,1,19,1,19,1,19,1,19,3,19,212,8,19,1,20,3,20,215,8,20,1,20,
        1,20,1,20,5,20,220,8,20,10,20,12,20,223,9,20,3,20,225,8,20,1,21,
        1,21,3,21,229,8,21,1,21,4,21,232,8,21,11,21,12,21,233,1,22,1,22,
        1,22,1,22,5,22,240,8,22,10,22,12,22,243,9,22,1,22,1,22,1,23,1,23,
        1,23,5,23,250,8,23,10,23,12,23,253,9,23,1,23,3,23,256,8,23,1,23,
        5,23,259,8,23,10,23,12,23,262,9,23,1,23,1,23,1,24,1,24,3,24,268,
        8,24,1,25,1,25,5,25,272,8,25,10,25,12,25,275,9,25,1,25,1,25,5,25,
        279,8,25,10,25,12,25,282,9,25,1,25,1,25,5,25,286,8,25,10,25,12,25,
        289,9,25,1,25,1,25,1,26,1,26,5,26,295,8,26,10,26,12,26,298,9,26,
        1,26,1,26,5,26,302,8,26,10,26,12,26,305,9,26,1,26,1,26,5,26,309,
        8,26,10,26,12,26,312,9,26,1,26,1,26,1,27,1,27,5,27,318,8,27,10,27,
        12,27,321,9,27,1,27,1,27,5,27,325,8,27,10,27,12,27,328,9,27,1,27,
        1,27,5,27,332,8,27,10,27,12,27,335,9,27,1,27,1,27,1,28,1,28,1,29,
        1,29,5,29,343,8,29,10,29,12,29,346,9,29,1,30,1,30,1,31,1,31,1,32,
        1,32,1,32,1,33,1,33,3,33,357,8,33,1,34,1,34,1,34,1,34,1,34,1,34,
        1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,
        1,34,1,34,3,34,380,8,34,1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,37,
        1,37,1,38,1,38,1,38,1,38,3,38,395,8,38,2,107,241,0,39,1,1,3,2,5,
        3,7,4,9,5,11,6,13,7,15,0,17,8,19,9,21,10,23,0,25,0,27,0,29,0,31,
        0,33,0,35,0,37,11,39,12,41,0,43,0,45,13,47,14,49,0,51,0,53,0,55,
        0,57,0,59,0,61,0,63,0,65,0,67,0,69,0,71,0,73,0,75,0,77,0,1,0,17,
        2,0,84,84,116,116,1,0,49,57,1,0,48,50,1,0,49,50,1,0,48,49,2,0,90,
        90,122,122,1,0,48,51,1,0,48,53,2,0,69,69,101,101,2,0,34,34,92,92,
        4,0,43,43,47,57,65,90,97,122,1,0,48,57,2,0,43,43,45,45,9,0,34,34,
        47,47,92,92,97,98,102,102,110,110,114,114,116,116,118,118,3,0,48,
        57,65,70,97,102,3,0,9,13,32,32,160,160,2,0,133,133,8232,8233,414,
        0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,
        1,0,0,0,0,13,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,37,
        1,0,0,0,0,39,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,1,79,1,0,0,0,3,81,
        1,0,0,0,5,83,1,0,0,0,7,85,1,0,0,0,9,87,1,0,0,0,11,89,1,0,0,0,13,
        97,1,0,0,0,15,101,1,0,0,0,17,113,1,0,0,0,19,127,1,0,0,0,21,129,1,
        0,0,0,23,142,1,0,0,0,25,151,1,0,0,0,27,159,1,0,0,0,29,167,1,0,0,
        0,31,173,1,0,0,0,33,175,1,0,0,0,35,178,1,0,0,0,37,188,1,0,0,0,39,
        211,1,0,0,0,41,214,1,0,0,0,43,226,1,0,0,0,45,235,1,0,0,0,47,246,
        1,0,0,0,49,267,1,0,0,0,51,269,1,0,0,0,53,292,1,0,0,0,55,315,1,0,
        0,0,57,338,1,0,0,0,59,340,1,0,0,0,61,347,1,0,0,0,63,349,1,0,0,0,
        65,351,1,0,0,0,67,356,1,0,0,0,69,379,1,0,0,0,71,381,1,0,0,0,73,386,
        1,0,0,0,75,388,1,0,0,0,77,394,1,0,0,0,79,80,5,91,0,0,80,2,1,0,0,
        0,81,82,5,93,0,0,82,4,1,0,0,0,83,84,5,44,0,0,84,6,1,0,0,0,85,86,
        5,123,0,0,86,8,1,0,0,0,87,88,5,125,0,0,88,10,1,0,0,0,89,90,5,58,
        0,0,90,12,1,0,0,0,91,93,3,75,37,0,92,91,1,0,0,0,93,94,1,0,0,0,94,
        92,1,0,0,0,94,95,1,0,0,0,95,98,1,0,0,0,96,98,3,15,7,0,97,92,1,0,
        0,0,97,96,1,0,0,0,98,99,1,0,0,0,99,100,6,6,0,0,100,14,1,0,0,0,101,
        102,5,47,0,0,102,103,5,42,0,0,103,107,1,0,0,0,104,106,9,0,0,0,105,
        104,1,0,0,0,106,109,1,0,0,0,107,108,1,0,0,0,107,105,1,0,0,0,108,
        110,1,0,0,0,109,107,1,0,0,0,110,111,5,42,0,0,111,112,5,47,0,0,112,
        16,1,0,0,0,113,114,5,110,0,0,114,115,5,117,0,0,115,116,5,108,0,0,
        116,117,5,108,0,0,117,18,1,0,0,0,118,119,5,116,0,0,119,120,5,114,
        0,0,120,121,5,117,0,0,121,128,5,101,0,0,122,123,5,102,0,0,123,124,
        5,97,0,0,124,125,5,108,0,0,125,126,5,115,0,0,126,128,5,101,0,0,127,
        118,1,0,0,0,127,122,1,0,0,0,128,20,1,0,0,0,129,130,3,23,11,0,130,
        131,5,45,0,0,131,132,3,25,12,0,132,133,5,45,0,0,133,134,3,27,13,
        0,134,135,7,0,0,0,135,136,3,31,15,0,136,137,5,58,0,0,137,138,3,33,
        16,0,138,139,5,58,0,0,139,140,3,35,17,0,140,141,3,29,14,0,141,22,
        1,0,0,0,142,143,3,61,30,0,143,144,3,61,30,0,144,145,3,61,30,0,145,
        146,3,61,30,0,146,24,1,0,0,0,147,148,5,48,0,0,148,152,7,1,0,0,149,
        150,5,49,0,0,150,152,7,2,0,0,151,147,1,0,0,0,151,149,1,0,0,0,152,
        26,1,0,0,0,153,154,5,48,0,0,154,160,7,1,0,0,155,156,7,3,0,0,156,
        160,3,61,30,0,157,158,5,51,0,0,158,160,7,4,0,0,159,153,1,0,0,0,159,
        155,1,0,0,0,159,157,1,0,0,0,160,28,1,0,0,0,161,168,7,5,0,0,162,163,
        3,63,31,0,163,164,3,31,15,0,164,165,5,58,0,0,165,166,3,33,16,0,166,
        168,1,0,0,0,167,161,1,0,0,0,167,162,1,0,0,0,168,30,1,0,0,0,169,170,
        7,4,0,0,170,174,3,61,30,0,171,172,5,50,0,0,172,174,7,6,0,0,173,169,
        1,0,0,0,173,171,1,0,0,0,174,32,1,0,0,0,175,176,7,7,0,0,176,177,3,
        61,30,0,177,34,1,0,0,0,178,179,7,7,0,0,179,186,3,61,30,0,180,182,
        5,46,0,0,181,183,3,61,30,0,182,181,1,0,0,0,183,184,1,0,0,0,184,182,
        1,0,0,0,184,185,1,0,0,0,185,187,1,0,0,0,186,180,1,0,0,0,186,187,
        1,0,0,0,187,36,1,0,0,0,188,189,3,41,20,0,189,38,1,0,0,0,190,192,
        3,41,20,0,191,193,3,59,29,0,192,191,1,0,0,0,192,193,1,0,0,0,193,
        195,1,0,0,0,194,196,3,43,21,0,195,194,1,0,0,0,195,196,1,0,0,0,196,
        212,1,0,0,0,197,198,5,78,0,0,198,199,5,97,0,0,199,212,5,78,0,0,200,
        202,3,63,31,0,201,200,1,0,0,0,201,202,1,0,0,0,202,203,1,0,0,0,203,
        204,5,73,0,0,204,205,5,110,0,0,205,206,5,102,0,0,206,207,5,105,0,
        0,207,208,5,110,0,0,208,209,5,105,0,0,209,210,5,116,0,0,210,212,
        5,121,0,0,211,190,1,0,0,0,211,197,1,0,0,0,211,201,1,0,0,0,212,40,
        1,0,0,0,213,215,5,45,0,0,214,213,1,0,0,0,214,215,1,0,0,0,215,224,
        1,0,0,0,216,225,5,48,0,0,217,221,7,1,0,0,218,220,3,61,30,0,219,218,
        1,0,0,0,220,223,1,0,0,0,221,219,1,0,0,0,221,222,1,0,0,0,222,225,
        1,0,0,0,223,221,1,0,0,0,224,216,1,0,0,0,224,217,1,0,0,0,225,42,1,
        0,0,0,226,228,7,8,0,0,227,229,3,63,31,0,228,227,1,0,0,0,228,229,
        1,0,0,0,229,231,1,0,0,0,230,232,3,61,30,0,231,230,1,0,0,0,232,233,
        1,0,0,0,233,231,1,0,0,0,233,234,1,0,0,0,234,44,1,0,0,0,235,241,5,
        34,0,0,236,240,3,65,32,0,237,240,3,69,34,0,238,240,8,9,0,0,239,236,
        1,0,0,0,239,237,1,0,0,0,239,238,1,0,0,0,240,243,1,0,0,0,241,242,
        1,0,0,0,241,239,1,0,0,0,242,244,1,0,0,0,243,241,1,0,0,0,244,245,
        5,34,0,0,245,46,1,0,0,0,246,251,5,39,0,0,247,250,3,51,25,0,248,250,
        3,75,37,0,249,247,1,0,0,0,249,248,1,0,0,0,250,253,1,0,0,0,251,249,
        1,0,0,0,251,252,1,0,0,0,252,255,1,0,0,0,253,251,1,0,0,0,254,256,
        3,49,24,0,255,254,1,0,0,0,255,256,1,0,0,0,256,260,1,0,0,0,257,259,
        3,75,37,0,258,257,1,0,0,0,259,262,1,0,0,0,260,258,1,0,0,0,260,261,
        1,0,0,0,261,263,1,0,0,0,262,260,1,0,0,0,263,264,5,39,0,0,264,48,
        1,0,0,0,265,268,3,53,26,0,266,268,3,55,27,0,267,265,1,0,0,0,267,
        266,1,0,0,0,268,50,1,0,0,0,269,273,3,57,28,0,270,272,3,75,37,0,271,
        270,1,0,0,0,272,275,1,0,0,0,273,271,1,0,0,0,273,274,1,0,0,0,274,
        276,1,0,0,0,275,273,1,0,0,0,276,280,3,57,28,0,277,279,3,75,37,0,
        278,277,1,0,0,0,279,282,1,0,0,0,280,278,1,0,0,0,280,281,1,0,0,0,
        281,283,1,0,0,0,282,280,1,0,0,0,283,287,3,57,28,0,284,286,3,75,37,
        0,285,284,1,0,0,0,286,289,1,0,0,0,287,285,1,0,0,0,287,288,1,0,0,
        0,288,290,1,0,0,0,289,287,1,0,0,0,290,291,3,57,28,0,291,52,1,0,0,
        0,292,296,3,57,28,0,293,295,3,75,37,0,294,293,1,0,0,0,295,298,1,
        0,0,0,296,294,1,0,0,0,296,297,1,0,0,0,297,299,1,0,0,0,298,296,1,
        0,0,0,299,303,3,57,28,0,300,302,3,75,37,0,301,300,1,0,0,0,302,305,
        1,0,0,0,303,301,1,0,0,0,303,304,1,0,0,0,304,306,1,0,0,0,305,303,
        1,0,0,0,306,310,3,57,28,0,307,309,3,75,37,0,308,307,1,0,0,0,309,
        312,1,0,0,0,310,308,1,0,0,0,310,311,1,0,0,0,311,313,1,0,0,0,312,
        310,1,0,0,0,313,314,5,61,0,0,314,54,1,0,0,0,315,319,3,57,28,0,316,
        318,3,75,37,0,317,316,1,0,0,0,318,321,1,0,0,0,319,317,1,0,0,0,319,
        320,1,0,0,0,320,322,1,0,0,0,321,319,1,0,0,0,322,326,3,57,28,0,323,
        325,3,75,37,0,324,323,1,0,0,0,325,328,1,0,0,0,326,324,1,0,0,0,326,
        327,1,0,0,0,327,329,1,0,0,0,328,326,1,0,0,0,329,333,5,61,0,0,330,
        332,3,75,37,0,331,330,1,0,0,0,332,335,1,0,0,0,333,331,1,0,0,0,333,
        334,1,0,0,0,334,336,1,0,0,0,335,333,1,0,0,0,336,337,5,61,0,0,337,
        56,1,0,0,0,338,339,7,10,0,0,339,58,1,0,0,0,340,344,5,46,0,0,341,
        343,3,61,30,0,342,341,1,0,0,0,343,346,1,0,0,0,344,342,1,0,0,0,344,
        345,1,0,0,0,345,60,1,0,0,0,346,344,1,0,0,0,347,348,7,11,0,0,348,
        62,1,0,0,0,349,350,7,12,0,0,350,64,1,0,0,0,351,352,5,92,0,0,352,
        353,3,67,33,0,353,66,1,0,0,0,354,357,7,13,0,0,355,357,3,77,38,0,
        356,354,1,0,0,0,356,355,1,0,0,0,357,68,1,0,0,0,358,359,5,92,0,0,
        359,360,5,117,0,0,360,361,1,0,0,0,361,380,3,71,35,0,362,363,5,92,
        0,0,363,364,5,85,0,0,364,365,5,48,0,0,365,366,5,48,0,0,366,367,5,
        48,0,0,367,368,1,0,0,0,368,369,3,71,35,0,369,370,3,73,36,0,370,380,
        1,0,0,0,371,372,5,92,0,0,372,373,5,85,0,0,373,374,5,48,0,0,374,375,
        5,48,0,0,375,376,5,49,0,0,376,377,5,48,0,0,377,378,1,0,0,0,378,380,
        3,71,35,0,379,358,1,0,0,0,379,362,1,0,0,0,379,371,1,0,0,0,380,70,
        1,0,0,0,381,382,3,73,36,0,382,383,3,73,36,0,383,384,3,73,36,0,384,
        385,3,73,36,0,385,72,1,0,0,0,386,387,7,14,0,0,387,74,1,0,0,0,388,
        389,7,15,0,0,389,76,1,0,0,0,390,395,2,10,13,0,391,392,5,13,0,0,392,
        395,5,10,0,0,393,395,7,16,0,0,394,390,1,0,0,0,394,391,1,0,0,0,394,
        393,1,0,0,0,395,78,1,0,0,0,40,0,94,97,107,127,151,159,167,173,184,
        186,192,195,201,211,214,221,224,228,233,239,241,249,251,255,260,
        267,273,280,287,296,303,310,319,326,333,344,356,379,394,1,6,0,0
    ]
class ZishLexer(Lexer):
    # Lexer for the Zish data format, machine-generated by ANTLR from
    # Zish.g4 -- do not edit by hand; regenerate from the grammar instead.

    # Deserialize the transition network once at class-definition time; the
    # resulting ATN and DFA cache are shared by all lexer instances.
    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Token type constants (values must match the serialized ATN and the
    # companion ZishParser).
    LIST_START = 1
    LIST_FINISH = 2
    COMMA = 3
    MAP_START = 4
    MAP_FINISH = 5
    COLON = 6
    WS = 7
    NULL = 8
    BOOL = 9
    TIMESTAMP = 10
    INTEGER = 11
    DECIMAL = 12
    STRING = 13
    BLOB = 14

    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]

    modeNames = [ "DEFAULT_MODE" ]

    literalNames = [ "<INVALID>",
            "'['", "']'", "','", "'{'", "'}'", "':'", "'null'" ]

    symbolicNames = [ "<INVALID>",
            "LIST_START", "LIST_FINISH", "COMMA", "MAP_START", "MAP_FINISH",
            "COLON", "WS", "NULL", "BOOL", "TIMESTAMP", "INTEGER", "DECIMAL",
            "STRING", "BLOB" ]

    ruleNames = [ "LIST_START", "LIST_FINISH", "COMMA", "MAP_START", "MAP_FINISH",
                  "COLON", "WS", "BLOCK_COMMENT", "NULL", "BOOL", "TIMESTAMP",
                  "YEAR", "MONTH", "DAY", "OFFSET", "HOUR", "MINUTE", "SECOND",
                  "INTEGER", "DECIMAL", "INT", "EXP", "STRING", "BLOB",
                  "BASE_64_PAD", "BASE_64_QUARTET", "BASE_64_PAD1", "BASE_64_PAD2",
                  "BASE_64_CHAR", "FRAC", "DIGIT", "PLUS_OR_MINUS", "COMMON_ESCAPE",
                  "COMMON_ESCAPE_CODE", "UNICODE_ESCAPE", "HEX_DIGIT_QUARTET",
                  "HEX_DIGIT", "SPACE", "EOL" ]

    grammarFileName = "Zish.g4"

    def __init__(self, input=None, output:TextIO = sys.stdout):
        # input:  character stream to tokenise (an antlr4 InputStream), or None.
        # output: stream used by the runtime for error/diagnostic messages.
        super().__init__(input, output)
        self.checkVersion("4.11.1")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
====
Zish
====
A Python library for the `Zish format <https://github.com/tlocke/zish>`_, released under
the `MIT-0 licence <https://choosealicense.com/licenses/mit-0/>`_.
.. image:: https://github.com/tlocke/zish_python/workflows/zish_python/badge.svg
:alt: Build Status
.. contents:: Table of Contents
:depth: 2
:local:
Installation
------------
- Create a virtual environment: ``python3 -m venv venv``
- Activate the virtual environment: ``source venv/bin/activate``
- Install: ``pip install zish``
Quickstart
----------
To go from a Python object to a Zish string use ``zish.dumps``. To go from a Zish
string to a Python object use ``zish.loads``. E.g.
>>> from zish import loads, dumps
>>> from datetime import datetime, timezone
>>> from decimal import Decimal
>>>
>>> # Take a Python object
>>> book = {
... 'title': 'A Hero of Our Time',
... 'read_date': datetime(2017, 7, 16, 14, 5, tzinfo=timezone.utc),
... 'would_recommend': True,
... 'description': None,
... 'number_of_novellas': 5,
... 'price': Decimal('7.99'),
... 'weight': 6.88,
... 'key': b'kshhgrl',
... 'tags': ['russian', 'novel', '19th century']}
>>>
>>> # Output it as a Zish string
>>> zish_str = dumps(book)
>>> print(zish_str)
{
"description": null,
"key": 'a3NoaGdybA==',
"number_of_novellas": 5,
"price": 7.99,
"read_date": 2017-07-16T14:05:00Z,
"tags": [
"russian",
"novel",
"19th century",
],
"title": "A Hero of Our Time",
"weight": 6.88,
"would_recommend": true,
}
>>>
>>> # Load the Zish string, to give us back the Python object
>>> reloaded_book = loads(zish_str)
>>>
>>> # Print the title
>>> print(reloaded_book['title'])
A Hero of Our Time
.. table:: Python To Zish Type Mapping
+-----------------------+-----------------------------------------------------------+
| Python Type | Zish Type |
+=======================+===========================================================+
| bool | bool |
+-----------------------+-----------------------------------------------------------+
| int | integer |
+-----------------------+-----------------------------------------------------------+
| str | string |
+-----------------------+-----------------------------------------------------------+
| datetime.datetime | timestamp |
+-----------------------+-----------------------------------------------------------+
| dict | map |
+-----------------------+-----------------------------------------------------------+
| decimal.Decimal | decimal |
+-----------------------+-----------------------------------------------------------+
| float | decimal |
+-----------------------+-----------------------------------------------------------+
| bytearray | bytes |
+-----------------------+-----------------------------------------------------------+
| bytes | bytes |
+-----------------------+-----------------------------------------------------------+
| list | list |
+-----------------------+-----------------------------------------------------------+
| tuple | list |
+-----------------------+-----------------------------------------------------------+
Running The Tests
-----------------
- Change to the ``zish`` directory: ``cd zish``
- Create a virtual environment: ``python3 -m venv venv``
- Activate the virtual environment: ``source venv/bin/activate``
- Install tox: ``pip install tox``
- Run tox: ``tox``
README.rst
----------
This file is written in the `reStructuredText
<https://docutils.sourceforge.io/docs/user/rst/quickref.html>`_ format. To generate an
HTML page from it, do:
- Activate the virtual environment: ``source venv/bin/activate``
- Install ``Sphinx``: ``pip install Sphinx``
- Run ``rst2html.py``: ``rst2html.py README.rst README.html``
Making A New Release
--------------------
Run ``tox`` to make sure all tests pass, then update the 'Release Notes' section then
do::
git tag -a x.y.z -m "version x.y.z"
rm -r dist
python -m build
twine upload --sign dist/*
Release Notes
-------------
Version 0.1.10 (2022-10-29)
```````````````````````````
- Switch to MIT-0 licence.
- Make the U+00A0 NO-BREAK SPACE character whitespace
- Better error message when ``dump()`` encounters an unrecognised type.
Version 0.1.9 (2021-04-05)
``````````````````````````
- Allow trailing commas in maps and lists.
Version 0.1.8 (2020-06-25)
``````````````````````````
- Make `dumps` sort the `set` type before outputing as a list.
Version 0.1.7 (2020-02-11)
``````````````````````````
- Use 1-based line and character numbers, rather than zero-based.
- Arrow time library upgraded.
- Line and character numbers now available in errors
Version 0.1.6 (2018-11-12)
``````````````````````````
- Better error message when parsing an empty string.
Version 0.1.5 (2018-10-30)
``````````````````````````
- Fix new Flake8 errors.
Version 0.1.4 (2018-10-30)
``````````````````````````
- Better error message if there's a duplicate key in a map.
Version 0.1.3 (2018-10-30)
``````````````````````````
- An exception is thrown if there's a duplicate key in a map.
Version 0.1.2 (2018-09-04)
``````````````````````````
- Change formatting for map and list in dumps. The trailing } and ] are now on a line
down and at the original index.
Version 0.1.1 (2018-03-13)
``````````````````````````
- A decimal with an uppercase 'E' in the exponent wasn't being recognized.
Version 0.1.0 (2018-01-29)
``````````````````````````
- A map key can't be null, following change in spec.
Version 0.0.26 (2018-01-29)
```````````````````````````
- Remove '//' as a comment, following change in spec.
- Allow 'e' and 'E' in the exponent of a decimal, following change in spec.
Version 0.0.25 (2018-01-12)
```````````````````````````
- Better error message when the end of the document is reached without a map being
closed.
Version 0.0.24 (2018-01-11)
```````````````````````````
- Fix bug where an integer after a value (and before a ',' or '}') in a map doesn't
give a good error.
Version 0.0.23 (2018-01-09)
```````````````````````````
- A map key can't now be a list or a map.
Version 0.0.22 (2018-01-08)
```````````````````````````
- A map key can now be of any type.
- The 'set' type has been removed from Zish.
- Zish now recognizes the full set of Unicode EOL sequences.
- The 'float' type has been removed from Zish.
- Fixed bug when sorting map with keys of more than one type.
Version 0.0.21 (2018-01-04)
```````````````````````````
- Give a better error if the end of the document is reached before a map is completed.
Version 0.0.20 (2018-01-04)
```````````````````````````
- Give an error if there are multiple top-level values, rather than silently truncating.
Version 0.0.19 (2017-09-27)
```````````````````````````
- Decimal exponent dumped as ``E`` rather than ``d``.
Version 0.0.18 (2017-09-12)
```````````````````````````
- Add tests for float formatting.
Version 0.0.17 (2017-09-12)
```````````````````````````
- Tighten up parsing of container types.
- Make sure floats are formatted without an uppercase E.
Version 0.0.16 (2017-09-06)
```````````````````````````
- Allow lists and sets as keys.
Version 0.0.15 (2017-09-05)
```````````````````````````
- Fixed map parsing bug where an error wasn't reported properly if it was expecting a
``:`` but got an integer.
Version 0.0.14 (2017-09-05)
```````````````````````````
- Fixed bug where sets couldn't be formatted.
Version 0.0.13 (2017-08-30)
```````````````````````````
- Performance improvement.
Version 0.0.12 (2017-08-30)
```````````````````````````
- Add Travis configuration.
Version 0.0.11 (2017-08-30)
```````````````````````````
- Give a better error message if a string isn't closed.
Version 0.0.10 (2017-08-29)
```````````````````````````
- New native parser that doesn't use antlr. It's about twice as fast.
Version 0.0.9 (2017-08-24)
``````````````````````````
- Fix bug where ``int`` was being parsed as ``Decimal``.
- Make bytes type return a ``bytes`` rather than a ``bytearray``.
Version 0.0.8 (2017-08-24)
``````````````````````````
- Container types aren't allowed as map keys.
- Performance improvements.
Version 0.0.7 (2017-08-22)
``````````````````````````
- Fix bug with UTC timestamp formatting.
Version 0.0.6 (2017-08-22)
``````````````````````````
- Fix bug in timestamp formatting.
- Add note about comments.
Version 0.0.5 (2017-08-18)
``````````````````````````
- Fix bug where ``dumps`` fails for a ``tuple``.
Version 0.0.4 (2017-08-15)
``````````````````````````
- Simplify integer types.
Version 0.0.3 (2017-08-09)
``````````````````````````
- Fixed bug where interpreter couldn't find the ``zish.antlr`` package in eggs.
- Removed a few superfluous escape sequences.
Version 0.0.2 (2017-08-05)
``````````````````````````
- Now uses RFC3339 for timestamps.
Version 0.0.1 (2017-08-03)
``````````````````````````
- Fix bug where an EOF could cause an infinite loop.
Version 0.0.0 (2017-08-01)
``````````````````````````
- First public release. Passes all the tests.
| zish | /zish-0.1.10.tar.gz/zish-0.1.10/README.rst | README.rst |
Read Carl Zeiss(r) ZISRAW (CZI) files
=====================================
CZI is the native image file format of the ZEN(r) software by Carl Zeiss
Microscopy GmbH. It stores multidimensional images and metadata from
microscopy experiments.
:Author:
`Christoph Gohlke <https://www.lfd.uci.edu/~gohlke/>`_
:Organization:
Laboratory for Fluorescence Dynamics. University of California, Irvine
:Version: 2018.8.29
Requirements
------------
* `CPython 2.7 or 3.5+ <https://www.python.org>`_
* `Numpy 1.14 <https://www.numpy.org>`_
* `Scipy 1.1 <https://www.scipy.org>`_
* `Tifffile 2018.8.29 <https://www.lfd.uci.edu/~gohlke/>`_
* `Imagecodecs 2018.8.29 <https://www.lfd.uci.edu/~gohlke/>`_
Revisions
---------
2018.8.29
Move czifile.py and related modules into zisraw package.
Move usage examples to main docstring.
Require imagecodecs package for decoding JpegXrFile, JpgFile, and LZW.
2018.6.18
Save CZI metadata to TIFF description in czi2tif.
Fix AttributeError using max_workers=1.
Make Segment.SID and DimensionEntryDV1.dimension str types.
Return metadata as XML unicode string or dict, not etree.
Return timestamps, focus positions, events, and luts as tuple or ndarray
2017.7.21
Use multi-threading in CziFile.asarray to decode and copy segment data.
Always convert BGR to RGB. Remove bgr2rgb options.
Decode JpegXR directly from byte arrays.
2017.7.13
Add function to convert CZI file to memory-mappable TIFF file.
2017.7.11
Add 'out' parameter to CziFile.asarray.
Remove memmap option from CziFile.asarray (backwards incompatible).
Change spline interpolation order to 0 (backwards incompatible).
Make axes return a string.
Require tifffile 2017.7.11.
2014.10.10
Read data into a memory mapped array (optional).
2013.12.4
Decode JpegXrFile and JpgFile via _czifile extension module.
Attempt to reconstruct tiled mosaic images.
2013.11.20
Initial release.
Notes
-----
The API is not stable yet and might change between revisions.
Python 2.7 and 3.4 are deprecated.
The file format design specification [1] is confidential and the licence
agreement does not permit writing data into CZI files.
Only a subset of the 2016 specification is implemented. Specifically,
multifile images, image pyramids, and topography images are not yet supported.
Tested on Windows with a few example files only.
References
----------
(1) ZISRAW (CZI) File Format Design specification Release Version 1.2.2.
CZI 07-2016/CZI-DOC ZEN 2.3/DS_ZISRAW-FileFormat.pdf (confidential).
Documentation can be requested at
<http://microscopy.zeiss.com/microscopy/en_us/downloads/zen.html>
(2) CZI The File Format for the Microscope | ZEISS International
<http://microscopy.zeiss.com/microscopy/en_us/products/microscope-software/
zen-2012/czi.html>
Examples
--------
Read image data from a CZI file as numpy array:
>>> image = imread('test.czi')
>>> image.shape
(3, 3, 3, 250, 200, 3)
>>> image[0, 0, 0, 0, 0]
array([10, 10, 10], dtype=uint8)
| zisraw | /zisraw-2018.8.29.tar.gz/zisraw-2018.8.29/README.rst | README.rst |
======
Zither
======
Command-line tool to pull raw depths and alt freqs from BAM file(s) based on an existing VCF, writing output as new VCF to stdout.
.. image:: https://travis-ci.org/umich-brcf-bioinf/Zither.svg?branch=develop
:target: https://travis-ci.org/umich-brcf-bioinf/Zither
:alt: Build Status
.. image:: https://coveralls.io/repos/umich-brcf-bioinf/zither/badge.svg?branch=develop&service=github
:target: https://coveralls.io/github/umich-brcf-bioinf/zither?branch=develop
:alt: Coverage Status
The official repository is at:
https://github.com/umich-brcf-bioinf/Zither
----------
Quickstart
----------
Read a single BAM file
======================
$ zither --bam examples/explicit_bam/Sample_X.bam examples/explicit_bam/input.vcf > output.vcf
Given a VCF and a BAM file, read positions in the input VCF and corresponding pileups
from Sample_X.bam.
Read a set of matched VCF sample names and BAM files
====================================================
$ zither examples/matching_names/input.vcf > output.vcf
Given a VCF and a collection of BAM files whose file names
match the VCF sample names, reads positions from the
input VCF and corresponding BAM pileups.
Explicitly map VCF sample names to BAM files
====================================================
$ zither --mapping_file=examples/mapping_files/mapping_file.txt examples/mapping_files/input.vcf > output.vcf
Given a VCF, a collection of BAMs, and a file that maps sample names to BAM paths,
reads positions from the input VCF and corresponding pileups
from the named BAM files.
The mapping file is a tab-separated text file where each line has a sample
name and the path to the corresponding BAM file. Paths to BAM files can be
absolute or relative; relative paths are resolved relative to the directory
that contains the mapping file.
====
Email [email protected] for support and questions.
UM BRCF Bioinformatics Core
| zither | /zither-0.2.tar.gz/zither-0.2/README.rst | README.rst |
# Zivid Python
Zivid Python is the official Python package for Zivid 3D cameras. Read more about Zivid at [zivid.com](https://www.zivid.com/).
[![Build Status][ci-badge]][ci-url] [![PyPI Package][pypi-badge]][pypi-url]
![Zivid Image][header-image]
---
*Contents:* **[Installation](#installation)** | **[Quick Start](#quick-start)** | **[Examples](#examples)** | **[Versioning](#versioning)** | **[License](#license)** | **[Support](#support)** | **[Test Matrix](#test-matrix)**
---
## Installation
### Dependencies
* [Python](https://www.python.org/) version 3.7 or higher
* [Zivid SDK][zivid-download-software-url] version 2.10.0 (see [here][zivid-software-installation-url] for help)
* [Compiler](doc/CompilerInstallation.md) with C++17 support
*Ubuntu users must install Python headers (`apt install python3-dev`) in addition to the regular `python3` package.*
*Windows users also need to make sure that the Zivid SDK installation folder is in system `PATH` before using the package, not only the terminal PATH variable. The default install location that should be added to system `PATH` is `C:\Program Files\Zivid\bin`.*
### Installing official version from PyPI using PIP
After having installed the latest Zivid SDK, the easiest way to install Zivid Python is to use PIP to fetch the latest official version from PyPI:
pip install zivid
Note:
> If you don't use the latest Zivid SDK version you need to manually specify the version. See [Versioning](#versioning).
Installation may take some time since the `setup.py` script will download additional dependencies and compile C++ source code in the background.
On some systems Python 3 `pip` is called `pip3`. In this guide we assume it is called `pip`. When using PIP version 19 or higher build dependencies are handled automatically.
#### Old PIP
If you are using a version of PIP older than version 19 please manually install the dependencies listed in [pyproject.toml](pyproject.toml) before installing zivid.
pip install <packages listed in pyproject.toml>
pip install zivid
### Installing from source
git clone <zivid-python clone URL>
cd zivid-python
pip install .
You may want to build Zivid Python against a different (but compatible) version of Zivid SDK. An example would be if Zivid SDK 2.1 was released but the official
Zivid Python still formally only supports SDK 2.0. Since all the features of the 2.0 API exist in the 2.1 API, Zivid Python can still be built with the new SDK
(but without wrapping the latest features). In order to achieve this, edit `setup.py` to target the new SDK version before doing `pip install .`. Note that
this option is considered experimental/unofficial.
## Quick Start
### Point cloud capture
To quickly capture a point cloud using default settings, run the following code:
import zivid
app = zivid.Application()
camera = app.connect_camera()
settings = zivid.Settings(acquisitions=[zivid.Settings.Acquisition()])
frame = camera.capture(settings)
frame.save("result.zdf")
Instead of using the API to define capture settings, it is also possible to load them from YML files that
have been exported from [Zivid Studio][zivid-studio-guide-url] or downloaded from the Zivid Knowledge Base
[settings library][zivid-two-standard-settings-url]. This can be done by providing the filesystem path to
such a file, for example:
settings = Settings.load("ZividTwo_Settings_2xHDR_Normal.yml")
frame = camera.capture(settings)
### Point cloud data access
Data can easily be accessed in the form of Numpy arrays:
import zivid
app = zivid.Application()
camera = app.connect_camera()
settings = zivid.Settings(acquisitions=[zivid.Settings.Acquisition()])
frame = camera.capture(settings)
xyz = frame.point_cloud().copy_data("xyz") # Get point coordinates as [Height,Width,3] float array
rgba = frame.point_cloud().copy_data("rgba") # Get point colors as [Height,Width,4] uint8 array
bgra = frame.point_cloud().copy_data("bgra") # Get point colors as [Height,Width,4] uint8 array
### Capture Assistant
Instead of manually adjusting settings, the Capture Assistant may be used to find the optimal settings for your scene:
import zivid
app = zivid.Application()
camera = app.connect_camera()
capture_assistant_params = zivid.capture_assistant.SuggestSettingsParameters()
settings = zivid.capture_assistant.suggest_settings(camera, capture_assistant_params)
frame = camera.capture(settings)
frame.save("result.zdf")
### Using camera emulation
If you do not have a camera, you can use the `FileCameraZivid2M70.zfc` file in the [Sample Data][zivid-download-sampledata-url] to emulate a camera.
import zivid
app = zivid.Application()
camera = app.create_file_camera("path/to/FileCameraZivid2M70.zfc")
settings = zivid.Settings(acquisitions=[zivid.Settings.Acquisition()])
frame = camera.capture(settings)
frame.save("result.zdf")
## Examples
Basic example programs can be found in the [samples](samples) directory.
Many more advanced example programs may be found in the separate
[zivid-python-samples](https://github.com/zivid/zivid-python-samples) repository.
## Versioning
This python module is using [PEP 440](https://www.python.org/dev/peps/pep-0440) for versioning. The features available in the module depend on the Zivid SDK version used when building the module. When updating this Python package it is *recommended* to also update to the latest [Zivid SDK][zivid-software-url]. Refer to the [Test Matrix](#test-matrix) for supported versions.
The version number of the Zivid Python module consists of six numbers. The first three numbers of the version form the [semantic version](https://semver.org/) of the code in this repository. The last three numbers are the version of the underlying Zivid SDK library used by the Python module.
To check which version of zivid-python corresponds to a specific version of Zivid SDK, check out [zivid-python-releases-url] or run `pip index versions zivid`.
### Version breakdown
Zivid SDK version = 1.4.1 (semantic version)
v v v
Zivid Python module version = 1.0.0.1.4.1
^ ^ ^
Wrapper code version = 1.0.0 (semantic version)
### PyPI
When installing using PIP it is possible to specify the required version. This can be useful if upgrading Zivid SDK is not desired, but you want to update Zivid Python.
#### Install latest version of Zivid Python using latest version of Zivid SDK
pip install zivid
#### Install version 1.0.0 of Zivid Python using latest version of Zivid SDK
pip install zivid==1.0.0
#### Install version 1.0.0 of Zivid Python using Zivid SDK version 1.4.0
pip install zivid==1.0.0.1.4.0
#### Install version 1.0.0 of Zivid Python using Zivid SDK version 1.3.0
pip install zivid==1.0.0.1.3.0
*Support for older versions of Zivid SDK will be discontinued when they are no longer compatible with latest version of the wrapper code.*
## License
This project is licensed, see the [LICENSE](LICENSE) file for details. The licenses of dependencies are listed [here](./licenses-dependencies).
## Support
Please visit [Zivid Knowledge Base][zivid-knowledge-base-url] for general information on using Zivid 3D cameras. If you cannot find a solution to your issue, please contact [email protected].
## Test matrix
| Operating System | Python version |
| :--------------- | :------------------------ |
| Ubuntu 23.04 | 3.11 |
| Ubuntu 22.10 | 3.10 |
| Ubuntu 22.04 | 3.10 |
| Ubuntu 20.04 | 3.8 |
| Fedora 37 | 3.11 |
| Fedora 36 | 3.10 |
| Fedora 35 | 3.10 |
| Windows 10 | 3.7, 3.8, 3.9, 3.10, 3.11 |
[header-image]: https://www.zivid.com/hubfs/softwarefiles/images/zivid-generic-github-header.png
[ci-badge]: https://img.shields.io/github/actions/workflow/status/zivid/zivid-python/main.yml?branch=master
[ci-url]: https://github.com/zivid/zivid-python/actions?query=workflow%3A%22Main+CI+workflow%22+branch%3Amaster
[pypi-badge]: https://img.shields.io/pypi/v/zivid.svg
[pypi-url]: https://pypi.org/project/zivid
[zivid-knowledge-base-url]: http://support.zivid.com
[zivid-software-installation-url]: https://support.zivid.com/latest/getting-started/software-installation.html
[zivid-download-software-url]: https://www.zivid.com/downloads
[zivid-download-sampledata-url]: https://support.zivid.com/en/latest/api-reference/samples/sample-data.html
[zivid-software-url]: http://www.zivid.com/software
[zivid-python-releases-url]: https://pypi.org/project/zivid/#history
[zivid-studio-guide-url]: https://support.zivid.com/en/latest/getting-started/studio-guide.html
[zivid-two-standard-settings-url]: https://support.zivid.com/en/latest/reference-articles/standard-acquisition-settings-zivid-two.html | zivid | /zivid-2.10.0.2.10.0.tar.gz/zivid-2.10.0.2.10.0/README.md | README.md |
# zivverscim
Zivver Python pip module to CRUD accounts dynamically.
This library enables the use of SCIM to talk to [Zivver](https://www.zivver.com)
## Requirements
You will need to get the API key from Zivver to allow REST calls.
1. Generate API Keys, go to this URL [Generate API key](https://app.zivver.com/organization/api-keys)
or follow these steps:
* In Zivver, go to Organization Settings -> API Keys

* Click on Generate API Keys


* Save the API Key, you will need the key to setup a connection to Zivver

2. Python >=3.6
3. Endpoint URLs to Zivver for CRUD actions.
* Example endpoint: https://app.zivver.com/api/scim/v2/Users
## Install
Install via pip
$: pip install zivverscim
## Testing
You should create an `.env` file, we already have added the `.env.dummy` file that you need to rename to `.env`.
Update the environment variables inside that file.
Clone this repo and install requirements:
$: git clone [email protected]:Adapta-dev/zivverscim.git
$: cd zivverscim
$: pip install -r requirements.txt
Install Zivverscim locally:
$: pip install -e .
Run the tests:
$: python tests/crud_accounts.py
## Exceptions
Use the custom `ZivverCRUDError` object to get the exception messages:
```python
ZivverCRUDError.get_message() # Returns the error message
ZivverCRUDError.get_sollution() # Returns the possible solution
```
## Create account
Before you do anything in Python with Zivver, you will need to import the Zivver library:
```python
from zivverscim import scim_connection_crud
# ...
```
create a new Zivver Scim Connection Object:
```python
zivver_scim_connection = scim_connection_crud.ZivverSCIMConnection(
external_oauth_token_value=self.external_oauth_token.token_value, # Generated API key
scim_api_create_url='https://app.zivver.com/api/scim/v2/Users/', # Endpoint URL from Zivver
scim_api_update_url='https://app.zivver.com/api/scim/v2/Users/', # Endpoint URL from Zivver
scim_api_get_url='https://app.zivver.com/api/scim/v2/Users/', # Endpoint URL from Zivver
scim_api_delete_url='https://app.zivver.com/api/scim/v2/Users/', # Endpoint URL from Zivver
)
```
You can use the `zivver_scim_connection` object to create new accounts:
```python
zivver_user_object = zivver_scim_connection.create_user_in_zivver(
first_name='John',
last_name='Doe',
nick_name='{} {}'.format('John', 'Doe'),
user_name='[email protected]',
zivver_account_key='[email protected]',
sso_connection=True, # Only if SSO is enabled
is_active=True # If the user should be active upon creation
)
print(zivver_user_object) # Prints a json representation of the object
```
You can also use `aliases` and `delegates` attributes to append those:
```python
zivver_user_object = zivver_scim_connection.create_user_in_zivver(
# ...
aliases=['[email protected]'], # Alias for current user
delegates=['[email protected]'] # Delegate access for other users
)
```
## Reference
Create accounts:
```python
zivver_user_object = zivver_scim_connection.create_user_in_zivver(
first_name='John',
last_name='Doe',
nick_name='{} {}'.format('John', 'Doe'),
user_name='[email protected]',
zivver_account_key='[email protected]',
sso_connection=True, # Only if SSO is enabled
is_active=True, # If the user should be active upon creation
aliases=['[email protected]'], # Alias for current user
delegates=['[email protected]'] # Delegate access for other users
)
```
Update accounts:
```python
zivver_user_object = zivver_scim_connection.update_user_in_zivver(
account_id='12412412-4124124124-12412412412-124124412241',
first_name='John',
last_name='Doe',
nick_name='{} {}'.format('John', 'Doe'),
user_name='[email protected]',
zivver_account_key='[email protected]',
sso_connection=True, # Only if SSO is enabled
is_active=True, # If the user should be active upon creation
aliases=['[email protected]'], # Alias for current user
delegates=['[email protected]'] # Delegate access for other users
)
```
Get one account:
```python
zivver_user_object = zivver_scim_connection.get_user_from_zivver(account_id=zivver_user_object.account_id)
```
Get bulk accounts
```python
zivver_users_object = zivver_scim_connection.get_all_users_from_zivver()
```
Delete account
```python
zivver_scim_connection.delete_user_from_zivver(account_id=zivver_user_object.account_id)
```
### zivver_users_object
Zivver returns a `zivver_users_object` object containing the account information.
The most important one is the account_id, which you will need to update/get/delete the existing account.
The `account_id` is a UUID randomly generated by Zivver, so save it.
```python
class ZivverUser:
"""
ZivverUser Class object created from the ZivverUser create/update response
"""
def __init__(self, account_id=None, name_formatted=None, meta_created_at=None, meta_location=None,
meta_resource_type=None, phone_numbers=None, user_name=None, nick_name=None, is_active=False,
schemas=None, enterprise_user=None, zivver_scim_user_aliases=None, zivver_scim_user_delegates=None):
self.account_id = account_id
self.name_formatted = name_formatted
self.meta_created_at = meta_created_at
self.meta_location = meta_location
self.meta_resource_type = meta_resource_type
self.phone_numbers = phone_numbers
self.user_name = user_name
self.nick_name = nick_name
self.is_active = is_active
self.schemas = schemas
self.enterprise_user = enterprise_user
self.zivver_scim_user_aliases = zivver_scim_user_aliases
self.zivver_scim_user_delegates = zivver_scim_user_delegates
#...
```
# Contribution
Adapta welcomes any contributions to the open source ZivverSCIM library, so feel free to contribute.
## Issues
Feel free to submit issues and enhancement requests.
### Contributing (how to)
Please refer to each project's style and contribution guidelines for submitting patches and additions. In general, we follow the "fork-and-pull" Git workflow.
1. **Fork** the repo on GitHub
2. **Clone** the project to your own machine
3. **Commit** changes to your own branch
4. **Push** your work back up to your fork
5. Submit a **Pull request** so that we can review your changes
NOTE: Be sure to merge the latest from "upstream" before making a pull request!
## Copyright and Licensing
The Zivverscim library software is licensed under GNU GENERAL PUBLIC LICENSE V3.0
| zivverscim | /zivverscim-1.0.8.tar.gz/zivverscim-1.0.8/README.md | README.md |
import requests
class ZixiBroadcaster:
    """Thin HTTP client for the Zixi Broadcaster management API.

    Wraps the JSON endpoints for listing input/output streams, fetching
    per-stream statistics, and resetting statistics counters. All requests
    use HTTP basic auth with the supplied credentials.
    """

    def __init__(self, hostname, creds=None, secure=None, port=None):
        """Store connection settings and pre-build endpoint URL templates.

        :param hostname: host name or IP address of the broadcaster.
        :param creds: ``(user, password)`` tuple for HTTP basic auth;
            defaults to the factory credentials ``("admin", "1234")``.
        :param secure: only an explicit ``True`` selects HTTPS; ``None`` or
            any other value selects plain HTTP (matches original behavior).
        :param port: management port as a string; defaults to ``"4444"``.
        """
        self.hostname = hostname
        self.port = "4444" if port is None else port
        self.creds = ("admin", "1234") if creds is None else creds
        self.protocol = "https" if secure is True else "http"
        self.base_url = "{protocol}://{hostname}:{port}/".format(
            protocol=self.protocol, hostname=self.hostname, port=self.port)
        self.inputs_url = self.base_url + "zixi/streams.json"
        self.outputs_url = self.base_url + "zixi/outputs.json"
        self.reset_input_stats_url_template = self.base_url + "reset_input_stats.json?id={input_id}"
        self.input_stats_url_template = self.base_url + "input_stream_stats.json?id={input_id}"
        self.reset_output_stats_url_template = self.base_url + "reset_output_stream_stats.json?id={output_id}"
        self.output_stats_url_template = self.base_url + "output_stream_stats.json?id={output_id}"

    def get_inputs(self):
        """Return the list of input streams; raise on HTTP error."""
        inputs_response = requests.get(url=self.inputs_url, auth=self.creds)
        if inputs_response.status_code != requests.codes.ok:
            inputs_response.raise_for_status()
        return inputs_response.json()["streams"]

    def get_all_input_stats(self):
        """Return a list of statistics dicts, one per input stream."""
        input_stats = []
        for input in self.get_inputs():
            input_stats_response = requests.get(
                url=self.input_stats_url_template.format(input_id=input["id"]),
                auth=self.creds)
            if input_stats_response.status_code != requests.codes.ok:
                input_stats_response.raise_for_status()
            input_stats.append(input_stats_response.json())
        return input_stats

    def reset_input_stats(self, input_id):
        """Reset the statistics counters for one input stream.

        BUG FIX: the original formatted the URL with ``input["id"]``, where
        ``input`` is the Python builtin (the parameter was never used), so
        every call raised ``TypeError``. Now uses the ``input_id`` argument.
        """
        reset_response = requests.get(
            url=self.reset_input_stats_url_template.format(input_id=input_id),
            auth=self.creds)
        if reset_response.status_code != requests.codes.ok:
            reset_response.raise_for_status()

    def reset_all_input_stats(self):
        """Reset statistics for every input stream on the broadcaster."""
        for input in self.get_inputs():
            self.reset_input_stats(input_id=input["id"])

    def get_outputs(self, complete=None):
        """Return the list of output streams; raise on HTTP error.

        :param complete: any value other than ``None`` appends
            ``?complete=1`` to request the extended output listing
            (original semantics preserved: only ``None`` disables it).
        """
        if complete is None:
            outputs_response = requests.get(url=self.outputs_url, auth=self.creds)
        else:
            outputs_response = requests.get(url=self.outputs_url + "?complete=1", auth=self.creds)
        if outputs_response.status_code != requests.codes.ok:
            outputs_response.raise_for_status()
        return outputs_response.json()["outputs"]

    def get_output_stats(self, output_id):
        """Return statistics for one output stream; raise on HTTP error.

        Previously this was the only fetcher that skipped the status check;
        now consistent with the other endpoints.
        """
        output_stats_response = requests.get(
            url=self.output_stats_url_template.format(output_id=output_id),
            auth=self.creds)
        if output_stats_response.status_code != requests.codes.ok:
            output_stats_response.raise_for_status()
        return output_stats_response.json()

    def reset_output_stats(self, output_id):
        """Reset the statistics counters for one output stream."""
        reset_response = requests.get(
            url=self.reset_output_stats_url_template.format(output_id=output_id),
            auth=self.creds)
        if reset_response.status_code != requests.codes.ok:
            reset_response.raise_for_status()

    def reset_all_output_stats(self):
        """Reset statistics for every output stream on the broadcaster."""
        for output in self.get_outputs():
            self.reset_output_stats(output_id=output["id"])
# 紫竹张先生公众号备份(一)
## 下载
### Docker
```
docker pull apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
docker run -tid -p <port>:80 apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
# 访问 http://localhost:{port} 查看文档
```
### PYPI
```
pip install zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 <port>
# 访问 http://localhost:{port} 查看文档
```
### NPM
```
npm install -g zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 <port>
# 访问 http://localhost:{port} 查看文档
``` | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/README.md | README.md |
# 紫竹张先生公众号备份(一)
## 下载
### Docker
```
docker pull apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
docker run -tid -p <port>:80 apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
# 访问 http://localhost:{port} 查看文档
```
### PYPI
```
pip install zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 <port>
# 访问 http://localhost:{port} 查看文档
```
### NPM
```
npm install -g zizhu-zhangxiansheng-gongzhonggao-beifen-vol1
zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 <port>
# 访问 http://localhost:{port} 查看文档
``` | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/README.md | README.md |
!function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):b(a.RSVP=a.RSVP||{})}(this,function(a){"use strict";function b(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return c;return-1}function c(a){var b=a._promiseCallbacks;return b||(b=a._promiseCallbacks={}),b}function d(a,b){if(2!==arguments.length)return wa[a];wa[a]=b}function e(a){var b=typeof a;return null!==a&&("object"===b||"function"===b)}function f(a){return"function"==typeof a}function g(a){return null!==a&&"object"==typeof a}function h(a){return null!==a&&"object"==typeof a}function i(){setTimeout(function(){for(var a=0;a<Aa.length;a++){var b=Aa[a],c=b.payload;c.guid=c.key+c.id,c.childGuid=c.key+c.childId,c.error&&(c.stack=c.error.stack),wa.trigger(b.name,b.payload)}Aa.length=0},50)}function j(a,b,c){1===Aa.push({name:a,payload:{key:b._guidKey,id:b._id,eventName:a,detail:b._result,childId:c&&c._id,label:b._label,timeStamp:za(),error:wa["instrument-with-stack"]?new Error(b._label):null}})&&i()}function k(a,b){var c=this;if(a&&"object"==typeof a&&a.constructor===c)return a;var d=new c(m,b);return s(d,a),d}function l(){return new TypeError("A promises callback cannot return that same promise.")}function m(){}function n(a){try{return a.then}catch(a){return Ea.error=a,Ea}}function o(a,b,c,d){try{a.call(b,c,d)}catch(a){return a}}function p(a,b,c){wa.async(function(a){var d=!1,e=o(c,b,function(c){d||(d=!0,b!==c?s(a,c,void 0):u(a,c))},function(b){d||(d=!0,v(a,b))},"Settle: "+(a._label||" unknown promise"));!d&&e&&(d=!0,v(a,e))},a)}function q(a,b){b._state===Ca?u(a,b._result):b._state===Da?(b._onError=null,v(a,b._result)):w(b,void 0,function(c){b!==c?s(a,c,void 0):u(a,c)},function(b){return v(a,b)})}function r(a,b,c){b.constructor===a.constructor&&c===C&&a.constructor.resolve===k?q(a,b):c===Ea?(v(a,Ea.error),Ea.error=null):f(c)?p(a,b,c):u(a,b)}function s(a,b){a===b?u(a,b):e(b)?r(a,b,n(b)):u(a,b)}function 
t(a){a._onError&&a._onError(a._result),x(a)}function u(a,b){a._state===Ba&&(a._result=b,a._state=Ca,0===a._subscribers.length?wa.instrument&&j("fulfilled",a):wa.async(x,a))}function v(a,b){a._state===Ba&&(a._state=Da,a._result=b,wa.async(t,a))}function w(a,b,c,d){var e=a._subscribers,f=e.length;a._onError=null,e[f]=b,e[f+Ca]=c,e[f+Da]=d,0===f&&a._state&&wa.async(x,a)}function x(a){var b=a._subscribers,c=a._state;if(wa.instrument&&j(c===Ca?"fulfilled":"rejected",a),0!==b.length){for(var d=void 0,e=void 0,f=a._result,g=0;g<b.length;g+=3)d=b[g],e=b[g+c],d?A(c,d,e,f):e(f);a._subscribers.length=0}}function y(){this.error=null}function z(a,b){try{return a(b)}catch(a){return Fa.error=a,Fa}}function A(a,b,c,d){var e=f(c),g=void 0,h=void 0;if(e){if((g=z(c,d))===Fa)h=g.error,g.error=null;else if(g===b)return void v(b,l())}else g=d;b._state!==Ba||(e&&void 0===h?s(b,g):void 0!==h?v(b,h):a===Ca?u(b,g):a===Da&&v(b,g))}function B(a,b){var c=!1;try{b(function(b){c||(c=!0,s(a,b))},function(b){c||(c=!0,v(a,b))})}catch(b){v(a,b)}}function C(a,b,c){var d=this,e=d._state;if(e===Ca&&!a||e===Da&&!b)return wa.instrument&&j("chained",d,d),d;d._onError=null;var f=new d.constructor(m,c),g=d._result;if(wa.instrument&&j("chained",d,f),e===Ba)w(d,f,a,b);else{var h=e===Ca?a:b;wa.async(function(){return A(e,f,h,g)})}return f}function D(a,b,c){return a===Ca?{state:"fulfilled",value:c}:{state:"rejected",reason:c}}function E(a,b){return ya(a)?new Ga(this,a,!0,b).promise:this.reject(new TypeError("Promise.all must be called with an array"),b)}function F(a,b){var c=this,d=new c(m,b);if(!ya(a))return v(d,new TypeError("Promise.race must be called with an array")),d;for(var e=0;d._state===Ba&&e<a.length;e++)w(c.resolve(a[e]),void 0,function(a){return s(d,a)},function(a){return v(d,a)});return d}function G(a,b){var c=this,d=new c(m,b);return v(d,a),d}function H(){throw new TypeError("You must pass a resolver function as the first argument to the promise constructor")}function I(){throw new 
TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.")}function J(){this.value=void 0}function K(a){try{return a.then}catch(a){return Ka.value=a,Ka}}function L(a,b,c){try{a.apply(b,c)}catch(a){return Ka.value=a,Ka}}function M(a,b){for(var c={},d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=a[f];for(var g=0;g<b.length;g++){c[b[g]]=e[g+1]}return c}function N(a){for(var b=a.length,c=new Array(b-1),d=1;d<b;d++)c[d-1]=a[d];return c}function O(a,b){return{then:function(c,d){return a.call(b,c,d)}}}function P(a,b){var c=function(){for(var c=this,d=arguments.length,e=new Array(d+1),f=!1,g=0;g<d;++g){var h=arguments[g];if(!f){if((f=S(h))===La){var i=new Ja(m);return v(i,La.value),i}f&&!0!==f&&(h=O(f,h))}e[g]=h}var j=new Ja(m);return e[d]=function(a,c){a?v(j,a):void 0===b?s(j,c):!0===b?s(j,N(arguments)):ya(b)?s(j,M(arguments,b)):s(j,c)},f?R(j,e,a,c):Q(j,e,a,c)};return c.__proto__=a,c}function Q(a,b,c,d){var e=L(c,d,b);return e===Ka&&v(a,e.value),a}function R(a,b,c,d){return Ja.all(b).then(function(b){var e=L(c,d,b);return e===Ka&&v(a,e.value),a})}function S(a){return!(!a||"object"!=typeof a)&&(a.constructor===Ja||K(a))}function T(a,b){return Ja.all(a,b)}function U(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function V(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function W(a,b){return ya(a)?new Ma(Ja,a,b).promise:Ja.reject(new TypeError("Promise.allSettled must be called with an array"),b)}function X(a,b){return Ja.race(a,b)}function Y(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been 
called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function Z(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function $(a,b){return g(a)?new Oa(Ja,a,b).promise:Ja.reject(new TypeError("Promise.hash must be called with an object"),b)}function _(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function aa(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function ba(a,b){return g(a)?new Pa(Ja,a,!1,b).promise:Ja.reject(new TypeError("RSVP.hashSettled must be called with an object"),b)}function ca(a){throw setTimeout(function(){throw a}),a}function da(a){var b={resolve:void 0,reject:void 0};return b.promise=new Ja(function(a,c){b.resolve=a,b.reject=c},a),b}function ea(a,b,c){return ya(a)?f(b)?Ja.all(a,c).then(function(a){for(var d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=b(a[f]);return Ja.all(e,c)}):Ja.reject(new TypeError("RSVP.map expects a function as a second argument"),c):Ja.reject(new TypeError("RSVP.map must be called with an array"),c)}function fa(a,b){return Ja.resolve(a,b)}function ga(a,b){return Ja.reject(a,b)}function ha(a,b){return Ja.all(a,b)}function ia(a,b){return Ja.resolve(a,b).then(function(a){return ha(a,b)})}function ja(a,b,c){return ya(a)||g(a)&&void 0!==a.then?f(b)?(ya(a)?ha(a,c):ia(a,c)).then(function(a){for(var d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=b(a[f]);return ha(e,c).then(function(b){for(var c=new 
Array(d),e=0,f=0;f<d;f++)b[f]&&(c[e]=a[f],e++);return c.length=e,c})}):Ja.reject(new TypeError("RSVP.filter expects function as a second argument"),c):Ja.reject(new TypeError("RSVP.filter must be called with an array or promise"),c)}function ka(a,b){Xa[Qa]=a,Xa[Qa+1]=b,2===(Qa+=2)&&Ya()}function la(){var a=process.nextTick,b=process.versions.node.match(/^(?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)$/);return Array.isArray(b)&&"0"===b[1]&&"10"===b[2]&&(a=setImmediate),function(){return a(qa)}}function ma(){return void 0!==Ra?function(){Ra(qa)}:pa()}function na(){var a=0,b=new Ua(qa),c=document.createTextNode("");return b.observe(c,{characterData:!0}),function(){return c.data=a=++a%2}}function oa(){var a=new MessageChannel;return a.port1.onmessage=qa,function(){return a.port2.postMessage(0)}}function pa(){return function(){return setTimeout(qa,1)}}function qa(){for(var a=0;a<Qa;a+=2){(0,Xa[a])(Xa[a+1]),Xa[a]=void 0,Xa[a+1]=void 0}Qa=0}function ra(){try{var a=require,b=a("vertx");return Ra=b.runOnLoop||b.runOnContext,ma()}catch(a){return pa()}}function sa(a,b,c){return b in a?Object.defineProperty(a,b,{value:c,enumerable:!0,configurable:!0,writable:!0}):a[b]=c,a}function ta(){wa.on.apply(wa,arguments)}function ua(){wa.off.apply(wa,arguments)}var va={mixin:function(a){return a.on=this.on,a.off=this.off,a.trigger=this.trigger,a._promiseCallbacks=void 0,a},on:function(a,d){if("function"!=typeof d)throw new TypeError("Callback must be a function");var e=c(this),f=void 0;f=e[a],f||(f=e[a]=[]),-1===b(f,d)&&f.push(d)},off:function(a,d){var e=c(this),f=void 0,g=void 0;if(!d)return void(e[a]=[]);f=e[a],-1!==(g=b(f,d))&&f.splice(g,1)},trigger:function(a,b,d){var e=c(this),f=void 0;if(f=e[a])for(var g=0;g<f.length;g++)(0,f[g])(b,d)}},wa={instrument:!1};va.mixin(wa);var xa=void 0;xa=Array.isArray?Array.isArray:function(a){return"[object Array]"===Object.prototype.toString.call(a)};var ya=xa,za=Date.now||function(){return(new Date).getTime()},Aa=[],Ba=void 0,Ca=1,Da=2,Ea=new y,Fa=new 
y,Ga=function(){function a(a,b,c,d){this._instanceConstructor=a,this.promise=new a(m,d),this._abortOnReject=c,this._init.apply(this,arguments)}return a.prototype._init=function(a,b){var c=b.length||0;this.length=c,this._remaining=c,this._result=new Array(c),this._enumerate(b),0===this._remaining&&u(this.promise,this._result)},a.prototype._enumerate=function(a){for(var b=this.length,c=this.promise,d=0;c._state===Ba&&d<b;d++)this._eachEntry(a[d],d)},a.prototype._settleMaybeThenable=function(a,b){var c=this._instanceConstructor,d=c.resolve;if(d===k){var e=n(a);if(e===C&&a._state!==Ba)a._onError=null,this._settledAt(a._state,b,a._result);else if("function"!=typeof e)this._remaining--,this._result[b]=this._makeResult(Ca,b,a);else if(c===Ja){var f=new c(m);r(f,a,e),this._willSettleAt(f,b)}else this._willSettleAt(new c(function(b){return b(a)}),b)}else this._willSettleAt(d(a),b)},a.prototype._eachEntry=function(a,b){h(a)?this._settleMaybeThenable(a,b):(this._remaining--,this._result[b]=this._makeResult(Ca,b,a))},a.prototype._settledAt=function(a,b,c){var d=this.promise;d._state===Ba&&(this._abortOnReject&&a===Da?v(d,c):(this._remaining--,this._result[b]=this._makeResult(a,b,c),0===this._remaining&&u(d,this._result)))},a.prototype._makeResult=function(a,b,c){return c},a.prototype._willSettleAt=function(a,b){var c=this;w(a,void 0,function(a){return c._settledAt(Ca,b,a)},function(a){return c._settledAt(Da,b,a)})},a}(),Ha="rsvp_"+za()+"-",Ia=0,Ja=function(){function a(b,c){this._id=Ia++,this._label=c,this._state=void 0,this._result=void 0,this._subscribers=[],wa.instrument&&j("created",this),m!==b&&("function"!=typeof b&&H(),this instanceof a?B(this,b):I())}return a.prototype._onError=function(a){var b=this;wa.after(function(){b._onError&&wa.trigger("error",a,b._label)})},a.prototype.catch=function(a,b){return this.then(void 0,a,b)},a.prototype.finally=function(a,b){var c=this,d=c.constructor;return c.then(function(b){return d.resolve(a()).then(function(){return 
b})},function(b){return d.resolve(a()).then(function(){throw b})},b)},a}();Ja.cast=k,Ja.all=E,Ja.race=F,Ja.resolve=k,Ja.reject=G,Ja.prototype._guidKey=Ha,Ja.prototype.then=C;var Ka=new J,La=new J,Ma=function(a){function b(b,c,d){return U(this,a.call(this,b,c,!1,d))}return V(b,a),b}(Ga);Ma.prototype._makeResult=D;var Na=Object.prototype.hasOwnProperty,Oa=function(a){function b(b,c){var d=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],e=arguments[3];return Y(this,a.call(this,b,c,d,e))}return Z(b,a),b.prototype._init=function(a,b){this._result={},this._enumerate(b),0===this._remaining&&u(this.promise,this._result)},b.prototype._enumerate=function(a){var b=this.promise,c=[];for(var d in a)Na.call(a,d)&&c.push({position:d,entry:a[d]});var e=c.length;this._remaining=e;for(var f=void 0,g=0;b._state===Ba&&g<e;g++)f=c[g],this._eachEntry(f.entry,f.position)},b}(Ga),Pa=function(a){function b(b,c,d){return _(this,a.call(this,b,c,!1,d))}return aa(b,a),b}(Oa);Pa.prototype._makeResult=D;var Qa=0,Ra=void 0,Sa="undefined"!=typeof window?window:void 0,Ta=Sa||{},Ua=Ta.MutationObserver||Ta.WebKitMutationObserver,Va="undefined"==typeof self&&"undefined"!=typeof process&&"[object process]"==={}.toString.call(process),Wa="undefined"!=typeof Uint8ClampedArray&&"undefined"!=typeof importScripts&&"undefined"!=typeof MessageChannel,Xa=new Array(1e3),Ya=void 0;Ya=Va?la():Ua?na():Wa?oa():void 0===Sa&&"function"==typeof require?ra():pa();if("object"==typeof self)self;else{if("object"!=typeof global)throw new Error("no global: `self` or `global` found");global}var Za;wa.async=ka,wa.after=function(a){return setTimeout(a,0)};var $a=fa,_a=function(a,b){return wa.async(a,b)};if("undefined"!=typeof window&&"object"==typeof window.__PROMISE_INSTRUMENTATION__){var ab=window.__PROMISE_INSTRUMENTATION__;d("instrument",!0);for(var bb in ab)ab.hasOwnProperty(bb)&&ta(bb,ab[bb])}var 
cb=(Za={asap:ka,cast:$a,Promise:Ja,EventTarget:va,all:T,allSettled:W,race:X,hash:$,hashSettled:ba,rethrow:ca,defer:da,denodeify:P,configure:d,on:ta,off:ua,resolve:fa,reject:ga,map:ea},sa(Za,"async",_a),sa(Za,"filter",ja),Za);a.default=cb,a.asap=ka,a.cast=$a,a.Promise=Ja,a.EventTarget=va,a.all=T,a.allSettled=W,a.race=X,a.hash=$,a.hashSettled=ba,a.rethrow=ca,a.defer=da,a.denodeify=P,a.configure=d,a.on=ta,a.off=ua,a.resolve=fa,a.reject=ga,a.map=ea,a.async=_a,a.filter=ja,Object.defineProperty(a,"__esModule",{value:!0})});var EPUBJS=EPUBJS||{};EPUBJS.core={};var ELEMENT_NODE=1,TEXT_NODE=3,COMMENT_NODE=8,DOCUMENT_NODE=9;EPUBJS.core.getEl=function(a){return document.getElementById(a)},EPUBJS.core.getEls=function(a){return document.getElementsByClassName(a)},EPUBJS.core.request=function(a,b,c){var d,e=window.URL,f=e?"blob":"arraybuffer",g=new RSVP.defer,h=new XMLHttpRequest,i=XMLHttpRequest.prototype,j=function(){var a;this.readyState==this.DONE&&(200!==this.status&&0!==this.status||!this.response?g.reject({message:this.response,stack:(new Error).stack}):(a="xml"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"application/xml"):"xhtml"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"application/xhtml+xml"):"html"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"text/html"):"json"==b?JSON.parse(this.response):"blob"==b?e?this.response:new Blob([this.response]):this.response,g.resolve(a)))};return"overrideMimeType"in 
i||Object.defineProperty(i,"overrideMimeType",{value:function(a){}}),h.onreadystatechange=j,h.open("GET",a,!0),c&&(h.withCredentials=!0),b||(d=EPUBJS.core.uri(a),b=d.extension,b={htm:"html"}[b]||b),"blob"==b&&(h.responseType=f),"json"==b&&h.setRequestHeader("Accept","application/json"),"xml"==b&&(h.responseType="document",h.overrideMimeType("text/xml")),"xhtml"==b&&(h.responseType="document"),"html"==b&&(h.responseType="document"),"binary"==b&&(h.responseType="arraybuffer"),h.send(),g.promise},EPUBJS.core.toArray=function(a){var b=[];for(var c in a){var d;a.hasOwnProperty(c)&&(d=a[c],d.ident=c,b.push(d))}return b},EPUBJS.core.uri=function(a){var b,c,d,e={protocol:"",host:"",path:"",origin:"",directory:"",base:"",filename:"",extension:"",fragment:"",href:a},f=a.indexOf("blob:"),g=a.indexOf("://"),h=a.indexOf("?"),i=a.indexOf("#");return 0===f?(e.protocol="blob",e.base=a.indexOf(0,i),e):(-1!=i&&(e.fragment=a.slice(i+1),a=a.slice(0,i)),-1!=h&&(e.search=a.slice(h+1),a=a.slice(0,h),href=e.href),-1!=g?(e.protocol=a.slice(0,g),b=a.slice(g+3),d=b.indexOf("/"),-1===d?(e.host=e.path,e.path=""):(e.host=b.slice(0,d),e.path=b.slice(d)),e.origin=e.protocol+"://"+e.host,e.directory=EPUBJS.core.folder(e.path),e.base=e.origin+e.directory):(e.path=a,e.directory=EPUBJS.core.folder(a),e.base=e.directory),e.filename=a.replace(e.base,""),c=e.filename.lastIndexOf("."),-1!=c&&(e.extension=e.filename.slice(c+1)),e)},EPUBJS.core.folder=function(a){var b=a.lastIndexOf("/");if(-1==b);return a.slice(0,b+1)},EPUBJS.core.dataURLToBlob=function(a){var b,c,d,e,f,g=";base64,";if(-1==a.indexOf(g))return b=a.split(","),c=b[0].split(":")[1],d=b[1],new Blob([d],{type:c});b=a.split(g),c=b[0].split(":")[1],d=window.atob(b[1]),e=d.length,f=new Uint8Array(e);for(var h=0;h<e;++h)f[h]=d.charCodeAt(h);return new Blob([f],{type:c})},EPUBJS.core.addScript=function(a,b,c){var 
d,e;e=!1,d=document.createElement("script"),d.type="text/javascript",d.async=!1,d.src=a,d.onload=d.onreadystatechange=function(){e||this.readyState&&"complete"!=this.readyState||(e=!0,b&&b())},c=c||document.body,c.appendChild(d)},EPUBJS.core.addScripts=function(a,b,c){var d=a.length,e=0,f=function(){e++,d==e?b&&b():EPUBJS.core.addScript(a[e],f,c)};EPUBJS.core.addScript(a[e],f,c)},EPUBJS.core.addCss=function(a,b,c){var d,e;e=!1,d=document.createElement("link"),d.type="text/css",d.rel="stylesheet",d.href=a,d.onload=d.onreadystatechange=function(){e||this.readyState&&"complete"!=this.readyState||(e=!0,b&&b())},c=c||document.body,c.appendChild(d)},EPUBJS.core.prefixed=function(a){var b=["Webkit","Moz","O","ms"],c=a[0].toUpperCase()+a.slice(1),d=b.length;if(void 0!==document.documentElement.style[a])return a;for(var e=0;e<d;e++)if(void 0!==document.documentElement.style[b[e]+c])return b[e]+c;return a},EPUBJS.core.resolveUrl=function(a,b){var c,d,e=[],f=EPUBJS.core.uri(b),g=a.split("/");return f.host?b:(g.pop(),d=b.split("/"),d.forEach(function(a){".."===a?g.pop():e.push(a)}),c=g.concat(e),c.join("/"))},EPUBJS.core.uuid=function(){var a=(new Date).getTime();return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,function(b){var c=(a+16*Math.random())%16|0;return a=Math.floor(a/16),("x"==b?c:7&c|8).toString(16)})},EPUBJS.core.insert=function(a,b,c){var d=EPUBJS.core.locationOf(a,b,c);return b.splice(d,0,a),d},EPUBJS.core.locationOf=function(a,b,c,d,e){var f,g=d||0,h=e||b.length,i=parseInt(g+(h-g)/2);return c||(c=function(a,b){return a>b?1:a<b?-1:(a=b)?0:void 0}),h-g<=0?i:(f=c(b[i],a),h-g==1?f>0?i:i+1:0===f?i:-1===f?EPUBJS.core.locationOf(a,b,c,i,h):EPUBJS.core.locationOf(a,b,c,g,i))},EPUBJS.core.indexOfSorted=function(a,b,c,d,e){var f,g=d||0,h=e||b.length,i=parseInt(g+(h-g)/2);return c||(c=function(a,b){return a>b?1:a<b?-1:(a=b)?0:void 
0}),h-g<=0?-1:(f=c(b[i],a),h-g==1?0===f?i:-1:0===f?i:-1===f?EPUBJS.core.indexOfSorted(a,b,c,i,h):EPUBJS.core.indexOfSorted(a,b,c,g,i))},EPUBJS.core.queue=function(a){var b=[],c=a,d=function(a,c,d){return b.push({funcName:a,args:c,context:d}),b},e=function(){var a;b.length&&(a=b.shift(),c[a.funcName].apply(a.context||c,a.args))};return{enqueue:d,dequeue:e,flush:function(){for(;b.length;)e()},clear:function(){b=[]},length:function(){return b.length}}},EPUBJS.core.getElementXPath=function(a){return a&&a.id?'//*[@id="'+a.id+'"]':EPUBJS.core.getElementTreeXPath(a)},EPUBJS.core.getElementTreeXPath=function(a){var b,c,d,e,f=[],g="http://www.w3.org/1999/xhtml"===a.ownerDocument.documentElement.getAttribute("xmlns");for(a.nodeType===Node.TEXT_NODE&&(b=EPUBJS.core.indexOfTextNode(a)+1,f.push("text()["+b+"]"),a=a.parentNode);a&&1==a.nodeType;a=a.parentNode){b=0;for(var h=a.previousSibling;h;h=h.previousSibling)h.nodeType!=Node.DOCUMENT_TYPE_NODE&&h.nodeName==a.nodeName&&++b;c=a.nodeName.toLowerCase(),d=g?"xhtml:"+c:c,e=b?"["+(b+1)+"]":"",f.splice(0,0,d+e)}return f.length?"./"+f.join("/"):null},EPUBJS.core.nsResolver=function(a){return{xhtml:"http://www.w3.org/1999/xhtml",epub:"http://www.idpf.org/2007/ops"}[a]||null},EPUBJS.core.cleanStringForXpath=function(a){var b=a.match(/[^'"]+|['"]/g);return b=b.map(function(a){return"'"===a?'"\'"':'"'===a?"'\"'":"'"+a+"'"}),"concat('',"+b.join(",")+")"},EPUBJS.core.indexOfTextNode=function(a){for(var b,c=a.parentNode,d=c.childNodes,e=-1,f=0;f<d.length&&(b=d[f],b.nodeType===Node.TEXT_NODE&&e++,b!=a);f++);return e},EPUBJS.core.defaults=function(a){for(var b=1,c=arguments.length;b<c;b++){var d=arguments[b];for(var e in d)void 0===a[e]&&(a[e]=d[e])}return a},EPUBJS.core.extend=function(a){return[].slice.call(arguments,1).forEach(function(b){b&&Object.getOwnPropertyNames(b).forEach(function(c){Object.defineProperty(a,c,Object.getOwnPropertyDescriptor(b,c))})}),a},EPUBJS.core.clone=function(a){return 
EPUBJS.core.isArray(a)?a.slice():EPUBJS.core.extend({},a)},EPUBJS.core.isElement=function(a){return!(!a||1!=a.nodeType)},EPUBJS.core.isNumber=function(a){return!isNaN(parseFloat(a))&&isFinite(a)},EPUBJS.core.isString=function(a){return"string"==typeof a||a instanceof String},EPUBJS.core.isArray=Array.isArray||function(a){return"[object Array]"===Object.prototype.toString.call(a)},EPUBJS.core.values=function(a){var b,c,d,e=-1;if(!a)return[];for(b=Object.keys(a),c=b.length,d=Array(c);++e<c;)d[e]=a[b[e]];return d},EPUBJS.core.indexOfNode=function(a,b){for(var c,d=a.parentNode,e=d.childNodes,f=-1,g=0;g<e.length&&(c=e[g],c.nodeType===b&&f++,c!=a);g++);return f},EPUBJS.core.indexOfTextNode=function(a){return EPUBJS.core.indexOfNode(a,TEXT_NODE)},EPUBJS.core.indexOfElementNode=function(a){return EPUBJS.core.indexOfNode(a,ELEMENT_NODE)};var EPUBJS=EPUBJS||{};EPUBJS.reader={},EPUBJS.reader.plugins={},function(a,b){var c=(a.ePubReader,a.ePubReader=function(a,b){return new EPUBJS.Reader(a,b)});"function"==typeof define&&define.amd?define(function(){return Reader}):"undefined"!=typeof module&&module.exports&&(module.exports=c)}(window,jQuery),EPUBJS.Reader=function(a,b){var c,d,e,f=this,g=$("#viewer"),h=window.location.search;this.settings=EPUBJS.core.defaults(b||{},{bookPath:a,restore:!1,reload:!1,bookmarks:void 0,annotations:void 0,contained:void 0,bookKey:void 0,styles:void 0,sidebarReflow:!1,generatePagination:!1,history:!0}),h&&(e=h.slice(1).split("&"),e.forEach(function(a){var b=a.split("="),c=b[0],d=b[1]||"";f.settings[c]=decodeURIComponent(d)})),this.setBookKey(this.settings.bookPath),this.settings.restore&&this.isSaved()&&this.applySavedSettings(),this.settings.styles=this.settings.styles||{fontSize:"100%"},this.book=c=new 
ePub(this.settings.bookPath,this.settings),this.offline=!1,this.sidebarOpen=!1,this.settings.bookmarks||(this.settings.bookmarks=[]),this.settings.annotations||(this.settings.annotations=[]),this.settings.generatePagination&&c.generatePagination(g.width(),g.height()),this.rendition=c.renderTo("viewer",{ignoreClass:"annotator-hl",width:"100%",height:"100%"}),this.settings.previousLocationCfi?this.displayed=this.rendition.display(this.settings.previousLocationCfi):this.displayed=this.rendition.display(),c.ready.then(function(){f.ReaderController=EPUBJS.reader.ReaderController.call(f,c),f.SettingsController=EPUBJS.reader.SettingsController.call(f,c),f.ControlsController=EPUBJS.reader.ControlsController.call(f,c),f.SidebarController=EPUBJS.reader.SidebarController.call(f,c),f.BookmarksController=EPUBJS.reader.BookmarksController.call(f,c),f.NotesController=EPUBJS.reader.NotesController.call(f,c),window.addEventListener("hashchange",this.hashChanged.bind(this),!1),document.addEventListener("keydown",this.adjustFontSize.bind(this),!1),this.rendition.on("keydown",this.adjustFontSize.bind(this)),this.rendition.on("keydown",f.ReaderController.arrowKeys.bind(this)),this.rendition.on("selected",this.selectedRange.bind(this))}.bind(this)).then(function(){f.ReaderController.hideLoader()}.bind(this));for(d in EPUBJS.reader.plugins)EPUBJS.reader.plugins.hasOwnProperty(d)&&(f[d]=EPUBJS.reader.plugins[d].call(f,c));return c.loaded.metadata.then(function(a){f.MetaController=EPUBJS.reader.MetaController.call(f,a)}),c.loaded.navigation.then(function(a){f.TocController=EPUBJS.reader.TocController.call(f,a)}),window.addEventListener("beforeunload",this.unload.bind(this),!1),this},EPUBJS.Reader.prototype.adjustFontSize=function(a){var 
b,c=2,d=a.ctrlKey||a.metaKey;this.settings.styles&&(this.settings.styles.fontSize||(this.settings.styles.fontSize="100%"),b=parseInt(this.settings.styles.fontSize.slice(0,-1)),d&&187==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize",b+c+"%")),d&&189==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize",b-c+"%")),d&&48==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize","100%")))},EPUBJS.Reader.prototype.addBookmark=function(a){this.isBookmarked(a)>-1||(this.settings.bookmarks.push(a),this.trigger("reader:bookmarked",a))},EPUBJS.Reader.prototype.removeBookmark=function(a){var b=this.isBookmarked(a);-1!==b&&(this.settings.bookmarks.splice(b,1),this.trigger("reader:unbookmarked",b))},EPUBJS.Reader.prototype.isBookmarked=function(a){return this.settings.bookmarks.indexOf(a)},EPUBJS.Reader.prototype.clearBookmarks=function(){this.settings.bookmarks=[]},EPUBJS.Reader.prototype.addNote=function(a){this.settings.annotations.push(a)},EPUBJS.Reader.prototype.removeNote=function(a){var b=this.settings.annotations.indexOf(a);-1!==b&&delete this.settings.annotations[b]},EPUBJS.Reader.prototype.clearNotes=function(){this.settings.annotations=[]},EPUBJS.Reader.prototype.setBookKey=function(a){return this.settings.bookKey||(this.settings.bookKey="epubjsreader:"+EPUBJS.VERSION+":"+window.location.host+":"+a),this.settings.bookKey},EPUBJS.Reader.prototype.isSaved=function(a){return!!localStorage&&null!==localStorage.getItem(this.settings.bookKey)},EPUBJS.Reader.prototype.removeSavedSettings=function(){if(!localStorage)return!1;localStorage.removeItem(this.settings.bookKey)},EPUBJS.Reader.prototype.applySavedSettings=function(){var 
a;if(!localStorage)return!1;try{a=JSON.parse(localStorage.getItem(this.settings.bookKey))}catch(a){return!1}return!!a&&(a.styles&&(this.settings.styles=EPUBJS.core.defaults(this.settings.styles||{},a.styles)),this.settings=EPUBJS.core.defaults(this.settings,a),!0)},EPUBJS.Reader.prototype.saveSettings=function(){if(this.book&&(this.settings.previousLocationCfi=this.rendition.currentLocation().start.cfi),!localStorage)return!1;localStorage.setItem(this.settings.bookKey,JSON.stringify(this.settings))},EPUBJS.Reader.prototype.unload=function(){this.settings.restore&&localStorage&&this.saveSettings()},EPUBJS.Reader.prototype.hashChanged=function(){var a=window.location.hash.slice(1);this.rendition.display(a)},EPUBJS.Reader.prototype.selectedRange=function(a){var b="#"+a;this.settings.history&&window.location.hash!=b&&(history.pushState({},"",b),this.currentLocationCfi=a)},RSVP.EventTarget.mixin(EPUBJS.Reader.prototype),EPUBJS.reader.BookmarksController=function(){var a=this.book,b=this.rendition,c=$("#bookmarksView"),d=c.find("#bookmarks"),e=document.createDocumentFragment(),f=function(){c.show()},g=function(){c.hide()},h=0,i=function(c){var d=document.createElement("li"),e=document.createElement("a");d.id="bookmark-"+h,d.classList.add("list_item");var f,g=a.spine.get(c);return g.index in a.navigation.toc?(f=a.navigation.toc[g.index],e.textContent=f.label):e.textContent=c,e.href=c,e.classList.add("bookmark_link"),e.addEventListener("click",function(a){var c=this.getAttribute("href");b.display(c),a.preventDefault()},!1),d.appendChild(e),h++,d};return this.settings.bookmarks.forEach(function(a){var b=i(a);e.appendChild(b)}),d.append(e),this.on("reader:bookmarked",function(a){var b=i(a);d.append(b)}),this.on("reader:unbookmarked",function(a){$("#bookmark-"+a).remove()}),{show:f,hide:g}},EPUBJS.reader.ControlsController=function(a){var 
b=this,c=this.rendition,d=($("#store"),$("#fullscreen")),e=($("#fullscreenicon"),$("#cancelfullscreenicon"),$("#slider")),f=($("#main"),$("#sidebar"),$("#setting")),g=$("#bookmark");return e.on("click",function(){b.sidebarOpen?(b.SidebarController.hide(),e.addClass("icon-menu"),e.removeClass("icon-right")):(b.SidebarController.show(),e.addClass("icon-right"),e.removeClass("icon-menu"))}),"undefined"!=typeof screenfull&&(d.on("click",function(){screenfull.toggle($("#container")[0])}),screenfull.raw&&document.addEventListener(screenfull.raw.fullscreenchange,function(){fullscreen=screenfull.isFullscreen,fullscreen?d.addClass("icon-resize-small").removeClass("icon-resize-full"):d.addClass("icon-resize-full").removeClass("icon-resize-small")})),f.on("click",function(){b.SettingsController.show()}),g.on("click",function(){var a=b.rendition.currentLocation().start.cfi;-1===b.isBookmarked(a)?(b.addBookmark(a),g.addClass("icon-bookmark").removeClass("icon-bookmark-empty")):(b.removeBookmark(a),g.removeClass("icon-bookmark").addClass("icon-bookmark-empty"))}),c.on("relocated",function(a){var c=a.start.cfi,d="#"+c;-1===b.isBookmarked(c)?g.removeClass("icon-bookmark").addClass("icon-bookmark-empty"):g.addClass("icon-bookmark").removeClass("icon-bookmark-empty"),b.currentLocationCfi=c,b.settings.history&&window.location.hash!=d&&history.pushState({},"",d)}),{}},EPUBJS.reader.MetaController=function(a){var b=a.title,c=a.creator,d=$("#book-title"),e=$("#chapter-title"),f=$("#title-seperator");document.title=b+" – "+c,d.html(b),e.html(c),f.show()},EPUBJS.reader.NotesController=function(){var a=this.book,b=this.rendition,c=this,d=$("#notesView"),e=$("#notes"),f=$("#note-text"),g=$("#note-anchor"),h=c.settings.annotations,i=a.renderer,j=[],k=new ePub.CFI,l=function(){d.show()},m=function(){d.hide()},n=function(d){var 
e,h,i,j,l,m=a.renderer.doc;if(m.caretPositionFromPoint?(e=m.caretPositionFromPoint(d.clientX,d.clientY),h=e.offsetNode,i=e.offset):m.caretRangeFromPoint&&(e=m.caretRangeFromPoint(d.clientX,d.clientY),h=e.startContainer,i=e.startOffset),3!==h.nodeType)for(var q=0;q<h.childNodes.length;q++)if(3==h.childNodes[q].nodeType){h=h.childNodes[q];break}i=h.textContent.indexOf(".",i),-1===i?i=h.length:i+=1,j=k.generateCfiFromTextNode(h,i,a.renderer.currentChapter.cfiBase),l={annotatedAt:new Date,anchor:j,body:f.val()},c.addNote(l),o(l),p(l),f.val(""),g.text("Attach"),f.prop("disabled",!1),b.off("click",n)},o=function(a){var c=document.createElement("li"),d=document.createElement("a");c.innerHTML=a.body,d.innerHTML=" context »",d.href="#"+a.anchor,d.onclick=function(){return b.display(a.anchor),!1},c.appendChild(d),e.append(c)},p=function(b){var c=a.renderer.doc,d=document.createElement("span"),e=document.createElement("a");d.classList.add("footnotesuperscript","reader_generated"),d.style.verticalAlign="super",d.style.fontSize=".75em",d.style.lineHeight="1em",e.style.padding="2px",e.style.backgroundColor="#fffa96",e.style.borderRadius="5px",e.style.cursor="pointer",d.id="note-"+EPUBJS.core.uuid(),e.innerHTML=h.indexOf(b)+1+"[Reader]",d.appendChild(e),k.addMarker(b.anchor,c,d),q(d,b.body)},q=function(a,d){var e=a.id,f=function(){var 
c,f,l,m,n=i.height,o=i.width,p=225;j[e]||(j[e]=document.createElement("div"),j[e].setAttribute("class","popup"),pop_content=document.createElement("div"),j[e].appendChild(pop_content),pop_content.innerHTML=d,pop_content.setAttribute("class","pop_content"),i.render.document.body.appendChild(j[e]),j[e].addEventListener("mouseover",g,!1),j[e].addEventListener("mouseout",h,!1),b.on("locationChanged",k,this),b.on("locationChanged",h,this)),c=j[e],f=a.getBoundingClientRect(),l=f.left,m=f.top,c.classList.add("show"),popRect=c.getBoundingClientRect(),c.style.left=l-popRect.width/2+"px",c.style.top=m+"px",p>n/2.5&&(p=n/2.5,pop_content.style.maxHeight=p+"px"),popRect.height+m>=n-25?(c.style.top=m-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),l-popRect.width<=0?(c.style.left=l+"px",c.classList.add("left")):c.classList.remove("left"),l+popRect.width/2>=o?(c.style.left=l-300+"px",popRect=c.getBoundingClientRect(),c.style.left=l-popRect.width+"px",popRect.height+m>=n-25?(c.style.top=m-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),c.classList.add("right")):c.classList.remove("right")},g=function(){j[e].classList.add("on")},h=function(){j[e].classList.remove("on")},k=function(){setTimeout(function(){j[e].classList.remove("show")},100)},m=function(){c.ReaderController.slideOut(),l()};a.addEventListener("mouseover",f,!1),a.addEventListener("mouseout",k,!1),a.addEventListener("click",m,!1)};return g.on("click",function(a){g.text("Cancel"),f.prop("disabled","true"),b.on("click",n)}),h.forEach(function(a){o(a)}),{show:l,hide:m}},EPUBJS.reader.ReaderController=function(a){var b=$("#main"),c=$("#divider"),d=$("#loader"),e=$("#next"),f=$("#prev"),g=this,a=this.book,h=this.rendition,i=function(){h.currentLocation().start.cfi;g.settings.sidebarReflow?(b.removeClass("single"),b.one("transitionend",function(){h.resize()})):b.removeClass("closed")},j=function(){var 
a=h.currentLocation();if(a){a.start.cfi;g.settings.sidebarReflow?(b.addClass("single"),b.one("transitionend",function(){h.resize()})):b.addClass("closed")}},k=function(){d.show(),n()},l=function(){d.hide()},m=function(){c.addClass("show")},n=function(){c.removeClass("show")},o=!1,p=function(b){37==b.keyCode&&("rtl"===a.package.metadata.direction?h.next():h.prev(),f.addClass("active"),o=!0,setTimeout(function(){o=!1,f.removeClass("active")},100),b.preventDefault()),39==b.keyCode&&("rtl"===a.package.metadata.direction?h.prev():h.next(),e.addClass("active"),o=!0,setTimeout(function(){o=!1,e.removeClass("active")},100),b.preventDefault())};return document.addEventListener("keydown",p,!1),e.on("click",function(b){"rtl"===a.package.metadata.direction?h.prev():h.next(),b.preventDefault()}),f.on("click",function(b){"rtl"===a.package.metadata.direction?h.next():h.prev(),b.preventDefault()}),h.on("layout",function(a){!0===a.spread?m():n()}),h.on("relocated",function(a){a.atStart&&f.addClass("disabled"),a.atEnd&&e.addClass("disabled")}),{slideOut:j,slideIn:i,showLoader:k,hideLoader:l,showDivider:m,hideDivider:n,arrowKeys:p}},EPUBJS.reader.SettingsController=function(){var a=(this.book,this),b=$("#settings-modal"),c=$(".overlay"),d=function(){b.addClass("md-show")},e=function(){b.removeClass("md-show")};return $("#sidebarReflow").on("click",function(){a.settings.sidebarReflow=!a.settings.sidebarReflow}),b.find(".closer").on("click",function(){e()}),c.on("click",function(){e()}),{show:d,hide:e}},EPUBJS.reader.SidebarController=function(a){var b=this,c=$("#sidebar"),d=$("#panels"),e="Toc",f=function(a){var c=a+"Controller";e!=a&&void 0!==b[c]&&(b[e+"Controller"].hide(),b[c].show(),e=a,d.find(".active").removeClass("active"),d.find("#show-"+a).addClass("active"))},g=function(){return e},h=function(){b.sidebarOpen=!0,b.ReaderController.slideOut(),c.addClass("open")},i=function(){b.sidebarOpen=!1,b.ReaderController.slideIn(),c.removeClass("open")};return 
d.find(".show_view").on("click",function(a){var b=$(this).data("view");f(b),a.preventDefault()}),{show:h,hide:i,getActivePanel:g,changePanelTo:f}},EPUBJS.reader.TocController=function(a){var b=(this.book,this.rendition),c=$("#tocView"),d=document.createDocumentFragment(),e=!1,f=function(a,b){var c=document.createElement("ul");return b||(b=1),a.forEach(function(a){var d=document.createElement("li"),e=document.createElement("a");toggle=document.createElement("a");var g;d.id="toc-"+a.id,d.classList.add("list_item"),e.textContent=a.label,e.href=a.href,e.classList.add("toc_link"),d.appendChild(e),a.subitems&&a.subitems.length>0&&(b++,g=f(a.subitems,b),toggle.classList.add("toc_toggle"),d.insertBefore(toggle,e),d.appendChild(g)),c.appendChild(d)}),c},g=function(){c.show()},h=function(){c.hide()},i=function(a){var b=a.id,d=c.find("#toc-"+b),f=c.find(".currentChapter");c.find(".openChapter");d.length&&(d!=f&&d.has(e).length>0&&f.removeClass("currentChapter"),d.addClass("currentChapter"),d.parents("li").addClass("openChapter"))};b.on("renderered",i);var j=f(a);return d.appendChild(j),c.append(d),c.find(".toc_link").on("click",function(a){var d=this.getAttribute("href");a.preventDefault(),b.display(d),c.find(".currentChapter").addClass("openChapter").removeClass("currentChapter"),$(this).parent("li").addClass("currentChapter")}),c.find(".toc_toggle").on("click",function(a){var b=$(this).parent("li"),c=b.hasClass("openChapter");a.preventDefault(),c?b.removeClass("openChapter"):b.addClass("openChapter")}),{show:g,hide:h}}; | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/js/reader.min.js | reader.min.js |
EPUBJS.Hooks.register("beforeChapterDisplay").endnotes=function(a,b){var c=b.contents.querySelectorAll("a[href]"),d=Array.prototype.slice.call(c),e=EPUBJS.core.folder(location.pathname),f=(EPUBJS.cssPath,{});EPUBJS.core.addCss(EPUBJS.cssPath+"popup.css",!1,b.render.document.head),d.forEach(function(a){function c(){var c,h,n=b.height,o=b.width,p=225;m||(c=j.cloneNode(!0),m=c.querySelector("p")),f[i]||(f[i]=document.createElement("div"),f[i].setAttribute("class","popup"),pop_content=document.createElement("div"),f[i].appendChild(pop_content),pop_content.appendChild(m),pop_content.setAttribute("class","pop_content"),b.render.document.body.appendChild(f[i]),f[i].addEventListener("mouseover",d,!1),f[i].addEventListener("mouseout",e,!1),b.on("renderer:pageChanged",g,this),b.on("renderer:pageChanged",e,this)),c=f[i],h=a.getBoundingClientRect(),k=h.left,l=h.top,c.classList.add("show"),popRect=c.getBoundingClientRect(),c.style.left=k-popRect.width/2+"px",c.style.top=l+"px",p>n/2.5&&(p=n/2.5,pop_content.style.maxHeight=p+"px"),popRect.height+l>=n-25?(c.style.top=l-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),k-popRect.width<=0?(c.style.left=k+"px",c.classList.add("left")):c.classList.remove("left"),k+popRect.width/2>=o?(c.style.left=k-300+"px",popRect=c.getBoundingClientRect(),c.style.left=k-popRect.width+"px",popRect.height+l>=n-25?(c.style.top=l-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),c.classList.add("right")):c.classList.remove("right")}function d(){f[i].classList.add("on")}function e(){f[i].classList.remove("on")}function g(){setTimeout(function(){f[i].classList.remove("show")},100)}var 
h,i,j,k,l,m;"noteref"==a.getAttribute("epub:type")&&(h=a.getAttribute("href"),i=h.replace("#",""),j=b.render.document.getElementById(i),a.addEventListener("mouseover",c,!1),a.addEventListener("mouseout",g,!1))}),a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").mathml=function(a,b){if(b.currentChapter.manifestProperties.indexOf("mathml")!==-1){b.render.iframe.contentWindow.mathmlCallback=a;var c=document.createElement("script");c.type="text/x-mathjax-config",c.innerHTML=' MathJax.Hub.Register.StartupHook("End",function () { window.mathmlCallback(); }); MathJax.Hub.Config({jax: ["input/TeX","input/MathML","output/SVG"],extensions: ["tex2jax.js","mml2jax.js","MathEvents.js"],TeX: {extensions: ["noErrors.js","noUndefined.js","autoload-all.js"]},MathMenu: {showRenderer: false},menuSettings: {zoom: "Click"},messageStyle: "none"}); ',b.doc.body.appendChild(c),EPUBJS.core.addScript("http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML",null,b.doc.head)}else a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").smartimages=function(a,b){var c=b.contents.querySelectorAll("img"),d=Array.prototype.slice.call(c),e=b.height;if("reflowable"!=b.layoutSettings.layout)return void a();d.forEach(function(a){var c=function(){var 
c,d=a.getBoundingClientRect(),f=d.height,g=d.top,h=a.getAttribute("data-height"),i=h||f,j=Number(getComputedStyle(a,"").fontSize.match(/(\d*(\.\d*)?)px/)[1]),k=j?j/2:0;e=b.contents.clientHeight,g<0&&(g=0),a.style.maxWidth="100%",i+g>=e?(g<e/2?(c=e-g-k,a.style.maxHeight=c+"px",a.style.width="auto"):(i>e&&(a.style.maxHeight=e+"px",a.style.width="auto",d=a.getBoundingClientRect(),i=d.height),a.style.display="block",a.style.WebkitColumnBreakBefore="always",a.style.breakBefore="column"),a.setAttribute("data-height",c)):(a.style.removeProperty("max-height"),a.style.removeProperty("margin-top"))},d=function(){b.off("renderer:resized",c),b.off("renderer:chapterUnload",this)};a.addEventListener("load",c,!1),b.on("renderer:resized",c),b.on("renderer:chapterUnload",d),c()}),a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").transculsions=function(a,b){var c=b.contents.querySelectorAll("[transclusion]");Array.prototype.slice.call(c).forEach(function(a){function c(){j=g,k=h,j>chapter.colWidth&&(d=chapter.colWidth/j,j=chapter.colWidth,k*=d),f.width=j,f.height=k}var d,e=a.getAttribute("ref"),f=document.createElement("iframe"),g=a.getAttribute("width"),h=a.getAttribute("height"),i=a.parentNode,j=g,k=h;c(),b.listenUntil("renderer:resized","renderer:chapterUnloaded",c),f.src=e,i.replaceChild(f,a)}),a&&a()}; | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/js/hooks.min.js | hooks.min.js |
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(factory((global.RSVP = global.RSVP || {})));
}(this, (function (exports) { 'use strict';
function indexOf(callbacks, callback) {
for (var i = 0, l = callbacks.length; i < l; i++) {
if (callbacks[i] === callback) {
return i;
}
}
return -1;
}
function callbacksFor(object) {
var callbacks = object._promiseCallbacks;
if (!callbacks) {
callbacks = object._promiseCallbacks = {};
}
return callbacks;
}
/**
@class RSVP.EventTarget
*/
var EventTarget = {
/**
`RSVP.EventTarget.mixin` extends an object with EventTarget methods. For
Example:
```javascript
let object = {};
RSVP.EventTarget.mixin(object);
object.on('finished', function(event) {
// handle event
});
object.trigger('finished', { detail: value });
```
`EventTarget.mixin` also works with prototypes:
```javascript
let Person = function() {};
RSVP.EventTarget.mixin(Person.prototype);
let yehuda = new Person();
let tom = new Person();
yehuda.on('poke', function(event) {
console.log('Yehuda says OW');
});
tom.on('poke', function(event) {
console.log('Tom says OW');
});
yehuda.trigger('poke');
tom.trigger('poke');
```
@method mixin
@for RSVP.EventTarget
@private
@param {Object} object object to extend with EventTarget methods
*/
mixin: function (object) {
object['on'] = this['on'];
object['off'] = this['off'];
object['trigger'] = this['trigger'];
object._promiseCallbacks = undefined;
return object;
},
/**
Registers a callback to be executed when `eventName` is triggered
```javascript
object.on('event', function(eventInfo){
// handle the event
});
object.trigger('event');
```
@method on
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to listen for
@param {Function} callback function to be called when the event is triggered.
*/
on: function (eventName, callback) {
if (typeof callback !== 'function') {
throw new TypeError('Callback must be a function');
}
var allCallbacks = callbacksFor(this),
callbacks = void 0;
callbacks = allCallbacks[eventName];
if (!callbacks) {
callbacks = allCallbacks[eventName] = [];
}
if (indexOf(callbacks, callback) === -1) {
callbacks.push(callback);
}
},
/**
You can use `off` to stop firing a particular callback for an event:
```javascript
function doStuff() { // do stuff! }
object.on('stuff', doStuff);
object.trigger('stuff'); // doStuff will be called
// Unregister ONLY the doStuff callback
object.off('stuff', doStuff);
object.trigger('stuff'); // doStuff will NOT be called
```
If you don't pass a `callback` argument to `off`, ALL callbacks for the
event will not be executed when the event fires. For example:
```javascript
let callback1 = function(){};
let callback2 = function(){};
object.on('stuff', callback1);
object.on('stuff', callback2);
object.trigger('stuff'); // callback1 and callback2 will be executed.
object.off('stuff');
object.trigger('stuff'); // callback1 and callback2 will not be executed!
```
@method off
@for RSVP.EventTarget
@private
@param {String} eventName event to stop listening to
@param {Function} callback optional argument. If given, only the function
given will be removed from the event's callback queue. If no `callback`
argument is given, all callbacks will be removed from the event's callback
queue.
*/
off: function (eventName, callback) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
index = void 0;
if (!callback) {
allCallbacks[eventName] = [];
return;
}
callbacks = allCallbacks[eventName];
index = indexOf(callbacks, callback);
if (index !== -1) {
callbacks.splice(index, 1);
}
},
/**
Use `trigger` to fire custom events. For example:
```javascript
object.on('foo', function(){
console.log('foo event happened!');
});
object.trigger('foo');
// 'foo event happened!' logged to the console
```
You can also pass a value as a second argument to `trigger` that will be
passed as an argument to all event listeners for the event:
```javascript
object.on('foo', function(value){
console.log(value.name);
});
object.trigger('foo', { name: 'bar' });
// 'bar' logged to the console
```
@method trigger
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to be triggered
@param {*} options optional value to be passed to any event handlers for
the given `eventName`
*/
trigger: function (eventName, options, label) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
callback = void 0;
if (callbacks = allCallbacks[eventName]) {
// Don't cache the callbacks.length since it may grow
for (var i = 0; i < callbacks.length; i++) {
callback = callbacks[i];
callback(options, label);
}
}
}
};
var config = {
instrument: false
};
EventTarget['mixin'](config);
function configure(name, value) {
if (arguments.length === 2) {
config[name] = value;
} else {
return config[name];
}
}
function objectOrFunction(x) {
var type = typeof x;
return x !== null && (type === 'object' || type === 'function');
}
function isFunction(x) {
return typeof x === 'function';
}
function isObject(x) {
return x !== null && typeof x === 'object';
}
function isMaybeThenable(x) {
return x !== null && typeof x === 'object';
}
var _isArray = void 0;
if (Array.isArray) {
_isArray = Array.isArray;
} else {
_isArray = function (x) {
return Object.prototype.toString.call(x) === '[object Array]';
};
}
var isArray = _isArray;
// Date.now is not available in browsers < IE9
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/now#Compatibility
var now = Date.now || function () {
return new Date().getTime();
};
var queue = [];
function scheduleFlush() {
setTimeout(function () {
for (var i = 0; i < queue.length; i++) {
var entry = queue[i];
var payload = entry.payload;
payload.guid = payload.key + payload.id;
payload.childGuid = payload.key + payload.childId;
if (payload.error) {
payload.stack = payload.error.stack;
}
config['trigger'](entry.name, entry.payload);
}
queue.length = 0;
}, 50);
}
function instrument(eventName, promise, child) {
if (1 === queue.push({
name: eventName,
payload: {
key: promise._guidKey,
id: promise._id,
eventName: eventName,
detail: promise._result,
childId: child && child._id,
label: promise._label,
timeStamp: now(),
error: config["instrument-with-stack"] ? new Error(promise._label) : null
} })) {
scheduleFlush();
}
}
/**
`RSVP.Promise.resolve` returns a promise that will become resolved with the
passed `value`. It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
resolve(1);
});
promise.then(function(value){
// value === 1
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.resolve(1);
promise.then(function(value){
// value === 1
});
```
@method resolve
@static
@param {*} object value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
function resolve$1(object, label) {
/*jshint validthis:true */
var Constructor = this;
if (object && typeof object === 'object' && object.constructor === Constructor) {
return object;
}
var promise = new Constructor(noop, label);
resolve(promise, object);
return promise;
}
function withOwnPromise() {
return new TypeError('A promises callback cannot return that same promise.');
}
function noop() {}
var PENDING = void 0;
var FULFILLED = 1;
var REJECTED = 2;
var GET_THEN_ERROR = new ErrorObject();
function getThen(promise) {
try {
return promise.then;
} catch (error) {
GET_THEN_ERROR.error = error;
return GET_THEN_ERROR;
}
}
// Invoke a foreign `then` with the given handlers. Returns the thrown
// error if the call fails synchronously, undefined otherwise.
function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) {
  var caught;
  try {
    then$$1.call(value, fulfillmentHandler, rejectionHandler);
  } catch (e) {
    caught = e;
  }
  return caught;
}
// Adopt the state of a non-RSVP thenable by calling its `then` on the async
// queue. `sealed` guards against thenables that invoke their handlers more
// than once (or both resolve and reject): only the first settlement wins.
function handleForeignThenable(promise, thenable, then$$1) {
config.async(function (promise) {
var sealed = false;
// tryThen returns any error `then` threw synchronously (see tryThen).
var error = tryThen(then$$1, thenable, function (value) {
if (sealed) {
return;
}
sealed = true;
// A thenable that passes itself as its own value is fulfilled with
// itself rather than recursively resolved.
if (thenable !== value) {
resolve(promise, value, undefined);
} else {
fulfill(promise, value);
}
}, function (reason) {
if (sealed) {
return;
}
sealed = true;
reject(promise, reason);
}, 'Settle: ' + (promise._label || ' unknown promise'));
// `then` threw before either handler ran: reject with the thrown error.
if (!sealed && error) {
sealed = true;
reject(promise, error);
}
}, promise);
}
// Adopt the state of a thenable known to be one of our own promises,
// reading its internal state directly instead of going through `then`.
function handleOwnThenable(promise, thenable) {
  var state = thenable._state;
  if (state === FULFILLED) {
    fulfill(promise, thenable._result);
    return;
  }
  if (state === REJECTED) {
    // Adoption handles the rejection; disarm the unhandled hook.
    thenable._onError = null;
    reject(promise, thenable._result);
    return;
  }
  // Still pending: settle later, with the same self-value guard used for
  // foreign thenables.
  subscribe(thenable, undefined, function (value) {
    if (value === thenable) {
      fulfill(promise, value);
    } else {
      resolve(promise, value, undefined);
    }
  }, function (reason) {
    return reject(promise, reason);
  });
}
// Route resolution of a possibly-thenable value: fast path for our own
// unmodified promises, error path when reading `.then` threw, foreign
// adoption for other thenables, plain fulfillment otherwise.
function handleMaybeThenable(promise, maybeThenable, then$$1) {
  var sameConstructor = maybeThenable.constructor === promise.constructor;
  var stockBehavior = then$$1 === then && promise.constructor.resolve === resolve$1;
  if (sameConstructor && stockBehavior) {
    handleOwnThenable(promise, maybeThenable);
  } else if (then$$1 === GET_THEN_ERROR) {
    // Reading `.then` threw; reject with the captured error, then clear it.
    reject(promise, GET_THEN_ERROR.error);
    GET_THEN_ERROR.error = null;
  } else if (isFunction(then$$1)) {
    handleForeignThenable(promise, maybeThenable, then$$1);
  } else {
    fulfill(promise, maybeThenable);
  }
}
// Resolve `promise` with `value`: self-resolution and non-object values
// fulfill directly; objects/functions may be thenables and are inspected
// via their `then` property.
function resolve(promise, value) {
  if (promise === value || !objectOrFunction(value)) {
    fulfill(promise, value);
  } else {
    handleMaybeThenable(promise, value, getThen(value));
  }
}
// Fire the unhandled-rejection hook (if still armed), then deliver the
// rejection to subscribers. The hook is invoked as a method call so its
// `this` stays bound to the promise.
function publishRejection(promise) {
  var hookArmed = !!promise._onError;
  if (hookArmed) {
    promise._onError(promise._result);
  }
  publish(promise);
}
// Transition a pending promise to the fulfilled state with `value`.
// Settling is one-shot: already-settled promises are left untouched.
function fulfill(promise, value) {
  if (promise._state !== PENDING) {
    return;
  }
  promise._result = value;
  promise._state = FULFILLED;
  var subscriberCount = promise._subscribers.length;
  if (subscriberCount !== 0) {
    // Deliver to subscribers on the async queue.
    config.async(publish, promise);
  } else if (config.instrument) {
    instrument('fulfilled', promise);
  }
}
// Transition a pending promise to the rejected state with `reason`;
// rejection publication always goes through the async queue so the
// unhandled-rejection hook gets a chance to run.
function reject(promise, reason) {
  if (promise._state === PENDING) {
    promise._state = REJECTED;
    promise._result = reason;
    config.async(publishRejection, promise);
  }
}
// Register a settlement subscriber on `parent`. Subscribers live in a flat
// array in groups of three — [child, onFulfillment, onRejection] — indexed
// via the state constants (FULFILLED === 1, REJECTED === 2) so `publish`
// can select the right callback with `subscribers[i + settled]`.
function subscribe(parent, child, onFulfillment, onRejection) {
var subscribers = parent._subscribers;
var length = subscribers.length;
// Someone is now handling this promise; disarm the unhandled hook.
parent._onError = null;
subscribers[length] = child;
subscribers[length + FULFILLED] = onFulfillment;
subscribers[length + REJECTED] = onRejection;
// First subscriber on an already-settled promise schedules delivery.
if (length === 0 && parent._state) {
config.async(publish, parent);
}
}
// Deliver a settled promise's result to every registered subscriber.
// Subscribers are stored flat in triples (see `subscribe`); `settled`
// (1 or 2) selects the fulfillment or rejection callback of each triple.
function publish(promise) {
var subscribers = promise._subscribers;
var settled = promise._state;
if (config.instrument) {
instrument(settled === FULFILLED ? 'fulfilled' : 'rejected', promise);
}
if (subscribers.length === 0) {
return;
}
var child = void 0,
callback = void 0,
result = promise._result;
for (var i = 0; i < subscribers.length; i += 3) {
child = subscribers[i];
callback = subscribers[i + settled];
if (child) {
invokeCallback(settled, child, callback, result);
} else {
// Internal subscription with no child promise: raw callback.
callback(result);
}
}
// Subscribers fire at most once; clear the list.
promise._subscribers.length = 0;
}
// Single-field mutable container reused as an error-carrying sentinel by
// the try/catch helpers, avoiding a fresh allocation per call.
function ErrorObject() {
  this.error = null;
}
// Reusable sentinel returned by `tryCatch` when the callback throws.
var TRY_CATCH_ERROR = new ErrorObject();
// Call `callback(result)` and return its value, or the TRY_CATCH_ERROR
// sentinel (carrying the thrown error in `.error`) if it throws.
function tryCatch(callback, result) {
try {
return callback(result);
} catch (e) {
TRY_CATCH_ERROR.error = e;
return TRY_CATCH_ERROR;
}
}
// Settle `promise` (the child created by `then`) from the outcome of a
// subscriber callback. `state` is the parent's settled state, `callback`
// the matching handler (may be non-callable when that slot was empty),
// `result` the parent's settlement value.
function invokeCallback(state, promise, callback, result) {
var hasCallback = isFunction(callback);
var value = void 0,
error = void 0;
if (hasCallback) {
value = tryCatch(callback, result);
if (value === TRY_CATCH_ERROR) {
error = value.error;
value.error = null; // release
} else if (value === promise) {
// A handler returning its own child promise is a TypeError
// (Promises/A+ 2.3.1).
reject(promise, withOwnPromise());
return;
}
} else {
// No handler: pass the parent's result straight through.
value = result;
}
if (promise._state !== PENDING) {
// noop
} else if (hasCallback && error === undefined) {
resolve(promise, value);
} else if (error !== undefined) {
reject(promise, error);
} else if (state === FULFILLED) {
// Handler absent: propagate the parent's fulfillment...
fulfill(promise, value);
} else if (state === REJECTED) {
// ...or the parent's rejection.
reject(promise, value);
}
}
// Run a user-supplied resolver, wiring its two arguments to the internal
// settlement functions. Only the first call to either argument has any
// effect; a synchronous throw from the resolver rejects the promise.
function initializePromise(promise, resolver) {
  var settled = false;
  var once = function (settler) {
    return function (valueOrReason) {
      if (!settled) {
        settled = true;
        settler(promise, valueOrReason);
      }
    };
  };
  try {
    resolver(once(resolve), once(reject));
  } catch (e) {
    reject(promise, e);
  }
}
// Core `then` shared by Promise.prototype.then: returns a child promise
// settled by the outcome of the relevant handler.
function then(onFulfillment, onRejection, label) {
var parent = this;
var state = parent._state;
// Fast path: a settled promise with no handler for its state would just
// propagate unchanged, so hand the same promise back.
if (state === FULFILLED && !onFulfillment || state === REJECTED && !onRejection) {
config.instrument && instrument('chained', parent, parent);
return parent;
}
// Chaining counts as handling; disarm the unhandled-rejection hook.
parent._onError = null;
var child = new parent.constructor(noop, label);
var result = parent._result;
config.instrument && instrument('chained', parent, child);
if (state === PENDING) {
subscribe(parent, child, onFulfillment, onRejection);
} else {
// Already settled: invoke the matching handler asynchronously.
var callback = state === FULFILLED ? onFulfillment : onRejection;
config.async(function () {
return invokeCallback(state, child, callback, result);
});
}
return child;
}
// Workhorse behind `all`, `allSettled`, and `hash`: walks an input
// collection, resolving thenable entries, and fulfills `this.promise` with
// the collected results once every entry has settled. With `abortOnReject`
// the first rejection rejects the outer promise immediately.
var Enumerator = function () {
function Enumerator(Constructor, input, abortOnReject, label) {
this._instanceConstructor = Constructor;
this.promise = new Constructor(noop, label);
this._abortOnReject = abortOnReject;
// Subclasses (e.g. PromiseHash) override _init to interpret `input`.
this._init.apply(this, arguments);
}
Enumerator.prototype._init = function _init(Constructor, input) {
var len = input.length || 0;
this.length = len;
// _remaining counts unsettled entries; zero means fulfill immediately.
this._remaining = len;
this._result = new Array(len);
this._enumerate(input);
if (this._remaining === 0) {
fulfill(this.promise, this._result);
}
};
Enumerator.prototype._enumerate = function _enumerate(input) {
var length = this.length;
var promise = this.promise;
// Stop early if the outer promise settles (abort-on-reject fired).
for (var i = 0; promise._state === PENDING && i < length; i++) {
this._eachEntry(input[i], i);
}
};
Enumerator.prototype._settleMaybeThenable = function _settleMaybeThenable(entry, i) {
var c = this._instanceConstructor;
var resolve$$1 = c.resolve;
if (resolve$$1 === resolve$1) {
// Stock `resolve`: safe to inspect the entry directly.
var then$$1 = getThen(entry);
if (then$$1 === then && entry._state !== PENDING) {
// One of our own promises, already settled: record synchronously.
entry._onError = null;
this._settledAt(entry._state, i, entry._result);
} else if (typeof then$$1 !== 'function') {
// Not a thenable: treat as an immediate value.
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
} else if (c === Promise) {
// Thenable with the stock constructor: adopt without extra wrapping.
var promise = new c(noop);
handleMaybeThenable(promise, entry, then$$1);
this._willSettleAt(promise, i);
} else {
// Subclassed constructor: go through its own resolution semantics.
this._willSettleAt(new c(function (resolve$$1) {
return resolve$$1(entry);
}), i);
}
} else {
// Custom `resolve` on the constructor: respect it.
this._willSettleAt(resolve$$1(entry), i);
}
};
Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {
if (isMaybeThenable(entry)) {
this._settleMaybeThenable(entry, i);
} else {
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
}
};
Enumerator.prototype._settledAt = function _settledAt(state, i, value) {
var promise = this.promise;
if (promise._state === PENDING) {
if (this._abortOnReject && state === REJECTED) {
reject(promise, value);
} else {
this._remaining--;
this._result[i] = this._makeResult(state, i, value);
if (this._remaining === 0) {
fulfill(promise, this._result);
}
}
}
};
// Hook: subclasses map (state, index, value) to the stored result
// (e.g. AllSettled stores { state, value | reason } snapshots).
Enumerator.prototype._makeResult = function _makeResult(state, i, value) {
return value;
};
Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {
var enumerator = this;
subscribe(promise, undefined, function (value) {
return enumerator._settledAt(FULFILLED, i, value);
}, function (reason) {
return enumerator._settledAt(REJECTED, i, reason);
});
};
return Enumerator;
}();
// Build an allSettled-style snapshot for a settled entry:
// { state: 'fulfilled', value } or { state: 'rejected', reason }.
function makeSettledResult(state, position, value) {
  if (state !== FULFILLED) {
    return {
      state: 'rejected',
      reason: value
    };
  }
  return {
    state: 'fulfilled',
    value: value
  };
}
/**
`RSVP.Promise.all` accepts an array of promises, and returns a new promise which
is fulfilled with an array of fulfillment values for the passed promises, or
rejected with the reason of the first passed promise to be rejected. It casts all
elements of the passed iterable to promises as it runs this algorithm.
Example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// The array here would be [ 1, 2, 3 ];
});
```
If any of the `promises` given to `RSVP.all` are rejected, the first promise
that is rejected will be given as an argument to the returned promises's
rejection handler. For example:
Example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error("2"));
let promise3 = RSVP.reject(new Error("3"));
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// Code here never runs because there are rejected promises!
}, function(error) {
// error.message === "2"
});
```
@method all
@static
@param {Array} entries array of promises
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all `promises` have been
fulfilled, or rejected if any of them become rejected.
@static
*/
// See the doc comment above: fulfill with all results in order, or reject
// with the first rejection. Non-array input rejects with a TypeError.
function all(entries, label) {
  if (isArray(entries)) {
    return new Enumerator(this, entries, true /* abort on reject */, label).promise;
  }
  return this.reject(new TypeError("Promise.all must be called with an array"), label);
}
/**
`RSVP.Promise.race` returns a new promise which is settled in the same way as the
first passed promise to settle.
Example:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 2');
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// result === 'promise 2' because it was resolved before promise1
// was resolved.
});
```
`RSVP.Promise.race` is deterministic in that only the state of the first
settled promise matters. For example, even if other promises given to the
`promises` array argument are resolved, but the first settled promise has
become rejected before the other promises became fulfilled, the returned
promise will become rejected:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
reject(new Error('promise 2'));
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// Code here never runs
}, function(reason){
// reason.message === 'promise 2' because promise 2 became rejected before
// promise 1 became fulfilled
});
```
An example real-world use case is implementing timeouts:
```javascript
RSVP.Promise.race([ajax('foo.json'), timeout(5000)])
```
@method race
@static
@param {Array} entries array of promises to observe
@param {String} label optional string for describing the promise returned.
Useful for tooling.
@return {Promise} a promise which settles in the same way as the first passed
promise to settle.
*/
// See the doc comment above: settle the returned promise the same way as
// the first entry to settle. Non-array input rejects with a TypeError.
function race(entries, label) {
  /*jshint validthis:true */
  var Constructor = this;
  var promise = new Constructor(noop, label);
  if (!isArray(entries)) {
    reject(promise, new TypeError('Promise.race must be called with an array'));
    return promise;
  }
  // Stop subscribing once the race has been decided.
  for (var i = 0; i < entries.length && promise._state === PENDING; i++) {
    subscribe(Constructor.resolve(entries[i]), undefined, function (value) {
      return resolve(promise, value);
    }, function (reason) {
      return reject(promise, reason);
    });
  }
  return promise;
}
/**
`RSVP.Promise.reject` returns a promise rejected with the passed `reason`.
It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
reject(new Error('WHOOPS'));
});
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.reject(new Error('WHOOPS'));
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
@method reject
@static
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
// See the doc comment above: return a new promise of this constructor,
// already rejected with `reason`.
function reject$1(reason, label) {
  /*jshint validthis:true */
  var Constructor = this;
  var rejected = new Constructor(noop, label);
  reject(rejected, reason);
  return rejected;
}
// Per-load key prefix used to build unique promise GUIDs for
// instrumentation; `counter` supplies the per-promise suffix (_id).
var guidKey = 'rsvp_' + now() + '-';
var counter = 0;
// Thrown when the Promise constructor is called without a resolver function.
function needsResolver() {
throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
}
// Thrown when the Promise constructor is called without `new`.
function needsNew() {
throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
}
/**
Promise objects represent the eventual result of an asynchronous operation. The
primary way of interacting with a promise is through its `then` method, which
registers callbacks to receive either a promise’s eventual value or the reason
why the promise cannot be fulfilled.
Terminology
-----------
- `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- `thenable` is an object or function that defines a `then` method.
- `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- `exception` is a value that is thrown using the throw statement.
- `reason` is a value that indicates why a promise was rejected.
- `settled` the final resting state of a promise, fulfilled or rejected.
A promise can be in one of three states: pending, fulfilled, or rejected.
Promises that are fulfilled have a fulfillment value and are in the fulfilled
state. Promises that are rejected have a rejection reason and are in the
rejected state. A fulfillment value is never a thenable.
Promises can also be said to *resolve* a value. If this value is also a
promise, then the original promise's settled state will match the value's
settled state. So a promise that *resolves* a promise that rejects will
itself reject, and a promise that *resolves* a promise that fulfills will
itself fulfill.
Basic Usage:
------------
```js
let promise = new Promise(function(resolve, reject) {
// on success
resolve(value);
// on failure
reject(reason);
});
promise.then(function(value) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Advanced Usage:
---------------
Promises shine when abstracting away asynchronous interactions such as
`XMLHttpRequest`s.
```js
function getJSON(url) {
return new Promise(function(resolve, reject){
let xhr = new XMLHttpRequest();
xhr.open('GET', url);
xhr.onreadystatechange = handler;
xhr.responseType = 'json';
xhr.setRequestHeader('Accept', 'application/json');
xhr.send();
function handler() {
if (this.readyState === this.DONE) {
if (this.status === 200) {
resolve(this.response);
} else {
reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
}
}
};
});
}
getJSON('/posts.json').then(function(json) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Unlike callbacks, promises are great composable primitives.
```js
Promise.all([
getJSON('/posts'),
getJSON('/comments')
]).then(function(values){
values[0] // => postsJSON
values[1] // => commentsJSON
return values;
});
```
@class RSVP.Promise
@param {function} resolver
@param {String} label optional string for labeling the promise.
Useful for tooling.
@constructor
*/
var Promise = function () {
function Promise(resolver, label) {
// Monotonic id for instrumentation; `label` is an optional tooling hint.
this._id = counter++;
this._label = label;
// undefined = pending (see PENDING); becomes FULFILLED or REJECTED.
this._state = undefined;
this._result = undefined;
// Flat [child, onFulfillment, onRejection] triples (see `subscribe`).
this._subscribers = [];
config.instrument && instrument('created', this);
// Internal promises pass `noop` and skip resolver validation/execution.
if (noop !== resolver) {
typeof resolver !== 'function' && needsResolver();
this instanceof Promise ? initializePromise(this, resolver) : needsNew();
}
}
// Default unhandled-rejection hook: fires the global 'error' event on the
// `after` turn unless a handler was attached in the meantime (attaching
// one resets _onError to null — see `subscribe`/`then`).
Promise.prototype._onError = function _onError(reason) {
var _this = this;
config.after(function () {
if (_this._onError) {
config.trigger('error', reason, _this._label);
}
});
};
/**
`catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
as the catch block of a try/catch statement.
```js
function findAuthor(){
throw new Error('couldn\'t find that author');
}
// synchronous
try {
findAuthor();
} catch(reason) {
// something went wrong
}
// async with promises
findAuthor().catch(function(reason){
// something went wrong
});
```
@method catch
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.catch = function _catch(onRejection, label) {
return this.then(undefined, onRejection, label);
};
/**
`finally` will be invoked regardless of the promise's fate just as native
try/catch/finally behaves
Synchronous example:
```js
findAuthor() {
if (Math.random() > 0.5) {
throw new Error();
}
return new Author();
}
try {
return findAuthor(); // succeed or fail
} catch(error) {
return findOtherAuthor();
} finally {
// always runs
// doesn't affect the return value
}
```
Asynchronous example:
```js
findAuthor().catch(function(reason){
return findOtherAuthor();
}).finally(function(){
// author was either found, or not
});
```
@method finally
@param {Function} callback
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.finally = function _finally(callback, label) {
var promise = this;
var constructor = promise.constructor;
// Run `callback` on either outcome; once its result settles, pass the
// original value through or rethrow the original reason.
return promise.then(function (value) {
return constructor.resolve(callback()).then(function () {
return value;
});
}, function (reason) {
return constructor.resolve(callback()).then(function () {
throw reason;
});
}, label);
};
return Promise;
}();
// Static API wiring.
Promise.cast = resolve$1; // deprecated
Promise.all = all;
Promise.race = race;
Promise.resolve = resolve$1;
Promise.reject = reject$1;
// Shared GUID prefix used by instrumentation (see `instrument`).
Promise.prototype._guidKey = guidKey;
/**
The primary way of interacting with a promise is through its `then` method,
which registers callbacks to receive either a promise's eventual value or the
reason why the promise cannot be fulfilled.
```js
findUser().then(function(user){
// user is available
}, function(reason){
// user is unavailable, and you are given the reason why
});
```
Chaining
--------
The return value of `then` is itself a promise. This second, 'downstream'
promise is resolved with the return value of the first promise's fulfillment
or rejection handler, or rejected if the handler throws an exception.
```js
findUser().then(function (user) {
return user.name;
}, function (reason) {
return 'default name';
}).then(function (userName) {
// If `findUser` fulfilled, `userName` will be the user's name, otherwise it
// will be `'default name'`
});
findUser().then(function (user) {
throw new Error('Found user, but still unhappy');
}, function (reason) {
throw new Error('`findUser` rejected and we\'re unhappy');
}).then(function (value) {
// never reached
}, function (reason) {
// if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.
// If `findUser` rejected, `reason` will be '`findUser` rejected and we\'re unhappy'.
});
```
If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
```js
findUser().then(function (user) {
throw new PedagogicalException('Upstream error');
}).then(function (value) {
// never reached
}).then(function (value) {
// never reached
}, function (reason) {
// The `PedagogicalException` is propagated all the way down to here
});
```
Assimilation
------------
Sometimes the value you want to propagate to a downstream promise can only be
retrieved asynchronously. This can be achieved by returning a promise in the
fulfillment or rejection handler. The downstream promise will then be pending
until the returned promise is settled. This is called *assimilation*.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// The user's comments are now available
});
```
If the assimilated promise rejects, then the downstream promise will also reject.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// If `findCommentsByAuthor` fulfills, we'll have the value here
}, function (reason) {
// If `findCommentsByAuthor` rejects, we'll have the reason here
});
```
Simple Example
--------------
Synchronous Example
```javascript
let result;
try {
result = findResult();
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
findResult(function(result, err){
if (err) {
// failure
} else {
// success
}
});
```
Promise Example;
```javascript
findResult().then(function(result){
// success
}, function(reason){
// failure
});
```
Advanced Example
--------------
Synchronous Example
```javascript
let author, books;
try {
author = findAuthor();
books = findBooksByAuthor(author);
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
function foundBooks(books) {
}
function failure(reason) {
}
findAuthor(function(author, err){
if (err) {
failure(err);
// failure
} else {
try {
findBoooksByAuthor(author, function(books, err) {
if (err) {
failure(err);
} else {
try {
foundBooks(books);
} catch(reason) {
failure(reason);
}
}
});
} catch(error) {
failure(err);
}
// success
}
});
```
Promise Example;
```javascript
findAuthor().
then(findBooksByAuthor).
then(function(books){
// found books
}).catch(function(reason){
// something went wrong
});
```
@method then
@param {Function} onFulfillment
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.then = then;
// Single-field carrier used by the denodeify helpers to smuggle a caught
// error out of a try/catch without throwing.
function Result() {
this.value = undefined;
}
var ERROR = new Result();
// NOTE(review): `getThen$1` stores its error on ERROR and returns ERROR,
// yet `denodeify` compares against GET_THEN_ERROR$1 and reads
// GET_THEN_ERROR$1.value — the two sentinels appear mismatched; confirm
// against upstream RSVP before relying on the early-rejection branch.
var GET_THEN_ERROR$1 = new Result();
/**
 * Safely read `obj.then` for denodeify's promise-input detection.
 *
 * Returns the `then` value, or the GET_THEN_ERROR$1 sentinel (with the
 * thrown error stored on its `.value`) when the property getter throws.
 *
 * Bug fix: this previously stored the error on ERROR and returned ERROR,
 * but the only caller chain (`needsPromiseInput` -> `denodeify`, see the
 * `promiseInput === GET_THEN_ERROR$1` check there) compares against
 * GET_THEN_ERROR$1 and reads GET_THEN_ERROR$1.value. A throwing `then`
 * getter therefore skipped the early-rejection branch and the sentinel
 * was wrapped as if it were a real thenable. Returning GET_THEN_ERROR$1
 * makes the sentinel comparison in `denodeify` actually match.
 */
function getThen$1(obj) {
  try {
    return obj.then;
  } catch (error) {
    GET_THEN_ERROR$1.value = error;
    return GET_THEN_ERROR$1;
  }
}
// Apply `f` with `this = s` and argument list `a`. On a throw, capture the
// error on the ERROR sentinel and return it; returns undefined on success.
function tryApply(f, s, a) {
  var outcome;
  try {
    f.apply(s, a);
  } catch (error) {
    ERROR.value = error;
    outcome = ERROR;
  }
  return outcome;
}
/**
 * Convert a node-callback `arguments` object into a named hash for
 * denodeify's array-of-names option.
 *
 * `_[0]` is the error slot, so success value `i` lives at `_[i + 1]` and
 * is stored under `argumentNames[i]`. Missing trailing arguments come
 * through as `undefined` own properties, exactly as before.
 *
 * Improvement: the previous version first copied every element of `_`
 * into a temporary array and then indexed that copy; the copy was pure
 * overhead, so values are now read from `_` directly.
 */
function makeObject(_, argumentNames) {
  var obj = {};
  for (var i = 0; i < argumentNames.length; i++) {
    obj[argumentNames[i]] = _[i + 1];
  }
  return obj;
}
// Collect all success arguments of a node callback, dropping the leading
// error slot: [err, a, b] -> [a, b].
function arrayResult(_) {
  var out = new Array(_.length - 1);
  for (var i = 0; i + 1 < _.length; i++) {
    out[i] = _[i + 1];
  }
  return out;
}
// Wrap a raw `then` function together with its owner so it can be used as
// a plain thenable object without re-reading a (possibly exotic) getter.
function wrapThenable(then, promise) {
  var wrapped = {
    then: function (onFulFillment, onRejection) {
      return then.call(promise, onFulFillment, onRejection);
    }
  };
  return wrapped;
}
/**
`RSVP.denodeify` takes a 'node-style' function and returns a function that
will return an `RSVP.Promise`. You can use `denodeify` in Node.js or the
browser when you'd prefer to use promises over using callbacks. For example,
`denodeify` transforms the following:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) return handleError(err);
handleData(data);
});
```
into:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
readFile('myfile.txt').then(handleData, handleError);
```
If the node function has multiple success parameters, then `denodeify`
just returns the first one:
```javascript
let request = RSVP.denodeify(require('request'));
request('http://example.com').then(function(res) {
// ...
});
```
However, if you need all success parameters, setting `denodeify`'s
second parameter to `true` causes it to return all success parameters
as an array:
```javascript
let request = RSVP.denodeify(require('request'), true);
request('http://example.com').then(function(result) {
// result[0] -> res
// result[1] -> body
});
```
Or if you pass it an array with names it returns the parameters as a hash:
```javascript
let request = RSVP.denodeify(require('request'), ['res', 'body']);
request('http://example.com').then(function(result) {
// result.res
// result.body
});
```
Sometimes you need to retain the `this`:
```javascript
let app = require('express')();
let render = RSVP.denodeify(app.render.bind(app));
```
The denodified function inherits from the original function. It works in all
environments, except IE 10 and below. Consequently all properties of the original
function are available to you. However, any properties you change on the
denodeified function won't be changed on the original function. Example:
```javascript
let request = RSVP.denodeify(require('request')),
cookieJar = request.jar(); // <- Inheritance is used here
request('http://example.com', {jar: cookieJar}).then(function(res) {
// cookieJar.cookies holds now the cookies returned by example.com
});
```
Using `denodeify` makes it easier to compose asynchronous operations instead
of using callbacks. For example, instead of:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) { ... } // Handle error
fs.writeFile('myfile2.txt', data, function(err){
if (err) { ... } // Handle error
console.log('done')
});
});
```
you can chain the operations together using `then` from the returned promise:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
let writeFile = RSVP.denodeify(fs.writeFile);
readFile('myfile.txt').then(function(data){
return writeFile('myfile2.txt', data);
}).then(function(){
console.log('done')
}).catch(function(error){
// Handle error
});
```
@method denodeify
@static
@for RSVP
@param {Function} nodeFunc a 'node-style' function that takes a callback as
its last argument. The callback expects an error to be passed as its first
argument (if an error occurred, otherwise null), and the value from the
operation as its second argument ('function(err, value){ }').
@param {Boolean|Array} [options] An optional parameter that if set
to `true` causes the promise to fulfill with the callback's success arguments
as an array. This is useful if the node function has multiple success
parameters. If you set this parameter to an array with names, the promise will
fulfill with a hash with these names as keys and the success parameters as
values.
@return {Function} a function that wraps `nodeFunc` to return an
`RSVP.Promise`
@static
*/
// Wrap a node-style (error-first callback) function into one returning a
// Promise. See the extensive doc comment above for `options` behavior.
function denodeify(nodeFunc, options) {
var fn = function () {
var self = this;
var l = arguments.length;
// One extra slot for the callback appended below.
var args = new Array(l + 1);
var promiseInput = false;
for (var i = 0; i < l; ++i) {
var arg = arguments[i];
// Scan until the first promise-like argument is found.
if (!promiseInput) {
// TODO: clean this up
promiseInput = needsPromiseInput(arg);
if (promiseInput === GET_THEN_ERROR$1) {
// Reading `.then` threw: reject immediately with that error.
var p = new Promise(noop);
reject(p, GET_THEN_ERROR$1.value);
return p;
} else if (promiseInput && promiseInput !== true) {
// Foreign thenable: wrap so its `then` getter is read only once.
arg = wrapThenable(promiseInput, arg);
}
}
args[i] = arg;
}
var promise = new Promise(noop);
// Error-first callback appended as the last argument settles `promise`
// according to `options` (single value / array / named hash).
args[l] = function (err, val) {
if (err) reject(promise, err);else if (options === undefined) resolve(promise, val);else if (options === true) resolve(promise, arrayResult(arguments));else if (isArray(options)) resolve(promise, makeObject(arguments, options));else resolve(promise, val);
};
if (promiseInput) {
return handlePromiseInput(promise, args, nodeFunc, self);
} else {
return handleValueInput(promise, args, nodeFunc, self);
}
};
// Inherit statics from the wrapped function (unsupported on IE <= 10).
fn.__proto__ = nodeFunc;
return fn;
}
// Synchronously invoke the node-style function; a throw rejects the
// wrapper promise (otherwise the appended callback settles it later).
function handleValueInput(promise, args, nodeFunc, self) {
  var outcome = tryApply(nodeFunc, self, args);
  if (outcome === ERROR) {
    reject(promise, outcome.value);
  }
  return promise;
}
// Wait for any promise arguments to settle, then invoke the node-style
// function with the resolved values; the wrapper promise is settled by
// the appended callback (or rejected here on a synchronous throw).
function handlePromiseInput(promise, args, nodeFunc, self) {
  return Promise.all(args).then(function (resolvedArgs) {
    var outcome = tryApply(nodeFunc, self, resolvedArgs);
    if (outcome === ERROR) {
      reject(promise, outcome.value);
    }
    return promise;
  });
}
// Classify a denodeify argument: `true` for one of our own promises, the
// raw `then` value (or the error sentinel) for other objects, and `false`
// for primitives and null.
function needsPromiseInput(arg) {
  if (!arg || typeof arg !== 'object') {
    return false;
  }
  if (arg.constructor === Promise) {
    return true;
  }
  return getThen$1(arg);
}
/**
This is a convenient alias for `RSVP.Promise.all`.
@method all
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
// Module-level alias delegating to the Promise.all static (see above).
function all$1(array, label) {
return Promise.all(array, label);
}
// Babel helper: implement the `return` semantics of a transpiled derived
// constructor. Throws if `super()` was never called (self is undefined);
// otherwise an explicit object/function returned by the parent constructor
// takes precedence over `this`.
function _possibleConstructorReturn(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  var overrides = call && (typeof call === "object" || typeof call === "function");
  return overrides ? call : self;
}
// Babel helper: wire up prototype and static inheritance for a transpiled
// `class ... extends ...` declaration.
function _inherits(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
// Enumerator subclass backing `allSettled`: never aborts on rejection and
// records each entry's outcome as a { state, value | reason } snapshot.
var AllSettled = function (_Enumerator) {
_inherits(AllSettled, _Enumerator);
function AllSettled(Constructor, entries, label) {
return _possibleConstructorReturn(this, _Enumerator.call(this, Constructor, entries, false /* don't abort on reject */, label));
}
return AllSettled;
}(Enumerator);
// Store state snapshots rather than raw values (see makeSettledResult).
AllSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.allSettled` is similar to `RSVP.all`, but instead of implementing
a fail-fast method, it waits until all the promises have returned and
shows you all the results. This is useful if you want to handle multiple
promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled. The return promise is fulfilled with an array of the states of
the promises passed into the `promises` array argument.
Each state object will either indicate fulfillment or rejection, and
provide the corresponding value or reason. The states will take one of
the following formats:
```javascript
{ state: 'fulfilled', value: value }
or
{ state: 'rejected', reason: reason }
```
Example:
```javascript
let promise1 = RSVP.Promise.resolve(1);
let promise2 = RSVP.Promise.reject(new Error('2'));
let promise3 = RSVP.Promise.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
RSVP.allSettled(promises).then(function(array){
// array == [
// { state: 'fulfilled', value: 1 },
// { state: 'rejected', reason: Error },
// { state: 'rejected', reason: Error }
// ]
// Note that for the second item, reason.message will be '2', and for the
// third item, reason.message will be '3'.
}, function(error) {
// Not run. (This block would only be called if allSettled had failed,
// for instance if passed an incorrect argument type.)
});
```
@method allSettled
@static
@for RSVP
@param {Array} entries
@param {String} label - optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with an array of the settled
states of the constituent promises.
*/
// See the doc comment above: like `all`, but waits for every entry and
// fulfills with an array of { state, value | reason } snapshots.
function allSettled(entries, label) {
  if (isArray(entries)) {
    return new AllSettled(Promise, entries, label).promise;
  }
  return Promise.reject(new TypeError("Promise.allSettled must be called with an array"), label);
}
/**
This is a convenient alias for `RSVP.Promise.race`.
@method race
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
// Module-level alias delegating to the Promise.race static (see above).
function race$1(array, label) {
return Promise.race(array, label);
}
// Babel helper (second copy emitted by the bundler — see
// _possibleConstructorReturn): derived-constructor `return` semantics.
function _possibleConstructorReturn$1(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  var overrides = call && (typeof call === "object" || typeof call === "function");
  return overrides ? call : self;
}
// Babel helper (second copy emitted by the bundler — see _inherits):
// prototype and static inheritance wiring.
function _inherits$1(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
var hasOwnProperty = Object.prototype.hasOwnProperty;
// Enumerator subclass that settles with a keyed object instead of an array.
// Used by `hash` and (via HashSettled) by `hashSettled`.
var PromiseHash = function (_Enumerator) {
  _inherits$1(PromiseHash, _Enumerator);
  function PromiseHash(Constructor, object) {
    // abortOnReject defaults to true — fail-fast (`hash`) semantics.
    var abortOnReject = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
    var label = arguments[3];
    return _possibleConstructorReturn$1(this, _Enumerator.call(this, Constructor, object, abortOnReject, label));
  }
  // Results accumulate into a plain object rather than an array.
  PromiseHash.prototype._init = function _init(Constructor, object) {
    this._result = {};
    this._enumerate(object);
    if (this._remaining === 0) {
      // Nothing to wait for: settle immediately with the (empty) result.
      fulfill(this.promise, this._result);
    }
  };
  PromiseHash.prototype._enumerate = function _enumerate(input) {
    var promise = this.promise;
    var entries = [];
    // Own enumerable properties only — prototype chains are deliberately ignored.
    for (var key in input) {
      if (hasOwnProperty.call(input, key)) {
        entries.push({ position: key, entry: input[key] });
      }
    }
    var total = entries.length;
    this._remaining = total;
    // Stop early if the promise settles (e.g. rejects) mid-enumeration.
    for (var i = 0; promise._state === PENDING && i < total; i++) {
      var item = entries[i];
      this._eachEntry(item.entry, item.position);
    }
  };
  return PromiseHash;
}(Enumerator);
/**
`RSVP.hash` is similar to `RSVP.all`, but takes an object instead of an array
for its `promises` argument.
Returns a promise that is fulfilled when all the given promises have been
fulfilled, or rejected if any of them become rejected. The returned promise
is fulfilled with a hash that has the same key names as the `promises` object
argument. If any of the values in the object are not promises, they will
simply be copied over to the fulfilled object.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
yourPromise: RSVP.resolve(2),
theirPromise: RSVP.resolve(3),
notAPromise: 4
};
RSVP.hash(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: 1,
// yourPromise: 2,
// theirPromise: 3,
// notAPromise: 4
// }
});
  ```
If any of the `promises` given to `RSVP.hash` are rejected, the first promise
that is rejected will be given as the reason to the rejection handler.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
rejectedPromise: RSVP.reject(new Error('rejectedPromise')),
anotherRejectedPromise: RSVP.reject(new Error('anotherRejectedPromise')),
};
RSVP.hash(promises).then(function(hash){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === 'rejectedPromise'
});
```
An important note: `RSVP.hash` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hash` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hash(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: 'Example'
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hash
@static
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all properties of `promises`
have been fulfilled, or rejected if any of them become rejected.
*/
// Like `all`, but for a plain object of promises; fulfills with an object of
// the same keys (fail-fast on the first rejection).
function hash(object, label) {
  // Reject rather than throw so the caller always receives a promise.
  if (!isObject(object)) {
    return Promise.reject(new TypeError("Promise.hash must be called with an object"), label);
  }
  var enumerator = new PromiseHash(Promise, object, label);
  return enumerator.promise;
}
function _possibleConstructorReturn$2(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits$2(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
// PromiseHash variant with abortOnReject disabled: every entry is allowed to
// settle, and each result is wrapped by makeSettledResult.
var HashSettled = function (_PromiseHash) {
  function HashSettled(Constructor, object, label) {
    // Third super() argument (abortOnReject) is hard-wired to false.
    var instance = _PromiseHash.call(this, Constructor, object, false, label);
    return _possibleConstructorReturn$2(this, instance);
  }
  _inherits$2(HashSettled, _PromiseHash);
  return HashSettled;
}(PromiseHash);
// Wrap each settled entry as { state, value } / { state, reason }.
HashSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.hashSettled` is similar to `RSVP.allSettled`, but takes an object
instead of an array for its `promises` argument.
Unlike `RSVP.all` or `RSVP.hash`, which implement a fail-fast method,
but like `RSVP.allSettled`, `hashSettled` waits until all the
constituent promises have returned and then shows you all the results
with their states and values/reasons. This is useful if you want to
handle multiple promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled, or rejected if the passed parameters are invalid.
The returned promise is fulfilled with a hash that has the same key names as
the `promises` object argument. If any of the values in the object are not
promises, they will be copied over to the fulfilled object and marked with state
'fulfilled'.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
yourPromise: RSVP.Promise.resolve(2),
theirPromise: RSVP.Promise.resolve(3),
notAPromise: 4
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// yourPromise: { state: 'fulfilled', value: 2 },
// theirPromise: { state: 'fulfilled', value: 3 },
// notAPromise: { state: 'fulfilled', value: 4 }
// }
});
```
If any of the `promises` given to `RSVP.hash` are rejected, the state will
be set to 'rejected' and the reason for rejection provided.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
rejectedPromise: RSVP.Promise.reject(new Error('rejection')),
anotherRejectedPromise: RSVP.Promise.reject(new Error('more rejection')),
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// rejectedPromise: { state: 'rejected', reason: Error },
// anotherRejectedPromise: { state: 'rejected', reason: Error },
// }
// Note that for rejectedPromise, reason.message == 'rejection',
// and for anotherRejectedPromise, reason.message == 'more rejection'.
});
```
An important note: `RSVP.hashSettled` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hashSettled` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.Promise.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.Promise.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hashSettled(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: { state: 'fulfilled', value: 'Example' }
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hashSettled
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
  @return {Promise} promise that is fulfilled when all properties of `promises`
have been settled.
@static
*/
/**
 * Settles every value in `object` (never fail-fast) and fulfills with a
 * same-keyed hash of { state, value } / { state, reason } descriptors.
 *
 * @param {Object} object
 * @param {String} label optional string that describes the promise.
 * @return {Promise}
 */
function hashSettled(object, label) {
  if (!isObject(object)) {
    return Promise.reject(new TypeError("RSVP.hashSettled must be called with an object"), label);
  }
  // Bug fix: HashSettled's constructor signature is (Constructor, object, label).
  // The previous extra `false` argument occupied the label slot, so the real
  // label was silently dropped. abortOnReject is already forced to false
  // inside HashSettled itself.
  return new HashSettled(Promise, object, label).promise;
}
/**
`RSVP.rethrow` will rethrow an error on the next turn of the JavaScript event
loop in order to aid debugging.
Promises A+ specifies that any exceptions that occur with a promise must be
caught by the promises implementation and bubbled to the last handler. For
this reason, it is recommended that you always specify a second rejection
handler function to `then`. However, `RSVP.rethrow` will throw the exception
outside of the promise, so it bubbles up to your console if in the browser,
or domain/cause uncaught exception in Node. `rethrow` will also throw the
error again so the error can be handled by the promise per the spec.
```javascript
function throws(){
throw new Error('Whoops!');
}
let promise = new RSVP.Promise(function(resolve, reject){
throws();
});
promise.catch(RSVP.rethrow).then(function(){
// Code here doesn't run because the promise became rejected due to an
// error!
}, function (err){
// handle the error here
});
```
The 'Whoops' error will be thrown on the next turn of the event loop
and you can watch for it in your console. You can also handle it using a
rejection handler given to `.then` or `.catch` on the returned promise.
@method rethrow
@static
@for RSVP
@param {Error} reason reason the promise became rejected.
@throws Error
@static
*/
// Re-raises `reason` twice: once on a fresh turn of the event loop (so it
// escapes the promise machinery and reaches the console / uncaught handlers)
// and once synchronously (so the promise chain still rejects per spec).
function rethrow(reason) {
  setTimeout(function () {
    throw reason; // surfaces outside the promise on the next tick
  });
  throw reason; // keeps the returned promise rejected
}
/**
`RSVP.defer` returns an object similar to jQuery's `$.Deferred`.
`RSVP.defer` should be used when porting over code reliant on `$.Deferred`'s
interface. New code should use the `RSVP.Promise` constructor instead.
The object returned from `RSVP.defer` is a plain object with three properties:
* promise - an `RSVP.Promise`.
* reject - a function that causes the `promise` property on this object to
become rejected
* resolve - a function that causes the `promise` property on this object to
become fulfilled.
Example:
```javascript
let deferred = RSVP.defer();
deferred.resolve("Success!");
deferred.promise.then(function(value){
// value here is "Success!"
});
```
@method defer
@static
@for RSVP
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Object}
*/
function defer(label) {
var deferred = { resolve: undefined, reject: undefined };
deferred.promise = new Promise(function (resolve, reject) {
deferred.resolve = resolve;
deferred.reject = reject;
}, label);
return deferred;
}
/**
`RSVP.map` is similar to JavaScript's native `map` method, except that it
waits for all promises to become fulfilled before running the `mapFn` on
each item in given to `promises`. `RSVP.map` returns a promise that will
become fulfilled with the result of running `mapFn` on the values the promises
become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(result){
// result is [ 2, 3, 4 ]
});
```
If any of the `promises` given to `RSVP.map` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.map` will also wait if a promise is returned from `mapFn`. For example,
say you want to get all comments from a set of blog posts, but you need
the blog posts first because they contain a url to those comments.
  ```javascript
let mapFn = function(blogPost){
// getComments does some ajax and returns an RSVP.Promise that is fulfilled
// with some comments data
return getComments(blogPost.comments_url);
};
// getBlogPosts does some ajax and returns an RSVP.Promise that is fulfilled
// with some blog post data
RSVP.map(getBlogPosts(), mapFn).then(function(comments){
// comments is the result of asking the server for the comments
// of all blog posts returned from getBlogPosts()
});
```
@method map
@static
@for RSVP
@param {Array} promises
@param {Function} mapFn function to be called on each fulfilled promise.
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with the result of calling
`mapFn` on each fulfilled promise or value when they become fulfilled.
The promise will be rejected if any of the given `promises` become rejected.
@static
*/
// Waits for every input to fulfill, applies mapFn to each value, then waits
// again in case mapFn itself returned promises. Rejects with the first
// rejection among inputs or mapped results.
function map(promises, mapFn, label) {
  if (!isArray(promises)) {
    return Promise.reject(new TypeError("RSVP.map must be called with an array"), label);
  }
  if (!isFunction(mapFn)) {
    return Promise.reject(new TypeError("RSVP.map expects a function as a second argument"), label);
  }
  return Promise.all(promises, label).then(function (values) {
    var mapped = [];
    for (var i = 0; i < values.length; i++) {
      mapped.push(mapFn(values[i]));
    }
    return Promise.all(mapped, label);
  });
}
/**
This is a convenient alias for `RSVP.Promise.resolve`.
@method resolve
@static
@for RSVP
@param {*} value value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
function resolve$2(value, label) {
return Promise.resolve(value, label);
}
/**
This is a convenient alias for `RSVP.Promise.reject`.
@method reject
@static
@for RSVP
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
function reject$2(reason, label) {
return Promise.reject(reason, label);
}
/**
`RSVP.filter` is similar to JavaScript's native `filter` method, except that it
waits for all promises to become fulfilled before running the `filterFn` on
each item in given to `promises`. `RSVP.filter` returns a promise that will
become fulfilled with the result of running `filterFn` on the values the
promises become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [promise1, promise2, promise3];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(result){
// result is [ 2, 3 ]
});
```
If any of the `promises` given to `RSVP.filter` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.filter` will also wait for any promises returned from `filterFn`.
For instance, you may want to fetch a list of users then return a subset
of those users based on some asynchronous operation:
```javascript
let alice = { name: 'alice' };
let bob = { name: 'bob' };
let users = [ alice, bob ];
let promises = users.map(function(user){
return RSVP.resolve(user);
});
let filterFn = function(user){
// Here, Alice has permissions to create a blog post, but Bob does not.
return getPrivilegesForUser(user).then(function(privs){
return privs.can_create_blog_post === true;
});
};
RSVP.filter(promises, filterFn).then(function(users){
// true, because the server told us only Alice can create a blog post.
users.length === 1;
// false, because Alice is the only user present in `users`
users[0] === bob;
});
```
@method filter
@static
@for RSVP
@param {Array} promises
@param {Function} filterFn - function to be called on each resolved value to
filter the final results.
@param {String} label optional string describing the promise. Useful for
tooling.
@return {Promise}
*/
function resolveAll(promises, label) {
return Promise.all(promises, label);
}
// Internal: `promise` is a thenable expected to fulfill with an array; resolve
// it first, then resolve its contents via resolveAll.
function resolveSingle(promise, label) {
  var unwrap = function (promises) {
    return resolveAll(promises, label);
  };
  return Promise.resolve(promise, label).then(unwrap);
}
// Async filter: waits for all inputs, evaluates filterFn on each value, waits
// for the (possibly async) flags, then keeps the values whose flag is truthy,
// preserving input order.
function filter(promises, filterFn, label) {
  var isThenable = isObject(promises) && promises.then !== undefined;
  if (!isArray(promises) && !isThenable) {
    return Promise.reject(new TypeError("RSVP.filter must be called with an array or promise"), label);
  }
  if (!isFunction(filterFn)) {
    return Promise.reject(new TypeError("RSVP.filter expects function as a second argument"), label);
  }
  var resolved = isArray(promises) ? resolveAll(promises, label) : resolveSingle(promises, label);
  return resolved.then(function (values) {
    var count = values.length;
    var flags = new Array(count);
    for (var i = 0; i < count; i++) {
      flags[i] = filterFn(values[i]);
    }
    // filterFn may return promises; wait for the flags themselves to resolve.
    return resolveAll(flags, label).then(function (settledFlags) {
      var kept = [];
      for (var j = 0; j < count; j++) {
        if (settledFlags[j]) {
          kept.push(values[j]);
        }
      }
      return kept;
    });
  });
}
// Write cursor into the shared microtask queue below; the queue stores flat
// (callback, arg) pairs, so `len` advances by 2 per queued item.
var len = 0;
// Populated by attemptVertex() with vert.x's runOnLoop/runOnContext hook.
var vertxNext = void 0;
// Enqueue `callback(arg)` to run on the next flush.
function asap(callback, arg) {
  queue$1[len] = callback;
  queue$1[len + 1] = arg;
  len += 2;
  if (len === 2) {
    // len === 2 means this is the first entry since the last flush, so a
    // flush must be scheduled. Any callbacks queued before that flush runs
    // are drained by the same flush.
    scheduleFlush$1();
  }
}
// Environment sniffing used below to pick the fastest async scheduling primitive.
var browserWindow = typeof window !== 'undefined' ? window : undefined;
var browserGlobal = browserWindow || {};
var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
// Node check: no `self` global and `process` stringifies as a process object.
var isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
// test for web worker but not in IE10
var isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';
function useNextTick() {
var nextTick = process.nextTick;
// node version 0.10.x displays a deprecation warning when nextTick is used recursively
// setImmediate should be used instead instead
var version = process.versions.node.match(/^(?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)$/);
if (Array.isArray(version) && version[1] === '0' && version[2] === '10') {
nextTick = setImmediate;
}
return function () {
return nextTick(flush);
};
}
// vertx
// Prefer the vert.x hook captured in vertxNext (see attemptVertex); fall back
// to setTimeout when it was never found.
function useVertxTimer() {
  if (typeof vertxNext === 'undefined') {
    return useSetTimeout();
  }
  return function () {
    vertxNext(flush);
  };
}
// Schedules flushes via a MutationObserver on a detached text node: toggling
// the node's character data fires the observer at microtask speed.
function useMutationObserver() {
  var toggle = 0;
  var observer = new BrowserMutationObserver(flush);
  var node = document.createTextNode('');
  observer.observe(node, { characterData: true });
  return function () {
    toggle = ++toggle % 2;
    return node.data = toggle;
  };
}
// web worker
// Workers lack MutationObserver; posting on a MessageChannel fires the flush
// asynchronously instead.
function useMessageChannel() {
  var channel = new MessageChannel();
  var receiver = channel.port1;
  var sender = channel.port2;
  receiver.onmessage = flush;
  return function () {
    return sender.postMessage(0);
  };
}
function useSetTimeout() {
return function () {
return setTimeout(flush, 1);
};
}
// Pre-allocated (callback, arg) pair buffer drained by flush(); 1000 slots
// cover typical bursts without growth (the array still grows if exceeded).
var queue$1 = new Array(1000);
// Drain the queue in FIFO order, clearing each slot so the references can be
// garbage-collected, then reset the write cursor.
function flush() {
  for (var i = 0; i < len; i += 2) {
    var callback = queue$1[i];
    var arg = queue$1[i + 1];
    callback(arg);
    queue$1[i] = undefined;
    queue$1[i + 1] = undefined;
  }
  len = 0;
}
// Try to load the optional vert.x environment. `require` is aliased through a
// local so bundlers don't statically resolve the dependency. Falls back to
// setTimeout scheduling when vert.x is unavailable.
function attemptVertex() {
  try {
    var req = require;
    var vertx = req('vertx');
    vertxNext = vertx.runOnLoop || vertx.runOnContext;
    return useVertxTimer();
  } catch (e) {
    return useSetTimeout();
  }
}
// Chosen scheduler for draining the asap queue; assigned exactly once below.
var scheduleFlush$1 = void 0;
// Decide what async method to use to triggering processing of queued callbacks:
if (isNode) {
scheduleFlush$1 = useNextTick();
} else if (BrowserMutationObserver) {
scheduleFlush$1 = useMutationObserver();
} else if (isWorker) {
scheduleFlush$1 = useMessageChannel();
} else if (browserWindow === undefined && typeof require === 'function') {
// No window but require is available: possibly running under vert.x.
scheduleFlush$1 = attemptVertex();
} else {
scheduleFlush$1 = useSetTimeout();
}
// Global-object detection (worker/browser `self` vs node `global`).
var platform = void 0;
/* global self */
if (typeof self === 'object') {
platform = self;
/* global global */
} else if (typeof global === 'object') {
platform = global;
} else {
throw new Error('no global: `self` or `global` found');
}
// Holder for the default-export object assembled below (Babel-generated name).
var _asap$cast$Promise$Ev;
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
// defaults
// Wire the shared config to the asap queue and a setTimeout-based `after`.
config.async = asap;
config.after = function (cb) {
return setTimeout(cb, 0);
};
// Deprecated alias for resolve, kept for backwards compatibility.
var cast = resolve$2;
// Schedule `callback(arg)` via the currently-configured async strategy.
var async = function (callback, arg) {
return config.async(callback, arg);
};
// Forward event subscription to the shared config event target.
function on() {
  config.on.apply(config, arguments);
}
// Forward event unsubscription to the shared config event target.
function off() {
  config.off.apply(config, arguments);
}
// Set up instrumentation through `window.__PROMISE_INSTRUMENTATION__`:
// enables the 'instrument' config flag and subscribes any provided callbacks.
if (typeof window !== 'undefined' && typeof window['__PROMISE_INSTRUMENTATION__'] === 'object') {
var callbacks = window['__PROMISE_INSTRUMENTATION__'];
configure('instrument', true);
for (var eventName in callbacks) {
if (callbacks.hasOwnProperty(eventName)) {
on(eventName, callbacks[eventName]);
}
}
}
// the default export here is for backwards compat:
// https://github.com/tildeio/rsvp.js/issues/434
// (`async` and `filter` are attached via _defineProperty because the Babel
// transform emitted them as late-assigned keys.)
var rsvp = (_asap$cast$Promise$Ev = {
asap: asap,
cast: cast,
Promise: Promise,
EventTarget: EventTarget,
all: all$1,
allSettled: allSettled,
race: race$1,
hash: hash,
hashSettled: hashSettled,
rethrow: rethrow,
defer: defer,
denodeify: denodeify,
configure: configure,
on: on,
off: off,
resolve: resolve$2,
reject: reject$2,
map: map
}, _defineProperty(_asap$cast$Promise$Ev, 'async', async), _defineProperty(_asap$cast$Promise$Ev, 'filter', filter), _asap$cast$Promise$Ev);
// Named exports mirror the keys of the default export object above.
exports['default'] = rsvp;
exports.asap = asap;
exports.cast = cast;
exports.Promise = Promise;
exports.EventTarget = EventTarget;
exports.all = all$1;
exports.allSettled = allSettled;
exports.race = race$1;
exports.hash = hash;
exports.hashSettled = hashSettled;
exports.rethrow = rethrow;
exports.defer = defer;
exports.denodeify = denodeify;
exports.configure = configure;
exports.on = on;
exports.off = off;
exports.resolve = resolve$2;
exports.reject = reject$2;
exports.map = map;
exports.async = async;
exports.filter = filter;
Object.defineProperty(exports, '__esModule', { value: true });
})));
//
// epub.js core utilities, attached to the global EPUBJS namespace.
var EPUBJS = EPUBJS || {};
EPUBJS.core = {};
// DOM nodeType constants, duplicated here for environments without Node.*.
var ELEMENT_NODE = 1;
var TEXT_NODE = 3;
var COMMENT_NODE = 8;
var DOCUMENT_NODE = 9;
//-- Get a element for an id
// Thin wrapper over document.getElementById.
EPUBJS.core.getEl = function(id) {
  var el = document.getElementById(id);
  return el;
};
//-- Get all elements for a class
// Thin wrapper over document.getElementsByClassName (returns a live collection).
EPUBJS.core.getEls = function(className) {
  var els = document.getElementsByClassName(className);
  return els;
};
// XHR helper returning an RSVP promise. `type` selects how the response is
// parsed ('xml', 'xhtml', 'html', 'json', 'blob', 'binary'); when omitted it
// is inferred from the url's file extension. Resolves with the parsed
// response, rejects with { message, stack } on any non-2xx status.
EPUBJS.core.request = function(url, type, withCredentials) {
var supportsURL = window.URL;
// Safari lacks responseType 'blob'; fall back to 'arraybuffer' and wrap below.
var BLOB_RESPONSE = supportsURL ? "blob" : "arraybuffer";
var deferred = new RSVP.defer();
var xhr = new XMLHttpRequest();
var uri;
//-- Check from PDF.js:
// https://github.com/mozilla/pdf.js/blob/master/web/compatibility.js
var xhrPrototype = XMLHttpRequest.prototype;
var handler = function() {
var r;
if (this.readyState != this.DONE) return;
if ((this.status === 200 || this.status === 0) && this.response) { // Android & Firefox reporting 0 for local & blob urls
if (type == 'xml'){
// If this.responseXML wasn't set, try to parse using a DOMParser from text
if(!this.responseXML) {
r = new DOMParser().parseFromString(this.response, "application/xml");
} else {
r = this.responseXML;
}
} else if (type == 'xhtml') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "application/xhtml+xml");
} else {
r = this.responseXML;
}
} else if (type == 'html') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "text/html");
} else {
r = this.responseXML;
}
} else if (type == 'json') {
r = JSON.parse(this.response);
} else if (type == 'blob') {
if (supportsURL) {
r = this.response;
} else {
//-- Safari doesn't support responseType blob, so create a blob from arraybuffer
r = new Blob([this.response]);
}
} else {
// Unknown/binary types: hand back the raw response.
r = this.response;
}
deferred.resolve(r);
} else {
deferred.reject({
message : this.response,
stack : new Error().stack
});
}
};
if (!('overrideMimeType' in xhrPrototype)) {
// IE10 might have response, but not overrideMimeType
Object.defineProperty(xhrPrototype, 'overrideMimeType', {
value: function xmlHttpRequestOverrideMimeType(mimeType) {}
});
}
xhr.onreadystatechange = handler;
xhr.open("GET", url, true);
if(withCredentials) {
xhr.withCredentials = true;
}
// If type isn't set, determine it from the file extension
if(!type) {
uri = EPUBJS.core.uri(url);
type = uri.extension;
// Normalize 'htm' to 'html' so both extensions hit the html branch.
type = {
'htm': 'html'
}[type] || type;
}
if(type == 'blob'){
xhr.responseType = BLOB_RESPONSE;
}
if(type == "json") {
xhr.setRequestHeader("Accept", "application/json");
}
if(type == 'xml') {
xhr.responseType = "document";
xhr.overrideMimeType('text/xml'); // for OPF parsing
}
if(type == 'xhtml') {
xhr.responseType = "document";
}
if(type == 'html') {
xhr.responseType = "document";
}
if(type == "binary") {
xhr.responseType = "arraybuffer";
}
xhr.send();
return deferred.promise;
};
// Convert an object's own enumerable values into an array; each value gets an
// `ident` property set to its original key. NOTE(review): values appear to be
// assumed to be objects — a primitive value would not retain `ident`.
EPUBJS.core.toArray = function(obj) {
  var result = [];
  for (var key in obj) {
    if (!obj.hasOwnProperty(key)) continue;
    var item = obj[key];
    item.ident = key;
    result.push(item);
  }
  return result;
};
//-- Parse the different parts of a url, returning a object
// Splits `url` into { protocol, host, path, origin, directory, base,
// filename, extension, fragment, href } (plus `search` when a query string
// is present). blob: URLs short-circuit with only protocol/base filled.
EPUBJS.core.uri = function(url){
  var uri = {
        protocol : '',
        host : '',
        path : '',
        origin : '',
        directory : '',
        base : '',
        filename : '',
        extension : '',
        fragment : '',
        href : url
      },
      blob = url.indexOf('blob:'),
      doubleSlash = url.indexOf('://'),
      search = url.indexOf('?'),
      fragment = url.indexOf("#"),
      withoutProtocol,
      dot,
      firstSlash;
  if(blob === 0) {
    uri.protocol = "blob";
    // Bug fix: was `url.indexOf(0, fragment)`, which returns a number, not a
    // string. The intent is the URL text before any fragment.
    uri.base = fragment != -1 ? url.slice(0, fragment) : url;
    return uri;
  }
  // Strip the fragment and query string before splitting the rest.
  if(fragment != -1) {
    uri.fragment = url.slice(fragment + 1);
    url = url.slice(0, fragment);
  }
  if(search != -1) {
    uri.search = url.slice(search + 1);
    url = url.slice(0, search);
    // (A stray `href = uri.href;` global-leaking assignment was removed here.)
  }
  if(doubleSlash != -1) {
    uri.protocol = url.slice(0, doubleSlash);
    withoutProtocol = url.slice(doubleSlash+3);
    firstSlash = withoutProtocol.indexOf('/');
    if(firstSlash === -1) {
      // Bug fix: host was copied from the (still empty) uri.path. With no
      // path component, the whole remainder is the host.
      uri.host = withoutProtocol;
      uri.path = "";
    } else {
      uri.host = withoutProtocol.slice(0, firstSlash);
      uri.path = withoutProtocol.slice(firstSlash);
    }
    uri.origin = uri.protocol + "://" + uri.host;
    uri.directory = EPUBJS.core.folder(uri.path);
    uri.base = uri.origin + uri.directory;
  } else {
    // Relative / protocol-less url: everything is path.
    uri.path = url;
    uri.directory = EPUBJS.core.folder(url);
    uri.base = uri.directory;
  }
  //-- Filename
  uri.filename = url.replace(uri.base, '');
  dot = uri.filename.lastIndexOf('.');
  if(dot != -1) {
    uri.extension = uri.filename.slice(dot+1);
  }
  return uri;
};
//-- Parse out the folder, will return everything before the last slash
// Returns '' when the url contains no '/' at all.
EPUBJS.core.folder = function(url){
  var lastSlash = url.lastIndexOf('/');
  // When lastSlash is -1, slice(0, 0) already yields '' — the previous dead
  // `if(lastSlash == -1) var folder = '';` branch (immediately overwritten)
  // has been removed.
  return url.slice(0, lastSlash + 1);
};
//-- https://github.com/ebidel/filer.js/blob/master/src/filer.js#L128
// Convert a data: URL into a Blob; handles both base64 and plain payloads.
EPUBJS.core.dataURLToBlob = function(dataURL) {
  var BASE64_MARKER = ';base64,';
  var parts, contentType, raw;
  if (dataURL.indexOf(BASE64_MARKER) == -1) {
    // Plain payload: text after the first comma, mime type after the colon.
    parts = dataURL.split(',');
    contentType = parts[0].split(':')[1];
    raw = parts[1];
    return new Blob([raw], {type: contentType});
  }
  // Base64 payload: decode into a typed array byte-by-byte.
  parts = dataURL.split(BASE64_MARKER);
  contentType = parts[0].split(':')[1];
  raw = window.atob(parts[1]);
  var rawLength = raw.length;
  var bytes = new Uint8Array(rawLength);
  for (var i = 0; i < rawLength; ++i) {
    bytes[i] = raw.charCodeAt(i);
  }
  return new Blob([bytes], {type: contentType});
};
//-- Load scripts async: http://stackoverflow.com/questions/7718935/load-scripts-asynchronously
// Append a <script> tag for `src` to `target` (default: document.body) and
// invoke `callback` once it loads. The `fired` flag guards against the
// callback double-firing across onload/onreadystatechange.
EPUBJS.core.addScript = function(src, callback, target) {
  var fired = false;
  var script = document.createElement('script');
  script.type = 'text/javascript';
  script.async = false;
  script.src = src;
  script.onload = script.onreadystatechange = function() {
    if (fired) return;
    if (this.readyState && this.readyState != 'complete') return;
    fired = true;
    if (callback) callback();
  };
  (target || document.body).appendChild(script);
};
// Load an array of scripts sequentially (each waits for the previous one),
// invoking `callback` after the last finishes.
EPUBJS.core.addScripts = function(srcArr, callback, target) {
  var total = srcArr.length;
  var curr = 0;
  var step = function(){
    curr++;
    if (total == curr) {
      if (callback) callback();
    } else {
      EPUBJS.core.addScript(srcArr[curr], step, target);
    }
  };
  EPUBJS.core.addScript(srcArr[curr], step, target);
};
// Append a stylesheet <link> for `src` to `target` (default: document.body)
// and invoke `callback` once it loads (guarded against double-fire).
EPUBJS.core.addCss = function(src, callback, target) {
  var fired = false;
  var link = document.createElement('link');
  link.type = 'text/css';
  link.rel = "stylesheet";
  link.href = src;
  link.onload = link.onreadystatechange = function() {
    if (fired) return;
    if (this.readyState && this.readyState != 'complete') return;
    fired = true;
    if (callback) callback();
  };
  (target || document.body).appendChild(link);
};
// Return the supported (possibly vendor-prefixed) form of a style property,
// e.g. 'transform' -> 'WebkitTransform'. Falls back to the unprefixed name
// when no variant is supported.
EPUBJS.core.prefixed = function(unprefixed) {
  // (An unused `prefixes` array of CSS-style '-webkit-' strings was removed.)
  var vendors = ["Webkit", "Moz", "O", "ms" ],
      upper = unprefixed[0].toUpperCase() + unprefixed.slice(1),
      length = vendors.length;
  // Unprefixed support wins outright.
  if (typeof(document.documentElement.style[unprefixed]) != 'undefined') {
    return unprefixed;
  }
  for ( var i=0; i < length; i++ ) {
    if (typeof(document.documentElement.style[vendors[i] + upper]) != 'undefined') {
      return vendors[i] + upper;
    }
  }
  return unprefixed;
};
// Resolve `path` against `base`: absolute urls (those with a host) pass
// through untouched; otherwise '..' segments pop folders off the base and the
// remaining segments are appended.
EPUBJS.core.resolveUrl = function(base, path) {
  var segments = [];
  var uri = EPUBJS.core.uri(path);
  var folders = base.split("/");
  if (uri.host) {
    return path;
  }
  // Drop the base's trailing filename/empty segment.
  folders.pop();
  path.split("/").forEach(function(piece){
    if (piece === "..") {
      folders.pop();
    } else {
      segments.push(piece);
    }
  });
  return folders.concat(segments).join("/");
};
// http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript
// Generate an RFC 4122 version-4 style UUID (time-seeded, Math.random based;
// not cryptographically secure).
EPUBJS.core.uuid = function() {
  var d = new Date().getTime();
  var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    var r = (d + Math.random()*16)%16 | 0;
    d = Math.floor(d/16);
    // Bug fix: the variant nibble ('y') must be 10xx, i.e. 8..b. The previous
    // mask (r&0x7|0x8) could also yield c..f, an invalid RFC 4122 variant.
    return (c=='x' ? r : (r&0x3|0x8)).toString(16);
  });
  return uuid;
};
// Fast quicksort insert for sorted array -- based on:
// http://stackoverflow.com/questions/1344500/efficient-way-to-insert-a-number-into-a-sorted-array-of-numbers
// Insert `item` into the already-sorted `array` in place; returns the index used.
EPUBJS.core.insert = function(item, array, compareFunction) {
  var position = EPUBJS.core.locationOf(item, array, compareFunction);
  array.splice(position, 0, item);
  return position;
};
// Binary-search the index at which `item` belongs in sorted `array` (used by
// insert). `compareFunction(a, b)` follows Array#sort semantics: negative
// when a < b, 0 when equal, positive when a > b.
EPUBJS.core.locationOf = function(item, array, compareFunction, _start, _end) {
  var start = _start || 0;
  var end = _end || array.length;
  var pivot = parseInt(start + (end - start) / 2);
  var compared;
  if(!compareFunction){
    compareFunction = function(a, b) {
      if(a > b) return 1;
      if(a < b) return -1;
      // Bug fix: was `if(a = b) return 0;` — an assignment, which returned
      // undefined for falsy equal values (0, ''). Equal values compare as 0.
      return 0;
    };
  }
  if(end-start <= 0) {
    return pivot;
  }
  compared = compareFunction(array[pivot], item);
  if(end-start === 1) {
    return compared > 0 ? pivot : pivot + 1;
  }
  if(compared === 0) {
    return pivot;
  }
  // Bug fix: was `compared === -1`, which misrouted custom comparators that
  // return other negative numbers into the left-half branch.
  if(compared < 0) {
    return EPUBJS.core.locationOf(item, array, compareFunction, pivot, end);
  } else{
    return EPUBJS.core.locationOf(item, array, compareFunction, start, pivot);
  }
};
// Binary-search for `item` in sorted `array`; returns its index or -1 when
// absent. `compareFunction(a, b)` follows Array#sort semantics.
EPUBJS.core.indexOfSorted = function(item, array, compareFunction, _start, _end) {
  var start = _start || 0;
  var end = _end || array.length;
  var pivot = parseInt(start + (end - start) / 2);
  var compared;
  if(!compareFunction){
    compareFunction = function(a, b) {
      if(a > b) return 1;
      if(a < b) return -1;
      // Bug fix: was `if(a = b) return 0;` — an assignment, which returned
      // undefined for falsy equal values (0, ''). Equal values compare as 0.
      return 0;
    };
  }
  if(end-start <= 0) {
    return -1; // Not found
  }
  compared = compareFunction(array[pivot], item);
  if(end-start === 1) {
    return compared === 0 ? pivot : -1;
  }
  if(compared === 0) {
    return pivot; // Found
  }
  // Bug fix: was `compared === -1`, which misrouted custom comparators that
  // return other negative numbers into the left-half branch.
  if(compared < 0) {
    return EPUBJS.core.indexOfSorted(item, array, compareFunction, pivot, end);
  } else{
    return EPUBJS.core.indexOfSorted(item, array, compareFunction, start, pivot);
  }
};
// Simple FIFO of named method calls, replayed later against `_scope` (or a
// per-task context). Returns { enqueue, dequeue, flush, clear, length }.
EPUBJS.core.queue = function(_scope){
  var tasks = [];
  var scope = _scope;
  // Add an item to the queue
  var enqueue = function(funcName, args, context) {
    tasks.push({
      "funcName" : funcName,
      "args" : args,
      "context" : context
    });
    return tasks;
  };
  // Run one item
  var dequeue = function(){
    if (!tasks.length) return;
    var task = tasks.shift();
    // Invoke on the task's own context when provided, else on the queue scope.
    scope[task.funcName].apply(task.context || scope, task.args);
  };
  // Run All
  var flush = function(){
    while (tasks.length) {
      dequeue();
    }
  };
  // Clear all items in wait
  var clear = function(){
    tasks = [];
  };
  var length = function(){
    return tasks.length;
  };
  return {
    "enqueue" : enqueue,
    "dequeue" : dequeue,
    "flush" : flush,
    "clear" : clear,
    "length" : length
  };
};
// From: https://code.google.com/p/fbug/source/browse/branches/firebug1.10/content/firebug/lib/xpath.js
/**
 * XPath locating an element: an id selector when the element has an id,
 * otherwise its positional path in the tree.
 */
EPUBJS.core.getElementXPath = function(element) {
	if (!element || !element.id) {
		return EPUBJS.core.getElementTreeXPath(element);
	}
	return '//*[@id="' + element.id + '"]';
};
// Build a positional XPath ("./div[2]/p[1]/text()[3]"-style) for `element`
// by walking up to the document root, counting same-named preceding siblings
// at each level. Text nodes are addressed with a text()[n] step first.
EPUBJS.core.getElementTreeXPath = function(element) {
	var paths = [];
	// XHTML documents need the xhtml: prefix on every step so the path works
	// with a namespace-aware evaluator (see EPUBJS.core.nsResolver).
	var isXhtml = (element.ownerDocument.documentElement.getAttribute('xmlns') === "http://www.w3.org/1999/xhtml");
	var index, nodeName, tagName, pathIndex;

	if(element.nodeType === Node.TEXT_NODE){
		// index = Array.prototype.indexOf.call(element.parentNode.childNodes, element) + 1;
		// 1-based position among the parent's text children (XPath is 1-based).
		index = EPUBJS.core.indexOfTextNode(element) + 1;

		paths.push("text()["+index+"]");
		element = element.parentNode;
	}

	// Use nodeName (instead of localName) so namespace prefix is included (if any).
	for (; element && element.nodeType == 1; element = element.parentNode)
	{
		index = 0;
		// Count preceding siblings with the same tag to get this step's [n].
		for (var sibling = element.previousSibling; sibling; sibling = sibling.previousSibling)
		{
			// Ignore document type declaration.
			if (sibling.nodeType == Node.DOCUMENT_TYPE_NODE) {
				continue;
			}
			if (sibling.nodeName == element.nodeName) {
				++index;
			}
		}
		nodeName = element.nodeName.toLowerCase();
		tagName = (isXhtml ? "xhtml:" + nodeName : nodeName);
		// Omit the positional predicate when this is the first such sibling.
		pathIndex = (index ? "[" + (index+1) + "]" : "");
		// Prepend: we are walking child -> root but the path reads root -> child.
		paths.splice(0, 0, tagName + pathIndex);
	}

	return paths.length ? "./" + paths.join("/") : null;
};
// Namespace resolver for document.evaluate(): maps the prefixes used in the
// reader's XPath expressions to their namespace URIs (null when unknown).
EPUBJS.core.nsResolver = function(prefix) {
	var namespaces = {
		'xhtml' : 'http://www.w3.org/1999/xhtml',
		'epub': 'http://www.idpf.org/2007/ops'
	};
	return namespaces[prefix] || null;
};
//https://stackoverflow.com/questions/13482352/xquery-looking-for-text-with-single-quote/13483496#13483496
// XPath 1.0 has no string escaping, so a literal that may contain both quote
// kinds is assembled with concat(): plain runs are single-quoted, and each
// quote character is wrapped in the opposite kind of quote.
EPUBJS.core.cleanStringForXpath = function(str) {
	var pieces = str.match(/[^'"]+|['"]/g).map(function(piece) {
		if (piece === "'") {
			return '"\'"'; // output "'"
		}
		if (piece === '"') {
			return "'\"'"; // output '"'
		}
		return "'" + piece + "'";
	});
	return "concat(''," + pieces.join(",") + ")";
};
// Position of `textNode` among its parent's child *text* nodes (0-based);
// non-text siblings are not counted.
// NOTE: this definition is overwritten by a later EPUBJS.core.indexOfTextNode
// further down in this file.
EPUBJS.core.indexOfTextNode = function(textNode){
	var parent = textNode.parentNode;
	var children = parent.childNodes;
	var sib;
	var index = -1;
	for (var i = 0; i < children.length; i++) {
		sib = children[i];
		if(sib.nodeType === Node.TEXT_NODE){
			index++;
		}
		if(sib == textNode) break;
	}

	return index;
};
// Underscore
// Fill in undefined properties of `obj` from each following source object,
// in order; properties already set on `obj` win. Returns `obj`.
EPUBJS.core.defaults = function(obj) {
	for (var argIndex = 1; argIndex < arguments.length; argIndex++) {
		var source = arguments[argIndex];
		for (var prop in source) {
			if (obj[prop] === void 0) obj[prop] = source[prop];
		}
	}
	return obj;
};
// Copy every own property of each source onto `target`, preserving property
// descriptors (so getters/setters survive). Falsy sources are skipped.
EPUBJS.core.extend = function(target) {
	var sources = Array.prototype.slice.call(arguments, 1);
	sources.forEach(function (source) {
		if(!source) return;
		Object.getOwnPropertyNames(source).forEach(function(propName) {
			Object.defineProperty(target, propName,
				Object.getOwnPropertyDescriptor(source, propName));
		});
	});
	return target;
};
// Shallow copy: arrays are sliced, everything else has its own properties
// copied onto a fresh object.
EPUBJS.core.clone = function(obj) {
	if (EPUBJS.core.isArray(obj)) {
		return obj.slice();
	}
	return EPUBJS.core.extend({}, obj);
};
// True when `obj` is a DOM element node (nodeType 1).
EPUBJS.core.isElement = function(obj) {
	return Boolean(obj && obj.nodeType == 1);
};
// True for any value parseFloat reads as a finite number ("1.5" included).
EPUBJS.core.isNumber = function(n) {
	return isFinite(n) && !isNaN(parseFloat(n));
};
// True for both string primitives and String objects.
EPUBJS.core.isString = function(str) {
	return (str instanceof String || typeof str === 'string');
};
// Native Array.isArray when the runtime has it; otherwise the classic
// Object.prototype.toString fallback.
EPUBJS.core.isArray = Array.isArray || function(candidate) {
	return Object.prototype.toString.call(candidate) === '[object Array]';
};
// Lodash
// Own enumerable property values of `object`; [] for a falsy argument.
EPUBJS.core.values = function(object) {
	if(!object) return [];
	return Object.keys(object).map(function(key) {
		return object[key];
	});
};
// Position of `node` among its siblings, counting only siblings whose
// nodeType equals `typeId` (0-based; -1 when no such sibling up to `node`).
EPUBJS.core.indexOfNode = function(node, typeId) {
	var siblings = node.parentNode.childNodes;
	var position = -1;
	for (var i = 0; i < siblings.length; i++) {
		var candidate = siblings[i];
		if (candidate.nodeType === typeId) {
			position++;
		}
		if (candidate == node) break;
	}
	return position;
}
// Position of `textNode` among its parent's child text nodes.
// Bug fix: referenced a bare global `TEXT_NODE`, which is not defined
// anywhere in this file and threw a ReferenceError at call time; the
// constant lives on the DOM `Node` interface (value 3).
EPUBJS.core.indexOfTextNode = function(textNode) {
	return EPUBJS.core.indexOfNode(textNode, Node.TEXT_NODE);
}
// Position of `elementNode` among its parent's child element nodes.
// Bug fix: referenced a bare global `ELEMENT_NODE`, which is not defined
// anywhere in this file and threw a ReferenceError at call time; the
// constant lives on the DOM `Node` interface (value 1).
EPUBJS.core.indexOfElementNode = function(elementNode) {
	return EPUBJS.core.indexOfNode(elementNode, Node.ELEMENT_NODE);
}
// Root namespace for the reader; reuse the existing EPUBJS global when
// epub.js has already been loaded on the page.
var EPUBJS = EPUBJS || {};
EPUBJS.reader = {};
EPUBJS.reader.plugins = {}; //-- Attach extra Controllers as plugins (like search?)
// Export the ePubReader(path, options) factory to the environments we may be
// loaded in: as a window global, an AMD module, or a CommonJS module.
(function(root, $) {

	var previousReader = root.ePubReader || {};

	var ePubReader = root.ePubReader = function(path, options) {
		return new EPUBJS.Reader(path, options);
	};

	//exports to multiple environments
	if (typeof define === 'function' && define.amd) {
		//AMD
		// Bug fix: this used to `return Reader;`, a name that does not exist in
		// this scope, so requiring the AMD module threw a ReferenceError.
		// Export the same factory the other environments get.
		define(function(){ return ePubReader; });
	} else if (typeof module != "undefined" && module.exports) {
		//Node
		module.exports = ePubReader;
	}

})(window, jQuery);
/**
 * Reader: wires an epub.js book and rendition to the reader UI.
 *
 * @param {string} bookPath  URL/path of the epub to open (default for
 *                           settings.bookPath).
 * @param {object} _options  Optional settings; see the defaults below.
 *                           URL query-string parameters override both.
 */
EPUBJS.Reader = function(bookPath, _options) {
	var reader = this;
	var book;
	var plugin;
	var $viewer = $("#viewer");
	var search = window.location.search;
	var parameters;

	// Caller options win over these defaults (EPUBJS.core.defaults only fills
	// in properties the caller left undefined).
	this.settings = EPUBJS.core.defaults(_options || {}, {
		bookPath : bookPath,
		restore : false,
		reload : false,
		bookmarks : undefined,
		annotations : undefined,
		contained : undefined,
		bookKey : undefined,
		styles : undefined,
		sidebarReflow: false,
		generatePagination: false,
		history: true
	});

	// Overide options with search parameters
	if(search) {
		parameters = search.slice(1).split("&");
		parameters.forEach(function(p){
			var split = p.split("=");
			var name = split[0];
			var value = split[1] || '';
			reader.settings[name] = decodeURIComponent(value);
		});
	}

	this.setBookKey(this.settings.bookPath); //-- This could be username + path or any unique string

	// Restore previously persisted settings (bookmarks, styles, last CFI)
	// before anything below reads them.
	if(this.settings.restore && this.isSaved()) {
		this.applySavedSettings();
	}

	this.settings.styles = this.settings.styles || {
		fontSize : "100%"
	};

	this.book = book = new ePub(this.settings.bookPath, this.settings);

	this.offline = false;
	this.sidebarOpen = false;
	if(!this.settings.bookmarks) {
		this.settings.bookmarks = [];
	}

	if(!this.settings.annotations) {
		this.settings.annotations = [];
	}

	if(this.settings.generatePagination) {
		book.generatePagination($viewer.width(), $viewer.height());
	}

	this.rendition = book.renderTo("viewer", {
		ignoreClass: "annotator-hl",
		width: "100%",
		height: "100%"
	});

	// Resume at the saved location when one was restored above.
	if(this.settings.previousLocationCfi) {
		this.displayed = this.rendition.display(this.settings.previousLocationCfi);
	} else {
		this.displayed = this.rendition.display();
	}

	// Controllers are constructed with `this` bound to the reader once the
	// book's metadata is ready.
	book.ready.then(function () {
		reader.ReaderController = EPUBJS.reader.ReaderController.call(reader, book);
		reader.SettingsController = EPUBJS.reader.SettingsController.call(reader, book);
		reader.ControlsController = EPUBJS.reader.ControlsController.call(reader, book);
		reader.SidebarController = EPUBJS.reader.SidebarController.call(reader, book);
		reader.BookmarksController = EPUBJS.reader.BookmarksController.call(reader, book);
		reader.NotesController = EPUBJS.reader.NotesController.call(reader, book);

		window.addEventListener("hashchange", this.hashChanged.bind(this), false);

		document.addEventListener('keydown', this.adjustFontSize.bind(this), false);

		this.rendition.on("keydown", this.adjustFontSize.bind(this));
		this.rendition.on("keydown", reader.ReaderController.arrowKeys.bind(this));

		this.rendition.on("selected", this.selectedRange.bind(this));
	}.bind(this)).then(function() {
		reader.ReaderController.hideLoader();
	}.bind(this));

	// Call Plugins
	for(plugin in EPUBJS.reader.plugins) {
		if(EPUBJS.reader.plugins.hasOwnProperty(plugin)) {
			reader[plugin] = EPUBJS.reader.plugins[plugin].call(reader, book);
		}
	}

	book.loaded.metadata.then(function(meta) {
		reader.MetaController = EPUBJS.reader.MetaController.call(reader, meta);
	});

	book.loaded.navigation.then(function(navigation) {
		reader.TocController = EPUBJS.reader.TocController.call(reader, navigation);
	});

	// Persist settings on page unload (see unload/saveSettings).
	window.addEventListener("beforeunload", this.unload.bind(this), false);

	return this;
};
// Keyboard shortcuts for font size: Ctrl/Cmd "+" grows, Ctrl/Cmd "-" shrinks
// (by 2% per press), Ctrl/Cmd "0" resets to 100%.
EPUBJS.Reader.prototype.adjustFontSize = function(e) {
	var PLUS = 187;
	var MINUS = 189;
	var ZERO = 48;
	var STEP = 2; // percent per keypress
	var withModifier = (e.ctrlKey || e.metaKey);
	var fontSize;

	if(!this.settings.styles) return;

	if(!this.settings.styles.fontSize) {
		this.settings.styles.fontSize = "100%";
	}

	// Strip the trailing "%" to get the numeric size.
	fontSize = parseInt(this.settings.styles.fontSize.slice(0, -1));

	if(withModifier && e.keyCode == PLUS) {
		e.preventDefault();
		this.book.setStyle("fontSize", (fontSize + STEP) + "%");
	}

	if(withModifier && e.keyCode == MINUS){
		e.preventDefault();
		this.book.setStyle("fontSize", (fontSize - STEP) + "%");
	}

	if(withModifier && e.keyCode == ZERO){
		e.preventDefault();
		this.book.setStyle("fontSize", "100%");
	}
};
// Remember `cfi` as a bookmark (no-op when already bookmarked) and fire
// "reader:bookmarked" so the UI can update.
EPUBJS.Reader.prototype.addBookmark = function(cfi) {
	if(this.isBookmarked(cfi) > -1) return;
	this.settings.bookmarks.push(cfi);
	this.trigger("reader:bookmarked", cfi);
};
// Forget the bookmark for `cfi` (no-op when absent) and fire
// "reader:unbookmarked" with the removed index.
EPUBJS.Reader.prototype.removeBookmark = function(cfi) {
	var position = this.isBookmarked(cfi);
	if(position === -1) return;
	this.settings.bookmarks.splice(position, 1);
	this.trigger("reader:unbookmarked", position);
};
// Index of `cfi` in the bookmark list, or -1 when it is not bookmarked.
EPUBJS.Reader.prototype.isBookmarked = function(cfi) {
	return this.settings.bookmarks.indexOf(cfi);
};
/*
EPUBJS.Reader.prototype.searchBookmarked = function(cfi) {
var bookmarks = this.settings.bookmarks,
len = bookmarks.length,
i;
for(i = 0; i < len; i++) {
if (bookmarks[i]['cfi'] === cfi) return i;
}
return -1;
};
*/
// Drop every bookmark. (Does not fire "reader:unbookmarked".)
EPUBJS.Reader.prototype.clearBookmarks = function() {
	this.settings.bookmarks = [];
};
//-- Notes
// Store an annotation object ({annotatedAt, anchor, body}).
EPUBJS.Reader.prototype.addNote = function(note) {
	this.settings.annotations.push(note);
};
// Remove `note` from the annotation list (no-op when absent).
EPUBJS.Reader.prototype.removeNote = function(note) {
	var index = this.settings.annotations.indexOf(note);
	if( index === -1 ) return;
	// splice, not `delete`: delete left a sparse hole in the array, which
	// serialized as null in the saved settings and skewed the 1-based marker
	// numbering derived from annotations.indexOf elsewhere. This also matches
	// how removeBookmark removes entries.
	this.settings.annotations.splice(index, 1);
};
// Drop every stored annotation.
EPUBJS.Reader.prototype.clearNotes = function() {
	this.settings.annotations = [];
};
//-- Settings
// Lazily build the localStorage key for this book from the reader version,
// host and `identifier`; a pre-set settings.bookKey is left untouched.
EPUBJS.Reader.prototype.setBookKey = function(identifier){
	if(!this.settings.bookKey) {
		this.settings.bookKey = "epubjsreader:" + EPUBJS.VERSION +
			":" + window.location.host + ":" + identifier;
	}
	return this.settings.bookKey;
};
//-- Checks if the book setting can be retrieved from localStorage
// Returns true when settings for this book's key were previously persisted.
// (`bookPath` is accepted but unused; the key comes from settings.bookKey.)
EPUBJS.Reader.prototype.isSaved = function(bookPath) {
	var storedSettings;

	if(!localStorage) {
		return false;
	}

	storedSettings = localStorage.getItem(this.settings.bookKey);

	if(storedSettings === null) {
		return false;
	} else {
		return true;
	}
};
// Delete this book's persisted settings; false when localStorage is missing.
EPUBJS.Reader.prototype.removeSavedSettings = function() {
	if(!localStorage) {
		return false;
	}
	localStorage.removeItem(this.settings.bookKey);
};
// Restore settings persisted by saveSettings(). Styles are merged so values
// set for this session win; everything else is filled in from the stored
// copy. Returns true only when stored settings were found and applied.
EPUBJS.Reader.prototype.applySavedSettings = function() {
	var stored;

	if(!localStorage) {
		return false;
	}

	try {
		stored = JSON.parse(localStorage.getItem(this.settings.bookKey));
	} catch (e) { // parsing error of localStorage
		return false;
	}

	if(!stored) {
		return false;
	}

	// Merge styles
	if(stored.styles) {
		this.settings.styles = EPUBJS.core.defaults(this.settings.styles || {}, stored.styles);
	}
	// Merge the rest
	this.settings = EPUBJS.core.defaults(this.settings, stored);
	return true;
};
// Persist the settings object (including the current reading position as
// previousLocationCfi) under this book's localStorage key.
EPUBJS.Reader.prototype.saveSettings = function(){
	if(this.book) {
		this.settings.previousLocationCfi = this.rendition.currentLocation().start.cfi;
	}

	if(!localStorage) {
		return false;
	}

	localStorage.setItem(this.settings.bookKey, JSON.stringify(this.settings));
};
// beforeunload handler: persist state when the restore option is on.
EPUBJS.Reader.prototype.unload = function(){
	if(this.settings.restore && localStorage) {
		this.saveSettings();
	}
};
// Jump to the location encoded in the URL hash (a CFI or href).
EPUBJS.Reader.prototype.hashChanged = function(){
	this.rendition.display(window.location.hash.slice(1));
};
// Record a text selection's CFI range and mirror it into the URL hash /
// browser history when history tracking is enabled.
EPUBJS.Reader.prototype.selectedRange = function(cfiRange){
	var cfiFragment = "#"+cfiRange;

	// Update the History Location
	if(this.settings.history && window.location.hash != cfiFragment) {
		// Add CFI fragment to the history
		history.pushState({}, '', cfiFragment);
		this.currentLocationCfi = cfiRange;
	}
};
//-- Enable binding events to reader
// Mixes RSVP's on/off/trigger event API into every Reader instance.
RSVP.EventTarget.mixin(EPUBJS.Reader.prototype);
// Sidebar panel listing the user's bookmarks. Called with `this` bound to
// the Reader; renders one list item per saved CFI and keeps the list in sync
// via the reader's bookmark events. Returns { show, hide }.
EPUBJS.reader.BookmarksController = function() {
	var reader = this;
	var book = this.book;
	var rendition = this.rendition;

	var $bookmarks = $("#bookmarksView"),
		$list = $bookmarks.find("#bookmarks");

	var docfrag = document.createDocumentFragment();

	var show = function() {
		$bookmarks.show();
	};

	var hide = function() {
		$bookmarks.hide();
	};

	// Monotonic suffix so each <li> gets a unique "bookmark-N" id.
	// NOTE(review): "reader:unbookmarked" passes an *array* index, so the
	// #bookmark-N lookup below can target the wrong item once any bookmark
	// has been removed -- confirm before relying on it.
	var counter = 0;

	// Build one list item for a bookmark CFI; the label is the matching TOC
	// entry when the CFI's spine position has one, otherwise the raw CFI.
	var createBookmarkItem = function(cfi) {
		var listitem = document.createElement("li"),
				link = document.createElement("a");

		listitem.id = "bookmark-"+counter;
		listitem.classList.add('list_item');

		var spineItem = book.spine.get(cfi);
		var tocItem;
		if (spineItem.index in book.navigation.toc) {
			tocItem = book.navigation.toc[spineItem.index];
			link.textContent = tocItem.label;
		} else {
			link.textContent = cfi;
		}

		link.href = cfi;

		link.classList.add('bookmark_link');

		// Navigate to the bookmarked location on click.
		link.addEventListener("click", function(event){
			var cfi = this.getAttribute('href');
			rendition.display(cfi);
			event.preventDefault();
		}, false);

		listitem.appendChild(link);

		counter++;

		return listitem;
	};

	// Populate from the bookmarks restored out of settings.
	this.settings.bookmarks.forEach(function(cfi) {
		var bookmark = createBookmarkItem(cfi);
		docfrag.appendChild(bookmark);
	});

	$list.append(docfrag);

	// Keep the list in sync with add/remove events fired by the Reader.
	this.on("reader:bookmarked", function(cfi) {
		var item = createBookmarkItem(cfi);
		$list.append(item);
	});

	this.on("reader:unbookmarked", function(index) {
		var $item = $("#bookmark-"+index);
		$item.remove();
	});

	return {
		"show" : show,
		"hide" : hide
	};
};
// Toolbar wiring: sidebar toggle, fullscreen, settings and bookmark buttons;
// also mirrors the current location into the URL hash on relocation.
// `this` is the Reader. Returns an empty object (no public API).
EPUBJS.reader.ControlsController = function(book) {
	var reader = this;
	var rendition = this.rendition;

	var $store = $("#store"),
			$fullscreen = $("#fullscreen"),
			$fullscreenicon = $("#fullscreenicon"),
			$cancelfullscreenicon = $("#cancelfullscreenicon"),
			$slider = $("#slider"),
			$main = $("#main"),
			$sidebar = $("#sidebar"),
			$settings = $("#setting"),
			$bookmark = $("#bookmark");

	/*
	var goOnline = function() {
		reader.offline = false;
		// $store.attr("src", $icon.data("save"));
	};

	var goOffline = function() {
		reader.offline = true;
		// $store.attr("src", $icon.data("saved"));
	};

	book.on("book:online", goOnline);
	book.on("book:offline", goOffline);
	*/

	// Bug fix: this declaration used to sit inside the comment block above,
	// so the fullscreenchange handler below assigned an implicit global.
	var fullscreen = false;

	// Open/close the sidebar and swap the toggle icon.
	$slider.on("click", function () {
		if(reader.sidebarOpen) {
			reader.SidebarController.hide();
			$slider.addClass("icon-menu");
			$slider.removeClass("icon-right");
		} else {
			reader.SidebarController.show();
			$slider.addClass("icon-right");
			$slider.removeClass("icon-menu");
		}
	});

	// Fullscreen support is optional: only wired when screenfull.js is loaded.
	if(typeof screenfull !== 'undefined') {
		$fullscreen.on("click", function() {
			screenfull.toggle($('#container')[0]);
		});
		if(screenfull.raw) {
			document.addEventListener(screenfull.raw.fullscreenchange, function() {
				fullscreen = screenfull.isFullscreen;
				// Swap the icon to match the new fullscreen state.
				if(fullscreen) {
					$fullscreen
						.addClass("icon-resize-small")
						.removeClass("icon-resize-full");
				} else {
					$fullscreen
						.addClass("icon-resize-full")
						.removeClass("icon-resize-small");
				}
			});
		}
	}

	$settings.on("click", function() {
		reader.SettingsController.show();
	});

	// Toggle a bookmark at the current location.
	$bookmark.on("click", function() {
		var cfi = reader.rendition.currentLocation().start.cfi;
		var bookmarked = reader.isBookmarked(cfi);

		if(bookmarked === -1) { //-- Add bookmark
			reader.addBookmark(cfi);
			$bookmark
				.addClass("icon-bookmark")
				.removeClass("icon-bookmark-empty");
		} else { //-- Remove Bookmark
			reader.removeBookmark(cfi);
			$bookmark
				.removeClass("icon-bookmark")
				.addClass("icon-bookmark-empty");
		}

	});

	// Track relocation: refresh the bookmark icon and push the CFI into the
	// URL hash / history when history tracking is on.
	rendition.on('relocated', function(location){
		var cfi = location.start.cfi;
		var cfiFragment = "#" + cfi;

		//-- Check if bookmarked
		var bookmarked = reader.isBookmarked(cfi);
		if(bookmarked === -1) { //-- Not bookmarked
			$bookmark
				.removeClass("icon-bookmark")
				.addClass("icon-bookmark-empty");
		} else { //-- Bookmarked
			$bookmark
				.addClass("icon-bookmark")
				.removeClass("icon-bookmark-empty");
		}

		reader.currentLocationCfi = cfi;

		// Update the History Location
		if(reader.settings.history &&
				window.location.hash != cfiFragment) {
			// Add CFI fragment to the history
			history.pushState({}, '', cfiFragment);
		}
	});

	return {
	};
};
// Show the book's title and author in the header and set the page title.
EPUBJS.reader.MetaController = function(meta) {
	var title = meta.title;
	var author = meta.creator;

	var $title = $("#book-title"),
	    $author = $("#chapter-title"),
	    $dash = $("#title-seperator");

	document.title = title+" – "+author;

	$title.html(title);
	$author.html(author);
	$dash.show();
};
// Notes panel: lets the user type a note, anchor it to a clicked point in the
// book, lists existing notes, and places superscript markers with hover
// popups inside the rendered chapter. `this` is the Reader.
// Returns { show, hide }.
EPUBJS.reader.NotesController = function() {
	var book = this.book;
	var rendition = this.rendition;
	var reader = this;
	var $notesView = $("#notesView");
	var $notes = $("#notes");
	var $text = $("#note-text");
	var $anchor = $("#note-anchor");
	var annotations = reader.settings.annotations;
	var renderer = book.renderer;
	var popups = [];     // popup element per marker id, lazily created
	var epubcfi = new ePub.CFI();

	var show = function() {
		$notesView.show();
	};

	var hide = function() {
		$notesView.hide();
	}

	// Click handler armed by the "Attach" button: resolves the clicked point
	// to a text node + offset, extends to the end of the sentence, builds a
	// CFI there, stores the note and places its marker.
	var insertAtPoint = function(e) {
		var range;
		var textNode;
		var offset;
		var doc = book.renderer.doc;
		var cfi;
		var annotation;

		// standard
		if (doc.caretPositionFromPoint) {
			range = doc.caretPositionFromPoint(e.clientX, e.clientY);
			textNode = range.offsetNode;
			offset = range.offset;
		// WebKit
		} else if (doc.caretRangeFromPoint) {
			range = doc.caretRangeFromPoint(e.clientX, e.clientY);
			textNode = range.startContainer;
			offset = range.startOffset;
		}

		// If the hit was an element node, fall back to its first text child.
		if (textNode.nodeType !== 3) {
			for (var i=0; i < textNode.childNodes.length; i++) {
				if (textNode.childNodes[i].nodeType == 3) {
					textNode = textNode.childNodes[i];
					break;
				}
			}
		}

		// Find the end of the sentance
		offset = textNode.textContent.indexOf(".", offset);
		if(offset === -1){
			offset = textNode.length; // Last item
		} else {
			offset += 1; // After the period
		}

		cfi = epubcfi.generateCfiFromTextNode(textNode, offset, book.renderer.currentChapter.cfiBase);

		annotation = {
			annotatedAt: new Date(),
			anchor: cfi,
			body: $text.val()
		}

		// add to list
		reader.addNote(annotation);

		// attach
		addAnnotation(annotation);
		placeMarker(annotation);

		// clear
		$text.val('');
		$anchor.text("Attach");
		$text.prop("disabled", false);

		rendition.off("click", insertAtPoint);

	};

	// Append one note, with a "context" link back to its anchor, to the list.
	var addAnnotation = function(annotation){
		var note = document.createElement("li");
		var link = document.createElement("a");

		note.innerHTML = annotation.body;
		// note.setAttribute("ref", annotation.anchor);
		link.innerHTML = " context »";
		link.href = "#"+annotation.anchor;
		link.onclick = function(){
			rendition.display(annotation.anchor);
			return false;
		};

		note.appendChild(link);
		$notes.append(note);

	};

	// Insert a numbered superscript marker at the annotation's CFI inside the
	// rendered chapter and wire up its hover popup.
	var placeMarker = function(annotation){
		var doc = book.renderer.doc;
		var marker = document.createElement("span");
		var mark = document.createElement("a");
		marker.classList.add("footnotesuperscript", "reader_generated");

		marker.style.verticalAlign = "super";
		marker.style.fontSize = ".75em";
		// marker.style.position = "relative";
		marker.style.lineHeight = "1em";

		// mark.style.display = "inline-block";
		mark.style.padding = "2px";
		mark.style.backgroundColor = "#fffa96";
		mark.style.borderRadius = "5px";
		mark.style.cursor = "pointer";

		marker.id = "note-"+EPUBJS.core.uuid();
		mark.innerHTML = annotations.indexOf(annotation) + 1 + "[Reader]";

		marker.appendChild(mark);
		epubcfi.addMarker(annotation.anchor, doc, marker);

		markerEvents(marker, annotation.body);
	}

	// Hover/click behaviour for a placed marker: a positioned popup showing
	// the note text, and a click-through that opens the notes sidebar.
	var markerEvents = function(item, txt){
		var id = item.id;

		// Create (once) and position the popup next to the marker.
		var showPop = function(){
			var poppos,
					iheight = renderer.height,
					iwidth = renderer.width,
			 		tip,
					pop,
					maxHeight = 225,
					itemRect,
					left,
					top,
					pos;

			//-- create a popup with endnote inside of it
			// NOTE(review): `pop_content` and `popRect` below are assigned
			// without `var` and leak as implicit globals -- confirm and scope
			// them locally when this is next touched.
			if(!popups[id]) {
				popups[id] = document.createElement("div");
				popups[id].setAttribute("class", "popup");

				pop_content = document.createElement("div");

				popups[id].appendChild(pop_content);

				pop_content.innerHTML = txt;
				pop_content.setAttribute("class", "pop_content");

				renderer.render.document.body.appendChild(popups[id]);

				//-- TODO: will these leak memory? - Fred
				popups[id].addEventListener("mouseover", onPop, false);
				popups[id].addEventListener("mouseout", offPop, false);

				//-- Add hide on page change
				rendition.on("locationChanged", hidePop, this);
				rendition.on("locationChanged", offPop, this);
				// chapter.book.on("renderer:chapterDestroy", hidePop, this);
			}

			pop = popups[id];

			//-- get location of item
			itemRect = item.getBoundingClientRect();
			left = itemRect.left;
			top = itemRect.top;

			//-- show the popup
			pop.classList.add("show");

			//-- locations of popup
			popRect = pop.getBoundingClientRect();

			//-- position the popup
			pop.style.left = left - popRect.width / 2 + "px";
			pop.style.top = top + "px";


			//-- Adjust max height
			if(maxHeight > iheight / 2.5) {
				maxHeight = iheight / 2.5;
				pop_content.style.maxHeight = maxHeight + "px";
			}

			//-- switch above / below
			if(popRect.height + top >= iheight - 25) {
				pop.style.top = top - popRect.height  + "px";
				pop.classList.add("above");
			}else{
				pop.classList.remove("above");
			}

			//-- switch left
			if(left - popRect.width <= 0) {
				pop.style.left = left + "px";
				pop.classList.add("left");
			}else{
				pop.classList.remove("left");
			}

			//-- switch right
			if(left + popRect.width / 2 >= iwidth) {
				//-- TEMP MOVE: 300
				pop.style.left = left - 300 + "px";

				popRect = pop.getBoundingClientRect();
				pop.style.left = left - popRect.width + "px";
				//-- switch above / below again
				if(popRect.height + top >= iheight - 25) {
					pop.style.top = top - popRect.height  + "px";
					pop.classList.add("above");
				}else{
					pop.classList.remove("above");
				}

				pop.classList.add("right");
			}else{
				pop.classList.remove("right");
			}

		}

		var onPop = function(){
			popups[id].classList.add("on");
		}

		var offPop = function(){
			popups[id].classList.remove("on");
		}

		// Hide shortly after mouseout so a quick move into the popup keeps it open.
		var hidePop = function(){
			setTimeout(function(){
				popups[id].classList.remove("show");
			}, 100);
		}

		var openSidebar = function(){
			reader.ReaderController.slideOut();
			show();
		};

		item.addEventListener("mouseover", showPop, false);
		item.addEventListener("mouseout", hidePop, false);
		item.addEventListener("click", openSidebar, false);

	}

	// "Attach" arms click-to-anchor mode; the next click in the book places
	// the note (see insertAtPoint, which also restores this button).
	$anchor.on("click", function(e){

		$anchor.text("Cancel");
		$text.prop("disabled", "true");
		// listen for selection
		rendition.on("click", insertAtPoint);

	});

	// List the annotations restored from settings.
	annotations.forEach(function(note) {
		addAnnotation(note);
	});

	/*
	renderer.registerHook("beforeChapterDisplay", function(callback, renderer){
		var chapter = renderer.currentChapter;
		annotations.forEach(function(note) {
			var cfi = epubcfi.parse(note.anchor);
			if(cfi.spinePos === chapter.spinePos) {
				try {
					placeMarker(note);
				} catch(e) {
					console.log("anchoring failed", note.anchor);
				}
			}
		});
		callback();
	}, true);
	*/

	return {
		"show" : show,
		"hide" : hide
	};
};
// Main reading-area controller: loader and spread-divider visibility,
// sliding the content when the sidebar opens/closes, and prev/next
// navigation via buttons and arrow keys. `this` is the Reader.
EPUBJS.reader.ReaderController = function(book) {
	var $main = $("#main"),
			$divider = $("#divider"),
			$loader = $("#loader"),
			$next = $("#next"),
			$prev = $("#prev");
	var reader = this;
	// NOTE(review): shadows the `book` parameter with this.book (the Reader
	// passes its own book in, so in practice these are the same object).
	var book = this.book;
	var rendition = this.rendition;

	// Restore the full-width layout after the sidebar closes; in reflow mode
	// the rendition is resized once the CSS transition finishes.
	var slideIn = function() {
		var currentPosition = rendition.currentLocation().start.cfi;
		if (reader.settings.sidebarReflow){
			$main.removeClass('single');
			$main.one("transitionend", function(){
				rendition.resize();
			});
		} else {
			$main.removeClass("closed");
		}
	};

	// Make room for the sidebar (mirror image of slideIn).
	var slideOut = function() {
		var location = rendition.currentLocation();
		if (!location) {
			return;
		}
		var currentPosition = location.start.cfi;
		if (reader.settings.sidebarReflow){
			$main.addClass('single');
			$main.one("transitionend", function(){
				rendition.resize();
			});
		} else {
			$main.addClass("closed");
		}
	};

	var showLoader = function() {
		$loader.show();
		hideDivider();
	};

	var hideLoader = function() {
		$loader.hide();

		//-- If the book is using spreads, show the divider
		// if(book.settings.spreads) {
		// 	showDivider();
		// }
	};

	var showDivider = function() {
		$divider.addClass("show");
	};

	var hideDivider = function() {
		$divider.removeClass("show");
	};

	// NOTE(review): keylock is set during the 100ms button flash but never
	// consulted, so rapid keypresses are not actually throttled.
	var keylock = false;

	// Left/right arrow navigation; direction is flipped for RTL books.
	// Flashes the corresponding on-screen button for feedback.
	var arrowKeys = function(e) {
		if(e.keyCode == 37) {

			if(book.package.metadata.direction === "rtl") {
				rendition.next();
			} else {
				rendition.prev();
			}

			$prev.addClass("active");

			keylock = true;
			setTimeout(function(){
				keylock = false;
				$prev.removeClass("active");
			}, 100);

			 e.preventDefault();
		}
		if(e.keyCode == 39) {

			if(book.package.metadata.direction === "rtl") {
				rendition.prev();
			} else {
				rendition.next();
			}

			$next.addClass("active");

			keylock = true;
			setTimeout(function(){
				keylock = false;
				$next.removeClass("active");
			}, 100);

			 e.preventDefault();
		}
	}

	document.addEventListener('keydown', arrowKeys, false);

	$next.on("click", function(e){

		if(book.package.metadata.direction === "rtl") {
			rendition.prev();
		} else {
			rendition.next();
		}

		e.preventDefault();
	});

	$prev.on("click", function(e){

		if(book.package.metadata.direction === "rtl") {
			rendition.next();
		} else {
			rendition.prev();
		}

		e.preventDefault();
	});

	// Show the page divider only when the layout is a two-page spread.
	rendition.on("layout", function(props){
		if(props.spread === true) {
			showDivider();
		} else {
			hideDivider();
		}
	});

	// Grey out prev/next at the very start/end of the book.
	rendition.on('relocated', function(location){
		if (location.atStart) {
			$prev.addClass("disabled");
		}
		if (location.atEnd) {
			$next.addClass("disabled");
		}
	});

	return {
		"slideOut" : slideOut,
		"slideIn"  : slideIn,
		"showLoader" : showLoader,
		"hideLoader" : hideLoader,
		"showDivider" : showDivider,
		"hideDivider" : hideDivider,
		"arrowKeys" : arrowKeys
	};

};
// Settings modal: toggles the "md-show" class on the modal, wires the
// sidebar-reflow checkbox, and closes on the X or the overlay.
// `this` is the Reader. Returns { show, hide }.
EPUBJS.reader.SettingsController = function() {
	var book = this.book;
	var reader = this;

	var $settings = $("#settings-modal"),
	    $overlay = $(".overlay");

	function show() {
		$settings.addClass("md-show");
	}

	function hide() {
		$settings.removeClass("md-show");
	}

	$('#sidebarReflow').on('click', function() {
		reader.settings.sidebarReflow = !reader.settings.sidebarReflow;
	});

	$settings.find(".closer").on("click", function() {
		hide();
	});

	$overlay.on("click", function() {
		hide();
	});

	return {
		"show" : show,
		"hide" : hide
	};
};
// Sidebar container: tracks which panel (Toc/Bookmarks/Notes/...) is active
// and slides the sidebar open or closed. `this` is the Reader.
EPUBJS.reader.SidebarController = function(book) {
	var reader = this;

	var $sidebar = $("#sidebar"),
	    $panels = $("#panels");

	var activePanel = "Toc";

	// Hide the current panel's controller, show the requested one, and move
	// the "active" marker in the panel tab bar. Unknown panels are ignored.
	function changePanelTo(viewName) {
		var controllerName = viewName + "Controller";

		if(activePanel == viewName || typeof reader[controllerName] === 'undefined' ) return;

		reader[activePanel+ "Controller"].hide();
		reader[controllerName].show();

		activePanel = viewName;

		$panels.find('.active').removeClass("active");
		$panels.find("#show-" + viewName ).addClass("active");
	}

	function getActivePanel() {
		return activePanel;
	}

	function show() {
		reader.sidebarOpen = true;
		reader.ReaderController.slideOut();
		$sidebar.addClass("open");
	}

	function hide() {
		reader.sidebarOpen = false;
		reader.ReaderController.slideIn();
		$sidebar.removeClass("open");
	}

	// Panel tabs carry their target panel name in data-view.
	$panels.find(".show_view").on("click", function(event) {
		changePanelTo($(this).data("view"));
		event.preventDefault();
	});

	return {
		'show' : show,
		'hide' : hide,
		'getActivePanel' : getActivePanel,
		'changePanelTo' : changePanelTo
	};
};
// Table-of-contents panel: renders the (possibly nested) toc into #tocView,
// highlights the chapter being read, and navigates on link clicks.
// `this` is the Reader. Returns { show, hide }.
EPUBJS.reader.TocController = function(toc) {
	var book = this.book;
	var rendition = this.rendition;

	var $list = $("#tocView"),
			docfrag = document.createDocumentFragment();

	var currentChapter = false;

	// Recursively build a <ul> for one level of toc entries.
	var generateTocItems = function(toc, level) {
		var container = document.createElement("ul");

		if(!level) level = 1;

		toc.forEach(function(chapter) {
			var listitem = document.createElement("li"),
					link = document.createElement("a");
			// Bug fix: `toggle` was assigned without `var` (the preceding
			// declaration list ended with a semicolon), leaking an implicit
			// global shared across iterations and recursion levels.
			var toggle = document.createElement("a");

			var subitems;

			listitem.id = "toc-"+chapter.id;
			listitem.classList.add('list_item');

			link.textContent = chapter.label;
			link.href = chapter.href;

			link.classList.add('toc_link');

			listitem.appendChild(link);

			// Nested entries get an expand/collapse toggle before the link.
			if(chapter.subitems && chapter.subitems.length > 0) {
				level++;
				subitems = generateTocItems(chapter.subitems, level);
				toggle.classList.add('toc_toggle');

				listitem.insertBefore(toggle, link);
				listitem.appendChild(subitems);
			}

			container.appendChild(listitem);

		});

		return container;
	};

	var onShow = function() {
		$list.show();
	};

	var onHide = function() {
		$list.hide();
	};

	// Highlight the list item for the chapter that was just rendered and
	// expand its ancestor entries.
	var chapterChange = function(e) {
		var id = e.id,
				$item = $list.find("#toc-"+id),
				$current = $list.find(".currentChapter"),
				$open = $list.find('.openChapter');

		if($item.length){

			if($item != $current && $item.has(currentChapter).length > 0) {
				$current.removeClass("currentChapter");
			}

			$item.addClass("currentChapter");

			// $open.removeClass("openChapter");
			$item.parents('li').addClass("openChapter");
		}
	};

	// NOTE(review): "renderered" looks like a typo for "rendered"; left
	// untouched because renaming the string changes which event is observed.
	rendition.on('renderered', chapterChange);

	var tocitems = generateTocItems(toc);

	docfrag.appendChild(tocitems);

	$list.append(docfrag);

	// Navigate when a toc link is clicked and move the highlight there.
	$list.find(".toc_link").on("click", function(event){
		var url = this.getAttribute('href');

		event.preventDefault();

		//-- Provide the Book with the url to show
		//   The Url must be found in the books manifest
		rendition.display(url);

		$list.find(".currentChapter")
			.addClass("openChapter")
			.removeClass("currentChapter");

		$(this).parent('li').addClass("currentChapter");

	});

	// Expand/collapse a nested entry via its toggle.
	$list.find(".toc_toggle").on("click", function(event){
		var $el = $(this).parent('li'),
				open = $el.hasClass("openChapter");

		event.preventDefault();

		if(open){
			$el.removeClass("openChapter");
		} else {
			$el.addClass("openChapter");
		}
	});

	return {
		"show" : onShow,
		"hide" : onHide
	};
};
//# sourceMappingURL=reader.js.map | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/js/reader.js | reader.js |
// Configuration hook read by the Hypothes.is annotator when it boots:
// returns the sidebar "constructor" the annotator should instantiate.
window.hypothesisConfig = function() {
	var Annotator = window.Annotator;
	var $main = $("#main");

	// Annotator.Host subclass whose show/hide also reflow the reader layout
	// (toggling #main's "single" class) and flip the toolbar chevron.
	function EpubAnnotationSidebar(elem, options) {
		// NOTE(review): the passed-in `options` object is discarded and
		// replaced wholesale with these fixed settings -- confirm intentional.
		options = {
			server: true,
			origin: true,
			showHighlights: true,
			Toolbar: {container: '#annotation-controls'}
		}

		Annotator.Host.call(this, elem, options);
	}

	EpubAnnotationSidebar.prototype = Object.create(Annotator.Host.prototype);

	// Slide the annotation frame in and put the reader into single-page mode.
	EpubAnnotationSidebar.prototype.show = function() {
		this.frame.css({
			'margin-left': (-1 * this.frame.width()) + "px"
		});
		this.frame.removeClass('annotator-collapsed');
		if (!$main.hasClass('single')) {
			$main.addClass("single");
			this.toolbar.find('[name=sidebar-toggle]').removeClass('h-icon-chevron-left').addClass('h-icon-chevron-right');
			this.setVisibleHighlights(true);
		}
	};

	// Collapse the annotation frame and restore the normal layout.
	EpubAnnotationSidebar.prototype.hide = function() {
		this.frame.css({
			'margin-left': ''
		});
		this.frame.addClass('annotator-collapsed');
		if ($main.hasClass('single')) {
			$main.removeClass("single");
			this.toolbar.find('[name=sidebar-toggle]').removeClass('h-icon-chevron-right').addClass('h-icon-chevron-left');
			this.setVisibleHighlights(false);
		}
	};

	return {
		constructor: EpubAnnotationSidebar,
	}
};
// This is the Epub.js plugin. Annotations are updated on location change.
// Registered as a reader plugin, so it is invoked with `this` bound to the
// Reader and the epub.js Book as its argument.
EPUBJS.reader.plugins.HypothesisController = function (Book) {
	var reader = this;
	var $main = $("#main");

	// Re-show only the annotations whose highlights sit on the visible page.
	var updateAnnotations = function () {
		var annotator = Book.renderer.render.window.annotator;
		if (annotator && annotator.constructor.$) {
			var annotations = getVisibleAnnotations(annotator.constructor.$);
			annotator.showAnnotations(annotations)
		}
	};

	// Collect the annotation data of highlight spans whose left edge is within
	// the content iframe's horizontal viewport. `$` is the annotator's jQuery.
	var getVisibleAnnotations = function ($) {
		var width = Book.renderer.render.iframe.clientWidth;
		return $('.annotator-hl').map(function() {
			var $this = $(this),
					left = this.getBoundingClientRect().left;

			if (left >= 0 && left <= width) {
				return $this.data('annotation');
			}
		}).get();
	};

	Book.on("renderer:locationChanged", updateAnnotations);

	return {}
};
// Namespace for the remote full-text-search integration.
EPUBJS.reader.search = {};

// Search Server -- https://github.com/futurepress/epubjs-search
EPUBJS.reader.search.SERVER = "https://pacific-cliffs-3579.herokuapp.com";
// Query the remote search server for `q` and invoke `callback` with the
// parsed JSON response. Failures are logged to the console; the callback is
// not invoked on error.
EPUBJS.reader.search.request = function(q, callback) {
	$.ajax({
		dataType: "json",
		url: EPUBJS.reader.search.SERVER + "/search?q=" + encodeURIComponent(q)
	}).done(function(results) {
		callback(results);
	}).fail(function(err) {
		console.error(err);
	});
};
// Sidebar search-panel controller: wires #searchBox to the remote search
// service (EPUBJS.reader.search.request), renders the result list, and
// highlights matching text inside the rendered chapter iframe.
EPUBJS.reader.plugins.SearchController = function(Book) {
	var reader = this;
	// Cached sidebar DOM nodes. `iframeDoc` remembers which chapter
	// document currently carries highlight spans (falsy when none).
	var $searchBox = $("#searchBox"),
		$searchResults = $("#searchResults"),
		$searchView = $("#searchView"),
		iframeDoc;
	var searchShown = false;
	// Panel became visible: (re-)run the current query and mark it shown.
	var onShow = function() {
		query();
		searchShown = true;
		$searchView.addClass("shown");
	};
	// Panel hidden: just toggle visibility state; highlights are left alone.
	var onHide = function() {
		searchShown = false;
		$searchView.removeClass("shown");
	};
	// Send the search-box contents to the server, render one <li> per
	// result, and highlight every occurrence in the visible chapter.
	var query = function() {
		var q = $searchBox.val();
		if(q == '') {
			return;
		}
		$searchResults.empty();
		$searchResults.append("<li><p>Searching...</p></li>");
		EPUBJS.reader.search.request(q, function(data) {
			var results = data.results;
			$searchResults.empty();
			// Clear highlights left over from a previous query.
			if(iframeDoc) {
				$(iframeDoc).find('body').unhighlight();
			}
			if(results.length == 0) {
				$searchResults.append("<li><p>No Results Found</p></li>");
				return;
			}
			iframeDoc = $("#viewer iframe")[0].contentDocument;
			$(iframeDoc).find('body').highlight(q, { element: 'span' });
			results.forEach(function(result) {
				var $li = $("<li></li>");
				var $item = $("<a href='"+result.href+"' data-cfi='"+result.cfi+"'><span>"+result.title+"</span><p>"+result.highlight+"</p></a>");
				// Clicking a result jumps to its CFI and re-highlights once
				// the target chapter has rendered.
				// NOTE(review): each click registers a fresh
				// "renderer:chapterDisplayed" listener that is never removed,
				// so listeners accumulate over repeated clicks — confirm
				// whether a one-shot/off mechanism should be used here.
				$item.on("click", function(e) {
					var $this = $(this),
						cfi = $this.data("cfi");
					e.preventDefault();
					Book.gotoCfi(cfi+"/1:0");
					Book.on("renderer:chapterDisplayed", function() {
						iframeDoc = $("#viewer iframe")[0].contentDocument;
						$(iframeDoc).find('body').highlight(q, { element: 'span' });
					})
				});
				$li.append($item);
				$searchResults.append($li);
			});
		});
	};
	// Native "search" event fires on submit and on clearing the input.
	$searchBox.on("search", function(e) {
		var q = $searchBox.val();
		//-- SearchBox is empty or cleared
		if(q == '') {
			$searchResults.empty();
			if(reader.SidebarController.getActivePanel() == "Search") {
				reader.SidebarController.changePanelTo("Toc");
			}
			$(iframeDoc).find('body').unhighlight();
			iframeDoc = false;
			return;
		}
		reader.SidebarController.changePanelTo("Search");
		e.preventDefault();
	});
	return {
		"show" : onShow,
		"hide" : onHide
	};
};
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.JSZip=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){"use strict";var d=a("./utils"),e=a("./support"),f="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";c.encode=function(a){for(var b,c,e,g,h,i,j,k=[],l=0,m=a.length,n=m,o="string"!==d.getTypeOf(a);l<a.length;)n=m-l,o?(b=a[l++],c=l<m?a[l++]:0,e=l<m?a[l++]:0):(b=a.charCodeAt(l++),c=l<m?a.charCodeAt(l++):0,e=l<m?a.charCodeAt(l++):0),g=b>>2,h=(3&b)<<4|c>>4,i=n>1?(15&c)<<2|e>>6:64,j=n>2?63&e:64,k.push(f.charAt(g)+f.charAt(h)+f.charAt(i)+f.charAt(j));return k.join("")},c.decode=function(a){var b,c,d,g,h,i,j,k=0,l=0,m="data:";if(a.substr(0,m.length)===m)throw new Error("Invalid base64 input, it looks like a data url.");a=a.replace(/[^A-Za-z0-9\+\/\=]/g,"");var n=3*a.length/4;if(a.charAt(a.length-1)===f.charAt(64)&&n--,a.charAt(a.length-2)===f.charAt(64)&&n--,n%1!==0)throw new Error("Invalid base64 input, bad content length.");var o;for(o=e.uint8array?new Uint8Array(0|n):new Array(0|n);k<a.length;)g=f.indexOf(a.charAt(k++)),h=f.indexOf(a.charAt(k++)),i=f.indexOf(a.charAt(k++)),j=f.indexOf(a.charAt(k++)),b=g<<2|h>>4,c=(15&h)<<4|i>>2,d=(3&i)<<6|j,o[l++]=b,64!==i&&(o[l++]=c),64!==j&&(o[l++]=d);return o}},{"./support":30,"./utils":32}],2:[function(a,b,c){"use strict";function 
d(a,b,c,d,e){this.compressedSize=a,this.uncompressedSize=b,this.crc32=c,this.compression=d,this.compressedContent=e}var e=a("./external"),f=a("./stream/DataWorker"),g=a("./stream/DataLengthProbe"),h=a("./stream/Crc32Probe"),g=a("./stream/DataLengthProbe");d.prototype={getContentWorker:function(){var a=new f(e.Promise.resolve(this.compressedContent)).pipe(this.compression.uncompressWorker()).pipe(new g("data_length")),b=this;return a.on("end",function(){if(this.streamInfo.data_length!==b.uncompressedSize)throw new Error("Bug : uncompressed data size mismatch")}),a},getCompressedWorker:function(){return new f(e.Promise.resolve(this.compressedContent)).withStreamInfo("compressedSize",this.compressedSize).withStreamInfo("uncompressedSize",this.uncompressedSize).withStreamInfo("crc32",this.crc32).withStreamInfo("compression",this.compression)}},d.createWorkerFrom=function(a,b,c){return a.pipe(new h).pipe(new g("uncompressedSize")).pipe(b.compressWorker(c)).pipe(new g("compressedSize")).withStreamInfo("compression",b)},b.exports=d},{"./external":6,"./stream/Crc32Probe":25,"./stream/DataLengthProbe":26,"./stream/DataWorker":27}],3:[function(a,b,c){"use strict";var d=a("./stream/GenericWorker");c.STORE={magic:"\0\0",compressWorker:function(a){return new d("STORE compression")},uncompressWorker:function(){return new d("STORE decompression")}},c.DEFLATE=a("./flate")},{"./flate":7,"./stream/GenericWorker":28}],4:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b[g])];return a^-1}function f(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b.charCodeAt(g))];return a^-1}var g=a("./utils"),h=d();b.exports=function(a,b){if("undefined"==typeof a||!a.length)return 0;var c="string"!==g.getTypeOf(a);return c?e(0|b,a,a.length,0):f(0|b,a,a.length,0)}},{"./utils":32}],5:[function(a,b,c){"use 
strict";c.base64=!1,c.binary=!1,c.dir=!1,c.createFolders=!0,c.date=null,c.compression=null,c.compressionOptions=null,c.comment=null,c.unixPermissions=null,c.dosPermissions=null},{}],6:[function(a,b,c){"use strict";var d=null;d="undefined"!=typeof Promise?Promise:a("lie"),b.exports={Promise:d}},{lie:58}],7:[function(a,b,c){"use strict";function d(a,b){h.call(this,"FlateWorker/"+a),this._pako=null,this._pakoAction=a,this._pakoOptions=b,this.meta={}}var e="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Uint32Array,f=a("pako"),g=a("./utils"),h=a("./stream/GenericWorker"),i=e?"uint8array":"array";c.magic="\b\0",g.inherits(d,h),d.prototype.processChunk=function(a){this.meta=a.meta,null===this._pako&&this._createPako(),this._pako.push(g.transformTo(i,a.data),!1)},d.prototype.flush=function(){h.prototype.flush.call(this),null===this._pako&&this._createPako(),this._pako.push([],!0)},d.prototype.cleanUp=function(){h.prototype.cleanUp.call(this),this._pako=null},d.prototype._createPako=function(){this._pako=new f[this._pakoAction]({raw:!0,level:this._pakoOptions.level||-1});var a=this;this._pako.onData=function(b){a.push({data:b,meta:a.meta})}},c.compressWorker=function(a){return new d("Deflate",a)},c.uncompressWorker=function(){return new d("Inflate",{})}},{"./stream/GenericWorker":28,"./utils":32,pako:59}],8:[function(a,b,c){"use strict";function d(a,b,c,d){f.call(this,"ZipFileWorker"),this.bytesWritten=0,this.zipComment=b,this.zipPlatform=c,this.encodeFileName=d,this.streamFiles=a,this.accumulate=!1,this.contentBuffer=[],this.dirRecords=[],this.currentSourceOffset=0,this.entriesCount=0,this.currentFile=null,this._sources=[]}var e=a("../utils"),f=a("../stream/GenericWorker"),g=a("../utf8"),h=a("../crc32"),i=a("../signature"),j=function(a,b){var c,d="";for(c=0;c<b;c++)d+=String.fromCharCode(255&a),a>>>=8;return d},k=function(a,b){var c=a;return a||(c=b?16893:33204),(65535&c)<<16},l=function(a,b){return 
63&(a||0)},m=function(a,b,c,d,f,m){var n,o,p=a.file,q=a.compression,r=m!==g.utf8encode,s=e.transformTo("string",m(p.name)),t=e.transformTo("string",g.utf8encode(p.name)),u=p.comment,v=e.transformTo("string",m(u)),w=e.transformTo("string",g.utf8encode(u)),x=t.length!==p.name.length,y=w.length!==u.length,z="",A="",B="",C=p.dir,D=p.date,E={crc32:0,compressedSize:0,uncompressedSize:0};b&&!c||(E.crc32=a.crc32,E.compressedSize=a.compressedSize,E.uncompressedSize=a.uncompressedSize);var F=0;b&&(F|=8),r||!x&&!y||(F|=2048);var G=0,H=0;C&&(G|=16),"UNIX"===f?(H=798,G|=k(p.unixPermissions,C)):(H=20,G|=l(p.dosPermissions,C)),n=D.getUTCHours(),n<<=6,n|=D.getUTCMinutes(),n<<=5,n|=D.getUTCSeconds()/2,o=D.getUTCFullYear()-1980,o<<=4,o|=D.getUTCMonth()+1,o<<=5,o|=D.getUTCDate(),x&&(A=j(1,1)+j(h(s),4)+t,z+="up"+j(A.length,2)+A),y&&(B=j(1,1)+j(h(v),4)+w,z+="uc"+j(B.length,2)+B);var I="";I+="\n\0",I+=j(F,2),I+=q.magic,I+=j(n,2),I+=j(o,2),I+=j(E.crc32,4),I+=j(E.compressedSize,4),I+=j(E.uncompressedSize,4),I+=j(s.length,2),I+=j(z.length,2);var J=i.LOCAL_FILE_HEADER+I+s+z,K=i.CENTRAL_FILE_HEADER+j(H,2)+I+j(v.length,2)+"\0\0\0\0"+j(G,4)+j(d,4)+s+z+v;return{fileRecord:J,dirRecord:K}},n=function(a,b,c,d,f){var g="",h=e.transformTo("string",f(d));return g=i.CENTRAL_DIRECTORY_END+"\0\0\0\0"+j(a,2)+j(a,2)+j(b,4)+j(c,4)+j(h.length,2)+h},o=function(a){var b="";return b=i.DATA_DESCRIPTOR+j(a.crc32,4)+j(a.compressedSize,4)+j(a.uncompressedSize,4)};e.inherits(d,f),d.prototype.push=function(a){var b=a.meta.percent||0,c=this.entriesCount,d=this._sources.length;this.accumulate?this.contentBuffer.push(a):(this.bytesWritten+=a.data.length,f.prototype.push.call(this,{data:a.data,meta:{currentFile:this.currentFile,percent:c?(b+100*(c-d-1))/c:100}}))},d.prototype.openedSource=function(a){this.currentSourceOffset=this.bytesWritten,this.currentFile=a.file.name;var b=this.streamFiles&&!a.file.dir;if(b){var 
c=m(a,b,!1,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);this.push({data:c.fileRecord,meta:{percent:0}})}else this.accumulate=!0},d.prototype.closedSource=function(a){this.accumulate=!1;var b=this.streamFiles&&!a.file.dir,c=m(a,b,!0,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);if(this.dirRecords.push(c.dirRecord),b)this.push({data:o(a),meta:{percent:100}});else for(this.push({data:c.fileRecord,meta:{percent:0}});this.contentBuffer.length;)this.push(this.contentBuffer.shift());this.currentFile=null},d.prototype.flush=function(){for(var a=this.bytesWritten,b=0;b<this.dirRecords.length;b++)this.push({data:this.dirRecords[b],meta:{percent:100}});var c=this.bytesWritten-a,d=n(this.dirRecords.length,c,a,this.zipComment,this.encodeFileName);this.push({data:d,meta:{percent:100}})},d.prototype.prepareNextSource=function(){this.previous=this._sources.shift(),this.openedSource(this.previous.streamInfo),this.isPaused?this.previous.pause():this.previous.resume()},d.prototype.registerPrevious=function(a){this._sources.push(a);var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.closedSource(b.previous.streamInfo),b._sources.length?b.prepareNextSource():b.end()}),a.on("error",function(a){b.error(a)}),this},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this.previous&&this._sources.length?(this.prepareNextSource(),!0):this.previous||this._sources.length||this.generatedError?void 0:(this.end(),!0))},d.prototype.error=function(a){var b=this._sources;if(!f.prototype.error.call(this,a))return!1;for(var c=0;c<b.length;c++)try{b[c].error(a)}catch(a){}return!0},d.prototype.lock=function(){f.prototype.lock.call(this);for(var a=this._sources,b=0;b<a.length;b++)a[b].lock()},b.exports=d},{"../crc32":4,"../signature":23,"../stream/GenericWorker":28,"../utf8":31,"../utils":32}],9:[function(a,b,c){"use strict";var d=a("../compressions"),e=a("./ZipFileWorker"),f=function(a,b){var c=a||b,e=d[c];if(!e)throw 
new Error(c+" is not a valid compression method !");return e};c.generateWorker=function(a,b,c){var d=new e(b.streamFiles,c,b.platform,b.encodeFileName),g=0;try{a.forEach(function(a,c){g++;var e=f(c.options.compression,b.compression),h=c.options.compressionOptions||b.compressionOptions||{},i=c.dir,j=c.date;c._compressWorker(e,h).withStreamInfo("file",{name:a,dir:i,date:j,comment:c.comment||"",unixPermissions:c.unixPermissions,dosPermissions:c.dosPermissions}).pipe(d)}),d.entriesCount=g}catch(h){d.error(h)}return d}},{"../compressions":3,"./ZipFileWorker":8}],10:[function(a,b,c){"use strict";function d(){if(!(this instanceof d))return new d;if(arguments.length)throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");this.files={},this.comment=null,this.root="",this.clone=function(){var a=new d;for(var b in this)"function"!=typeof this[b]&&(a[b]=this[b]);return a}}d.prototype=a("./object"),d.prototype.loadAsync=a("./load"),d.support=a("./support"),d.defaults=a("./defaults"),d.version="3.1.5",d.loadAsync=function(a,b){return(new d).loadAsync(a,b)},d.external=a("./external"),b.exports=d},{"./defaults":5,"./external":6,"./load":11,"./object":15,"./support":30}],11:[function(a,b,c){"use strict";function d(a){return new f.Promise(function(b,c){var d=a.decompressed.getContentWorker().pipe(new i);d.on("error",function(a){c(a)}).on("end",function(){d.streamInfo.crc32!==a.decompressed.crc32?c(new Error("Corrupted zip : CRC32 mismatch")):b()}).resume()})}var e=a("./utils"),f=a("./external"),g=a("./utf8"),e=a("./utils"),h=a("./zipEntries"),i=a("./stream/Crc32Probe"),j=a("./nodejsUtils");b.exports=function(a,b){var c=this;return b=e.extend(b||{},{base64:!1,checkCRC32:!1,optimizedBinaryString:!1,createFolders:!1,decodeFileName:g.utf8decode}),j.isNode&&j.isStream(a)?f.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file.")):e.prepareContent("the loaded zip 
file",a,!0,b.optimizedBinaryString,b.base64).then(function(a){var c=new h(b);return c.load(a),c}).then(function(a){var c=[f.Promise.resolve(a)],e=a.files;if(b.checkCRC32)for(var g=0;g<e.length;g++)c.push(d(e[g]));return f.Promise.all(c)}).then(function(a){for(var d=a.shift(),e=d.files,f=0;f<e.length;f++){var g=e[f];c.file(g.fileNameStr,g.decompressed,{binary:!0,optimizedBinaryString:!0,date:g.date,dir:g.dir,comment:g.fileCommentStr.length?g.fileCommentStr:null,unixPermissions:g.unixPermissions,dosPermissions:g.dosPermissions,createFolders:b.createFolders})}return d.zipComment.length&&(c.comment=d.zipComment),c})}},{"./external":6,"./nodejsUtils":14,"./stream/Crc32Probe":25,"./utf8":31,"./utils":32,"./zipEntries":33}],12:[function(a,b,c){"use strict";function d(a,b){f.call(this,"Nodejs stream input adapter for "+a),this._upstreamEnded=!1,this._bindStream(b)}var e=a("../utils"),f=a("../stream/GenericWorker");e.inherits(d,f),d.prototype._bindStream=function(a){var b=this;this._stream=a,a.pause(),a.on("data",function(a){b.push({data:a,meta:{percent:0}})}).on("error",function(a){b.isPaused?this.generatedError=a:b.error(a)}).on("end",function(){b.isPaused?b._upstreamEnded=!0:b.end()})},d.prototype.pause=function(){return!!f.prototype.pause.call(this)&&(this._stream.pause(),!0)},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(this._upstreamEnded?this.end():this._stream.resume(),!0)},b.exports=d},{"../stream/GenericWorker":28,"../utils":32}],13:[function(a,b,c){"use strict";function d(a,b,c){e.call(this,b),this._helper=a;var d=this;a.on("data",function(a,b){d.push(a)||d._helper.pause(),c&&c(b)}).on("error",function(a){d.emit("error",a)}).on("end",function(){d.push(null)})}var e=a("readable-stream").Readable,f=a("../utils");f.inherits(d,e),d.prototype._read=function(){this._helper.resume()},b.exports=d},{"../utils":32,"readable-stream":16}],14:[function(a,b,c){"use strict";b.exports={isNode:"undefined"!=typeof Buffer,newBufferFrom:function(a,b){return 
new Buffer(a,b)},allocBuffer:function(a){return Buffer.alloc?Buffer.alloc(a):new Buffer(a)},isBuffer:function(a){return Buffer.isBuffer(a)},isStream:function(a){return a&&"function"==typeof a.on&&"function"==typeof a.pause&&"function"==typeof a.resume}}},{}],15:[function(a,b,c){"use strict";function d(a){return"[object RegExp]"===Object.prototype.toString.call(a)}var e=a("./utf8"),f=a("./utils"),g=a("./stream/GenericWorker"),h=a("./stream/StreamHelper"),i=a("./defaults"),j=a("./compressedObject"),k=a("./zipObject"),l=a("./generate"),m=a("./nodejsUtils"),n=a("./nodejs/NodejsStreamInputAdapter"),o=function(a,b,c){var d,e=f.getTypeOf(b),h=f.extend(c||{},i);h.date=h.date||new Date,null!==h.compression&&(h.compression=h.compression.toUpperCase()),"string"==typeof h.unixPermissions&&(h.unixPermissions=parseInt(h.unixPermissions,8)),h.unixPermissions&&16384&h.unixPermissions&&(h.dir=!0),h.dosPermissions&&16&h.dosPermissions&&(h.dir=!0),h.dir&&(a=q(a)),h.createFolders&&(d=p(a))&&r.call(this,d,!0);var l="string"===e&&h.binary===!1&&h.base64===!1;c&&"undefined"!=typeof c.binary||(h.binary=!l);var o=b instanceof j&&0===b.uncompressedSize;(o||h.dir||!b||0===b.length)&&(h.base64=!1,h.binary=!0,b="",h.compression="STORE",e="string");var s=null;s=b instanceof j||b instanceof g?b:m.isNode&&m.isStream(b)?new n(a,b):f.prepareContent(a,b,h.binary,h.optimizedBinaryString,h.base64);var t=new k(a,s,h);this.files[a]=t},p=function(a){"/"===a.slice(-1)&&(a=a.substring(0,a.length-1));var b=a.lastIndexOf("/");return b>0?a.substring(0,b):""},q=function(a){return"/"!==a.slice(-1)&&(a+="/"),a},r=function(a,b){return b="undefined"!=typeof b?b:i.createFolders,a=q(a),this.files[a]||o.call(this,a,null,{dir:!0,createFolders:b}),this.files[a]},s={load:function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},forEach:function(a){var b,c,d;for(b in 
this.files)this.files.hasOwnProperty(b)&&(d=this.files[b],c=b.slice(this.root.length,b.length),c&&b.slice(0,this.root.length)===this.root&&a(c,d))},filter:function(a){var b=[];return this.forEach(function(c,d){a(c,d)&&b.push(d)}),b},file:function(a,b,c){if(1===arguments.length){if(d(a)){var e=a;return this.filter(function(a,b){return!b.dir&&e.test(a)})}var f=this.files[this.root+a];return f&&!f.dir?f:null}return a=this.root+a,o.call(this,a,b,c),this},folder:function(a){if(!a)return this;if(d(a))return this.filter(function(b,c){return c.dir&&a.test(b)});var b=this.root+a,c=r.call(this,b),e=this.clone();return e.root=c.name,e},remove:function(a){a=this.root+a;var b=this.files[a];if(b||("/"!==a.slice(-1)&&(a+="/"),b=this.files[a]),b&&!b.dir)delete this.files[a];else for(var c=this.filter(function(b,c){return c.name.slice(0,a.length)===a}),d=0;d<c.length;d++)delete this.files[c[d].name];return this},generate:function(a){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},generateInternalStream:function(a){var b,c={};try{if(c=f.extend(a||{},{streamFiles:!1,compression:"STORE",compressionOptions:null,type:"",platform:"DOS",comment:null,mimeType:"application/zip",encodeFileName:e.utf8encode}),c.type=c.type.toLowerCase(),c.compression=c.compression.toUpperCase(),"binarystring"===c.type&&(c.type="string"),!c.type)throw new Error("No output type specified.");f.checkSupport(c.type),"darwin"!==c.platform&&"freebsd"!==c.platform&&"linux"!==c.platform&&"sunos"!==c.platform||(c.platform="UNIX"),"win32"===c.platform&&(c.platform="DOS");var d=c.comment||this.comment||"";b=l.generateWorker(this,c,d)}catch(i){b=new g("error"),b.error(i)}return new h(b,c.type||"string",c.mimeType)},generateAsync:function(a,b){return this.generateInternalStream(a).accumulate(b)},generateNodeStream:function(a,b){return 
a=a||{},a.type||(a.type="nodebuffer"),this.generateInternalStream(a).toNodejsStream(b)}};b.exports=s},{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":12,"./nodejsUtils":14,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31,"./utils":32,"./zipObject":35}],16:[function(a,b,c){b.exports=a("stream")},{stream:void 0}],17:[function(a,b,c){"use strict";function d(a){e.call(this,a);for(var b=0;b<this.data.length;b++)a[b]=255&a[b]}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data[this.zero+a]},d.prototype.lastIndexOfSignature=function(a){for(var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.length-4;f>=0;--f)if(this.data[f]===b&&this.data[f+1]===c&&this.data[f+2]===d&&this.data[f+3]===e)return f-this.zero;return-1},d.prototype.readAndCheckSignature=function(a){var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.readData(4);return b===f[0]&&c===f[1]&&d===f[2]&&e===f[3]},d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return[];var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],18:[function(a,b,c){"use strict";function d(a){this.data=a,this.length=a.length,this.index=0,this.zero=0}var e=a("../utils");d.prototype={checkOffset:function(a){this.checkIndex(this.index+a)},checkIndex:function(a){if(this.length<this.zero+a||a<0)throw new Error("End of data reached (data length = "+this.length+", asked index = "+a+"). 
Corrupted zip ?")},setIndex:function(a){this.checkIndex(a),this.index=a},skip:function(a){this.setIndex(this.index+a)},byteAt:function(a){},readInt:function(a){var b,c=0;for(this.checkOffset(a),b=this.index+a-1;b>=this.index;b--)c=(c<<8)+this.byteAt(b);return this.index+=a,c},readString:function(a){return e.transformTo("string",this.readData(a))},readData:function(a){},lastIndexOfSignature:function(a){},readAndCheckSignature:function(a){},readDate:function(){var a=this.readInt(4);return new Date(Date.UTC((a>>25&127)+1980,(a>>21&15)-1,a>>16&31,a>>11&31,a>>5&63,(31&a)<<1))}},b.exports=d},{"../utils":32}],19:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./Uint8ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./Uint8ArrayReader":21}],20:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data.charCodeAt(this.zero+a)},d.prototype.lastIndexOfSignature=function(a){return this.data.lastIndexOf(a)-this.zero},d.prototype.readAndCheckSignature=function(a){var b=this.readData(4);return a===b},d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],21:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return new Uint8Array(0);var b=this.data.subarray(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./ArrayReader":17}],22:[function(a,b,c){"use strict";var 
d=a("../utils"),e=a("../support"),f=a("./ArrayReader"),g=a("./StringReader"),h=a("./NodeBufferReader"),i=a("./Uint8ArrayReader");b.exports=function(a){var b=d.getTypeOf(a);return d.checkSupport(b),"string"!==b||e.uint8array?"nodebuffer"===b?new h(a):e.uint8array?new i(d.transformTo("uint8array",a)):new f(d.transformTo("array",a)):new g(a)}},{"../support":30,"../utils":32,"./ArrayReader":17,"./NodeBufferReader":19,"./StringReader":20,"./Uint8ArrayReader":21}],23:[function(a,b,c){"use strict";c.LOCAL_FILE_HEADER="PK",c.CENTRAL_FILE_HEADER="PK",c.CENTRAL_DIRECTORY_END="PK",c.ZIP64_CENTRAL_DIRECTORY_LOCATOR="PK",c.ZIP64_CENTRAL_DIRECTORY_END="PK",c.DATA_DESCRIPTOR="PK\b"},{}],24:[function(a,b,c){"use strict";function d(a){e.call(this,"ConvertWorker to "+a),this.destType=a}var e=a("./GenericWorker"),f=a("../utils");f.inherits(d,e),d.prototype.processChunk=function(a){this.push({data:f.transformTo(this.destType,a.data),meta:a.meta})},b.exports=d},{"../utils":32,"./GenericWorker":28}],25:[function(a,b,c){"use strict";function d(){e.call(this,"Crc32Probe"),this.withStreamInfo("crc32",0)}var e=a("./GenericWorker"),f=a("../crc32"),g=a("../utils");g.inherits(d,e),d.prototype.processChunk=function(a){this.streamInfo.crc32=f(a.data,this.streamInfo.crc32||0),this.push(a)},b.exports=d},{"../crc32":4,"../utils":32,"./GenericWorker":28}],26:[function(a,b,c){"use strict";function d(a){f.call(this,"DataLengthProbe for "+a),this.propName=a,this.withStreamInfo(a,0)}var e=a("../utils"),f=a("./GenericWorker");e.inherits(d,f),d.prototype.processChunk=function(a){if(a){var b=this.streamInfo[this.propName]||0;this.streamInfo[this.propName]=b+a.data.length}f.prototype.processChunk.call(this,a)},b.exports=d},{"../utils":32,"./GenericWorker":28}],27:[function(a,b,c){"use strict";function d(a){f.call(this,"DataWorker");var 
b=this;this.dataIsReady=!1,this.index=0,this.max=0,this.data=null,this.type="",this._tickScheduled=!1,a.then(function(a){b.dataIsReady=!0,b.data=a,b.max=a&&a.length||0,b.type=e.getTypeOf(a),b.isPaused||b._tickAndRepeat()},function(a){b.error(a)})}var e=a("../utils"),f=a("./GenericWorker"),g=16384;e.inherits(d,f),d.prototype.cleanUp=function(){f.prototype.cleanUp.call(this),this.data=null},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this._tickScheduled&&this.dataIsReady&&(this._tickScheduled=!0,e.delay(this._tickAndRepeat,[],this)),!0)},d.prototype._tickAndRepeat=function(){this._tickScheduled=!1,this.isPaused||this.isFinished||(this._tick(),this.isFinished||(e.delay(this._tickAndRepeat,[],this),this._tickScheduled=!0))},d.prototype._tick=function(){if(this.isPaused||this.isFinished)return!1;var a=g,b=null,c=Math.min(this.max,this.index+a);if(this.index>=this.max)return this.end();switch(this.type){case"string":b=this.data.substring(this.index,c);break;case"uint8array":b=this.data.subarray(this.index,c);break;case"array":case"nodebuffer":b=this.data.slice(this.index,c)}return this.index=c,this.push({data:b,meta:{percent:this.max?this.index/this.max*100:0}})},b.exports=d},{"../utils":32,"./GenericWorker":28}],28:[function(a,b,c){"use strict";function d(a){this.name=a||"default",this.streamInfo={},this.generatedError=null,this.extraStreamInfo={},this.isPaused=!0,this.isFinished=!1,this.isLocked=!1,this._listeners={data:[],end:[],error:[]},this.previous=null}d.prototype={push:function(a){this.emit("data",a)},end:function(){if(this.isFinished)return!1;this.flush();try{this.emit("end"),this.cleanUp(),this.isFinished=!0}catch(a){this.emit("error",a)}return!0},error:function(a){return!this.isFinished&&(this.isPaused?this.generatedError=a:(this.isFinished=!0,this.emit("error",a),this.previous&&this.previous.error(a),this.cleanUp()),!0)},on:function(a,b){return 
this._listeners[a].push(b),this},cleanUp:function(){this.streamInfo=this.generatedError=this.extraStreamInfo=null,this._listeners=[]},emit:function(a,b){if(this._listeners[a])for(var c=0;c<this._listeners[a].length;c++)this._listeners[a][c].call(this,b)},pipe:function(a){return a.registerPrevious(this)},registerPrevious:function(a){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.streamInfo=a.streamInfo,this.mergeStreamInfo(),this.previous=a;var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.end()}),a.on("error",function(a){b.error(a)}),this},pause:function(){return!this.isPaused&&!this.isFinished&&(this.isPaused=!0,this.previous&&this.previous.pause(),!0)},resume:function(){if(!this.isPaused||this.isFinished)return!1;this.isPaused=!1;var a=!1;return this.generatedError&&(this.error(this.generatedError),a=!0),this.previous&&this.previous.resume(),!a},flush:function(){},processChunk:function(a){this.push(a)},withStreamInfo:function(a,b){return this.extraStreamInfo[a]=b,this.mergeStreamInfo(),this},mergeStreamInfo:function(){for(var a in this.extraStreamInfo)this.extraStreamInfo.hasOwnProperty(a)&&(this.streamInfo[a]=this.extraStreamInfo[a])},lock:function(){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.isLocked=!0,this.previous&&this.previous.lock()},toString:function(){var a="Worker "+this.name;return this.previous?this.previous+" -> "+a:a}},b.exports=d},{}],29:[function(a,b,c){"use strict";function d(a,b,c){switch(a){case"blob":return h.newBlob(h.transformTo("arraybuffer",b),c);case"base64":return k.encode(b);default:return h.transformTo(a,b)}}function e(a,b){var c,d=0,e=null,f=0;for(c=0;c<b.length;c++)f+=b[c].length;switch(a){case"string":return b.join("");case"array":return Array.prototype.concat.apply([],b);case"uint8array":for(e=new Uint8Array(f),c=0;c<b.length;c++)e.set(b[c],d),d+=b[c].length;return e;case"nodebuffer":return 
Buffer.concat(b);default:throw new Error("concat : unsupported type '"+a+"'")}}function f(a,b){return new m.Promise(function(c,f){var g=[],h=a._internalType,i=a._outputType,j=a._mimeType;a.on("data",function(a,c){g.push(a),b&&b(c)}).on("error",function(a){g=[],f(a)}).on("end",function(){try{var a=d(i,e(h,g),j);c(a)}catch(b){f(b)}g=[]}).resume()})}function g(a,b,c){var d=b;switch(b){case"blob":case"arraybuffer":d="uint8array";break;case"base64":d="string"}try{this._internalType=d,this._outputType=b,this._mimeType=c,h.checkSupport(d),this._worker=a.pipe(new i(d)),a.lock()}catch(e){this._worker=new j("error"),this._worker.error(e)}}var h=a("../utils"),i=a("./ConvertWorker"),j=a("./GenericWorker"),k=a("../base64"),l=a("../support"),m=a("../external"),n=null;if(l.nodestream)try{n=a("../nodejs/NodejsStreamOutputAdapter")}catch(o){}g.prototype={accumulate:function(a){return f(this,a)},on:function(a,b){var c=this;return"data"===a?this._worker.on(a,function(a){b.call(c,a.data,a.meta)}):this._worker.on(a,function(){h.delay(b,arguments,c)}),this},resume:function(){return h.delay(this._worker.resume,[],this._worker),this},pause:function(){return this._worker.pause(),this},toNodejsStream:function(a){if(h.checkSupport("nodestream"),"nodebuffer"!==this._outputType)throw new Error(this._outputType+" is not supported by this method");return new n(this,{objectMode:"nodebuffer"!==this._outputType},a)}},b.exports=g},{"../base64":1,"../external":6,"../nodejs/NodejsStreamOutputAdapter":13,"../support":30,"../utils":32,"./ConvertWorker":24,"./GenericWorker":28}],30:[function(a,b,c){"use strict";if(c.base64=!0,c.array=!0,c.string=!0,c.arraybuffer="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof Uint8Array,c.nodebuffer="undefined"!=typeof Buffer,c.uint8array="undefined"!=typeof Uint8Array,"undefined"==typeof ArrayBuffer)c.blob=!1;else{var d=new ArrayBuffer(0);try{c.blob=0===new Blob([d],{type:"application/zip"}).size}catch(e){try{var 
f=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,g=new f;g.append(d),c.blob=0===g.getBlob("application/zip").size}catch(e){c.blob=!1}}}try{c.nodestream=!!a("readable-stream").Readable}catch(e){c.nodestream=!1}},{"readable-stream":16}],31:[function(a,b,c){"use strict";function d(){i.call(this,"utf-8 decode"),this.leftOver=null}function e(){i.call(this,"utf-8 encode")}for(var f=a("./utils"),g=a("./support"),h=a("./nodejsUtils"),i=a("./stream/GenericWorker"),j=new Array(256),k=0;k<256;k++)j[k]=k>=252?6:k>=248?5:k>=240?4:k>=224?3:k>=192?2:1;j[254]=j[254]=1;var l=function(a){var b,c,d,e,f,h=a.length,i=0;for(e=0;e<h;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=g.uint8array?new Uint8Array(i):new Array(i),f=0,e=0;f<i;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),c<128?b[f++]=c:c<2048?(b[f++]=192|c>>>6,b[f++]=128|63&c):c<65536?(b[f++]=224|c>>>12,b[f++]=128|c>>>6&63,b[f++]=128|63&c):(b[f++]=240|c>>>18,b[f++]=128|c>>>12&63,b[f++]=128|c>>>6&63,b[f++]=128|63&c);return b},m=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+j[a[c]]>b?c:b},n=function(a){var b,c,d,e,g=a.length,h=new Array(2*g);for(c=0,b=0;b<g;)if(d=a[b++],d<128)h[c++]=d;else if(e=j[d],e>4)h[c++]=65533,b+=e-1;else{for(d&=2===e?31:3===e?15:7;e>1&&b<g;)d=d<<6|63&a[b++],e--;e>1?h[c++]=65533:d<65536?h[c++]=d:(d-=65536,h[c++]=55296|d>>10&1023,h[c++]=56320|1023&d)}return h.length!==c&&(h.subarray?h=h.subarray(0,c):h.length=c),f.applyFromCharCode(h)};c.utf8encode=function(a){return g.nodebuffer?h.newBufferFrom(a,"utf-8"):l(a)},c.utf8decode=function(a){return 
g.nodebuffer?f.transformTo("nodebuffer",a).toString("utf-8"):(a=f.transformTo(g.uint8array?"uint8array":"array",a),n(a))},f.inherits(d,i),d.prototype.processChunk=function(a){var b=f.transformTo(g.uint8array?"uint8array":"array",a.data);if(this.leftOver&&this.leftOver.length){if(g.uint8array){var d=b;b=new Uint8Array(d.length+this.leftOver.length),b.set(this.leftOver,0),b.set(d,this.leftOver.length)}else b=this.leftOver.concat(b);this.leftOver=null}var e=m(b),h=b;e!==b.length&&(g.uint8array?(h=b.subarray(0,e),this.leftOver=b.subarray(e,b.length)):(h=b.slice(0,e),this.leftOver=b.slice(e,b.length))),this.push({data:c.utf8decode(h),meta:a.meta})},d.prototype.flush=function(){this.leftOver&&this.leftOver.length&&(this.push({data:c.utf8decode(this.leftOver),meta:{}}),this.leftOver=null)},c.Utf8DecodeWorker=d,f.inherits(e,i),e.prototype.processChunk=function(a){this.push({data:c.utf8encode(a.data),meta:a.meta})},c.Utf8EncodeWorker=e},{"./nodejsUtils":14,"./stream/GenericWorker":28,"./support":30,"./utils":32}],32:[function(a,b,c){"use strict";function d(a){var b=null;return b=i.uint8array?new Uint8Array(a.length):new Array(a.length),f(a,b)}function e(a){return a}function f(a,b){for(var c=0;c<a.length;++c)b[c]=255&a.charCodeAt(c);return b}function g(a){var b=65536,d=c.getTypeOf(a),e=!0;if("uint8array"===d?e=n.applyCanBeUsed.uint8array:"nodebuffer"===d&&(e=n.applyCanBeUsed.nodebuffer),e)for(;b>1;)try{return n.stringifyByChunk(a,d,b)}catch(f){b=Math.floor(b/2)}return n.stringifyByChar(a)}function h(a,b){for(var c=0;c<a.length;c++)b[c]=a[c];
return b}var i=a("./support"),j=a("./base64"),k=a("./nodejsUtils"),l=a("core-js/library/fn/set-immediate"),m=a("./external");c.newBlob=function(a,b){c.checkSupport("blob");try{return new Blob([a],{type:b})}catch(d){try{var e=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,f=new e;return f.append(a),f.getBlob(b)}catch(d){throw new Error("Bug : can't construct the Blob.")}}};var n={stringifyByChunk:function(a,b,c){var d=[],e=0,f=a.length;if(f<=c)return String.fromCharCode.apply(null,a);for(;e<f;)"array"===b||"nodebuffer"===b?d.push(String.fromCharCode.apply(null,a.slice(e,Math.min(e+c,f)))):d.push(String.fromCharCode.apply(null,a.subarray(e,Math.min(e+c,f)))),e+=c;return d.join("")},stringifyByChar:function(a){for(var b="",c=0;c<a.length;c++)b+=String.fromCharCode(a[c]);return b},applyCanBeUsed:{uint8array:function(){try{return i.uint8array&&1===String.fromCharCode.apply(null,new Uint8Array(1)).length}catch(a){return!1}}(),nodebuffer:function(){try{return i.nodebuffer&&1===String.fromCharCode.apply(null,k.allocBuffer(1)).length}catch(a){return!1}}()}};c.applyFromCharCode=g;var o={};o.string={string:e,array:function(a){return f(a,new Array(a.length))},arraybuffer:function(a){return o.string.uint8array(a).buffer},uint8array:function(a){return f(a,new Uint8Array(a.length))},nodebuffer:function(a){return f(a,k.allocBuffer(a.length))}},o.array={string:g,array:e,arraybuffer:function(a){return new Uint8Array(a).buffer},uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(a)}},o.arraybuffer={string:function(a){return g(new Uint8Array(a))},array:function(a){return h(new Uint8Array(a),new Array(a.byteLength))},arraybuffer:e,uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(new Uint8Array(a))}},o.uint8array={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return a.buffer},uint8array:e,nodebuffer:function(a){return 
k.newBufferFrom(a)}},o.nodebuffer={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return o.nodebuffer.uint8array(a).buffer},uint8array:function(a){return h(a,new Uint8Array(a.length))},nodebuffer:e},c.transformTo=function(a,b){if(b||(b=""),!a)return b;c.checkSupport(a);var d=c.getTypeOf(b),e=o[d][a](b);return e},c.getTypeOf=function(a){return"string"==typeof a?"string":"[object Array]"===Object.prototype.toString.call(a)?"array":i.nodebuffer&&k.isBuffer(a)?"nodebuffer":i.uint8array&&a instanceof Uint8Array?"uint8array":i.arraybuffer&&a instanceof ArrayBuffer?"arraybuffer":void 0},c.checkSupport=function(a){var b=i[a.toLowerCase()];if(!b)throw new Error(a+" is not supported by this platform")},c.MAX_VALUE_16BITS=65535,c.MAX_VALUE_32BITS=-1,c.pretty=function(a){var b,c,d="";for(c=0;c<(a||"").length;c++)b=a.charCodeAt(c),d+="\\x"+(b<16?"0":"")+b.toString(16).toUpperCase();return d},c.delay=function(a,b,c){l(function(){a.apply(c||null,b||[])})},c.inherits=function(a,b){var c=function(){};c.prototype=b.prototype,a.prototype=new c},c.extend=function(){var a,b,c={};for(a=0;a<arguments.length;a++)for(b in arguments[a])arguments[a].hasOwnProperty(b)&&"undefined"==typeof c[b]&&(c[b]=arguments[a][b]);return c},c.prepareContent=function(a,b,e,f,g){var h=m.Promise.resolve(b).then(function(a){var b=i.blob&&(a instanceof Blob||["[object File]","[object Blob]"].indexOf(Object.prototype.toString.call(a))!==-1);return b&&"undefined"!=typeof FileReader?new m.Promise(function(b,c){var d=new FileReader;d.onload=function(a){b(a.target.result)},d.onerror=function(a){c(a.target.error)},d.readAsArrayBuffer(a)}):a});return h.then(function(b){var h=c.getTypeOf(b);return h?("arraybuffer"===h?b=c.transformTo("uint8array",b):"string"===h&&(g?b=j.decode(b):e&&f!==!0&&(b=d(b))),b):m.Promise.reject(new Error("Can't read the data of '"+a+"'. 
Is it in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?"))})}},{"./base64":1,"./external":6,"./nodejsUtils":14,"./support":30,"core-js/library/fn/set-immediate":36}],33:[function(a,b,c){"use strict";function d(a){this.files=[],this.loadOptions=a}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./signature"),h=a("./zipEntry"),i=(a("./utf8"),a("./support"));d.prototype={checkSignature:function(a){if(!this.reader.readAndCheckSignature(a)){this.reader.index-=4;var b=this.reader.readString(4);throw new Error("Corrupted zip or bug: unexpected signature ("+f.pretty(b)+", expected "+f.pretty(a)+")")}},isSignature:function(a,b){var c=this.reader.index;this.reader.setIndex(a);var d=this.reader.readString(4),e=d===b;return this.reader.setIndex(c),e},readBlockEndOfCentral:function(){this.diskNumber=this.reader.readInt(2),this.diskWithCentralDirStart=this.reader.readInt(2),this.centralDirRecordsOnThisDisk=this.reader.readInt(2),this.centralDirRecords=this.reader.readInt(2),this.centralDirSize=this.reader.readInt(4),this.centralDirOffset=this.reader.readInt(4),this.zipCommentLength=this.reader.readInt(2);var a=this.reader.readData(this.zipCommentLength),b=i.uint8array?"uint8array":"array",c=f.transformTo(b,a);this.zipComment=this.loadOptions.decodeFileName(c)},readBlockZip64EndOfCentral:function(){this.zip64EndOfCentralSize=this.reader.readInt(8),this.reader.skip(4),this.diskNumber=this.reader.readInt(4),this.diskWithCentralDirStart=this.reader.readInt(4),this.centralDirRecordsOnThisDisk=this.reader.readInt(8),this.centralDirRecords=this.reader.readInt(8),this.centralDirSize=this.reader.readInt(8),this.centralDirOffset=this.reader.readInt(8),this.zip64ExtensibleData={};for(var 
a,b,c,d=this.zip64EndOfCentralSize-44,e=0;e<d;)a=this.reader.readInt(2),b=this.reader.readInt(4),c=this.reader.readData(b),this.zip64ExtensibleData[a]={id:a,length:b,value:c}},readBlockZip64EndOfCentralLocator:function(){if(this.diskWithZip64CentralDirStart=this.reader.readInt(4),this.relativeOffsetEndOfZip64CentralDir=this.reader.readInt(8),this.disksCount=this.reader.readInt(4),this.disksCount>1)throw new Error("Multi-volumes zip are not supported")},readLocalFiles:function(){var a,b;for(a=0;a<this.files.length;a++)b=this.files[a],this.reader.setIndex(b.localHeaderOffset),this.checkSignature(g.LOCAL_FILE_HEADER),b.readLocalPart(this.reader),b.handleUTF8(),b.processAttributes()},readCentralDir:function(){var a;for(this.reader.setIndex(this.centralDirOffset);this.reader.readAndCheckSignature(g.CENTRAL_FILE_HEADER);)a=new h({zip64:this.zip64},this.loadOptions),a.readCentralPart(this.reader),this.files.push(a);if(this.centralDirRecords!==this.files.length&&0!==this.centralDirRecords&&0===this.files.length)throw new Error("Corrupted zip or bug: expected "+this.centralDirRecords+" records in central dir, got "+this.files.length)},readEndOfCentral:function(){var a=this.reader.lastIndexOfSignature(g.CENTRAL_DIRECTORY_END);if(a<0){var b=!this.isSignature(0,g.LOCAL_FILE_HEADER);throw b?new Error("Can't find end of central directory : is this a zip file ? 
If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html"):new Error("Corrupted zip: can't find end of central directory")}this.reader.setIndex(a);var c=a;if(this.checkSignature(g.CENTRAL_DIRECTORY_END),this.readBlockEndOfCentral(),this.diskNumber===f.MAX_VALUE_16BITS||this.diskWithCentralDirStart===f.MAX_VALUE_16BITS||this.centralDirRecordsOnThisDisk===f.MAX_VALUE_16BITS||this.centralDirRecords===f.MAX_VALUE_16BITS||this.centralDirSize===f.MAX_VALUE_32BITS||this.centralDirOffset===f.MAX_VALUE_32BITS){if(this.zip64=!0,a=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),a<0)throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");if(this.reader.setIndex(a),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),this.readBlockZip64EndOfCentralLocator(),!this.isSignature(this.relativeOffsetEndOfZip64CentralDir,g.ZIP64_CENTRAL_DIRECTORY_END)&&(this.relativeOffsetEndOfZip64CentralDir=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.relativeOffsetEndOfZip64CentralDir<0))throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.readBlockZip64EndOfCentral()}var d=this.centralDirOffset+this.centralDirSize;this.zip64&&(d+=20,d+=12+this.zip64EndOfCentralSize);var e=c-d;if(e>0)this.isSignature(c,g.CENTRAL_FILE_HEADER)||(this.reader.zero=e);else if(e<0)throw new Error("Corrupted zip: missing "+Math.abs(e)+" bytes.")},prepareReader:function(a){this.reader=e(a)},load:function(a){this.prepareReader(a),this.readEndOfCentral(),this.readCentralDir(),this.readLocalFiles()}},b.exports=d},{"./reader/readerFor":22,"./signature":23,"./support":30,"./utf8":31,"./utils":32,"./zipEntry":34}],34:[function(a,b,c){"use strict";function d(a,b){this.options=a,this.loadOptions=b}var 
e=a("./reader/readerFor"),f=a("./utils"),g=a("./compressedObject"),h=a("./crc32"),i=a("./utf8"),j=a("./compressions"),k=a("./support"),l=0,m=3,n=function(a){for(var b in j)if(j.hasOwnProperty(b)&&j[b].magic===a)return j[b];return null};d.prototype={isEncrypted:function(){return 1===(1&this.bitFlag)},useUTF8:function(){return 2048===(2048&this.bitFlag)},readLocalPart:function(a){var b,c;if(a.skip(22),this.fileNameLength=a.readInt(2),c=a.readInt(2),this.fileName=a.readData(this.fileNameLength),a.skip(c),this.compressedSize===-1||this.uncompressedSize===-1)throw new Error("Bug or corrupted zip : didn't get enough informations from the central directory (compressedSize === -1 || uncompressedSize === -1)");if(b=n(this.compressionMethod),null===b)throw new Error("Corrupted zip : compression "+f.pretty(this.compressionMethod)+" unknown (inner file : "+f.transformTo("string",this.fileName)+")");this.decompressed=new g(this.compressedSize,this.uncompressedSize,this.crc32,b,a.readData(this.compressedSize))},readCentralPart:function(a){this.versionMadeBy=a.readInt(2),a.skip(2),this.bitFlag=a.readInt(2),this.compressionMethod=a.readString(2),this.date=a.readDate(),this.crc32=a.readInt(4),this.compressedSize=a.readInt(4),this.uncompressedSize=a.readInt(4);var b=a.readInt(2);if(this.extraFieldsLength=a.readInt(2),this.fileCommentLength=a.readInt(2),this.diskNumberStart=a.readInt(2),this.internalFileAttributes=a.readInt(2),this.externalFileAttributes=a.readInt(4),this.localHeaderOffset=a.readInt(4),this.isEncrypted())throw new Error("Encrypted zip are not supported");a.skip(b),this.readExtraFields(a),this.parseZIP64ExtraField(a),this.fileComment=a.readData(this.fileCommentLength)},processAttributes:function(){this.unixPermissions=null,this.dosPermissions=null;var 
a=this.versionMadeBy>>8;this.dir=!!(16&this.externalFileAttributes),a===l&&(this.dosPermissions=63&this.externalFileAttributes),a===m&&(this.unixPermissions=this.externalFileAttributes>>16&65535),this.dir||"/"!==this.fileNameStr.slice(-1)||(this.dir=!0)},parseZIP64ExtraField:function(a){if(this.extraFields[1]){var b=e(this.extraFields[1].value);this.uncompressedSize===f.MAX_VALUE_32BITS&&(this.uncompressedSize=b.readInt(8)),this.compressedSize===f.MAX_VALUE_32BITS&&(this.compressedSize=b.readInt(8)),this.localHeaderOffset===f.MAX_VALUE_32BITS&&(this.localHeaderOffset=b.readInt(8)),this.diskNumberStart===f.MAX_VALUE_32BITS&&(this.diskNumberStart=b.readInt(4))}},readExtraFields:function(a){var b,c,d,e=a.index+this.extraFieldsLength;for(this.extraFields||(this.extraFields={});a.index<e;)b=a.readInt(2),c=a.readInt(2),d=a.readData(c),this.extraFields[b]={id:b,length:c,value:d}},handleUTF8:function(){var a=k.uint8array?"uint8array":"array";if(this.useUTF8())this.fileNameStr=i.utf8decode(this.fileName),this.fileCommentStr=i.utf8decode(this.fileComment);else{var b=this.findExtraFieldUnicodePath();if(null!==b)this.fileNameStr=b;else{var c=f.transformTo(a,this.fileName);this.fileNameStr=this.loadOptions.decodeFileName(c)}var d=this.findExtraFieldUnicodeComment();if(null!==d)this.fileCommentStr=d;else{var e=f.transformTo(a,this.fileComment);this.fileCommentStr=this.loadOptions.decodeFileName(e)}}},findExtraFieldUnicodePath:function(){var a=this.extraFields[28789];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileName)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null},findExtraFieldUnicodeComment:function(){var a=this.extraFields[25461];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileComment)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null}},b.exports=d},{"./compressedObject":2,"./compressions":3,"./crc32":4,"./reader/readerFor":22,"./support":30,"./utf8":31,"./utils":32}],35:[function(a,b,c){"use strict";var 
d=a("./stream/StreamHelper"),e=a("./stream/DataWorker"),f=a("./utf8"),g=a("./compressedObject"),h=a("./stream/GenericWorker"),i=function(a,b,c){this.name=a,this.dir=c.dir,this.date=c.date,this.comment=c.comment,this.unixPermissions=c.unixPermissions,this.dosPermissions=c.dosPermissions,this._data=b,this._dataBinary=c.binary,this.options={compression:c.compression,compressionOptions:c.compressionOptions}};i.prototype={internalStream:function(a){var b=null,c="string";try{if(!a)throw new Error("No output type specified.");c=a.toLowerCase();var e="string"===c||"text"===c;"binarystring"!==c&&"text"!==c||(c="string"),b=this._decompressWorker();var g=!this._dataBinary;g&&!e&&(b=b.pipe(new f.Utf8EncodeWorker)),!g&&e&&(b=b.pipe(new f.Utf8DecodeWorker))}catch(i){b=new h("error"),b.error(i)}return new d(b,c,"")},async:function(a,b){return this.internalStream(a).accumulate(b)},nodeStream:function(a,b){return this.internalStream(a||"nodebuffer").toNodejsStream(b)},_compressWorker:function(a,b){if(this._data instanceof g&&this._data.compression.magic===a.magic)return this._data.getCompressedWorker();var c=this._decompressWorker();return this._dataBinary||(c=c.pipe(new f.Utf8EncodeWorker)),g.createWorkerFrom(c,a,b)},_decompressWorker:function(){return this._data instanceof g?this._data.getContentWorker():this._data instanceof h?this._data:new e(this._data)}};for(var j=["asText","asBinary","asNodeBuffer","asUint8Array","asArrayBuffer"],k=function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},l=0;l<j.length;l++)i.prototype[j[l]]=k;b.exports=i},{"./compressedObject":2,"./stream/DataWorker":27,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31}],36:[function(a,b,c){a("../modules/web.immediate"),b.exports=a("../modules/_core").setImmediate},{"../modules/_core":40,"../modules/web.immediate":56}],37:[function(a,b,c){b.exports=function(a){if("function"!=typeof a)throw TypeError(a+" is not a function!");return 
a}},{}],38:[function(a,b,c){var d=a("./_is-object");b.exports=function(a){if(!d(a))throw TypeError(a+" is not an object!");return a}},{"./_is-object":51}],39:[function(a,b,c){var d={}.toString;b.exports=function(a){return d.call(a).slice(8,-1)}},{}],40:[function(a,b,c){var d=b.exports={version:"2.3.0"};"number"==typeof __e&&(__e=d)},{}],41:[function(a,b,c){var d=a("./_a-function");b.exports=function(a,b,c){if(d(a),void 0===b)return a;switch(c){case 1:return function(c){return a.call(b,c)};case 2:return function(c,d){return a.call(b,c,d)};case 3:return function(c,d,e){return a.call(b,c,d,e)}}return function(){return a.apply(b,arguments)}}},{"./_a-function":37}],42:[function(a,b,c){b.exports=!a("./_fails")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},{"./_fails":45}],43:[function(a,b,c){var d=a("./_is-object"),e=a("./_global").document,f=d(e)&&d(e.createElement);b.exports=function(a){return f?e.createElement(a):{}}},{"./_global":46,"./_is-object":51}],44:[function(a,b,c){var d=a("./_global"),e=a("./_core"),f=a("./_ctx"),g=a("./_hide"),h="prototype",i=function(a,b,c){var j,k,l,m=a&i.F,n=a&i.G,o=a&i.S,p=a&i.P,q=a&i.B,r=a&i.W,s=n?e:e[b]||(e[b]={}),t=s[h],u=n?d:o?d[b]:(d[b]||{})[h];n&&(c=b);for(j in c)k=!m&&u&&void 0!==u[j],k&&j in s||(l=k?u[j]:c[j],s[j]=n&&"function"!=typeof u[j]?c[j]:q&&k?f(l,d):r&&u[j]==l?function(a){var b=function(b,c,d){if(this instanceof a){switch(arguments.length){case 0:return new a;case 1:return new a(b);case 2:return new a(b,c)}return new a(b,c,d)}return a.apply(this,arguments)};return b[h]=a[h],b}(l):p&&"function"==typeof l?f(Function.call,l):l,p&&((s.virtual||(s.virtual={}))[j]=l,a&i.R&&t&&!t[j]&&g(t,j,l)))};i.F=1,i.G=2,i.S=4,i.P=8,i.B=16,i.W=32,i.U=64,i.R=128,b.exports=i},{"./_core":40,"./_ctx":41,"./_global":46,"./_hide":47}],45:[function(a,b,c){b.exports=function(a){try{return!!a()}catch(b){return!0}}},{}],46:[function(a,b,c){var d=b.exports="undefined"!=typeof 
window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=d)},{}],47:[function(a,b,c){var d=a("./_object-dp"),e=a("./_property-desc");b.exports=a("./_descriptors")?function(a,b,c){return d.f(a,b,e(1,c))}:function(a,b,c){return a[b]=c,a}},{"./_descriptors":42,"./_object-dp":52,"./_property-desc":53}],48:[function(a,b,c){b.exports=a("./_global").document&&document.documentElement},{"./_global":46}],49:[function(a,b,c){b.exports=!a("./_descriptors")&&!a("./_fails")(function(){return 7!=Object.defineProperty(a("./_dom-create")("div"),"a",{get:function(){return 7}}).a})},{"./_descriptors":42,"./_dom-create":43,"./_fails":45}],50:[function(a,b,c){b.exports=function(a,b,c){var d=void 0===c;switch(b.length){case 0:return d?a():a.call(c);case 1:return d?a(b[0]):a.call(c,b[0]);case 2:return d?a(b[0],b[1]):a.call(c,b[0],b[1]);case 3:return d?a(b[0],b[1],b[2]):a.call(c,b[0],b[1],b[2]);case 4:return d?a(b[0],b[1],b[2],b[3]):a.call(c,b[0],b[1],b[2],b[3])}return a.apply(c,b)}},{}],51:[function(a,b,c){b.exports=function(a){return"object"==typeof a?null!==a:"function"==typeof a}},{}],52:[function(a,b,c){var d=a("./_an-object"),e=a("./_ie8-dom-define"),f=a("./_to-primitive"),g=Object.defineProperty;c.f=a("./_descriptors")?Object.defineProperty:function(a,b,c){if(d(a),b=f(b,!0),d(c),e)try{return g(a,b,c)}catch(h){}if("get"in c||"set"in c)throw TypeError("Accessors not supported!");return"value"in c&&(a[b]=c.value),a}},{"./_an-object":38,"./_descriptors":42,"./_ie8-dom-define":49,"./_to-primitive":55}],53:[function(a,b,c){b.exports=function(a,b){return{enumerable:!(1&a),configurable:!(2&a),writable:!(4&a),value:b}}},{}],54:[function(a,b,c){var d,e,f,g=a("./_ctx"),h=a("./_invoke"),i=a("./_html"),j=a("./_dom-create"),k=a("./_global"),l=k.process,m=k.setImmediate,n=k.clearImmediate,o=k.MessageChannel,p=0,q={},r="onreadystatechange",s=function(){var a=+this;if(q.hasOwnProperty(a)){var b=q[a];delete 
q[a],b()}},t=function(a){s.call(a.data)};m&&n||(m=function(a){for(var b=[],c=1;arguments.length>c;)b.push(arguments[c++]);return q[++p]=function(){h("function"==typeof a?a:Function(a),b)},d(p),p},n=function(a){delete q[a]},"process"==a("./_cof")(l)?d=function(a){l.nextTick(g(s,a,1))}:o?(e=new o,f=e.port2,e.port1.onmessage=t,d=g(f.postMessage,f,1)):k.addEventListener&&"function"==typeof postMessage&&!k.importScripts?(d=function(a){k.postMessage(a+"","*")},k.addEventListener("message",t,!1)):d=r in j("script")?function(a){i.appendChild(j("script"))[r]=function(){i.removeChild(this),s.call(a)}}:function(a){setTimeout(g(s,a,1),0)}),b.exports={set:m,clear:n}},{"./_cof":39,"./_ctx":41,"./_dom-create":43,"./_global":46,"./_html":48,"./_invoke":50}],55:[function(a,b,c){var d=a("./_is-object");b.exports=function(a,b){if(!d(a))return a;var c,e;if(b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;if("function"==typeof(c=a.valueOf)&&!d(e=c.call(a)))return e;if(!b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;throw TypeError("Can't convert object to primitive value")}},{"./_is-object":51}],56:[function(a,b,c){var d=a("./_export"),e=a("./_task");d(d.G+d.B,{setImmediate:e.set,clearImmediate:e.clear})},{"./_export":44,"./_task":54}],57:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new 
a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],58:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(e){return p.reject(a,e)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&("object"==typeof a||"function"==typeof a)&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(d){c.status="error",c.value=d}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var 
g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a("immediate"),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=e,e.prototype["catch"]=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},e.resolve=k,e.reject=l,e.all=m,e.race=n},{immediate:57}],59:[function(a,b,c){"use strict";var d=a("./lib/utils/common").assign,e=a("./lib/deflate"),f=a("./lib/inflate"),g=a("./lib/zlib/constants"),h={};d(h,e,f,g),b.exports=h},{"./lib/deflate":60,"./lib/inflate":61,"./lib/utils/common":62,"./lib/zlib/constants":65}],60:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=i.assign({level:s,method:u,chunkSize:16384,windowBits:15,memLevel:8,strategy:t,to:""},a||{});var b=this.options;b.raw&&b.windowBits>0?b.windowBits=-b.windowBits:b.gzip&&b.windowBits>0&&b.windowBits<16&&(b.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=h.deflateInit2(this.strm,b.level,b.method,b.windowBits,b.memLevel,b.strategy);if(c!==p)throw new Error(k[c]);if(b.header&&h.deflateSetHeader(this.strm,b.header),b.dictionary){var e;if(e="string"==typeof 
b.dictionary?j.string2buf(b.dictionary):"[object ArrayBuffer]"===m.call(b.dictionary)?new Uint8Array(b.dictionary):b.dictionary,c=h.deflateSetDictionary(this.strm,e),c!==p)throw new Error(k[c]);this._dict_set=!0}}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}function g(a,b){return b=b||{},b.gzip=!0,e(a,b)}var h=a("./zlib/deflate"),i=a("./utils/common"),j=a("./utils/strings"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=Object.prototype.toString,n=0,o=4,p=0,q=1,r=2,s=-1,t=0,u=8;d.prototype.push=function(a,b){var c,d,e=this.strm,f=this.options.chunkSize;if(this.ended)return!1;d=b===~~b?b:b===!0?o:n,"string"==typeof a?e.input=j.string2buf(a):"[object ArrayBuffer]"===m.call(a)?e.input=new Uint8Array(a):e.input=a,e.next_in=0,e.avail_in=e.input.length;do{if(0===e.avail_out&&(e.output=new i.Buf8(f),e.next_out=0,e.avail_out=f),c=h.deflate(e,d),c!==q&&c!==p)return this.onEnd(c),this.ended=!0,!1;0!==e.avail_out&&(0!==e.avail_in||d!==o&&d!==r)||("string"===this.options.to?this.onData(j.buf2binstring(i.shrinkBuf(e.output,e.next_out))):this.onData(i.shrinkBuf(e.output,e.next_out)))}while((e.avail_in>0||0===e.avail_out)&&c!==q);return d===o?(c=h.deflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===p):d!==r||(this.onEnd(p),e.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===p&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=i.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Deflate=d,c.deflate=e,c.deflateRaw=f,c.gzip=g},{"./utils/common":62,"./utils/strings":63,"./zlib/deflate":67,"./zlib/messages":72,"./zlib/zstream":74}],61:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=h.assign({chunkSize:16384,windowBits:0,to:""},a||{});var 
b=this.options;b.raw&&b.windowBits>=0&&b.windowBits<16&&(b.windowBits=-b.windowBits,0===b.windowBits&&(b.windowBits=-15)),!(b.windowBits>=0&&b.windowBits<16)||a&&a.windowBits||(b.windowBits+=32),b.windowBits>15&&b.windowBits<48&&0===(15&b.windowBits)&&(b.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=g.inflateInit2(this.strm,b.windowBits);if(c!==j.Z_OK)throw new Error(k[c]);this.header=new m,g.inflateGetHeader(this.strm,this.header)}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}var g=a("./zlib/inflate"),h=a("./utils/common"),i=a("./utils/strings"),j=a("./zlib/constants"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=a("./zlib/gzheader"),n=Object.prototype.toString;d.prototype.push=function(a,b){var c,d,e,f,k,l,m=this.strm,o=this.options.chunkSize,p=this.options.dictionary,q=!1;if(this.ended)return!1;d=b===~~b?b:b===!0?j.Z_FINISH:j.Z_NO_FLUSH,"string"==typeof a?m.input=i.binstring2buf(a):"[object ArrayBuffer]"===n.call(a)?m.input=new Uint8Array(a):m.input=a,m.next_in=0,m.avail_in=m.input.length;do{if(0===m.avail_out&&(m.output=new h.Buf8(o),m.next_out=0,m.avail_out=o),c=g.inflate(m,j.Z_NO_FLUSH),c===j.Z_NEED_DICT&&p&&(l="string"==typeof p?i.string2buf(p):"[object ArrayBuffer]"===n.call(p)?new Uint8Array(p):p,c=g.inflateSetDictionary(this.strm,l)),c===j.Z_BUF_ERROR&&q===!0&&(c=j.Z_OK,q=!1),c!==j.Z_STREAM_END&&c!==j.Z_OK)return this.onEnd(c),this.ended=!0,!1;m.next_out&&(0!==m.avail_out&&c!==j.Z_STREAM_END&&(0!==m.avail_in||d!==j.Z_FINISH&&d!==j.Z_SYNC_FLUSH)||("string"===this.options.to?(e=i.utf8border(m.output,m.next_out),f=m.next_out-e,k=i.buf2string(m.output,e),m.next_out=f,m.avail_out=o-f,f&&h.arraySet(m.output,m.output,e,f,0),this.onData(k)):this.onData(h.shrinkBuf(m.output,m.next_out)))),0===m.avail_in&&0===m.avail_out&&(q=!0)}while((m.avail_in>0||0===m.avail_out)&&c!==j.Z_STREAM_END);return 
c===j.Z_STREAM_END&&(d=j.Z_FINISH),d===j.Z_FINISH?(c=g.inflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===j.Z_OK):d!==j.Z_SYNC_FLUSH||(this.onEnd(j.Z_OK),m.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===j.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=h.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Inflate=d,c.inflate=e,c.inflateRaw=f,c.ungzip=e},{"./utils/common":62,"./utils/strings":63,"./zlib/constants":65,"./zlib/gzheader":68,"./zlib/inflate":70,"./zlib/messages":72,"./zlib/zstream":74}],62:[function(a,b,c){"use strict";var d="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;c.assign=function(a){for(var b=Array.prototype.slice.call(arguments,1);b.length;){var c=b.shift();if(c){if("object"!=typeof c)throw new TypeError(c+"must be non-object");for(var d in c)c.hasOwnProperty(d)&&(a[d]=c[d])}}return a},c.shrinkBuf=function(a,b){return a.length===b?a:a.subarray?a.subarray(0,b):(a.length=b,a)};var e={arraySet:function(a,b,c,d,e){if(b.subarray&&a.subarray)return void a.set(b.subarray(c,c+d),e);for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){var b,c,d,e,f,g;for(d=0,b=0,c=a.length;b<c;b++)d+=a[b].length;for(g=new Uint8Array(d),e=0,b=0,c=a.length;b<c;b++)f=a[b],g.set(f,e),e+=f.length;return g}},f={arraySet:function(a,b,c,d,e){for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){return[].concat.apply([],a)}};c.setTyped=function(a){a?(c.Buf8=Uint8Array,c.Buf16=Uint16Array,c.Buf32=Int32Array,c.assign(c,e)):(c.Buf8=Array,c.Buf16=Array,c.Buf32=Array,c.assign(c,f))},c.setTyped(d)},{}],63:[function(a,b,c){"use strict";function d(a,b){if(b<65537&&(a.subarray&&g||!a.subarray&&f))return String.fromCharCode.apply(null,e.shrinkBuf(a,b));for(var c="",d=0;d<b;d++)c+=String.fromCharCode(a[d]);return c}var 
e=a("./common"),f=!0,g=!0;try{String.fromCharCode.apply(null,[0])}catch(h){f=!1}try{String.fromCharCode.apply(null,new Uint8Array(1))}catch(h){g=!1}for(var i=new e.Buf8(256),j=0;j<256;j++)i[j]=j>=252?6:j>=248?5:j>=240?4:j>=224?3:j>=192?2:1;i[254]=i[254]=1,c.string2buf=function(a){var b,c,d,f,g,h=a.length,i=0;for(f=0;f<h;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=new e.Buf8(i),g=0,f=0;g<i;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),c<128?b[g++]=c:c<2048?(b[g++]=192|c>>>6,b[g++]=128|63&c):c<65536?(b[g++]=224|c>>>12,b[g++]=128|c>>>6&63,b[g++]=128|63&c):(b[g++]=240|c>>>18,b[g++]=128|c>>>12&63,b[g++]=128|c>>>6&63,b[g++]=128|63&c);return b},c.buf2binstring=function(a){return d(a,a.length)},c.binstring2buf=function(a){for(var b=new e.Buf8(a.length),c=0,d=b.length;c<d;c++)b[c]=a.charCodeAt(c);return b},c.buf2string=function(a,b){var c,e,f,g,h=b||a.length,j=new Array(2*h);for(e=0,c=0;c<h;)if(f=a[c++],f<128)j[e++]=f;else if(g=i[f],g>4)j[e++]=65533,c+=g-1;else{for(f&=2===g?31:3===g?15:7;g>1&&c<h;)f=f<<6|63&a[c++],g--;g>1?j[e++]=65533:f<65536?j[e++]=f:(f-=65536,j[e++]=55296|f>>10&1023,j[e++]=56320|1023&f)}return d(j,e)},c.utf8border=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+i[a[c]]>b?c:b}},{"./common":62}],64:[function(a,b,c){"use strict";function d(a,b,c,d){for(var e=65535&a|0,f=a>>>16&65535|0,g=0;0!==c;){g=c>2e3?2e3:c,c-=g;do e=e+b[d++]|0,f=f+e|0;while(--g);e%=65521,f%=65521}return e|f<<16|0;
}b.exports=d},{}],65:[function(a,b,c){"use strict";b.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],66:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=f,g=d+c;a^=-1;for(var h=d;h<g;h++)a=a>>>8^e[255&(a^b[h])];return a^-1}var f=d();b.exports=e},{}],67:[function(a,b,c){"use strict";function d(a,b){return a.msg=I[b],b}function e(a){return(a<<1)-(a>4?9:0)}function f(a){for(var b=a.length;--b>=0;)a[b]=0}function g(a){var b=a.state,c=b.pending;c>a.avail_out&&(c=a.avail_out),0!==c&&(E.arraySet(a.output,b.pending_buf,b.pending_out,c,a.next_out),a.next_out+=c,b.pending_out+=c,a.total_out+=c,a.avail_out-=c,b.pending-=c,0===b.pending&&(b.pending_out=0))}function h(a,b){F._tr_flush_block(a,a.block_start>=0?a.block_start:-1,a.strstart-a.block_start,b),a.block_start=a.strstart,g(a.strm)}function i(a,b){a.pending_buf[a.pending++]=b}function j(a,b){a.pending_buf[a.pending++]=b>>>8&255,a.pending_buf[a.pending++]=255&b}function k(a,b,c,d){var e=a.avail_in;return e>d&&(e=d),0===e?0:(a.avail_in-=e,E.arraySet(b,a.input,a.next_in,e,c),1===a.state.wrap?a.adler=G(a.adler,b,e,c):2===a.state.wrap&&(a.adler=H(a.adler,b,e,c)),a.next_in+=e,a.total_in+=e,e)}function l(a,b){var c,d,e=a.max_chain_length,f=a.strstart,g=a.prev_length,h=a.nice_match,i=a.strstart>a.w_size-la?a.strstart-(a.w_size-la):0,j=a.window,k=a.w_mask,l=a.prev,m=a.strstart+ka,n=j[f+g-1],o=j[f+g];a.prev_length>=a.good_match&&(e>>=2),h>a.lookahead&&(h=a.lookahead);do 
if(c=b,j[c+g]===o&&j[c+g-1]===n&&j[c]===j[f]&&j[++c]===j[f+1]){f+=2,c++;do;while(j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&f<m);if(d=ka-(m-f),f=m-ka,d>g){if(a.match_start=b,g=d,d>=h)break;n=j[f+g-1],o=j[f+g]}}while((b=l[b&k])>i&&0!==--e);return g<=a.lookahead?g:a.lookahead}/* m: refills the lookahead, sliding the whole window back by w_size when strstart crosses the threshold and rebasing the head/prev hash chains, then re-seeds the rolling hash (zlib's fill_window). */function m(a){var b,c,d,e,f,g=a.w_size;do{if(e=a.window_size-a.lookahead-a.strstart,a.strstart>=g+(g-la)){E.arraySet(a.window,a.window,g,g,0),a.match_start-=g,a.strstart-=g,a.block_start-=g,c=a.hash_size,b=c;do d=a.head[--b],a.head[b]=d>=g?d-g:0;while(--c);c=g,b=c;do d=a.prev[--b],a.prev[b]=d>=g?d-g:0;while(--c);e+=g}if(0===a.strm.avail_in)break;if(c=k(a.strm,a.window,a.strstart+a.lookahead,e),a.lookahead+=c,a.lookahead+a.insert>=ja)for(f=a.strstart-a.insert,a.ins_h=a.window[f],a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+1])&a.hash_mask;a.insert&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+ja-1])&a.hash_mask,a.prev[f&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=f,f++,a.insert--,!(a.lookahead+a.insert<ja)););}while(a.lookahead<la&&0!==a.strm.avail_in)}/* n: stored-block mode - passes input through as uncompressed blocks (used at level 0). */function n(a,b){var c=65535;for(c>a.pending_buf_size-5&&(c=a.pending_buf_size-5);;){if(a.lookahead<=1){if(m(a),0===a.lookahead&&b===J)return ua;if(0===a.lookahead)break}a.strstart+=a.lookahead,a.lookahead=0;var d=a.block_start+c;if((0===a.strstart||a.strstart>=d)&&(a.lookahead=a.strstart-d,a.strstart=d,h(a,!1),0===a.strm.avail_out))return ua;if(a.strstart-a.block_start>=a.w_size-la&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.strstart>a.block_start&&(h(a,!1),0===a.strm.avail_out)?ua:ua}/* o: greedy-match compress loop - takes each match as found (fast levels). */function o(a,b){for(var c,d;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return 
ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),0!==c&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c)),a.match_length>=ja)if(d=F._tr_tally(a,a.strstart-a.match_start,a.match_length-ja),a.lookahead-=a.match_length,a.match_length<=a.max_lazy_match&&a.lookahead>=ja){a.match_length--;do a.strstart++,a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart;while(0!==--a.match_length);a.strstart++}else a.strstart+=a.match_length,a.match_length=0,a.ins_h=a.window[a.strstart],a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+1])&a.hash_mask;else d=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++;if(d&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}/* p: lazy-match compress loop - defers emitting a match one position in case the next match is longer (high levels). */function p(a,b){for(var c,d,e;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return 
ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),a.prev_length=a.match_length,a.prev_match=a.match_start,a.match_length=ja-1,0!==c&&a.prev_length<a.max_lazy_match&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c),a.match_length<=5&&(a.strategy===U||a.match_length===ja&&a.strstart-a.match_start>4096)&&(a.match_length=ja-1)),a.prev_length>=ja&&a.match_length<=a.prev_length){e=a.strstart+a.lookahead-ja,d=F._tr_tally(a,a.strstart-1-a.prev_match,a.prev_length-ja),a.lookahead-=a.prev_length-1,a.prev_length-=2;do++a.strstart<=e&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart);while(0!==--a.prev_length);if(a.match_available=0,a.match_length=ja-1,a.strstart++,d&&(h(a,!1),0===a.strm.avail_out))return ua}else if(a.match_available){if(d=F._tr_tally(a,0,a.window[a.strstart-1]),d&&h(a,!1),a.strstart++,a.lookahead--,0===a.strm.avail_out)return ua}else a.match_available=1,a.strstart++,a.lookahead--}return a.match_available&&(d=F._tr_tally(a,0,a.window[a.strstart-1]),a.match_available=0),a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}/* q: run-length strategy (Z_RLE) - only looks for runs at distance 1. */function q(a,b){for(var c,d,e,f,g=a.window;;){if(a.lookahead<=ka){if(m(a),a.lookahead<=ka&&b===J)return 
ua;if(0===a.lookahead)break}if(a.match_length=0,a.lookahead>=ja&&a.strstart>0&&(e=a.strstart-1,d=g[e],d===g[++e]&&d===g[++e]&&d===g[++e])){f=a.strstart+ka;do;while(d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&e<f);a.match_length=ka-(f-e),a.match_length>a.lookahead&&(a.match_length=a.lookahead)}if(a.match_length>=ja?(c=F._tr_tally(a,1,a.match_length-ja),a.lookahead-=a.match_length,a.strstart+=a.match_length,a.match_length=0):(c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++),c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}/* r: Huffman-only strategy (Z_HUFFMAN_ONLY) - emits literals, never matches. */function r(a,b){for(var c;;){if(0===a.lookahead&&(m(a),0===a.lookahead)){if(b===J)return ua;break}if(a.match_length=0,c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++,c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}/* s: per-level tuning record (good_length, max_lazy, nice_length, max_chain, compress function) - instances fill table D below. */function s(a,b,c,d,e){this.good_length=a,this.max_lazy=b,this.nice_length=c,this.max_chain=d,this.func=e}/* t: (re)initializes the match state from the level's entry in tuning table D. */function t(a){a.window_size=2*a.w_size,f(a.head),a.max_lazy_match=D[a.level].max_lazy,a.good_match=D[a.level].good_length,a.nice_match=D[a.level].nice_length,a.max_chain_length=D[a.level].max_chain,a.strstart=0,a.block_start=0,a.lookahead=0,a.insert=0,a.match_length=a.prev_length=ja-1,a.match_available=0,a.ins_h=0}/* u: the deflate state record - pending buffer, sliding window, hash chains, Huffman trees/heap, and match bookkeeping. */function 
u(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=$,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new E.Buf16(2*ha),this.dyn_dtree=new E.Buf16(2*(2*fa+1)),this.bl_tree=new E.Buf16(2*(2*ga+1)),f(this.dyn_ltree),f(this.dyn_dtree),f(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new E.Buf16(ia+1),this.heap=new E.Buf16(2*ea+1),f(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new E.Buf16(2*ea+1),f(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}/* v: deflateResetKeep (exported below) - resets counters/checksum without reallocating. */function v(a){var b;return a&&a.state?(a.total_in=a.total_out=0,a.data_type=Z,b=a.state,b.pending=0,b.pending_out=0,b.wrap<0&&(b.wrap=-b.wrap),b.status=b.wrap?na:sa,a.adler=2===b.wrap?0:1,b.last_flush=J,F._tr_init(b),O):d(a,Q)}/* w: deflateReset - resetKeep plus reinit of the match machinery via t(). */function w(a){var b=v(a);return b===O&&t(a.state),b}/* x: deflateSetHeader - only legal for gzip wrap (wrap===2). */function x(a,b){return a&&a.state?2!==a.state.wrap?Q:(a.state.gzhead=b,O):Q}/* y: deflateInit2 - validates level/method/windowBits/memLevel/strategy, derives raw/zlib/gzip wrap from the windowBits sign/offset, and allocates the state. */function y(a,b,c,e,f,g){if(!a)return Q;var h=1;if(b===T&&(b=6),e<0?(h=0,e=-e):e>15&&(h=2,e-=16),f<1||f>_||c!==$||e<8||e>15||b<0||b>9||g<0||g>X)return d(a,Q);8===e&&(e=9);var i=new u;return a.state=i,i.strm=a,i.wrap=h,i.gzhead=null,i.w_bits=e,i.w_size=1<<i.w_bits,i.w_mask=i.w_size-1,i.hash_bits=f+7,i.hash_size=1<<i.hash_bits,i.hash_mask=i.hash_size-1,i.hash_shift=~~((i.hash_bits+ja-1)/ja),i.window=new E.Buf8(2*i.w_size),i.head=new E.Buf16(i.hash_size),i.prev=new 
E.Buf16(i.w_size),i.lit_bufsize=1<<f+6,i.pending_buf_size=4*i.lit_bufsize,i.pending_buf=new E.Buf8(i.pending_buf_size),i.d_buf=1*i.lit_bufsize,i.l_buf=3*i.lit_bufsize,i.level=b,i.strategy=g,i.method=c,w(a)}/* z: deflateInit - deflateInit2 with defaults (method 8, windowBits 15, memLevel 8, default strategy). */function z(a,b){return y(a,b,$,aa,ba,Y)}/* A: deflate - the main entry. Emits the zlib or gzip header, streams the gzip extra/name/comment/hcrc fields, dispatches to the strategy's compress function, and on Z_FINISH writes the adler32 (zlib) or crc32+length (gzip) trailer. */function A(a,b){var c,h,k,l;if(!a||!a.state||b>N||b<0)return a?d(a,Q):Q;if(h=a.state,!a.output||!a.input&&0!==a.avail_in||h.status===ta&&b!==M)return d(a,0===a.avail_out?S:Q);if(h.strm=a,c=h.last_flush,h.last_flush=b,h.status===na)if(2===h.wrap)a.adler=0,i(h,31),i(h,139),i(h,8),h.gzhead?(i(h,(h.gzhead.text?1:0)+(h.gzhead.hcrc?2:0)+(h.gzhead.extra?4:0)+(h.gzhead.name?8:0)+(h.gzhead.comment?16:0)),i(h,255&h.gzhead.time),i(h,h.gzhead.time>>8&255),i(h,h.gzhead.time>>16&255),i(h,h.gzhead.time>>24&255),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,255&h.gzhead.os),h.gzhead.extra&&h.gzhead.extra.length&&(i(h,255&h.gzhead.extra.length),i(h,h.gzhead.extra.length>>8&255)),h.gzhead.hcrc&&(a.adler=H(a.adler,h.pending_buf,h.pending,0)),h.gzindex=0,h.status=oa):(i(h,0),i(h,0),i(h,0),i(h,0),i(h,0),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,ya),h.status=sa);else{var m=$+(h.w_bits-8<<4)<<8,n=-1;n=h.strategy>=V||h.level<2?0:h.level<6?1:6===h.level?2:3,m|=n<<6,0!==h.strstart&&(m|=ma),m+=31-m%31,h.status=sa,j(h,m),0!==h.strstart&&(j(h,a.adler>>>16),j(h,65535&a.adler)),a.adler=1}if(h.status===oa)if(h.gzhead.extra){for(k=h.pending;h.gzindex<(65535&h.gzhead.extra.length)&&(h.pending!==h.pending_buf_size||(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending!==h.pending_buf_size));)i(h,255&h.gzhead.extra[h.gzindex]),h.gzindex++;h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),h.gzindex===h.gzhead.extra.length&&(h.gzindex=0,h.status=pa)}else 
h.status=pa;if(h.status===pa)if(h.gzhead.name){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.name.length?255&h.gzhead.name.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.gzindex=0,h.status=qa)}else h.status=qa;if(h.status===qa)if(h.gzhead.comment){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.comment.length?255&h.gzhead.comment.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.status=ra)}else h.status=ra;if(h.status===ra&&(h.gzhead.hcrc?(h.pending+2>h.pending_buf_size&&g(a),h.pending+2<=h.pending_buf_size&&(i(h,255&a.adler),i(h,a.adler>>8&255),a.adler=0,h.status=sa)):h.status=sa),0!==h.pending){if(g(a),0===a.avail_out)return h.last_flush=-1,O}else if(0===a.avail_in&&e(b)<=e(c)&&b!==M)return d(a,S);if(h.status===ta&&0!==a.avail_in)return d(a,S);if(0!==a.avail_in||0!==h.lookahead||b!==J&&h.status!==ta){var o=h.strategy===V?r(h,b):h.strategy===W?q(h,b):D[h.level].func(h,b);if(o!==wa&&o!==xa||(h.status=ta),o===ua||o===wa)return 0===a.avail_out&&(h.last_flush=-1),O;if(o===va&&(b===K?F._tr_align(h):b!==N&&(F._tr_stored_block(h,0,0,!1),b===L&&(f(h.head),0===h.lookahead&&(h.strstart=0,h.block_start=0,h.insert=0))),g(a),0===a.avail_out))return h.last_flush=-1,O}return b!==M?O:h.wrap<=0?P:(2===h.wrap?(i(h,255&a.adler),i(h,a.adler>>8&255),i(h,a.adler>>16&255),i(h,a.adler>>24&255),i(h,255&a.total_in),i(h,a.total_in>>8&255),i(h,a.total_in>>16&255),i(h,a.total_in>>24&255)):(j(h,a.adler>>>16),j(h,65535&a.adler)),g(a),h.wrap>0&&(h.wrap=-h.wrap),0!==h.pending?O:P)}/* B: deflateEnd - validates the current status, then releases the state. */function B(a){var b;return 
a&&a.state?(b=a.state.status,b!==na&&b!==oa&&b!==pa&&b!==qa&&b!==ra&&b!==sa&&b!==ta?d(a,Q):(a.state=null,b===sa?d(a,R):O)):Q}/* C: deflateSetDictionary - trims to the last w_size bytes if oversized, then feeds the dictionary through the window/hash machinery via m(), restoring the caller's input pointers afterwards. */function C(a,b){var c,d,e,g,h,i,j,k,l=b.length;if(!a||!a.state)return Q;if(c=a.state,g=c.wrap,2===g||1===g&&c.status!==na||c.lookahead)return Q;for(1===g&&(a.adler=G(a.adler,b,l,0)),c.wrap=0,l>=c.w_size&&(0===g&&(f(c.head),c.strstart=0,c.block_start=0,c.insert=0),k=new E.Buf8(c.w_size),E.arraySet(k,b,l-c.w_size,c.w_size,0),b=k,l=c.w_size),h=a.avail_in,i=a.next_in,j=a.input,a.avail_in=l,a.next_in=0,a.input=b,m(c);c.lookahead>=ja;){d=c.strstart,e=c.lookahead-(ja-1);do c.ins_h=(c.ins_h<<c.hash_shift^c.window[d+ja-1])&c.hash_mask,c.prev[d&c.w_mask]=c.head[c.ins_h],c.head[c.ins_h]=d,d++;while(--e);c.strstart=d,c.lookahead=ja-1,m(c)}return c.strstart+=c.lookahead,c.block_start=c.strstart,c.insert=c.lookahead,c.lookahead=0,c.match_length=c.prev_length=ja-1,c.match_available=0,a.next_in=i,a.input=j,a.avail_in=h,c.wrap=g,O}var D,E=a("../utils/common"),F=a("./trees"),G=a("./adler32"),H=a("./crc32"),I=a("./messages"),J=0,K=1,L=3,M=4,N=5,O=0,P=1,Q=-2,R=-3,S=-5,T=-1,U=1,V=2,W=3,X=4,Y=0,Z=2,$=8,_=9,aa=15,ba=8,ca=29,da=256,ea=da+1+ca,fa=30,ga=19,ha=2*ea+1,ia=15,ja=3,ka=258,la=ka+ja+1,ma=32,na=42,oa=69,pa=73,qa=91,ra=103,sa=113,ta=666,ua=1,va=2,wa=3,xa=4,ya=3;D=[new s(0,0,0,0,n),new s(4,4,8,4,o),new s(4,5,16,8,o),new s(4,6,32,32,o),new s(4,4,16,16,p),new s(8,16,32,32,p),new s(8,16,128,128,p),new s(8,32,128,256,p),new s(32,128,258,1024,p),new s(32,258,258,4096,p)],c.deflateInit=z,c.deflateInit2=y,c.deflateReset=w,c.deflateResetKeep=v,c.deflateSetHeader=x,c.deflate=A,c.deflateEnd=B,c.deflateSetDictionary=C,c.deflateInfo="pako deflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./messages":72,"./trees":73}],/* Module 68: GZheader - plain record of gzip header fields (text/time/xflags/os/extra/name/comment/hcrc/done). */68:[function(a,b,c){"use strict";function d(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}b.exports=d},{}],/* Module 69 "inffast": unrolled inflate inner loop for the common case (at least 5 input and 257 output bytes available); decodes length/distance pairs straight from the bit accumulator and copies from window or output. */69:[function(a,b,c){"use 
strict";var d=30,e=12;b.exports=function(a,b){var c,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C;c=a.state,f=a.next_in,B=a.input,g=f+(a.avail_in-5),h=a.next_out,C=a.output,i=h-(b-a.avail_out),j=h+(a.avail_out-257),k=c.dmax,l=c.wsize,m=c.whave,n=c.wnext,o=c.window,p=c.hold,q=c.bits,r=c.lencode,s=c.distcode,t=(1<<c.lenbits)-1,u=(1<<c.distbits)-1;a:do{q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=r[p&t];b:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,0===w)C[h++]=65535&v;else{if(!(16&w)){if(0===(64&w)){v=r[(65535&v)+(p&(1<<w)-1)];continue b}if(32&w){c.mode=e;break a}a.msg="invalid literal/length code",c.mode=d;break a}x=65535&v,w&=15,w&&(q<w&&(p+=B[f++]<<q,q+=8),x+=p&(1<<w)-1,p>>>=w,q-=w),q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=s[p&u];c:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,!(16&w)){if(0===(64&w)){v=s[(65535&v)+(p&(1<<w)-1)];continue c}a.msg="invalid distance code",c.mode=d;break a}if(y=65535&v,w&=15,q<w&&(p+=B[f++]<<q,q+=8,q<w&&(p+=B[f++]<<q,q+=8)),y+=p&(1<<w)-1,y>k){a.msg="invalid distance too far back",c.mode=d;break a}if(p>>>=w,q-=w,w=h-i,y>w){if(w=y-w,w>m&&c.sane){a.msg="invalid distance too far back",c.mode=d;break a}if(z=0,A=o,0===n){if(z+=l-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}else if(n<w){if(z+=l+n-w,w-=n,w<x){x-=w;do C[h++]=o[z++];while(--w);if(z=0,n<x){w=n,x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}}else if(z+=n-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}for(;x>2;)C[h++]=A[z++],C[h++]=A[z++],C[h++]=A[z++],x-=3;x&&(C[h++]=A[z++],x>1&&(C[h++]=A[z++]))}else{z=h-y;do C[h++]=C[z++],C[h++]=C[z++],C[h++]=C[z++],x-=3;while(x>2);x&&(C[h++]=C[z++],x>1&&(C[h++]=C[z++]))}break}}break}}while(f<g&&h<j);x=q>>3,f-=x,q-=x<<3,p&=(1<<q)-1,a.next_in=f,a.next_out=h,a.avail_in=f<g?5+(g-f):5-(f-g),a.avail_out=h<j?257+(j-h):257-(h-j),c.hold=p,c.bits=q}},{}],/* Module 70 "inflate" (decompressor); d() byte-swaps a 32-bit value (gzip stores the adler/length little-endian). */70:[function(a,b,c){"use strict";function d(a){return(a>>>24&255)+(a>>>8&65280)+((65280&a)<<8)+((255&a)<<24)}/* e: the inflate state record - mode, window, bit accumulator, and Huffman code tables. */function 
e(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new s.Buf16(320),this.work=new s.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}/* f: inflateResetKeep (exported below) - resets counters/mode without touching the window size. */function f(a){var b;return a&&a.state?(b=a.state,a.total_in=a.total_out=b.total=0,a.msg="",b.wrap&&(a.adler=1&b.wrap),b.mode=L,b.last=0,b.havedict=0,b.dmax=32768,b.head=null,b.hold=0,b.bits=0,b.lencode=b.lendyn=new s.Buf32(pa),b.distcode=b.distdyn=new s.Buf32(qa),b.sane=1,b.back=-1,D):G}/* g: inflateReset - also clears the window bookkeeping. */function g(a){var b;return a&&a.state?(b=a.state,b.wsize=0,b.whave=0,b.wnext=0,f(a)):G}/* h: inflateReset2 - decodes windowBits into wrap mode (raw/zlib/gzip) and window size, dropping the window if the size changes. */function h(a,b){var c,d;return a&&a.state?(d=a.state,b<0?(c=0,b=-b):(c=(b>>4)+1,b<48&&(b&=15)),b&&(b<8||b>15)?G:(null!==d.window&&d.wbits!==b&&(d.window=null),d.wrap=c,d.wbits=b,g(a))):G}/* i: inflateInit2 - allocates the state then delegates to h. */function i(a,b){var c,d;return a?(d=new e,a.state=d,d.window=null,c=h(a,b),c!==D&&(a.state=null),c):G}/* j: inflateInit with the default windowBits (sa=15). */function j(a){return i(a,sa)}/* k: builds the fixed literal/length and distance decode tables once (cached in module-level q/r, guarded by ta). */function k(a){if(ta){var b;for(q=new s.Buf32(512),r=new s.Buf32(32),b=0;b<144;)a.lens[b++]=8;for(;b<256;)a.lens[b++]=9;for(;b<280;)a.lens[b++]=7;for(;b<288;)a.lens[b++]=8;for(w(y,a.lens,0,288,q,0,a.work,{bits:9}),b=0;b<32;)a.lens[b++]=5;w(z,a.lens,0,32,r,0,a.work,{bits:5}),ta=!1}a.lencode=q,a.lenbits=9,a.distcode=r,a.distbits=5}/* l: copies freshly produced output into the back-reference window (allocating it lazily), wrapping at wsize. */function l(a,b,c,d){var e,f=a.state;return null===f.window&&(f.wsize=1<<f.wbits,f.wnext=0,f.whave=0,f.window=new s.Buf8(f.wsize)),d>=f.wsize?(s.arraySet(f.window,b,c-f.wsize,f.wsize,0),f.wnext=0,f.whave=f.wsize):(e=f.wsize-f.wnext,e>d&&(e=d),s.arraySet(f.window,b,c-d,e,f.wnext),d-=e,d?(s.arraySet(f.window,b,c-d,d,0),f.wnext=d,f.whave=f.wsize):(f.wnext+=e,f.wnext===f.wsize&&(f.wnext=0),f.whave<f.wsize&&(f.whave+=e))),0}/* m: inflate - the main state machine over modes L..oa: header detection (zlib vs gzip), gzip extra/name/comment/hcrc fields, block type dispatch (stored / fixed / dynamic), code-table building, length/distance decoding with inffast in the common case, match copy, trailer check, done/bad/mem states. */function m(a,b){var 
c,e,f,g,h,i,j,m,n,o,p,q,r,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,Aa=0,Ba=new s.Buf8(4),Ca=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];if(!a||!a.state||!a.output||!a.input&&0!==a.avail_in)return G;c=a.state,c.mode===W&&(c.mode=X),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,o=i,p=j,xa=D;a:for(;;)switch(c.mode){case L:if(0===c.wrap){c.mode=X;break}for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(2&c.wrap&&35615===m){c.check=0,Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0),m=0,n=0,c.mode=M;break}if(c.flags=0,c.head&&(c.head.done=!1),!(1&c.wrap)||(((255&m)<<8)+(m>>8))%31){a.msg="incorrect header check",c.mode=ma;break}if((15&m)!==K){a.msg="unknown compression method",c.mode=ma;break}if(m>>>=4,n-=4,wa=(15&m)+8,0===c.wbits)c.wbits=wa;else if(wa>c.wbits){a.msg="invalid window size",c.mode=ma;break}c.dmax=1<<wa,a.adler=c.check=1,c.mode=512&m?U:W,m=0,n=0;break;case M:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.flags=m,(255&c.flags)!==K){a.msg="unknown compression method",c.mode=ma;break}if(57344&c.flags){a.msg="unknown header flags set",c.mode=ma;break}c.head&&(c.head.text=m>>8&1),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=N;case N:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.time=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,Ba[2]=m>>>16&255,Ba[3]=m>>>24&255,c.check=u(c.check,Ba,4,0)),m=0,n=0,c.mode=O;case O:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.xflags=255&m,c.head.os=m>>8),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=P;case P:if(1024&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length=m,c.head&&(c.head.extra_len=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0}else c.head&&(c.head.extra=null);c.mode=Q;case Q:if(1024&c.flags&&(q=c.length,q>i&&(q=i),q&&(c.head&&(wa=c.head.extra_len-c.length,c.head.extra||(c.head.extra=new 
Array(c.head.extra_len)),s.arraySet(c.head.extra,e,g,q,wa)),512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,c.length-=q),c.length))break a;c.length=0,c.mode=R;case R:if(2048&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.name+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.name=null);c.length=0,c.mode=S;case S:if(4096&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.comment+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.comment=null);c.length=0,c.mode=T;case T:if(512&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(65535&c.check)){a.msg="header crc mismatch",c.mode=ma;break}m=0,n=0}c.head&&(c.head.hcrc=c.flags>>9&1,c.head.done=!0),a.adler=c.check=0,c.mode=W;break;case U:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}a.adler=c.check=d(m),m=0,n=0,c.mode=V;case V:if(0===c.havedict)return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,F;a.adler=c.check=1,c.mode=W;case W:if(b===B||b===C)break a;case X:if(c.last){m>>>=7&n,n-=7&n,c.mode=ja;break}for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}switch(c.last=1&m,m>>>=1,n-=1,3&m){case 0:c.mode=Y;break;case 1:if(k(c),c.mode=ca,b===C){m>>>=2,n-=2;break a}break;case 2:c.mode=_;break;case 3:a.msg="invalid block type",c.mode=ma}m>>>=2,n-=2;break;case Y:for(m>>>=7&n,n-=7&n;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if((65535&m)!==(m>>>16^65535)){a.msg="invalid stored block lengths",c.mode=ma;break}if(c.length=65535&m,m=0,n=0,c.mode=Z,b===C)break a;case Z:c.mode=$;case $:if(q=c.length){if(q>i&&(q=i),q>j&&(q=j),0===q)break a;s.arraySet(f,e,g,q,h),i-=q,g+=q,j-=q,h+=q,c.length-=q;break}c.mode=W;break;case _:for(;n<14;){if(0===i)break 
a;i--,m+=e[g++]<<n,n+=8}if(c.nlen=(31&m)+257,m>>>=5,n-=5,c.ndist=(31&m)+1,m>>>=5,n-=5,c.ncode=(15&m)+4,m>>>=4,n-=4,c.nlen>286||c.ndist>30){a.msg="too many length or distance symbols",c.mode=ma;break}c.have=0,c.mode=aa;case aa:for(;c.have<c.ncode;){for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.lens[Ca[c.have++]]=7&m,m>>>=3,n-=3}for(;c.have<19;)c.lens[Ca[c.have++]]=0;if(c.lencode=c.lendyn,c.lenbits=7,ya={bits:c.lenbits},xa=w(x,c.lens,0,19,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid code lengths set",c.mode=ma;break}c.have=0,c.mode=ba;case ba:for(;c.have<c.nlen+c.ndist;){for(;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(sa<16)m>>>=qa,n-=qa,c.lens[c.have++]=sa;else{if(16===sa){for(za=qa+2;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m>>>=qa,n-=qa,0===c.have){a.msg="invalid bit length repeat",c.mode=ma;break}wa=c.lens[c.have-1],q=3+(3&m),m>>>=2,n-=2}else if(17===sa){for(za=qa+3;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=3+(7&m),m>>>=3,n-=3}else{for(za=qa+7;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=11+(127&m),m>>>=7,n-=7}if(c.have+q>c.nlen+c.ndist){a.msg="invalid bit length repeat",c.mode=ma;break}for(;q--;)c.lens[c.have++]=wa}}if(c.mode===ma)break;if(0===c.lens[256]){a.msg="invalid code -- missing end-of-block",c.mode=ma;break}if(c.lenbits=9,ya={bits:c.lenbits},xa=w(y,c.lens,0,c.nlen,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid literal/lengths set",c.mode=ma;break}if(c.distbits=6,c.distcode=c.distdyn,ya={bits:c.distbits},xa=w(z,c.lens,c.nlen,c.ndist,c.distcode,0,c.work,ya),c.distbits=ya.bits,xa){a.msg="invalid distances set",c.mode=ma;break}if(c.mode=ca,b===C)break a;case ca:c.mode=da;case 
da:if(i>=6&&j>=258){a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,v(a,p),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,c.mode===W&&(c.back=-1);break}for(c.back=0;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(ra&&0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.lencode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,c.length=sa,0===ra){c.mode=ia;break}if(32&ra){c.back=-1,c.mode=W;break}if(64&ra){a.msg="invalid literal/length code",c.mode=ma;break}c.extra=15&ra,c.mode=ea;case ea:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}c.was=c.length,c.mode=fa;case fa:for(;Aa=c.distcode[m&(1<<c.distbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.distcode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,64&ra){a.msg="invalid distance code",c.mode=ma;break}c.offset=sa,c.extra=15&ra,c.mode=ga;case ga:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.offset+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}if(c.offset>c.dmax){a.msg="invalid distance too far back",c.mode=ma;break}c.mode=ha;case ha:if(0===j)break a;if(q=p-j,c.offset>q){if(q=c.offset-q,q>c.whave&&c.sane){a.msg="invalid distance too far back",c.mode=ma;break}q>c.wnext?(q-=c.wnext,r=c.wsize-q):r=c.wnext-q,q>c.length&&(q=c.length),pa=c.window}else pa=f,r=h-c.offset,q=c.length;q>j&&(q=j),j-=q,c.length-=q;do f[h++]=pa[r++];while(--q);0===c.length&&(c.mode=da);break;case ia:if(0===j)break a;f[h++]=c.length,j--,c.mode=da;break;case 
ja:if(c.wrap){for(;n<32;){if(0===i)break a;i--,m|=e[g++]<<n,n+=8}if(p-=j,a.total_out+=p,c.total+=p,p&&(a.adler=c.check=c.flags?u(c.check,f,p,h-p):t(c.check,f,p,h-p)),p=j,(c.flags?m:d(m))!==c.check){a.msg="incorrect data check",c.mode=ma;break}m=0,n=0}c.mode=ka;case ka:if(c.wrap&&c.flags){for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(4294967295&c.total)){a.msg="incorrect length check",c.mode=ma;break}m=0,n=0}c.mode=la;case la:xa=E;break a;case ma:xa=H;break a;case na:return I;case oa:default:return G}return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,(c.wsize||p!==a.avail_out&&c.mode<ma&&(c.mode<ja||b!==A))&&l(a,a.output,a.next_out,p-a.avail_out)?(c.mode=na,I):(o-=a.avail_in,p-=a.avail_out,a.total_in+=o,a.total_out+=p,c.total+=p,c.wrap&&p&&(a.adler=c.check=c.flags?u(c.check,f,p,a.next_out-p):t(c.check,f,p,a.next_out-p)),a.data_type=c.bits+(c.last?64:0)+(c.mode===W?128:0)+(c.mode===ca||c.mode===Z?256:0),(0===o&&0===p||b===A)&&xa===D&&(xa=J),xa)}/* n: inflateEnd - drops the window and state. */function n(a){if(!a||!a.state)return G;var b=a.state;return b.window&&(b.window=null),a.state=null,D}/* o: inflateGetHeader - only legal for gzip wrap. */function o(a,b){var c;return a&&a.state?(c=a.state,0===(2&c.wrap)?G:(c.head=b,b.done=!1,D)):G}/* p: inflateSetDictionary - verifies the dictionary's adler32 against state.check (zlib wrap), then loads it into the window via l(). */function p(a,b){var c,d,e,f=b.length;return a&&a.state?(c=a.state,0!==c.wrap&&c.mode!==V?G:c.mode===V&&(d=1,d=t(d,b,f,0),d!==c.check)?H:(e=l(a,b,f,f))?(c.mode=na,I):(c.havedict=1,D)):G}var q,r,s=a("../utils/common"),t=a("./adler32"),u=a("./crc32"),v=a("./inffast"),w=a("./inftrees"),x=0,y=1,z=2,A=4,B=5,C=6,D=0,E=1,F=2,G=-2,H=-3,I=-4,J=-5,K=8,L=1,M=2,N=3,O=4,P=5,Q=6,R=7,S=8,T=9,U=10,V=11,W=12,X=13,Y=14,Z=15,$=16,_=17,aa=18,ba=19,ca=20,da=21,ea=22,fa=23,ga=24,ha=25,ia=26,ja=27,ka=28,la=29,ma=30,na=31,oa=32,pa=852,qa=592,ra=15,sa=ra,ta=!0;c.inflateReset=g,c.inflateReset2=h,c.inflateResetKeep=f,c.inflateInit=j,c.inflateInit2=i,c.inflate=m,c.inflateEnd=n,c.inflateGetHeader=o,c.inflateSetDictionary=p,c.inflateInfo="pako inflate (from Nodeca 
project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./inffast":69,"./inftrees":71}],/* Module 71 "inftrees": builds canonical-Huffman decode tables (root table plus sub-tables) from an array of code lengths; k/l and m/n are the length and distance base/extra-bit tables; returns 0 on success, non-zero for invalid or oversubscribed code sets. */71:[function(a,b,c){"use strict";var d=a("../utils/common"),e=15,f=852,g=592,h=0,i=1,j=2,k=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,0,0],l=[16,16,16,16,16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,16,72,78],m=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0],n=[16,16,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,64,64];b.exports=function(a,b,c,o,p,q,r,s){var t,u,v,w,x,y,z,A,B,C=s.bits,D=0,E=0,F=0,G=0,H=0,I=0,J=0,K=0,L=0,M=0,N=null,O=0,P=new d.Buf16(e+1),Q=new d.Buf16(e+1),R=null,S=0;for(D=0;D<=e;D++)P[D]=0;for(E=0;E<o;E++)P[b[c+E]]++;for(H=C,G=e;G>=1&&0===P[G];G--);if(H>G&&(H=G),0===G)return p[q++]=20971520,p[q++]=20971520,s.bits=1,0;for(F=1;F<G&&0===P[F];F++);for(H<F&&(H=F),K=1,D=1;D<=e;D++)if(K<<=1,K-=P[D],K<0)return-1;if(K>0&&(a===h||1!==G))return-1;for(Q[1]=0,D=1;D<e;D++)Q[D+1]=Q[D]+P[D];for(E=0;E<o;E++)0!==b[c+E]&&(r[Q[b[c+E]]++]=E);if(a===h?(N=R=r,y=19):a===i?(N=k,O-=257,R=l,S-=257,y=256):(N=m,R=n,y=-1),M=0,E=0,D=F,x=q,I=H,J=0,v=-1,L=1<<H,w=L-1,a===i&&L>f||a===j&&L>g)return 1;for(;;){z=D-J,r[E]<y?(A=0,B=r[E]):r[E]>y?(A=R[S+r[E]],B=N[O+r[E]]):(A=96,B=0),t=1<<D-J,u=1<<I,F=u;do u-=t,p[x+(M>>J)+u]=z<<24|A<<16|B|0;while(0!==u);for(t=1<<D-1;M&t;)t>>=1;if(0!==t?(M&=t-1,M+=t):M=0,E++,0===--P[D]){if(D===G)break;D=b[c+r[E]]}if(D>H&&(M&w)!==v){for(0===J&&(J=H),x+=F,I=D-J,K=1<<I;I+J<G&&(K-=P[I+J],!(K<=0));)I++,K<<=1;if(L+=1<<I,a===i&&L>f||a===j&&L>g)return 1;v=M&w,p[v]=H<<24|I<<16|x-q|0}}return 0!==M&&(p[x+M]=D-J<<24|64<<16|0),s.bits=H,0}},{"../utils/common":62}],/* Module 72: maps zlib status codes to human-readable message strings (keys are the numeric codes as strings). */72:[function(a,b,c){"use strict";b.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible 
version"}},{}],73:[function(a,b,c){"use strict";function d(a){for(var b=a.length;--b>=0;)a[b]=0}function e(a,b,c,d,e){this.static_tree=a,this.extra_bits=b,this.extra_base=c,this.elems=d,this.max_length=e,this.has_stree=a&&a.length}function f(a,b){this.dyn_tree=a,this.max_code=0,this.stat_desc=b}function g(a){return a<256?ia[a]:ia[256+(a>>>7)]}function h(a,b){a.pending_buf[a.pending++]=255&b,a.pending_buf[a.pending++]=b>>>8&255}function i(a,b,c){a.bi_valid>X-c?(a.bi_buf|=b<<a.bi_valid&65535,h(a,a.bi_buf),a.bi_buf=b>>X-a.bi_valid,a.bi_valid+=c-X):(a.bi_buf|=b<<a.bi_valid&65535,a.bi_valid+=c)}function j(a,b,c){i(a,c[2*b],c[2*b+1])}function k(a,b){var c=0;do c|=1&a,a>>>=1,c<<=1;while(--b>0);return c>>>1}function l(a){16===a.bi_valid?(h(a,a.bi_buf),a.bi_buf=0,a.bi_valid=0):a.bi_valid>=8&&(a.pending_buf[a.pending++]=255&a.bi_buf,a.bi_buf>>=8,a.bi_valid-=8)}function m(a,b){var c,d,e,f,g,h,i=b.dyn_tree,j=b.max_code,k=b.stat_desc.static_tree,l=b.stat_desc.has_stree,m=b.stat_desc.extra_bits,n=b.stat_desc.extra_base,o=b.stat_desc.max_length,p=0;for(f=0;f<=W;f++)a.bl_count[f]=0;for(i[2*a.heap[a.heap_max]+1]=0,
c=a.heap_max+1;c<V;c++)d=a.heap[c],f=i[2*i[2*d+1]+1]+1,f>o&&(f=o,p++),i[2*d+1]=f,d>j||(a.bl_count[f]++,g=0,d>=n&&(g=m[d-n]),h=i[2*d],a.opt_len+=h*(f+g),l&&(a.static_len+=h*(k[2*d+1]+g)));if(0!==p){do{for(f=o-1;0===a.bl_count[f];)f--;a.bl_count[f]--,a.bl_count[f+1]+=2,a.bl_count[o]--,p-=2}while(p>0);for(f=o;0!==f;f--)for(d=a.bl_count[f];0!==d;)e=a.heap[--c],e>j||(i[2*e+1]!==f&&(a.opt_len+=(f-i[2*e+1])*i[2*e],i[2*e+1]=f),d--)}}function n(a,b,c){var d,e,f=new Array(W+1),g=0;for(d=1;d<=W;d++)f[d]=g=g+c[d-1]<<1;for(e=0;e<=b;e++){var h=a[2*e+1];0!==h&&(a[2*e]=k(f[h]++,h))}}function o(){var a,b,c,d,f,g=new Array(W+1);for(c=0,d=0;d<Q-1;d++)for(ka[d]=c,a=0;a<1<<ba[d];a++)ja[c++]=d;for(ja[c-1]=d,f=0,d=0;d<16;d++)for(la[d]=f,a=0;a<1<<ca[d];a++)ia[f++]=d;for(f>>=7;d<T;d++)for(la[d]=f<<7,a=0;a<1<<ca[d]-7;a++)ia[256+f++]=d;for(b=0;b<=W;b++)g[b]=0;for(a=0;a<=143;)ga[2*a+1]=8,a++,g[8]++;for(;a<=255;)ga[2*a+1]=9,a++,g[9]++;for(;a<=279;)ga[2*a+1]=7,a++,g[7]++;for(;a<=287;)ga[2*a+1]=8,a++,g[8]++;for(n(ga,S+1,g),a=0;a<T;a++)ha[2*a+1]=5,ha[2*a]=k(a,5);ma=new e(ga,ba,R+1,S,W),na=new e(ha,ca,0,T,W),oa=new e(new Array(0),da,0,U,Y)}function p(a){var b;for(b=0;b<S;b++)a.dyn_ltree[2*b]=0;for(b=0;b<T;b++)a.dyn_dtree[2*b]=0;for(b=0;b<U;b++)a.bl_tree[2*b]=0;a.dyn_ltree[2*Z]=1,a.opt_len=a.static_len=0,a.last_lit=a.matches=0}function q(a){a.bi_valid>8?h(a,a.bi_buf):a.bi_valid>0&&(a.pending_buf[a.pending++]=a.bi_buf),a.bi_buf=0,a.bi_valid=0}function r(a,b,c,d){q(a),d&&(h(a,c),h(a,~c)),G.arraySet(a.pending_buf,a.window,b,c,a.pending),a.pending+=c}function s(a,b,c,d){var e=2*b,f=2*c;return a[e]<a[f]||a[e]===a[f]&&d[b]<=d[c]}function t(a,b,c){for(var d=a.heap[c],e=c<<1;e<=a.heap_len&&(e<a.heap_len&&s(b,a.heap[e+1],a.heap[e],a.depth)&&e++,!s(b,d,a.heap[e],a.depth));)a.heap[c]=a.heap[e],c=e,e<<=1;a.heap[c]=d}function u(a,b,c){var d,e,f,h,k=0;if(0!==a.last_lit)do 
d=a.pending_buf[a.d_buf+2*k]<<8|a.pending_buf[a.d_buf+2*k+1],e=a.pending_buf[a.l_buf+k],k++,0===d?j(a,e,b):(f=ja[e],j(a,f+R+1,b),h=ba[f],0!==h&&(e-=ka[f],i(a,e,h)),d--,f=g(d),j(a,f,c),h=ca[f],0!==h&&(d-=la[f],i(a,d,h)));while(k<a.last_lit);j(a,Z,b)}function v(a,b){var c,d,e,f=b.dyn_tree,g=b.stat_desc.static_tree,h=b.stat_desc.has_stree,i=b.stat_desc.elems,j=-1;for(a.heap_len=0,a.heap_max=V,c=0;c<i;c++)0!==f[2*c]?(a.heap[++a.heap_len]=j=c,a.depth[c]=0):f[2*c+1]=0;for(;a.heap_len<2;)e=a.heap[++a.heap_len]=j<2?++j:0,f[2*e]=1,a.depth[e]=0,a.opt_len--,h&&(a.static_len-=g[2*e+1]);for(b.max_code=j,c=a.heap_len>>1;c>=1;c--)t(a,f,c);e=i;do c=a.heap[1],a.heap[1]=a.heap[a.heap_len--],t(a,f,1),d=a.heap[1],a.heap[--a.heap_max]=c,a.heap[--a.heap_max]=d,f[2*e]=f[2*c]+f[2*d],a.depth[e]=(a.depth[c]>=a.depth[d]?a.depth[c]:a.depth[d])+1,f[2*c+1]=f[2*d+1]=e,a.heap[1]=e++,t(a,f,1);while(a.heap_len>=2);a.heap[--a.heap_max]=a.heap[1],m(a,b),n(f,j,a.bl_count)}function w(a,b,c){var d,e,f=-1,g=b[1],h=0,i=7,j=4;for(0===g&&(i=138,j=3),b[2*(c+1)+1]=65535,d=0;d<=c;d++)e=g,g=b[2*(d+1)+1],++h<i&&e===g||(h<j?a.bl_tree[2*e]+=h:0!==e?(e!==f&&a.bl_tree[2*e]++,a.bl_tree[2*$]++):h<=10?a.bl_tree[2*_]++:a.bl_tree[2*aa]++,h=0,f=e,0===g?(i=138,j=3):e===g?(i=6,j=3):(i=7,j=4))}function x(a,b,c){var d,e,f=-1,g=b[1],h=0,k=7,l=4;for(0===g&&(k=138,l=3),d=0;d<=c;d++)if(e=g,g=b[2*(d+1)+1],!(++h<k&&e===g)){if(h<l){do j(a,e,a.bl_tree);while(0!==--h)}else 0!==e?(e!==f&&(j(a,e,a.bl_tree),h--),j(a,$,a.bl_tree),i(a,h-3,2)):h<=10?(j(a,_,a.bl_tree),i(a,h-3,3)):(j(a,aa,a.bl_tree),i(a,h-11,7));h=0,f=e,0===g?(k=138,l=3):e===g?(k=6,l=3):(k=7,l=4)}}function y(a){var b;for(w(a,a.dyn_ltree,a.l_desc.max_code),w(a,a.dyn_dtree,a.d_desc.max_code),v(a,a.bl_desc),b=U-1;b>=3&&0===a.bl_tree[2*ea[b]+1];b--);return a.opt_len+=3*(b+1)+5+5+4,b}function z(a,b,c,d){var e;for(i(a,b-257,5),i(a,c-1,5),i(a,d-4,4),e=0;e<d;e++)i(a,a.bl_tree[2*ea[e]+1],3);x(a,a.dyn_ltree,b-1),x(a,a.dyn_dtree,c-1)}function A(a){var 
b,c=4093624447;for(b=0;b<=31;b++,c>>>=1)if(1&c&&0!==a.dyn_ltree[2*b])return I;if(0!==a.dyn_ltree[18]||0!==a.dyn_ltree[20]||0!==a.dyn_ltree[26])return J;for(b=32;b<R;b++)if(0!==a.dyn_ltree[2*b])return J;return I}function B(a){pa||(o(),pa=!0),a.l_desc=new f(a.dyn_ltree,ma),a.d_desc=new f(a.dyn_dtree,na),a.bl_desc=new f(a.bl_tree,oa),a.bi_buf=0,a.bi_valid=0,p(a)}function C(a,b,c,d){i(a,(L<<1)+(d?1:0),3),r(a,b,c,!0)}function D(a){i(a,M<<1,3),j(a,Z,ga),l(a)}function E(a,b,c,d){var e,f,g=0;a.level>0?(a.strm.data_type===K&&(a.strm.data_type=A(a)),v(a,a.l_desc),v(a,a.d_desc),g=y(a),e=a.opt_len+3+7>>>3,f=a.static_len+3+7>>>3,f<=e&&(e=f)):e=f=c+5,c+4<=e&&b!==-1?C(a,b,c,d):a.strategy===H||f===e?(i(a,(M<<1)+(d?1:0),3),u(a,ga,ha)):(i(a,(N<<1)+(d?1:0),3),z(a,a.l_desc.max_code+1,a.d_desc.max_code+1,g+1),u(a,a.dyn_ltree,a.dyn_dtree)),p(a),d&&q(a)}function F(a,b,c){return a.pending_buf[a.d_buf+2*a.last_lit]=b>>>8&255,a.pending_buf[a.d_buf+2*a.last_lit+1]=255&b,a.pending_buf[a.l_buf+a.last_lit]=255&c,a.last_lit++,0===b?a.dyn_ltree[2*c]++:(a.matches++,b--,a.dyn_ltree[2*(ja[c]+R+1)]++,a.dyn_dtree[2*g(b)]++),a.last_lit===a.lit_bufsize-1}var G=a("../utils/common"),H=4,I=0,J=1,K=2,L=0,M=1,N=2,O=3,P=258,Q=29,R=256,S=R+1+Q,T=30,U=19,V=2*S+1,W=15,X=16,Y=7,Z=256,$=16,_=17,aa=18,ba=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],ca=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],da=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],ea=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],fa=512,ga=new Array(2*(S+2));d(ga);var ha=new Array(2*T);d(ha);var ia=new Array(fa);d(ia);var ja=new Array(P-O+1);d(ja);var ka=new Array(Q);d(ka);var la=new Array(T);d(la);var ma,na,oa,pa=!1;c._tr_init=B,c._tr_stored_block=C,c._tr_flush_block=E,c._tr_tally=F,c._tr_align=D},{"../utils/common":62}],74:[function(a,b,c){"use strict";function 
d(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}b.exports=d},{}]},{},[10])(10)}); | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/js/libs/zip.min.js | zip.min.js |
(function () {
'use strict';
var isCommonjs = typeof module !== 'undefined' && module.exports;
var keyboardAllowed = typeof Element !== 'undefined' && 'ALLOW_KEYBOARD_INPUT' in Element;
var fn = (function () {
var val;
var valLength;
var fnMap = [
[
'requestFullscreen',
'exitFullscreen',
'fullscreenElement',
'fullscreenEnabled',
'fullscreenchange',
'fullscreenerror'
],
// new WebKit
[
'webkitRequestFullscreen',
'webkitExitFullscreen',
'webkitFullscreenElement',
'webkitFullscreenEnabled',
'webkitfullscreenchange',
'webkitfullscreenerror'
],
// old WebKit (Safari 5.1)
[
'webkitRequestFullScreen',
'webkitCancelFullScreen',
'webkitCurrentFullScreenElement',
'webkitCancelFullScreen',
'webkitfullscreenchange',
'webkitfullscreenerror'
],
[
'mozRequestFullScreen',
'mozCancelFullScreen',
'mozFullScreenElement',
'mozFullScreenEnabled',
'mozfullscreenchange',
'mozfullscreenerror'
],
[
'msRequestFullscreen',
'msExitFullscreen',
'msFullscreenElement',
'msFullscreenEnabled',
'MSFullscreenChange',
'MSFullscreenError'
]
];
var i = 0;
var l = fnMap.length;
var ret = {};
for (; i < l; i++) {
val = fnMap[i];
if (val && val[1] in document) {
for (i = 0, valLength = val.length; i < valLength; i++) {
ret[fnMap[0][i]] = val[i];
}
return ret;
}
}
return false;
})();
var screenfull = {
request: function (elem) {
var request = fn.requestFullscreen;
elem = elem || document.documentElement;
// Work around Safari 5.1 bug: reports support for
// keyboard in fullscreen even though it doesn't.
// Browser sniffing, since the alternative with
// setTimeout is even worse.
if (/5\.1[\.\d]* Safari/.test(navigator.userAgent)) {
elem[request]();
} else {
elem[request](keyboardAllowed && Element.ALLOW_KEYBOARD_INPUT);
}
},
exit: function () {
document[fn.exitFullscreen]();
},
toggle: function (elem) {
if (this.isFullscreen) {
this.exit();
} else {
this.request(elem);
}
},
raw: fn
};
if (!fn) {
if (isCommonjs) {
module.exports = false;
} else {
window.screenfull = false;
}
return;
}
// Live, read-only accessors reflecting the document's current fullscreen
// state. Defined one at a time; `isFullscreen` is intentionally left
// non-enumerable (no `enumerable` flag), matching the shipped API.
Object.defineProperty(screenfull, 'isFullscreen', {
get: function () {
// Double-negate so a missing element reads as `false`, not `null`.
return !!document[fn.fullscreenElement];
}
});
Object.defineProperty(screenfull, 'element', {
enumerable: true,
get: function () {
// The element currently displayed fullscreen, or null/undefined.
return document[fn.fullscreenElement];
}
});
Object.defineProperty(screenfull, 'enabled', {
enumerable: true,
get: function () {
// Coerce to boolean in case of old WebKit
return !!document[fn.fullscreenEnabled];
}
});
// Publish the API: CommonJS export when a module system is present,
// otherwise attach it to the global `window`.
if (isCommonjs) {
module.exports = screenfull;
} else {
window.screenfull = screenfull;
}
})();
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.localforage=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],2:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof 
b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(b){return p.reject(a,b)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&"object"==typeof a&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(a){c.status="error",c.value=a}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a(1),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=c=e,e.prototype.catch=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return 
c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},c.resolve=k,c.reject=l,c.all=m,c.race=n},{1:1}],3:[function(a,b,c){(function(b){"use strict";"function"!=typeof b.Promise&&(b.Promise=a(2))}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{2:2}],4:[function(a,b,c){"use strict";function d(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function e(){try{if("undefined"!=typeof indexedDB)return indexedDB;if("undefined"!=typeof webkitIndexedDB)return webkitIndexedDB;if("undefined"!=typeof mozIndexedDB)return mozIndexedDB;if("undefined"!=typeof OIndexedDB)return OIndexedDB;if("undefined"!=typeof msIndexedDB)return msIndexedDB}catch(a){}}function f(){try{if(!ga)return!1;var a="undefined"!=typeof openDatabase&&/(Safari|iPhone|iPad|iPod)/.test(navigator.userAgent)&&!/Chrome/.test(navigator.userAgent)&&!/BlackBerry/.test(navigator.platform),b="function"==typeof fetch&&fetch.toString().indexOf("[native code")!==-1;return(!a||b)&&"undefined"!=typeof indexedDB&&"undefined"!=typeof IDBKeyRange}catch(a){return!1}}function g(){return"function"==typeof openDatabase}function h(){try{return"undefined"!=typeof localStorage&&"setItem"in localStorage&&localStorage.setItem}catch(a){return!1}}function i(a,b){a=a||[],b=b||{};try{return new Blob(a,b)}catch(f){if("TypeError"!==f.name)throw f;for(var c="undefined"!=typeof 
BlobBuilder?BlobBuilder:"undefined"!=typeof MSBlobBuilder?MSBlobBuilder:"undefined"!=typeof MozBlobBuilder?MozBlobBuilder:WebKitBlobBuilder,d=new c,e=0;e<a.length;e+=1)d.append(a[e]);return d.getBlob(b.type)}}function j(a,b){b&&a.then(function(a){b(null,a)},function(a){b(a)})}function k(a,b,c){"function"==typeof b&&a.then(b),"function"==typeof c&&a.catch(c)}function l(a){for(var b=a.length,c=new ArrayBuffer(b),d=new Uint8Array(c),e=0;e<b;e++)d[e]=a.charCodeAt(e);return c}function m(a){return new ja(function(b){var c=a.transaction(ka,"readwrite"),d=i([""]);c.objectStore(ka).put(d,"key"),c.onabort=function(a){a.preventDefault(),a.stopPropagation(),b(!1)},c.oncomplete=function(){var a=navigator.userAgent.match(/Chrome\/(\d+)/),c=navigator.userAgent.match(/Edge\//);b(c||!a||parseInt(a[1],10)>=43)}}).catch(function(){return!1})}function n(a){return"boolean"==typeof ha?ja.resolve(ha):m(a).then(function(a){return ha=a})}function o(a){var b=ia[a.name],c={};c.promise=new ja(function(a){c.resolve=a}),b.deferredOperations.push(c),b.dbReady?b.dbReady=b.dbReady.then(function(){return c.promise}):b.dbReady=c.promise}function p(a){var b=ia[a.name],c=b.deferredOperations.pop();c&&c.resolve()}function q(a,b){return new ja(function(c,d){if(a.db){if(!b)return c(a.db);o(a),a.db.close()}var e=[a.name];b&&e.push(a.version);var f=ga.open.apply(ga,e);b&&(f.onupgradeneeded=function(b){var c=f.result;try{c.createObjectStore(a.storeName),b.oldVersion<=1&&c.createObjectStore(ka)}catch(c){if("ConstraintError"!==c.name)throw c;console.warn('The database "'+a.name+'" has been upgraded from version '+b.oldVersion+" to version "+b.newVersion+', but the storage "'+a.storeName+'" already exists.')}}),f.onerror=function(a){a.preventDefault(),d(f.error)},f.onsuccess=function(){c(f.result),p(a)}})}function r(a){return q(a,!1)}function s(a){return q(a,!0)}function t(a,b){if(!a.db)return!0;var 
c=!a.db.objectStoreNames.contains(a.storeName),d=a.version<a.db.version,e=a.version>a.db.version;if(d&&(a.version!==b&&console.warn('The database "'+a.name+"\" can't be downgraded from version "+a.db.version+" to version "+a.version+"."),a.version=a.db.version),e||c){if(c){var f=a.db.version+1;f>a.version&&(a.version=f)}return!0}return!1}function u(a){return new ja(function(b,c){var d=new FileReader;d.onerror=c,d.onloadend=function(c){var d=btoa(c.target.result||"");b({__local_forage_encoded_blob:!0,data:d,type:a.type})},d.readAsBinaryString(a)})}function v(a){var b=l(atob(a.data));return i([b],{type:a.type})}function w(a){return a&&a.__local_forage_encoded_blob}function x(a){var b=this,c=b._initReady().then(function(){var a=ia[b._dbInfo.name];if(a&&a.dbReady)return a.dbReady});return k(c,a,a),c}function y(a){function b(){return ja.resolve()}var c=this,d={db:null};if(a)for(var e in a)d[e]=a[e];ia||(ia={});var f=ia[d.name];f||(f={forages:[],db:null,dbReady:null,deferredOperations:[]},ia[d.name]=f),f.forages.push(c),c._initReady||(c._initReady=c.ready,c.ready=x);for(var g=[],h=0;h<f.forages.length;h++){var i=f.forages[h];i!==c&&g.push(i._initReady().catch(b))}var j=f.forages.slice(0);return ja.all(g).then(function(){return d.db=f.db,r(d)}).then(function(a){return d.db=a,t(d,c._defaultConfig.version)?s(d):a}).then(function(a){d.db=f.db=a,c._dbInfo=d;for(var b=0;b<j.length;b++){var e=j[b];e!==c&&(e._dbInfo.db=d.db,e._dbInfo.version=d.version)}})}function z(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.get(a);g.onsuccess=function(){var a=g.result;void 0===a&&(a=null),w(a)&&(a=v(a)),b(a)},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function A(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var 
e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.openCursor(),h=1;g.onsuccess=function(){var c=g.result;if(c){var d=c.value;w(d)&&(d=v(d));var e=a(d,c.key,h++);void 0!==e?b(e):c.continue()}else b()},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function B(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=new ja(function(c,e){var f;d.ready().then(function(){return f=d._dbInfo,"[object Blob]"===la.call(b)?n(f.db).then(function(a){return a?b:u(b)}):b}).then(function(b){var d=f.db.transaction(f.storeName,"readwrite"),g=d.objectStore(f.storeName),h=g.put(b,a);null===b&&(b=void 0),d.oncomplete=function(){void 0===b&&(b=null),c(b)},d.onabort=d.onerror=function(){var a=h.error?h.error:h.transaction.error;e(a)}}).catch(e)});return j(e,c),e}function C(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readwrite"),g=f.objectStore(e.storeName),h=g.delete(a);f.oncomplete=function(){b()},f.onerror=function(){d(h.error)},f.onabort=function(){var a=h.error?h.error:h.transaction.error;d(a)}}).catch(d)});return j(d,b),d}function D(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readwrite"),f=e.objectStore(d.storeName),g=f.clear();e.oncomplete=function(){a()},e.onabort=e.onerror=function(){var a=g.error?g.error:g.transaction.error;c(a)}}).catch(c)});return j(c,a),c}function E(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.count();f.onsuccess=function(){a(f.result)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function F(a,b){var c=this,d=new ja(function(b,d){return a<0?void b(null):void c.ready().then(function(){var 
e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=!1,h=f.openCursor();h.onsuccess=function(){var c=h.result;return c?void(0===a?b(c.key):g?b(c.key):(g=!0,c.advance(a))):void b(null)},h.onerror=function(){d(h.error)}}).catch(d)});return j(d,b),d}function G(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.openCursor(),g=[];f.onsuccess=function(){var b=f.result;return b?(g.push(b.key),void b.continue()):void a(g)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function H(a){var b,c,d,e,f,g=.75*a.length,h=a.length,i=0;"="===a[a.length-1]&&(g--,"="===a[a.length-2]&&g--);var j=new ArrayBuffer(g),k=new Uint8Array(j);for(b=0;b<h;b+=4)c=na.indexOf(a[b]),d=na.indexOf(a[b+1]),e=na.indexOf(a[b+2]),f=na.indexOf(a[b+3]),k[i++]=c<<2|d>>4,k[i++]=(15&d)<<4|e>>2,k[i++]=(3&e)<<6|63&f;return j}function I(a){var b,c=new Uint8Array(a),d="";for(b=0;b<c.length;b+=3)d+=na[c[b]>>2],d+=na[(3&c[b])<<4|c[b+1]>>4],d+=na[(15&c[b+1])<<2|c[b+2]>>6],d+=na[63&c[b+2]];return c.length%3===2?d=d.substring(0,d.length-1)+"=":c.length%3===1&&(d=d.substring(0,d.length-2)+"=="),d}function J(a,b){var c="";if(a&&(c=Ea.call(a)),a&&("[object ArrayBuffer]"===c||a.buffer&&"[object ArrayBuffer]"===Ea.call(a.buffer))){var d,e=qa;a instanceof ArrayBuffer?(d=a,e+=sa):(d=a.buffer,"[object Int8Array]"===c?e+=ua:"[object Uint8Array]"===c?e+=va:"[object Uint8ClampedArray]"===c?e+=wa:"[object Int16Array]"===c?e+=xa:"[object Uint16Array]"===c?e+=za:"[object Int32Array]"===c?e+=ya:"[object Uint32Array]"===c?e+=Aa:"[object Float32Array]"===c?e+=Ba:"[object Float64Array]"===c?e+=Ca:b(new Error("Failed to get type for BinaryArray"))),b(e+I(d))}else if("[object Blob]"===c){var f=new FileReader;f.onload=function(){var c=oa+a.type+"~"+I(this.result);b(qa+ta+c)},f.readAsArrayBuffer(a)}else try{b(JSON.stringify(a))}catch(c){console.error("Couldn't convert value into a JSON string: 
",a),b(null,c)}}function K(a){if(a.substring(0,ra)!==qa)return JSON.parse(a);var b,c=a.substring(Da),d=a.substring(ra,Da);if(d===ta&&pa.test(c)){var e=c.match(pa);b=e[1],c=c.substring(e[0].length)}var f=H(c);switch(d){case sa:return f;case ta:return i([f],{type:b});case ua:return new Int8Array(f);case va:return new Uint8Array(f);case wa:return new Uint8ClampedArray(f);case xa:return new Int16Array(f);case za:return new Uint16Array(f);case ya:return new Int32Array(f);case Aa:return new Uint32Array(f);case Ba:return new Float32Array(f);case Ca:return new Float64Array(f);default:throw new Error("Unkown type: "+d)}}function L(a){var b=this,c={db:null};if(a)for(var d in a)c[d]="string"!=typeof a[d]?a[d].toString():a[d];var e=new ja(function(a,d){try{c.db=openDatabase(c.name,String(c.version),c.description,c.size)}catch(a){return d(a)}c.db.transaction(function(e){e.executeSql("CREATE TABLE IF NOT EXISTS "+c.storeName+" (id INTEGER PRIMARY KEY, key unique, value)",[],function(){b._dbInfo=c,a()},function(a,b){d(b)})})});return c.serializer=Fa,e}function M(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName+" WHERE key = ? 
LIMIT 1",[a],function(a,c){var d=c.rows.length?c.rows.item(0).value:null;d&&(d=e.serializer.deserialize(d)),b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function N(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName,[],function(c,d){for(var f=d.rows,g=f.length,h=0;h<g;h++){var i=f.item(h),j=i.value;if(j&&(j=e.serializer.deserialize(j)),j=a(j,i.key,h+1),void 0!==j)return void b(j)}b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function O(a,b,c,d){var e=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var f=new ja(function(f,g){e.ready().then(function(){void 0===b&&(b=null);var h=b,i=e._dbInfo;i.serializer.serialize(b,function(b,j){j?g(j):i.db.transaction(function(c){c.executeSql("INSERT OR REPLACE INTO "+i.storeName+" (key, value) VALUES (?, ?)",[a,b],function(){f(h)},function(a,b){g(b)})},function(b){if(b.code===b.QUOTA_ERR){if(d>0)return void f(O.apply(e,[a,h,c,d-1]));g(b)}})})}).catch(g)});return j(f,c),f}function P(a,b,c){return O.apply(this,[a,b,c,1])}function Q(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("DELETE FROM "+e.storeName+" WHERE key = ?",[a],function(){b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function R(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("DELETE FROM "+d.storeName,[],function(){a()},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function S(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT COUNT(key) as c FROM "+d.storeName,[],function(b,c){var d=c.rows.item(0).c;a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function 
T(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT key FROM "+e.storeName+" WHERE id = ? LIMIT 1",[a+1],function(a,c){var d=c.rows.length?c.rows.item(0).key:null;b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function U(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT key FROM "+d.storeName,[],function(b,c){for(var d=[],e=0;e<c.rows.length;e++)d.push(c.rows.item(e).key);a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function V(a){var b=this,c={};if(a)for(var d in a)c[d]=a[d];return c.keyPrefix=c.name+"/",c.storeName!==b._defaultConfig.storeName&&(c.keyPrefix+=c.storeName+"/"),b._dbInfo=c,c.serializer=Fa,ja.resolve()}function W(a){var b=this,c=b.ready().then(function(){for(var a=b._dbInfo.keyPrefix,c=localStorage.length-1;c>=0;c--){var d=localStorage.key(c);0===d.indexOf(a)&&localStorage.removeItem(d)}});return j(c,a),c}function X(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo,d=localStorage.getItem(b.keyPrefix+a);return d&&(d=b.serializer.deserialize(d)),d});return j(d,b),d}function Y(a,b){var c=this,d=c.ready().then(function(){for(var b=c._dbInfo,d=b.keyPrefix,e=d.length,f=localStorage.length,g=1,h=0;h<f;h++){var i=localStorage.key(h);if(0===i.indexOf(d)){var j=localStorage.getItem(i);if(j&&(j=b.serializer.deserialize(j)),j=a(j,i.substring(e),g++),void 0!==j)return j}}});return j(d,b),d}function Z(a,b){var c=this,d=c.ready().then(function(){var b,d=c._dbInfo;try{b=localStorage.key(a)}catch(a){b=null}return b&&(b=b.substring(d.keyPrefix.length)),b});return j(d,b),d}function $(a){var b=this,c=b.ready().then(function(){for(var 
a=b._dbInfo,c=localStorage.length,d=[],e=0;e<c;e++)0===localStorage.key(e).indexOf(a.keyPrefix)&&d.push(localStorage.key(e).substring(a.keyPrefix.length));return d});return j(c,a),c}function _(a){var b=this,c=b.keys().then(function(a){return a.length});return j(c,a),c}function aa(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo;localStorage.removeItem(b.keyPrefix+a)});return j(d,b),d}function ba(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=d.ready().then(function(){void 0===b&&(b=null);var c=b;return new ja(function(e,f){var g=d._dbInfo;g.serializer.serialize(b,function(b,d){if(d)f(d);else try{localStorage.setItem(g.keyPrefix+a,b),e(c)}catch(a){"QuotaExceededError"!==a.name&&"NS_ERROR_DOM_QUOTA_REACHED"!==a.name||f(a),f(a)}})})});return j(e,c),e}function ca(a,b){a[b]=function(){var c=arguments;return a.ready().then(function(){return a[b].apply(a,c)})}}function da(){for(var a=1;a<arguments.length;a++){var b=arguments[a];if(b)for(var c in b)b.hasOwnProperty(c)&&(Oa(b[c])?arguments[0][c]=b[c].slice():arguments[0][c]=b[c])}return arguments[0]}function ea(a){for(var b in Ja)if(Ja.hasOwnProperty(b)&&Ja[b]===a)return!0;return!1}var fa="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},ga=e();"undefined"==typeof Promise&&a(3);var 
ha,ia,ja=Promise,ka="local-forage-detect-blob-support",la=Object.prototype.toString,ma={_driver:"asyncStorage",_initStorage:y,iterate:A,getItem:z,setItem:B,removeItem:C,clear:D,length:E,key:F,keys:G},na="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",oa="~~local_forage_type~",pa=/^~~local_forage_type~([^~]+)~/,qa="__lfsc__:",ra=qa.length,sa="arbf",ta="blob",ua="si08",va="ui08",wa="uic8",xa="si16",ya="si32",za="ur16",Aa="ui32",Ba="fl32",Ca="fl64",Da=ra+sa.length,Ea=Object.prototype.toString,Fa={serialize:J,deserialize:K,stringToBuffer:H,bufferToString:I},Ga={_driver:"webSQLStorage",_initStorage:L,iterate:N,getItem:M,setItem:P,removeItem:Q,clear:R,length:S,key:T,keys:U},Ha={_driver:"localStorageWrapper",_initStorage:V,iterate:Y,getItem:X,setItem:ba,removeItem:aa,clear:W,length:_,key:Z,keys:$},Ia={},Ja={INDEXEDDB:"asyncStorage",LOCALSTORAGE:"localStorageWrapper",WEBSQL:"webSQLStorage"},Ka=[Ja.INDEXEDDB,Ja.WEBSQL,Ja.LOCALSTORAGE],La=["clear","getItem","iterate","key","keys","length","removeItem","setItem"],Ma={description:"",driver:Ka.slice(),name:"localforage",size:4980736,storeName:"keyvaluepairs",version:1},Na={};Na[Ja.INDEXEDDB]=f(),Na[Ja.WEBSQL]=g(),Na[Ja.LOCALSTORAGE]=h();var Oa=Array.isArray||function(a){return"[object Array]"===Object.prototype.toString.call(a)},Pa=function(){function a(b){d(this,a),this.INDEXEDDB=Ja.INDEXEDDB,this.LOCALSTORAGE=Ja.LOCALSTORAGE,this.WEBSQL=Ja.WEBSQL,this._defaultConfig=da({},Ma),this._config=da({},this._defaultConfig,b),this._driverSet=null,this._initDriver=null,this._ready=!1,this._dbInfo=null,this._wrapLibraryMethodsWithReady(),this.setDriver(this._config.driver).catch(function(){})}return a.prototype.config=function(a){if("object"===("undefined"==typeof a?"undefined":fa(a))){if(this._ready)return new Error("Can't call config() after localforage has been used.");for(var b in a){if("storeName"===b&&(a[b]=a[b].replace(/\W/g,"_")),"version"===b&&"number"!=typeof a[b])return new Error("Database version must be a 
number.");this._config[b]=a[b]}return!("driver"in a&&a.driver)||this.setDriver(this._config.driver)}return"string"==typeof a?this._config[a]:this._config},a.prototype.defineDriver=function(a,b,c){var d=new ja(function(b,c){try{var d=a._driver,e=new Error("Custom driver not compliant; see https://mozilla.github.io/localForage/#definedriver"),f=new Error("Custom driver name already in use: "+a._driver);if(!a._driver)return void c(e);if(ea(a._driver))return void c(f);for(var g=La.concat("_initStorage"),h=0;h<g.length;h++){var i=g[h];if(!i||!a[i]||"function"!=typeof a[i])return void c(e)}var j=ja.resolve(!0);"_support"in a&&(j=a._support&&"function"==typeof a._support?a._support():ja.resolve(!!a._support)),j.then(function(c){Na[d]=c,Ia[d]=a,b()},c)}catch(a){c(a)}});return k(d,b,c),d},a.prototype.driver=function(){return this._driver||null},a.prototype.getDriver=function(a,b,c){var d=this,e=ja.resolve().then(function(){if(!ea(a)){if(Ia[a])return Ia[a];throw new Error("Driver not found.")}switch(a){case d.INDEXEDDB:return ma;case d.LOCALSTORAGE:return Ha;case d.WEBSQL:return Ga}});return k(e,b,c),e},a.prototype.getSerializer=function(a){var b=ja.resolve(Fa);return k(b,a),b},a.prototype.ready=function(a){var b=this,c=b._driverSet.then(function(){return null===b._ready&&(b._ready=b._initDriver()),b._ready});return k(c,a,a),c},a.prototype.setDriver=function(a,b,c){function d(){g._config.driver=g.driver()}function e(a){return g._extend(a),d(),g._ready=g._initStorage(g._config),g._ready}function f(a){return function(){function b(){for(;c<a.length;){var f=a[c];return c++,g._dbInfo=null,g._ready=null,g.getDriver(f).then(e).catch(b)}d();var h=new Error("No available storage method found.");return g._driverSet=ja.reject(h),g._driverSet}var c=0;return b()}}var g=this;Oa(a)||(a=[a]);var h=this._getSupportedDrivers(a),i=null!==this._driverSet?this._driverSet.catch(function(){return ja.resolve()}):ja.resolve();return this._driverSet=i.then(function(){var a=h[0];return 
g._dbInfo=null,g._ready=null,g.getDriver(a).then(function(a){g._driver=a._driver,d(),g._wrapLibraryMethodsWithReady(),g._initDriver=f(h)})}).catch(function(){d();var a=new Error("No available storage method found.");return g._driverSet=ja.reject(a),g._driverSet}),k(this._driverSet,b,c),this._driverSet},a.prototype.supports=function(a){return!!Na[a]},a.prototype._extend=function(a){da(this,a)},a.prototype._getSupportedDrivers=function(a){for(var b=[],c=0,d=a.length;c<d;c++){var e=a[c];this.supports(e)&&b.push(e)}return b},a.prototype._wrapLibraryMethodsWithReady=function(){for(var a=0;a<La.length;a++)ca(this,La[a])},a.prototype.createInstance=function(b){return new a(b)},a}(),Qa=new Pa;b.exports=Qa},{3:3}]},{},[4])(4)}); | zizhu-zhangxiansheng-gongzhonggao-beifen-vol1 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol1-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol1/js/libs/localforage.min.js | localforage.min.js |
# 紫竹张先生公众号备份(二)
## 下载
### Docker
```
docker pull apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
docker run -tid -p <port>:80 apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
# 访问 http://localhost:<port> 查看文档
```
### PYPI
```
pip install zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 <port>
# 访问 http://localhost:<port> 查看文档
```
### NPM
```
npm install -g zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 <port>
# 访问 http://localhost:<port> 查看文档
``` | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/README.md | README.md |
# 紫竹张先生公众号备份(二)
## 下载
### Docker
```
docker pull apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
docker run -tid -p <port>:80 apachecn0/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
# 访问 http://localhost:<port> 查看文档
```
### PYPI
```
pip install zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 <port>
# 访问 http://localhost:<port> 查看文档
```
### NPM
```
npm install -g zizhu-zhangxiansheng-gongzhonggao-beifen-vol2
zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 <port>
# 访问 http://localhost:<port> 查看文档
``` | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/README.md | README.md |
!function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):b(a.RSVP=a.RSVP||{})}(this,function(a){"use strict";function b(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return c;return-1}function c(a){var b=a._promiseCallbacks;return b||(b=a._promiseCallbacks={}),b}function d(a,b){if(2!==arguments.length)return wa[a];wa[a]=b}function e(a){var b=typeof a;return null!==a&&("object"===b||"function"===b)}function f(a){return"function"==typeof a}function g(a){return null!==a&&"object"==typeof a}function h(a){return null!==a&&"object"==typeof a}function i(){setTimeout(function(){for(var a=0;a<Aa.length;a++){var b=Aa[a],c=b.payload;c.guid=c.key+c.id,c.childGuid=c.key+c.childId,c.error&&(c.stack=c.error.stack),wa.trigger(b.name,b.payload)}Aa.length=0},50)}function j(a,b,c){1===Aa.push({name:a,payload:{key:b._guidKey,id:b._id,eventName:a,detail:b._result,childId:c&&c._id,label:b._label,timeStamp:za(),error:wa["instrument-with-stack"]?new Error(b._label):null}})&&i()}function k(a,b){var c=this;if(a&&"object"==typeof a&&a.constructor===c)return a;var d=new c(m,b);return s(d,a),d}function l(){return new TypeError("A promises callback cannot return that same promise.")}function m(){}function n(a){try{return a.then}catch(a){return Ea.error=a,Ea}}function o(a,b,c,d){try{a.call(b,c,d)}catch(a){return a}}function p(a,b,c){wa.async(function(a){var d=!1,e=o(c,b,function(c){d||(d=!0,b!==c?s(a,c,void 0):u(a,c))},function(b){d||(d=!0,v(a,b))},"Settle: "+(a._label||" unknown promise"));!d&&e&&(d=!0,v(a,e))},a)}function q(a,b){b._state===Ca?u(a,b._result):b._state===Da?(b._onError=null,v(a,b._result)):w(b,void 0,function(c){b!==c?s(a,c,void 0):u(a,c)},function(b){return v(a,b)})}function r(a,b,c){b.constructor===a.constructor&&c===C&&a.constructor.resolve===k?q(a,b):c===Ea?(v(a,Ea.error),Ea.error=null):f(c)?p(a,b,c):u(a,b)}function s(a,b){a===b?u(a,b):e(b)?r(a,b,n(b)):u(a,b)}function 
t(a){a._onError&&a._onError(a._result),x(a)}function u(a,b){a._state===Ba&&(a._result=b,a._state=Ca,0===a._subscribers.length?wa.instrument&&j("fulfilled",a):wa.async(x,a))}function v(a,b){a._state===Ba&&(a._state=Da,a._result=b,wa.async(t,a))}function w(a,b,c,d){var e=a._subscribers,f=e.length;a._onError=null,e[f]=b,e[f+Ca]=c,e[f+Da]=d,0===f&&a._state&&wa.async(x,a)}function x(a){var b=a._subscribers,c=a._state;if(wa.instrument&&j(c===Ca?"fulfilled":"rejected",a),0!==b.length){for(var d=void 0,e=void 0,f=a._result,g=0;g<b.length;g+=3)d=b[g],e=b[g+c],d?A(c,d,e,f):e(f);a._subscribers.length=0}}function y(){this.error=null}function z(a,b){try{return a(b)}catch(a){return Fa.error=a,Fa}}function A(a,b,c,d){var e=f(c),g=void 0,h=void 0;if(e){if((g=z(c,d))===Fa)h=g.error,g.error=null;else if(g===b)return void v(b,l())}else g=d;b._state!==Ba||(e&&void 0===h?s(b,g):void 0!==h?v(b,h):a===Ca?u(b,g):a===Da&&v(b,g))}function B(a,b){var c=!1;try{b(function(b){c||(c=!0,s(a,b))},function(b){c||(c=!0,v(a,b))})}catch(b){v(a,b)}}function C(a,b,c){var d=this,e=d._state;if(e===Ca&&!a||e===Da&&!b)return wa.instrument&&j("chained",d,d),d;d._onError=null;var f=new d.constructor(m,c),g=d._result;if(wa.instrument&&j("chained",d,f),e===Ba)w(d,f,a,b);else{var h=e===Ca?a:b;wa.async(function(){return A(e,f,h,g)})}return f}function D(a,b,c){return a===Ca?{state:"fulfilled",value:c}:{state:"rejected",reason:c}}function E(a,b){return ya(a)?new Ga(this,a,!0,b).promise:this.reject(new TypeError("Promise.all must be called with an array"),b)}function F(a,b){var c=this,d=new c(m,b);if(!ya(a))return v(d,new TypeError("Promise.race must be called with an array")),d;for(var e=0;d._state===Ba&&e<a.length;e++)w(c.resolve(a[e]),void 0,function(a){return s(d,a)},function(a){return v(d,a)});return d}function G(a,b){var c=this,d=new c(m,b);return v(d,a),d}function H(){throw new TypeError("You must pass a resolver function as the first argument to the promise constructor")}function I(){throw new 
TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.")}function J(){this.value=void 0}function K(a){try{return a.then}catch(a){return Ka.value=a,Ka}}function L(a,b,c){try{a.apply(b,c)}catch(a){return Ka.value=a,Ka}}function M(a,b){for(var c={},d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=a[f];for(var g=0;g<b.length;g++){c[b[g]]=e[g+1]}return c}function N(a){for(var b=a.length,c=new Array(b-1),d=1;d<b;d++)c[d-1]=a[d];return c}function O(a,b){return{then:function(c,d){return a.call(b,c,d)}}}function P(a,b){var c=function(){for(var c=this,d=arguments.length,e=new Array(d+1),f=!1,g=0;g<d;++g){var h=arguments[g];if(!f){if((f=S(h))===La){var i=new Ja(m);return v(i,La.value),i}f&&!0!==f&&(h=O(f,h))}e[g]=h}var j=new Ja(m);return e[d]=function(a,c){a?v(j,a):void 0===b?s(j,c):!0===b?s(j,N(arguments)):ya(b)?s(j,M(arguments,b)):s(j,c)},f?R(j,e,a,c):Q(j,e,a,c)};return c.__proto__=a,c}function Q(a,b,c,d){var e=L(c,d,b);return e===Ka&&v(a,e.value),a}function R(a,b,c,d){return Ja.all(b).then(function(b){var e=L(c,d,b);return e===Ka&&v(a,e.value),a})}function S(a){return!(!a||"object"!=typeof a)&&(a.constructor===Ja||K(a))}function T(a,b){return Ja.all(a,b)}function U(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function V(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function W(a,b){return ya(a)?new Ma(Ja,a,b).promise:Ja.reject(new TypeError("Promise.allSettled must be called with an array"),b)}function X(a,b){return Ja.race(a,b)}function Y(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been 
called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function Z(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function $(a,b){return g(a)?new Oa(Ja,a,b).promise:Ja.reject(new TypeError("Promise.hash must be called with an object"),b)}function _(a,b){if(!a)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!b||"object"!=typeof b&&"function"!=typeof b?a:b}function aa(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(Object.setPrototypeOf?Object.setPrototypeOf(a,b):a.__proto__=b)}function ba(a,b){return g(a)?new Pa(Ja,a,!1,b).promise:Ja.reject(new TypeError("RSVP.hashSettled must be called with an object"),b)}function ca(a){throw setTimeout(function(){throw a}),a}function da(a){var b={resolve:void 0,reject:void 0};return b.promise=new Ja(function(a,c){b.resolve=a,b.reject=c},a),b}function ea(a,b,c){return ya(a)?f(b)?Ja.all(a,c).then(function(a){for(var d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=b(a[f]);return Ja.all(e,c)}):Ja.reject(new TypeError("RSVP.map expects a function as a second argument"),c):Ja.reject(new TypeError("RSVP.map must be called with an array"),c)}function fa(a,b){return Ja.resolve(a,b)}function ga(a,b){return Ja.reject(a,b)}function ha(a,b){return Ja.all(a,b)}function ia(a,b){return Ja.resolve(a,b).then(function(a){return ha(a,b)})}function ja(a,b,c){return ya(a)||g(a)&&void 0!==a.then?f(b)?(ya(a)?ha(a,c):ia(a,c)).then(function(a){for(var d=a.length,e=new Array(d),f=0;f<d;f++)e[f]=b(a[f]);return ha(e,c).then(function(b){for(var c=new 
Array(d),e=0,f=0;f<d;f++)b[f]&&(c[e]=a[f],e++);return c.length=e,c})}):Ja.reject(new TypeError("RSVP.filter expects function as a second argument"),c):Ja.reject(new TypeError("RSVP.filter must be called with an array or promise"),c)}function ka(a,b){Xa[Qa]=a,Xa[Qa+1]=b,2===(Qa+=2)&&Ya()}function la(){var a=process.nextTick,b=process.versions.node.match(/^(?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)$/);return Array.isArray(b)&&"0"===b[1]&&"10"===b[2]&&(a=setImmediate),function(){return a(qa)}}function ma(){return void 0!==Ra?function(){Ra(qa)}:pa()}function na(){var a=0,b=new Ua(qa),c=document.createTextNode("");return b.observe(c,{characterData:!0}),function(){return c.data=a=++a%2}}function oa(){var a=new MessageChannel;return a.port1.onmessage=qa,function(){return a.port2.postMessage(0)}}function pa(){return function(){return setTimeout(qa,1)}}function qa(){for(var a=0;a<Qa;a+=2){(0,Xa[a])(Xa[a+1]),Xa[a]=void 0,Xa[a+1]=void 0}Qa=0}function ra(){try{var a=require,b=a("vertx");return Ra=b.runOnLoop||b.runOnContext,ma()}catch(a){return pa()}}function sa(a,b,c){return b in a?Object.defineProperty(a,b,{value:c,enumerable:!0,configurable:!0,writable:!0}):a[b]=c,a}function ta(){wa.on.apply(wa,arguments)}function ua(){wa.off.apply(wa,arguments)}var va={mixin:function(a){return a.on=this.on,a.off=this.off,a.trigger=this.trigger,a._promiseCallbacks=void 0,a},on:function(a,d){if("function"!=typeof d)throw new TypeError("Callback must be a function");var e=c(this),f=void 0;f=e[a],f||(f=e[a]=[]),-1===b(f,d)&&f.push(d)},off:function(a,d){var e=c(this),f=void 0,g=void 0;if(!d)return void(e[a]=[]);f=e[a],-1!==(g=b(f,d))&&f.splice(g,1)},trigger:function(a,b,d){var e=c(this),f=void 0;if(f=e[a])for(var g=0;g<f.length;g++)(0,f[g])(b,d)}},wa={instrument:!1};va.mixin(wa);var xa=void 0;xa=Array.isArray?Array.isArray:function(a){return"[object Array]"===Object.prototype.toString.call(a)};var ya=xa,za=Date.now||function(){return(new Date).getTime()},Aa=[],Ba=void 0,Ca=1,Da=2,Ea=new y,Fa=new 
y,Ga=function(){function a(a,b,c,d){this._instanceConstructor=a,this.promise=new a(m,d),this._abortOnReject=c,this._init.apply(this,arguments)}return a.prototype._init=function(a,b){var c=b.length||0;this.length=c,this._remaining=c,this._result=new Array(c),this._enumerate(b),0===this._remaining&&u(this.promise,this._result)},a.prototype._enumerate=function(a){for(var b=this.length,c=this.promise,d=0;c._state===Ba&&d<b;d++)this._eachEntry(a[d],d)},a.prototype._settleMaybeThenable=function(a,b){var c=this._instanceConstructor,d=c.resolve;if(d===k){var e=n(a);if(e===C&&a._state!==Ba)a._onError=null,this._settledAt(a._state,b,a._result);else if("function"!=typeof e)this._remaining--,this._result[b]=this._makeResult(Ca,b,a);else if(c===Ja){var f=new c(m);r(f,a,e),this._willSettleAt(f,b)}else this._willSettleAt(new c(function(b){return b(a)}),b)}else this._willSettleAt(d(a),b)},a.prototype._eachEntry=function(a,b){h(a)?this._settleMaybeThenable(a,b):(this._remaining--,this._result[b]=this._makeResult(Ca,b,a))},a.prototype._settledAt=function(a,b,c){var d=this.promise;d._state===Ba&&(this._abortOnReject&&a===Da?v(d,c):(this._remaining--,this._result[b]=this._makeResult(a,b,c),0===this._remaining&&u(d,this._result)))},a.prototype._makeResult=function(a,b,c){return c},a.prototype._willSettleAt=function(a,b){var c=this;w(a,void 0,function(a){return c._settledAt(Ca,b,a)},function(a){return c._settledAt(Da,b,a)})},a}(),Ha="rsvp_"+za()+"-",Ia=0,Ja=function(){function a(b,c){this._id=Ia++,this._label=c,this._state=void 0,this._result=void 0,this._subscribers=[],wa.instrument&&j("created",this),m!==b&&("function"!=typeof b&&H(),this instanceof a?B(this,b):I())}return a.prototype._onError=function(a){var b=this;wa.after(function(){b._onError&&wa.trigger("error",a,b._label)})},a.prototype.catch=function(a,b){return this.then(void 0,a,b)},a.prototype.finally=function(a,b){var c=this,d=c.constructor;return c.then(function(b){return d.resolve(a()).then(function(){return 
b})},function(b){return d.resolve(a()).then(function(){throw b})},b)},a}();Ja.cast=k,Ja.all=E,Ja.race=F,Ja.resolve=k,Ja.reject=G,Ja.prototype._guidKey=Ha,Ja.prototype.then=C;var Ka=new J,La=new J,Ma=function(a){function b(b,c,d){return U(this,a.call(this,b,c,!1,d))}return V(b,a),b}(Ga);Ma.prototype._makeResult=D;var Na=Object.prototype.hasOwnProperty,Oa=function(a){function b(b,c){var d=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],e=arguments[3];return Y(this,a.call(this,b,c,d,e))}return Z(b,a),b.prototype._init=function(a,b){this._result={},this._enumerate(b),0===this._remaining&&u(this.promise,this._result)},b.prototype._enumerate=function(a){var b=this.promise,c=[];for(var d in a)Na.call(a,d)&&c.push({position:d,entry:a[d]});var e=c.length;this._remaining=e;for(var f=void 0,g=0;b._state===Ba&&g<e;g++)f=c[g],this._eachEntry(f.entry,f.position)},b}(Ga),Pa=function(a){function b(b,c,d){return _(this,a.call(this,b,c,!1,d))}return aa(b,a),b}(Oa);Pa.prototype._makeResult=D;var Qa=0,Ra=void 0,Sa="undefined"!=typeof window?window:void 0,Ta=Sa||{},Ua=Ta.MutationObserver||Ta.WebKitMutationObserver,Va="undefined"==typeof self&&"undefined"!=typeof process&&"[object process]"==={}.toString.call(process),Wa="undefined"!=typeof Uint8ClampedArray&&"undefined"!=typeof importScripts&&"undefined"!=typeof MessageChannel,Xa=new Array(1e3),Ya=void 0;Ya=Va?la():Ua?na():Wa?oa():void 0===Sa&&"function"==typeof require?ra():pa();if("object"==typeof self)self;else{if("object"!=typeof global)throw new Error("no global: `self` or `global` found");global}var Za;wa.async=ka,wa.after=function(a){return setTimeout(a,0)};var $a=fa,_a=function(a,b){return wa.async(a,b)};if("undefined"!=typeof window&&"object"==typeof window.__PROMISE_INSTRUMENTATION__){var ab=window.__PROMISE_INSTRUMENTATION__;d("instrument",!0);for(var bb in ab)ab.hasOwnProperty(bb)&&ta(bb,ab[bb])}var 
cb=(Za={asap:ka,cast:$a,Promise:Ja,EventTarget:va,all:T,allSettled:W,race:X,hash:$,hashSettled:ba,rethrow:ca,defer:da,denodeify:P,configure:d,on:ta,off:ua,resolve:fa,reject:ga,map:ea},sa(Za,"async",_a),sa(Za,"filter",ja),Za);a.default=cb,a.asap=ka,a.cast=$a,a.Promise=Ja,a.EventTarget=va,a.all=T,a.allSettled=W,a.race=X,a.hash=$,a.hashSettled=ba,a.rethrow=ca,a.defer=da,a.denodeify=P,a.configure=d,a.on=ta,a.off=ua,a.resolve=fa,a.reject=ga,a.map=ea,a.async=_a,a.filter=ja,Object.defineProperty(a,"__esModule",{value:!0})});var EPUBJS=EPUBJS||{};EPUBJS.core={};var ELEMENT_NODE=1,TEXT_NODE=3,COMMENT_NODE=8,DOCUMENT_NODE=9;EPUBJS.core.getEl=function(a){return document.getElementById(a)},EPUBJS.core.getEls=function(a){return document.getElementsByClassName(a)},EPUBJS.core.request=function(a,b,c){var d,e=window.URL,f=e?"blob":"arraybuffer",g=new RSVP.defer,h=new XMLHttpRequest,i=XMLHttpRequest.prototype,j=function(){var a;this.readyState==this.DONE&&(200!==this.status&&0!==this.status||!this.response?g.reject({message:this.response,stack:(new Error).stack}):(a="xml"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"application/xml"):"xhtml"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"application/xhtml+xml"):"html"==b?this.responseXML?this.responseXML:(new DOMParser).parseFromString(this.response,"text/html"):"json"==b?JSON.parse(this.response):"blob"==b?e?this.response:new Blob([this.response]):this.response,g.resolve(a)))};return"overrideMimeType"in 
i||Object.defineProperty(i,"overrideMimeType",{value:function(a){}}),h.onreadystatechange=j,h.open("GET",a,!0),c&&(h.withCredentials=!0),b||(d=EPUBJS.core.uri(a),b=d.extension,b={htm:"html"}[b]||b),"blob"==b&&(h.responseType=f),"json"==b&&h.setRequestHeader("Accept","application/json"),"xml"==b&&(h.responseType="document",h.overrideMimeType("text/xml")),"xhtml"==b&&(h.responseType="document"),"html"==b&&(h.responseType="document"),"binary"==b&&(h.responseType="arraybuffer"),h.send(),g.promise},EPUBJS.core.toArray=function(a){var b=[];for(var c in a){var d;a.hasOwnProperty(c)&&(d=a[c],d.ident=c,b.push(d))}return b},EPUBJS.core.uri=function(a){var b,c,d,e={protocol:"",host:"",path:"",origin:"",directory:"",base:"",filename:"",extension:"",fragment:"",href:a},f=a.indexOf("blob:"),g=a.indexOf("://"),h=a.indexOf("?"),i=a.indexOf("#");return 0===f?(e.protocol="blob",e.base=a.indexOf(0,i),e):(-1!=i&&(e.fragment=a.slice(i+1),a=a.slice(0,i)),-1!=h&&(e.search=a.slice(h+1),a=a.slice(0,h),href=e.href),-1!=g?(e.protocol=a.slice(0,g),b=a.slice(g+3),d=b.indexOf("/"),-1===d?(e.host=e.path,e.path=""):(e.host=b.slice(0,d),e.path=b.slice(d)),e.origin=e.protocol+"://"+e.host,e.directory=EPUBJS.core.folder(e.path),e.base=e.origin+e.directory):(e.path=a,e.directory=EPUBJS.core.folder(a),e.base=e.directory),e.filename=a.replace(e.base,""),c=e.filename.lastIndexOf("."),-1!=c&&(e.extension=e.filename.slice(c+1)),e)},EPUBJS.core.folder=function(a){var b=a.lastIndexOf("/");if(-1==b);return a.slice(0,b+1)},EPUBJS.core.dataURLToBlob=function(a){var b,c,d,e,f,g=";base64,";if(-1==a.indexOf(g))return b=a.split(","),c=b[0].split(":")[1],d=b[1],new Blob([d],{type:c});b=a.split(g),c=b[0].split(":")[1],d=window.atob(b[1]),e=d.length,f=new Uint8Array(e);for(var h=0;h<e;++h)f[h]=d.charCodeAt(h);return new Blob([f],{type:c})},EPUBJS.core.addScript=function(a,b,c){var 
d,e;e=!1,d=document.createElement("script"),d.type="text/javascript",d.async=!1,d.src=a,d.onload=d.onreadystatechange=function(){e||this.readyState&&"complete"!=this.readyState||(e=!0,b&&b())},c=c||document.body,c.appendChild(d)},EPUBJS.core.addScripts=function(a,b,c){var d=a.length,e=0,f=function(){e++,d==e?b&&b():EPUBJS.core.addScript(a[e],f,c)};EPUBJS.core.addScript(a[e],f,c)},EPUBJS.core.addCss=function(a,b,c){var d,e;e=!1,d=document.createElement("link"),d.type="text/css",d.rel="stylesheet",d.href=a,d.onload=d.onreadystatechange=function(){e||this.readyState&&"complete"!=this.readyState||(e=!0,b&&b())},c=c||document.body,c.appendChild(d)},EPUBJS.core.prefixed=function(a){var b=["Webkit","Moz","O","ms"],c=a[0].toUpperCase()+a.slice(1),d=b.length;if(void 0!==document.documentElement.style[a])return a;for(var e=0;e<d;e++)if(void 0!==document.documentElement.style[b[e]+c])return b[e]+c;return a},EPUBJS.core.resolveUrl=function(a,b){var c,d,e=[],f=EPUBJS.core.uri(b),g=a.split("/");return f.host?b:(g.pop(),d=b.split("/"),d.forEach(function(a){".."===a?g.pop():e.push(a)}),c=g.concat(e),c.join("/"))},EPUBJS.core.uuid=function(){var a=(new Date).getTime();return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,function(b){var c=(a+16*Math.random())%16|0;return a=Math.floor(a/16),("x"==b?c:7&c|8).toString(16)})},EPUBJS.core.insert=function(a,b,c){var d=EPUBJS.core.locationOf(a,b,c);return b.splice(d,0,a),d},EPUBJS.core.locationOf=function(a,b,c,d,e){var f,g=d||0,h=e||b.length,i=parseInt(g+(h-g)/2);return c||(c=function(a,b){return a>b?1:a<b?-1:(a=b)?0:void 0}),h-g<=0?i:(f=c(b[i],a),h-g==1?f>0?i:i+1:0===f?i:-1===f?EPUBJS.core.locationOf(a,b,c,i,h):EPUBJS.core.locationOf(a,b,c,g,i))},EPUBJS.core.indexOfSorted=function(a,b,c,d,e){var f,g=d||0,h=e||b.length,i=parseInt(g+(h-g)/2);return c||(c=function(a,b){return a>b?1:a<b?-1:(a=b)?0:void 
0}),h-g<=0?-1:(f=c(b[i],a),h-g==1?0===f?i:-1:0===f?i:-1===f?EPUBJS.core.indexOfSorted(a,b,c,i,h):EPUBJS.core.indexOfSorted(a,b,c,g,i))},EPUBJS.core.queue=function(a){var b=[],c=a,d=function(a,c,d){return b.push({funcName:a,args:c,context:d}),b},e=function(){var a;b.length&&(a=b.shift(),c[a.funcName].apply(a.context||c,a.args))};return{enqueue:d,dequeue:e,flush:function(){for(;b.length;)e()},clear:function(){b=[]},length:function(){return b.length}}},EPUBJS.core.getElementXPath=function(a){return a&&a.id?'//*[@id="'+a.id+'"]':EPUBJS.core.getElementTreeXPath(a)},EPUBJS.core.getElementTreeXPath=function(a){var b,c,d,e,f=[],g="http://www.w3.org/1999/xhtml"===a.ownerDocument.documentElement.getAttribute("xmlns");for(a.nodeType===Node.TEXT_NODE&&(b=EPUBJS.core.indexOfTextNode(a)+1,f.push("text()["+b+"]"),a=a.parentNode);a&&1==a.nodeType;a=a.parentNode){b=0;for(var h=a.previousSibling;h;h=h.previousSibling)h.nodeType!=Node.DOCUMENT_TYPE_NODE&&h.nodeName==a.nodeName&&++b;c=a.nodeName.toLowerCase(),d=g?"xhtml:"+c:c,e=b?"["+(b+1)+"]":"",f.splice(0,0,d+e)}return f.length?"./"+f.join("/"):null},EPUBJS.core.nsResolver=function(a){return{xhtml:"http://www.w3.org/1999/xhtml",epub:"http://www.idpf.org/2007/ops"}[a]||null},EPUBJS.core.cleanStringForXpath=function(a){var b=a.match(/[^'"]+|['"]/g);return b=b.map(function(a){return"'"===a?'"\'"':'"'===a?"'\"'":"'"+a+"'"}),"concat('',"+b.join(",")+")"},EPUBJS.core.indexOfTextNode=function(a){for(var b,c=a.parentNode,d=c.childNodes,e=-1,f=0;f<d.length&&(b=d[f],b.nodeType===Node.TEXT_NODE&&e++,b!=a);f++);return e},EPUBJS.core.defaults=function(a){for(var b=1,c=arguments.length;b<c;b++){var d=arguments[b];for(var e in d)void 0===a[e]&&(a[e]=d[e])}return a},EPUBJS.core.extend=function(a){return[].slice.call(arguments,1).forEach(function(b){b&&Object.getOwnPropertyNames(b).forEach(function(c){Object.defineProperty(a,c,Object.getOwnPropertyDescriptor(b,c))})}),a},EPUBJS.core.clone=function(a){return 
EPUBJS.core.isArray(a)?a.slice():EPUBJS.core.extend({},a)},EPUBJS.core.isElement=function(a){return!(!a||1!=a.nodeType)},EPUBJS.core.isNumber=function(a){return!isNaN(parseFloat(a))&&isFinite(a)},EPUBJS.core.isString=function(a){return"string"==typeof a||a instanceof String},EPUBJS.core.isArray=Array.isArray||function(a){return"[object Array]"===Object.prototype.toString.call(a)},EPUBJS.core.values=function(a){var b,c,d,e=-1;if(!a)return[];for(b=Object.keys(a),c=b.length,d=Array(c);++e<c;)d[e]=a[b[e]];return d},EPUBJS.core.indexOfNode=function(a,b){for(var c,d=a.parentNode,e=d.childNodes,f=-1,g=0;g<e.length&&(c=e[g],c.nodeType===b&&f++,c!=a);g++);return f},EPUBJS.core.indexOfTextNode=function(a){return EPUBJS.core.indexOfNode(a,TEXT_NODE)},EPUBJS.core.indexOfElementNode=function(a){return EPUBJS.core.indexOfNode(a,ELEMENT_NODE)};var EPUBJS=EPUBJS||{};EPUBJS.reader={},EPUBJS.reader.plugins={},function(a,b){var c=(a.ePubReader,a.ePubReader=function(a,b){return new EPUBJS.Reader(a,b)});"function"==typeof define&&define.amd?define(function(){return Reader}):"undefined"!=typeof module&&module.exports&&(module.exports=c)}(window,jQuery),EPUBJS.Reader=function(a,b){var c,d,e,f=this,g=$("#viewer"),h=window.location.search;this.settings=EPUBJS.core.defaults(b||{},{bookPath:a,restore:!1,reload:!1,bookmarks:void 0,annotations:void 0,contained:void 0,bookKey:void 0,styles:void 0,sidebarReflow:!1,generatePagination:!1,history:!0}),h&&(e=h.slice(1).split("&"),e.forEach(function(a){var b=a.split("="),c=b[0],d=b[1]||"";f.settings[c]=decodeURIComponent(d)})),this.setBookKey(this.settings.bookPath),this.settings.restore&&this.isSaved()&&this.applySavedSettings(),this.settings.styles=this.settings.styles||{fontSize:"100%"},this.book=c=new 
ePub(this.settings.bookPath,this.settings),this.offline=!1,this.sidebarOpen=!1,this.settings.bookmarks||(this.settings.bookmarks=[]),this.settings.annotations||(this.settings.annotations=[]),this.settings.generatePagination&&c.generatePagination(g.width(),g.height()),this.rendition=c.renderTo("viewer",{ignoreClass:"annotator-hl",width:"100%",height:"100%"}),this.settings.previousLocationCfi?this.displayed=this.rendition.display(this.settings.previousLocationCfi):this.displayed=this.rendition.display(),c.ready.then(function(){f.ReaderController=EPUBJS.reader.ReaderController.call(f,c),f.SettingsController=EPUBJS.reader.SettingsController.call(f,c),f.ControlsController=EPUBJS.reader.ControlsController.call(f,c),f.SidebarController=EPUBJS.reader.SidebarController.call(f,c),f.BookmarksController=EPUBJS.reader.BookmarksController.call(f,c),f.NotesController=EPUBJS.reader.NotesController.call(f,c),window.addEventListener("hashchange",this.hashChanged.bind(this),!1),document.addEventListener("keydown",this.adjustFontSize.bind(this),!1),this.rendition.on("keydown",this.adjustFontSize.bind(this)),this.rendition.on("keydown",f.ReaderController.arrowKeys.bind(this)),this.rendition.on("selected",this.selectedRange.bind(this))}.bind(this)).then(function(){f.ReaderController.hideLoader()}.bind(this));for(d in EPUBJS.reader.plugins)EPUBJS.reader.plugins.hasOwnProperty(d)&&(f[d]=EPUBJS.reader.plugins[d].call(f,c));return c.loaded.metadata.then(function(a){f.MetaController=EPUBJS.reader.MetaController.call(f,a)}),c.loaded.navigation.then(function(a){f.TocController=EPUBJS.reader.TocController.call(f,a)}),window.addEventListener("beforeunload",this.unload.bind(this),!1),this},EPUBJS.Reader.prototype.adjustFontSize=function(a){var 
b,c=2,d=a.ctrlKey||a.metaKey;this.settings.styles&&(this.settings.styles.fontSize||(this.settings.styles.fontSize="100%"),b=parseInt(this.settings.styles.fontSize.slice(0,-1)),d&&187==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize",b+c+"%")),d&&189==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize",b-c+"%")),d&&48==a.keyCode&&(a.preventDefault(),this.book.setStyle("fontSize","100%")))},EPUBJS.Reader.prototype.addBookmark=function(a){this.isBookmarked(a)>-1||(this.settings.bookmarks.push(a),this.trigger("reader:bookmarked",a))},EPUBJS.Reader.prototype.removeBookmark=function(a){var b=this.isBookmarked(a);-1!==b&&(this.settings.bookmarks.splice(b,1),this.trigger("reader:unbookmarked",b))},EPUBJS.Reader.prototype.isBookmarked=function(a){return this.settings.bookmarks.indexOf(a)},EPUBJS.Reader.prototype.clearBookmarks=function(){this.settings.bookmarks=[]},EPUBJS.Reader.prototype.addNote=function(a){this.settings.annotations.push(a)},EPUBJS.Reader.prototype.removeNote=function(a){var b=this.settings.annotations.indexOf(a);-1!==b&&delete this.settings.annotations[b]},EPUBJS.Reader.prototype.clearNotes=function(){this.settings.annotations=[]},EPUBJS.Reader.prototype.setBookKey=function(a){return this.settings.bookKey||(this.settings.bookKey="epubjsreader:"+EPUBJS.VERSION+":"+window.location.host+":"+a),this.settings.bookKey},EPUBJS.Reader.prototype.isSaved=function(a){return!!localStorage&&null!==localStorage.getItem(this.settings.bookKey)},EPUBJS.Reader.prototype.removeSavedSettings=function(){if(!localStorage)return!1;localStorage.removeItem(this.settings.bookKey)},EPUBJS.Reader.prototype.applySavedSettings=function(){var 
a;if(!localStorage)return!1;try{a=JSON.parse(localStorage.getItem(this.settings.bookKey))}catch(a){return!1}return!!a&&(a.styles&&(this.settings.styles=EPUBJS.core.defaults(this.settings.styles||{},a.styles)),this.settings=EPUBJS.core.defaults(this.settings,a),!0)},EPUBJS.Reader.prototype.saveSettings=function(){if(this.book&&(this.settings.previousLocationCfi=this.rendition.currentLocation().start.cfi),!localStorage)return!1;localStorage.setItem(this.settings.bookKey,JSON.stringify(this.settings))},EPUBJS.Reader.prototype.unload=function(){this.settings.restore&&localStorage&&this.saveSettings()},EPUBJS.Reader.prototype.hashChanged=function(){var a=window.location.hash.slice(1);this.rendition.display(a)},EPUBJS.Reader.prototype.selectedRange=function(a){var b="#"+a;this.settings.history&&window.location.hash!=b&&(history.pushState({},"",b),this.currentLocationCfi=a)},RSVP.EventTarget.mixin(EPUBJS.Reader.prototype),EPUBJS.reader.BookmarksController=function(){var a=this.book,b=this.rendition,c=$("#bookmarksView"),d=c.find("#bookmarks"),e=document.createDocumentFragment(),f=function(){c.show()},g=function(){c.hide()},h=0,i=function(c){var d=document.createElement("li"),e=document.createElement("a");d.id="bookmark-"+h,d.classList.add("list_item");var f,g=a.spine.get(c);return g.index in a.navigation.toc?(f=a.navigation.toc[g.index],e.textContent=f.label):e.textContent=c,e.href=c,e.classList.add("bookmark_link"),e.addEventListener("click",function(a){var c=this.getAttribute("href");b.display(c),a.preventDefault()},!1),d.appendChild(e),h++,d};return this.settings.bookmarks.forEach(function(a){var b=i(a);e.appendChild(b)}),d.append(e),this.on("reader:bookmarked",function(a){var b=i(a);d.append(b)}),this.on("reader:unbookmarked",function(a){$("#bookmark-"+a).remove()}),{show:f,hide:g}},EPUBJS.reader.ControlsController=function(a){var 
b=this,c=this.rendition,d=($("#store"),$("#fullscreen")),e=($("#fullscreenicon"),$("#cancelfullscreenicon"),$("#slider")),f=($("#main"),$("#sidebar"),$("#setting")),g=$("#bookmark");return e.on("click",function(){b.sidebarOpen?(b.SidebarController.hide(),e.addClass("icon-menu"),e.removeClass("icon-right")):(b.SidebarController.show(),e.addClass("icon-right"),e.removeClass("icon-menu"))}),"undefined"!=typeof screenfull&&(d.on("click",function(){screenfull.toggle($("#container")[0])}),screenfull.raw&&document.addEventListener(screenfull.raw.fullscreenchange,function(){fullscreen=screenfull.isFullscreen,fullscreen?d.addClass("icon-resize-small").removeClass("icon-resize-full"):d.addClass("icon-resize-full").removeClass("icon-resize-small")})),f.on("click",function(){b.SettingsController.show()}),g.on("click",function(){var a=b.rendition.currentLocation().start.cfi;-1===b.isBookmarked(a)?(b.addBookmark(a),g.addClass("icon-bookmark").removeClass("icon-bookmark-empty")):(b.removeBookmark(a),g.removeClass("icon-bookmark").addClass("icon-bookmark-empty"))}),c.on("relocated",function(a){var c=a.start.cfi,d="#"+c;-1===b.isBookmarked(c)?g.removeClass("icon-bookmark").addClass("icon-bookmark-empty"):g.addClass("icon-bookmark").removeClass("icon-bookmark-empty"),b.currentLocationCfi=c,b.settings.history&&window.location.hash!=d&&history.pushState({},"",d)}),{}},EPUBJS.reader.MetaController=function(a){var b=a.title,c=a.creator,d=$("#book-title"),e=$("#chapter-title"),f=$("#title-seperator");document.title=b+" – "+c,d.html(b),e.html(c),f.show()},EPUBJS.reader.NotesController=function(){var a=this.book,b=this.rendition,c=this,d=$("#notesView"),e=$("#notes"),f=$("#note-text"),g=$("#note-anchor"),h=c.settings.annotations,i=a.renderer,j=[],k=new ePub.CFI,l=function(){d.show()},m=function(){d.hide()},n=function(d){var 
e,h,i,j,l,m=a.renderer.doc;if(m.caretPositionFromPoint?(e=m.caretPositionFromPoint(d.clientX,d.clientY),h=e.offsetNode,i=e.offset):m.caretRangeFromPoint&&(e=m.caretRangeFromPoint(d.clientX,d.clientY),h=e.startContainer,i=e.startOffset),3!==h.nodeType)for(var q=0;q<h.childNodes.length;q++)if(3==h.childNodes[q].nodeType){h=h.childNodes[q];break}i=h.textContent.indexOf(".",i),-1===i?i=h.length:i+=1,j=k.generateCfiFromTextNode(h,i,a.renderer.currentChapter.cfiBase),l={annotatedAt:new Date,anchor:j,body:f.val()},c.addNote(l),o(l),p(l),f.val(""),g.text("Attach"),f.prop("disabled",!1),b.off("click",n)},o=function(a){var c=document.createElement("li"),d=document.createElement("a");c.innerHTML=a.body,d.innerHTML=" context »",d.href="#"+a.anchor,d.onclick=function(){return b.display(a.anchor),!1},c.appendChild(d),e.append(c)},p=function(b){var c=a.renderer.doc,d=document.createElement("span"),e=document.createElement("a");d.classList.add("footnotesuperscript","reader_generated"),d.style.verticalAlign="super",d.style.fontSize=".75em",d.style.lineHeight="1em",e.style.padding="2px",e.style.backgroundColor="#fffa96",e.style.borderRadius="5px",e.style.cursor="pointer",d.id="note-"+EPUBJS.core.uuid(),e.innerHTML=h.indexOf(b)+1+"[Reader]",d.appendChild(e),k.addMarker(b.anchor,c,d),q(d,b.body)},q=function(a,d){var e=a.id,f=function(){var 
c,f,l,m,n=i.height,o=i.width,p=225;j[e]||(j[e]=document.createElement("div"),j[e].setAttribute("class","popup"),pop_content=document.createElement("div"),j[e].appendChild(pop_content),pop_content.innerHTML=d,pop_content.setAttribute("class","pop_content"),i.render.document.body.appendChild(j[e]),j[e].addEventListener("mouseover",g,!1),j[e].addEventListener("mouseout",h,!1),b.on("locationChanged",k,this),b.on("locationChanged",h,this)),c=j[e],f=a.getBoundingClientRect(),l=f.left,m=f.top,c.classList.add("show"),popRect=c.getBoundingClientRect(),c.style.left=l-popRect.width/2+"px",c.style.top=m+"px",p>n/2.5&&(p=n/2.5,pop_content.style.maxHeight=p+"px"),popRect.height+m>=n-25?(c.style.top=m-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),l-popRect.width<=0?(c.style.left=l+"px",c.classList.add("left")):c.classList.remove("left"),l+popRect.width/2>=o?(c.style.left=l-300+"px",popRect=c.getBoundingClientRect(),c.style.left=l-popRect.width+"px",popRect.height+m>=n-25?(c.style.top=m-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),c.classList.add("right")):c.classList.remove("right")},g=function(){j[e].classList.add("on")},h=function(){j[e].classList.remove("on")},k=function(){setTimeout(function(){j[e].classList.remove("show")},100)},m=function(){c.ReaderController.slideOut(),l()};a.addEventListener("mouseover",f,!1),a.addEventListener("mouseout",k,!1),a.addEventListener("click",m,!1)};return g.on("click",function(a){g.text("Cancel"),f.prop("disabled","true"),b.on("click",n)}),h.forEach(function(a){o(a)}),{show:l,hide:m}},EPUBJS.reader.ReaderController=function(a){var b=$("#main"),c=$("#divider"),d=$("#loader"),e=$("#next"),f=$("#prev"),g=this,a=this.book,h=this.rendition,i=function(){h.currentLocation().start.cfi;g.settings.sidebarReflow?(b.removeClass("single"),b.one("transitionend",function(){h.resize()})):b.removeClass("closed")},j=function(){var 
a=h.currentLocation();if(a){a.start.cfi;g.settings.sidebarReflow?(b.addClass("single"),b.one("transitionend",function(){h.resize()})):b.addClass("closed")}},k=function(){d.show(),n()},l=function(){d.hide()},m=function(){c.addClass("show")},n=function(){c.removeClass("show")},o=!1,p=function(b){37==b.keyCode&&("rtl"===a.package.metadata.direction?h.next():h.prev(),f.addClass("active"),o=!0,setTimeout(function(){o=!1,f.removeClass("active")},100),b.preventDefault()),39==b.keyCode&&("rtl"===a.package.metadata.direction?h.prev():h.next(),e.addClass("active"),o=!0,setTimeout(function(){o=!1,e.removeClass("active")},100),b.preventDefault())};return document.addEventListener("keydown",p,!1),e.on("click",function(b){"rtl"===a.package.metadata.direction?h.prev():h.next(),b.preventDefault()}),f.on("click",function(b){"rtl"===a.package.metadata.direction?h.next():h.prev(),b.preventDefault()}),h.on("layout",function(a){!0===a.spread?m():n()}),h.on("relocated",function(a){a.atStart&&f.addClass("disabled"),a.atEnd&&e.addClass("disabled")}),{slideOut:j,slideIn:i,showLoader:k,hideLoader:l,showDivider:m,hideDivider:n,arrowKeys:p}},EPUBJS.reader.SettingsController=function(){var a=(this.book,this),b=$("#settings-modal"),c=$(".overlay"),d=function(){b.addClass("md-show")},e=function(){b.removeClass("md-show")};return $("#sidebarReflow").on("click",function(){a.settings.sidebarReflow=!a.settings.sidebarReflow}),b.find(".closer").on("click",function(){e()}),c.on("click",function(){e()}),{show:d,hide:e}},EPUBJS.reader.SidebarController=function(a){var b=this,c=$("#sidebar"),d=$("#panels"),e="Toc",f=function(a){var c=a+"Controller";e!=a&&void 0!==b[c]&&(b[e+"Controller"].hide(),b[c].show(),e=a,d.find(".active").removeClass("active"),d.find("#show-"+a).addClass("active"))},g=function(){return e},h=function(){b.sidebarOpen=!0,b.ReaderController.slideOut(),c.addClass("open")},i=function(){b.sidebarOpen=!1,b.ReaderController.slideIn(),c.removeClass("open")};return 
d.find(".show_view").on("click",function(a){var b=$(this).data("view");f(b),a.preventDefault()}),{show:h,hide:i,getActivePanel:g,changePanelTo:f}},EPUBJS.reader.TocController=function(a){var b=(this.book,this.rendition),c=$("#tocView"),d=document.createDocumentFragment(),e=!1,f=function(a,b){var c=document.createElement("ul");return b||(b=1),a.forEach(function(a){var d=document.createElement("li"),e=document.createElement("a");toggle=document.createElement("a");var g;d.id="toc-"+a.id,d.classList.add("list_item"),e.textContent=a.label,e.href=a.href,e.classList.add("toc_link"),d.appendChild(e),a.subitems&&a.subitems.length>0&&(b++,g=f(a.subitems,b),toggle.classList.add("toc_toggle"),d.insertBefore(toggle,e),d.appendChild(g)),c.appendChild(d)}),c},g=function(){c.show()},h=function(){c.hide()},i=function(a){var b=a.id,d=c.find("#toc-"+b),f=c.find(".currentChapter");c.find(".openChapter");d.length&&(d!=f&&d.has(e).length>0&&f.removeClass("currentChapter"),d.addClass("currentChapter"),d.parents("li").addClass("openChapter"))};b.on("renderered",i);var j=f(a);return d.appendChild(j),c.append(d),c.find(".toc_link").on("click",function(a){var d=this.getAttribute("href");a.preventDefault(),b.display(d),c.find(".currentChapter").addClass("openChapter").removeClass("currentChapter"),$(this).parent("li").addClass("currentChapter")}),c.find(".toc_toggle").on("click",function(a){var b=$(this).parent("li"),c=b.hasClass("openChapter");a.preventDefault(),c?b.removeClass("openChapter"):b.addClass("openChapter")}),{show:g,hide:h}}; | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/js/reader.min.js | reader.min.js |
// --- hooks.min.js (minified, auto-generated — do not hand-edit; change the unminified source) ---
// Registers four "beforeChapterDisplay" hook plugins on EPUBJS.Hooks:
//   endnotes      — builds hover popups (div.popup) for links marked epub:type="noteref",
//                   cloning the referenced element's content and positioning the popup
//                   against the link's bounding rect; loads popup.css via EPUBJS.core.addCss.
//   mathml        — when the chapter manifest lists "mathml", injects a text/x-mathjax-config
//                   script plus the MathJax CDN loader into the chapter document.
//   smartimages   — in "reflowable" layout, constrains each <img>'s max-height to fit the
//                   visible column, recomputing on "renderer:resized".
//   transculsions — [sic, name kept for compatibility] replaces [transclusion] elements with
//                   <iframe>s sized from their width/height attributes.
// Each plugin takes (a, b): `a` is a completion callback (invoked as `a&&a()`), and `b` is the
// chapter render context (exposes contents, render.document, height/width, on/off, etc.).
// NOTE(review): `chapter.colWidth` inside transculsions refers to a global `chapter` — verify
// against the unminified source.
EPUBJS.Hooks.register("beforeChapterDisplay").endnotes=function(a,b){var c=b.contents.querySelectorAll("a[href]"),d=Array.prototype.slice.call(c),e=EPUBJS.core.folder(location.pathname),f=(EPUBJS.cssPath,{});EPUBJS.core.addCss(EPUBJS.cssPath+"popup.css",!1,b.render.document.head),d.forEach(function(a){function c(){var c,h,n=b.height,o=b.width,p=225;m||(c=j.cloneNode(!0),m=c.querySelector("p")),f[i]||(f[i]=document.createElement("div"),f[i].setAttribute("class","popup"),pop_content=document.createElement("div"),f[i].appendChild(pop_content),pop_content.appendChild(m),pop_content.setAttribute("class","pop_content"),b.render.document.body.appendChild(f[i]),f[i].addEventListener("mouseover",d,!1),f[i].addEventListener("mouseout",e,!1),b.on("renderer:pageChanged",g,this),b.on("renderer:pageChanged",e,this)),c=f[i],h=a.getBoundingClientRect(),k=h.left,l=h.top,c.classList.add("show"),popRect=c.getBoundingClientRect(),c.style.left=k-popRect.width/2+"px",c.style.top=l+"px",p>n/2.5&&(p=n/2.5,pop_content.style.maxHeight=p+"px"),popRect.height+l>=n-25?(c.style.top=l-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),k-popRect.width<=0?(c.style.left=k+"px",c.classList.add("left")):c.classList.remove("left"),k+popRect.width/2>=o?(c.style.left=k-300+"px",popRect=c.getBoundingClientRect(),c.style.left=k-popRect.width+"px",popRect.height+l>=n-25?(c.style.top=l-popRect.height+"px",c.classList.add("above")):c.classList.remove("above"),c.classList.add("right")):c.classList.remove("right")}function d(){f[i].classList.add("on")}function e(){f[i].classList.remove("on")}function g(){setTimeout(function(){f[i].classList.remove("show")},100)}var
h,i,j,k,l,m;"noteref"==a.getAttribute("epub:type")&&(h=a.getAttribute("href"),i=h.replace("#",""),j=b.render.document.getElementById(i),a.addEventListener("mouseover",c,!1),a.addEventListener("mouseout",g,!1))}),a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").mathml=function(a,b){if(b.currentChapter.manifestProperties.indexOf("mathml")!==-1){b.render.iframe.contentWindow.mathmlCallback=a;var c=document.createElement("script");c.type="text/x-mathjax-config",c.innerHTML=' MathJax.Hub.Register.StartupHook("End",function () { window.mathmlCallback(); }); MathJax.Hub.Config({jax: ["input/TeX","input/MathML","output/SVG"],extensions: ["tex2jax.js","mml2jax.js","MathEvents.js"],TeX: {extensions: ["noErrors.js","noUndefined.js","autoload-all.js"]},MathMenu: {showRenderer: false},menuSettings: {zoom: "Click"},messageStyle: "none"}); ',b.doc.body.appendChild(c),EPUBJS.core.addScript("http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML",null,b.doc.head)}else a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").smartimages=function(a,b){var c=b.contents.querySelectorAll("img"),d=Array.prototype.slice.call(c),e=b.height;if("reflowable"!=b.layoutSettings.layout)return void a();d.forEach(function(a){var c=function(){var
c,d=a.getBoundingClientRect(),f=d.height,g=d.top,h=a.getAttribute("data-height"),i=h||f,j=Number(getComputedStyle(a,"").fontSize.match(/(\d*(\.\d*)?)px/)[1]),k=j?j/2:0;e=b.contents.clientHeight,g<0&&(g=0),a.style.maxWidth="100%",i+g>=e?(g<e/2?(c=e-g-k,a.style.maxHeight=c+"px",a.style.width="auto"):(i>e&&(a.style.maxHeight=e+"px",a.style.width="auto",d=a.getBoundingClientRect(),i=d.height),a.style.display="block",a.style.WebkitColumnBreakBefore="always",a.style.breakBefore="column"),a.setAttribute("data-height",c)):(a.style.removeProperty("max-height"),a.style.removeProperty("margin-top"))},d=function(){b.off("renderer:resized",c),b.off("renderer:chapterUnload",this)};a.addEventListener("load",c,!1),b.on("renderer:resized",c),b.on("renderer:chapterUnload",d),c()}),a&&a()},EPUBJS.Hooks.register("beforeChapterDisplay").transculsions=function(a,b){var c=b.contents.querySelectorAll("[transclusion]");Array.prototype.slice.call(c).forEach(function(a){function c(){j=g,k=h,j>chapter.colWidth&&(d=chapter.colWidth/j,j=chapter.colWidth,k*=d),f.width=j,f.height=k}var d,e=a.getAttribute("ref"),f=document.createElement("iframe"),g=a.getAttribute("width"),h=a.getAttribute("height"),i=a.parentNode,j=g,k=h;c(),b.listenUntil("renderer:resized","renderer:chapterUnloaded",c),f.src=e,i.replaceChild(f,a)}),a&&a()};
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(factory((global.RSVP = global.RSVP || {})));
}(this, (function (exports) { 'use strict';
function indexOf(callbacks, callback) {
for (var i = 0, l = callbacks.length; i < l; i++) {
if (callbacks[i] === callback) {
return i;
}
}
return -1;
}
function callbacksFor(object) {
var callbacks = object._promiseCallbacks;
if (!callbacks) {
callbacks = object._promiseCallbacks = {};
}
return callbacks;
}
/**
@class RSVP.EventTarget
*/
var EventTarget = {
/**
`RSVP.EventTarget.mixin` extends an object with EventTarget methods. For
Example:
```javascript
let object = {};
RSVP.EventTarget.mixin(object);
object.on('finished', function(event) {
// handle event
});
object.trigger('finished', { detail: value });
```
`EventTarget.mixin` also works with prototypes:
```javascript
let Person = function() {};
RSVP.EventTarget.mixin(Person.prototype);
let yehuda = new Person();
let tom = new Person();
yehuda.on('poke', function(event) {
console.log('Yehuda says OW');
});
tom.on('poke', function(event) {
console.log('Tom says OW');
});
yehuda.trigger('poke');
tom.trigger('poke');
```
@method mixin
@for RSVP.EventTarget
@private
@param {Object} object object to extend with EventTarget methods
*/
mixin: function (object) {
object['on'] = this['on'];
object['off'] = this['off'];
object['trigger'] = this['trigger'];
object._promiseCallbacks = undefined;
return object;
},
/**
Registers a callback to be executed when `eventName` is triggered
```javascript
object.on('event', function(eventInfo){
// handle the event
});
object.trigger('event');
```
@method on
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to listen for
@param {Function} callback function to be called when the event is triggered.
*/
on: function (eventName, callback) {
if (typeof callback !== 'function') {
throw new TypeError('Callback must be a function');
}
var allCallbacks = callbacksFor(this),
callbacks = void 0;
callbacks = allCallbacks[eventName];
if (!callbacks) {
callbacks = allCallbacks[eventName] = [];
}
if (indexOf(callbacks, callback) === -1) {
callbacks.push(callback);
}
},
/**
You can use `off` to stop firing a particular callback for an event:
```javascript
function doStuff() { // do stuff! }
object.on('stuff', doStuff);
object.trigger('stuff'); // doStuff will be called
// Unregister ONLY the doStuff callback
object.off('stuff', doStuff);
object.trigger('stuff'); // doStuff will NOT be called
```
If you don't pass a `callback` argument to `off`, ALL callbacks for the
event will not be executed when the event fires. For example:
```javascript
let callback1 = function(){};
let callback2 = function(){};
object.on('stuff', callback1);
object.on('stuff', callback2);
object.trigger('stuff'); // callback1 and callback2 will be executed.
object.off('stuff');
object.trigger('stuff'); // callback1 and callback2 will not be executed!
```
@method off
@for RSVP.EventTarget
@private
@param {String} eventName event to stop listening to
@param {Function} callback optional argument. If given, only the function
given will be removed from the event's callback queue. If no `callback`
argument is given, all callbacks will be removed from the event's callback
queue.
*/
off: function (eventName, callback) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
index = void 0;
if (!callback) {
allCallbacks[eventName] = [];
return;
}
callbacks = allCallbacks[eventName];
index = indexOf(callbacks, callback);
if (index !== -1) {
callbacks.splice(index, 1);
}
},
/**
Use `trigger` to fire custom events. For example:
```javascript
object.on('foo', function(){
console.log('foo event happened!');
});
object.trigger('foo');
// 'foo event happened!' logged to the console
```
You can also pass a value as a second argument to `trigger` that will be
passed as an argument to all event listeners for the event:
```javascript
object.on('foo', function(value){
console.log(value.name);
});
object.trigger('foo', { name: 'bar' });
// 'bar' logged to the console
```
@method trigger
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to be triggered
@param {*} options optional value to be passed to any event handlers for
the given `eventName`
*/
trigger: function (eventName, options, label) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
callback = void 0;
if (callbacks = allCallbacks[eventName]) {
// Don't cache the callbacks.length since it may grow
for (var i = 0; i < callbacks.length; i++) {
callback = callbacks[i];
callback(options, label);
}
}
}
};
var config = {
instrument: false
};
EventTarget['mixin'](config);
function configure(name, value) {
if (arguments.length === 2) {
config[name] = value;
} else {
return config[name];
}
}
function objectOrFunction(x) {
var type = typeof x;
return x !== null && (type === 'object' || type === 'function');
}
function isFunction(x) {
return typeof x === 'function';
}
function isObject(x) {
return x !== null && typeof x === 'object';
}
function isMaybeThenable(x) {
return x !== null && typeof x === 'object';
}
var _isArray = void 0;
if (Array.isArray) {
_isArray = Array.isArray;
} else {
_isArray = function (x) {
return Object.prototype.toString.call(x) === '[object Array]';
};
}
var isArray = _isArray;
// Date.now is not available in browsers < IE9
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/now#Compatibility
var now = Date.now || function () {
return new Date().getTime();
};
var queue = [];
function scheduleFlush() {
setTimeout(function () {
for (var i = 0; i < queue.length; i++) {
var entry = queue[i];
var payload = entry.payload;
payload.guid = payload.key + payload.id;
payload.childGuid = payload.key + payload.childId;
if (payload.error) {
payload.stack = payload.error.stack;
}
config['trigger'](entry.name, entry.payload);
}
queue.length = 0;
}, 50);
}
function instrument(eventName, promise, child) {
if (1 === queue.push({
name: eventName,
payload: {
key: promise._guidKey,
id: promise._id,
eventName: eventName,
detail: promise._result,
childId: child && child._id,
label: promise._label,
timeStamp: now(),
error: config["instrument-with-stack"] ? new Error(promise._label) : null
} })) {
scheduleFlush();
}
}
/**
`RSVP.Promise.resolve` returns a promise that will become resolved with the
passed `value`. It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
resolve(1);
});
promise.then(function(value){
// value === 1
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.resolve(1);
promise.then(function(value){
// value === 1
});
```
@method resolve
@static
@param {*} object value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
function resolve$1(object, label) {
/*jshint validthis:true */
var Constructor = this;
if (object && typeof object === 'object' && object.constructor === Constructor) {
return object;
}
var promise = new Constructor(noop, label);
resolve(promise, object);
return promise;
}
function withOwnPromise() {
return new TypeError('A promises callback cannot return that same promise.');
}
function noop() {}
var PENDING = void 0;
var FULFILLED = 1;
var REJECTED = 2;
var GET_THEN_ERROR = new ErrorObject();
function getThen(promise) {
try {
return promise.then;
} catch (error) {
GET_THEN_ERROR.error = error;
return GET_THEN_ERROR;
}
}
function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) {
try {
then$$1.call(value, fulfillmentHandler, rejectionHandler);
} catch (e) {
return e;
}
}
function handleForeignThenable(promise, thenable, then$$1) {
config.async(function (promise) {
var sealed = false;
var error = tryThen(then$$1, thenable, function (value) {
if (sealed) {
return;
}
sealed = true;
if (thenable !== value) {
resolve(promise, value, undefined);
} else {
fulfill(promise, value);
}
}, function (reason) {
if (sealed) {
return;
}
sealed = true;
reject(promise, reason);
}, 'Settle: ' + (promise._label || ' unknown promise'));
if (!sealed && error) {
sealed = true;
reject(promise, error);
}
}, promise);
}
function handleOwnThenable(promise, thenable) {
if (thenable._state === FULFILLED) {
fulfill(promise, thenable._result);
} else if (thenable._state === REJECTED) {
thenable._onError = null;
reject(promise, thenable._result);
} else {
subscribe(thenable, undefined, function (value) {
if (thenable !== value) {
resolve(promise, value, undefined);
} else {
fulfill(promise, value);
}
}, function (reason) {
return reject(promise, reason);
});
}
}
function handleMaybeThenable(promise, maybeThenable, then$$1) {
var isOwnThenable = maybeThenable.constructor === promise.constructor && then$$1 === then && promise.constructor.resolve === resolve$1;
if (isOwnThenable) {
handleOwnThenable(promise, maybeThenable);
} else if (then$$1 === GET_THEN_ERROR) {
reject(promise, GET_THEN_ERROR.error);
GET_THEN_ERROR.error = null;
} else if (isFunction(then$$1)) {
handleForeignThenable(promise, maybeThenable, then$$1);
} else {
fulfill(promise, maybeThenable);
}
}
function resolve(promise, value) {
if (promise === value) {
fulfill(promise, value);
} else if (objectOrFunction(value)) {
handleMaybeThenable(promise, value, getThen(value));
} else {
fulfill(promise, value);
}
}
function publishRejection(promise) {
if (promise._onError) {
promise._onError(promise._result);
}
publish(promise);
}
function fulfill(promise, value) {
if (promise._state !== PENDING) {
return;
}
promise._result = value;
promise._state = FULFILLED;
if (promise._subscribers.length === 0) {
if (config.instrument) {
instrument('fulfilled', promise);
}
} else {
config.async(publish, promise);
}
}
function reject(promise, reason) {
if (promise._state !== PENDING) {
return;
}
promise._state = REJECTED;
promise._result = reason;
config.async(publishRejection, promise);
}
function subscribe(parent, child, onFulfillment, onRejection) {
var subscribers = parent._subscribers;
var length = subscribers.length;
parent._onError = null;
subscribers[length] = child;
subscribers[length + FULFILLED] = onFulfillment;
subscribers[length + REJECTED] = onRejection;
if (length === 0 && parent._state) {
config.async(publish, parent);
}
}
function publish(promise) {
var subscribers = promise._subscribers;
var settled = promise._state;
if (config.instrument) {
instrument(settled === FULFILLED ? 'fulfilled' : 'rejected', promise);
}
if (subscribers.length === 0) {
return;
}
var child = void 0,
callback = void 0,
result = promise._result;
for (var i = 0; i < subscribers.length; i += 3) {
child = subscribers[i];
callback = subscribers[i + settled];
if (child) {
invokeCallback(settled, child, callback, result);
} else {
callback(result);
}
}
promise._subscribers.length = 0;
}
function ErrorObject() {
this.error = null;
}
var TRY_CATCH_ERROR = new ErrorObject();
function tryCatch(callback, result) {
try {
return callback(result);
} catch (e) {
TRY_CATCH_ERROR.error = e;
return TRY_CATCH_ERROR;
}
}
function invokeCallback(state, promise, callback, result) {
var hasCallback = isFunction(callback);
var value = void 0,
error = void 0;
if (hasCallback) {
value = tryCatch(callback, result);
if (value === TRY_CATCH_ERROR) {
error = value.error;
value.error = null; // release
} else if (value === promise) {
reject(promise, withOwnPromise());
return;
}
} else {
value = result;
}
if (promise._state !== PENDING) {
// noop
} else if (hasCallback && error === undefined) {
resolve(promise, value);
} else if (error !== undefined) {
reject(promise, error);
} else if (state === FULFILLED) {
fulfill(promise, value);
} else if (state === REJECTED) {
reject(promise, value);
}
}
function initializePromise(promise, resolver) {
var resolved = false;
try {
resolver(function (value) {
if (resolved) {
return;
}
resolved = true;
resolve(promise, value);
}, function (reason) {
if (resolved) {
return;
}
resolved = true;
reject(promise, reason);
});
} catch (e) {
reject(promise, e);
}
}
function then(onFulfillment, onRejection, label) {
var parent = this;
var state = parent._state;
if (state === FULFILLED && !onFulfillment || state === REJECTED && !onRejection) {
config.instrument && instrument('chained', parent, parent);
return parent;
}
parent._onError = null;
var child = new parent.constructor(noop, label);
var result = parent._result;
config.instrument && instrument('chained', parent, child);
if (state === PENDING) {
subscribe(parent, child, onFulfillment, onRejection);
} else {
var callback = state === FULFILLED ? onFulfillment : onRejection;
config.async(function () {
return invokeCallback(state, child, callback, result);
});
}
return child;
}
var Enumerator = function () {
function Enumerator(Constructor, input, abortOnReject, label) {
this._instanceConstructor = Constructor;
this.promise = new Constructor(noop, label);
this._abortOnReject = abortOnReject;
this._init.apply(this, arguments);
}
Enumerator.prototype._init = function _init(Constructor, input) {
var len = input.length || 0;
this.length = len;
this._remaining = len;
this._result = new Array(len);
this._enumerate(input);
if (this._remaining === 0) {
fulfill(this.promise, this._result);
}
};
Enumerator.prototype._enumerate = function _enumerate(input) {
var length = this.length;
var promise = this.promise;
for (var i = 0; promise._state === PENDING && i < length; i++) {
this._eachEntry(input[i], i);
}
};
Enumerator.prototype._settleMaybeThenable = function _settleMaybeThenable(entry, i) {
var c = this._instanceConstructor;
var resolve$$1 = c.resolve;
if (resolve$$1 === resolve$1) {
var then$$1 = getThen(entry);
if (then$$1 === then && entry._state !== PENDING) {
entry._onError = null;
this._settledAt(entry._state, i, entry._result);
} else if (typeof then$$1 !== 'function') {
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
} else if (c === Promise) {
var promise = new c(noop);
handleMaybeThenable(promise, entry, then$$1);
this._willSettleAt(promise, i);
} else {
this._willSettleAt(new c(function (resolve$$1) {
return resolve$$1(entry);
}), i);
}
} else {
this._willSettleAt(resolve$$1(entry), i);
}
};
Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {
if (isMaybeThenable(entry)) {
this._settleMaybeThenable(entry, i);
} else {
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
}
};
Enumerator.prototype._settledAt = function _settledAt(state, i, value) {
var promise = this.promise;
if (promise._state === PENDING) {
if (this._abortOnReject && state === REJECTED) {
reject(promise, value);
} else {
this._remaining--;
this._result[i] = this._makeResult(state, i, value);
if (this._remaining === 0) {
fulfill(promise, this._result);
}
}
}
};
Enumerator.prototype._makeResult = function _makeResult(state, i, value) {
return value;
};
Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {
var enumerator = this;
subscribe(promise, undefined, function (value) {
return enumerator._settledAt(FULFILLED, i, value);
}, function (reason) {
return enumerator._settledAt(REJECTED, i, reason);
});
};
return Enumerator;
}();
function makeSettledResult(state, position, value) {
if (state === FULFILLED) {
return {
state: 'fulfilled',
value: value
};
} else {
return {
state: 'rejected',
reason: value
};
}
}
/**
`RSVP.Promise.all` accepts an array of promises, and returns a new promise which
is fulfilled with an array of fulfillment values for the passed promises, or
rejected with the reason of the first passed promise to be rejected. It casts all
elements of the passed iterable to promises as it runs this algorithm.
Example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// The array here would be [ 1, 2, 3 ];
});
```
If any of the `promises` given to `RSVP.all` are rejected, the first promise
that is rejected will be given as an argument to the returned promises's
rejection handler. For example:
Example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error("2"));
let promise3 = RSVP.reject(new Error("3"));
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// Code here never runs because there are rejected promises!
}, function(error) {
// error.message === "2"
});
```
@method all
@static
@param {Array} entries array of promises
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all `promises` have been
fulfilled, or rejected if any of them become rejected.
@static
*/
function all(entries, label) {
if (!isArray(entries)) {
return this.reject(new TypeError("Promise.all must be called with an array"), label);
}
return new Enumerator(this, entries, true /* abort on reject */, label).promise;
}
/**
`RSVP.Promise.race` returns a new promise which is settled in the same way as the
first passed promise to settle.
Example:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 2');
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// result === 'promise 2' because it was resolved before promise1
// was resolved.
});
```
`RSVP.Promise.race` is deterministic in that only the state of the first
settled promise matters. For example, even if other promises given to the
`promises` array argument are resolved, but the first settled promise has
become rejected before the other promises became fulfilled, the returned
promise will become rejected:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
reject(new Error('promise 2'));
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// Code here never runs
}, function(reason){
// reason.message === 'promise 2' because promise 2 became rejected before
// promise 1 became fulfilled
});
```
An example real-world use case is implementing timeouts:
```javascript
RSVP.Promise.race([ajax('foo.json'), timeout(5000)])
```
@method race
@static
@param {Array} entries array of promises to observe
@param {String} label optional string for describing the promise returned.
Useful for tooling.
@return {Promise} a promise which settles in the same way as the first passed
promise to settle.
*/
function race(entries, label) {
/*jshint validthis:true */
var Constructor = this;
var promise = new Constructor(noop, label);
if (!isArray(entries)) {
reject(promise, new TypeError('Promise.race must be called with an array'));
return promise;
}
for (var i = 0; promise._state === PENDING && i < entries.length; i++) {
subscribe(Constructor.resolve(entries[i]), undefined, function (value) {
return resolve(promise, value);
}, function (reason) {
return reject(promise, reason);
});
}
return promise;
}
/**
`RSVP.Promise.reject` returns a promise rejected with the passed `reason`.
It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
reject(new Error('WHOOPS'));
});
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.reject(new Error('WHOOPS'));
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
@method reject
@static
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
function reject$1(reason, label) {
/*jshint validthis:true */
var Constructor = this;
var promise = new Constructor(noop, label);
reject(promise, reason);
return promise;
}
var guidKey = 'rsvp_' + now() + '-';
var counter = 0;
function needsResolver() {
throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
}
function needsNew() {
throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
}
/**
Promise objects represent the eventual result of an asynchronous operation. The
primary way of interacting with a promise is through its `then` method, which
registers callbacks to receive either a promise’s eventual value or the reason
why the promise cannot be fulfilled.
Terminology
-----------
- `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- `thenable` is an object or function that defines a `then` method.
- `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- `exception` is a value that is thrown using the throw statement.
- `reason` is a value that indicates why a promise was rejected.
- `settled` the final resting state of a promise, fulfilled or rejected.
A promise can be in one of three states: pending, fulfilled, or rejected.
Promises that are fulfilled have a fulfillment value and are in the fulfilled
state. Promises that are rejected have a rejection reason and are in the
rejected state. A fulfillment value is never a thenable.
Promises can also be said to *resolve* a value. If this value is also a
promise, then the original promise's settled state will match the value's
settled state. So a promise that *resolves* a promise that rejects will
itself reject, and a promise that *resolves* a promise that fulfills will
itself fulfill.
Basic Usage:
------------
```js
let promise = new Promise(function(resolve, reject) {
// on success
resolve(value);
// on failure
reject(reason);
});
promise.then(function(value) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Advanced Usage:
---------------
Promises shine when abstracting away asynchronous interactions such as
`XMLHttpRequest`s.
```js
function getJSON(url) {
return new Promise(function(resolve, reject){
let xhr = new XMLHttpRequest();
xhr.open('GET', url);
xhr.onreadystatechange = handler;
xhr.responseType = 'json';
xhr.setRequestHeader('Accept', 'application/json');
xhr.send();
function handler() {
if (this.readyState === this.DONE) {
if (this.status === 200) {
resolve(this.response);
} else {
reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
}
}
};
});
}
getJSON('/posts.json').then(function(json) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Unlike callbacks, promises are great composable primitives.
```js
Promise.all([
getJSON('/posts'),
getJSON('/comments')
]).then(function(values){
values[0] // => postsJSON
values[1] // => commentsJSON
return values;
});
```
@class RSVP.Promise
@param {function} resolver
@param {String} label optional string for labeling the promise.
Useful for tooling.
@constructor
*/
var Promise = function () {
function Promise(resolver, label) {
this._id = counter++;
this._label = label;
this._state = undefined;
this._result = undefined;
this._subscribers = [];
config.instrument && instrument('created', this);
if (noop !== resolver) {
typeof resolver !== 'function' && needsResolver();
this instanceof Promise ? initializePromise(this, resolver) : needsNew();
}
}
Promise.prototype._onError = function _onError(reason) {
var _this = this;
config.after(function () {
if (_this._onError) {
config.trigger('error', reason, _this._label);
}
});
};
/**
`catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
as the catch block of a try/catch statement.
```js
function findAuthor(){
throw new Error('couldn\'t find that author');
}
// synchronous
try {
findAuthor();
} catch(reason) {
// something went wrong
}
// async with promises
findAuthor().catch(function(reason){
// something went wrong
});
```
@method catch
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.catch = function _catch(onRejection, label) {
return this.then(undefined, onRejection, label);
};
/**
`finally` will be invoked regardless of the promise's fate just as native
try/catch/finally behaves
Synchronous example:
```js
findAuthor() {
if (Math.random() > 0.5) {
throw new Error();
}
return new Author();
}
try {
return findAuthor(); // succeed or fail
} catch(error) {
return findOtherAuthor();
} finally {
// always runs
// doesn't affect the return value
}
```
Asynchronous example:
```js
findAuthor().catch(function(reason){
return findOtherAuthor();
}).finally(function(){
// author was either found, or not
});
```
@method finally
@param {Function} callback
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.finally = function _finally(callback, label) {
var promise = this;
var constructor = promise.constructor;
return promise.then(function (value) {
return constructor.resolve(callback()).then(function () {
return value;
});
}, function (reason) {
return constructor.resolve(callback()).then(function () {
throw reason;
});
}, label);
};
return Promise;
}();
Promise.cast = resolve$1; // deprecated
Promise.all = all;
Promise.race = race;
Promise.resolve = resolve$1;
Promise.reject = reject$1;
Promise.prototype._guidKey = guidKey;
/**
The primary way of interacting with a promise is through its `then` method,
which registers callbacks to receive either a promise's eventual value or the
reason why the promise cannot be fulfilled.
```js
findUser().then(function(user){
// user is available
}, function(reason){
// user is unavailable, and you are given the reason why
});
```
Chaining
--------
The return value of `then` is itself a promise. This second, 'downstream'
promise is resolved with the return value of the first promise's fulfillment
or rejection handler, or rejected if the handler throws an exception.
```js
findUser().then(function (user) {
return user.name;
}, function (reason) {
return 'default name';
}).then(function (userName) {
// If `findUser` fulfilled, `userName` will be the user's name, otherwise it
// will be `'default name'`
});
findUser().then(function (user) {
throw new Error('Found user, but still unhappy');
}, function (reason) {
throw new Error('`findUser` rejected and we\'re unhappy');
}).then(function (value) {
// never reached
}, function (reason) {
// if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.
// If `findUser` rejected, `reason` will be '`findUser` rejected and we\'re unhappy'.
});
```
If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
```js
findUser().then(function (user) {
throw new PedagogicalException('Upstream error');
}).then(function (value) {
// never reached
}).then(function (value) {
// never reached
}, function (reason) {
  // The `PedagogicalException` is propagated all the way down to here
});
```
Assimilation
------------
Sometimes the value you want to propagate to a downstream promise can only be
retrieved asynchronously. This can be achieved by returning a promise in the
fulfillment or rejection handler. The downstream promise will then be pending
until the returned promise is settled. This is called *assimilation*.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// The user's comments are now available
});
```
  If the assimilated promise rejects, then the downstream promise will also reject.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// If `findCommentsByAuthor` fulfills, we'll have the value here
}, function (reason) {
// If `findCommentsByAuthor` rejects, we'll have the reason here
});
```
Simple Example
--------------
Synchronous Example
```javascript
let result;
try {
result = findResult();
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
findResult(function(result, err){
if (err) {
// failure
} else {
// success
}
});
```
Promise Example;
```javascript
findResult().then(function(result){
// success
}, function(reason){
// failure
});
```
Advanced Example
--------------
Synchronous Example
```javascript
let author, books;
try {
author = findAuthor();
books = findBooksByAuthor(author);
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
function foundBooks(books) {
}
function failure(reason) {
}
findAuthor(function(author, err){
if (err) {
failure(err);
// failure
} else {
try {
    findBooksByAuthor(author, function(books, err) {
if (err) {
failure(err);
} else {
try {
foundBooks(books);
} catch(reason) {
failure(reason);
}
}
});
} catch(error) {
failure(err);
}
// success
}
});
```
Promise Example;
```javascript
findAuthor().
then(findBooksByAuthor).
then(function(books){
// found books
}).catch(function(reason){
// something went wrong
});
```
@method then
@param {Function} onFulfillment
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.then = then;
function Result() {
this.value = undefined;
}
var ERROR = new Result();
var GET_THEN_ERROR$1 = new Result();
function getThen$1(obj) {
try {
return obj.then;
} catch (error) {
ERROR.value = error;
return ERROR;
}
}
function tryApply(f, s, a) {
try {
f.apply(s, a);
} catch (error) {
ERROR.value = error;
return ERROR;
}
}
function makeObject(_, argumentNames) {
var obj = {};
var length = _.length;
var args = new Array(length);
for (var x = 0; x < length; x++) {
args[x] = _[x];
}
for (var i = 0; i < argumentNames.length; i++) {
var name = argumentNames[i];
obj[name] = args[i + 1];
}
return obj;
}
function arrayResult(_) {
var length = _.length;
var args = new Array(length - 1);
for (var i = 1; i < length; i++) {
args[i - 1] = _[i];
}
return args;
}
function wrapThenable(then, promise) {
return {
then: function (onFulFillment, onRejection) {
return then.call(promise, onFulFillment, onRejection);
}
};
}
/**
`RSVP.denodeify` takes a 'node-style' function and returns a function that
will return an `RSVP.Promise`. You can use `denodeify` in Node.js or the
browser when you'd prefer to use promises over using callbacks. For example,
`denodeify` transforms the following:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) return handleError(err);
handleData(data);
});
```
into:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
readFile('myfile.txt').then(handleData, handleError);
```
If the node function has multiple success parameters, then `denodeify`
just returns the first one:
```javascript
let request = RSVP.denodeify(require('request'));
request('http://example.com').then(function(res) {
// ...
});
```
However, if you need all success parameters, setting `denodeify`'s
second parameter to `true` causes it to return all success parameters
as an array:
```javascript
let request = RSVP.denodeify(require('request'), true);
request('http://example.com').then(function(result) {
// result[0] -> res
// result[1] -> body
});
```
Or if you pass it an array with names it returns the parameters as a hash:
```javascript
let request = RSVP.denodeify(require('request'), ['res', 'body']);
request('http://example.com').then(function(result) {
// result.res
// result.body
});
```
Sometimes you need to retain the `this`:
```javascript
let app = require('express')();
let render = RSVP.denodeify(app.render.bind(app));
```
  The denodeified function inherits from the original function. It works in all
environments, except IE 10 and below. Consequently all properties of the original
function are available to you. However, any properties you change on the
denodeified function won't be changed on the original function. Example:
```javascript
let request = RSVP.denodeify(require('request')),
cookieJar = request.jar(); // <- Inheritance is used here
request('http://example.com', {jar: cookieJar}).then(function(res) {
// cookieJar.cookies holds now the cookies returned by example.com
});
```
Using `denodeify` makes it easier to compose asynchronous operations instead
of using callbacks. For example, instead of:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) { ... } // Handle error
fs.writeFile('myfile2.txt', data, function(err){
if (err) { ... } // Handle error
console.log('done')
});
});
```
you can chain the operations together using `then` from the returned promise:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
let writeFile = RSVP.denodeify(fs.writeFile);
readFile('myfile.txt').then(function(data){
return writeFile('myfile2.txt', data);
}).then(function(){
console.log('done')
}).catch(function(error){
// Handle error
});
```
@method denodeify
@static
@for RSVP
@param {Function} nodeFunc a 'node-style' function that takes a callback as
its last argument. The callback expects an error to be passed as its first
argument (if an error occurred, otherwise null), and the value from the
operation as its second argument ('function(err, value){ }').
  @param {Boolean|Array} [options] An optional parameter that if set
  to `true` causes the promise to fulfill with the callback's success arguments
  as an array. This is useful if the node function has multiple success
  parameters. If you set this parameter to an array with names, the promise will
fulfill with a hash with these names as keys and the success parameters as
values.
@return {Function} a function that wraps `nodeFunc` to return an
`RSVP.Promise`
@static
*/
// Wrap a node-style function so it returns a promise instead of taking a
// callback. See the doc comment above for the `options` behavior.
function denodeify(nodeFunc, options) {
var fn = function () {
var self = this;
var l = arguments.length;
// One extra slot for the callback appended below.
var args = new Array(l + 1);
var promiseInput = false;
for (var i = 0; i < l; ++i) {
var arg = arguments[i];
if (!promiseInput) {
// TODO: clean this up
promiseInput = needsPromiseInput(arg);
if (promiseInput === GET_THEN_ERROR$1) {
// Accessing `arg.then` threw; reject with the captured error.
var p = new Promise(noop);
reject(p, GET_THEN_ERROR$1.value);
return p;
} else if (promiseInput && promiseInput !== true) {
// Foreign thenable: bind its `then` so it can be awaited safely.
arg = wrapThenable(promiseInput, arg);
}
}
args[i] = arg;
}
var promise = new Promise(noop);
// Node-style callback translating (err, val) into promise settlement
// according to `options` (see doc comment above).
args[l] = function (err, val) {
if (err) reject(promise, err);else if (options === undefined) resolve(promise, val);else if (options === true) resolve(promise, arrayResult(arguments));else if (isArray(options)) resolve(promise, makeObject(arguments, options));else resolve(promise, val);
};
if (promiseInput) {
// At least one argument was thenable: resolve all args before calling.
return handlePromiseInput(promise, args, nodeFunc, self);
} else {
return handleValueInput(promise, args, nodeFunc, self);
}
};
// Inherit statics from the wrapped function (not supported on IE <= 10).
fn.__proto__ = nodeFunc;
return fn;
}
// Invoke the node-style function synchronously with plain-value arguments.
// A thrown exception is captured by tryApply and surfaces as the ERROR
// sentinel, rejecting the promise with the original error.
function handleValueInput(promise, args, nodeFunc, self) {
  var outcome = tryApply(nodeFunc, self, args);
  if (outcome === ERROR) {
    reject(promise, outcome.value);
  }
  return promise;
}
// Wait for every argument to settle, then invoke the node-style function
// with the resolved values; a synchronous throw rejects the promise.
function handlePromiseInput(promise, args, nodeFunc, self) {
  return Promise.all(args).then(function (resolvedArgs) {
    var outcome = tryApply(nodeFunc, self, resolvedArgs);
    if (outcome === ERROR) {
      reject(promise, outcome.value);
    }
    return promise;
  });
}
// Classify an argument: `true` for our own promises, a bound `then` for
// foreign thenables (or GET_THEN_ERROR$1 if probing threw), `false` for
// everything else.
function needsPromiseInput(arg) {
  // Non-objects can never be thenable.
  if (!arg || typeof arg !== 'object') {
    return false;
  }
  return arg.constructor === Promise ? true : getThen$1(arg);
}
/**
This is a convenient alias for `RSVP.Promise.all`.
@method all
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
// Convenience alias for `RSVP.Promise.all`.
function all$1(entries, label) {
  return Promise.all(entries, label);
}
// Babel helper: enforce that super() ran before `this` is used, and honor a
// constructor's explicit object/function return value.
function _possibleConstructorReturn(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  if (call && (typeof call === "object" || typeof call === "function")) {
    return call;
  }
  return self;
}
// Babel helper: wire up prototype and static inheritance for a class pair.
function _inherits(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: { value: subClass, enumerable: false, writable: true, configurable: true }
  });
  // Link static members too, preferring the standard API over __proto__.
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
// Enumerator subclass backing `allSettled`: never aborts on rejection and
// records each outcome as a { state, value | reason } descriptor via
// makeSettledResult.
var AllSettled = function (_Enumerator) {
_inherits(AllSettled, _Enumerator);
function AllSettled(Constructor, entries, label) {
return _possibleConstructorReturn(this, _Enumerator.call(this, Constructor, entries, false /* don't abort on reject */, label));
}
return AllSettled;
}(Enumerator);
AllSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.allSettled` is similar to `RSVP.all`, but instead of implementing
a fail-fast method, it waits until all the promises have returned and
shows you all the results. This is useful if you want to handle multiple
promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled. The return promise is fulfilled with an array of the states of
the promises passed into the `promises` array argument.
Each state object will either indicate fulfillment or rejection, and
provide the corresponding value or reason. The states will take one of
the following formats:
```javascript
{ state: 'fulfilled', value: value }
or
{ state: 'rejected', reason: reason }
```
Example:
```javascript
let promise1 = RSVP.Promise.resolve(1);
let promise2 = RSVP.Promise.reject(new Error('2'));
let promise3 = RSVP.Promise.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
RSVP.allSettled(promises).then(function(array){
// array == [
// { state: 'fulfilled', value: 1 },
// { state: 'rejected', reason: Error },
// { state: 'rejected', reason: Error }
// ]
// Note that for the second item, reason.message will be '2', and for the
// third item, reason.message will be '3'.
}, function(error) {
// Not run. (This block would only be called if allSettled had failed,
// for instance if passed an incorrect argument type.)
});
```
@method allSettled
@static
@for RSVP
@param {Array} entries
@param {String} label - optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with an array of the settled
states of the constituent promises.
*/
// Settle every entry (never fail-fast) and fulfill with the array of
// { state, value | reason } descriptors. See doc comment above.
function allSettled(entries, label) {
  if (isArray(entries)) {
    return new AllSettled(Promise, entries, label).promise;
  }
  // Non-array input rejects rather than throwing synchronously.
  return Promise.reject(new TypeError("Promise.allSettled must be called with an array"), label);
}
/**
This is a convenient alias for `RSVP.Promise.race`.
@method race
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
// Convenience alias for `RSVP.Promise.race`.
function race$1(entries, label) {
  return Promise.race(entries, label);
}
// Babel helper (duplicate emitted per module): enforce super() initialisation
// and honor an explicit object/function constructor return.
function _possibleConstructorReturn$1(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  if (call && (typeof call === "object" || typeof call === "function")) {
    return call;
  }
  return self;
}
// Babel helper (duplicate emitted per module): wire up prototype and static
// inheritance for a class pair.
function _inherits$1(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: { value: subClass, enumerable: false, writable: true, configurable: true }
  });
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
var hasOwnProperty = Object.prototype.hasOwnProperty;
// Enumerator subclass backing `hash`: enumerates an object's own enumerable
// keys and fulfills with an object of the same shape.
var PromiseHash = function (_Enumerator) {
_inherits$1(PromiseHash, _Enumerator);
function PromiseHash(Constructor, object) {
// abortOnReject defaults to true (hash is fail-fast); HashSettled passes false.
var abortOnReject = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
var label = arguments[3];
return _possibleConstructorReturn$1(this, _Enumerator.call(this, Constructor, object, abortOnReject, label));
}
PromiseHash.prototype._init = function _init(Constructor, object) {
// Results accumulate into an object keyed by property name, not an array.
this._result = {};
this._enumerate(object);
if (this._remaining === 0) {
// Input had no own keys: fulfill immediately with the empty result.
fulfill(this.promise, this._result);
}
};
PromiseHash.prototype._enumerate = function _enumerate(input) {
var promise = this.promise;
var results = [];
// Collect own-property entries first so _remaining is known before any
// entry can settle the promise.
for (var key in input) {
if (hasOwnProperty.call(input, key)) {
results.push({
position: key,
entry: input[key]
});
}
}
var length = results.length;
this._remaining = length;
var result = void 0;
// Stop early once the promise leaves PENDING (fail-fast rejection).
for (var i = 0; promise._state === PENDING && i < length; i++) {
result = results[i];
this._eachEntry(result.entry, result.position);
}
};
return PromiseHash;
}(Enumerator);
/**
`RSVP.hash` is similar to `RSVP.all`, but takes an object instead of an array
for its `promises` argument.
Returns a promise that is fulfilled when all the given promises have been
fulfilled, or rejected if any of them become rejected. The returned promise
is fulfilled with a hash that has the same key names as the `promises` object
argument. If any of the values in the object are not promises, they will
simply be copied over to the fulfilled object.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
yourPromise: RSVP.resolve(2),
theirPromise: RSVP.resolve(3),
notAPromise: 4
};
RSVP.hash(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: 1,
// yourPromise: 2,
// theirPromise: 3,
// notAPromise: 4
// }
});
  ```
If any of the `promises` given to `RSVP.hash` are rejected, the first promise
that is rejected will be given as the reason to the rejection handler.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
rejectedPromise: RSVP.reject(new Error('rejectedPromise')),
anotherRejectedPromise: RSVP.reject(new Error('anotherRejectedPromise')),
};
RSVP.hash(promises).then(function(hash){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === 'rejectedPromise'
});
```
An important note: `RSVP.hash` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hash` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hash(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: 'Example'
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hash
@static
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all properties of `promises`
have been fulfilled, or rejected if any of them become rejected.
*/
// Fulfill with an object mirroring `object`'s own keys once every value has
// fulfilled; fail-fast on the first rejection. See doc comment above.
function hash(object, label) {
  if (isObject(object)) {
    return new PromiseHash(Promise, object, label).promise;
  }
  // Non-object input rejects rather than throwing synchronously.
  return Promise.reject(new TypeError("Promise.hash must be called with an object"), label);
}
// Babel helper (duplicate emitted per module): enforce super() initialisation
// and honor an explicit object/function constructor return.
function _possibleConstructorReturn$2(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  if (call && (typeof call === "object" || typeof call === "function")) {
    return call;
  }
  return self;
}
// Babel helper (duplicate emitted per module): wire up prototype and static
// inheritance for a class pair.
function _inherits$2(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: { value: subClass, enumerable: false, writable: true, configurable: true }
  });
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
// PromiseHash subclass backing `hashSettled`: abortOnReject is forced to
// false and results use the settled { state, ... } format via
// makeSettledResult.
var HashSettled = function (_PromiseHash) {
_inherits$2(HashSettled, _PromiseHash);
function HashSettled(Constructor, object, label) {
return _possibleConstructorReturn$2(this, _PromiseHash.call(this, Constructor, object, false, label));
}
return HashSettled;
}(PromiseHash);
HashSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.hashSettled` is similar to `RSVP.allSettled`, but takes an object
instead of an array for its `promises` argument.
Unlike `RSVP.all` or `RSVP.hash`, which implement a fail-fast method,
but like `RSVP.allSettled`, `hashSettled` waits until all the
constituent promises have returned and then shows you all the results
with their states and values/reasons. This is useful if you want to
handle multiple promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled, or rejected if the passed parameters are invalid.
The returned promise is fulfilled with a hash that has the same key names as
the `promises` object argument. If any of the values in the object are not
promises, they will be copied over to the fulfilled object and marked with state
'fulfilled'.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
yourPromise: RSVP.Promise.resolve(2),
theirPromise: RSVP.Promise.resolve(3),
notAPromise: 4
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// yourPromise: { state: 'fulfilled', value: 2 },
// theirPromise: { state: 'fulfilled', value: 3 },
// notAPromise: { state: 'fulfilled', value: 4 }
// }
});
```
If any of the `promises` given to `RSVP.hash` are rejected, the state will
be set to 'rejected' and the reason for rejection provided.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
rejectedPromise: RSVP.Promise.reject(new Error('rejection')),
anotherRejectedPromise: RSVP.Promise.reject(new Error('more rejection')),
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// rejectedPromise: { state: 'rejected', reason: Error },
// anotherRejectedPromise: { state: 'rejected', reason: Error },
// }
// Note that for rejectedPromise, reason.message == 'rejection',
// and for anotherRejectedPromise, reason.message == 'more rejection'.
});
```
An important note: `RSVP.hashSettled` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hashSettled` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.Promise.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.Promise.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hashSettled(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: { state: 'fulfilled', value: 'Example' }
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hashSettled
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
  @return {Promise} promise that is fulfilled when all properties of `promises`
have been settled.
@static
*/
// Settle every value of `object` (never fail-fast) and fulfill with an
// object of { state, value | reason } descriptors. See doc comment above.
function hashSettled(object, label) {
  // Non-object input rejects rather than throwing synchronously.
  if (!isObject(object)) {
    return Promise.reject(new TypeError("RSVP.hashSettled must be called with an object"), label);
  }
  // BUG FIX: HashSettled's constructor signature is (Constructor, object,
  // label) — it supplies abortOnReject=false itself. The previous call
  // passed an extra `false` argument, which shifted into the label slot and
  // silently dropped the caller-supplied label.
  return new HashSettled(Promise, object, label).promise;
}
/**
`RSVP.rethrow` will rethrow an error on the next turn of the JavaScript event
loop in order to aid debugging.
Promises A+ specifies that any exceptions that occur with a promise must be
caught by the promises implementation and bubbled to the last handler. For
this reason, it is recommended that you always specify a second rejection
handler function to `then`. However, `RSVP.rethrow` will throw the exception
outside of the promise, so it bubbles up to your console if in the browser,
or domain/cause uncaught exception in Node. `rethrow` will also throw the
error again so the error can be handled by the promise per the spec.
```javascript
function throws(){
throw new Error('Whoops!');
}
let promise = new RSVP.Promise(function(resolve, reject){
throws();
});
promise.catch(RSVP.rethrow).then(function(){
// Code here doesn't run because the promise became rejected due to an
// error!
}, function (err){
// handle the error here
});
```
The 'Whoops' error will be thrown on the next turn of the event loop
and you can watch for it in your console. You can also handle it using a
rejection handler given to `.then` or `.catch` on the returned promise.
@method rethrow
@static
@for RSVP
@param {Error} reason reason the promise became rejected.
@throws Error
@static
*/
// Re-throw `reason` both outside the promise machinery (next event-loop
// turn, so it surfaces as an uncaught error for debugging) and
// synchronously (so the promise chain still rejects per the A+ spec).
function rethrow(reason) {
  var raise = function () {
    throw reason;
  };
  setTimeout(raise);
  raise();
}
/**
`RSVP.defer` returns an object similar to jQuery's `$.Deferred`.
`RSVP.defer` should be used when porting over code reliant on `$.Deferred`'s
interface. New code should use the `RSVP.Promise` constructor instead.
The object returned from `RSVP.defer` is a plain object with three properties:
* promise - an `RSVP.Promise`.
* reject - a function that causes the `promise` property on this object to
become rejected
* resolve - a function that causes the `promise` property on this object to
become fulfilled.
Example:
```javascript
let deferred = RSVP.defer();
deferred.resolve("Success!");
deferred.promise.then(function(value){
// value here is "Success!"
});
```
@method defer
@static
@for RSVP
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Object}
*/
// jQuery-style deferred: expose a promise together with its resolve and
// reject functions. The Promise executor runs synchronously, so both
// functions are captured before this returns.
function defer(label) {
  var resolveFn;
  var rejectFn;
  var promise = new Promise(function (resolve, reject) {
    resolveFn = resolve;
    rejectFn = reject;
  }, label);
  return { resolve: resolveFn, reject: rejectFn, promise: promise };
}
/**
`RSVP.map` is similar to JavaScript's native `map` method, except that it
waits for all promises to become fulfilled before running the `mapFn` on
each item in given to `promises`. `RSVP.map` returns a promise that will
become fulfilled with the result of running `mapFn` on the values the promises
become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(result){
// result is [ 2, 3, 4 ]
});
```
If any of the `promises` given to `RSVP.map` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.map` will also wait if a promise is returned from `mapFn`. For example,
say you want to get all comments from a set of blog posts, but you need
the blog posts first because they contain a url to those comments.
  ```javascript
let mapFn = function(blogPost){
// getComments does some ajax and returns an RSVP.Promise that is fulfilled
// with some comments data
return getComments(blogPost.comments_url);
};
// getBlogPosts does some ajax and returns an RSVP.Promise that is fulfilled
// with some blog post data
RSVP.map(getBlogPosts(), mapFn).then(function(comments){
// comments is the result of asking the server for the comments
// of all blog posts returned from getBlogPosts()
});
```
@method map
@static
@for RSVP
@param {Array} promises
@param {Function} mapFn function to be called on each fulfilled promise.
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with the result of calling
`mapFn` on each fulfilled promise or value when they become fulfilled.
The promise will be rejected if any of the given `promises` become rejected.
@static
*/
// Wait for all `promises`, apply `mapFn` to each fulfilled value, then wait
// again in case `mapFn` itself returned thenables. See doc comment above.
function map(promises, mapFn, label) {
  if (!isArray(promises)) {
    return Promise.reject(new TypeError("RSVP.map must be called with an array"), label);
  }
  if (!isFunction(mapFn)) {
    return Promise.reject(new TypeError("RSVP.map expects a function as a second argument"), label);
  }
  return Promise.all(promises, label).then(function (values) {
    var mapped = [];
    for (var idx = 0; idx < values.length; idx++) {
      mapped.push(mapFn(values[idx]));
    }
    return Promise.all(mapped, label);
  });
}
/**
This is a convenient alias for `RSVP.Promise.resolve`.
@method resolve
@static
@for RSVP
@param {*} value value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
// Convenience alias for `RSVP.Promise.resolve`.
function resolve$2(value, label) {
  return Promise.resolve(value, label);
}
/**
This is a convenient alias for `RSVP.Promise.reject`.
@method reject
@static
@for RSVP
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
// Convenience alias for `RSVP.Promise.reject`.
function reject$2(reason, label) {
  return Promise.reject(reason, label);
}
/**
`RSVP.filter` is similar to JavaScript's native `filter` method, except that it
waits for all promises to become fulfilled before running the `filterFn` on
each item in given to `promises`. `RSVP.filter` returns a promise that will
become fulfilled with the result of running `filterFn` on the values the
promises become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [promise1, promise2, promise3];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(result){
// result is [ 2, 3 ]
});
```
If any of the `promises` given to `RSVP.filter` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.filter` will also wait for any promises returned from `filterFn`.
For instance, you may want to fetch a list of users then return a subset
of those users based on some asynchronous operation:
```javascript
let alice = { name: 'alice' };
let bob = { name: 'bob' };
let users = [ alice, bob ];
let promises = users.map(function(user){
return RSVP.resolve(user);
});
let filterFn = function(user){
// Here, Alice has permissions to create a blog post, but Bob does not.
return getPrivilegesForUser(user).then(function(privs){
return privs.can_create_blog_post === true;
});
};
RSVP.filter(promises, filterFn).then(function(users){
// true, because the server told us only Alice can create a blog post.
users.length === 1;
// false, because Alice is the only user present in `users`
users[0] === bob;
});
```
@method filter
@static
@for RSVP
@param {Array} promises
@param {Function} filterFn - function to be called on each resolved value to
filter the final results.
@param {String} label optional string describing the promise. Useful for
tooling.
@return {Promise}
*/
// Helper for filter(): resolve an array of promises/values together.
function resolveAll(entries, label) {
  return Promise.all(entries, label);
}
// Helper for filter(): resolve a promise that is expected to fulfill with an
// array, then resolve that array's entries as well.
function resolveSingle(promise, label) {
  return Promise.resolve(promise, label).then(function (entries) {
    return resolveAll(entries, label);
  });
}
// Fulfill with the subset of resolved values for which `filterFn` yields a
// truthy (possibly async) result. See doc comment above.
function filter(promises, filterFn, label) {
// Accept an array, or a thenable expected to fulfill with an array.
if (!isArray(promises) && !(isObject(promises) && promises.then !== undefined)) {
return Promise.reject(new TypeError("RSVP.filter must be called with an array or promise"), label);
}
if (!isFunction(filterFn)) {
return Promise.reject(new TypeError("RSVP.filter expects function as a second argument"), label);
}
var promise = isArray(promises) ? resolveAll(promises, label) : resolveSingle(promises, label);
return promise.then(function (values) {
var length = values.length;
var filtered = new Array(length);
// Phase 1: run filterFn on every value; results may themselves be thenables.
for (var i = 0; i < length; i++) {
filtered[i] = filterFn(values[i]);
}
// Phase 2: await all filter results, then keep values whose result is truthy.
return resolveAll(filtered, label).then(function (filtered) {
var results = new Array(length);
var newLength = 0;
for (var _i = 0; _i < length; _i++) {
if (filtered[_i]) {
results[newLength] = values[_i];
newLength++;
}
}
// Trim the over-allocated array down to the kept values.
results.length = newLength;
return results;
});
});
}
// Number of occupied slots in queue$1 (two slots per job: callback + arg).
var len = 0;
// vert.x scheduling hook; assigned in attemptVertex() when available.
var vertxNext = void 0;
// Enqueue `callback(arg)` to run on the next asynchronous flush.
function asap(callback, arg) {
queue$1[len] = callback;
queue$1[len + 1] = arg;
len += 2;
if (len === 2) {
// len === 2 means the queue was previously empty, so an async flush must
// be scheduled. Any callbacks queued before that flush runs will all be
// processed by the flush we are scheduling here.
scheduleFlush$1();
}
}
// `window` in a browser, otherwise undefined (e.g. under Node).
var browserWindow = typeof window !== 'undefined' ? window : undefined;
var browserGlobal = browserWindow || {};
// MutationObserver, including the legacy WebKit-prefixed variant.
var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
// Node.js detection: no `self` global and `process` is the real process object.
var isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
// test for web worker but not in IE10
var isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';
// node
// Node scheduler: flush via process.nextTick, except on Node 0.10.x which
// warns when nextTick is used recursively — there, setImmediate is the
// recommended replacement.
function useNextTick() {
  var parsed = process.versions.node.match(/^(?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)$/);
  var isNodeTen = Array.isArray(parsed) && parsed[1] === '0' && parsed[2] === '10';
  var schedule = isNodeTen ? setImmediate : process.nextTick;
  return function () {
    return schedule(flush);
  };
}
// vertx
// vert.x scheduler: use the runOnLoop/runOnContext hook captured in
// vertxNext, falling back to plain setTimeout when it was never assigned.
function useVertxTimer() {
  if (typeof vertxNext === 'undefined') {
    return useSetTimeout();
  }
  return function () {
    vertxNext(flush);
  };
}
// Browser scheduler: observe an off-document text node; toggling its data
// between 1 and 0 fires the MutationObserver (and thus flush) as a microtask.
function useMutationObserver() {
  var toggle = 0;
  var observer = new BrowserMutationObserver(flush);
  var node = document.createTextNode('');
  observer.observe(node, { characterData: true });
  return function () {
    toggle = (toggle + 1) % 2;
    return node.data = toggle;
  };
}
// web worker
// Web-worker scheduler: posting any message to port2 delivers it to port1's
// onmessage handler (flush) on a fresh task.
function useMessageChannel() {
  var mc = new MessageChannel();
  mc.port1.onmessage = flush;
  return function () {
    return mc.port2.postMessage(0);
  };
}
// Last-resort scheduler: a 1ms timeout. Slowest option, but universal.
function useSetTimeout() {
  return function () {
    return setTimeout(flush, 1);
  };
}
// Pre-allocated job queue; slots are consumed in (callback, arg) pairs.
var queue$1 = new Array(1000);
// Drain the queue in FIFO order, invoking each callback with its argument
// and clearing slots so the values can be garbage collected.
function flush() {
for (var i = 0; i < len; i += 2) {
var callback = queue$1[i];
var arg = queue$1[i + 1];
callback(arg);
queue$1[i] = undefined;
queue$1[i + 1] = undefined;
}
len = 0;
}
// Probe for the vert.x environment; capture its scheduling hook if present,
// otherwise fall back to setTimeout. The indirect `require` keeps bundlers
// from trying to statically resolve the 'vertx' module.
function attemptVertex() {
try {
var r = require;
var vertx = r('vertx');
vertxNext = vertx.runOnLoop || vertx.runOnContext;
return useVertxTimer();
} catch (e) {
// Not running under vert.x (require missing or module not found).
return useSetTimeout();
}
}
var scheduleFlush$1 = void 0;
// Decide which async method to use to trigger processing of queued callbacks:
if (isNode) {
scheduleFlush$1 = useNextTick();
} else if (BrowserMutationObserver) {
scheduleFlush$1 = useMutationObserver();
} else if (isWorker) {
scheduleFlush$1 = useMessageChannel();
} else if (browserWindow === undefined && typeof require === 'function') {
scheduleFlush$1 = attemptVertex();
} else {
scheduleFlush$1 = useSetTimeout();
}
// Resolve the environment's global object (`self` in browsers/workers,
// `global` in Node).
var platform = void 0;
/* global self */
if (typeof self === 'object') {
platform = self;
/* global global */
} else if (typeof global === 'object') {
platform = global;
} else {
throw new Error('no global: `self` or `global` found');
}
// Holds the default-export object assembled below.
var _asap$cast$Promise$Ev;
// Babel helper: assign `value` to `obj[key]`. When the key already exists it
// is redefined via defineProperty so the descriptor stays enumerable,
// configurable, and writable.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
// defaults
config.async = asap;
config.after = function (cb) {
return setTimeout(cb, 0);
};
// Deprecated alias for resolve.
var cast = resolve$2;
// Schedule `callback(arg)` using the currently configured async strategy.
var async = function (callback, arg) {
return config.async(callback, arg);
};
// Forward all arguments to the shared config event registry's `on`.
function on() {
  var register = config['on'];
  register.apply(config, arguments);
}
// Forward all arguments to the shared config event registry's `off`.
function off() {
  var unregister = config['off'];
  unregister.apply(config, arguments);
}
// Set up instrumentation through `window.__PROMISE_INSTRUMENTATION__`
if (typeof window !== 'undefined' && typeof window['__PROMISE_INSTRUMENTATION__'] === 'object') {
var callbacks = window['__PROMISE_INSTRUMENTATION__'];
configure('instrument', true);
// Register each provided instrumentation callback as an event listener.
for (var eventName in callbacks) {
if (callbacks.hasOwnProperty(eventName)) {
on(eventName, callbacks[eventName]);
}
}
}
// the default export here is for backwards compat:
// https://github.com/tildeio/rsvp.js/issues/434
// ('async' and 'filter' are attached via _defineProperty because they were
// compiled as computed/added keys.)
var rsvp = (_asap$cast$Promise$Ev = {
asap: asap,
cast: cast,
Promise: Promise,
EventTarget: EventTarget,
all: all$1,
allSettled: allSettled,
race: race$1,
hash: hash,
hashSettled: hashSettled,
rethrow: rethrow,
defer: defer,
denodeify: denodeify,
configure: configure,
on: on,
off: off,
resolve: resolve$2,
reject: reject$2,
map: map
}, _defineProperty(_asap$cast$Promise$Ev, 'async', async), _defineProperty(_asap$cast$Promise$Ev, 'filter', filter), _asap$cast$Promise$Ev);
// Named exports mirror the default-export object above.
exports['default'] = rsvp;
exports.asap = asap;
exports.cast = cast;
exports.Promise = Promise;
exports.EventTarget = EventTarget;
exports.all = all$1;
exports.allSettled = allSettled;
exports.race = race$1;
exports.hash = hash;
exports.hashSettled = hashSettled;
exports.rethrow = rethrow;
exports.defer = defer;
exports.denodeify = denodeify;
exports.configure = configure;
exports.on = on;
exports.off = off;
exports.resolve = resolve$2;
exports.reject = reject$2;
exports.map = map;
exports.async = async;
exports.filter = filter;
Object.defineProperty(exports, '__esModule', { value: true });
})));
//
// EPUB.js namespace and core utility container.
var EPUBJS = EPUBJS || {};
EPUBJS.core = {};
// DOM Node.nodeType constants.
var ELEMENT_NODE = 1;
var TEXT_NODE = 3;
var COMMENT_NODE = 8;
var DOCUMENT_NODE = 9;
//-- Get an element by its id
EPUBJS.core.getEl = function(elem) {
  var doc = document;
  return doc.getElementById(elem);
};
//-- Get all elements matching a class name (live HTMLCollection)
EPUBJS.core.getEls = function(classes) {
  var doc = document;
  return doc.getElementsByClassName(classes);
};
// Fetch `url` via XMLHttpRequest and return an RSVP promise fulfilled with
// the response parsed according to `type` ('xml', 'xhtml', 'html', 'json',
// 'blob', 'binary', or inferred from the file extension when omitted).
// `withCredentials` enables cross-site cookies on the request.
EPUBJS.core.request = function(url, type, withCredentials) {
var supportsURL = window.URL;
// Safari lacks responseType 'blob'; fall back to arraybuffer there.
var BLOB_RESPONSE = supportsURL ? "blob" : "arraybuffer";
var deferred = new RSVP.defer();
var xhr = new XMLHttpRequest();
var uri;
//-- Check from PDF.js:
// https://github.com/mozilla/pdf.js/blob/master/web/compatibility.js
var xhrPrototype = XMLHttpRequest.prototype;
// readystatechange handler: parses the response per `type` and settles the
// deferred.
var handler = function() {
var r;
if (this.readyState != this.DONE) return;
if ((this.status === 200 || this.status === 0) && this.response) { // Android & Firefox reporting 0 for local & blob urls
if (type == 'xml'){
// If this.responseXML wasn't set, try to parse using a DOMParser from text
if(!this.responseXML) {
r = new DOMParser().parseFromString(this.response, "application/xml");
} else {
r = this.responseXML;
}
} else if (type == 'xhtml') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "application/xhtml+xml");
} else {
r = this.responseXML;
}
} else if (type == 'html') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "text/html");
} else {
r = this.responseXML;
}
} else if (type == 'json') {
r = JSON.parse(this.response);
} else if (type == 'blob') {
if (supportsURL) {
r = this.response;
} else {
//-- Safari doesn't support responseType blob, so create a blob from arraybuffer
r = new Blob([this.response]);
}
} else {
r = this.response;
}
deferred.resolve(r);
} else {
// Non-success status: reject with the raw response plus a stack trace.
deferred.reject({
message : this.response,
stack : new Error().stack
});
}
};
if (!('overrideMimeType' in xhrPrototype)) {
// IE10 might have response, but not overrideMimeType
// NOTE(review): this patches XMLHttpRequest.prototype globally with a no-op.
Object.defineProperty(xhrPrototype, 'overrideMimeType', {
value: function xmlHttpRequestOverrideMimeType(mimeType) {}
});
}
xhr.onreadystatechange = handler;
xhr.open("GET", url, true);
if(withCredentials) {
xhr.withCredentials = true;
}
// If type isn't set, determine it from the file extension
if(!type) {
uri = EPUBJS.core.uri(url);
type = uri.extension;
// Normalize 'htm' to 'html'.
type = {
'htm': 'html'
}[type] || type;
}
// Configure responseType / headers per the requested content type.
if(type == 'blob'){
xhr.responseType = BLOB_RESPONSE;
}
if(type == "json") {
xhr.setRequestHeader("Accept", "application/json");
}
if(type == 'xml') {
xhr.responseType = "document";
xhr.overrideMimeType('text/xml'); // for OPF parsing
}
if(type == 'xhtml') {
xhr.responseType = "document";
}
if(type == 'html') {
xhr.responseType = "document";
}
if(type == "binary") {
xhr.responseType = "arraybuffer";
}
xhr.send();
return deferred.promise;
};
EPUBJS.core.toArray = function(obj) {
	// Collect the own-property values of `obj` into an array, tagging each
	// value with its key on an "ident" field (note: mutates the values).
	var result = [];
	for (var key in obj) {
		if (!obj.hasOwnProperty(key)) continue;
		var value = obj[key];
		value.ident = key;
		result.push(value);
	}
	return result;
};
//-- Parse the different parts of a url, returning a object
EPUBJS.core.uri = function(url){
	// Split `url` into protocol / host / path / origin / directory / base /
	// filename / extension / fragment parts; `href` keeps the original input.
	var uri = {
		protocol : '',
		host : '',
		path : '',
		origin : '',
		directory : '',
		base : '',
		filename : '',
		extension : '',
		fragment : '',
		href : url
	},
	blob = url.indexOf('blob:'),
	doubleSlash = url.indexOf('://'),
	search = url.indexOf('?'),
	fragment = url.indexOf("#"),
	withoutProtocol,
	dot,
	firstSlash;
	if(blob === 0) {
		uri.protocol = "blob";
		// FIX: was `url.indexOf(0, fragment)`, which yields a number, not a
		// string. The base of a blob: URL is everything before the fragment.
		uri.base = fragment != -1 ? url.slice(0, fragment) : url;
		return uri;
	}
	// Strip (and record) the fragment and the query string before parsing.
	if(fragment != -1) {
		uri.fragment = url.slice(fragment + 1);
		url = url.slice(0, fragment);
	}
	if(search != -1) {
		uri.search = url.slice(search + 1);
		url = url.slice(0, search);
		// FIX: removed a stray `href = uri.href;` that only leaked a global.
	}
	if(doubleSlash != -1) {
		uri.protocol = url.slice(0, doubleSlash);
		withoutProtocol = url.slice(doubleSlash+3);
		firstSlash = withoutProtocol.indexOf('/');
		if(firstSlash === -1) {
			// FIX: was `uri.host = uri.path` (always '' at this point);
			// a URL with no path component is entirely host.
			uri.host = withoutProtocol;
			uri.path = "";
		} else {
			uri.host = withoutProtocol.slice(0, firstSlash);
			uri.path = withoutProtocol.slice(firstSlash);
		}
		uri.origin = uri.protocol + "://" + uri.host;
		uri.directory = EPUBJS.core.folder(uri.path);
		uri.base = uri.origin + uri.directory;
	} else {
		// No protocol: treat the whole string as a (relative) path.
		uri.path = url;
		uri.directory = EPUBJS.core.folder(url);
		uri.base = uri.directory;
	}
	//-- Filename is whatever remains once the base prefix is removed.
	uri.filename = url.replace(uri.base, '');
	dot = uri.filename.lastIndexOf('.');
	if(dot != -1) {
		uri.extension = uri.filename.slice(dot+1);
	}
	return uri;
};
//-- Parse out the folder, will return everything before the last slash
EPUBJS.core.folder = function(url){
	// Everything up to and including the last '/'. When there is no slash,
	// lastIndexOf returns -1 and slice(0, 0) yields '' — the old
	// `if(lastSlash == -1) var folder = '';` line was dead code that the
	// following assignment always overwrote.
	var lastSlash = url.lastIndexOf('/');
	return url.slice(0, lastSlash + 1);
};
//-- https://github.com/ebidel/filer.js/blob/master/src/filer.js#L128
EPUBJS.core.dataURLToBlob = function(dataURL) {
	// Convert a data: URL into a Blob, decoding base64 payloads when the
	// ';base64,' marker is present.
	var BASE64_MARKER = ';base64,';
	if (dataURL.indexOf(BASE64_MARKER) == -1) {
		// Plain (non-base64) payload can be used as-is.
		var plainParts = dataURL.split(',');
		var plainType = plainParts[0].split(':')[1];
		return new Blob([plainParts[1]], {type: plainType});
	}
	var parts = dataURL.split(BASE64_MARKER);
	var contentType = parts[0].split(':')[1];
	var raw = window.atob(parts[1]);
	var bytes = new Uint8Array(raw.length);
	for (var i = 0; i < raw.length; ++i) {
		bytes[i] = raw.charCodeAt(i);
	}
	return new Blob([bytes], {type: contentType});
};
//-- Load scripts async: http://stackoverflow.com/questions/7718935/load-scripts-asynchronously
EPUBJS.core.addScript = function(src, callback, target) {
	// Append a <script> tag to `target` (default: document.body) and run
	// `callback` exactly once after it has loaded.
	var fired = false;
	var script = document.createElement('script');
	script.type = 'text/javascript';
	script.async = false;
	script.src = src;
	script.onload = script.onreadystatechange = function() {
		// readyState covers legacy IE; the flag guards double invocation.
		if ( !fired && (!this.readyState || this.readyState == 'complete') ) {
			fired = true;
			if(callback) callback();
		}
	};
	(target || document.body).appendChild(script);
};
EPUBJS.core.addScripts = function(srcArr, callback, target) {
	// Load the scripts in `srcArr` sequentially, then invoke `callback`.
	var total = srcArr.length,
	curr = 0,
	cb = function(){
		curr++;
		if(total == curr){
			if(callback) callback();
		}else{
			EPUBJS.core.addScript(srcArr[curr], cb, target);
		}
	};
	// FIX: an empty list previously attempted to load srcArr[0] (undefined)
	// and the callback never fired; now it fires immediately.
	if(total === 0) {
		if(callback) callback();
		return;
	}
	EPUBJS.core.addScript(srcArr[curr], cb, target);
};
EPUBJS.core.addCss = function(src, callback, target) {
	// Append a stylesheet <link> to `target` (default: document.body) and
	// run `callback` exactly once after it has loaded.
	var fired = false;
	var link = document.createElement('link');
	link.type = 'text/css';
	link.rel = "stylesheet";
	link.href = src;
	link.onload = link.onreadystatechange = function() {
		// readyState covers legacy IE; the flag guards double invocation.
		if ( !fired && (!this.readyState || this.readyState == 'complete') ) {
			fired = true;
			if(callback) callback();
		}
	};
	(target || document.body).appendChild(link);
};
EPUBJS.core.prefixed = function(unprefixed) {
	// Return the vendor-prefixed form of a DOM style property name, or the
	// plain name when the browser supports it unprefixed.
	// FIX: removed the unused `prefixes` array (CSS-syntax prefixes that
	// were never referenced) and hoisted the style-object lookup.
	var vendors = ["Webkit", "Moz", "O", "ms" ],
	upper = unprefixed[0].toUpperCase() + unprefixed.slice(1),
	length = vendors.length,
	style = document.documentElement.style;
	if (typeof(style[unprefixed]) != 'undefined') {
		return unprefixed;
	}
	for ( var i=0; i < length; i++ ) {
		if (typeof(style[vendors[i] + upper]) != 'undefined') {
			return vendors[i] + upper;
		}
	}
	// Unsupported everywhere: hand the input back unchanged.
	return unprefixed;
};
EPUBJS.core.resolveUrl = function(base, path) {
	// Resolve a relative `path` against `base`; absolute URLs (with a host)
	// are returned untouched.
	var segments = [],
	uri = EPUBJS.core.uri(path),
	folders = base.split("/"),
	paths;
	if(uri.host) {
		return path;
	}
	// Drop the filename (or trailing empty segment) from the base.
	folders.pop();
	paths = path.split("/");
	paths.forEach(function(p){
		if(p === ".."){
			// FIX: ".." now consumes a previously collected path segment
			// first ("a/../b" -> "b"); only leading ".."s climb out of the
			// base (unchanged behavior for the common relative-path case).
			if(segments.length) {
				segments.pop();
			} else {
				folders.pop();
			}
		}else{
			segments.push(p);
		}
	});
	return folders.concat(segments).join("/");
};
// http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript
EPUBJS.core.uuid = function() {
	// Generate a random version-4 UUID string, mixing the clock into the
	// random stream to reduce collisions on coarse Math.random seeds.
	var d = new Date().getTime();
	var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
		var r = (d + Math.random()*16)%16 | 0;
		d = Math.floor(d/16);
		// FIX: RFC 4122 variant bits are 10xx, i.e. (r & 0x3 | 0x8) giving
		// 8-b; the old mask (r & 0x7 | 0x8) could emit invalid digits c-f.
		return (c=='x' ? r : (r&0x3|0x8)).toString(16);
	});
	return uuid;
};
// Fast quicksort insert for sorted array -- based on:
// http://stackoverflow.com/questions/1344500/efficient-way-to-insert-a-number-into-a-sorted-array-of-numbers
EPUBJS.core.insert = function(item, array, compareFunction) {
	// Insert `item` into the already-sorted `array`, keeping it sorted,
	// and return the index it was placed at.
	var position = EPUBJS.core.locationOf(item, array, compareFunction);
	array.splice(position, 0, item);
	return position;
};
EPUBJS.core.locationOf = function(item, array, compareFunction, _start, _end) {
	// Binary search: index at which `item` should be inserted into the
	// sorted `array` to keep it sorted.
	var start = _start || 0;
	var end = _end || array.length;
	var pivot = parseInt(start + (end - start) / 2);
	var compared;
	if(!compareFunction){
		compareFunction = function(a, b) {
			if(a > b) return 1;
			if(a < b) return -1;
			// FIX: was `if(a = b)` — an assignment, which made the
			// comparator return undefined instead of 0 for falsy-equal
			// values such as 0 or ''.
			if(a === b) return 0;
		};
	}
	if(end-start <= 0) {
		return pivot;
	}
	compared = compareFunction(array[pivot], item);
	if(end-start === 1) {
		return compared > 0 ? pivot : pivot + 1;
	}
	if(compared === 0) {
		return pivot;
	}
	if(compared === -1) {
		return EPUBJS.core.locationOf(item, array, compareFunction, pivot, end);
	} else{
		return EPUBJS.core.locationOf(item, array, compareFunction, start, pivot);
	}
};
EPUBJS.core.indexOfSorted = function(item, array, compareFunction, _start, _end) {
	// Binary search in a sorted array: index of `item`, or -1 when absent.
	var start = _start || 0;
	var end = _end || array.length;
	var pivot = parseInt(start + (end - start) / 2);
	var compared;
	if(!compareFunction){
		compareFunction = function(a, b) {
			if(a > b) return 1;
			if(a < b) return -1;
			// FIX: was `if(a = b)` — an assignment, which made the
			// comparator return undefined instead of 0 for falsy-equal
			// values such as 0 or ''.
			if(a === b) return 0;
		};
	}
	if(end-start <= 0) {
		return -1; // Not found
	}
	compared = compareFunction(array[pivot], item);
	if(end-start === 1) {
		return compared === 0 ? pivot : -1;
	}
	if(compared === 0) {
		return pivot; // Found
	}
	if(compared === -1) {
		return EPUBJS.core.indexOfSorted(item, array, compareFunction, pivot, end);
	} else{
		return EPUBJS.core.indexOfSorted(item, array, compareFunction, start, pivot);
	}
};
EPUBJS.core.queue = function(_scope){
	// Minimal FIFO of deferred method calls, executed against `scope`.
	var tasks = [];
	var scope = _scope;
	// Add an item to the queue.
	var enqueue = function(funcName, args, context) {
		tasks.push({
			"funcName" : funcName,
			"args" : args,
			"context" : context
		});
		return tasks;
	};
	// Run a single queued call (no-op when the queue is empty).
	var dequeue = function(){
		if(!tasks.length) return;
		var task = tasks.shift();
		scope[task.funcName].apply(task.context || scope, task.args);
	};
	// Drain the queue, running every pending call in order.
	var flush = function(){
		while(tasks.length) {
			dequeue();
		}
	};
	// Discard all pending calls without running them.
	var clear = function(){
		tasks = [];
	};
	var length = function(){
		return tasks.length;
	};
	return {
		"enqueue" : enqueue,
		"dequeue" : dequeue,
		"flush" : flush,
		"clear" : clear,
		"length" : length
	};
};
// From: https://code.google.com/p/fbug/source/browse/branches/firebug1.10/content/firebug/lib/xpath.js
/**
* Gets an XPath for an element which describes its hierarchical location.
*/
EPUBJS.core.getElementXPath = function(element) {
	// Prefer a direct id-based XPath when the element has an id;
	// otherwise fall back to a positional path from the document root.
	if (element && element.id) {
		return '//*[@id="' + element.id + '"]';
	}
	return EPUBJS.core.getElementTreeXPath(element);
};
EPUBJS.core.getElementTreeXPath = function(element) {
// Build a positional XPath (e.g. "./div[2]/p/text()[1]") from the document
// root down to `element`. Text nodes are addressed with text()[n]; XHTML
// documents get an explicit "xhtml:" prefix on every element step.
var paths = [];
var isXhtml = (element.ownerDocument.documentElement.getAttribute('xmlns') === "http://www.w3.org/1999/xhtml");
var index, nodeName, tagName, pathIndex;
if(element.nodeType === Node.TEXT_NODE){
// index = Array.prototype.indexOf.call(element.parentNode.childNodes, element) + 1;
index = EPUBJS.core.indexOfTextNode(element) + 1;
paths.push("text()["+index+"]");
element = element.parentNode;
}
// Use nodeName (instead of localName) so namespace prefix is included (if any).
for (; element && element.nodeType == 1; element = element.parentNode)
{
// Count same-named preceding siblings to get this element's position.
index = 0;
for (var sibling = element.previousSibling; sibling; sibling = sibling.previousSibling)
{
// Ignore document type declaration.
if (sibling.nodeType == Node.DOCUMENT_TYPE_NODE) {
continue;
}
if (sibling.nodeName == element.nodeName) {
++index;
}
}
nodeName = element.nodeName.toLowerCase();
tagName = (isXhtml ? "xhtml:" + nodeName : nodeName);
// XPath positions are 1-based; omit "[1]" when the element is unique.
pathIndex = (index ? "[" + (index+1) + "]" : "");
// Prepend: we walk child -> root but the path reads root -> child.
paths.splice(0, 0, tagName + pathIndex);
}
return paths.length ? "./" + paths.join("/") : null;
};
EPUBJS.core.nsResolver = function(prefix) {
	// Namespace lookup for document.evaluate(); null for unknown prefixes.
	var namespaces = {
		'xhtml' : 'http://www.w3.org/1999/xhtml',
		'epub': 'http://www.idpf.org/2007/ops'
	};
	return namespaces[prefix] || null;
};
//https://stackoverflow.com/questions/13482352/xquery-looking-for-text-with-single-quote/13483496#13483496
EPUBJS.core.cleanStringForXpath = function(str) {
	// XPath 1.0 has no string escaping, so a string containing both quote
	// characters must be assembled with concat(): each run of non-quote
	// characters is single-quoted, a literal ' is wrapped in double quotes
	// and a literal " in single quotes.
	var parts = str.match(/[^'"]+|['"]/g).map(function(piece){
		if (piece === "'") {
			return '\"\'\"'; // output "'"
		}
		if (piece === '"') {
			return "\'\"\'"; // output '"'
		}
		return "\'" + piece + "\'";
	});
	return "concat(\'\'," + parts.join(",") + ")";
};
EPUBJS.core.indexOfTextNode = function(textNode){
	// Position of `textNode` counting only TEXT_NODE siblings (-1 if the
	// walk somehow misses it). NOTE: this definition is reassigned by the
	// generic indexOfNode-based version later in the file.
	var siblings = textNode.parentNode.childNodes;
	var index = -1;
	for (var i = 0; i < siblings.length; i++) {
		var node = siblings[i];
		if(node.nodeType === Node.TEXT_NODE){
			index++;
		}
		if(node == textNode) break;
	}
	return index;
};
// Underscore
EPUBJS.core.defaults = function(obj) {
	// Underscore-style defaults: copy properties from each subsequent
	// source argument onto obj, but only where obj has no value yet.
	for (var i = 1; i < arguments.length; i++) {
		var source = arguments[i];
		for (var prop in source) {
			if (obj[prop] === void 0) obj[prop] = source[prop];
		}
	}
	return obj;
};
EPUBJS.core.extend = function(target) {
	// Copy own properties (including accessor descriptors) of every source
	// argument onto target, later sources winning; falsy sources skipped.
	[].slice.call(arguments, 1).forEach(function (source) {
		if(!source) return;
		Object.getOwnPropertyNames(source).forEach(function(propName) {
			Object.defineProperty(target, propName, Object.getOwnPropertyDescriptor(source, propName));
		});
	});
	return target;
};
EPUBJS.core.clone = function(obj) {
	// Shallow clone: arrays are sliced, everything else is copied property
	// by property into a fresh object.
	if (EPUBJS.core.isArray(obj)) {
		return obj.slice();
	}
	return EPUBJS.core.extend({}, obj);
};
EPUBJS.core.isElement = function(obj) {
	// True only for DOM element nodes (nodeType 1).
	return Boolean(obj && obj.nodeType == 1);
};
EPUBJS.core.isNumber = function(n) {
	// Finite, parseable numbers only (rejects NaN, Infinity, '').
	return isFinite(n) && !isNaN(parseFloat(n));
};
EPUBJS.core.isString = function(str) {
	// Accept both primitive strings and boxed String objects.
	return (str instanceof String) || (typeof str === 'string');
};
// Native Array.isArray when available; toString-tag fallback for pre-ES5.
EPUBJS.core.isArray = Array.isArray || function(obj) {
return Object.prototype.toString.call(obj) === '[object Array]';
};
// Lodash
EPUBJS.core.values = function(object) {
	// Lodash-style values(): own enumerable property values in key order;
	// falsy input yields an empty array.
	if(!object) return [];
	return Object.keys(object).map(function(key) {
		return object[key];
	});
};
EPUBJS.core.indexOfNode = function(node, typeId) {
	// Position of `node` among its siblings, counting only siblings whose
	// nodeType equals `typeId`; the scan stops at `node` itself.
	var siblings = node.parentNode.childNodes;
	var index = -1;
	for (var i = 0; i < siblings.length; i++) {
		var sibling = siblings[i];
		if (sibling.nodeType === typeId) {
			index++;
		}
		if (sibling == node) break;
	}
	return index;
}
// Position of `textNode` among its text-node siblings (overrides the
// earlier standalone implementation with the generic counter).
EPUBJS.core.indexOfTextNode = function(textNode) {
return EPUBJS.core.indexOfNode(textNode, TEXT_NODE);
}
// Position of `elementNode` among its element-node siblings.
EPUBJS.core.indexOfElementNode = function(elementNode) {
return EPUBJS.core.indexOfNode(elementNode, ELEMENT_NODE);
}
var EPUBJS = EPUBJS || {};
EPUBJS.reader = {};
EPUBJS.reader.plugins = {}; //-- Attach extra Controllers as plugins (like search?)
(function(root, $) {
	// UMD-style export of the ePubReader factory (also set on `root` so a
	// plain <script> include works).
	var previousReader = root.ePubReader || {};
	var ePubReader = root.ePubReader = function(path, options) {
		return new EPUBJS.Reader(path, options);
	};
	//exports to multiple environments
	if (typeof define === 'function' && define.amd) {
		//AMD
		// FIX: the AMD factory returned the undefined identifier `Reader`;
		// it now exports the ePubReader factory like the CommonJS branch.
		define(function(){ return ePubReader; });
	} else if (typeof module != "undefined" && module.exports) {
		//Node
		module.exports = ePubReader;
	}
})(window, jQuery);
EPUBJS.Reader = function(bookPath, _options) {
// Reader entry point: merges options (and URL query parameters) into
// this.settings, optionally restores settings persisted in localStorage,
// creates the ePub book + rendition, and boots every controller once the
// book reports ready.
var reader = this;
var book;
var plugin;
var $viewer = $("#viewer");
var search = window.location.search;
var parameters;
this.settings = EPUBJS.core.defaults(_options || {}, {
bookPath : bookPath,
restore : false,
reload : false,
bookmarks : undefined,
annotations : undefined,
contained : undefined,
bookKey : undefined,
styles : undefined,
sidebarReflow: false,
generatePagination: false,
history: true
});
// Override options with search parameters
if(search) {
parameters = search.slice(1).split("&");
parameters.forEach(function(p){
var split = p.split("=");
var name = split[0];
var value = split[1] || '';
reader.settings[name] = decodeURIComponent(value);
});
}
this.setBookKey(this.settings.bookPath); //-- This could be username + path or any unique string
if(this.settings.restore && this.isSaved()) {
this.applySavedSettings();
}
this.settings.styles = this.settings.styles || {
fontSize : "100%"
};
// The settings object is shared with the ePub book instance.
this.book = book = new ePub(this.settings.bookPath, this.settings);
this.offline = false;
this.sidebarOpen = false;
if(!this.settings.bookmarks) {
this.settings.bookmarks = [];
}
if(!this.settings.annotations) {
this.settings.annotations = [];
}
if(this.settings.generatePagination) {
book.generatePagination($viewer.width(), $viewer.height());
}
this.rendition = book.renderTo("viewer", {
ignoreClass: "annotator-hl",
width: "100%",
height: "100%"
});
// Resume at the persisted location when one was restored.
if(this.settings.previousLocationCfi) {
this.displayed = this.rendition.display(this.settings.previousLocationCfi);
} else {
this.displayed = this.rendition.display();
}
book.ready.then(function () {
// The callback is bound, so `this` and `reader` are the same object here.
reader.ReaderController = EPUBJS.reader.ReaderController.call(reader, book);
reader.SettingsController = EPUBJS.reader.SettingsController.call(reader, book);
reader.ControlsController = EPUBJS.reader.ControlsController.call(reader, book);
reader.SidebarController = EPUBJS.reader.SidebarController.call(reader, book);
reader.BookmarksController = EPUBJS.reader.BookmarksController.call(reader, book);
reader.NotesController = EPUBJS.reader.NotesController.call(reader, book);
window.addEventListener("hashchange", this.hashChanged.bind(this), false);
document.addEventListener('keydown', this.adjustFontSize.bind(this), false);
this.rendition.on("keydown", this.adjustFontSize.bind(this));
this.rendition.on("keydown", reader.ReaderController.arrowKeys.bind(this));
this.rendition.on("selected", this.selectedRange.bind(this));
}.bind(this)).then(function() {
reader.ReaderController.hideLoader();
}.bind(this));
// Call Plugins registered on EPUBJS.reader.plugins, same calling
// convention as the built-in controllers.
for(plugin in EPUBJS.reader.plugins) {
if(EPUBJS.reader.plugins.hasOwnProperty(plugin)) {
reader[plugin] = EPUBJS.reader.plugins[plugin].call(reader, book);
}
}
book.loaded.metadata.then(function(meta) {
reader.MetaController = EPUBJS.reader.MetaController.call(reader, meta);
});
book.loaded.navigation.then(function(navigation) {
reader.TocController = EPUBJS.reader.TocController.call(reader, navigation);
});
window.addEventListener("beforeunload", this.unload.bind(this), false);
return this;
};
EPUBJS.Reader.prototype.adjustFontSize = function(e) {
// Keyboard handler: Ctrl/Cmd with '+' / '-' / '0' grows, shrinks, or
// resets the book font size in 2% steps.
// NOTE(review): the new size is sent to book.setStyle but never written
// back to settings.styles.fontSize here, so unless setStyle mutates the
// shared settings object, repeated presses recompute from the same base
// value — confirm against the ePub book implementation.
var fontSize;
var interval = 2;
// keyCode values for the '=' / '+', '-', and '0' keys.
var PLUS = 187;
var MINUS = 189;
var ZERO = 48;
var MOD = (e.ctrlKey || e.metaKey );
if(!this.settings.styles) return;
if(!this.settings.styles.fontSize) {
this.settings.styles.fontSize = "100%";
}
// Strip the trailing '%' to get the numeric size.
fontSize = parseInt(this.settings.styles.fontSize.slice(0, -1));
if(MOD && e.keyCode == PLUS) {
e.preventDefault();
this.book.setStyle("fontSize", (fontSize + interval) + "%");
}
if(MOD && e.keyCode == MINUS){
e.preventDefault();
this.book.setStyle("fontSize", (fontSize - interval) + "%");
}
if(MOD && e.keyCode == ZERO){
e.preventDefault();
this.book.setStyle("fontSize", "100%");
}
};
EPUBJS.Reader.prototype.addBookmark = function(cfi) {
	// Store the CFI (ignoring duplicates) and notify listeners.
	if(this.isBookmarked(cfi) > -1) return;
	this.settings.bookmarks.push(cfi);
	this.trigger("reader:bookmarked", cfi);
};
EPUBJS.Reader.prototype.removeBookmark = function(cfi) {
	// Remove the CFI if present and notify listeners with its old index.
	var index = this.isBookmarked(cfi);
	if(index === -1) return;
	this.settings.bookmarks.splice(index, 1);
	this.trigger("reader:unbookmarked", index);
};
EPUBJS.Reader.prototype.isBookmarked = function(cfi) {
	// Index of the CFI in the bookmark list, or -1 when absent.
	return this.settings.bookmarks.indexOf(cfi);
};
/*
EPUBJS.Reader.prototype.searchBookmarked = function(cfi) {
var bookmarks = this.settings.bookmarks,
len = bookmarks.length,
i;
for(i = 0; i < len; i++) {
if (bookmarks[i]['cfi'] === cfi) return i;
}
return -1;
};
*/
EPUBJS.Reader.prototype.clearBookmarks = function() {
	// Drop every bookmark (not persisted until saveSettings runs).
	this.settings.bookmarks = [];
};
//-- Notes
EPUBJS.Reader.prototype.addNote = function(note) {
	// Append an annotation object to the reader's note list.
	this.settings.annotations.push(note);
};
EPUBJS.Reader.prototype.removeNote = function(note) {
	// Remove an annotation object from the note list (no-op when absent).
	var index = this.settings.annotations.indexOf(note);
	if( index === -1 ) return;
	// FIX: `delete` left an undefined hole in the array (which also
	// serializes as null via JSON.stringify); splice actually removes it.
	this.settings.annotations.splice(index, 1);
};
EPUBJS.Reader.prototype.clearNotes = function() {
	// Drop every annotation (not persisted until saveSettings runs).
	this.settings.annotations = [];
};
//-- Settings
EPUBJS.Reader.prototype.setBookKey = function(identifier){
	// Derive (once) the localStorage key that persists this book's settings;
	// later calls return the already-computed key.
	if(!this.settings.bookKey) {
		this.settings.bookKey = "epubjsreader:" + EPUBJS.VERSION + ":" + window.location.host + ":" + identifier;
	}
	return this.settings.bookKey;
};
//-- Checks if the book setting can be retrieved from localStorage
EPUBJS.Reader.prototype.isSaved = function(bookPath) {
	// True when settings for this book's key exist in localStorage.
	if(!localStorage) {
		return false;
	}
	return localStorage.getItem(this.settings.bookKey) !== null;
};
EPUBJS.Reader.prototype.removeSavedSettings = function() {
	// Delete this book's persisted settings; false when storage is absent.
	if(!localStorage) {
		return false;
	}
	localStorage.removeItem(this.settings.bookKey);
};
EPUBJS.Reader.prototype.applySavedSettings = function() {
	// Merge settings persisted in localStorage into the live settings.
	// Returns true only when stored settings were found and applied.
	if(!localStorage) {
		return false;
	}
	var stored;
	try {
		stored = JSON.parse(localStorage.getItem(this.settings.bookKey));
	} catch (e) {
		// Corrupt JSON in localStorage: behave as if nothing was saved.
		return false;
	}
	if(!stored) {
		return false;
	}
	// Merge nested style keys first so each style is defaulted individually.
	if(stored.styles) {
		this.settings.styles = EPUBJS.core.defaults(this.settings.styles || {}, stored.styles);
	}
	// Then merge the remaining top-level settings.
	this.settings = EPUBJS.core.defaults(this.settings, stored);
	return true;
};
EPUBJS.Reader.prototype.saveSettings = function(){
	// Persist the current settings (plus the current location) for this
	// book's key in localStorage.
	if(this.book) {
		// FIX: currentLocation() can be undefined before the first render
		// (e.g. when unload fires during startup) — the unguarded
		// `.start.cfi` access threw. slideOut() already guards the same way.
		var location = this.rendition.currentLocation();
		if (location && location.start) {
			this.settings.previousLocationCfi = location.start.cfi;
		}
	}
	if(!localStorage) {
		return false;
	}
	localStorage.setItem(this.settings.bookKey, JSON.stringify(this.settings));
};
EPUBJS.Reader.prototype.unload = function(){
	// beforeunload hook: persist settings when restore mode is enabled.
	if(this.settings.restore && localStorage) {
		this.saveSettings();
	}
};
EPUBJS.Reader.prototype.hashChanged = function(){
	// Navigate to the CFI carried in the URL fragment.
	var target = window.location.hash.slice(1);
	this.rendition.display(target);
};
EPUBJS.Reader.prototype.selectedRange = function(cfiRange){
	// Mirror the selected CFI range into the URL fragment / history.
	var cfiFragment = "#"+cfiRange;
	if(!this.settings.history) return;
	if(window.location.hash == cfiFragment) return;
	// Add CFI fragment to the history
	history.pushState({}, '', cfiFragment);
	this.currentLocationCfi = cfiRange;
};
//-- Enable binding events to reader
RSVP.EventTarget.mixin(EPUBJS.Reader.prototype);
EPUBJS.reader.BookmarksController = function() {
// Renders the bookmark list in the sidebar and keeps it in sync with the
// reader:bookmarked / reader:unbookmarked events emitted by the reader.
var reader = this;
var book = this.book;
var rendition = this.rendition;
var $bookmarks = $("#bookmarksView"),
$list = $bookmarks.find("#bookmarks");
var docfrag = document.createDocumentFragment();
var show = function() {
$bookmarks.show();
};
var hide = function() {
$bookmarks.hide();
};
// Monotonic counter giving each list item a unique DOM id; unbookmark
// events remove items by that index (see "reader:unbookmarked" below).
var counter = 0;
var createBookmarkItem = function(cfi) {
var listitem = document.createElement("li"),
link = document.createElement("a");
listitem.id = "bookmark-"+counter;
listitem.classList.add('list_item');
var spineItem = book.spine.get(cfi);
var tocItem;
// Prefer the TOC label when the CFI's spine position has one;
// fall back to showing the raw CFI string.
if (spineItem.index in book.navigation.toc) {
tocItem = book.navigation.toc[spineItem.index];
link.textContent = tocItem.label;
} else {
link.textContent = cfi;
}
link.href = cfi;
link.classList.add('bookmark_link');
link.addEventListener("click", function(event){
var cfi = this.getAttribute('href');
rendition.display(cfi);
event.preventDefault();
}, false);
listitem.appendChild(link);
counter++;
return listitem;
};
// Populate the list with bookmarks restored from settings, batched in a
// document fragment to touch the DOM once.
this.settings.bookmarks.forEach(function(cfi) {
var bookmark = createBookmarkItem(cfi);
docfrag.appendChild(bookmark);
});
$list.append(docfrag);
this.on("reader:bookmarked", function(cfi) {
var item = createBookmarkItem(cfi);
$list.append(item);
});
this.on("reader:unbookmarked", function(index) {
var $item = $("#bookmark-"+index);
$item.remove();
});
return {
"show" : show,
"hide" : hide
};
};
EPUBJS.reader.ControlsController = function(book) {
// Wires the toolbar controls: sidebar toggle, fullscreen, settings, and
// the bookmark button; also keeps the bookmark icon in sync on relocation.
var reader = this;
var rendition = this.rendition;
var $store = $("#store"),
$fullscreen = $("#fullscreen"),
$fullscreenicon = $("#fullscreenicon"),
$cancelfullscreenicon = $("#cancelfullscreenicon"),
$slider = $("#slider"),
$main = $("#main"),
$sidebar = $("#sidebar"),
$settings = $("#setting"),
$bookmark = $("#bookmark");
/*
var goOnline = function() {
reader.offline = false;
// $store.attr("src", $icon.data("save"));
};
var goOffline = function() {
reader.offline = true;
// $store.attr("src", $icon.data("saved"));
};
var fullscreen = false;
book.on("book:online", goOnline);
book.on("book:offline", goOffline);
*/
// Sidebar toggle: swap the icon and show/hide the sidebar panel.
$slider.on("click", function () {
if(reader.sidebarOpen) {
reader.SidebarController.hide();
$slider.addClass("icon-menu");
$slider.removeClass("icon-right");
} else {
reader.SidebarController.show();
$slider.addClass("icon-right");
$slider.removeClass("icon-menu");
}
});
// Fullscreen support only when the screenfull library is present.
if(typeof screenfull !== 'undefined') {
$fullscreen.on("click", function() {
screenfull.toggle($('#container')[0]);
});
if(screenfull.raw) {
document.addEventListener(screenfull.raw.fullscreenchange, function() {
// NOTE(review): `var fullscreen` only exists inside the commented-out
// block above, so this assignment creates/updates an implicit global.
fullscreen = screenfull.isFullscreen;
if(fullscreen) {
$fullscreen
.addClass("icon-resize-small")
.removeClass("icon-resize-full");
} else {
$fullscreen
.addClass("icon-resize-full")
.removeClass("icon-resize-small");
}
});
}
}
$settings.on("click", function() {
reader.SettingsController.show();
});
// Bookmark button toggles the bookmark at the current location.
$bookmark.on("click", function() {
var cfi = reader.rendition.currentLocation().start.cfi;
var bookmarked = reader.isBookmarked(cfi);
if(bookmarked === -1) { //-- Add bookmark
reader.addBookmark(cfi);
$bookmark
.addClass("icon-bookmark")
.removeClass("icon-bookmark-empty");
} else { //-- Remove Bookmark
reader.removeBookmark(cfi);
$bookmark
.removeClass("icon-bookmark")
.addClass("icon-bookmark-empty");
}
});
rendition.on('relocated', function(location){
var cfi = location.start.cfi;
var cfiFragment = "#" + cfi;
//-- Check if bookmarked
var bookmarked = reader.isBookmarked(cfi);
if(bookmarked === -1) { //-- Not bookmarked
$bookmark
.removeClass("icon-bookmark")
.addClass("icon-bookmark-empty");
} else { //-- Bookmarked
$bookmark
.addClass("icon-bookmark")
.removeClass("icon-bookmark-empty");
}
reader.currentLocationCfi = cfi;
// Update the History Location
if(reader.settings.history &&
window.location.hash != cfiFragment) {
// Add CFI fragment to the history
history.pushState({}, '', cfiFragment);
}
});
// No public API: this controller only installs event handlers.
return {
};
};
EPUBJS.reader.MetaController = function(meta) {
	// Populate the title bar (and document.title) from the book metadata.
	var title = meta.title;
	var author = meta.creator;
	document.title = title+" – "+author;
	$("#book-title").html(title);
	$("#chapter-title").html(author);
	$("#title-seperator").show();
};
EPUBJS.reader.NotesController = function() {
var book = this.book;
var rendition = this.rendition;
var reader = this;
var $notesView = $("#notesView");
var $notes = $("#notes");
var $text = $("#note-text");
var $anchor = $("#note-anchor");
var annotations = reader.settings.annotations;
var renderer = book.renderer;
var popups = [];
var epubcfi = new ePub.CFI();
var show = function() {
$notesView.show();
};
var hide = function() {
$notesView.hide();
}
var insertAtPoint = function(e) {
var range;
var textNode;
var offset;
var doc = book.renderer.doc;
var cfi;
var annotation;
// standard
if (doc.caretPositionFromPoint) {
range = doc.caretPositionFromPoint(e.clientX, e.clientY);
textNode = range.offsetNode;
offset = range.offset;
// WebKit
} else if (doc.caretRangeFromPoint) {
range = doc.caretRangeFromPoint(e.clientX, e.clientY);
textNode = range.startContainer;
offset = range.startOffset;
}
if (textNode.nodeType !== 3) {
for (var i=0; i < textNode.childNodes.length; i++) {
if (textNode.childNodes[i].nodeType == 3) {
textNode = textNode.childNodes[i];
break;
}
}
}
// Find the end of the sentance
offset = textNode.textContent.indexOf(".", offset);
if(offset === -1){
offset = textNode.length; // Last item
} else {
offset += 1; // After the period
}
cfi = epubcfi.generateCfiFromTextNode(textNode, offset, book.renderer.currentChapter.cfiBase);
annotation = {
annotatedAt: new Date(),
anchor: cfi,
body: $text.val()
}
// add to list
reader.addNote(annotation);
// attach
addAnnotation(annotation);
placeMarker(annotation);
// clear
$text.val('');
$anchor.text("Attach");
$text.prop("disabled", false);
rendition.off("click", insertAtPoint);
};
var addAnnotation = function(annotation){
var note = document.createElement("li");
var link = document.createElement("a");
note.innerHTML = annotation.body;
// note.setAttribute("ref", annotation.anchor);
link.innerHTML = " context »";
link.href = "#"+annotation.anchor;
link.onclick = function(){
rendition.display(annotation.anchor);
return false;
};
note.appendChild(link);
$notes.append(note);
};
var placeMarker = function(annotation){
var doc = book.renderer.doc;
var marker = document.createElement("span");
var mark = document.createElement("a");
marker.classList.add("footnotesuperscript", "reader_generated");
marker.style.verticalAlign = "super";
marker.style.fontSize = ".75em";
// marker.style.position = "relative";
marker.style.lineHeight = "1em";
// mark.style.display = "inline-block";
mark.style.padding = "2px";
mark.style.backgroundColor = "#fffa96";
mark.style.borderRadius = "5px";
mark.style.cursor = "pointer";
marker.id = "note-"+EPUBJS.core.uuid();
mark.innerHTML = annotations.indexOf(annotation) + 1 + "[Reader]";
marker.appendChild(mark);
epubcfi.addMarker(annotation.anchor, doc, marker);
markerEvents(marker, annotation.body);
}
var markerEvents = function(item, txt){
var id = item.id;
var showPop = function(){
var poppos,
iheight = renderer.height,
iwidth = renderer.width,
tip,
pop,
maxHeight = 225,
itemRect,
left,
top,
pos;
//-- create a popup with endnote inside of it
if(!popups[id]) {
popups[id] = document.createElement("div");
popups[id].setAttribute("class", "popup");
pop_content = document.createElement("div");
popups[id].appendChild(pop_content);
pop_content.innerHTML = txt;
pop_content.setAttribute("class", "pop_content");
renderer.render.document.body.appendChild(popups[id]);
//-- TODO: will these leak memory? - Fred
popups[id].addEventListener("mouseover", onPop, false);
popups[id].addEventListener("mouseout", offPop, false);
//-- Add hide on page change
rendition.on("locationChanged", hidePop, this);
rendition.on("locationChanged", offPop, this);
// chapter.book.on("renderer:chapterDestroy", hidePop, this);
}
pop = popups[id];
//-- get location of item
itemRect = item.getBoundingClientRect();
left = itemRect.left;
top = itemRect.top;
//-- show the popup
pop.classList.add("show");
//-- locations of popup
popRect = pop.getBoundingClientRect();
//-- position the popup
pop.style.left = left - popRect.width / 2 + "px";
pop.style.top = top + "px";
//-- Adjust max height
if(maxHeight > iheight / 2.5) {
maxHeight = iheight / 2.5;
pop_content.style.maxHeight = maxHeight + "px";
}
//-- switch above / below
if(popRect.height + top >= iheight - 25) {
pop.style.top = top - popRect.height + "px";
pop.classList.add("above");
}else{
pop.classList.remove("above");
}
//-- switch left
if(left - popRect.width <= 0) {
pop.style.left = left + "px";
pop.classList.add("left");
}else{
pop.classList.remove("left");
}
//-- switch right
if(left + popRect.width / 2 >= iwidth) {
//-- TEMP MOVE: 300
pop.style.left = left - 300 + "px";
popRect = pop.getBoundingClientRect();
pop.style.left = left - popRect.width + "px";
//-- switch above / below again
if(popRect.height + top >= iheight - 25) {
pop.style.top = top - popRect.height + "px";
pop.classList.add("above");
}else{
pop.classList.remove("above");
}
pop.classList.add("right");
}else{
pop.classList.remove("right");
}
}
var onPop = function(){
popups[id].classList.add("on");
}
var offPop = function(){
popups[id].classList.remove("on");
}
var hidePop = function(){
setTimeout(function(){
popups[id].classList.remove("show");
}, 100);
}
var openSidebar = function(){
reader.ReaderController.slideOut();
show();
};
item.addEventListener("mouseover", showPop, false);
item.addEventListener("mouseout", hidePop, false);
item.addEventListener("click", openSidebar, false);
}
$anchor.on("click", function(e){
$anchor.text("Cancel");
$text.prop("disabled", "true");
// listen for selection
rendition.on("click", insertAtPoint);
});
annotations.forEach(function(note) {
addAnnotation(note);
});
/*
renderer.registerHook("beforeChapterDisplay", function(callback, renderer){
var chapter = renderer.currentChapter;
annotations.forEach(function(note) {
var cfi = epubcfi.parse(note.anchor);
if(cfi.spinePos === chapter.spinePos) {
try {
placeMarker(note);
} catch(e) {
console.log("anchoring failed", note.anchor);
}
}
});
callback();
}, true);
*/
return {
"show" : show,
"hide" : hide
};
};
EPUBJS.reader.ReaderController = function(book) {
// Page-turning UI: prev/next controls (keyboard and buttons, honouring
// RTL reading direction), the loading indicator, the spread divider, and
// sliding the main view when the sidebar opens/closes.
var $main = $("#main"),
$divider = $("#divider"),
$loader = $("#loader"),
$next = $("#next"),
$prev = $("#prev");
var reader = this;
var book = this.book;
var rendition = this.rendition;
// Restore the main view to full width when the sidebar closes.
var slideIn = function() {
// NOTE(review): currentPosition is captured but never used; unlike
// slideOut below, currentLocation() is not guarded against undefined.
var currentPosition = rendition.currentLocation().start.cfi;
if (reader.settings.sidebarReflow){
$main.removeClass('single');
$main.one("transitionend", function(){
rendition.resize();
});
} else {
$main.removeClass("closed");
}
};
// Shrink the main view to make room for the sidebar.
var slideOut = function() {
var location = rendition.currentLocation();
if (!location) {
return;
}
// NOTE(review): currentPosition is captured but never used.
var currentPosition = location.start.cfi;
if (reader.settings.sidebarReflow){
$main.addClass('single');
$main.one("transitionend", function(){
rendition.resize();
});
} else {
$main.addClass("closed");
}
};
var showLoader = function() {
$loader.show();
hideDivider();
};
var hideLoader = function() {
$loader.hide();
//-- If the book is using spreads, show the divider
// if(book.settings.spreads) {
// showDivider();
// }
};
var showDivider = function() {
$divider.addClass("show");
};
var hideDivider = function() {
$divider.removeClass("show");
};
// NOTE(review): keylock is set during the 100ms arrow animation but is
// never consulted before handling a key, so it does not actually throttle.
var keylock = false;
var arrowKeys = function(e) {
if(e.keyCode == 37) {
// Left arrow: page direction depends on the book's reading direction.
if(book.package.metadata.direction === "rtl") {
rendition.next();
} else {
rendition.prev();
}
$prev.addClass("active");
keylock = true;
setTimeout(function(){
keylock = false;
$prev.removeClass("active");
}, 100);
e.preventDefault();
}
if(e.keyCode == 39) {
// Right arrow: mirror of the left-arrow handling.
if(book.package.metadata.direction === "rtl") {
rendition.prev();
} else {
rendition.next();
}
$next.addClass("active");
keylock = true;
setTimeout(function(){
keylock = false;
$next.removeClass("active");
}, 100);
e.preventDefault();
}
}
document.addEventListener('keydown', arrowKeys, false);
$next.on("click", function(e){
if(book.package.metadata.direction === "rtl") {
rendition.prev();
} else {
rendition.next();
}
e.preventDefault();
});
$prev.on("click", function(e){
if(book.package.metadata.direction === "rtl") {
rendition.next();
} else {
rendition.prev();
}
e.preventDefault();
});
// Show the spread divider only in two-page layouts.
rendition.on("layout", function(props){
if(props.spread === true) {
showDivider();
} else {
hideDivider();
}
});
// Grey out the paging arrows at the very start/end of the book.
rendition.on('relocated', function(location){
if (location.atStart) {
$prev.addClass("disabled");
}
if (location.atEnd) {
$next.addClass("disabled");
}
});
return {
"slideOut" : slideOut,
"slideIn" : slideIn,
"showLoader" : showLoader,
"hideLoader" : hideLoader,
"showDivider" : showDivider,
"hideDivider" : hideDivider,
"arrowKeys" : arrowKeys
};
};
EPUBJS.reader.SettingsController = function() {
	var book = this.book;
	var reader = this;

	var $settings = $("#settings-modal"),
			$overlay = $(".overlay");

	// The settings modal is shown/hidden purely by toggling "md-show".
	var show = function() {
		$settings.addClass("md-show");
	};

	var hide = function() {
		$settings.removeClass("md-show");
	};

	// Flip the reflow preference directly on the shared reader settings.
	$('#sidebarReflow').on('click', function() {
		reader.settings.sidebarReflow = !reader.settings.sidebarReflow;
	});

	// Both the close button and the backdrop dismiss the modal.
	$settings.find(".closer").on("click", hide);
	$overlay.on("click", hide);

	return {
		"show" : show,
		"hide" : hide
	};
};
EPUBJS.reader.SidebarController = function(book) {
	var reader = this;

	var $sidebar = $("#sidebar"),
			$panels = $("#panels");

	var activePanel = "Toc";

	// Swap the visible sidebar panel. No-op when the panel is already active
	// or when the reader has no controller registered for that view name.
	var changePanelTo = function(name) {
		var ctrl = name + "Controller";

		if(activePanel == name || typeof reader[ctrl] === 'undefined' ) return;

		reader[activePanel + "Controller"].hide();
		reader[ctrl].show();
		activePanel = name;

		// Move the "active" marker onto the matching panel button.
		$panels.find('.active').removeClass("active");
		$panels.find("#show-" + name).addClass("active");
	};

	var getActivePanel = function() {
		return activePanel;
	};

	var show = function() {
		reader.sidebarOpen = true;
		reader.ReaderController.slideOut();
		$sidebar.addClass("open");
	}

	var hide = function() {
		reader.sidebarOpen = false;
		reader.ReaderController.slideIn();
		$sidebar.removeClass("open");
	}

	// Panel buttons carry their target view in a data-view attribute.
	$panels.find(".show_view").on("click", function(event) {
		changePanelTo($(this).data("view"));
		event.preventDefault();
	});

	return {
		'show' : show,
		'hide' : hide,
		'getActivePanel' : getActivePanel,
		'changePanelTo' : changePanelTo
	};
};
EPUBJS.reader.TocController = function(toc) {
	var book = this.book;
	var rendition = this.rendition;

	var $list = $("#tocView"),
			docfrag = document.createDocumentFragment();

	var currentChapter = false;

	// Recursively build a nested <ul> for the table of contents.
	// `level` is passed down (not mutated) so sibling chapters all see the
	// same depth; the original `level++` bumped it once per sibling that had
	// subitems.
	var generateTocItems = function(toc, level) {
		var container = document.createElement("ul");

		if(!level) level = 1;

		toc.forEach(function(chapter) {
			var listitem = document.createElement("li"),
					link = document.createElement("a"),
					// was an accidental implicit global: the `var` list above was
					// terminated by ";" before this assignment
					toggle = document.createElement("a");
			var subitems;

			listitem.id = "toc-"+chapter.id;
			listitem.classList.add('list_item');

			link.textContent = chapter.label;
			link.href = chapter.href;

			link.classList.add('toc_link');

			listitem.appendChild(link);

			if(chapter.subitems && chapter.subitems.length > 0) {
				subitems = generateTocItems(chapter.subitems, level + 1);
				toggle.classList.add('toc_toggle');

				listitem.insertBefore(toggle, link);
				listitem.appendChild(subitems);
			}

			container.appendChild(listitem);

		});

		return container;
	};

	var onShow = function() {
		$list.show();
	};

	var onHide = function() {
		$list.hide();
	};

	// Highlight the TOC entry for the chapter that was just rendered and
	// expand its parents.
	var chapterChange = function(e) {
		var id = e.id,
				$item = $list.find("#toc-"+id),
				$current = $list.find(".currentChapter"),
				$open = $list.find('.openChapter');

		if($item.length){
			// Compare the underlying elements: two jQuery wrappers are never
			// `==`, so the original `$item != $current` was always true.
			if(!$item.is($current) && $item.has(currentChapter).length > 0) {
				$current.removeClass("currentChapter");
			}

			$item.addClass("currentChapter");

			// $open.removeClass("openChapter");
			$item.parents('li').addClass("openChapter");
		}
	};

	// "renderered" was a typo; Rendition emits "rendered", so chapterChange
	// never fired. NOTE(review): confirm the event payload exposes `.id` as
	// the handler above expects.
	rendition.on('rendered', chapterChange);

	var tocitems = generateTocItems(toc);

	docfrag.appendChild(tocitems);

	$list.append(docfrag);
	$list.find(".toc_link").on("click", function(event){
			var url = this.getAttribute('href');

			event.preventDefault();

			//-- Provide the Book with the url to show
			//   The Url must be found in the books manifest
			rendition.display(url);

			$list.find(".currentChapter")
					.addClass("openChapter")
					.removeClass("currentChapter");

			$(this).parent('li').addClass("currentChapter");

	});

	$list.find(".toc_toggle").on("click", function(event){
			var $el = $(this).parent('li'),
					open = $el.hasClass("openChapter");

			event.preventDefault();

			if(open){
				$el.removeClass("openChapter");
			} else {
				$el.addClass("openChapter");
			}
	});

	return {
		"show" : onShow,
		"hide" : onHide
	};
};
//# sourceMappingURL=reader.js.map | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/js/reader.js | reader.js |
window.hypothesisConfig = function() {
	var Annotator = window.Annotator;
	var $main = $("#main");

	// Sidebar subclass of Annotator.Host wired into the reader layout.
	// Note: the incoming `options` argument is discarded — a fixed
	// configuration is always used.
	function EpubAnnotationSidebar(elem, options) {
		options = {
			server: true,
			origin: true,
			showHighlights: true,
			Toolbar: {container: '#annotation-controls'}
		}

		Annotator.Host.call(this, elem, options);
	}

	EpubAnnotationSidebar.prototype = Object.create(Annotator.Host.prototype);

	// Slide the annotator frame in and switch the reader to single-page mode.
	EpubAnnotationSidebar.prototype.show = function() {
		var frame = this.frame;
		frame.css({
			'margin-left': (-1 * frame.width()) + "px"
		});
		frame.removeClass('annotator-collapsed');
		if (!$main.hasClass('single')) {
			$main.addClass("single");
			this.toolbar.find('[name=sidebar-toggle]')
				.removeClass('h-icon-chevron-left')
				.addClass('h-icon-chevron-right');
			this.setVisibleHighlights(true);
		}
	};

	// Collapse the frame back out and restore the two-page layout.
	EpubAnnotationSidebar.prototype.hide = function() {
		var frame = this.frame;
		frame.css({
			'margin-left': ''
		});
		frame.addClass('annotator-collapsed');
		if ($main.hasClass('single')) {
			$main.removeClass("single");
			this.toolbar.find('[name=sidebar-toggle]')
				.removeClass('h-icon-chevron-right')
				.addClass('h-icon-chevron-left');
			this.setVisibleHighlights(false);
		}
	};

	return {
		constructor: EpubAnnotationSidebar,
	}
};
// This is the Epub.js plugin. Annotations are updated on location change.
EPUBJS.reader.plugins.HypothesisController = function (Book) {
	var reader = this;
	var $main = $("#main");

	// Collect the annotations whose highlight spans are horizontally visible
	// inside the rendered iframe.
	var getVisibleAnnotations = function ($) {
		var width = Book.renderer.render.iframe.clientWidth;
		return $('.annotator-hl').map(function() {
			var left = this.getBoundingClientRect().left;
			if (left >= 0 && left <= width) {
				return $(this).data('annotation');
			}
		}).get();
	};

	// Push the currently visible annotations into the annotator sidebar,
	// if the rendered window has an annotator attached.
	var updateAnnotations = function () {
		var annotator = Book.renderer.render.window.annotator;
		if (annotator && annotator.constructor.$) {
			annotator.showAnnotations(getVisibleAnnotations(annotator.constructor.$))
		}
	};

	Book.on("renderer:locationChanged", updateAnnotations);

	return {}
};
EPUBJS.reader.search = {};

// Search Server -- https://github.com/futurepress/epubjs-search
EPUBJS.reader.search.SERVER = "https://pacific-cliffs-3579.herokuapp.com";

// Ask the search server for matches of `q` and hand the parsed JSON to
// `callback`. On failure the error is logged and `callback` never runs.
EPUBJS.reader.search.request = function(q, callback) {
	var url = EPUBJS.reader.search.SERVER + "/search?q=" + encodeURIComponent(q);
	var fetch = $.ajax({
		url: url,
		dataType: "json"
	});

	fetch.fail(function(err) {
		console.error(err);
	});

	fetch.done(function(results) {
		callback(results);
	});
};
EPUBJS.reader.plugins.SearchController = function(Book) {
	var reader = this;

	var $searchBox = $("#searchBox"),
			$searchResults = $("#searchResults"),
			$searchView = $("#searchView"),
			iframeDoc;

	var searchShown = false;

	// Query whose matches should be (re)highlighted whenever a chapter is
	// displayed; false when no result has been selected.
	var highlightQuery = false;

	var onShow = function() {
		query();
		searchShown = true;
		$searchView.addClass("shown");
	};

	var onHide = function() {
		searchShown = false;
		$searchView.removeClass("shown");
	};

	// Run the current search-box text against the search server and render
	// the results list, highlighting matches in the visible chapter.
	var query = function() {
		var q = $searchBox.val();

		if(q == '') {
			return;
		}

		$searchResults.empty();
		$searchResults.append("<li><p>Searching...</p></li>");

		EPUBJS.reader.search.request(q, function(data) {
			var results = data.results;

			$searchResults.empty();

			if(iframeDoc) {
				$(iframeDoc).find('body').unhighlight();
			}

			if(results.length == 0) {
				$searchResults.append("<li><p>No Results Found</p></li>");
				return;
			}

			iframeDoc = $("#viewer iframe")[0].contentDocument;
			$(iframeDoc).find('body').highlight(q, { element: 'span' });

			results.forEach(function(result) {
				var $li = $("<li></li>");
				// NOTE(review): server-supplied fields are interpolated into HTML
				// unescaped — a hostile search server could inject markup here.
				var $item = $("<a href='"+result.href+"' data-cfi='"+result.cfi+"'><span>"+result.title+"</span><p>"+result.highlight+"</p></a>");

				$item.on("click", function(e) {
					var $this = $(this),
							cfi = $this.data("cfi");

					e.preventDefault();
					// Remember the query so the single chapterDisplayed listener
					// below re-applies the highlight after navigation.
					highlightQuery = q;
					Book.gotoCfi(cfi+"/1:0");
				});
				$li.append($item);
				$searchResults.append($li);
			});
		});
	};

	// Registered once at setup: the original attached a fresh listener on
	// every result click, leaking handlers and re-running the highlight once
	// per past click.
	Book.on("renderer:chapterDisplayed", function() {
		if(!highlightQuery) return;
		iframeDoc = $("#viewer iframe")[0].contentDocument;
		$(iframeDoc).find('body').highlight(highlightQuery, { element: 'span' });
	});

	$searchBox.on("search", function(e) {
		var q = $searchBox.val();

		//-- SearchBox is empty or cleared
		if(q == '') {
			$searchResults.empty();
			if(reader.SidebarController.getActivePanel() == "Search") {
				reader.SidebarController.changePanelTo("Toc");
			}

			$(iframeDoc).find('body').unhighlight();
			iframeDoc = false;
			highlightQuery = false;
			return;
		}

		reader.SidebarController.changePanelTo("Search");

		e.preventDefault();
	});

	return {
		"show" : onShow,
		"hide" : onHide
	};
};
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.JSZip=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){"use strict";var d=a("./utils"),e=a("./support"),f="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";c.encode=function(a){for(var b,c,e,g,h,i,j,k=[],l=0,m=a.length,n=m,o="string"!==d.getTypeOf(a);l<a.length;)n=m-l,o?(b=a[l++],c=l<m?a[l++]:0,e=l<m?a[l++]:0):(b=a.charCodeAt(l++),c=l<m?a.charCodeAt(l++):0,e=l<m?a.charCodeAt(l++):0),g=b>>2,h=(3&b)<<4|c>>4,i=n>1?(15&c)<<2|e>>6:64,j=n>2?63&e:64,k.push(f.charAt(g)+f.charAt(h)+f.charAt(i)+f.charAt(j));return k.join("")},c.decode=function(a){var b,c,d,g,h,i,j,k=0,l=0,m="data:";if(a.substr(0,m.length)===m)throw new Error("Invalid base64 input, it looks like a data url.");a=a.replace(/[^A-Za-z0-9\+\/\=]/g,"");var n=3*a.length/4;if(a.charAt(a.length-1)===f.charAt(64)&&n--,a.charAt(a.length-2)===f.charAt(64)&&n--,n%1!==0)throw new Error("Invalid base64 input, bad content length.");var o;for(o=e.uint8array?new Uint8Array(0|n):new Array(0|n);k<a.length;)g=f.indexOf(a.charAt(k++)),h=f.indexOf(a.charAt(k++)),i=f.indexOf(a.charAt(k++)),j=f.indexOf(a.charAt(k++)),b=g<<2|h>>4,c=(15&h)<<4|i>>2,d=(3&i)<<6|j,o[l++]=b,64!==i&&(o[l++]=c),64!==j&&(o[l++]=d);return o}},{"./support":30,"./utils":32}],2:[function(a,b,c){"use strict";function 
d(a,b,c,d,e){this.compressedSize=a,this.uncompressedSize=b,this.crc32=c,this.compression=d,this.compressedContent=e}var e=a("./external"),f=a("./stream/DataWorker"),g=a("./stream/DataLengthProbe"),h=a("./stream/Crc32Probe"),g=a("./stream/DataLengthProbe");d.prototype={getContentWorker:function(){var a=new f(e.Promise.resolve(this.compressedContent)).pipe(this.compression.uncompressWorker()).pipe(new g("data_length")),b=this;return a.on("end",function(){if(this.streamInfo.data_length!==b.uncompressedSize)throw new Error("Bug : uncompressed data size mismatch")}),a},getCompressedWorker:function(){return new f(e.Promise.resolve(this.compressedContent)).withStreamInfo("compressedSize",this.compressedSize).withStreamInfo("uncompressedSize",this.uncompressedSize).withStreamInfo("crc32",this.crc32).withStreamInfo("compression",this.compression)}},d.createWorkerFrom=function(a,b,c){return a.pipe(new h).pipe(new g("uncompressedSize")).pipe(b.compressWorker(c)).pipe(new g("compressedSize")).withStreamInfo("compression",b)},b.exports=d},{"./external":6,"./stream/Crc32Probe":25,"./stream/DataLengthProbe":26,"./stream/DataWorker":27}],3:[function(a,b,c){"use strict";var d=a("./stream/GenericWorker");c.STORE={magic:"\0\0",compressWorker:function(a){return new d("STORE compression")},uncompressWorker:function(){return new d("STORE decompression")}},c.DEFLATE=a("./flate")},{"./flate":7,"./stream/GenericWorker":28}],4:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b[g])];return a^-1}function f(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b.charCodeAt(g))];return a^-1}var g=a("./utils"),h=d();b.exports=function(a,b){if("undefined"==typeof a||!a.length)return 0;var c="string"!==g.getTypeOf(a);return c?e(0|b,a,a.length,0):f(0|b,a,a.length,0)}},{"./utils":32}],5:[function(a,b,c){"use 
strict";c.base64=!1,c.binary=!1,c.dir=!1,c.createFolders=!0,c.date=null,c.compression=null,c.compressionOptions=null,c.comment=null,c.unixPermissions=null,c.dosPermissions=null},{}],6:[function(a,b,c){"use strict";var d=null;d="undefined"!=typeof Promise?Promise:a("lie"),b.exports={Promise:d}},{lie:58}],7:[function(a,b,c){"use strict";function d(a,b){h.call(this,"FlateWorker/"+a),this._pako=null,this._pakoAction=a,this._pakoOptions=b,this.meta={}}var e="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Uint32Array,f=a("pako"),g=a("./utils"),h=a("./stream/GenericWorker"),i=e?"uint8array":"array";c.magic="\b\0",g.inherits(d,h),d.prototype.processChunk=function(a){this.meta=a.meta,null===this._pako&&this._createPako(),this._pako.push(g.transformTo(i,a.data),!1)},d.prototype.flush=function(){h.prototype.flush.call(this),null===this._pako&&this._createPako(),this._pako.push([],!0)},d.prototype.cleanUp=function(){h.prototype.cleanUp.call(this),this._pako=null},d.prototype._createPako=function(){this._pako=new f[this._pakoAction]({raw:!0,level:this._pakoOptions.level||-1});var a=this;this._pako.onData=function(b){a.push({data:b,meta:a.meta})}},c.compressWorker=function(a){return new d("Deflate",a)},c.uncompressWorker=function(){return new d("Inflate",{})}},{"./stream/GenericWorker":28,"./utils":32,pako:59}],8:[function(a,b,c){"use strict";function d(a,b,c,d){f.call(this,"ZipFileWorker"),this.bytesWritten=0,this.zipComment=b,this.zipPlatform=c,this.encodeFileName=d,this.streamFiles=a,this.accumulate=!1,this.contentBuffer=[],this.dirRecords=[],this.currentSourceOffset=0,this.entriesCount=0,this.currentFile=null,this._sources=[]}var e=a("../utils"),f=a("../stream/GenericWorker"),g=a("../utf8"),h=a("../crc32"),i=a("../signature"),j=function(a,b){var c,d="";for(c=0;c<b;c++)d+=String.fromCharCode(255&a),a>>>=8;return d},k=function(a,b){var c=a;return a||(c=b?16893:33204),(65535&c)<<16},l=function(a,b){return 
63&(a||0)},m=function(a,b,c,d,f,m){var n,o,p=a.file,q=a.compression,r=m!==g.utf8encode,s=e.transformTo("string",m(p.name)),t=e.transformTo("string",g.utf8encode(p.name)),u=p.comment,v=e.transformTo("string",m(u)),w=e.transformTo("string",g.utf8encode(u)),x=t.length!==p.name.length,y=w.length!==u.length,z="",A="",B="",C=p.dir,D=p.date,E={crc32:0,compressedSize:0,uncompressedSize:0};b&&!c||(E.crc32=a.crc32,E.compressedSize=a.compressedSize,E.uncompressedSize=a.uncompressedSize);var F=0;b&&(F|=8),r||!x&&!y||(F|=2048);var G=0,H=0;C&&(G|=16),"UNIX"===f?(H=798,G|=k(p.unixPermissions,C)):(H=20,G|=l(p.dosPermissions,C)),n=D.getUTCHours(),n<<=6,n|=D.getUTCMinutes(),n<<=5,n|=D.getUTCSeconds()/2,o=D.getUTCFullYear()-1980,o<<=4,o|=D.getUTCMonth()+1,o<<=5,o|=D.getUTCDate(),x&&(A=j(1,1)+j(h(s),4)+t,z+="up"+j(A.length,2)+A),y&&(B=j(1,1)+j(h(v),4)+w,z+="uc"+j(B.length,2)+B);var I="";I+="\n\0",I+=j(F,2),I+=q.magic,I+=j(n,2),I+=j(o,2),I+=j(E.crc32,4),I+=j(E.compressedSize,4),I+=j(E.uncompressedSize,4),I+=j(s.length,2),I+=j(z.length,2);var J=i.LOCAL_FILE_HEADER+I+s+z,K=i.CENTRAL_FILE_HEADER+j(H,2)+I+j(v.length,2)+"\0\0\0\0"+j(G,4)+j(d,4)+s+z+v;return{fileRecord:J,dirRecord:K}},n=function(a,b,c,d,f){var g="",h=e.transformTo("string",f(d));return g=i.CENTRAL_DIRECTORY_END+"\0\0\0\0"+j(a,2)+j(a,2)+j(b,4)+j(c,4)+j(h.length,2)+h},o=function(a){var b="";return b=i.DATA_DESCRIPTOR+j(a.crc32,4)+j(a.compressedSize,4)+j(a.uncompressedSize,4)};e.inherits(d,f),d.prototype.push=function(a){var b=a.meta.percent||0,c=this.entriesCount,d=this._sources.length;this.accumulate?this.contentBuffer.push(a):(this.bytesWritten+=a.data.length,f.prototype.push.call(this,{data:a.data,meta:{currentFile:this.currentFile,percent:c?(b+100*(c-d-1))/c:100}}))},d.prototype.openedSource=function(a){this.currentSourceOffset=this.bytesWritten,this.currentFile=a.file.name;var b=this.streamFiles&&!a.file.dir;if(b){var 
c=m(a,b,!1,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);this.push({data:c.fileRecord,meta:{percent:0}})}else this.accumulate=!0},d.prototype.closedSource=function(a){this.accumulate=!1;var b=this.streamFiles&&!a.file.dir,c=m(a,b,!0,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);if(this.dirRecords.push(c.dirRecord),b)this.push({data:o(a),meta:{percent:100}});else for(this.push({data:c.fileRecord,meta:{percent:0}});this.contentBuffer.length;)this.push(this.contentBuffer.shift());this.currentFile=null},d.prototype.flush=function(){for(var a=this.bytesWritten,b=0;b<this.dirRecords.length;b++)this.push({data:this.dirRecords[b],meta:{percent:100}});var c=this.bytesWritten-a,d=n(this.dirRecords.length,c,a,this.zipComment,this.encodeFileName);this.push({data:d,meta:{percent:100}})},d.prototype.prepareNextSource=function(){this.previous=this._sources.shift(),this.openedSource(this.previous.streamInfo),this.isPaused?this.previous.pause():this.previous.resume()},d.prototype.registerPrevious=function(a){this._sources.push(a);var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.closedSource(b.previous.streamInfo),b._sources.length?b.prepareNextSource():b.end()}),a.on("error",function(a){b.error(a)}),this},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this.previous&&this._sources.length?(this.prepareNextSource(),!0):this.previous||this._sources.length||this.generatedError?void 0:(this.end(),!0))},d.prototype.error=function(a){var b=this._sources;if(!f.prototype.error.call(this,a))return!1;for(var c=0;c<b.length;c++)try{b[c].error(a)}catch(a){}return!0},d.prototype.lock=function(){f.prototype.lock.call(this);for(var a=this._sources,b=0;b<a.length;b++)a[b].lock()},b.exports=d},{"../crc32":4,"../signature":23,"../stream/GenericWorker":28,"../utf8":31,"../utils":32}],9:[function(a,b,c){"use strict";var d=a("../compressions"),e=a("./ZipFileWorker"),f=function(a,b){var c=a||b,e=d[c];if(!e)throw 
new Error(c+" is not a valid compression method !");return e};c.generateWorker=function(a,b,c){var d=new e(b.streamFiles,c,b.platform,b.encodeFileName),g=0;try{a.forEach(function(a,c){g++;var e=f(c.options.compression,b.compression),h=c.options.compressionOptions||b.compressionOptions||{},i=c.dir,j=c.date;c._compressWorker(e,h).withStreamInfo("file",{name:a,dir:i,date:j,comment:c.comment||"",unixPermissions:c.unixPermissions,dosPermissions:c.dosPermissions}).pipe(d)}),d.entriesCount=g}catch(h){d.error(h)}return d}},{"../compressions":3,"./ZipFileWorker":8}],10:[function(a,b,c){"use strict";function d(){if(!(this instanceof d))return new d;if(arguments.length)throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");this.files={},this.comment=null,this.root="",this.clone=function(){var a=new d;for(var b in this)"function"!=typeof this[b]&&(a[b]=this[b]);return a}}d.prototype=a("./object"),d.prototype.loadAsync=a("./load"),d.support=a("./support"),d.defaults=a("./defaults"),d.version="3.1.5",d.loadAsync=function(a,b){return(new d).loadAsync(a,b)},d.external=a("./external"),b.exports=d},{"./defaults":5,"./external":6,"./load":11,"./object":15,"./support":30}],11:[function(a,b,c){"use strict";function d(a){return new f.Promise(function(b,c){var d=a.decompressed.getContentWorker().pipe(new i);d.on("error",function(a){c(a)}).on("end",function(){d.streamInfo.crc32!==a.decompressed.crc32?c(new Error("Corrupted zip : CRC32 mismatch")):b()}).resume()})}var e=a("./utils"),f=a("./external"),g=a("./utf8"),e=a("./utils"),h=a("./zipEntries"),i=a("./stream/Crc32Probe"),j=a("./nodejsUtils");b.exports=function(a,b){var c=this;return b=e.extend(b||{},{base64:!1,checkCRC32:!1,optimizedBinaryString:!1,createFolders:!1,decodeFileName:g.utf8decode}),j.isNode&&j.isStream(a)?f.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file.")):e.prepareContent("the loaded zip 
file",a,!0,b.optimizedBinaryString,b.base64).then(function(a){var c=new h(b);return c.load(a),c}).then(function(a){var c=[f.Promise.resolve(a)],e=a.files;if(b.checkCRC32)for(var g=0;g<e.length;g++)c.push(d(e[g]));return f.Promise.all(c)}).then(function(a){for(var d=a.shift(),e=d.files,f=0;f<e.length;f++){var g=e[f];c.file(g.fileNameStr,g.decompressed,{binary:!0,optimizedBinaryString:!0,date:g.date,dir:g.dir,comment:g.fileCommentStr.length?g.fileCommentStr:null,unixPermissions:g.unixPermissions,dosPermissions:g.dosPermissions,createFolders:b.createFolders})}return d.zipComment.length&&(c.comment=d.zipComment),c})}},{"./external":6,"./nodejsUtils":14,"./stream/Crc32Probe":25,"./utf8":31,"./utils":32,"./zipEntries":33}],12:[function(a,b,c){"use strict";function d(a,b){f.call(this,"Nodejs stream input adapter for "+a),this._upstreamEnded=!1,this._bindStream(b)}var e=a("../utils"),f=a("../stream/GenericWorker");e.inherits(d,f),d.prototype._bindStream=function(a){var b=this;this._stream=a,a.pause(),a.on("data",function(a){b.push({data:a,meta:{percent:0}})}).on("error",function(a){b.isPaused?this.generatedError=a:b.error(a)}).on("end",function(){b.isPaused?b._upstreamEnded=!0:b.end()})},d.prototype.pause=function(){return!!f.prototype.pause.call(this)&&(this._stream.pause(),!0)},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(this._upstreamEnded?this.end():this._stream.resume(),!0)},b.exports=d},{"../stream/GenericWorker":28,"../utils":32}],13:[function(a,b,c){"use strict";function d(a,b,c){e.call(this,b),this._helper=a;var d=this;a.on("data",function(a,b){d.push(a)||d._helper.pause(),c&&c(b)}).on("error",function(a){d.emit("error",a)}).on("end",function(){d.push(null)})}var e=a("readable-stream").Readable,f=a("../utils");f.inherits(d,e),d.prototype._read=function(){this._helper.resume()},b.exports=d},{"../utils":32,"readable-stream":16}],14:[function(a,b,c){"use strict";b.exports={isNode:"undefined"!=typeof Buffer,newBufferFrom:function(a,b){return 
new Buffer(a,b)},allocBuffer:function(a){return Buffer.alloc?Buffer.alloc(a):new Buffer(a)},isBuffer:function(a){return Buffer.isBuffer(a)},isStream:function(a){return a&&"function"==typeof a.on&&"function"==typeof a.pause&&"function"==typeof a.resume}}},{}],15:[function(a,b,c){"use strict";function d(a){return"[object RegExp]"===Object.prototype.toString.call(a)}var e=a("./utf8"),f=a("./utils"),g=a("./stream/GenericWorker"),h=a("./stream/StreamHelper"),i=a("./defaults"),j=a("./compressedObject"),k=a("./zipObject"),l=a("./generate"),m=a("./nodejsUtils"),n=a("./nodejs/NodejsStreamInputAdapter"),o=function(a,b,c){var d,e=f.getTypeOf(b),h=f.extend(c||{},i);h.date=h.date||new Date,null!==h.compression&&(h.compression=h.compression.toUpperCase()),"string"==typeof h.unixPermissions&&(h.unixPermissions=parseInt(h.unixPermissions,8)),h.unixPermissions&&16384&h.unixPermissions&&(h.dir=!0),h.dosPermissions&&16&h.dosPermissions&&(h.dir=!0),h.dir&&(a=q(a)),h.createFolders&&(d=p(a))&&r.call(this,d,!0);var l="string"===e&&h.binary===!1&&h.base64===!1;c&&"undefined"!=typeof c.binary||(h.binary=!l);var o=b instanceof j&&0===b.uncompressedSize;(o||h.dir||!b||0===b.length)&&(h.base64=!1,h.binary=!0,b="",h.compression="STORE",e="string");var s=null;s=b instanceof j||b instanceof g?b:m.isNode&&m.isStream(b)?new n(a,b):f.prepareContent(a,b,h.binary,h.optimizedBinaryString,h.base64);var t=new k(a,s,h);this.files[a]=t},p=function(a){"/"===a.slice(-1)&&(a=a.substring(0,a.length-1));var b=a.lastIndexOf("/");return b>0?a.substring(0,b):""},q=function(a){return"/"!==a.slice(-1)&&(a+="/"),a},r=function(a,b){return b="undefined"!=typeof b?b:i.createFolders,a=q(a),this.files[a]||o.call(this,a,null,{dir:!0,createFolders:b}),this.files[a]},s={load:function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},forEach:function(a){var b,c,d;for(b in 
this.files)this.files.hasOwnProperty(b)&&(d=this.files[b],c=b.slice(this.root.length,b.length),c&&b.slice(0,this.root.length)===this.root&&a(c,d))},filter:function(a){var b=[];return this.forEach(function(c,d){a(c,d)&&b.push(d)}),b},file:function(a,b,c){if(1===arguments.length){if(d(a)){var e=a;return this.filter(function(a,b){return!b.dir&&e.test(a)})}var f=this.files[this.root+a];return f&&!f.dir?f:null}return a=this.root+a,o.call(this,a,b,c),this},folder:function(a){if(!a)return this;if(d(a))return this.filter(function(b,c){return c.dir&&a.test(b)});var b=this.root+a,c=r.call(this,b),e=this.clone();return e.root=c.name,e},remove:function(a){a=this.root+a;var b=this.files[a];if(b||("/"!==a.slice(-1)&&(a+="/"),b=this.files[a]),b&&!b.dir)delete this.files[a];else for(var c=this.filter(function(b,c){return c.name.slice(0,a.length)===a}),d=0;d<c.length;d++)delete this.files[c[d].name];return this},generate:function(a){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},generateInternalStream:function(a){var b,c={};try{if(c=f.extend(a||{},{streamFiles:!1,compression:"STORE",compressionOptions:null,type:"",platform:"DOS",comment:null,mimeType:"application/zip",encodeFileName:e.utf8encode}),c.type=c.type.toLowerCase(),c.compression=c.compression.toUpperCase(),"binarystring"===c.type&&(c.type="string"),!c.type)throw new Error("No output type specified.");f.checkSupport(c.type),"darwin"!==c.platform&&"freebsd"!==c.platform&&"linux"!==c.platform&&"sunos"!==c.platform||(c.platform="UNIX"),"win32"===c.platform&&(c.platform="DOS");var d=c.comment||this.comment||"";b=l.generateWorker(this,c,d)}catch(i){b=new g("error"),b.error(i)}return new h(b,c.type||"string",c.mimeType)},generateAsync:function(a,b){return this.generateInternalStream(a).accumulate(b)},generateNodeStream:function(a,b){return 
a=a||{},a.type||(a.type="nodebuffer"),this.generateInternalStream(a).toNodejsStream(b)}};b.exports=s},{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":12,"./nodejsUtils":14,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31,"./utils":32,"./zipObject":35}],16:[function(a,b,c){b.exports=a("stream")},{stream:void 0}],17:[function(a,b,c){"use strict";function d(a){e.call(this,a);for(var b=0;b<this.data.length;b++)a[b]=255&a[b]}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data[this.zero+a]},d.prototype.lastIndexOfSignature=function(a){for(var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.length-4;f>=0;--f)if(this.data[f]===b&&this.data[f+1]===c&&this.data[f+2]===d&&this.data[f+3]===e)return f-this.zero;return-1},d.prototype.readAndCheckSignature=function(a){var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.readData(4);return b===f[0]&&c===f[1]&&d===f[2]&&e===f[3]},d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return[];var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],18:[function(a,b,c){"use strict";function d(a){this.data=a,this.length=a.length,this.index=0,this.zero=0}var e=a("../utils");d.prototype={checkOffset:function(a){this.checkIndex(this.index+a)},checkIndex:function(a){if(this.length<this.zero+a||a<0)throw new Error("End of data reached (data length = "+this.length+", asked index = "+a+"). 
Corrupted zip ?")},setIndex:function(a){this.checkIndex(a),this.index=a},skip:function(a){this.setIndex(this.index+a)},byteAt:function(a){},readInt:function(a){var b,c=0;for(this.checkOffset(a),b=this.index+a-1;b>=this.index;b--)c=(c<<8)+this.byteAt(b);return this.index+=a,c},readString:function(a){return e.transformTo("string",this.readData(a))},readData:function(a){},lastIndexOfSignature:function(a){},readAndCheckSignature:function(a){},readDate:function(){var a=this.readInt(4);return new Date(Date.UTC((a>>25&127)+1980,(a>>21&15)-1,a>>16&31,a>>11&31,a>>5&63,(31&a)<<1))}},b.exports=d},{"../utils":32}],19:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./Uint8ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./Uint8ArrayReader":21}],20:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data.charCodeAt(this.zero+a)},d.prototype.lastIndexOfSignature=function(a){return this.data.lastIndexOf(a)-this.zero},d.prototype.readAndCheckSignature=function(a){var b=this.readData(4);return a===b},d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],21:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return new Uint8Array(0);var b=this.data.subarray(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./ArrayReader":17}],22:[function(a,b,c){"use strict";var 
d=a("../utils"),e=a("../support"),f=a("./ArrayReader"),g=a("./StringReader"),h=a("./NodeBufferReader"),i=a("./Uint8ArrayReader");b.exports=function(a){var b=d.getTypeOf(a);return d.checkSupport(b),"string"!==b||e.uint8array?"nodebuffer"===b?new h(a):e.uint8array?new i(d.transformTo("uint8array",a)):new f(d.transformTo("array",a)):new g(a)}},{"../support":30,"../utils":32,"./ArrayReader":17,"./NodeBufferReader":19,"./StringReader":20,"./Uint8ArrayReader":21}],23:[function(a,b,c){"use strict";c.LOCAL_FILE_HEADER="PK",c.CENTRAL_FILE_HEADER="PK",c.CENTRAL_DIRECTORY_END="PK",c.ZIP64_CENTRAL_DIRECTORY_LOCATOR="PK",c.ZIP64_CENTRAL_DIRECTORY_END="PK",c.DATA_DESCRIPTOR="PK\b"},{}],24:[function(a,b,c){"use strict";function d(a){e.call(this,"ConvertWorker to "+a),this.destType=a}var e=a("./GenericWorker"),f=a("../utils");f.inherits(d,e),d.prototype.processChunk=function(a){this.push({data:f.transformTo(this.destType,a.data),meta:a.meta})},b.exports=d},{"../utils":32,"./GenericWorker":28}],25:[function(a,b,c){"use strict";function d(){e.call(this,"Crc32Probe"),this.withStreamInfo("crc32",0)}var e=a("./GenericWorker"),f=a("../crc32"),g=a("../utils");g.inherits(d,e),d.prototype.processChunk=function(a){this.streamInfo.crc32=f(a.data,this.streamInfo.crc32||0),this.push(a)},b.exports=d},{"../crc32":4,"../utils":32,"./GenericWorker":28}],26:[function(a,b,c){"use strict";function d(a){f.call(this,"DataLengthProbe for "+a),this.propName=a,this.withStreamInfo(a,0)}var e=a("../utils"),f=a("./GenericWorker");e.inherits(d,f),d.prototype.processChunk=function(a){if(a){var b=this.streamInfo[this.propName]||0;this.streamInfo[this.propName]=b+a.data.length}f.prototype.processChunk.call(this,a)},b.exports=d},{"../utils":32,"./GenericWorker":28}],27:[function(a,b,c){"use strict";function d(a){f.call(this,"DataWorker");var 
b=this;this.dataIsReady=!1,this.index=0,this.max=0,this.data=null,this.type="",this._tickScheduled=!1,a.then(function(a){b.dataIsReady=!0,b.data=a,b.max=a&&a.length||0,b.type=e.getTypeOf(a),b.isPaused||b._tickAndRepeat()},function(a){b.error(a)})}var e=a("../utils"),f=a("./GenericWorker"),g=16384;e.inherits(d,f),d.prototype.cleanUp=function(){f.prototype.cleanUp.call(this),this.data=null},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this._tickScheduled&&this.dataIsReady&&(this._tickScheduled=!0,e.delay(this._tickAndRepeat,[],this)),!0)},d.prototype._tickAndRepeat=function(){this._tickScheduled=!1,this.isPaused||this.isFinished||(this._tick(),this.isFinished||(e.delay(this._tickAndRepeat,[],this),this._tickScheduled=!0))},d.prototype._tick=function(){if(this.isPaused||this.isFinished)return!1;var a=g,b=null,c=Math.min(this.max,this.index+a);if(this.index>=this.max)return this.end();switch(this.type){case"string":b=this.data.substring(this.index,c);break;case"uint8array":b=this.data.subarray(this.index,c);break;case"array":case"nodebuffer":b=this.data.slice(this.index,c)}return this.index=c,this.push({data:b,meta:{percent:this.max?this.index/this.max*100:0}})},b.exports=d},{"../utils":32,"./GenericWorker":28}],28:[function(a,b,c){"use strict";function d(a){this.name=a||"default",this.streamInfo={},this.generatedError=null,this.extraStreamInfo={},this.isPaused=!0,this.isFinished=!1,this.isLocked=!1,this._listeners={data:[],end:[],error:[]},this.previous=null}d.prototype={push:function(a){this.emit("data",a)},end:function(){if(this.isFinished)return!1;this.flush();try{this.emit("end"),this.cleanUp(),this.isFinished=!0}catch(a){this.emit("error",a)}return!0},error:function(a){return!this.isFinished&&(this.isPaused?this.generatedError=a:(this.isFinished=!0,this.emit("error",a),this.previous&&this.previous.error(a),this.cleanUp()),!0)},on:function(a,b){return 
this._listeners[a].push(b),this},cleanUp:function(){this.streamInfo=this.generatedError=this.extraStreamInfo=null,this._listeners=[]},emit:function(a,b){if(this._listeners[a])for(var c=0;c<this._listeners[a].length;c++)this._listeners[a][c].call(this,b)},pipe:function(a){return a.registerPrevious(this)},registerPrevious:function(a){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.streamInfo=a.streamInfo,this.mergeStreamInfo(),this.previous=a;var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.end()}),a.on("error",function(a){b.error(a)}),this},pause:function(){return!this.isPaused&&!this.isFinished&&(this.isPaused=!0,this.previous&&this.previous.pause(),!0)},resume:function(){if(!this.isPaused||this.isFinished)return!1;this.isPaused=!1;var a=!1;return this.generatedError&&(this.error(this.generatedError),a=!0),this.previous&&this.previous.resume(),!a},flush:function(){},processChunk:function(a){this.push(a)},withStreamInfo:function(a,b){return this.extraStreamInfo[a]=b,this.mergeStreamInfo(),this},mergeStreamInfo:function(){for(var a in this.extraStreamInfo)this.extraStreamInfo.hasOwnProperty(a)&&(this.streamInfo[a]=this.extraStreamInfo[a])},lock:function(){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.isLocked=!0,this.previous&&this.previous.lock()},toString:function(){var a="Worker "+this.name;return this.previous?this.previous+" -> "+a:a}},b.exports=d},{}],29:[function(a,b,c){"use strict";function d(a,b,c){switch(a){case"blob":return h.newBlob(h.transformTo("arraybuffer",b),c);case"base64":return k.encode(b);default:return h.transformTo(a,b)}}function e(a,b){var c,d=0,e=null,f=0;for(c=0;c<b.length;c++)f+=b[c].length;switch(a){case"string":return b.join("");case"array":return Array.prototype.concat.apply([],b);case"uint8array":for(e=new Uint8Array(f),c=0;c<b.length;c++)e.set(b[c],d),d+=b[c].length;return e;case"nodebuffer":return 
Buffer.concat(b);default:throw new Error("concat : unsupported type '"+a+"'")}}function f(a,b){return new m.Promise(function(c,f){var g=[],h=a._internalType,i=a._outputType,j=a._mimeType;a.on("data",function(a,c){g.push(a),b&&b(c)}).on("error",function(a){g=[],f(a)}).on("end",function(){try{var a=d(i,e(h,g),j);c(a)}catch(b){f(b)}g=[]}).resume()})}function g(a,b,c){var d=b;switch(b){case"blob":case"arraybuffer":d="uint8array";break;case"base64":d="string"}try{this._internalType=d,this._outputType=b,this._mimeType=c,h.checkSupport(d),this._worker=a.pipe(new i(d)),a.lock()}catch(e){this._worker=new j("error"),this._worker.error(e)}}var h=a("../utils"),i=a("./ConvertWorker"),j=a("./GenericWorker"),k=a("../base64"),l=a("../support"),m=a("../external"),n=null;if(l.nodestream)try{n=a("../nodejs/NodejsStreamOutputAdapter")}catch(o){}g.prototype={accumulate:function(a){return f(this,a)},on:function(a,b){var c=this;return"data"===a?this._worker.on(a,function(a){b.call(c,a.data,a.meta)}):this._worker.on(a,function(){h.delay(b,arguments,c)}),this},resume:function(){return h.delay(this._worker.resume,[],this._worker),this},pause:function(){return this._worker.pause(),this},toNodejsStream:function(a){if(h.checkSupport("nodestream"),"nodebuffer"!==this._outputType)throw new Error(this._outputType+" is not supported by this method");return new n(this,{objectMode:"nodebuffer"!==this._outputType},a)}},b.exports=g},{"../base64":1,"../external":6,"../nodejs/NodejsStreamOutputAdapter":13,"../support":30,"../utils":32,"./ConvertWorker":24,"./GenericWorker":28}],30:[function(a,b,c){"use strict";if(c.base64=!0,c.array=!0,c.string=!0,c.arraybuffer="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof Uint8Array,c.nodebuffer="undefined"!=typeof Buffer,c.uint8array="undefined"!=typeof Uint8Array,"undefined"==typeof ArrayBuffer)c.blob=!1;else{var d=new ArrayBuffer(0);try{c.blob=0===new Blob([d],{type:"application/zip"}).size}catch(e){try{var 
f=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,g=new f;g.append(d),c.blob=0===g.getBlob("application/zip").size}catch(e){c.blob=!1}}}try{c.nodestream=!!a("readable-stream").Readable}catch(e){c.nodestream=!1}},{"readable-stream":16}],31:[function(a,b,c){"use strict";function d(){i.call(this,"utf-8 decode"),this.leftOver=null}function e(){i.call(this,"utf-8 encode")}for(var f=a("./utils"),g=a("./support"),h=a("./nodejsUtils"),i=a("./stream/GenericWorker"),j=new Array(256),k=0;k<256;k++)j[k]=k>=252?6:k>=248?5:k>=240?4:k>=224?3:k>=192?2:1;j[254]=j[254]=1;var l=function(a){var b,c,d,e,f,h=a.length,i=0;for(e=0;e<h;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=g.uint8array?new Uint8Array(i):new Array(i),f=0,e=0;f<i;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),c<128?b[f++]=c:c<2048?(b[f++]=192|c>>>6,b[f++]=128|63&c):c<65536?(b[f++]=224|c>>>12,b[f++]=128|c>>>6&63,b[f++]=128|63&c):(b[f++]=240|c>>>18,b[f++]=128|c>>>12&63,b[f++]=128|c>>>6&63,b[f++]=128|63&c);return b},m=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+j[a[c]]>b?c:b},n=function(a){var b,c,d,e,g=a.length,h=new Array(2*g);for(c=0,b=0;b<g;)if(d=a[b++],d<128)h[c++]=d;else if(e=j[d],e>4)h[c++]=65533,b+=e-1;else{for(d&=2===e?31:3===e?15:7;e>1&&b<g;)d=d<<6|63&a[b++],e--;e>1?h[c++]=65533:d<65536?h[c++]=d:(d-=65536,h[c++]=55296|d>>10&1023,h[c++]=56320|1023&d)}return h.length!==c&&(h.subarray?h=h.subarray(0,c):h.length=c),f.applyFromCharCode(h)};c.utf8encode=function(a){return g.nodebuffer?h.newBufferFrom(a,"utf-8"):l(a)},c.utf8decode=function(a){return 
g.nodebuffer?f.transformTo("nodebuffer",a).toString("utf-8"):(a=f.transformTo(g.uint8array?"uint8array":"array",a),n(a))},f.inherits(d,i),d.prototype.processChunk=function(a){var b=f.transformTo(g.uint8array?"uint8array":"array",a.data);if(this.leftOver&&this.leftOver.length){if(g.uint8array){var d=b;b=new Uint8Array(d.length+this.leftOver.length),b.set(this.leftOver,0),b.set(d,this.leftOver.length)}else b=this.leftOver.concat(b);this.leftOver=null}var e=m(b),h=b;e!==b.length&&(g.uint8array?(h=b.subarray(0,e),this.leftOver=b.subarray(e,b.length)):(h=b.slice(0,e),this.leftOver=b.slice(e,b.length))),this.push({data:c.utf8decode(h),meta:a.meta})},d.prototype.flush=function(){this.leftOver&&this.leftOver.length&&(this.push({data:c.utf8decode(this.leftOver),meta:{}}),this.leftOver=null)},c.Utf8DecodeWorker=d,f.inherits(e,i),e.prototype.processChunk=function(a){this.push({data:c.utf8encode(a.data),meta:a.meta})},c.Utf8EncodeWorker=e},{"./nodejsUtils":14,"./stream/GenericWorker":28,"./support":30,"./utils":32}],32:[function(a,b,c){"use strict";function d(a){var b=null;return b=i.uint8array?new Uint8Array(a.length):new Array(a.length),f(a,b)}function e(a){return a}function f(a,b){for(var c=0;c<a.length;++c)b[c]=255&a.charCodeAt(c);return b}function g(a){var b=65536,d=c.getTypeOf(a),e=!0;if("uint8array"===d?e=n.applyCanBeUsed.uint8array:"nodebuffer"===d&&(e=n.applyCanBeUsed.nodebuffer),e)for(;b>1;)try{return n.stringifyByChunk(a,d,b)}catch(f){b=Math.floor(b/2)}return n.stringifyByChar(a)}function h(a,b){for(var c=0;c<a.length;c++)b[c]=a[c];
/* Vendored minified bundle (JSZip). This span: module 32 "./utils" -- newBlob (with
   legacy BlobBuilder fallback), chunked String.fromCharCode stringification, the `o`
   conversion table between string/array/arraybuffer/uint8array/nodebuffer,
   transformTo/getTypeOf/checkSupport, pretty (hex dump of a string), delay (via
   setImmediate), inherits, extend, and prepareContent (Promise-based input
   normalization, reading Blobs through FileReader and optionally base64-decoding).
   The start of module 33 ("./zipEntries" constructor) begins at the end of this span
   and its string literals continue onto the following lines -- edit nothing here. */
return b}var i=a("./support"),j=a("./base64"),k=a("./nodejsUtils"),l=a("core-js/library/fn/set-immediate"),m=a("./external");c.newBlob=function(a,b){c.checkSupport("blob");try{return new Blob([a],{type:b})}catch(d){try{var e=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,f=new e;return f.append(a),f.getBlob(b)}catch(d){throw new Error("Bug : can't construct the Blob.")}}};var n={stringifyByChunk:function(a,b,c){var d=[],e=0,f=a.length;if(f<=c)return String.fromCharCode.apply(null,a);for(;e<f;)"array"===b||"nodebuffer"===b?d.push(String.fromCharCode.apply(null,a.slice(e,Math.min(e+c,f)))):d.push(String.fromCharCode.apply(null,a.subarray(e,Math.min(e+c,f)))),e+=c;return d.join("")},stringifyByChar:function(a){for(var b="",c=0;c<a.length;c++)b+=String.fromCharCode(a[c]);return b},applyCanBeUsed:{uint8array:function(){try{return i.uint8array&&1===String.fromCharCode.apply(null,new Uint8Array(1)).length}catch(a){return!1}}(),nodebuffer:function(){try{return i.nodebuffer&&1===String.fromCharCode.apply(null,k.allocBuffer(1)).length}catch(a){return!1}}()}};c.applyFromCharCode=g;var o={};o.string={string:e,array:function(a){return f(a,new Array(a.length))},arraybuffer:function(a){return o.string.uint8array(a).buffer},uint8array:function(a){return f(a,new Uint8Array(a.length))},nodebuffer:function(a){return f(a,k.allocBuffer(a.length))}},o.array={string:g,array:e,arraybuffer:function(a){return new Uint8Array(a).buffer},uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(a)}},o.arraybuffer={string:function(a){return g(new Uint8Array(a))},array:function(a){return h(new Uint8Array(a),new Array(a.byteLength))},arraybuffer:e,uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(new Uint8Array(a))}},o.uint8array={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return a.buffer},uint8array:e,nodebuffer:function(a){return 
k.newBufferFrom(a)}},o.nodebuffer={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return o.nodebuffer.uint8array(a).buffer},uint8array:function(a){return h(a,new Uint8Array(a.length))},nodebuffer:e},c.transformTo=function(a,b){if(b||(b=""),!a)return b;c.checkSupport(a);var d=c.getTypeOf(b),e=o[d][a](b);return e},c.getTypeOf=function(a){return"string"==typeof a?"string":"[object Array]"===Object.prototype.toString.call(a)?"array":i.nodebuffer&&k.isBuffer(a)?"nodebuffer":i.uint8array&&a instanceof Uint8Array?"uint8array":i.arraybuffer&&a instanceof ArrayBuffer?"arraybuffer":void 0},c.checkSupport=function(a){var b=i[a.toLowerCase()];if(!b)throw new Error(a+" is not supported by this platform")},c.MAX_VALUE_16BITS=65535,c.MAX_VALUE_32BITS=-1,c.pretty=function(a){var b,c,d="";for(c=0;c<(a||"").length;c++)b=a.charCodeAt(c),d+="\\x"+(b<16?"0":"")+b.toString(16).toUpperCase();return d},c.delay=function(a,b,c){l(function(){a.apply(c||null,b||[])})},c.inherits=function(a,b){var c=function(){};c.prototype=b.prototype,a.prototype=new c},c.extend=function(){var a,b,c={};for(a=0;a<arguments.length;a++)for(b in arguments[a])arguments[a].hasOwnProperty(b)&&"undefined"==typeof c[b]&&(c[b]=arguments[a][b]);return c},c.prepareContent=function(a,b,e,f,g){var h=m.Promise.resolve(b).then(function(a){var b=i.blob&&(a instanceof Blob||["[object File]","[object Blob]"].indexOf(Object.prototype.toString.call(a))!==-1);return b&&"undefined"!=typeof FileReader?new m.Promise(function(b,c){var d=new FileReader;d.onload=function(a){b(a.target.result)},d.onerror=function(a){c(a.target.error)},d.readAsArrayBuffer(a)}):a});return h.then(function(b){var h=c.getTypeOf(b);return h?("arraybuffer"===h?b=c.transformTo("uint8array",b):"string"===h&&(g?b=j.decode(b):e&&f!==!0&&(b=d(b))),b):m.Promise.reject(new Error("Can't read the data of '"+a+"'. 
Is it in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?"))})}},{"./base64":1,"./external":6,"./nodejsUtils":14,"./support":30,"core-js/library/fn/set-immediate":36}],33:[function(a,b,c){"use strict";function d(a){this.files=[],this.loadOptions=a}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./signature"),h=a("./zipEntry"),i=(a("./utf8"),a("./support"));d.prototype={checkSignature:function(a){if(!this.reader.readAndCheckSignature(a)){this.reader.index-=4;var b=this.reader.readString(4);throw new Error("Corrupted zip or bug: unexpected signature ("+f.pretty(b)+", expected "+f.pretty(a)+")")}},isSignature:function(a,b){var c=this.reader.index;this.reader.setIndex(a);var d=this.reader.readString(4),e=d===b;return this.reader.setIndex(c),e},readBlockEndOfCentral:function(){this.diskNumber=this.reader.readInt(2),this.diskWithCentralDirStart=this.reader.readInt(2),this.centralDirRecordsOnThisDisk=this.reader.readInt(2),this.centralDirRecords=this.reader.readInt(2),this.centralDirSize=this.reader.readInt(4),this.centralDirOffset=this.reader.readInt(4),this.zipCommentLength=this.reader.readInt(2);var a=this.reader.readData(this.zipCommentLength),b=i.uint8array?"uint8array":"array",c=f.transformTo(b,a);this.zipComment=this.loadOptions.decodeFileName(c)},readBlockZip64EndOfCentral:function(){this.zip64EndOfCentralSize=this.reader.readInt(8),this.reader.skip(4),this.diskNumber=this.reader.readInt(4),this.diskWithCentralDirStart=this.reader.readInt(4),this.centralDirRecordsOnThisDisk=this.reader.readInt(8),this.centralDirRecords=this.reader.readInt(8),this.centralDirSize=this.reader.readInt(8),this.centralDirOffset=this.reader.readInt(8),this.zip64ExtensibleData={};for(var 
/* Vendored minified bundle (JSZip). This span: remainder of module 33 "./zipEntries"
   -- ZIP64 end-of-central-directory parsing (extensible data blocks, locator with a
   multi-volume guard), readLocalFiles, readCentralDir (with a corrupted-record-count
   check), and readEndOfCentral, which locates the EOCD signature from the end of the
   file, falls back to the ZIP64 records when any 16/32-bit field is saturated, and
   computes `reader.zero` to tolerate data prepended before the archive. Then the start
   of module 34 "./zipEntry". A string literal spans the first line break below --
   the line structure must not be disturbed. */
a,b,c,d=this.zip64EndOfCentralSize-44,e=0;e<d;)a=this.reader.readInt(2),b=this.reader.readInt(4),c=this.reader.readData(b),this.zip64ExtensibleData[a]={id:a,length:b,value:c}},readBlockZip64EndOfCentralLocator:function(){if(this.diskWithZip64CentralDirStart=this.reader.readInt(4),this.relativeOffsetEndOfZip64CentralDir=this.reader.readInt(8),this.disksCount=this.reader.readInt(4),this.disksCount>1)throw new Error("Multi-volumes zip are not supported")},readLocalFiles:function(){var a,b;for(a=0;a<this.files.length;a++)b=this.files[a],this.reader.setIndex(b.localHeaderOffset),this.checkSignature(g.LOCAL_FILE_HEADER),b.readLocalPart(this.reader),b.handleUTF8(),b.processAttributes()},readCentralDir:function(){var a;for(this.reader.setIndex(this.centralDirOffset);this.reader.readAndCheckSignature(g.CENTRAL_FILE_HEADER);)a=new h({zip64:this.zip64},this.loadOptions),a.readCentralPart(this.reader),this.files.push(a);if(this.centralDirRecords!==this.files.length&&0!==this.centralDirRecords&&0===this.files.length)throw new Error("Corrupted zip or bug: expected "+this.centralDirRecords+" records in central dir, got "+this.files.length)},readEndOfCentral:function(){var a=this.reader.lastIndexOfSignature(g.CENTRAL_DIRECTORY_END);if(a<0){var b=!this.isSignature(0,g.LOCAL_FILE_HEADER);throw b?new Error("Can't find end of central directory : is this a zip file ? 
If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html"):new Error("Corrupted zip: can't find end of central directory")}this.reader.setIndex(a);var c=a;if(this.checkSignature(g.CENTRAL_DIRECTORY_END),this.readBlockEndOfCentral(),this.diskNumber===f.MAX_VALUE_16BITS||this.diskWithCentralDirStart===f.MAX_VALUE_16BITS||this.centralDirRecordsOnThisDisk===f.MAX_VALUE_16BITS||this.centralDirRecords===f.MAX_VALUE_16BITS||this.centralDirSize===f.MAX_VALUE_32BITS||this.centralDirOffset===f.MAX_VALUE_32BITS){if(this.zip64=!0,a=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),a<0)throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");if(this.reader.setIndex(a),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),this.readBlockZip64EndOfCentralLocator(),!this.isSignature(this.relativeOffsetEndOfZip64CentralDir,g.ZIP64_CENTRAL_DIRECTORY_END)&&(this.relativeOffsetEndOfZip64CentralDir=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.relativeOffsetEndOfZip64CentralDir<0))throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.readBlockZip64EndOfCentral()}var d=this.centralDirOffset+this.centralDirSize;this.zip64&&(d+=20,d+=12+this.zip64EndOfCentralSize);var e=c-d;if(e>0)this.isSignature(c,g.CENTRAL_FILE_HEADER)||(this.reader.zero=e);else if(e<0)throw new Error("Corrupted zip: missing "+Math.abs(e)+" bytes.")},prepareReader:function(a){this.reader=e(a)},load:function(a){this.prepareReader(a),this.readEndOfCentral(),this.readCentralDir(),this.readLocalFiles()}},b.exports=d},{"./reader/readerFor":22,"./signature":23,"./support":30,"./utf8":31,"./utils":32,"./zipEntry":34}],34:[function(a,b,c){"use strict";function d(a,b){this.options=a,this.loadOptions=b}var 
e=a("./reader/readerFor"),f=a("./utils"),g=a("./compressedObject"),h=a("./crc32"),i=a("./utf8"),j=a("./compressions"),k=a("./support"),l=0,m=3,n=function(a){for(var b in j)if(j.hasOwnProperty(b)&&j[b].magic===a)return j[b];return null};d.prototype={isEncrypted:function(){return 1===(1&this.bitFlag)},useUTF8:function(){return 2048===(2048&this.bitFlag)},readLocalPart:function(a){var b,c;if(a.skip(22),this.fileNameLength=a.readInt(2),c=a.readInt(2),this.fileName=a.readData(this.fileNameLength),a.skip(c),this.compressedSize===-1||this.uncompressedSize===-1)throw new Error("Bug or corrupted zip : didn't get enough informations from the central directory (compressedSize === -1 || uncompressedSize === -1)");if(b=n(this.compressionMethod),null===b)throw new Error("Corrupted zip : compression "+f.pretty(this.compressionMethod)+" unknown (inner file : "+f.transformTo("string",this.fileName)+")");this.decompressed=new g(this.compressedSize,this.uncompressedSize,this.crc32,b,a.readData(this.compressedSize))},readCentralPart:function(a){this.versionMadeBy=a.readInt(2),a.skip(2),this.bitFlag=a.readInt(2),this.compressionMethod=a.readString(2),this.date=a.readDate(),this.crc32=a.readInt(4),this.compressedSize=a.readInt(4),this.uncompressedSize=a.readInt(4);var b=a.readInt(2);if(this.extraFieldsLength=a.readInt(2),this.fileCommentLength=a.readInt(2),this.diskNumberStart=a.readInt(2),this.internalFileAttributes=a.readInt(2),this.externalFileAttributes=a.readInt(4),this.localHeaderOffset=a.readInt(4),this.isEncrypted())throw new Error("Encrypted zip are not supported");a.skip(b),this.readExtraFields(a),this.parseZIP64ExtraField(a),this.fileComment=a.readData(this.fileCommentLength)},processAttributes:function(){this.unixPermissions=null,this.dosPermissions=null;var 
/* Vendored minified bundle (JSZip). This span: remainder of module 34 "./zipEntry" --
   DOS/Unix permission extraction from versionMadeBy/externalFileAttributes,
   ZIP64 extra field (id 1) overriding saturated 32-bit sizes/offsets, generic extra
   field parsing, and UTF-8 filename/comment handling including the Info-ZIP unicode
   path (id 28789 = 0x7075) and comment (id 25461 = 0x6375) extra fields, validated
   against a CRC-32 of the raw name/comment. Then module 35 (the per-file ZipObject:
   internalStream/async/nodeStream plus compress/decompress worker plumbing, and stubs
   that throw for methods removed in JSZip 3.0), module 36 (core-js setImmediate
   re-export) and the start of module 37 (core-js _a-function). */
a=this.versionMadeBy>>8;this.dir=!!(16&this.externalFileAttributes),a===l&&(this.dosPermissions=63&this.externalFileAttributes),a===m&&(this.unixPermissions=this.externalFileAttributes>>16&65535),this.dir||"/"!==this.fileNameStr.slice(-1)||(this.dir=!0)},parseZIP64ExtraField:function(a){if(this.extraFields[1]){var b=e(this.extraFields[1].value);this.uncompressedSize===f.MAX_VALUE_32BITS&&(this.uncompressedSize=b.readInt(8)),this.compressedSize===f.MAX_VALUE_32BITS&&(this.compressedSize=b.readInt(8)),this.localHeaderOffset===f.MAX_VALUE_32BITS&&(this.localHeaderOffset=b.readInt(8)),this.diskNumberStart===f.MAX_VALUE_32BITS&&(this.diskNumberStart=b.readInt(4))}},readExtraFields:function(a){var b,c,d,e=a.index+this.extraFieldsLength;for(this.extraFields||(this.extraFields={});a.index<e;)b=a.readInt(2),c=a.readInt(2),d=a.readData(c),this.extraFields[b]={id:b,length:c,value:d}},handleUTF8:function(){var a=k.uint8array?"uint8array":"array";if(this.useUTF8())this.fileNameStr=i.utf8decode(this.fileName),this.fileCommentStr=i.utf8decode(this.fileComment);else{var b=this.findExtraFieldUnicodePath();if(null!==b)this.fileNameStr=b;else{var c=f.transformTo(a,this.fileName);this.fileNameStr=this.loadOptions.decodeFileName(c)}var d=this.findExtraFieldUnicodeComment();if(null!==d)this.fileCommentStr=d;else{var e=f.transformTo(a,this.fileComment);this.fileCommentStr=this.loadOptions.decodeFileName(e)}}},findExtraFieldUnicodePath:function(){var a=this.extraFields[28789];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileName)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null},findExtraFieldUnicodeComment:function(){var a=this.extraFields[25461];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileComment)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null}},b.exports=d},{"./compressedObject":2,"./compressions":3,"./crc32":4,"./reader/readerFor":22,"./support":30,"./utf8":31,"./utils":32}],35:[function(a,b,c){"use strict";var 
d=a("./stream/StreamHelper"),e=a("./stream/DataWorker"),f=a("./utf8"),g=a("./compressedObject"),h=a("./stream/GenericWorker"),i=function(a,b,c){this.name=a,this.dir=c.dir,this.date=c.date,this.comment=c.comment,this.unixPermissions=c.unixPermissions,this.dosPermissions=c.dosPermissions,this._data=b,this._dataBinary=c.binary,this.options={compression:c.compression,compressionOptions:c.compressionOptions}};i.prototype={internalStream:function(a){var b=null,c="string";try{if(!a)throw new Error("No output type specified.");c=a.toLowerCase();var e="string"===c||"text"===c;"binarystring"!==c&&"text"!==c||(c="string"),b=this._decompressWorker();var g=!this._dataBinary;g&&!e&&(b=b.pipe(new f.Utf8EncodeWorker)),!g&&e&&(b=b.pipe(new f.Utf8DecodeWorker))}catch(i){b=new h("error"),b.error(i)}return new d(b,c,"")},async:function(a,b){return this.internalStream(a).accumulate(b)},nodeStream:function(a,b){return this.internalStream(a||"nodebuffer").toNodejsStream(b)},_compressWorker:function(a,b){if(this._data instanceof g&&this._data.compression.magic===a.magic)return this._data.getCompressedWorker();var c=this._decompressWorker();return this._dataBinary||(c=c.pipe(new f.Utf8EncodeWorker)),g.createWorkerFrom(c,a,b)},_decompressWorker:function(){return this._data instanceof g?this._data.getContentWorker():this._data instanceof h?this._data:new e(this._data)}};for(var j=["asText","asBinary","asNodeBuffer","asUint8Array","asArrayBuffer"],k=function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},l=0;l<j.length;l++)i.prototype[j[l]]=k;b.exports=i},{"./compressedObject":2,"./stream/DataWorker":27,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31}],36:[function(a,b,c){a("../modules/web.immediate"),b.exports=a("../modules/_core").setImmediate},{"../modules/_core":40,"../modules/web.immediate":56}],37:[function(a,b,c){b.exports=function(a){if("function"!=typeof a)throw TypeError(a+" is not a function!");return 
/* Vendored minified bundle (core-js library internals backing the setImmediate
   polyfill). Modules in this span: 38 _an-object, 39 _cof (class-of via toString),
   40 _core (version 2.3.0), 41 _ctx (bound-call helper), 42 _descriptors
   (Object.defineProperty feature test), 43 _dom-create, 44 _export (the F/G/S/P/B/W/
   U/R flagged export machinery), 45 _fails, 46 _global (window/self/Function("return
   this") resolution), 47 _hide, 48 _html, 49 _ie8-dom-define, 50 _invoke, 51
   _is-object, 52 _object-dp (defineProperty with IE8 fallback and accessor guard),
   53 _property-desc, and the start of 54 _task: setImmediate/clearImmediate
   scheduling over process.nextTick, MessageChannel, postMessage, script
   onreadystatechange, or setTimeout, in that preference order. */
a}},{}],38:[function(a,b,c){var d=a("./_is-object");b.exports=function(a){if(!d(a))throw TypeError(a+" is not an object!");return a}},{"./_is-object":51}],39:[function(a,b,c){var d={}.toString;b.exports=function(a){return d.call(a).slice(8,-1)}},{}],40:[function(a,b,c){var d=b.exports={version:"2.3.0"};"number"==typeof __e&&(__e=d)},{}],41:[function(a,b,c){var d=a("./_a-function");b.exports=function(a,b,c){if(d(a),void 0===b)return a;switch(c){case 1:return function(c){return a.call(b,c)};case 2:return function(c,d){return a.call(b,c,d)};case 3:return function(c,d,e){return a.call(b,c,d,e)}}return function(){return a.apply(b,arguments)}}},{"./_a-function":37}],42:[function(a,b,c){b.exports=!a("./_fails")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},{"./_fails":45}],43:[function(a,b,c){var d=a("./_is-object"),e=a("./_global").document,f=d(e)&&d(e.createElement);b.exports=function(a){return f?e.createElement(a):{}}},{"./_global":46,"./_is-object":51}],44:[function(a,b,c){var d=a("./_global"),e=a("./_core"),f=a("./_ctx"),g=a("./_hide"),h="prototype",i=function(a,b,c){var j,k,l,m=a&i.F,n=a&i.G,o=a&i.S,p=a&i.P,q=a&i.B,r=a&i.W,s=n?e:e[b]||(e[b]={}),t=s[h],u=n?d:o?d[b]:(d[b]||{})[h];n&&(c=b);for(j in c)k=!m&&u&&void 0!==u[j],k&&j in s||(l=k?u[j]:c[j],s[j]=n&&"function"!=typeof u[j]?c[j]:q&&k?f(l,d):r&&u[j]==l?function(a){var b=function(b,c,d){if(this instanceof a){switch(arguments.length){case 0:return new a;case 1:return new a(b);case 2:return new a(b,c)}return new a(b,c,d)}return a.apply(this,arguments)};return b[h]=a[h],b}(l):p&&"function"==typeof l?f(Function.call,l):l,p&&((s.virtual||(s.virtual={}))[j]=l,a&i.R&&t&&!t[j]&&g(t,j,l)))};i.F=1,i.G=2,i.S=4,i.P=8,i.B=16,i.W=32,i.U=64,i.R=128,b.exports=i},{"./_core":40,"./_ctx":41,"./_global":46,"./_hide":47}],45:[function(a,b,c){b.exports=function(a){try{return!!a()}catch(b){return!0}}},{}],46:[function(a,b,c){var d=b.exports="undefined"!=typeof 
window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=d)},{}],47:[function(a,b,c){var d=a("./_object-dp"),e=a("./_property-desc");b.exports=a("./_descriptors")?function(a,b,c){return d.f(a,b,e(1,c))}:function(a,b,c){return a[b]=c,a}},{"./_descriptors":42,"./_object-dp":52,"./_property-desc":53}],48:[function(a,b,c){b.exports=a("./_global").document&&document.documentElement},{"./_global":46}],49:[function(a,b,c){b.exports=!a("./_descriptors")&&!a("./_fails")(function(){return 7!=Object.defineProperty(a("./_dom-create")("div"),"a",{get:function(){return 7}}).a})},{"./_descriptors":42,"./_dom-create":43,"./_fails":45}],50:[function(a,b,c){b.exports=function(a,b,c){var d=void 0===c;switch(b.length){case 0:return d?a():a.call(c);case 1:return d?a(b[0]):a.call(c,b[0]);case 2:return d?a(b[0],b[1]):a.call(c,b[0],b[1]);case 3:return d?a(b[0],b[1],b[2]):a.call(c,b[0],b[1],b[2]);case 4:return d?a(b[0],b[1],b[2],b[3]):a.call(c,b[0],b[1],b[2],b[3])}return a.apply(c,b)}},{}],51:[function(a,b,c){b.exports=function(a){return"object"==typeof a?null!==a:"function"==typeof a}},{}],52:[function(a,b,c){var d=a("./_an-object"),e=a("./_ie8-dom-define"),f=a("./_to-primitive"),g=Object.defineProperty;c.f=a("./_descriptors")?Object.defineProperty:function(a,b,c){if(d(a),b=f(b,!0),d(c),e)try{return g(a,b,c)}catch(h){}if("get"in c||"set"in c)throw TypeError("Accessors not supported!");return"value"in c&&(a[b]=c.value),a}},{"./_an-object":38,"./_descriptors":42,"./_ie8-dom-define":49,"./_to-primitive":55}],53:[function(a,b,c){b.exports=function(a,b){return{enumerable:!(1&a),configurable:!(2&a),writable:!(4&a),value:b}}},{}],54:[function(a,b,c){var d,e,f,g=a("./_ctx"),h=a("./_invoke"),i=a("./_html"),j=a("./_dom-create"),k=a("./_global"),l=k.process,m=k.setImmediate,n=k.clearImmediate,o=k.MessageChannel,p=0,q={},r="onreadystatechange",s=function(){var a=+this;if(q.hasOwnProperty(a)){var b=q[a];delete 
q[a],b()}},t=function(a){s.call(a.data)};m&&n||(m=function(a){for(var b=[],c=1;arguments.length>c;)b.push(arguments[c++]);return q[++p]=function(){h("function"==typeof a?a:Function(a),b)},d(p),p},n=function(a){delete q[a]},"process"==a("./_cof")(l)?d=function(a){l.nextTick(g(s,a,1))}:o?(e=new o,f=e.port2,e.port1.onmessage=t,d=g(f.postMessage,f,1)):k.addEventListener&&"function"==typeof postMessage&&!k.importScripts?(d=function(a){k.postMessage(a+"","*")},k.addEventListener("message",t,!1)):d=r in j("script")?function(a){i.appendChild(j("script"))[r]=function(){i.removeChild(this),s.call(a)}}:function(a){setTimeout(g(s,a,1),0)}),b.exports={set:m,clear:n}},{"./_cof":39,"./_ctx":41,"./_dom-create":43,"./_global":46,"./_html":48,"./_invoke":50}],55:[function(a,b,c){var d=a("./_is-object");b.exports=function(a,b){if(!d(a))return a;var c,e;if(b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;if("function"==typeof(c=a.valueOf)&&!d(e=c.call(a)))return e;if(!b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;throw TypeError("Can't convert object to primitive value")}},{"./_is-object":51}],56:[function(a,b,c){var d=a("./_export"),e=a("./_task");d(d.G+d.B,{setImmediate:e.set,clearImmediate:e.clear})},{"./_export":44,"./_task":54}],57:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new 
/* Vendored minified bundle. This span: tail of module 57 "immediate" (MessageChannel
   fallback and the queue-drain exports) and module 58, the "lie" Promise
   implementation: state constants (PENDING/FULFILLED/REJECTED as unique array
   sentinels), then/catch with queued subscribers, resolve unwrapping of thenables
   (rejecting self-resolution with a TypeError), and the static resolve/reject/
   all/race combinators. Also the start of pako's entry module (59) and of its
   Deflate wrapper (60). */
a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],58:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(e){return p.reject(a,e)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&("object"==typeof a||"function"==typeof a)&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(d){c.status="error",c.value=d}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var 
g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a("immediate"),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=e,e.prototype["catch"]=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},e.resolve=k,e.reject=l,e.all=m,e.race=n},{immediate:57}],59:[function(a,b,c){"use strict";var d=a("./lib/utils/common").assign,e=a("./lib/deflate"),f=a("./lib/inflate"),g=a("./lib/zlib/constants"),h={};d(h,e,f,g),b.exports=h},{"./lib/deflate":60,"./lib/inflate":61,"./lib/utils/common":62,"./lib/zlib/constants":65}],60:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=i.assign({level:s,method:u,chunkSize:16384,windowBits:15,memLevel:8,strategy:t,to:""},a||{});var b=this.options;b.raw&&b.windowBits>0?b.windowBits=-b.windowBits:b.gzip&&b.windowBits>0&&b.windowBits<16&&(b.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=h.deflateInit2(this.strm,b.level,b.method,b.windowBits,b.memLevel,b.strategy);if(c!==p)throw new Error(k[c]);if(b.header&&h.deflateSetHeader(this.strm,b.header),b.dictionary){var e;if(e="string"==typeof 
/* Vendored minified bundle (pako). This span: remainder of module 60, the Deflate
   wrapper -- dictionary setup, push(data, mode) driving zlib deflate chunk by chunk
   into this.chunks, onData/onEnd accumulation and the deflate/deflateRaw/gzip
   one-shot helpers -- then module 61, the matching Inflate wrapper: windowBits
   normalization (raw / auto-detect gzip via +32 / custom dictionary), push() loop
   handling Z_NEED_DICT and Z_BUF_ERROR, and utf8border-based splitting when
   producing string output; inflate/inflateRaw/ungzip exports. Ends with the start
   of module 62 "utils/common". */
b.dictionary?j.string2buf(b.dictionary):"[object ArrayBuffer]"===m.call(b.dictionary)?new Uint8Array(b.dictionary):b.dictionary,c=h.deflateSetDictionary(this.strm,e),c!==p)throw new Error(k[c]);this._dict_set=!0}}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}function g(a,b){return b=b||{},b.gzip=!0,e(a,b)}var h=a("./zlib/deflate"),i=a("./utils/common"),j=a("./utils/strings"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=Object.prototype.toString,n=0,o=4,p=0,q=1,r=2,s=-1,t=0,u=8;d.prototype.push=function(a,b){var c,d,e=this.strm,f=this.options.chunkSize;if(this.ended)return!1;d=b===~~b?b:b===!0?o:n,"string"==typeof a?e.input=j.string2buf(a):"[object ArrayBuffer]"===m.call(a)?e.input=new Uint8Array(a):e.input=a,e.next_in=0,e.avail_in=e.input.length;do{if(0===e.avail_out&&(e.output=new i.Buf8(f),e.next_out=0,e.avail_out=f),c=h.deflate(e,d),c!==q&&c!==p)return this.onEnd(c),this.ended=!0,!1;0!==e.avail_out&&(0!==e.avail_in||d!==o&&d!==r)||("string"===this.options.to?this.onData(j.buf2binstring(i.shrinkBuf(e.output,e.next_out))):this.onData(i.shrinkBuf(e.output,e.next_out)))}while((e.avail_in>0||0===e.avail_out)&&c!==q);return d===o?(c=h.deflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===p):d!==r||(this.onEnd(p),e.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===p&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=i.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Deflate=d,c.deflate=e,c.deflateRaw=f,c.gzip=g},{"./utils/common":62,"./utils/strings":63,"./zlib/deflate":67,"./zlib/messages":72,"./zlib/zstream":74}],61:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=h.assign({chunkSize:16384,windowBits:0,to:""},a||{});var 
b=this.options;b.raw&&b.windowBits>=0&&b.windowBits<16&&(b.windowBits=-b.windowBits,0===b.windowBits&&(b.windowBits=-15)),!(b.windowBits>=0&&b.windowBits<16)||a&&a.windowBits||(b.windowBits+=32),b.windowBits>15&&b.windowBits<48&&0===(15&b.windowBits)&&(b.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=g.inflateInit2(this.strm,b.windowBits);if(c!==j.Z_OK)throw new Error(k[c]);this.header=new m,g.inflateGetHeader(this.strm,this.header)}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}var g=a("./zlib/inflate"),h=a("./utils/common"),i=a("./utils/strings"),j=a("./zlib/constants"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=a("./zlib/gzheader"),n=Object.prototype.toString;d.prototype.push=function(a,b){var c,d,e,f,k,l,m=this.strm,o=this.options.chunkSize,p=this.options.dictionary,q=!1;if(this.ended)return!1;d=b===~~b?b:b===!0?j.Z_FINISH:j.Z_NO_FLUSH,"string"==typeof a?m.input=i.binstring2buf(a):"[object ArrayBuffer]"===n.call(a)?m.input=new Uint8Array(a):m.input=a,m.next_in=0,m.avail_in=m.input.length;do{if(0===m.avail_out&&(m.output=new h.Buf8(o),m.next_out=0,m.avail_out=o),c=g.inflate(m,j.Z_NO_FLUSH),c===j.Z_NEED_DICT&&p&&(l="string"==typeof p?i.string2buf(p):"[object ArrayBuffer]"===n.call(p)?new Uint8Array(p):p,c=g.inflateSetDictionary(this.strm,l)),c===j.Z_BUF_ERROR&&q===!0&&(c=j.Z_OK,q=!1),c!==j.Z_STREAM_END&&c!==j.Z_OK)return this.onEnd(c),this.ended=!0,!1;m.next_out&&(0!==m.avail_out&&c!==j.Z_STREAM_END&&(0!==m.avail_in||d!==j.Z_FINISH&&d!==j.Z_SYNC_FLUSH)||("string"===this.options.to?(e=i.utf8border(m.output,m.next_out),f=m.next_out-e,k=i.buf2string(m.output,e),m.next_out=f,m.avail_out=o-f,f&&h.arraySet(m.output,m.output,e,f,0),this.onData(k)):this.onData(h.shrinkBuf(m.output,m.next_out)))),0===m.avail_in&&0===m.avail_out&&(q=!0)}while((m.avail_in>0||0===m.avail_out)&&c!==j.Z_STREAM_END);return 
c===j.Z_STREAM_END&&(d=j.Z_FINISH),d===j.Z_FINISH?(c=g.inflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===j.Z_OK):d!==j.Z_SYNC_FLUSH||(this.onEnd(j.Z_OK),m.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===j.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=h.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Inflate=d,c.inflate=e,c.inflateRaw=f,c.ungzip=e},{"./utils/common":62,"./utils/strings":63,"./zlib/constants":65,"./zlib/gzheader":68,"./zlib/inflate":70,"./zlib/messages":72,"./zlib/zstream":74}],62:[function(a,b,c){"use strict";var d="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;c.assign=function(a){for(var b=Array.prototype.slice.call(arguments,1);b.length;){var c=b.shift();if(c){if("object"!=typeof c)throw new TypeError(c+"must be non-object");for(var d in c)c.hasOwnProperty(d)&&(a[d]=c[d])}}return a},c.shrinkBuf=function(a,b){return a.length===b?a:a.subarray?a.subarray(0,b):(a.length=b,a)};var e={arraySet:function(a,b,c,d,e){if(b.subarray&&a.subarray)return void a.set(b.subarray(c,c+d),e);for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){var b,c,d,e,f,g;for(d=0,b=0,c=a.length;b<c;b++)d+=a[b].length;for(g=new Uint8Array(d),e=0,b=0,c=a.length;b<c;b++)f=a[b],g.set(f,e),e+=f.length;return g}},f={arraySet:function(a,b,c,d,e){for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){return[].concat.apply([],a)}};c.setTyped=function(a){a?(c.Buf8=Uint8Array,c.Buf16=Uint16Array,c.Buf32=Int32Array,c.assign(c,e)):(c.Buf8=Array,c.Buf16=Array,c.Buf32=Array,c.assign(c,f)),c.setTyped(d)},{}],63:[function(a,b,c){"use strict";function d(a,b){if(b<65537&&(a.subarray&&g||!a.subarray&&f))return String.fromCharCode.apply(null,e.shrinkBuf(a,b));for(var c="",d=0;d<b;d++)c+=String.fromCharCode(a[d]);return c}var 
/* Vendored minified bundle (pako). This span: module 63 "utils/strings" -- feature
   tests for String.fromCharCode.apply on arrays/typed arrays, a UTF-8 byte-length
   table, string2buf / buf2binstring / binstring2buf / buf2string with surrogate-pair
   handling, and utf8border (last safe split point before an incomplete multi-byte
   sequence); module 64 adler32 (mod-65521 rolling checksum in 2000-byte batches);
   module 65 zlib constants; module 66 crc32 (table generated at load time); and the
   BEGINNING of module 67, the zlib deflate core (pending-buffer flush, input read
   with adler/crc update, longest-match search, sliding-window refill, and the
   "stored" level-0 strategy) -- module 67 continues past the end of this chunk. */
e=a("./common"),f=!0,g=!0;try{String.fromCharCode.apply(null,[0])}catch(h){f=!1}try{String.fromCharCode.apply(null,new Uint8Array(1))}catch(h){g=!1}for(var i=new e.Buf8(256),j=0;j<256;j++)i[j]=j>=252?6:j>=248?5:j>=240?4:j>=224?3:j>=192?2:1;i[254]=i[254]=1,c.string2buf=function(a){var b,c,d,f,g,h=a.length,i=0;for(f=0;f<h;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=new e.Buf8(i),g=0,f=0;g<i;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),c<128?b[g++]=c:c<2048?(b[g++]=192|c>>>6,b[g++]=128|63&c):c<65536?(b[g++]=224|c>>>12,b[g++]=128|c>>>6&63,b[g++]=128|63&c):(b[g++]=240|c>>>18,b[g++]=128|c>>>12&63,b[g++]=128|c>>>6&63,b[g++]=128|63&c);return b},c.buf2binstring=function(a){return d(a,a.length)},c.binstring2buf=function(a){for(var b=new e.Buf8(a.length),c=0,d=b.length;c<d;c++)b[c]=a.charCodeAt(c);return b},c.buf2string=function(a,b){var c,e,f,g,h=b||a.length,j=new Array(2*h);for(e=0,c=0;c<h;)if(f=a[c++],f<128)j[e++]=f;else if(g=i[f],g>4)j[e++]=65533,c+=g-1;else{for(f&=2===g?31:3===g?15:7;g>1&&c<h;)f=f<<6|63&a[c++],g--;g>1?j[e++]=65533:f<65536?j[e++]=f:(f-=65536,j[e++]=55296|f>>10&1023,j[e++]=56320|1023&f)}return d(j,e)},c.utf8border=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+i[a[c]]>b?c:b}},{"./common":62}],64:[function(a,b,c){"use strict";function d(a,b,c,d){for(var e=65535&a|0,f=a>>>16&65535|0,g=0;0!==c;){g=c>2e3?2e3:c,c-=g;do e=e+b[d++]|0,f=f+e|0;while(--g);e%=65521,f%=65521}return e|f<<16|0;
}b.exports=d},{}],65:[function(a,b,c){"use strict";b.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],66:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=f,g=d+c;a^=-1;for(var h=d;h<g;h++)a=a>>>8^e[255&(a^b[h])];return a^-1}var f=d();b.exports=e},{}],67:[function(a,b,c){"use strict";function d(a,b){return a.msg=I[b],b}function e(a){return(a<<1)-(a>4?9:0)}function f(a){for(var b=a.length;--b>=0;)a[b]=0}function g(a){var b=a.state,c=b.pending;c>a.avail_out&&(c=a.avail_out),0!==c&&(E.arraySet(a.output,b.pending_buf,b.pending_out,c,a.next_out),a.next_out+=c,b.pending_out+=c,a.total_out+=c,a.avail_out-=c,b.pending-=c,0===b.pending&&(b.pending_out=0))}function h(a,b){F._tr_flush_block(a,a.block_start>=0?a.block_start:-1,a.strstart-a.block_start,b),a.block_start=a.strstart,g(a.strm)}function i(a,b){a.pending_buf[a.pending++]=b}function j(a,b){a.pending_buf[a.pending++]=b>>>8&255,a.pending_buf[a.pending++]=255&b}function k(a,b,c,d){var e=a.avail_in;return e>d&&(e=d),0===e?0:(a.avail_in-=e,E.arraySet(b,a.input,a.next_in,e,c),1===a.state.wrap?a.adler=G(a.adler,b,e,c):2===a.state.wrap&&(a.adler=H(a.adler,b,e,c)),a.next_in+=e,a.total_in+=e,e)}function l(a,b){var c,d,e=a.max_chain_length,f=a.strstart,g=a.prev_length,h=a.nice_match,i=a.strstart>a.w_size-la?a.strstart-(a.w_size-la):0,j=a.window,k=a.w_mask,l=a.prev,m=a.strstart+ka,n=j[f+g-1],o=j[f+g];a.prev_length>=a.good_match&&(e>>=2),h>a.lookahead&&(h=a.lookahead);do 
if(c=b,j[c+g]===o&&j[c+g-1]===n&&j[c]===j[f]&&j[++c]===j[f+1]){f+=2,c++;do;while(j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&f<m);if(d=ka-(m-f),f=m-ka,d>g){if(a.match_start=b,g=d,d>=h)break;n=j[f+g-1],o=j[f+g]}}while((b=l[b&k])>i&&0!==--e);return g<=a.lookahead?g:a.lookahead}function m(a){var b,c,d,e,f,g=a.w_size;do{if(e=a.window_size-a.lookahead-a.strstart,a.strstart>=g+(g-la)){E.arraySet(a.window,a.window,g,g,0),a.match_start-=g,a.strstart-=g,a.block_start-=g,c=a.hash_size,b=c;do d=a.head[--b],a.head[b]=d>=g?d-g:0;while(--c);c=g,b=c;do d=a.prev[--b],a.prev[b]=d>=g?d-g:0;while(--c);e+=g}if(0===a.strm.avail_in)break;if(c=k(a.strm,a.window,a.strstart+a.lookahead,e),a.lookahead+=c,a.lookahead+a.insert>=ja)for(f=a.strstart-a.insert,a.ins_h=a.window[f],a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+1])&a.hash_mask;a.insert&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+ja-1])&a.hash_mask,a.prev[f&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=f,f++,a.insert--,!(a.lookahead+a.insert<ja)););}while(a.lookahead<la&&0!==a.strm.avail_in)}function n(a,b){var c=65535;for(c>a.pending_buf_size-5&&(c=a.pending_buf_size-5);;){if(a.lookahead<=1){if(m(a),0===a.lookahead&&b===J)return ua;if(0===a.lookahead)break}a.strstart+=a.lookahead,a.lookahead=0;var d=a.block_start+c;if((0===a.strstart||a.strstart>=d)&&(a.lookahead=a.strstart-d,a.strstart=d,h(a,!1),0===a.strm.avail_out))return ua;if(a.strstart-a.block_start>=a.w_size-la&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.strstart>a.block_start&&(h(a,!1),0===a.strm.avail_out)?ua:ua}function o(a,b){for(var c,d;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return 
ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),0!==c&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c)),a.match_length>=ja)if(d=F._tr_tally(a,a.strstart-a.match_start,a.match_length-ja),a.lookahead-=a.match_length,a.match_length<=a.max_lazy_match&&a.lookahead>=ja){a.match_length--;do a.strstart++,a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart;while(0!==--a.match_length);a.strstart++}else a.strstart+=a.match_length,a.match_length=0,a.ins_h=a.window[a.strstart],a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+1])&a.hash_mask;else d=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++;if(d&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function p(a,b){for(var c,d,e;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return 
ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),a.prev_length=a.match_length,a.prev_match=a.match_start,a.match_length=ja-1,0!==c&&a.prev_length<a.max_lazy_match&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c),a.match_length<=5&&(a.strategy===U||a.match_length===ja&&a.strstart-a.match_start>4096)&&(a.match_length=ja-1)),a.prev_length>=ja&&a.match_length<=a.prev_length){e=a.strstart+a.lookahead-ja,d=F._tr_tally(a,a.strstart-1-a.prev_match,a.prev_length-ja),a.lookahead-=a.prev_length-1,a.prev_length-=2;do++a.strstart<=e&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart);while(0!==--a.prev_length);if(a.match_available=0,a.match_length=ja-1,a.strstart++,d&&(h(a,!1),0===a.strm.avail_out))return ua}else if(a.match_available){if(d=F._tr_tally(a,0,a.window[a.strstart-1]),d&&h(a,!1),a.strstart++,a.lookahead--,0===a.strm.avail_out)return ua}else a.match_available=1,a.strstart++,a.lookahead--}return a.match_available&&(d=F._tr_tally(a,0,a.window[a.strstart-1]),a.match_available=0),a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function q(a,b){for(var c,d,e,f,g=a.window;;){if(a.lookahead<=ka){if(m(a),a.lookahead<=ka&&b===J)return 
ua;if(0===a.lookahead)break}if(a.match_length=0,a.lookahead>=ja&&a.strstart>0&&(e=a.strstart-1,d=g[e],d===g[++e]&&d===g[++e]&&d===g[++e])){f=a.strstart+ka;do;while(d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&e<f);a.match_length=ka-(f-e),a.match_length>a.lookahead&&(a.match_length=a.lookahead)}if(a.match_length>=ja?(c=F._tr_tally(a,1,a.match_length-ja),a.lookahead-=a.match_length,a.strstart+=a.match_length,a.match_length=0):(c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++),c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function r(a,b){for(var c;;){if(0===a.lookahead&&(m(a),0===a.lookahead)){if(b===J)return ua;break}if(a.match_length=0,c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++,c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function s(a,b,c,d,e){this.good_length=a,this.max_lazy=b,this.nice_length=c,this.max_chain=d,this.func=e}function t(a){a.window_size=2*a.w_size,f(a.head),a.max_lazy_match=D[a.level].max_lazy,a.good_match=D[a.level].good_length,a.nice_match=D[a.level].nice_length,a.max_chain_length=D[a.level].max_chain,a.strstart=0,a.block_start=0,a.lookahead=0,a.insert=0,a.match_length=a.prev_length=ja-1,a.match_available=0,a.ins_h=0}function 
u(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=$,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new E.Buf16(2*ha),this.dyn_dtree=new E.Buf16(2*(2*fa+1)),this.bl_tree=new E.Buf16(2*(2*ga+1)),f(this.dyn_ltree),f(this.dyn_dtree),f(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new E.Buf16(ia+1),this.heap=new E.Buf16(2*ea+1),f(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new E.Buf16(2*ea+1),f(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}function v(a){var b;return a&&a.state?(a.total_in=a.total_out=0,a.data_type=Z,b=a.state,b.pending=0,b.pending_out=0,b.wrap<0&&(b.wrap=-b.wrap),b.status=b.wrap?na:sa,a.adler=2===b.wrap?0:1,b.last_flush=J,F._tr_init(b),O):d(a,Q)}function w(a){var b=v(a);return b===O&&t(a.state),b}function x(a,b){return a&&a.state?2!==a.state.wrap?Q:(a.state.gzhead=b,O):Q}function y(a,b,c,e,f,g){if(!a)return Q;var h=1;if(b===T&&(b=6),e<0?(h=0,e=-e):e>15&&(h=2,e-=16),f<1||f>_||c!==$||e<8||e>15||b<0||b>9||g<0||g>X)return d(a,Q);8===e&&(e=9);var i=new u;return a.state=i,i.strm=a,i.wrap=h,i.gzhead=null,i.w_bits=e,i.w_size=1<<i.w_bits,i.w_mask=i.w_size-1,i.hash_bits=f+7,i.hash_size=1<<i.hash_bits,i.hash_mask=i.hash_size-1,i.hash_shift=~~((i.hash_bits+ja-1)/ja),i.window=new E.Buf8(2*i.w_size),i.head=new E.Buf16(i.hash_size),i.prev=new 
E.Buf16(i.w_size),i.lit_bufsize=1<<f+6,i.pending_buf_size=4*i.lit_bufsize,i.pending_buf=new E.Buf8(i.pending_buf_size),i.d_buf=1*i.lit_bufsize,i.l_buf=3*i.lit_bufsize,i.level=b,i.strategy=g,i.method=c,w(a)}function z(a,b){return y(a,b,$,aa,ba,Y)}function A(a,b){var c,h,k,l;if(!a||!a.state||b>N||b<0)return a?d(a,Q):Q;if(h=a.state,!a.output||!a.input&&0!==a.avail_in||h.status===ta&&b!==M)return d(a,0===a.avail_out?S:Q);if(h.strm=a,c=h.last_flush,h.last_flush=b,h.status===na)if(2===h.wrap)a.adler=0,i(h,31),i(h,139),i(h,8),h.gzhead?(i(h,(h.gzhead.text?1:0)+(h.gzhead.hcrc?2:0)+(h.gzhead.extra?4:0)+(h.gzhead.name?8:0)+(h.gzhead.comment?16:0)),i(h,255&h.gzhead.time),i(h,h.gzhead.time>>8&255),i(h,h.gzhead.time>>16&255),i(h,h.gzhead.time>>24&255),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,255&h.gzhead.os),h.gzhead.extra&&h.gzhead.extra.length&&(i(h,255&h.gzhead.extra.length),i(h,h.gzhead.extra.length>>8&255)),h.gzhead.hcrc&&(a.adler=H(a.adler,h.pending_buf,h.pending,0)),h.gzindex=0,h.status=oa):(i(h,0),i(h,0),i(h,0),i(h,0),i(h,0),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,ya),h.status=sa);else{var m=$+(h.w_bits-8<<4)<<8,n=-1;n=h.strategy>=V||h.level<2?0:h.level<6?1:6===h.level?2:3,m|=n<<6,0!==h.strstart&&(m|=ma),m+=31-m%31,h.status=sa,j(h,m),0!==h.strstart&&(j(h,a.adler>>>16),j(h,65535&a.adler)),a.adler=1}if(h.status===oa)if(h.gzhead.extra){for(k=h.pending;h.gzindex<(65535&h.gzhead.extra.length)&&(h.pending!==h.pending_buf_size||(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending!==h.pending_buf_size));)i(h,255&h.gzhead.extra[h.gzindex]),h.gzindex++;h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),h.gzindex===h.gzhead.extra.length&&(h.gzindex=0,h.status=pa)}else 
h.status=pa;if(h.status===pa)if(h.gzhead.name){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.name.length?255&h.gzhead.name.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.gzindex=0,h.status=qa)}else h.status=qa;if(h.status===qa)if(h.gzhead.comment){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.comment.length?255&h.gzhead.comment.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.status=ra)}else h.status=ra;if(h.status===ra&&(h.gzhead.hcrc?(h.pending+2>h.pending_buf_size&&g(a),h.pending+2<=h.pending_buf_size&&(i(h,255&a.adler),i(h,a.adler>>8&255),a.adler=0,h.status=sa)):h.status=sa),0!==h.pending){if(g(a),0===a.avail_out)return h.last_flush=-1,O}else if(0===a.avail_in&&e(b)<=e(c)&&b!==M)return d(a,S);if(h.status===ta&&0!==a.avail_in)return d(a,S);if(0!==a.avail_in||0!==h.lookahead||b!==J&&h.status!==ta){var o=h.strategy===V?r(h,b):h.strategy===W?q(h,b):D[h.level].func(h,b);if(o!==wa&&o!==xa||(h.status=ta),o===ua||o===wa)return 0===a.avail_out&&(h.last_flush=-1),O;if(o===va&&(b===K?F._tr_align(h):b!==N&&(F._tr_stored_block(h,0,0,!1),b===L&&(f(h.head),0===h.lookahead&&(h.strstart=0,h.block_start=0,h.insert=0))),g(a),0===a.avail_out))return h.last_flush=-1,O}return b!==M?O:h.wrap<=0?P:(2===h.wrap?(i(h,255&a.adler),i(h,a.adler>>8&255),i(h,a.adler>>16&255),i(h,a.adler>>24&255),i(h,255&a.total_in),i(h,a.total_in>>8&255),i(h,a.total_in>>16&255),i(h,a.total_in>>24&255)):(j(h,a.adler>>>16),j(h,65535&a.adler)),g(a),h.wrap>0&&(h.wrap=-h.wrap),0!==h.pending?O:P)}function B(a){var b;return 
a&&a.state?(b=a.state.status,b!==na&&b!==oa&&b!==pa&&b!==qa&&b!==ra&&b!==sa&&b!==ta?d(a,Q):(a.state=null,b===sa?d(a,R):O)):Q}function C(a,b){var c,d,e,g,h,i,j,k,l=b.length;if(!a||!a.state)return Q;if(c=a.state,g=c.wrap,2===g||1===g&&c.status!==na||c.lookahead)return Q;for(1===g&&(a.adler=G(a.adler,b,l,0)),c.wrap=0,l>=c.w_size&&(0===g&&(f(c.head),c.strstart=0,c.block_start=0,c.insert=0),k=new E.Buf8(c.w_size),E.arraySet(k,b,l-c.w_size,c.w_size,0),b=k,l=c.w_size),h=a.avail_in,i=a.next_in,j=a.input,a.avail_in=l,a.next_in=0,a.input=b,m(c);c.lookahead>=ja;){d=c.strstart,e=c.lookahead-(ja-1);do c.ins_h=(c.ins_h<<c.hash_shift^c.window[d+ja-1])&c.hash_mask,c.prev[d&c.w_mask]=c.head[c.ins_h],c.head[c.ins_h]=d,d++;while(--e);c.strstart=d,c.lookahead=ja-1,m(c)}return c.strstart+=c.lookahead,c.block_start=c.strstart,c.insert=c.lookahead,c.lookahead=0,c.match_length=c.prev_length=ja-1,c.match_available=0,a.next_in=i,a.input=j,a.avail_in=h,c.wrap=g,O}var D,E=a("../utils/common"),F=a("./trees"),G=a("./adler32"),H=a("./crc32"),I=a("./messages"),J=0,K=1,L=3,M=4,N=5,O=0,P=1,Q=-2,R=-3,S=-5,T=-1,U=1,V=2,W=3,X=4,Y=0,Z=2,$=8,_=9,aa=15,ba=8,ca=29,da=256,ea=da+1+ca,fa=30,ga=19,ha=2*ea+1,ia=15,ja=3,ka=258,la=ka+ja+1,ma=32,na=42,oa=69,pa=73,qa=91,ra=103,sa=113,ta=666,ua=1,va=2,wa=3,xa=4,ya=3;D=[new s(0,0,0,0,n),new s(4,4,8,4,o),new s(4,5,16,8,o),new s(4,6,32,32,o),new s(4,4,16,16,p),new s(8,16,32,32,p),new s(8,16,128,128,p),new s(8,32,128,256,p),new s(32,128,258,1024,p),new s(32,258,258,4096,p)],c.deflateInit=z,c.deflateInit2=y,c.deflateReset=w,c.deflateResetKeep=v,c.deflateSetHeader=x,c.deflate=A,c.deflateEnd=B,c.deflateSetDictionary=C,c.deflateInfo="pako deflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./messages":72,"./trees":73}],68:[function(a,b,c){"use strict";function d(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}b.exports=d},{}],69:[function(a,b,c){"use 
strict";var d=30,e=12;b.exports=function(a,b){var c,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C;c=a.state,f=a.next_in,B=a.input,g=f+(a.avail_in-5),h=a.next_out,C=a.output,i=h-(b-a.avail_out),j=h+(a.avail_out-257),k=c.dmax,l=c.wsize,m=c.whave,n=c.wnext,o=c.window,p=c.hold,q=c.bits,r=c.lencode,s=c.distcode,t=(1<<c.lenbits)-1,u=(1<<c.distbits)-1;a:do{q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=r[p&t];b:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,0===w)C[h++]=65535&v;else{if(!(16&w)){if(0===(64&w)){v=r[(65535&v)+(p&(1<<w)-1)];continue b}if(32&w){c.mode=e;break a}a.msg="invalid literal/length code",c.mode=d;break a}x=65535&v,w&=15,w&&(q<w&&(p+=B[f++]<<q,q+=8),x+=p&(1<<w)-1,p>>>=w,q-=w),q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=s[p&u];c:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,!(16&w)){if(0===(64&w)){v=s[(65535&v)+(p&(1<<w)-1)];continue c}a.msg="invalid distance code",c.mode=d;break a}if(y=65535&v,w&=15,q<w&&(p+=B[f++]<<q,q+=8,q<w&&(p+=B[f++]<<q,q+=8)),y+=p&(1<<w)-1,y>k){a.msg="invalid distance too far back",c.mode=d;break a}if(p>>>=w,q-=w,w=h-i,y>w){if(w=y-w,w>m&&c.sane){a.msg="invalid distance too far back",c.mode=d;break a}if(z=0,A=o,0===n){if(z+=l-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}else if(n<w){if(z+=l+n-w,w-=n,w<x){x-=w;do C[h++]=o[z++];while(--w);if(z=0,n<x){w=n,x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}}else if(z+=n-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}for(;x>2;)C[h++]=A[z++],C[h++]=A[z++],C[h++]=A[z++],x-=3;x&&(C[h++]=A[z++],x>1&&(C[h++]=A[z++]))}else{z=h-y;do C[h++]=C[z++],C[h++]=C[z++],C[h++]=C[z++],x-=3;while(x>2);x&&(C[h++]=C[z++],x>1&&(C[h++]=C[z++]))}break}}break}}while(f<g&&h<j);x=q>>3,f-=x,q-=x<<3,p&=(1<<q)-1,a.next_in=f,a.next_out=h,a.avail_in=f<g?5+(g-f):5-(f-g),a.avail_out=h<j?257+(j-h):257-(h-j),c.hold=p,c.bits=q}},{}],70:[function(a,b,c){"use strict";function d(a){return(a>>>24&255)+(a>>>8&65280)+((65280&a)<<8)+((255&a)<<24)}function 
e(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new s.Buf16(320),this.work=new s.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function f(a){var b;return a&&a.state?(b=a.state,a.total_in=a.total_out=b.total=0,a.msg="",b.wrap&&(a.adler=1&b.wrap),b.mode=L,b.last=0,b.havedict=0,b.dmax=32768,b.head=null,b.hold=0,b.bits=0,b.lencode=b.lendyn=new s.Buf32(pa),b.distcode=b.distdyn=new s.Buf32(qa),b.sane=1,b.back=-1,D):G}function g(a){var b;return a&&a.state?(b=a.state,b.wsize=0,b.whave=0,b.wnext=0,f(a)):G}function h(a,b){var c,d;return a&&a.state?(d=a.state,b<0?(c=0,b=-b):(c=(b>>4)+1,b<48&&(b&=15)),b&&(b<8||b>15)?G:(null!==d.window&&d.wbits!==b&&(d.window=null),d.wrap=c,d.wbits=b,g(a))):G}function i(a,b){var c,d;return a?(d=new e,a.state=d,d.window=null,c=h(a,b),c!==D&&(a.state=null),c):G}function j(a){return i(a,sa)}function k(a){if(ta){var b;for(q=new s.Buf32(512),r=new s.Buf32(32),b=0;b<144;)a.lens[b++]=8;for(;b<256;)a.lens[b++]=9;for(;b<280;)a.lens[b++]=7;for(;b<288;)a.lens[b++]=8;for(w(y,a.lens,0,288,q,0,a.work,{bits:9}),b=0;b<32;)a.lens[b++]=5;w(z,a.lens,0,32,r,0,a.work,{bits:5}),ta=!1}a.lencode=q,a.lenbits=9,a.distcode=r,a.distbits=5}function l(a,b,c,d){var e,f=a.state;return null===f.window&&(f.wsize=1<<f.wbits,f.wnext=0,f.whave=0,f.window=new s.Buf8(f.wsize)),d>=f.wsize?(s.arraySet(f.window,b,c-f.wsize,f.wsize,0),f.wnext=0,f.whave=f.wsize):(e=f.wsize-f.wnext,e>d&&(e=d),s.arraySet(f.window,b,c-d,e,f.wnext),d-=e,d?(s.arraySet(f.window,b,c-d,d,0),f.wnext=d,f.whave=f.wsize):(f.wnext+=e,f.wnext===f.wsize&&(f.wnext=0),f.whave<f.wsize&&(f.whave+=e))),0}function m(a,b){var 
c,e,f,g,h,i,j,m,n,o,p,q,r,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,Aa=0,Ba=new s.Buf8(4),Ca=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];if(!a||!a.state||!a.output||!a.input&&0!==a.avail_in)return G;c=a.state,c.mode===W&&(c.mode=X),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,o=i,p=j,xa=D;a:for(;;)switch(c.mode){case L:if(0===c.wrap){c.mode=X;break}for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(2&c.wrap&&35615===m){c.check=0,Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0),m=0,n=0,c.mode=M;break}if(c.flags=0,c.head&&(c.head.done=!1),!(1&c.wrap)||(((255&m)<<8)+(m>>8))%31){a.msg="incorrect header check",c.mode=ma;break}if((15&m)!==K){a.msg="unknown compression method",c.mode=ma;break}if(m>>>=4,n-=4,wa=(15&m)+8,0===c.wbits)c.wbits=wa;else if(wa>c.wbits){a.msg="invalid window size",c.mode=ma;break}c.dmax=1<<wa,a.adler=c.check=1,c.mode=512&m?U:W,m=0,n=0;break;case M:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.flags=m,(255&c.flags)!==K){a.msg="unknown compression method",c.mode=ma;break}if(57344&c.flags){a.msg="unknown header flags set",c.mode=ma;break}c.head&&(c.head.text=m>>8&1),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=N;case N:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.time=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,Ba[2]=m>>>16&255,Ba[3]=m>>>24&255,c.check=u(c.check,Ba,4,0)),m=0,n=0,c.mode=O;case O:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.xflags=255&m,c.head.os=m>>8),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=P;case P:if(1024&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length=m,c.head&&(c.head.extra_len=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0}else c.head&&(c.head.extra=null);c.mode=Q;case Q:if(1024&c.flags&&(q=c.length,q>i&&(q=i),q&&(c.head&&(wa=c.head.extra_len-c.length,c.head.extra||(c.head.extra=new 
Array(c.head.extra_len)),s.arraySet(c.head.extra,e,g,q,wa)),512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,c.length-=q),c.length))break a;c.length=0,c.mode=R;case R:if(2048&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.name+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.name=null);c.length=0,c.mode=S;case S:if(4096&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.comment+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.comment=null);c.mode=T;case T:if(512&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(65535&c.check)){a.msg="header crc mismatch",c.mode=ma;break}m=0,n=0}c.head&&(c.head.hcrc=c.flags>>9&1,c.head.done=!0),a.adler=c.check=0,c.mode=W;break;case U:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}a.adler=c.check=d(m),m=0,n=0,c.mode=V;case V:if(0===c.havedict)return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,F;a.adler=c.check=1,c.mode=W;case W:if(b===B||b===C)break a;case X:if(c.last){m>>>=7&n,n-=7&n,c.mode=ja;break}for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}switch(c.last=1&m,m>>>=1,n-=1,3&m){case 0:c.mode=Y;break;case 1:if(k(c),c.mode=ca,b===C){m>>>=2,n-=2;break a}break;case 2:c.mode=_;break;case 3:a.msg="invalid block type",c.mode=ma}m>>>=2,n-=2;break;case Y:for(m>>>=7&n,n-=7&n;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if((65535&m)!==(m>>>16^65535)){a.msg="invalid stored block lengths",c.mode=ma;break}if(c.length=65535&m,m=0,n=0,c.mode=Z,b===C)break a;case Z:c.mode=$;case $:if(q=c.length){if(q>i&&(q=i),q>j&&(q=j),0===q)break a;s.arraySet(f,e,g,q,h),i-=q,g+=q,j-=q,h+=q,c.length-=q;break}c.mode=W;break;case _:for(;n<14;){if(0===i)break 
a;i--,m+=e[g++]<<n,n+=8}if(c.nlen=(31&m)+257,m>>>=5,n-=5,c.ndist=(31&m)+1,m>>>=5,n-=5,c.ncode=(15&m)+4,m>>>=4,n-=4,c.nlen>286||c.ndist>30){a.msg="too many length or distance symbols",c.mode=ma;break}c.have=0,c.mode=aa;case aa:for(;c.have<c.ncode;){for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.lens[Ca[c.have++]]=7&m,m>>>=3,n-=3}for(;c.have<19;)c.lens[Ca[c.have++]]=0;if(c.lencode=c.lendyn,c.lenbits=7,ya={bits:c.lenbits},xa=w(x,c.lens,0,19,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid code lengths set",c.mode=ma;break}c.have=0,c.mode=ba;case ba:for(;c.have<c.nlen+c.ndist;){for(;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(sa<16)m>>>=qa,n-=qa,c.lens[c.have++]=sa;else{if(16===sa){for(za=qa+2;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m>>>=qa,n-=qa,0===c.have){a.msg="invalid bit length repeat",c.mode=ma;break}wa=c.lens[c.have-1],q=3+(3&m),m>>>=2,n-=2}else if(17===sa){for(za=qa+3;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=3+(7&m),m>>>=3,n-=3}else{for(za=qa+7;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=11+(127&m),m>>>=7,n-=7}if(c.have+q>c.nlen+c.ndist){a.msg="invalid bit length repeat",c.mode=ma;break}for(;q--;)c.lens[c.have++]=wa}}if(c.mode===ma)break;if(0===c.lens[256]){a.msg="invalid code -- missing end-of-block",c.mode=ma;break}if(c.lenbits=9,ya={bits:c.lenbits},xa=w(y,c.lens,0,c.nlen,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid literal/lengths set",c.mode=ma;break}if(c.distbits=6,c.distcode=c.distdyn,ya={bits:c.distbits},xa=w(z,c.lens,c.nlen,c.ndist,c.distcode,0,c.work,ya),c.distbits=ya.bits,xa){a.msg="invalid distances set",c.mode=ma;break}if(c.mode=ca,b===C)break a;case ca:c.mode=da;case 
da:if(i>=6&&j>=258){a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,v(a,p),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,c.mode===W&&(c.back=-1);break}for(c.back=0;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(ra&&0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.lencode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,c.length=sa,0===ra){c.mode=ia;break}if(32&ra){c.back=-1,c.mode=W;break}if(64&ra){a.msg="invalid literal/length code",c.mode=ma;break}c.extra=15&ra,c.mode=ea;case ea:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}c.was=c.length,c.mode=fa;case fa:for(;Aa=c.distcode[m&(1<<c.distbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.distcode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,64&ra){a.msg="invalid distance code",c.mode=ma;break}c.offset=sa,c.extra=15&ra,c.mode=ga;case ga:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.offset+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}if(c.offset>c.dmax){a.msg="invalid distance too far back",c.mode=ma;break}c.mode=ha;case ha:if(0===j)break a;if(q=p-j,c.offset>q){if(q=c.offset-q,q>c.whave&&c.sane){a.msg="invalid distance too far back",c.mode=ma;break}q>c.wnext?(q-=c.wnext,r=c.wsize-q):r=c.wnext-q,q>c.length&&(q=c.length),pa=c.window}else pa=f,r=h-c.offset,q=c.length;q>j&&(q=j),j-=q,c.length-=q;do f[h++]=pa[r++];while(--q);0===c.length&&(c.mode=da);break;case ia:if(0===j)break a;f[h++]=c.length,j--,c.mode=da;break;case 
ja:if(c.wrap){for(;n<32;){if(0===i)break a;i--,m|=e[g++]<<n,n+=8}if(p-=j,a.total_out+=p,c.total+=p,p&&(a.adler=c.check=c.flags?u(c.check,f,p,h-p):t(c.check,f,p,h-p)),p=j,(c.flags?m:d(m))!==c.check){a.msg="incorrect data check",c.mode=ma;break}m=0,n=0}c.mode=ka;case ka:if(c.wrap&&c.flags){for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(4294967295&c.total)){a.msg="incorrect length check",c.mode=ma;break}m=0,n=0}c.mode=la;case la:xa=E;break a;case ma:xa=H;break a;case na:return I;case oa:default:return G}return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,(c.wsize||p!==a.avail_out&&c.mode<ma&&(c.mode<ja||b!==A))&&l(a,a.output,a.next_out,p-a.avail_out)?(c.mode=na,I):(o-=a.avail_in,p-=a.avail_out,a.total_in+=o,a.total_out+=p,c.total+=p,c.wrap&&p&&(a.adler=c.check=c.flags?u(c.check,f,p,a.next_out-p):t(c.check,f,p,a.next_out-p)),a.data_type=c.bits+(c.last?64:0)+(c.mode===W?128:0)+(c.mode===ca||c.mode===Z?256:0),(0===o&&0===p||b===A)&&xa===D&&(xa=J),xa)}function n(a){if(!a||!a.state)return G;var b=a.state;return b.window&&(b.window=null),a.state=null,D}function o(a,b){var c;return a&&a.state?(c=a.state,0===(2&c.wrap)?G:(c.head=b,b.done=!1,D)):G}function p(a,b){var c,d,e,f=b.length;return a&&a.state?(c=a.state,0!==c.wrap&&c.mode!==V?G:c.mode===V&&(d=1,d=t(d,b,f,0),d!==c.check)?H:(e=l(a,b,f,f))?(c.mode=na,I):(c.havedict=1,D)):G}var q,r,s=a("../utils/common"),t=a("./adler32"),u=a("./crc32"),v=a("./inffast"),w=a("./inftrees"),x=0,y=1,z=2,A=4,B=5,C=6,D=0,E=1,F=2,G=-2,H=-3,I=-4,J=-5,K=8,L=1,M=2,N=3,O=4,P=5,Q=6,R=7,S=8,T=9,U=10,V=11,W=12,X=13,Y=14,Z=15,$=16,_=17,aa=18,ba=19,ca=20,da=21,ea=22,fa=23,ga=24,ha=25,ia=26,ja=27,ka=28,la=29,ma=30,na=31,oa=32,pa=852,qa=592,ra=15,sa=ra,ta=!0;c.inflateReset=g,c.inflateReset2=h,c.inflateResetKeep=f,c.inflateInit=j,c.inflateInit2=i,c.inflate=m,c.inflateEnd=n,c.inflateGetHeader=o,c.inflateSetDictionary=p,c.inflateInfo="pako inflate (from Nodeca 
project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./inffast":69,"./inftrees":71}],71:[function(a,b,c){"use strict";var d=a("../utils/common"),e=15,f=852,g=592,h=0,i=1,j=2,k=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,0,0],l=[16,16,16,16,16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,16,72,78],m=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0],n=[16,16,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,64,64];b.exports=function(a,b,c,o,p,q,r,s){var t,u,v,w,x,y,z,A,B,C=s.bits,D=0,E=0,F=0,G=0,H=0,I=0,J=0,K=0,L=0,M=0,N=null,O=0,P=new d.Buf16(e+1),Q=new d.Buf16(e+1),R=null,S=0;for(D=0;D<=e;D++)P[D]=0;for(E=0;E<o;E++)P[b[c+E]]++;for(H=C,G=e;G>=1&&0===P[G];G--);if(H>G&&(H=G),0===G)return p[q++]=20971520,p[q++]=20971520,s.bits=1,0;for(F=1;F<G&&0===P[F];F++);for(H<F&&(H=F),K=1,D=1;D<=e;D++)if(K<<=1,K-=P[D],K<0)return-1;if(K>0&&(a===h||1!==G))return-1;for(Q[1]=0,D=1;D<e;D++)Q[D+1]=Q[D]+P[D];for(E=0;E<o;E++)0!==b[c+E]&&(r[Q[b[c+E]]++]=E);if(a===h?(N=R=r,y=19):a===i?(N=k,O-=257,R=l,S-=257,y=256):(N=m,R=n,y=-1),M=0,E=0,D=F,x=q,I=H,J=0,v=-1,L=1<<H,w=L-1,a===i&&L>f||a===j&&L>g)return 1;for(;;){z=D-J,r[E]<y?(A=0,B=r[E]):r[E]>y?(A=R[S+r[E]],B=N[O+r[E]]):(A=96,B=0),t=1<<D-J,u=1<<I,F=u;do u-=t,p[x+(M>>J)+u]=z<<24|A<<16|B|0;while(0!==u);for(t=1<<D-1;M&t;)t>>=1;if(0!==t?(M&=t-1,M+=t):M=0,E++,0===--P[D]){if(D===G)break;D=b[c+r[E]]}if(D>H&&(M&w)!==v){for(0===J&&(J=H),x+=F,I=D-J,K=1<<I;I+J<G&&(K-=P[I+J],!(K<=0));)I++,K<<=1;if(L+=1<<I,a===i&&L>f||a===j&&L>g)return 1;v=M&w,p[v]=H<<24|I<<16|x-q|0}}return 0!==M&&(p[x+M]=D-J<<24|64<<16|0),s.bits=H,0}},{"../utils/common":62}],72:[function(a,b,c){"use strict";b.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible 
version"}},{}],73:[function(a,b,c){"use strict";function d(a){for(var b=a.length;--b>=0;)a[b]=0}function e(a,b,c,d,e){this.static_tree=a,this.extra_bits=b,this.extra_base=c,this.elems=d,this.max_length=e,this.has_stree=a&&a.length}function f(a,b){this.dyn_tree=a,this.max_code=0,this.stat_desc=b}function g(a){return a<256?ia[a]:ia[256+(a>>>7)]}function h(a,b){a.pending_buf[a.pending++]=255&b,a.pending_buf[a.pending++]=b>>>8&255}function i(a,b,c){a.bi_valid>X-c?(a.bi_buf|=b<<a.bi_valid&65535,h(a,a.bi_buf),a.bi_buf=b>>X-a.bi_valid,a.bi_valid+=c-X):(a.bi_buf|=b<<a.bi_valid&65535,a.bi_valid+=c)}function j(a,b,c){i(a,c[2*b],c[2*b+1])}function k(a,b){var c=0;do c|=1&a,a>>>=1,c<<=1;while(--b>0);return c>>>1}function l(a){16===a.bi_valid?(h(a,a.bi_buf),a.bi_buf=0,a.bi_valid=0):a.bi_valid>=8&&(a.pending_buf[a.pending++]=255&a.bi_buf,a.bi_buf>>=8,a.bi_valid-=8)}function m(a,b){var c,d,e,f,g,h,i=b.dyn_tree,j=b.max_code,k=b.stat_desc.static_tree,l=b.stat_desc.has_stree,m=b.stat_desc.extra_bits,n=b.stat_desc.extra_base,o=b.stat_desc.max_length,p=0;for(f=0;f<=W;f++)a.bl_count[f]=0;for(i[2*a.heap[a.heap_max]+1]=0,
c=a.heap_max+1;c<V;c++)d=a.heap[c],f=i[2*i[2*d+1]+1]+1,f>o&&(f=o,p++),i[2*d+1]=f,d>j||(a.bl_count[f]++,g=0,d>=n&&(g=m[d-n]),h=i[2*d],a.opt_len+=h*(f+g),l&&(a.static_len+=h*(k[2*d+1]+g)));if(0!==p){do{for(f=o-1;0===a.bl_count[f];)f--;a.bl_count[f]--,a.bl_count[f+1]+=2,a.bl_count[o]--,p-=2}while(p>0);for(f=o;0!==f;f--)for(d=a.bl_count[f];0!==d;)e=a.heap[--c],e>j||(i[2*e+1]!==f&&(a.opt_len+=(f-i[2*e+1])*i[2*e],i[2*e+1]=f),d--)}}function n(a,b,c){var d,e,f=new Array(W+1),g=0;for(d=1;d<=W;d++)f[d]=g=g+c[d-1]<<1;for(e=0;e<=b;e++){var h=a[2*e+1];0!==h&&(a[2*e]=k(f[h]++,h))}}function o(){var a,b,c,d,f,g=new Array(W+1);for(c=0,d=0;d<Q-1;d++)for(ka[d]=c,a=0;a<1<<ba[d];a++)ja[c++]=d;for(ja[c-1]=d,f=0,d=0;d<16;d++)for(la[d]=f,a=0;a<1<<ca[d];a++)ia[f++]=d;for(f>>=7;d<T;d++)for(la[d]=f<<7,a=0;a<1<<ca[d]-7;a++)ia[256+f++]=d;for(b=0;b<=W;b++)g[b]=0;for(a=0;a<=143;)ga[2*a+1]=8,a++,g[8]++;for(;a<=255;)ga[2*a+1]=9,a++,g[9]++;for(;a<=279;)ga[2*a+1]=7,a++,g[7]++;for(;a<=287;)ga[2*a+1]=8,a++,g[8]++;for(n(ga,S+1,g),a=0;a<T;a++)ha[2*a+1]=5,ha[2*a]=k(a,5);ma=new e(ga,ba,R+1,S,W),na=new e(ha,ca,0,T,W),oa=new e(new Array(0),da,0,U,Y)}function p(a){var b;for(b=0;b<S;b++)a.dyn_ltree[2*b]=0;for(b=0;b<T;b++)a.dyn_dtree[2*b]=0;for(b=0;b<U;b++)a.bl_tree[2*b]=0;a.dyn_ltree[2*Z]=1,a.opt_len=a.static_len=0,a.last_lit=a.matches=0}function q(a){a.bi_valid>8?h(a,a.bi_buf):a.bi_valid>0&&(a.pending_buf[a.pending++]=a.bi_buf),a.bi_buf=0,a.bi_valid=0}function r(a,b,c,d){q(a),d&&(h(a,c),h(a,~c)),G.arraySet(a.pending_buf,a.window,b,c,a.pending),a.pending+=c}function s(a,b,c,d){var e=2*b,f=2*c;return a[e]<a[f]||a[e]===a[f]&&d[b]<=d[c]}function t(a,b,c){for(var d=a.heap[c],e=c<<1;e<=a.heap_len&&(e<a.heap_len&&s(b,a.heap[e+1],a.heap[e],a.depth)&&e++,!s(b,d,a.heap[e],a.depth));)a.heap[c]=a.heap[e],c=e,e<<=1;a.heap[c]=d}function u(a,b,c){var d,e,f,h,k=0;if(0!==a.last_lit)do 
d=a.pending_buf[a.d_buf+2*k]<<8|a.pending_buf[a.d_buf+2*k+1],e=a.pending_buf[a.l_buf+k],k++,0===d?j(a,e,b):(f=ja[e],j(a,f+R+1,b),h=ba[f],0!==h&&(e-=ka[f],i(a,e,h)),d--,f=g(d),j(a,f,c),h=ca[f],0!==h&&(d-=la[f],i(a,d,h)));while(k<a.last_lit);j(a,Z,b)}function v(a,b){var c,d,e,f=b.dyn_tree,g=b.stat_desc.static_tree,h=b.stat_desc.has_stree,i=b.stat_desc.elems,j=-1;for(a.heap_len=0,a.heap_max=V,c=0;c<i;c++)0!==f[2*c]?(a.heap[++a.heap_len]=j=c,a.depth[c]=0):f[2*c+1]=0;for(;a.heap_len<2;)e=a.heap[++a.heap_len]=j<2?++j:0,f[2*e]=1,a.depth[e]=0,a.opt_len--,h&&(a.static_len-=g[2*e+1]);for(b.max_code=j,c=a.heap_len>>1;c>=1;c--)t(a,f,c);e=i;do c=a.heap[1],a.heap[1]=a.heap[a.heap_len--],t(a,f,1),d=a.heap[1],a.heap[--a.heap_max]=c,a.heap[--a.heap_max]=d,f[2*e]=f[2*c]+f[2*d],a.depth[e]=(a.depth[c]>=a.depth[d]?a.depth[c]:a.depth[d])+1,f[2*c+1]=f[2*d+1]=e,a.heap[1]=e++,t(a,f,1);while(a.heap_len>=2);a.heap[--a.heap_max]=a.heap[1],m(a,b),n(f,j,a.bl_count)}function w(a,b,c){var d,e,f=-1,g=b[1],h=0,i=7,j=4;for(0===g&&(i=138,j=3),b[2*(c+1)+1]=65535,d=0;d<=c;d++)e=g,g=b[2*(d+1)+1],++h<i&&e===g||(h<j?a.bl_tree[2*e]+=h:0!==e?(e!==f&&a.bl_tree[2*e]++,a.bl_tree[2*$]++):h<=10?a.bl_tree[2*_]++:a.bl_tree[2*aa]++,h=0,f=e,0===g?(i=138,j=3):e===g?(i=6,j=3):(i=7,j=4))}function x(a,b,c){var d,e,f=-1,g=b[1],h=0,k=7,l=4;for(0===g&&(k=138,l=3),d=0;d<=c;d++)if(e=g,g=b[2*(d+1)+1],!(++h<k&&e===g)){if(h<l){do j(a,e,a.bl_tree);while(0!==--h)}else 0!==e?(e!==f&&(j(a,e,a.bl_tree),h--),j(a,$,a.bl_tree),i(a,h-3,2)):h<=10?(j(a,_,a.bl_tree),i(a,h-3,3)):(j(a,aa,a.bl_tree),i(a,h-11,7));h=0,f=e,0===g?(k=138,l=3):e===g?(k=6,l=3):(k=7,l=4)}}function y(a){var b;for(w(a,a.dyn_ltree,a.l_desc.max_code),w(a,a.dyn_dtree,a.d_desc.max_code),v(a,a.bl_desc),b=U-1;b>=3&&0===a.bl_tree[2*ea[b]+1];b--);return a.opt_len+=3*(b+1)+5+5+4,b}function z(a,b,c,d){var e;for(i(a,b-257,5),i(a,c-1,5),i(a,d-4,4),e=0;e<d;e++)i(a,a.bl_tree[2*ea[e]+1],3);x(a,a.dyn_ltree,b-1),x(a,a.dyn_dtree,c-1)}function A(a){var 
b,c=4093624447;for(b=0;b<=31;b++,c>>>=1)if(1&c&&0!==a.dyn_ltree[2*b])return I;if(0!==a.dyn_ltree[18]||0!==a.dyn_ltree[20]||0!==a.dyn_ltree[26])return J;for(b=32;b<R;b++)if(0!==a.dyn_ltree[2*b])return J;return I}function B(a){pa||(o(),pa=!0),a.l_desc=new f(a.dyn_ltree,ma),a.d_desc=new f(a.dyn_dtree,na),a.bl_desc=new f(a.bl_tree,oa),a.bi_buf=0,a.bi_valid=0,p(a)}function C(a,b,c,d){i(a,(L<<1)+(d?1:0),3),r(a,b,c,!0)}function D(a){i(a,M<<1,3),j(a,Z,ga),l(a)}function E(a,b,c,d){var e,f,g=0;a.level>0?(a.strm.data_type===K&&(a.strm.data_type=A(a)),v(a,a.l_desc),v(a,a.d_desc),g=y(a),e=a.opt_len+3+7>>>3,f=a.static_len+3+7>>>3,f<=e&&(e=f)):e=f=c+5,c+4<=e&&b!==-1?C(a,b,c,d):a.strategy===H||f===e?(i(a,(M<<1)+(d?1:0),3),u(a,ga,ha)):(i(a,(N<<1)+(d?1:0),3),z(a,a.l_desc.max_code+1,a.d_desc.max_code+1,g+1),u(a,a.dyn_ltree,a.dyn_dtree)),p(a),d&&q(a)}function F(a,b,c){return a.pending_buf[a.d_buf+2*a.last_lit]=b>>>8&255,a.pending_buf[a.d_buf+2*a.last_lit+1]=255&b,a.pending_buf[a.l_buf+a.last_lit]=255&c,a.last_lit++,0===b?a.dyn_ltree[2*c]++:(a.matches++,b--,a.dyn_ltree[2*(ja[c]+R+1)]++,a.dyn_dtree[2*g(b)]++),a.last_lit===a.lit_bufsize-1}var G=a("../utils/common"),H=4,I=0,J=1,K=2,L=0,M=1,N=2,O=3,P=258,Q=29,R=256,S=R+1+Q,T=30,U=19,V=2*S+1,W=15,X=16,Y=7,Z=256,$=16,_=17,aa=18,ba=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],ca=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],da=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],ea=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],fa=512,ga=new Array(2*(S+2));d(ga);var ha=new Array(2*T);d(ha);var ia=new Array(fa);d(ia);var ja=new Array(P-O+1);d(ja);var ka=new Array(Q);d(ka);var la=new Array(T);d(la);var ma,na,oa,pa=!1;c._tr_init=B,c._tr_stored_block=C,c._tr_flush_block=E,c._tr_tally=F,c._tr_align=D},{"../utils/common":62}],74:[function(a,b,c){"use strict";function 
d(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}b.exports=d},{}]},{},[10])(10)}); | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/js/libs/zip.min.js | zip.min.js |
// screenfull — a thin cross-browser wrapper around the Fullscreen API.
// Resolves the vendor-prefixed method/property/event names supported by the
// current browser once at load time, then exposes a uniform
// request/exit/toggle interface plus isFullscreen/element/enabled getters.
(function () {
'use strict';
// True when loaded under CommonJS (Node/Browserify); selects export style below.
var isCommonjs = typeof module !== 'undefined' && module.exports;
// Old WebKit requires the ALLOW_KEYBOARD_INPUT flag to permit keyboard input
// while fullscreen; feature-detect its presence on Element.
var keyboardAllowed = typeof Element !== 'undefined' && 'ALLOW_KEYBOARD_INPUT' in Element;
// `fn` ends up either as an object keyed by the STANDARD Fullscreen API names
// (values = the names this browser actually implements), or `false` when no
// variant of the API is available.
var fn = (function () {
var val;
var valLength;
// Each inner array lists the same six API members in a fixed order:
// [request, exit, element, enabled, change-event, error-event].
// The first array holds the unprefixed standard names and doubles as the
// key set for the result object.
var fnMap = [
[
'requestFullscreen',
'exitFullscreen',
'fullscreenElement',
'fullscreenEnabled',
'fullscreenchange',
'fullscreenerror'
],
// new WebKit
[
'webkitRequestFullscreen',
'webkitExitFullscreen',
'webkitFullscreenElement',
'webkitFullscreenEnabled',
'webkitfullscreenchange',
'webkitfullscreenerror'
],
// old WebKit (Safari 5.1)
[
'webkitRequestFullScreen',
'webkitCancelFullScreen',
'webkitCurrentFullScreenElement',
'webkitCancelFullScreen',
'webkitfullscreenchange',
'webkitfullscreenerror'
],
[
'mozRequestFullScreen',
'mozCancelFullScreen',
'mozFullScreenElement',
'mozFullScreenEnabled',
'mozfullscreenchange',
'mozfullscreenerror'
],
[
'msRequestFullscreen',
'msExitFullscreen',
'msFullscreenElement',
'msFullscreenEnabled',
'MSFullscreenChange',
'MSFullscreenError'
]
];
var i = 0;
var l = fnMap.length;
var ret = {};
for (; i < l; i++) {
val = fnMap[i];
// The exit method (index 1) is used as the capability probe for each variant.
if (val && val[1] in document) {
// NOTE: `i` is reused for the inner loop; safe only because we return
// immediately after it completes.
for (i = 0, valLength = val.length; i < valLength; i++) {
// Key by the standard name (fnMap[0]); value is this browser's name.
ret[fnMap[0][i]] = val[i];
}
return ret;
}
}
// No Fullscreen API variant supported.
return false;
})();
var screenfull = {
// Request fullscreen on `elem` (defaults to the whole document element).
request: function (elem) {
var request = fn.requestFullscreen;
elem = elem || document.documentElement;
// Work around Safari 5.1 bug: reports support for
// keyboard in fullscreen even though it doesn't.
// Browser sniffing, since the alternative with
// setTimeout is even worse.
if (/5\.1[\.\d]* Safari/.test(navigator.userAgent)) {
elem[request]();
} else {
// Pass the keyboard flag only on browsers that expose it.
elem[request](keyboardAllowed && Element.ALLOW_KEYBOARD_INPUT);
}
},
// Leave fullscreen mode.
exit: function () {
document[fn.exitFullscreen]();
},
// Enter fullscreen if not currently fullscreen, otherwise exit.
toggle: function (elem) {
if (this.isFullscreen) {
this.exit();
} else {
this.request(elem);
}
},
// Expose the resolved vendor-specific name map for advanced callers
// (e.g. to addEventListener on the correct change/error event names).
raw: fn
};
// API unsupported: export/attach `false` so callers can feature-test,
// and skip defining the property getters below.
if (!fn) {
if (isCommonjs) {
module.exports = false;
} else {
window.screenfull = false;
}
return;
}
Object.defineProperties(screenfull, {
// True while some element is currently fullscreen.
isFullscreen: {
get: function () {
return !!document[fn.fullscreenElement];
}
},
// The element currently displayed fullscreen, or null.
element: {
enumerable: true,
get: function () {
return document[fn.fullscreenElement];
}
},
// Whether the page is allowed to use the Fullscreen API at all.
enabled: {
enumerable: true,
get: function () {
// Coerce to boolean in case of old WebKit
return !!document[fn.fullscreenEnabled];
}
}
});
// Export under CommonJS, otherwise attach to the global window object.
if (isCommonjs) {
module.exports = screenfull;
} else {
window.screenfull = screenfull;
}
})(); | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/js/libs/screenfull.js | screenfull.js |
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.localforage=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],2:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof 
b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(b){return p.reject(a,b)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&"object"==typeof a&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(a){c.status="error",c.value=a}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a(1),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=c=e,e.prototype.catch=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return 
c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},c.resolve=k,c.reject=l,c.all=m,c.race=n},{1:1}],3:[function(a,b,c){(function(b){"use strict";"function"!=typeof b.Promise&&(b.Promise=a(2))}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{2:2}],4:[function(a,b,c){"use strict";function d(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function e(){try{if("undefined"!=typeof indexedDB)return indexedDB;if("undefined"!=typeof webkitIndexedDB)return webkitIndexedDB;if("undefined"!=typeof mozIndexedDB)return mozIndexedDB;if("undefined"!=typeof OIndexedDB)return OIndexedDB;if("undefined"!=typeof msIndexedDB)return msIndexedDB}catch(a){}}function f(){try{if(!ga)return!1;var a="undefined"!=typeof openDatabase&&/(Safari|iPhone|iPad|iPod)/.test(navigator.userAgent)&&!/Chrome/.test(navigator.userAgent)&&!/BlackBerry/.test(navigator.platform),b="function"==typeof fetch&&fetch.toString().indexOf("[native code")!==-1;return(!a||b)&&"undefined"!=typeof indexedDB&&"undefined"!=typeof IDBKeyRange}catch(a){return!1}}function g(){return"function"==typeof openDatabase}function h(){try{return"undefined"!=typeof localStorage&&"setItem"in localStorage&&localStorage.setItem}catch(a){return!1}}function i(a,b){a=a||[],b=b||{};try{return new Blob(a,b)}catch(f){if("TypeError"!==f.name)throw f;for(var c="undefined"!=typeof 
BlobBuilder?BlobBuilder:"undefined"!=typeof MSBlobBuilder?MSBlobBuilder:"undefined"!=typeof MozBlobBuilder?MozBlobBuilder:WebKitBlobBuilder,d=new c,e=0;e<a.length;e+=1)d.append(a[e]);return d.getBlob(b.type)}}function j(a,b){b&&a.then(function(a){b(null,a)},function(a){b(a)})}function k(a,b,c){"function"==typeof b&&a.then(b),"function"==typeof c&&a.catch(c)}function l(a){for(var b=a.length,c=new ArrayBuffer(b),d=new Uint8Array(c),e=0;e<b;e++)d[e]=a.charCodeAt(e);return c}function m(a){return new ja(function(b){var c=a.transaction(ka,"readwrite"),d=i([""]);c.objectStore(ka).put(d,"key"),c.onabort=function(a){a.preventDefault(),a.stopPropagation(),b(!1)},c.oncomplete=function(){var a=navigator.userAgent.match(/Chrome\/(\d+)/),c=navigator.userAgent.match(/Edge\//);b(c||!a||parseInt(a[1],10)>=43)}}).catch(function(){return!1})}function n(a){return"boolean"==typeof ha?ja.resolve(ha):m(a).then(function(a){return ha=a})}function o(a){var b=ia[a.name],c={};c.promise=new ja(function(a){c.resolve=a}),b.deferredOperations.push(c),b.dbReady?b.dbReady=b.dbReady.then(function(){return c.promise}):b.dbReady=c.promise}function p(a){var b=ia[a.name],c=b.deferredOperations.pop();c&&c.resolve()}function q(a,b){return new ja(function(c,d){if(a.db){if(!b)return c(a.db);o(a),a.db.close()}var e=[a.name];b&&e.push(a.version);var f=ga.open.apply(ga,e);b&&(f.onupgradeneeded=function(b){var c=f.result;try{c.createObjectStore(a.storeName),b.oldVersion<=1&&c.createObjectStore(ka)}catch(c){if("ConstraintError"!==c.name)throw c;console.warn('The database "'+a.name+'" has been upgraded from version '+b.oldVersion+" to version "+b.newVersion+', but the storage "'+a.storeName+'" already exists.')}}),f.onerror=function(a){a.preventDefault(),d(f.error)},f.onsuccess=function(){c(f.result),p(a)}})}function r(a){return q(a,!1)}function s(a){return q(a,!0)}function t(a,b){if(!a.db)return!0;var 
c=!a.db.objectStoreNames.contains(a.storeName),d=a.version<a.db.version,e=a.version>a.db.version;if(d&&(a.version!==b&&console.warn('The database "'+a.name+"\" can't be downgraded from version "+a.db.version+" to version "+a.version+"."),a.version=a.db.version),e||c){if(c){var f=a.db.version+1;f>a.version&&(a.version=f)}return!0}return!1}function u(a){return new ja(function(b,c){var d=new FileReader;d.onerror=c,d.onloadend=function(c){var d=btoa(c.target.result||"");b({__local_forage_encoded_blob:!0,data:d,type:a.type})},d.readAsBinaryString(a)})}function v(a){var b=l(atob(a.data));return i([b],{type:a.type})}function w(a){return a&&a.__local_forage_encoded_blob}function x(a){var b=this,c=b._initReady().then(function(){var a=ia[b._dbInfo.name];if(a&&a.dbReady)return a.dbReady});return k(c,a,a),c}function y(a){function b(){return ja.resolve()}var c=this,d={db:null};if(a)for(var e in a)d[e]=a[e];ia||(ia={});var f=ia[d.name];f||(f={forages:[],db:null,dbReady:null,deferredOperations:[]},ia[d.name]=f),f.forages.push(c),c._initReady||(c._initReady=c.ready,c.ready=x);for(var g=[],h=0;h<f.forages.length;h++){var i=f.forages[h];i!==c&&g.push(i._initReady().catch(b))}var j=f.forages.slice(0);return ja.all(g).then(function(){return d.db=f.db,r(d)}).then(function(a){return d.db=a,t(d,c._defaultConfig.version)?s(d):a}).then(function(a){d.db=f.db=a,c._dbInfo=d;for(var b=0;b<j.length;b++){var e=j[b];e!==c&&(e._dbInfo.db=d.db,e._dbInfo.version=d.version)}})}function z(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.get(a);g.onsuccess=function(){var a=g.result;void 0===a&&(a=null),w(a)&&(a=v(a)),b(a)},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function A(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var 
e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.openCursor(),h=1;g.onsuccess=function(){var c=g.result;if(c){var d=c.value;w(d)&&(d=v(d));var e=a(d,c.key,h++);void 0!==e?b(e):c.continue()}else b()},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function B(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=new ja(function(c,e){var f;d.ready().then(function(){return f=d._dbInfo,"[object Blob]"===la.call(b)?n(f.db).then(function(a){return a?b:u(b)}):b}).then(function(b){var d=f.db.transaction(f.storeName,"readwrite"),g=d.objectStore(f.storeName),h=g.put(b,a);null===b&&(b=void 0),d.oncomplete=function(){void 0===b&&(b=null),c(b)},d.onabort=d.onerror=function(){var a=h.error?h.error:h.transaction.error;e(a)}}).catch(e)});return j(e,c),e}function C(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readwrite"),g=f.objectStore(e.storeName),h=g.delete(a);f.oncomplete=function(){b()},f.onerror=function(){d(h.error)},f.onabort=function(){var a=h.error?h.error:h.transaction.error;d(a)}}).catch(d)});return j(d,b),d}function D(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readwrite"),f=e.objectStore(d.storeName),g=f.clear();e.oncomplete=function(){a()},e.onabort=e.onerror=function(){var a=g.error?g.error:g.transaction.error;c(a)}}).catch(c)});return j(c,a),c}function E(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.count();f.onsuccess=function(){a(f.result)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function F(a,b){var c=this,d=new ja(function(b,d){return a<0?void b(null):void c.ready().then(function(){var 
e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=!1,h=f.openCursor();h.onsuccess=function(){var c=h.result;return c?void(0===a?b(c.key):g?b(c.key):(g=!0,c.advance(a))):void b(null)},h.onerror=function(){d(h.error)}}).catch(d)});return j(d,b),d}function G(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.openCursor(),g=[];f.onsuccess=function(){var b=f.result;return b?(g.push(b.key),void b.continue()):void a(g)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function H(a){var b,c,d,e,f,g=.75*a.length,h=a.length,i=0;"="===a[a.length-1]&&(g--,"="===a[a.length-2]&&g--);var j=new ArrayBuffer(g),k=new Uint8Array(j);for(b=0;b<h;b+=4)c=na.indexOf(a[b]),d=na.indexOf(a[b+1]),e=na.indexOf(a[b+2]),f=na.indexOf(a[b+3]),k[i++]=c<<2|d>>4,k[i++]=(15&d)<<4|e>>2,k[i++]=(3&e)<<6|63&f;return j}function I(a){var b,c=new Uint8Array(a),d="";for(b=0;b<c.length;b+=3)d+=na[c[b]>>2],d+=na[(3&c[b])<<4|c[b+1]>>4],d+=na[(15&c[b+1])<<2|c[b+2]>>6],d+=na[63&c[b+2]];return c.length%3===2?d=d.substring(0,d.length-1)+"=":c.length%3===1&&(d=d.substring(0,d.length-2)+"=="),d}function J(a,b){var c="";if(a&&(c=Ea.call(a)),a&&("[object ArrayBuffer]"===c||a.buffer&&"[object ArrayBuffer]"===Ea.call(a.buffer))){var d,e=qa;a instanceof ArrayBuffer?(d=a,e+=sa):(d=a.buffer,"[object Int8Array]"===c?e+=ua:"[object Uint8Array]"===c?e+=va:"[object Uint8ClampedArray]"===c?e+=wa:"[object Int16Array]"===c?e+=xa:"[object Uint16Array]"===c?e+=za:"[object Int32Array]"===c?e+=ya:"[object Uint32Array]"===c?e+=Aa:"[object Float32Array]"===c?e+=Ba:"[object Float64Array]"===c?e+=Ca:b(new Error("Failed to get type for BinaryArray"))),b(e+I(d))}else if("[object Blob]"===c){var f=new FileReader;f.onload=function(){var c=oa+a.type+"~"+I(this.result);b(qa+ta+c)},f.readAsArrayBuffer(a)}else try{b(JSON.stringify(a))}catch(c){console.error("Couldn't convert value into a JSON string: 
",a),b(null,c)}}function K(a){if(a.substring(0,ra)!==qa)return JSON.parse(a);var b,c=a.substring(Da),d=a.substring(ra,Da);if(d===ta&&pa.test(c)){var e=c.match(pa);b=e[1],c=c.substring(e[0].length)}var f=H(c);switch(d){case sa:return f;case ta:return i([f],{type:b});case ua:return new Int8Array(f);case va:return new Uint8Array(f);case wa:return new Uint8ClampedArray(f);case xa:return new Int16Array(f);case za:return new Uint16Array(f);case ya:return new Int32Array(f);case Aa:return new Uint32Array(f);case Ba:return new Float32Array(f);case Ca:return new Float64Array(f);default:throw new Error("Unkown type: "+d)}}function L(a){var b=this,c={db:null};if(a)for(var d in a)c[d]="string"!=typeof a[d]?a[d].toString():a[d];var e=new ja(function(a,d){try{c.db=openDatabase(c.name,String(c.version),c.description,c.size)}catch(a){return d(a)}c.db.transaction(function(e){e.executeSql("CREATE TABLE IF NOT EXISTS "+c.storeName+" (id INTEGER PRIMARY KEY, key unique, value)",[],function(){b._dbInfo=c,a()},function(a,b){d(b)})})});return c.serializer=Fa,e}function M(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName+" WHERE key = ? 
LIMIT 1",[a],function(a,c){var d=c.rows.length?c.rows.item(0).value:null;d&&(d=e.serializer.deserialize(d)),b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function N(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName,[],function(c,d){for(var f=d.rows,g=f.length,h=0;h<g;h++){var i=f.item(h),j=i.value;if(j&&(j=e.serializer.deserialize(j)),j=a(j,i.key,h+1),void 0!==j)return void b(j)}b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function O(a,b,c,d){var e=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var f=new ja(function(f,g){e.ready().then(function(){void 0===b&&(b=null);var h=b,i=e._dbInfo;i.serializer.serialize(b,function(b,j){j?g(j):i.db.transaction(function(c){c.executeSql("INSERT OR REPLACE INTO "+i.storeName+" (key, value) VALUES (?, ?)",[a,b],function(){f(h)},function(a,b){g(b)})},function(b){if(b.code===b.QUOTA_ERR){if(d>0)return void f(O.apply(e,[a,h,c,d-1]));g(b)}})})}).catch(g)});return j(f,c),f}function P(a,b,c){return O.apply(this,[a,b,c,1])}function Q(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("DELETE FROM "+e.storeName+" WHERE key = ?",[a],function(){b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function R(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("DELETE FROM "+d.storeName,[],function(){a()},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function S(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT COUNT(key) as c FROM "+d.storeName,[],function(b,c){var d=c.rows.item(0).c;a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function 
T(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT key FROM "+e.storeName+" WHERE id = ? LIMIT 1",[a+1],function(a,c){var d=c.rows.length?c.rows.item(0).key:null;b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function U(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT key FROM "+d.storeName,[],function(b,c){for(var d=[],e=0;e<c.rows.length;e++)d.push(c.rows.item(e).key);a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function V(a){var b=this,c={};if(a)for(var d in a)c[d]=a[d];return c.keyPrefix=c.name+"/",c.storeName!==b._defaultConfig.storeName&&(c.keyPrefix+=c.storeName+"/"),b._dbInfo=c,c.serializer=Fa,ja.resolve()}function W(a){var b=this,c=b.ready().then(function(){for(var a=b._dbInfo.keyPrefix,c=localStorage.length-1;c>=0;c--){var d=localStorage.key(c);0===d.indexOf(a)&&localStorage.removeItem(d)}});return j(c,a),c}function X(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo,d=localStorage.getItem(b.keyPrefix+a);return d&&(d=b.serializer.deserialize(d)),d});return j(d,b),d}function Y(a,b){var c=this,d=c.ready().then(function(){for(var b=c._dbInfo,d=b.keyPrefix,e=d.length,f=localStorage.length,g=1,h=0;h<f;h++){var i=localStorage.key(h);if(0===i.indexOf(d)){var j=localStorage.getItem(i);if(j&&(j=b.serializer.deserialize(j)),j=a(j,i.substring(e),g++),void 0!==j)return j}}});return j(d,b),d}function Z(a,b){var c=this,d=c.ready().then(function(){var b,d=c._dbInfo;try{b=localStorage.key(a)}catch(a){b=null}return b&&(b=b.substring(d.keyPrefix.length)),b});return j(d,b),d}function $(a){var b=this,c=b.ready().then(function(){for(var 
a=b._dbInfo,c=localStorage.length,d=[],e=0;e<c;e++)0===localStorage.key(e).indexOf(a.keyPrefix)&&d.push(localStorage.key(e).substring(a.keyPrefix.length));return d});return j(c,a),c}function _(a){var b=this,c=b.keys().then(function(a){return a.length});return j(c,a),c}function aa(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo;localStorage.removeItem(b.keyPrefix+a)});return j(d,b),d}function ba(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=d.ready().then(function(){void 0===b&&(b=null);var c=b;return new ja(function(e,f){var g=d._dbInfo;g.serializer.serialize(b,function(b,d){if(d)f(d);else try{localStorage.setItem(g.keyPrefix+a,b),e(c)}catch(a){"QuotaExceededError"!==a.name&&"NS_ERROR_DOM_QUOTA_REACHED"!==a.name||f(a),f(a)}})})});return j(e,c),e}function ca(a,b){a[b]=function(){var c=arguments;return a.ready().then(function(){return a[b].apply(a,c)})}}function da(){for(var a=1;a<arguments.length;a++){var b=arguments[a];if(b)for(var c in b)b.hasOwnProperty(c)&&(Oa(b[c])?arguments[0][c]=b[c].slice():arguments[0][c]=b[c])}return arguments[0]}function ea(a){for(var b in Ja)if(Ja.hasOwnProperty(b)&&Ja[b]===a)return!0;return!1}var fa="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},ga=e();"undefined"==typeof Promise&&a(3);var 
ha,ia,ja=Promise,ka="local-forage-detect-blob-support",la=Object.prototype.toString,ma={_driver:"asyncStorage",_initStorage:y,iterate:A,getItem:z,setItem:B,removeItem:C,clear:D,length:E,key:F,keys:G},na="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",oa="~~local_forage_type~",pa=/^~~local_forage_type~([^~]+)~/,qa="__lfsc__:",ra=qa.length,sa="arbf",ta="blob",ua="si08",va="ui08",wa="uic8",xa="si16",ya="si32",za="ur16",Aa="ui32",Ba="fl32",Ca="fl64",Da=ra+sa.length,Ea=Object.prototype.toString,Fa={serialize:J,deserialize:K,stringToBuffer:H,bufferToString:I},Ga={_driver:"webSQLStorage",_initStorage:L,iterate:N,getItem:M,setItem:P,removeItem:Q,clear:R,length:S,key:T,keys:U},Ha={_driver:"localStorageWrapper",_initStorage:V,iterate:Y,getItem:X,setItem:ba,removeItem:aa,clear:W,length:_,key:Z,keys:$},Ia={},Ja={INDEXEDDB:"asyncStorage",LOCALSTORAGE:"localStorageWrapper",WEBSQL:"webSQLStorage"},Ka=[Ja.INDEXEDDB,Ja.WEBSQL,Ja.LOCALSTORAGE],La=["clear","getItem","iterate","key","keys","length","removeItem","setItem"],Ma={description:"",driver:Ka.slice(),name:"localforage",size:4980736,storeName:"keyvaluepairs",version:1},Na={};Na[Ja.INDEXEDDB]=f(),Na[Ja.WEBSQL]=g(),Na[Ja.LOCALSTORAGE]=h();var Oa=Array.isArray||function(a){return"[object Array]"===Object.prototype.toString.call(a)},Pa=function(){function a(b){d(this,a),this.INDEXEDDB=Ja.INDEXEDDB,this.LOCALSTORAGE=Ja.LOCALSTORAGE,this.WEBSQL=Ja.WEBSQL,this._defaultConfig=da({},Ma),this._config=da({},this._defaultConfig,b),this._driverSet=null,this._initDriver=null,this._ready=!1,this._dbInfo=null,this._wrapLibraryMethodsWithReady(),this.setDriver(this._config.driver).catch(function(){})}return a.prototype.config=function(a){if("object"===("undefined"==typeof a?"undefined":fa(a))){if(this._ready)return new Error("Can't call config() after localforage has been used.");for(var b in a){if("storeName"===b&&(a[b]=a[b].replace(/\W/g,"_")),"version"===b&&"number"!=typeof a[b])return new Error("Database version must be a 
number.");this._config[b]=a[b]}return!("driver"in a&&a.driver)||this.setDriver(this._config.driver)}return"string"==typeof a?this._config[a]:this._config},a.prototype.defineDriver=function(a,b,c){var d=new ja(function(b,c){try{var d=a._driver,e=new Error("Custom driver not compliant; see https://mozilla.github.io/localForage/#definedriver"),f=new Error("Custom driver name already in use: "+a._driver);if(!a._driver)return void c(e);if(ea(a._driver))return void c(f);for(var g=La.concat("_initStorage"),h=0;h<g.length;h++){var i=g[h];if(!i||!a[i]||"function"!=typeof a[i])return void c(e)}var j=ja.resolve(!0);"_support"in a&&(j=a._support&&"function"==typeof a._support?a._support():ja.resolve(!!a._support)),j.then(function(c){Na[d]=c,Ia[d]=a,b()},c)}catch(a){c(a)}});return k(d,b,c),d},a.prototype.driver=function(){return this._driver||null},a.prototype.getDriver=function(a,b,c){var d=this,e=ja.resolve().then(function(){if(!ea(a)){if(Ia[a])return Ia[a];throw new Error("Driver not found.")}switch(a){case d.INDEXEDDB:return ma;case d.LOCALSTORAGE:return Ha;case d.WEBSQL:return Ga}});return k(e,b,c),e},a.prototype.getSerializer=function(a){var b=ja.resolve(Fa);return k(b,a),b},a.prototype.ready=function(a){var b=this,c=b._driverSet.then(function(){return null===b._ready&&(b._ready=b._initDriver()),b._ready});return k(c,a,a),c},a.prototype.setDriver=function(a,b,c){function d(){g._config.driver=g.driver()}function e(a){return g._extend(a),d(),g._ready=g._initStorage(g._config),g._ready}function f(a){return function(){function b(){for(;c<a.length;){var f=a[c];return c++,g._dbInfo=null,g._ready=null,g.getDriver(f).then(e).catch(b)}d();var h=new Error("No available storage method found.");return g._driverSet=ja.reject(h),g._driverSet}var c=0;return b()}}var g=this;Oa(a)||(a=[a]);var h=this._getSupportedDrivers(a),i=null!==this._driverSet?this._driverSet.catch(function(){return ja.resolve()}):ja.resolve();return this._driverSet=i.then(function(){var a=h[0];return 
g._dbInfo=null,g._ready=null,g.getDriver(a).then(function(a){g._driver=a._driver,d(),g._wrapLibraryMethodsWithReady(),g._initDriver=f(h)})}).catch(function(){d();var a=new Error("No available storage method found.");return g._driverSet=ja.reject(a),g._driverSet}),k(this._driverSet,b,c),this._driverSet},a.prototype.supports=function(a){return!!Na[a]},a.prototype._extend=function(a){da(this,a)},a.prototype._getSupportedDrivers=function(a){for(var b=[],c=0,d=a.length;c<d;c++){var e=a[c];this.supports(e)&&b.push(e)}return b},a.prototype._wrapLibraryMethodsWithReady=function(){for(var a=0;a<La.length;a++)ca(this,La[a])},a.prototype.createInstance=function(b){return new a(b)},a}(),Qa=new Pa;b.exports=Qa},{3:3}]},{},[4])(4)}); | zizhu-zhangxiansheng-gongzhonggao-beifen-vol2 | /zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0.tar.gz/zizhu-zhangxiansheng-gongzhonggao-beifen-vol2-2022.10.10.0/ZizhuZhangxianshengGongzhonggaoBeifenVol2/js/libs/localforage.min.js | localforage.min.js |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float): mean value of the distribution
        stdev (float): standard deviation of the distribution
        data (list of floats): data extracted from a data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Calculate the arithmetic mean of the data set.

        Returns:
            float: mean of the data set (also stored on ``self.mean``)
        """
        self.mean = 1.0 * sum(self.data) / len(self.data)
        return self.mean

    def calculate_stdev(self, sample=True):
        """Calculate the standard deviation of the data set.

        Args:
            sample (bool): True for the sample estimate (n - 1 divisor),
                False for the population estimate (n divisor)

        Returns:
            float: standard deviation (also stored on ``self.stdev``)
        """
        n = len(self.data) - 1 if sample else len(self.data)
        mean = self.calculate_mean()
        squared_diffs = sum((d - mean) ** 2 for d in self.data)
        self.stdev = math.sqrt(squared_diffs / n)
        return self.stdev

    def plot_histogram(self):
        """Plot a histogram of the instance's data with matplotlib."""
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function of the Gaussian distribution.

        Args:
            x (float): point at which to evaluate the density

        Returns:
            float: probability density at ``x``
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces = 50):
        """Plot the normalized data histogram together with the fitted pdf.

        Args:
            n_spaces (int): number of points at which to sample the pdf

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)
        # evenly spaced sample points across the observed data range
        interval = 1.0 * (max_range - min_range) / n_spaces
        x = []
        y = []
        for i in range(n_spaces):
            tmp = min_range + interval * i
            x.append(tmp)
            y.append(self.pdf(tmp))
        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # BUG FIX: the original labelled axes[0] a second time here,
        # leaving the pdf subplot without a y-axis label.
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Add together two independent Gaussian distributions.

        Args:
            other (Gaussian): Gaussian instance to add

        Returns:
            Gaussian: distribution of the sum of the two variables
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # variances add for independent random variables
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Return a human readable summary of the Gaussian instance."""
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float): mean value of the distribution
        stdev (float): standard deviation of the distribution
        data (list of floats): data extracted from a data file
        p (float): probability of an event occurring
        n (int): number of trials
    """

    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Calculate the mean ``n * p`` from p and n.

        Returns:
            float: mean of the distribution (also stored on ``self.mean``)
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Calculate the standard deviation ``sqrt(n * p * (1 - p))``.

        Returns:
            float: standard deviation (also stored on ``self.stdev``)
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Re-estimate p and n from ``self.data`` (a list of 0/1 outcomes)
        and refresh the mean and standard deviation accordingly.
        """
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()

    def plot_bar(self):
        """Plot a bar chart of the expected 0/1 outcome counts."""
        plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability mass function of the binomial distribution.

        (The original docstring wrongly described this as a Gaussian pdf.)

        Args:
            k (int): number of successes

        Returns:
            float: probability of observing exactly ``k`` successes
        """
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Plot the pmf of the binomial distribution over outcomes 0..n.

        Returns:
            list: x values for the pmf plot
            list: y values for the pmf plot
        """
        x = []
        y = []
        for k in range(self.n + 1):
            x.append(k)
            y.append(self.pdf(k))
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x, y

    def __add__(self, other):
        """Add together two Binomial distributions with equal p.

        Args:
            other (Binomial): Binomial instance to add

        Returns:
            Binomial: distribution with ``n = n1 + n2`` and the common p

        Raises:
            AssertionError: if the two p values differ
        """
        # the original wrapped this assert in a redundant
        # ``try: ... except AssertionError: raise``
        assert self.p == other.p, 'p values are not equal'
        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Return a human readable summary of the Binomial instance."""
        return "mean {}, standard deviation {}, p {}, n {}".\
        format(self.mean, self.stdev, self.p, self.n)
from collections import deque
from datetime import datetime
import time
import cv2
__all__ = ['FPSRealTime', 'FPS', 'MyTimer', 'runtime']
class FPSRealTime(object):
    """Rolling frames-per-second estimator based on cv2 tick counters.

    Keeps the durations of the last ``buffer_len`` calls and reports the
    FPS implied by their average.
    """

    def __init__(self, buffer_len=10):
        # last tick seen; the first get_fps() measures from construction time
        self._start_tick = cv2.getTickCount()
        # milliseconds per tick
        self._freq = 1000.0 / cv2.getTickFrequency()
        # sliding window of inter-call durations (ms)
        self._difftimes = deque(maxlen=buffer_len)

    def get_fps(self, number=2):
        """Record the time since the previous call and return the smoothed
        FPS, rounded to ``number`` decimal places."""
        current_tick = cv2.getTickCount()
        different_time = (current_tick - self._start_tick) * self._freq
        self._start_tick = current_tick
        self._difftimes.append(different_time)
        fps = 1000.0 / (sum(self._difftimes) / len(self._difftimes))
        fps_rounded = round(fps, number)
        return fps_rounded
class FPS:
    """Frame counter measuring the average FPS between start() and stop()."""

    def __init__(self):
        # store the start time, end time, and total number of frames
        # that were examined between the start and end intervals
        self._start = None
        self._end = None
        self._numFrames = 0

    def start(self):
        """Start the timer and return self so calls can be chained."""
        # BUG FIX: the module does ``from datetime import datetime``, so the
        # original ``datetime.datetime.now()`` raised AttributeError.
        self._start = datetime.now()
        return self

    def stop(self):
        """Stop the timer."""
        self._end = datetime.now()

    def update(self):
        """Increment the number of frames examined during the interval."""
        self._numFrames += 1

    def elapsed(self):
        """Return the number of seconds between start() and stop()."""
        return (self._end - self._start).total_seconds()

    def get_fps(self):
        """Return the (approximate) frames per second, rounded to 2 digits."""
        return round(self._numFrames / self.elapsed(), 2)
class MyTimer(object):
    """Simple wall-clock timer with restartable intervals.

    ``elapsed()`` measures from the last restart; ``total_elapsed()``
    measures from construction time.
    """

    # seconds -> unit multipliers, shared by all conversions
    _UNIT_FACTORS = {'us': 1e6, 'ms': 1e3, 's': 1.0, 'min': 1.0 / 60}

    def __init__(self):
        self.first_start_time = self.restart()

    def restart(self):
        """Reset the interval start point and return it (epoch seconds)."""
        self.start_time = time.time()
        return self.start_time

    @classmethod
    def _convert(cls, seconds, unit):
        """Convert a duration in seconds to *unit*.

        Factored out: the original duplicated this if/elif ladder in both
        ``elapsed()`` and ``total_elapsed()``.
        """
        assert unit in ('us', 'ms', 's', 'min')
        return seconds * cls._UNIT_FACTORS[unit]

    def elapsed(self, restart=False, unit='ms'):
        """Return the time since the last restart, in *unit*.

        Args:
            restart (bool): if True, restart the interval after measuring
            unit (str): one of 'us', 'ms', 's', 'min'
        """
        duration = self._convert(time.time() - self.start_time, unit)
        if restart:
            self.restart()
        return duration

    def log(self, tip='Elapsed time', unit='ms', reset=False):
        """Print and return the elapsed time rounded to 3 decimals."""
        duration = round(self.elapsed(reset, unit), 3)
        print('{}: {}{}'.format(tip, duration, unit))
        return duration

    def rlog(self, tip='Elapsed time'):
        """Shorthand for ``log`` in milliseconds with an interval reset."""
        return self.log(unit='ms', reset=True, tip=tip)

    def total_elapsed(self, unit='ms'):
        """Return the time since this timer was constructed, in *unit*."""
        return self._convert(time.time() - self.first_start_time, unit)
MYTIMER_ = MyTimer()  # module-level timer shared by the @runtime decorator


def runtime(func):
    """Decorator that logs the wall-clock run time of each call to *func*."""
    from functools import wraps

    @wraps(func)  # BUG FIX: preserve the wrapped function's name/docstring
    def wrapper(*args, **kw):
        MYTIMER_.restart()
        ret = func(*args, **kw)
        MYTIMER_.rlog(f'func "{func.__name__}" run time')
        return ret
    return wrapper
def main():
    # Small self-test: time a summation loop and print the duration
    # in two different units.
    timer = MyTimer()
    sum = 0
    for i in range(int(10e6)):
        sum += i
    # timer.log(unit='us', reset=False)
    timer.log(unit='ms', reset=False)
    timer.log(unit='s', reset=False)
    # timer.log(unit='min', reset=False)
if __name__ == '__main__':
    # Script entry point: run the demo and report total wall-clock time.
    start = datetime.now()
    print("Start time is {}".format(start))
    main()
    end = datetime.now()
    print("End time is {}".format(end))
    print("\nTotal running time is {}s".format((end - start).seconds))
    print("\nCongratulations!!!")
import logging
import os
import socket
import time
from datetime import datetime
def is_chinese(uchar):
    """Return True if *uchar* is a CJK unified ideograph (U+4E00..U+9FA5)."""
    return u'\u4e00' <= uchar <= u'\u9fa5'
def is_number(uchar):
    """Return True if *uchar* is a half-width (ASCII) digit."""
    return u'\u0030' <= uchar <= u'\u0039'
def is_Qnumber(uchar):
    """Return True if *uchar* is a full-width digit (U+FF10..U+FF19)."""
    return u'\uff10' <= uchar <= u'\uff19'
def is_alphabet(uchar):
    """Return True if *uchar* is a half-width (ASCII) English letter."""
    return u'\u0041' <= uchar <= u'\u005a' or u'\u0061' <= uchar <= u'\u007a'
def is_Qalphabet(uchar):
    """Return True if *uchar* is a full-width English letter."""
    return u'\uff21' <= uchar <= u'\uff3a' or u'\uff41' <= uchar <= u'\uff5a'
def is_other(uchar):
    """Return True if *uchar* is neither a Chinese character, an ASCII digit,
    nor an ASCII letter."""
    return not (is_chinese(uchar) or is_number(uchar) or is_alphabet(uchar))
def Q2B(uchar):
    """Convert a single full-width character to its half-width form.

    Characters that do not map to a printable half-width character are
    returned unchanged.
    """
    code = ord(uchar)
    if code == 0x3000:  # ideographic space -> ASCII space
        code = 0x0020
    else:
        code -= 0xfee0
    if not 0x0020 <= code <= 0x7e:
        return uchar
    return chr(code)
def B2Q(uchar):
    """Convert a single half-width character to its full-width form.

    Characters outside the printable ASCII range are returned unchanged.
    """
    code = ord(uchar)
    if not 0x0020 <= code <= 0x7e:
        return uchar
    # space is special; all other printable ASCII shift by 0xfee0
    code = 0x3000 if code == 0x0020 else code + 0xfee0
    return chr(code)
def stringQ2B(ustring):
    """Convert every full-width character in *ustring* to half-width."""
    return "".join(map(Q2B, ustring))
def stringpartQ2B(ustring):
    """Convert only full-width digits and letters in *ustring* to half-width."""
    converted = []
    for uchar in ustring:
        if is_Qnumber(uchar) or is_Qalphabet(uchar):
            converted.append(Q2B(uchar))
        else:
            converted.append(uchar)
    return "".join(converted)
class AverageMeter(object):
    """Track the latest value, running sum, count and average of a metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count
def setup_logger(logger_name, log_root=None, log_file_save_basename=None, level=logging.INFO, screen=True,
                 tofile=False, msecs=False):
    """Create and configure a named logger.

    Args:
        logger_name: name passed to ``logging.getLogger``.
        log_root: directory for the log file (used only when ``tofile``).
        log_file_save_basename: log file name; defaults to a timestamped name.
        level: logging level for the logger.
        screen: attach a console (stream) handler.
        tofile: attach a file handler (requires ``log_root``).
        msecs: include milliseconds in the timestamp format.

    Returns:
        logging.Logger: the configured logger.
    """
    lg = logging.getLogger(logger_name)
    if msecs:
        fmt = '[%(levelname)s] [%(asctime)s.%(msecs)03d] %(message)s'
        datefmt = '%Y-%m-%d %H:%M:%S'
    else:
        fmt = '[%(levelname)s] [%(asctime)s] %(message)s'
        datefmt = '%Y-%m-%d %I:%M:%S %p'
    formatter = logging.Formatter(fmt, datefmt=datefmt)
    lg.setLevel(level)
    if tofile and log_root:
        if not os.path.exists(log_root):
            os.makedirs(log_root)
        basename = log_file_save_basename or f'{get_time_str()}.log'
        file_handler = logging.FileHandler(os.path.join(log_root, basename), mode='a')
        file_handler.setFormatter(formatter)
        lg.addHandler(file_handler)
    if screen:
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        lg.addHandler(stream_handler)
    return lg
def get_time_int():
    """Return the current local time as an integer like 20240131235959."""
    now = time.localtime(time.time())
    return int(time.strftime("%Y%m%d%H%M%S", now))
def get_time_str():
    """Return the current local time formatted as 'YYYY-MM-DD-HH-MM-SS'.

    (A commented-out older variant using the "%m%d%H%M%S%Y" format was
    removed.)
    """
    return time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time.time()))
def get_host_ip():
    """Return the primary IP address of this machine.

    "Connects" a UDP socket to a public address (no packet is actually sent
    for a UDP connect) and reads the local address chosen by the routing
    table.

    Returns:
        str: the local IP address.
    """
    # BUG FIX: the original created the socket inside ``try`` but referenced
    # ``s`` in ``finally``; if socket() itself failed, that raised NameError.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(('8.8.8.8', 80))
        ip = s.getsockname()[0]
    finally:
        s.close()
    return ip
def main():
    # Smoke test: build a console logger with millisecond timestamps.
    mylogger = setup_logger('test', msecs=True)
    mylogger.info('tttt')
if __name__ == '__main__':
    # Script entry point: run the demo and report total wall-clock time.
    start = datetime.now()
    print("Start time is {}".format(start))
    main()
    end = datetime.now()
    print("End time is {}".format(end))
    print("\nTotal running time is {}s".format((end - start).seconds))
    print("\nCongratulations!!!")
import binascii
import glob
import hashlib
import os
import re
from datetime import datetime
# import fitz
# from timer import MyTimer
def md5(fname):
    """Return the hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as fh:
        while True:
            chunk = fh.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def get_image_md5(image_arr):
    """Return the hex MD5 digest of an image array's raw bytes."""
    raw_bytes = image_arr.tobytes()
    digest = hashlib.md5(raw_bytes).digest()
    return binascii.hexlify(digest).decode()
def makedirs_if_not_exists(dirname):
    """Create *dirname* (including parents) if it does not already exist.

    Raises:
        AssertionError: if a regular file with the same name exists.
    """
    dirname = os.path.abspath(dirname)
    assert not os.path.isfile(dirname), '‘{}’ 存在同名文件!'.format(dirname)
    # exist_ok avoids the check-then-create race of the original
    # ``if not exists: makedirs`` pattern.
    os.makedirs(dirname, exist_ok=True)
def makedirs(dirname):
    # Short alias for makedirs_if_not_exists().
    return makedirs_if_not_exists(dirname)
def remove_file_path_prefix(file_path: str, prefix: str) -> str:
    """Strip the leading directory *prefix* (plus its trailing slash) from
    *file_path*.

    BUG FIX: the original used ``str.replace``, which removed *every*
    occurrence of the prefix anywhere in the path; only a leading
    occurrence is removed now.  Paths that do not start with the prefix
    are returned unchanged.
    """
    if not prefix.endswith('/'):
        prefix += '/'
    if file_path.startswith(prefix):
        return file_path[len(prefix):]
    return file_path
def delete_earlier_model(root, file_name, keeps=3, log=True):
    """Keep the *keeps* most recently created files matching the glob
    *file_name* under *root* and delete the rest.

    Args:
        root: directory to search in.
        file_name: glob pattern (e.g. ``'*.pth'``).
        keeps: how many of the newest files to keep.
        log: print a line for each deleted file.
    """
    matches = sorted(glob.glob(os.path.join(root, file_name)),
                     key=os.path.getctime, reverse=True)
    stale = matches[keeps:]
    if stale and log:
        print('Deleting files in {} ...'.format(root))
    for path in stale:
        os.remove(path)
        if log:
            print("File ‘{}’ have been deleted.".format(os.path.basename(path)))
def delete_0_dir(dir_root):
    """
    Delete empty sub-directories directly under *dir_root*.

    A sub-directory containing only a single ``logger_*.log`` file is also
    treated as empty: the log file is removed first, then the directory.

    :param dir_root: directory whose immediate children are inspected
    :return: None
    """
    root, dirs, _ = next(os.walk(dir_root))
    for dir in dirs:
        dir_path = os.path.join(root, dir)
        files_ = os.listdir(dir_path)
        len_ = len(files_)
        if len_ == 0:
            os.rmdir(dir_path)  # rmdir only succeeds on an empty directory
            print("目录 '{}' 已被删除。".format(dir))
        elif len_ == 1:
            if files_[0][:7] == 'logger_' and files_[0].endswith('.log'):
                os.remove(os.path.join(dir_path, files_[0]))
                os.rmdir(dir_path)  # rmdir only succeeds on an empty directory
                print("目录 '{}' 已被删除。".format(dir))
def get_file_path_list(dir_path, ext=None):
    """Recursively collect all file paths under *dir_path*.

    Args:
        dir_path: root directory to walk.
        ext: optional suffix; matched as the regex ``.*{ext}$`` against
            each file name (so a leading '.' matches any character, as in
            the original implementation).

    Returns:
        list[str]: the collected file paths.
    """
    pattern = re.compile(r".*{}$".format(ext)) if ext else None
    file_path_list = []
    for root, dirs, files in os.walk(dir_path):
        for name in files:
            if pattern is not None and not pattern.search(name):
                continue
            file_path_list.append(os.path.join(root, name))
    return file_path_list
# def pdf2image_path(pdf_path, save_dir):
# mytimer = MyTimer()
# save_dir = os.path.join(save_dir, os.path.splitext(os.path.basename(pdf_path))[0])
# if not os.path.exists(save_dir):
# os.makedirs(save_dir)
# rotate = int(0)
# zoom = 2
# trans = fitz.Matrix(zoom, zoom).preRotate(rotate)
# save_path_lst = []
# with fitz.open(pdf_path) as pdf:
# for i in range(pdf.pageCount):
# pm = pdf[i].getPixmap(matrix=trans, alpha=False)
# image_save_path = os.path.join(save_dir, f'{i:03d}.png')
# pm.writePNG(image_save_path)
# save_path_lst.append(image_save_path)
# mytimer.log(f'convert {pdf_path} to images time', unit='s')
# return save_path_lst
def main():
    # Placeholder entry point.
    pass
if __name__ == '__main__':
    # Script entry point: run main() and report total wall-clock time.
    start = datetime.now()
    print("Start time is {}".format(start))
    main()
    end = datetime.now()
    print("End time is {}".format(end))
    print("\nTotal running time is {}s".format((end - start).seconds))
    print("\nCongratulations!!!")
from datetime import datetime
import threading
from multiprocessing import Process
import math
class MyThread(threading.Thread):
    """Thread subclass whose target's return value can be read via
    ``get_result()`` after ``join()``."""

    def __init__(self, target, args=()):
        super(MyThread, self).__init__()
        self.func = target
        self.args = args

    def run(self):
        # store the target's return value for the caller to fetch later
        self.result = self.func(*self.args)

    def get_result(self):
        """Return the target's result, or None if it is not available yet
        (thread not run, or the target raised before returning)."""
        return getattr(self, 'result', None)
class MyProcess(Process):
    """Process subclass whose target's return value can be read via
    ``get_result()`` after ``join()``.

    BUG FIX (the source's TODO): the original stored the result as an
    attribute inside ``run()``, but that attribute only exists in the
    child process, so ``get_result()`` in the parent always returned
    None.  A ``multiprocessing.Queue`` now carries the result back.
    """

    def __init__(self, target, args=()):
        super(MyProcess, self).__init__()
        from multiprocessing import Queue
        self.func = target
        self.args = args
        self._result_queue = Queue()

    def run(self):
        # executed in the child process: push the result to the parent
        self._result_queue.put(self.func(*self.args))

    def get_result(self):
        """Return the child's result, or None if it is not available
        (child not finished, or it raised before producing a result)."""
        try:
            return self._result_queue.get_nowait()
        except Exception:
            return None
def split_list(l, n):
    """Split *l* into at most *n* consecutive, roughly equal chunks.

    The last chunk may be shorter.  An empty list yields an empty result
    (the original raised ZeroDivisionError on empty input).

    Args:
        l (list): the list to split.
        n (int): desired number of chunks; capped at ``len(l)``.

    Returns:
        list[list]: the chunks, in original order.
    """
    assert isinstance(l, list) and isinstance(n, int)
    if not l:
        return []
    n = min(n, len(l))
    step = int(math.ceil(len(l) / n))
    return [l[i:i + step] for i in range(0, len(l), step)]
def multi_thread_task(target_func, args_lst, workers):
    """Split the work into *workers* chunks and run *target_func* on each
    chunk in its own thread.

    Args:
        target_func: the task to execute.
        args_lst: each element is either a list (split across workers;
            all list elements must have equal length) or a scalar that is
            passed to every worker unchanged.
        workers: number of threads.

    Returns:
        list[MyThread]: the joined threads (results via ``get_result()``).
    """
    chunked_args = [split_list(arg, workers) if isinstance(arg, list) else arg
                    for arg in args_lst]
    threads = []
    for idx in range(len(chunked_args[0])):
        call_args = tuple(arg[idx] if isinstance(arg, list) else arg
                          for arg in chunked_args)
        worker = MyThread(target=target_func, args=call_args)
        threads.append(worker)
        worker.start()
    for worker in threads:
        worker.join()
    return threads
def multi_process_task(target_func, args_lst, workers):
    """Split the work into *workers* chunks and run *target_func* on each
    chunk in its own process.

    Args:
        target_func: the task to execute.
        args_lst: each element is either a list (split across workers;
            all list elements must have equal length) or a scalar that is
            passed to every worker unchanged.
        workers: number of processes.

    Returns:
        list[MyProcess]: the joined processes.
    """
    chunked_args = [split_list(arg, workers) if isinstance(arg, list) else arg
                    for arg in args_lst]
    processes = []
    for idx in range(len(chunked_args[0])):
        call_args = tuple(arg[idx] if isinstance(arg, list) else arg
                          for arg in chunked_args)
        worker = MyProcess(target=target_func, args=call_args)
        processes.append(worker)
        worker.start()
    for worker in processes:
        worker.join()
    return processes
def main():
    # Placeholder entry point.
    pass
if __name__ == '__main__':
    # Script entry point: run main() and report total wall-clock time.
    start = datetime.now()
    print("Start time is {}".format(start))
    main()
    end = datetime.now()
    print("End time is {}".format(end))
    print("\nTotal running time is {}s".format((end - start).seconds))
    print("\nCongratulations!!!")
import _queue
import logging
import os
import time
from queue import Queue
from threading import Thread
import cv2
from .files import makedirs
from .helper import get_time_str, setup_logger
from .timer import MyTimer, FPSRealTime
__all__ = ['FrameInfo', 'VideoReader']
class FrameInfo(object):
    """Container bundling a video frame with its index, capture timestamp,
    and an optional downstream processing result."""

    def __init__(self, image, frame_idx=None, frame_elapsed_ms=None):
        self.image = image
        self.frame_idx = frame_idx
        self.frame_elapsed_ms = frame_elapsed_ms
        self.process_ret = None

    def get_image(self):
        """Return the stored frame image."""
        return self.image

    def set_image(self, image):
        """Replace the stored frame image."""
        self.image = image

    def get_frame_idx(self):
        """Return the frame's index within the stream."""
        return self.frame_idx

    def get_frame_elapsed_s(self):
        """Return the frame timestamp in seconds."""
        return self.frame_elapsed_ms / 1000

    def get_frame_elapsed_ms(self):
        """Return the frame timestamp in milliseconds."""
        return self.frame_elapsed_ms

    def set_ret(self, result):
        """Attach a processing result to this frame."""
        self.process_ret = result

    def get_ret(self):
        """Return the attached processing result (None if unset)."""
        return self.process_ret
class VideoReader(object):
    """Threaded reader for an OpenCV video source.

    ``run()`` continuously grabs frames from ``cv2.VideoCapture`` and pushes
    :class:`FrameInfo` objects onto a queue; convenience methods display,
    record, or dump the stream.
    """

    def __init__(self, video_input_param, auto_drop_frame=True, skip_frames=0, reload_video=True, log_name='demo'):
        # video_input_param: anything cv2.VideoCapture accepts
        #   (device index, file path, or stream URL).
        # auto_drop_frame: if True, grabbed frames are dropped whenever the
        #   output queue is full; otherwise only every (skip_frames + 1)-th
        #   frame is decoded.
        # reload_video: reopen the capture when grabbing/reading fails.
        self.video_input_param = video_input_param
        self.stopped = False
        self.skip_frames = skip_frames + 1
        self.auto_drop_frame = auto_drop_frame
        self.reload_video = reload_video
        self.mylogger = logging.getLogger(log_name)
        self.mylogger.info('VideoStreamReader init done.')
        self.cap_load_done = False

    def load_camera(self, ):
        """Open the capture, cache its fps/size, and return the cap object."""
        cap = cv2.VideoCapture(self.video_input_param)
        self.mylogger.info(
            f'Video is {"opened." if cap.isOpened() else "not opened."}')
        self.cap_fps = cap.get(5)  # 5 == cv2.CAP_PROP_FPS
        self.cap_height, self.cap_width = cap.get(cv2.CAP_PROP_FRAME_HEIGHT), cap.get(
            cv2.CAP_PROP_FRAME_WIDTH)
        self.mylogger.info(
            f'Video stream FPS: {self.cap_fps}\tshape: ({self.cap_height}, {self.cap_width})')
        self.mylogger.info(
            f'Load video stream from {self.video_input_param} done.')
        self.cap_load_done = True
        return cap

    def run(self, queue_i):
        """Read frames in a loop and put FrameInfo objects onto *queue_i*.

        Intended to run in a background thread; stops when ``stop()`` is
        called or (with ``reload_video=False``) when grabbing fails.
        """
        self.mylogger.info('VideoReader running ...')
        cap = self.load_camera()
        frame_idx = 0
        mytimer = MyTimer()
        while not self.stopped:
            mytimer.restart()
            ret = cap.grab()
            frame_idx += 1
            if not ret:
                self.mylogger.info(
                    f'---VideoReader--- Grab NONE FRAME, Cap is opened: {cap.isOpened()}'
                )
                if self.reload_video:
                    cap = self.load_camera()
                else:
                    self.mylogger.info(
                        f'---VideoReader--- Grab NONE FRAME, exit.'
                    )
                    self.stopped = True
                continue
            if self.auto_drop_frame:
                # drop the grabbed (undecoded) frame if the consumer is behind
                if queue_i.full():
                    continue
            else:
                # decode only every skip_frames-th frame
                if frame_idx % self.skip_frames != 0:
                    continue
            ret, image = cap.retrieve()
            self.mylogger.debug(
                f'---VideoReader--- cap read elapsed: {mytimer.elapsed():.2f}ms'
            )
            if ret:
                frame = FrameInfo(image=image,
                                  frame_idx=frame_idx,
                                  frame_elapsed_ms=cap.get(
                                      cv2.CAP_PROP_POS_MSEC))
                queue_i.put(frame)
                self.mylogger.debug(
                    f'---VideoReader--- Put Frame-{frame_idx} to the list ---- len:{queue_i.qsize()} '
                    f'elapsed: {mytimer.elapsed():.2f}ms')
            else:
                self.mylogger.info(
                    f'---VideoReader--- READ NONE FRAME, Cap is opened: {cap.isOpened()}'
                )
                if self.reload_video:
                    cap = self.load_camera()
        cap.release()
        self.mylogger.info('Camera is closed.')

    def stop(self):
        """Ask the run() loop to exit after its current iteration."""
        self.stopped = True

    def _reset_cap_writer(self, record_save_root, fps, width, height):
        """Create a new timestamped XVID video writer under *record_save_root*."""
        time_str = get_time_str()
        video_save_path = os.path.join(record_save_root, time_str[:10], time_str + '.avi')
        makedirs(os.path.dirname(video_save_path))
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        cap_writer = cv2.VideoWriter(video_save_path, fourcc, fps, (int(width), int(height)))
        self.mylogger.info('CapWriter reset.')
        return cap_writer, video_save_path

    def show_video(self, **kwargs):
        """Display the stream in a window (press 'q' to quit)."""
        self._show_or_record(show=True, **kwargs)

    def record_video(self, record_save_root, **kwargs):
        """Record the stream to timestamped .avi files under *record_save_root*."""
        kwargs['record_save_root'] = record_save_root
        self._show_or_record(record=True, **kwargs)

    def save_frames(self, save_image_root, **kwargs):
        """Dump each frame as a numbered .jpg under *save_image_root*."""
        kwargs['save_image_root'] = save_image_root
        self._show_or_record(save_image=True, **kwargs)

    def _show_or_record(self, show=False, record=False, save_image=False, **kwargs):
        """Consumer loop shared by show_video/record_video/save_frames.

        Starts the reader thread, then shows, records, and/or saves each
        frame until the stream ends, a 1-second queue read times out, or
        the user presses 'q' in the display window.
        """
        queue_i = Queue(maxsize=1)
        video_reader_worker = Thread(target=self.run, kwargs={"queue_i": queue_i}, daemon=True)
        video_reader_worker.start()
        record_save_root = kwargs.get('record_save_root', None)
        fps = kwargs.get('fps', None)
        width = kwargs.get('width', None)
        height = kwargs.get('height', None)
        # if > 0, rotate the recording file every this-many minutes
        record_write_interval_m = kwargs.get('record_write_interval_m', -1)
        show_window_name = kwargs.get('show_window_name', 'Demo')
        show_window_width = kwargs.get('show_window_width', 1920)
        show_window_height = kwargs.get('show_window_height', 1080)
        show_fps = kwargs.get('show_fps', True)
        save_image_root = kwargs.get('save_image_root', None)
        # wait until load_camera() has filled in the capture properties
        while True:
            if self.cap_load_done:
                break
        if fps is None:
            fps = self.cap_fps
        if width is None:
            width = self.cap_width
        if height is None:
            height = self.cap_height
        if save_image_root:
            makedirs(save_image_root)
        run_time_total = -1
        mytimer = MyTimer()
        mytimer2 = MyTimer()
        if record:
            cap_writer, video_save_path = self._reset_cap_writer(record_save_root, fps, width, height)
        if show:
            # 0 -> resizable window; NOTE: the name must match the one
            # passed to imshow below
            cv2.namedWindow(show_window_name, 0)
            cv2.resizeWindow(show_window_name, show_window_width, show_window_height)  # set window width and height
            if show_fps:
                fps_obj = FPSRealTime(buffer_len=150)
        while True:
            # rotate the recording file when the interval has elapsed
            if record and record_write_interval_m > 0 and mytimer2.elapsed(restart=False,
                                                                           unit='min') >= record_write_interval_m:
                mytimer2.restart()
                cap_writer.release()
                cap_writer, video_save_path = self._reset_cap_writer(record_save_root, fps, width, height)
            mytimer.restart()
            try:
                frame = queue_i.get(timeout=1)
                frame_idx = frame.get_frame_idx()
                self.mylogger.debug(
                    f'---show_or_record--- Get Frame-{frame_idx} *** last elapsed: {run_time_total:.1f}ms')
                image_bgr = frame.get_image()
                if width is not None and height is not None:
                    image_bgr = cv2.resize(image_bgr, (int(width), int(height)))
            except:
                # queue read timed out (or resize failed) -> treat as end of stream
                frame = None
            if frame is not None:
                if save_image:
                    save_path = os.path.join(save_image_root, f'{frame_idx:08d}.jpg')
                    cv2.imwrite(save_path, image_bgr)
                if record:
                    cap_writer.write(image_bgr)
                if show:
                    if show_fps:
                        # h, w = image_bgr.shape[:2]
                        # position = (int(0.02 * w), int(0.04 * h))
                        position = (20, 25)
                        fps = fps_obj.get_fps(number=1)
                        cv2.putText(image_bgr, f'FPS: {fps}', position, cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
                    if cv2.waitKey(1) & 0xFF == ord("q"):
                        break
                    cv2.imshow(show_window_name, image_bgr)
                    cv2.setMouseCallback(show_window_name, onMouse)
            else:
                self.mylogger.warning(
                    f'---show_or_record--- read image timeout, break.')
                break
            run_time_total = mytimer.elapsed(unit='ms')
        if record:
            cap_writer.release()
            self.mylogger.info(f'CapWriter is released.\n video is saved to {video_save_path}')
        if show:
            cv2.destroyAllWindows()
def onMouse(event, x, y, flags, params):
    """Debug helper for cv2.setMouseCallback: print clicked coordinates."""
    if event == cv2.EVENT_LBUTTONDOWN:
        print(f'({x}, {y})')
def main():
    # Placeholder entry point.
    pass
if __name__ == '__main__':
    main()
# Deep Reinforcement Learning
>#### _Shyamal H Anadkat | Fall '21_
# Background
Hello! This is a repository for AIPI530 DeepRL final project. **The goal is to build a pipeline for offline RL**. The
starter code has been forked from [d3rlpy](https://github.com/takuseno/d3rlpy) (_see citation at the bottom_)
Offline reinforcement learning (RL) defines the task of learning from a fixed batch of data.
Before diving in, I would recommend getting familiarized with basic Reinforcement Learning.
Here is a link to my blog post on Reinforcement Learning to get you started:
[RL Primer](https://shyamalanadkat.medium.com/reinforcement-learning-a-primer-29116d487e42)
The blog post briefly covers the following:
* What is reinforcement learning ? <br/>
* What are the pros and cons of reinforcement learning ? <br/>
* When should we consider applying reinforcement learning (and when should not) ? <br/>
* What's the difference between supervised learning and reinforcement learning ? <br/>
* What is offline reinforcement learning ? What are the pros and cons of offline reinforcement learning ? <br/>
* When should we consider applying offline reinforcement learning (and when should not) ? <br/>
* Have an example of offline reinforcement learning in the real-world <br/>
>
_source: https://bair.berkeley.edu/blog/2020/12/07/offline/_
# Getting Started
#### (_please read carefully_)
This project is customized to train CQL on a custom dataset in d3rlpy, and to train OPE (FQE) to evaluate the trained
policy. Important scripts:
1. `cql_train.py`: at the root of the project is the main script, used to train cql & get evaluation scores
2. `plot_helper.py`: utility script to help produce the plots required
### How do I install & run this project ?
---
**1. Clone this repository**
```
git clone https://github.com/shyamal-anadkat/offlinerl
```
**2. Install **pybullet** from source:**
```
pip install git+https://github.com/takuseno/d4rl-pybullet
```
**3. Install requirements:**
```
pip install Cython numpy
pip install -e .
```
4. **Execute **`cql_train.py`** found at the root of the project**
* Default dataset is `hopper-bullet-mixed-v0`
* Default no. of `epochs` is `10`. You can change this via custom args `--epochs_cql` & `--epochs_fqe`
* For example if we want to run for 10 epochs:
```
python cql_train.py --epochs_cql 10 --epochs_fqe 10
```
(see colab example below for more clarity)
5. **Important Logs:**
* Estimated Q values vs training steps (CQL): `d3rlpy_logs/CQL_hopper-bullet-mixed-v0_1/init_value.csv`
* Average reward vs training steps (CQL): `d3rlpy_logs/CQL_hopper-bullet-mixed-v0_1/environment.csv`
* True Q values vs training steps (CQL): `d3rlpy_logs/CQL_hopper-bullet-mixed-v0_1/true_q_value.csv`
* True Q & Estimated Q values vs training steps (FQE): `d3rlpy_logs/FQE_hopper-bullet-mixed-v0_1/..`
* Note: **I created my own scorer to calculate the true q values**. See `scorer.py` (`true_q_value_scorer`) for
implementation details)
6. For plotting, I wrote a utility script (at root of the project) which can be executed like so
```
python plot_helper.py
```
_Note: you can provide arguments that correspond to the path to the logs or it will use the default._
* If you're curious here's
the [benchmark/reproduction](https://github.com/takuseno/d3rlpy-benchmarks/tree/main/reproductions/CQL_hopper-medium-v0_3_20210617172248)
#### Other scripts:
* Format: `./scripts/format`
* Linting: `./scripts/lint`
### Sample Plots (with 100 epochs):
>
Note: logs can be found in `/d3rlpy_logs`
### Examples speak more: [](https://colab.research.google.com/drive/1S5RDTwaqVjA4wAJISxApra_G0ewSuS0R?usp=sharing)
**_Walkthrough:_**

---
# Background on d3rlpy
> d3rlpy is an offline deep reinforcement learning library for practitioners and researchers.
- Documentation: https://d3rlpy.readthedocs.io
- Paper: https://arxiv.org/abs/2111.03788
### How do I install d3rlpy?
d3rlpy supports Linux, macOS and Windows. d3rlpy is not only easy, but also completely compatible with scikit-learn API,
which means that you can maximize your productivity with the useful scikit-learn's utilities.
### PyPI (recommended)
[](https://badge.fury.io/py/d3rlpy)

```
$ pip install d3rlpy
```
## _More examples around d3rlpy usage_
```py
import d3rlpy
dataset, env = d3rlpy.datasets.get_dataset("hopper-medium-v0")
# prepare algorithm
sac = d3rlpy.algos.SAC()
# train offline
sac.fit(dataset, n_steps=1000000)
# train online
sac.fit_online(env, n_steps=1000000)
# ready to control
actions = sac.predict(x)
```
### MuJoCo
```py
import d3rlpy
# prepare dataset
dataset, env = d3rlpy.datasets.get_d4rl('hopper-medium-v0')
# prepare algorithm
cql = d3rlpy.algos.CQL(use_gpu=True)
# train
cql.fit(dataset,
eval_episodes=dataset,
n_epochs=100,
scorers={
'environment': d3rlpy.metrics.evaluate_on_environment(env),
'td_error': d3rlpy.metrics.td_error_scorer
})
```
See more datasets at [d4rl](https://github.com/rail-berkeley/d4rl).
### Atari 2600
```py
import d3rlpy
from sklearn.model_selection import train_test_split
# prepare dataset
dataset, env = d3rlpy.datasets.get_atari('breakout-expert-v0')
# split dataset
train_episodes, test_episodes = train_test_split(dataset, test_size=0.1)
# prepare algorithm
cql = d3rlpy.algos.DiscreteCQL(n_frames=4, q_func_factory='qr', scaler='pixel', use_gpu=True)
# start training
cql.fit(train_episodes,
eval_episodes=test_episodes,
n_epochs=100,
scorers={
'environment': d3rlpy.metrics.evaluate_on_environment(env),
'td_error': d3rlpy.metrics.td_error_scorer
})
```
See more Atari datasets at [d4rl-atari](https://github.com/takuseno/d4rl-atari).
### PyBullet
```py
import d3rlpy
# prepare dataset
dataset, env = d3rlpy.datasets.get_pybullet('hopper-bullet-mixed-v0')
# prepare algorithm
cql = d3rlpy.algos.CQL(use_gpu=True)
# start training
cql.fit(dataset,
eval_episodes=dataset,
n_epochs=100,
scorers={
'environment': d3rlpy.metrics.evaluate_on_environment(env),
'td_error': d3rlpy.metrics.td_error_scorer
})
```
See more PyBullet datasets at [d4rl-pybullet](https://github.com/takuseno/d4rl-pybullet).
### How about some tutorials?
Try a cartpole example on Google Colaboratory:
* official offline RL
tutorial: [](https://colab.research.google.com/github/takuseno/d3rlpy/blob/master/tutorials/cartpole.ipynb)
# _Citation_
> Thanks to [Takuma Seno](https://github.com/takuseno) and his work on [d3rlpy](https://github.com/takuseno/d3rlpy.git)
This wouldn't have been possible without it.
> Seno, T., & Imai, M. (2021). d3rlpy: An Offline Deep Reinforcement Learning Library [Conference paper](https://arxiv.org/abs/2111.03788). 35th Conference on Neural Information Processing Systems, Offline Reinforcement Learning Workshop, 2021
```
@InProceedings{seno2021d3rlpy,
  author = {Takuma Seno and Michita Imai},
  title = {d3rlpy: An Offline Deep Reinforcement Learning Library},
booktitle = {NeurIPS 2021 Offline Reinforcement Learning Workshop},
month = {December},
year = {2021}
}
```
| zjkdemo2 | /zjkdemo2-0.91.tar.gz/zjkdemo2-0.91/README.md | README.md |
import json
import os
import time
from contextlib import contextmanager
from datetime import datetime
from typing import Any, Dict, Iterator, List, Optional
import numpy as np
import structlog
from tensorboardX import SummaryWriter
from typing_extensions import Protocol
class _SaveProtocol(Protocol):
    """Structural type for any object that can persist its model to a file."""
    def save_model(self, fname: str) -> None:
        ...
# default json encoder for numpy objects
def default_json_encoder(obj: Any) -> Any:
    """Convert numpy scalars and arrays to native Python types.

    Intended as the ``default`` hook for ``json.dumps``: integers become
    ``int``, floats become ``float`` and arrays become nested lists. Any
    other type is rejected with ``ValueError``.
    """
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    raise ValueError(f"invalid object type: {type(obj)}")
LOG: structlog.BoundLogger = structlog.get_logger(__name__)
class D3RLPyLogger:
    """Experiment logger writing metrics to CSV files and TensorBoard.

    Values are accumulated per metric via :meth:`add_metric` and flushed by
    :meth:`commit`, which averages each buffer, appends one row per metric
    to ``<logdir>/<name>.csv`` and, when a writer is configured, mirrors the
    averages to TensorBoard.
    """
    # final experiment name, possibly suffixed with a timestamp
    _experiment_name: str
    # directory holding CSVs, params.json and model snapshots
    _logdir: str
    # when False, nothing is written to disk
    _save_metrics: bool
    # when True, metrics/params are also emitted through structlog
    _verbose: bool
    # per-metric values collected since the last commit
    _metrics_buffer: Dict[str, List[float]]
    # scalar hyperparameters for TensorBoard HParams (set once by add_params)
    _params: Optional[Dict[str, float]]
    # TensorBoard writer, or None when tensorboard_dir is not given
    _writer: Optional[SummaryWriter]
    def __init__(
        self,
        experiment_name: str,
        tensorboard_dir: Optional[str] = None,
        save_metrics: bool = True,
        root_dir: str = "logs",
        verbose: bool = True,
        with_timestamp: bool = True,
    ):
        """Create the log directory and optional TensorBoard writer.

        Args:
            experiment_name: base name for the experiment directory.
            tensorboard_dir: root for TensorBoard runs; disabled if ``None``.
            save_metrics: flag to write metrics/params/models to disk.
            root_dir: root directory for the experiment directory.
            verbose: flag to also emit information through structlog.
            with_timestamp: append a second-resolution timestamp to the name.
        """
        self._save_metrics = save_metrics
        self._verbose = verbose
        # add timestamp to prevent unintentional overwrites
        while True:
            if with_timestamp:
                date = datetime.now().strftime("%Y%m%d%H%M%S")
                self._experiment_name = experiment_name + "_" + date
            else:
                self._experiment_name = experiment_name
            if self._save_metrics:
                self._logdir = os.path.join(root_dir, self._experiment_name)
                if not os.path.exists(self._logdir):
                    os.makedirs(self._logdir)
                    LOG.info(f"Directory is created at {self._logdir}")
                    break
                if with_timestamp:
                    # directory exists: wait one second so the timestamp
                    # changes, then retry with a fresh name
                    time.sleep(1.0)
                else:
                    raise ValueError(f"{self._logdir} already exists.")
            else:
                # nothing is written to disk, so no directory is needed
                break
        self._metrics_buffer = {}
        if tensorboard_dir:
            tfboard_path = os.path.join(
                tensorboard_dir, "runs", self._experiment_name
            )
            self._writer = SummaryWriter(logdir=tfboard_path)
        else:
            self._writer = None
        self._params = None
    def add_params(self, params: Dict[str, Any]) -> None:
        """Record hyperparameters, saving them as ``params.json`` if enabled.

        May be called only once per logger instance.
        """
        assert self._params is None, "add_params can be called only once."
        if self._save_metrics:
            # save dictionary as json file
            params_path = os.path.join(self._logdir, "params.json")
            with open(params_path, "w") as f:
                json_str = json.dumps(
                    params, default=default_json_encoder, indent=2
                )
                f.write(json_str)
            if self._verbose:
                LOG.info(
                    f"Parameters are saved to {params_path}", params=params
                )
        elif self._verbose:
            LOG.info("Parameters", params=params)
        # remove non-scaler values for HParams
        self._params = {k: v for k, v in params.items() if np.isscalar(v)}
    def add_metric(self, name: str, value: float) -> None:
        """Buffer one value for ``name`` until the next :meth:`commit`."""
        if name not in self._metrics_buffer:
            self._metrics_buffer[name] = []
        self._metrics_buffer[name].append(value)
    def commit(self, epoch: int, step: int) -> Dict[str, float]:
        """Flush buffered metrics for this epoch and return their averages.

        Args:
            epoch: epoch index used for CSV rows and TensorBoard steps.
            step: total gradient step recorded alongside each metric.

        Returns:
            mapping of metric name to its epoch average.
        """
        metrics = {}
        for name, buffer in self._metrics_buffer.items():
            metric = sum(buffer) / len(buffer)
            if self._save_metrics:
                path = os.path.join(self._logdir, f"{name}.csv")
                with open(path, "a") as f:
                    print(f"{epoch},{step},{metric}", file=f)
                if self._writer:
                    self._writer.add_scalar(f"metrics/{name}", metric, epoch)
            metrics[name] = metric
        if self._verbose:
            LOG.info(
                f"{self._experiment_name}: epoch={epoch} step={step}",
                epoch=epoch,
                step=step,
                metrics=metrics,
            )
        if self._params and self._writer:
            self._writer.add_hparams(
                self._params,
                metrics,
                name=self._experiment_name,
                global_step=epoch,
            )
        # initialize metrics buffer
        self._metrics_buffer = {}
        return metrics
    def save_model(self, epoch: int, algo: _SaveProtocol) -> None:
        """Save ``algo``'s parameters as ``model_<epoch>.pt`` if enabled."""
        if self._save_metrics:
            # save entire model
            model_path = os.path.join(self._logdir, f"model_{epoch}.pt")
            algo.save_model(model_path)
            LOG.info(f"Model parameters are saved to {model_path}")
    @contextmanager
    def measure_time(self, name: str) -> Iterator[None]:
        """Context manager recording wall-clock time as metric ``time_<name>``."""
        name = "time_" + name
        start = time.time()
        try:
            yield
        finally:
            self.add_metric(name, time.time() - start)
    @property
    def logdir(self) -> str:
        # directory where this logger writes its files
        return self._logdir
    @property
    def experiment_name(self) -> str:
        # experiment name including any timestamp suffix
        return self._experiment_name
import copy
import json
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from typing import (
Any,
Callable,
DefaultDict,
Dict,
Generator,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
)
import gym
import numpy as np
from tqdm.auto import tqdm
from .argument_utility import (
ActionScalerArg,
RewardScalerArg,
ScalerArg,
UseGPUArg,
check_action_scaler,
check_reward_scaler,
check_scaler,
)
from .constants import (
CONTINUOUS_ACTION_SPACE_MISMATCH_ERROR,
DISCRETE_ACTION_SPACE_MISMATCH_ERROR,
IMPL_NOT_INITIALIZED_ERROR,
ActionSpace,
)
from .context import disable_parallel
from .dataset import Episode, MDPDataset, Transition, TransitionMiniBatch
from .decorators import pretty_repr
from .gpu import Device
from .iterators import RandomIterator, RoundIterator, TransitionIterator
from .logger import LOG, D3RLPyLogger
from .models.encoders import EncoderFactory, create_encoder_factory
from .models.optimizers import OptimizerFactory
from .models.q_functions import QFunctionFactory, create_q_func_factory
from .online.utility import get_action_size_from_env
from .preprocessing import (
ActionScaler,
RewardScaler,
Scaler,
create_action_scaler,
create_reward_scaler,
create_scaler,
)
class ImplBase(metaclass=ABCMeta):
    """Abstract interface for algorithm implementation (network) objects."""
    @abstractmethod
    def save_model(self, fname: str) -> None:
        """Saves neural network parameters to ``fname``."""
        pass
    @abstractmethod
    def load_model(self, fname: str) -> None:
        """Loads neural network parameters from ``fname``."""
        pass
    @property
    @abstractmethod
    def observation_shape(self) -> Sequence[int]:
        """Observation shape this implementation was built with."""
        pass
    @property
    @abstractmethod
    def action_size(self) -> int:
        """Dimension of the action space."""
        pass
def _serialize_params(params: Dict[str, Any]) -> Dict[str, Any]:
    """Convert non-JSON-friendly objects in ``params`` to plain values.

    The dictionary is updated in place and also returned for convenience:
    devices become their integer ID, scalers and factories become
    ``{"type": ..., "params": ...}`` dictionaries, and optimizer factories
    become their keyword arguments.
    """
    typed_objects = (
        Scaler,
        ActionScaler,
        RewardScaler,
        EncoderFactory,
        QFunctionFactory,
    )
    for key in list(params.keys()):
        value = params[key]
        if isinstance(value, Device):
            # devices are stored by their integer ID
            params[key] = value.get_id()
        elif isinstance(value, typed_objects):
            # store the registered type name plus constructor kwargs
            params[key] = {
                "type": value.get_type(),
                "params": value.get_params(),
            }
        elif isinstance(value, OptimizerFactory):
            params[key] = value.get_params()
    return params
def _deseriealize_params(params: Dict[str, Any]) -> Dict[str, Any]:
    """Rebuild scaler/factory objects from their JSON representations.

    Inverse of ``_serialize_params``: entries whose key names a scaler or
    factory are replaced in place by reconstructed objects. The same
    dictionary is returned.
    """
    for key in list(params.keys()):
        value = params[key]
        if key == "scaler" and value:
            params[key] = create_scaler(value["type"], **value["params"])
        elif key == "action_scaler" and value:
            params[key] = create_action_scaler(
                value["type"], **value["params"]
            )
        elif key == "reward_scaler" and value:
            params[key] = create_reward_scaler(
                value["type"], **value["params"]
            )
        elif "optim_factory" in key:
            # optimizer factories are stored as plain kwargs
            params[key] = OptimizerFactory(**value)
        elif "encoder_factory" in key:
            params[key] = create_encoder_factory(
                value["type"], **value["params"]
            )
        elif key == "q_func_factory":
            params[key] = create_q_func_factory(
                value["type"], **value["params"]
            )
    return params
@pretty_repr
class LearnableBase:
    """Base class for all learnable d3rlpy algorithms.

    Holds the training hyperparameters, optional preprocessing scalers and a
    lazily-created implementation object (``_impl``) that owns the actual
    networks. Subclasses provide ``_create_impl``, ``_update`` and
    ``get_action_type``.
    """
    # mini-batch size used by the iterators
    _batch_size: int
    # number of frames stacked for image observations
    _n_frames: int
    # N-step TD backup length
    _n_steps: int
    # discount factor
    _gamma: float
    # optional observation/action/reward preprocessors
    _scaler: Optional[Scaler]
    _action_scaler: Optional[ActionScaler]
    _reward_scaler: Optional[RewardScaler]
    # ratio of real transitions per batch (model-based augmentation)
    _real_ratio: float
    # cap on the number of generated transitions kept
    _generated_maxlen: int
    # implementation object; None until create_impl is called
    _impl: Optional[ImplBase]
    # per-scorer evaluation history collected during fit
    _eval_results: DefaultDict[str, List[float]]
    # per-metric loss history collected during fit
    _loss_history: DefaultDict[str, List[float]]
    # logger active only while fit/fitter is running
    _active_logger: Optional[D3RLPyLogger]
    # total gradient step counter (persists across fit calls)
    _grad_step: int
    def __init__(
        self,
        batch_size: int,
        n_frames: int,
        n_steps: int,
        gamma: float,
        scaler: ScalerArg = None,
        action_scaler: ActionScalerArg = None,
        reward_scaler: RewardScalerArg = None,
        real_ratio: float = 1.0,
        generated_maxlen: int = 100000,
        kwargs: Optional[Dict[str, Any]] = None,
    ):
        """Store hyperparameters and resolve scaler arguments.

        Args:
            batch_size: mini-batch size.
            n_frames: number of frames to stack for image observations.
            n_steps: N-step TD backup length.
            gamma: discount factor.
            scaler: observation preprocessor or its registered name.
            action_scaler: action preprocessor or its registered name.
            reward_scaler: reward preprocessor or its registered name.
            real_ratio: ratio of real transitions in a batch.
            generated_maxlen: maximum number of generated transitions kept.
            kwargs: leftover keyword arguments; logged as unused if present.
        """
        self._batch_size = batch_size
        self._n_frames = n_frames
        self._n_steps = n_steps
        self._gamma = gamma
        self._scaler = check_scaler(scaler)
        self._action_scaler = check_action_scaler(action_scaler)
        self._reward_scaler = check_reward_scaler(reward_scaler)
        self._real_ratio = real_ratio
        self._generated_maxlen = generated_maxlen
        self._impl = None
        self._eval_results = defaultdict(list)
        self._loss_history = defaultdict(list)
        self._active_logger = None
        self._grad_step = 0
        if kwargs and len(kwargs.keys()) > 0:
            LOG.warning("Unused arguments are passed.", **kwargs)
    def __setattr__(self, name: str, value: Any) -> None:
        super().__setattr__(name, value)
        # propagate property updates to implementation object
        if hasattr(self, "_impl") and self._impl and hasattr(self._impl, name):
            setattr(self._impl, name, value)
    @classmethod
    def from_json(
        cls, fname: str, use_gpu: UseGPUArg = False
    ) -> "LearnableBase":
        """Returns algorithm configured with json file.
        The Json file should be the one saved during fitting.
        .. code-block:: python
            from d3rlpy.algos import Algo
            # create algorithm with saved configuration
            algo = Algo.from_json('d3rlpy_logs/<path-to-json>/params.json')
            # ready to load
            algo.load_model('d3rlpy_logs/<path-to-model>/model_100.pt')
            # ready to predict
            algo.predict(...)
        Args:
            fname: file path to `params.json`.
            use_gpu: flag to use GPU, device ID or device.
        Returns:
            algorithm.
        """
        with open(fname, "r") as f:
            params = json.load(f)
        observation_shape = tuple(params["observation_shape"])
        action_size = params["action_size"]
        # shapes are passed to create_impl, not the constructor
        del params["observation_shape"]
        del params["action_size"]
        # reconstruct objects from json
        params = _deseriealize_params(params)
        # overwrite use_gpu flag
        params["use_gpu"] = use_gpu
        algo = cls(**params)
        algo.create_impl(observation_shape, action_size)
        return algo
    def set_params(self, **params: Any) -> "LearnableBase":
        """Sets the given arguments to the attributes if they exist.
        This method sets the given values to the attributes including ones in
        subclasses. If the values that don't exist as attributes are
        passed, they are ignored.
        Some of scikit-learn utilities will use this method.
        .. code-block:: python
            algo.set_params(batch_size=100)
        Args:
            params: arbitrary inputs to set as attributes.
        Returns:
            itself.
        """
        for key, val in params.items():
            if hasattr(self, key):
                try:
                    setattr(self, key, val)
                except AttributeError:
                    # try passing to protected keys
                    assert hasattr(self, "_" + key), f"{key} does not exist."
                    setattr(self, "_" + key, val)
            else:
                # no public attribute: fall back to the protected one
                assert hasattr(self, "_" + key), f"{key} does not exist."
                setattr(self, "_" + key, val)
        return self
    def get_params(self, deep: bool = True) -> Dict[str, Any]:
        """Returns the all attributes.
        This method returns the all attributes including ones in subclasses.
        Some of scikit-learn utilities will use this method.
        .. code-block:: python
            params = algo.get_params(deep=True)
            # the returned values can be used to instantiate the new object.
            algo2 = AlgoBase(**params)
        Args:
            deep: flag to deeply copy objects such as `impl`.
        Returns:
            attribute values in dictionary.
        """
        rets = {}
        for key in dir(self):
            # remove magic properties
            if key[:2] == "__":
                continue
            # remove specific keys
            if key in [
                "_eval_results",
                "_loss_history",
                "_active_logger",
                "_grad_step",
                "active_logger",
                "grad_step",
                "observation_shape",
                "action_size",
            ]:
                continue
            value = getattr(self, key)
            # remove underscore
            if key[0] == "_":
                key = key[1:]
            # pick scalar parameters
            if np.isscalar(value):
                rets[key] = value
            elif isinstance(value, object) and not callable(value):
                if deep:
                    rets[key] = copy.deepcopy(value)
                else:
                    rets[key] = value
        return rets
    def save_model(self, fname: str) -> None:
        """Saves neural network parameters.
        .. code-block:: python
            algo.save_model('model.pt')
        Args:
            fname: destination file path.
        """
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        self._impl.save_model(fname)
    def load_model(self, fname: str) -> None:
        """Load neural network parameters.
        .. code-block:: python
            algo.load_model('model.pt')
        Args:
            fname: source file path.
        """
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        self._impl.load_model(fname)
    def fit(
        self,
        dataset: Union[List[Episode], List[Transition], MDPDataset],
        n_epochs: Optional[int] = None,
        n_steps: Optional[int] = None,
        n_steps_per_epoch: int = 10000,
        save_metrics: bool = True,
        experiment_name: Optional[str] = None,
        with_timestamp: bool = True,
        logdir: str = "d3rlpy_logs",
        verbose: bool = True,
        show_progress: bool = True,
        tensorboard_dir: Optional[str] = None,
        eval_episodes: Optional[List[Episode]] = None,
        save_interval: int = 1,
        scorers: Optional[
            Dict[str, Callable[[Any, List[Episode]], float]]
        ] = None,
        shuffle: bool = True,
        callback: Optional[Callable[["LearnableBase", int, int], None]] = None,
    ) -> List[Tuple[int, Dict[str, float]]]:
        """Trains with the given dataset.
        .. code-block:: python
            algo.fit(episodes, n_steps=1000000)
        Args:
            dataset: list of episodes to train.
            n_epochs: the number of epochs to train.
            n_steps: the number of steps to train.
            n_steps_per_epoch: the number of steps per epoch. This value will
                be ignored when ``n_steps`` is ``None``.
            save_metrics: flag to record metrics in files. If False,
                the log directory is not created and the model parameters are
                not saved during training.
            experiment_name: experiment name for logging. If not passed,
                the directory name will be `{class name}_{timestamp}`.
            with_timestamp: flag to add timestamp string to the last of
                directory name.
            logdir: root directory name to save logs.
            verbose: flag to show logged information on stdout.
            show_progress: flag to show progress bar for iterations.
            tensorboard_dir: directory to save logged information in
                tensorboard (additional to the csv data). if ``None``, the
                directory will not be created.
            eval_episodes: list of episodes to test.
            save_interval: interval to save parameters.
            scorers: list of scorer functions used with `eval_episodes`.
            shuffle: flag to shuffle transitions on each epoch.
            callback: callable function that takes ``(algo, epoch, total_step)``
                , which is called every step.
        Returns:
            list of result tuples (epoch, metrics) per epoch.
        """
        # fit is a thin wrapper that drains the fitter generator
        results = list(
            self.fitter(
                dataset,
                n_epochs,
                n_steps,
                n_steps_per_epoch,
                save_metrics,
                experiment_name,
                with_timestamp,
                logdir,
                verbose,
                show_progress,
                tensorboard_dir,
                eval_episodes,
                save_interval,
                scorers,
                shuffle,
                callback,
            )
        )
        return results
    def fitter(
        self,
        dataset: Union[List[Episode], List[Transition], MDPDataset],
        n_epochs: Optional[int] = None,
        n_steps: Optional[int] = None,
        n_steps_per_epoch: int = 10000,
        save_metrics: bool = True,
        experiment_name: Optional[str] = None,
        with_timestamp: bool = True,
        logdir: str = "d3rlpy_logs",
        verbose: bool = True,
        show_progress: bool = True,
        tensorboard_dir: Optional[str] = None,
        eval_episodes: Optional[List[Episode]] = None,
        save_interval: int = 1,
        scorers: Optional[
            Dict[str, Callable[[Any, List[Episode]], float]]
        ] = None,
        shuffle: bool = True,
        callback: Optional[Callable[["LearnableBase", int, int], None]] = None,
    ) -> Generator[Tuple[int, Dict[str, float]], None, None]:
        """Iterate over epochs steps to train with the given dataset. At each
        iteration algo methods and properties can be changed or queried.
        .. code-block:: python
            for epoch, metrics in algo.fitter(episodes):
                my_plot(metrics)
                algo.save_model(my_path)
        Args:
            dataset: offline dataset to train.
            n_epochs: the number of epochs to train.
            n_steps: the number of steps to train.
            n_steps_per_epoch: the number of steps per epoch. This value will
                be ignored when ``n_steps`` is ``None``.
            save_metrics: flag to record metrics in files. If False,
                the log directory is not created and the model parameters are
                not saved during training.
            experiment_name: experiment name for logging. If not passed,
                the directory name will be `{class name}_{timestamp}`.
            with_timestamp: flag to add timestamp string to the last of
                directory name.
            logdir: root directory name to save logs.
            verbose: flag to show logged information on stdout.
            show_progress: flag to show progress bar for iterations.
            tensorboard_dir: directory to save logged information in
                tensorboard (additional to the csv data). if ``None``, the
                directory will not be created.
            eval_episodes: list of episodes to test.
            save_interval: interval to save parameters.
            scorers: list of scorer functions used with `eval_episodes`.
            shuffle: flag to shuffle transitions on each epoch.
            callback: callable function that takes ``(algo, epoch, total_step)``
                , which is called every step.
        Returns:
            iterator yielding current epoch and metrics dict.
        """
        # flatten the dataset into a list of transitions regardless of the
        # input container type
        transitions = []
        if isinstance(dataset, MDPDataset):
            for episode in dataset.episodes:
                transitions += episode.transitions
        elif not dataset:
            raise ValueError("empty dataset is not supported.")
        elif isinstance(dataset[0], Episode):
            for episode in cast(List[Episode], dataset):
                transitions += episode.transitions
        elif isinstance(dataset[0], Transition):
            transitions = list(cast(List[Transition], dataset))
        else:
            raise ValueError(f"invalid dataset type: {type(dataset)}")
        # check action space
        if self.get_action_type() == ActionSpace.BOTH:
            pass
        elif transitions[0].is_discrete:
            assert (
                self.get_action_type() == ActionSpace.DISCRETE
            ), DISCRETE_ACTION_SPACE_MISMATCH_ERROR
        else:
            assert (
                self.get_action_type() == ActionSpace.CONTINUOUS
            ), CONTINUOUS_ACTION_SPACE_MISMATCH_ERROR
        # exactly one of n_steps / n_epochs selects the iterator type
        iterator: TransitionIterator
        if n_epochs is None and n_steps is not None:
            assert n_steps >= n_steps_per_epoch
            n_epochs = n_steps // n_steps_per_epoch
            iterator = RandomIterator(
                transitions,
                n_steps_per_epoch,
                batch_size=self._batch_size,
                n_steps=self._n_steps,
                gamma=self._gamma,
                n_frames=self._n_frames,
                real_ratio=self._real_ratio,
                generated_maxlen=self._generated_maxlen,
            )
            LOG.debug("RandomIterator is selected.")
        elif n_epochs is not None and n_steps is None:
            iterator = RoundIterator(
                transitions,
                batch_size=self._batch_size,
                n_steps=self._n_steps,
                gamma=self._gamma,
                n_frames=self._n_frames,
                real_ratio=self._real_ratio,
                generated_maxlen=self._generated_maxlen,
                shuffle=shuffle,
            )
            LOG.debug("RoundIterator is selected.")
        else:
            raise ValueError("Either of n_epochs or n_steps must be given.")
        # setup logger
        logger = self._prepare_logger(
            save_metrics,
            experiment_name,
            with_timestamp,
            logdir,
            verbose,
            tensorboard_dir,
        )
        # add reference to active logger to algo class during fit
        self._active_logger = logger
        # initialize scaler
        if self._scaler:
            LOG.debug("Fitting scaler...", scaler=self._scaler.get_type())
            self._scaler.fit(transitions)
        # initialize action scaler
        if self._action_scaler:
            LOG.debug(
                "Fitting action scaler...",
                action_scaler=self._action_scaler.get_type(),
            )
            self._action_scaler.fit(transitions)
        # initialize reward scaler
        if self._reward_scaler:
            LOG.debug(
                "Fitting reward scaler...",
                reward_scaler=self._reward_scaler.get_type(),
            )
            self._reward_scaler.fit(transitions)
        # instantiate implementation
        if self._impl is None:
            LOG.debug("Building models...")
            transition = iterator.transitions[0]
            action_size = transition.get_action_size()
            observation_shape = tuple(transition.get_observation_shape())
            self.create_impl(
                self._process_observation_shape(observation_shape), action_size
            )
            LOG.debug("Models have been built.")
        else:
            LOG.warning("Skip building models since they're already built.")
        # save hyperparameters
        self.save_params(logger)
        # refresh evaluation metrics
        self._eval_results = defaultdict(list)
        # refresh loss history
        self._loss_history = defaultdict(list)
        # training loop
        total_step = 0
        for epoch in range(1, n_epochs + 1):
            # dict to add incremental mean losses to epoch
            epoch_loss = defaultdict(list)
            range_gen = tqdm(
                range(len(iterator)),
                disable=not show_progress,
                desc=f"Epoch {int(epoch)}/{n_epochs}",
            )
            iterator.reset()
            for itr in range_gen:
                # generate new transitions with dynamics models
                new_transitions = self.generate_new_data(
                    transitions=iterator.transitions,
                )
                if new_transitions:
                    iterator.add_generated_transitions(new_transitions)
                    LOG.debug(
                        f"{len(new_transitions)} transitions are generated.",
                        real_transitions=len(iterator.transitions),
                        fake_transitions=len(iterator.generated_transitions),
                    )
                with logger.measure_time("step"):
                    # pick transitions
                    with logger.measure_time("sample_batch"):
                        batch = next(iterator)
                    # update parameters
                    with logger.measure_time("algorithm_update"):
                        loss = self.update(batch)
                    # record metrics
                    for name, val in loss.items():
                        logger.add_metric(name, val)
                        epoch_loss[name].append(val)
                    # update progress postfix with losses
                    if itr % 10 == 0:
                        mean_loss = {
                            k: np.mean(v) for k, v in epoch_loss.items()
                        }
                        range_gen.set_postfix(mean_loss)
                total_step += 1
                # call callback if given
                if callback:
                    callback(self, epoch, total_step)
            # save loss to loss history dict
            self._loss_history["epoch"].append(epoch)
            self._loss_history["step"].append(total_step)
            for name, vals in epoch_loss.items():
                if vals:
                    self._loss_history[name].append(np.mean(vals))
            if scorers and eval_episodes:
                self._evaluate(eval_episodes, scorers, logger)
            # save metrics
            metrics = logger.commit(epoch, total_step)
            # save model parameters
            if epoch % save_interval == 0:
                logger.save_model(total_step, self)
            yield epoch, metrics
        # drop reference to active logger since out of fit there is no active
        # logger
        self._active_logger = None
    def create_impl(
        self, observation_shape: Sequence[int], action_size: int
    ) -> None:
        """Instantiate implementation objects with the dataset shapes.
        This method will be used internally when `fit` method is called.
        Args:
            observation_shape: observation shape.
            action_size: dimension of action-space.
        """
        if self._impl:
            LOG.warn("Parameters will be reinitialized.")
        self._create_impl(observation_shape, action_size)
    def _create_impl(
        self, observation_shape: Sequence[int], action_size: int
    ) -> None:
        # subclasses build their networks here
        raise NotImplementedError
    def build_with_dataset(self, dataset: MDPDataset) -> None:
        """Instantiate implementation object with MDPDataset object.
        Args:
            dataset: dataset.
        """
        observation_shape = dataset.get_observation_shape()
        self.create_impl(
            self._process_observation_shape(observation_shape),
            dataset.get_action_size(),
        )
    def build_with_env(self, env: gym.Env) -> None:
        """Instantiate implementation object with OpenAI Gym object.
        Args:
            env: gym-like environment.
        """
        observation_shape = env.observation_space.shape
        self.create_impl(
            self._process_observation_shape(observation_shape),
            get_action_size_from_env(env),
        )
    def _process_observation_shape(
        self, observation_shape: Sequence[int]
    ) -> Sequence[int]:
        """Expand the channel dimension for stacked image observations."""
        if len(observation_shape) == 3:
            n_channels = observation_shape[0]
            image_size = observation_shape[1:]
            # frame stacking for image observation
            observation_shape = (self._n_frames * n_channels, *image_size)
        return observation_shape
    def update(self, batch: TransitionMiniBatch) -> Dict[str, float]:
        """Update parameters with mini-batch of data.
        Args:
            batch: mini-batch data.
        Returns:
            dictionary of metrics.
        """
        loss = self._update(batch)
        self._grad_step += 1
        return loss
    def _update(self, batch: TransitionMiniBatch) -> Dict[str, float]:
        # subclasses implement one gradient step here
        raise NotImplementedError
    def generate_new_data(
        self, transitions: List[Transition]
    ) -> Optional[List[Transition]]:
        """Returns generated transitions for data augmentation.
        This method is for model-based RL algorithms.
        Args:
            transitions: list of transitions.
        Returns:
            list of new transitions.
        """
        return None
    def _prepare_logger(
        self,
        save_metrics: bool,
        experiment_name: Optional[str],
        with_timestamp: bool,
        logdir: str,
        verbose: bool,
        tensorboard_dir: Optional[str],
    ) -> D3RLPyLogger:
        """Build the logger, defaulting the name to the algorithm class."""
        if experiment_name is None:
            experiment_name = self.__class__.__name__
        logger = D3RLPyLogger(
            experiment_name,
            save_metrics=save_metrics,
            root_dir=logdir,
            verbose=verbose,
            tensorboard_dir=tensorboard_dir,
            with_timestamp=with_timestamp,
        )
        return logger
    def _evaluate(
        self,
        episodes: List[Episode],
        scorers: Dict[str, Callable[[Any, List[Episode]], float]],
        logger: D3RLPyLogger,
    ) -> None:
        """Run every scorer on the test episodes and record results."""
        for name, scorer in scorers.items():
            # evaluation with test data
            test_score = scorer(self, episodes)
            # logging metrics
            logger.add_metric(name, test_score)
            # store metric locally
            if test_score is not None:
                self._eval_results[name].append(test_score)
    def save_params(self, logger: D3RLPyLogger) -> None:
        """Saves configurations as params.json.
        Args:
            logger: logger object.
        """
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        # get hyperparameters without impl
        params = {}
        with disable_parallel():
            for k, v in self.get_params(deep=False).items():
                if isinstance(v, (ImplBase, LearnableBase)):
                    continue
                params[k] = v
        # save algorithm name
        params["algorithm"] = self.__class__.__name__
        # save shapes
        params["observation_shape"] = self._impl.observation_shape
        params["action_size"] = self._impl.action_size
        # serialize objects
        params = _serialize_params(params)
        logger.add_params(params)
    def get_action_type(self) -> ActionSpace:
        """Returns action type (continuous or discrete).
        Returns:
            action type.
        """
        raise NotImplementedError
    @property
    def batch_size(self) -> int:
        """Batch size to train.
        Returns:
            int: batch size.
        """
        return self._batch_size
    @batch_size.setter
    def batch_size(self, batch_size: int) -> None:
        self._batch_size = batch_size
    @property
    def n_frames(self) -> int:
        """Number of frames to stack.
        This is only for image observation.
        Returns:
            int: number of frames to stack.
        """
        return self._n_frames
    @n_frames.setter
    def n_frames(self, n_frames: int) -> None:
        self._n_frames = n_frames
    @property
    def n_steps(self) -> int:
        """N-step TD backup.
        Returns:
            int: N-step TD backup.
        """
        return self._n_steps
    @n_steps.setter
    def n_steps(self, n_steps: int) -> None:
        self._n_steps = n_steps
    @property
    def gamma(self) -> float:
        """Discount factor.
        Returns:
            float: discount factor.
        """
        return self._gamma
    @gamma.setter
    def gamma(self, gamma: float) -> None:
        self._gamma = gamma
    @property
    def scaler(self) -> Optional[Scaler]:
        """Preprocessing scaler.
        Returns:
            Optional[Scaler]: preprocessing scaler.
        """
        return self._scaler
    @scaler.setter
    def scaler(self, scaler: Scaler) -> None:
        self._scaler = scaler
    @property
    def action_scaler(self) -> Optional[ActionScaler]:
        """Preprocessing action scaler.
        Returns:
            Optional[ActionScaler]: preprocessing action scaler.
        """
        return self._action_scaler
    @action_scaler.setter
    def action_scaler(self, action_scaler: ActionScaler) -> None:
        self._action_scaler = action_scaler
    @property
    def reward_scaler(self) -> Optional[RewardScaler]:
        """Preprocessing reward scaler.
        Returns:
            Optional[RewardScaler]: preprocessing reward scaler.
        """
        return self._reward_scaler
    @reward_scaler.setter
    def reward_scaler(self, reward_scaler: RewardScaler) -> None:
        self._reward_scaler = reward_scaler
    @property
    def impl(self) -> Optional[ImplBase]:
        """Implementation object.
        Returns:
            Optional[ImplBase]: implementation object.
        """
        return self._impl
    @impl.setter
    def impl(self, impl: ImplBase) -> None:
        self._impl = impl
    @property
    def observation_shape(self) -> Optional[Sequence[int]]:
        """Observation shape.
        Returns:
            Optional[Sequence[int]]: observation shape.
        """
        if self._impl:
            return self._impl.observation_shape
        return None
    @property
    def action_size(self) -> Optional[int]:
        """Action size.
        Returns:
            Optional[int]: action size.
        """
        if self._impl:
            return self._impl.action_size
        return None
    @property
    def active_logger(self) -> Optional[D3RLPyLogger]:
        """Active D3RLPyLogger object.
        This will be only available during training.
        Returns:
            logger object.
        """
        return self._active_logger
    def set_active_logger(self, logger: D3RLPyLogger) -> None:
        """Set active D3RLPyLogger object
        Args:
            logger: logger object.
        """
        self._active_logger = logger
    @property
    def grad_step(self) -> int:
        """Total gradient step counter.
        This value will keep counting after ``fit`` and ``fit_online``
        methods finish.
        Returns:
            total gradient step counter.
        """
        return self._grad_step
    def set_grad_step(self, grad_step: int) -> None:
        """Set total gradient step counter.
        This method can be used to restart the middle of training with an
        arbitrary gradient step counter, which has effects on periodic
        functions such as the target update.
        Args:
            grad_step: total gradient step counter.
        """
        self._grad_step = grad_step
import collections
from inspect import signature
from typing import Any, Callable, Dict, List, Optional, Sequence, Union
import numpy as np
import torch
from torch import nn
from torch.optim import Optimizer
from torch.utils.data._utils.collate import default_collate
from typing_extensions import Protocol
from .dataset import TransitionMiniBatch
from .preprocessing import ActionScaler, RewardScaler, Scaler
BLACK_LIST = [
"policy",
"q_function",
"policy_optim",
"q_function_optim",
] # special properties
def _get_attributes(obj: Any) -> List[str]:
return [key for key in dir(obj) if key not in BLACK_LIST]
def soft_sync(targ_model: nn.Module, model: nn.Module, tau: float) -> None:
    """Polyak-average ``model`` parameters into ``targ_model`` in place.

    Each target parameter becomes ``(1 - tau) * target + tau * source``.
    """
    with torch.no_grad():
        pairs = zip(model.parameters(), targ_model.parameters())
        for source_param, target_param in pairs:
            target_param.data.mul_(1 - tau)
            target_param.data.add_(tau * source_param.data)
def hard_sync(targ_model: nn.Module, model: nn.Module) -> None:
    """Copy every parameter of ``model`` into ``targ_model`` in place."""
    with torch.no_grad():
        for source_param, target_param in zip(
            model.parameters(), targ_model.parameters()
        ):
            target_param.data.copy_(source_param.data)
def sync_optimizer_state(targ_optim: Optimizer, optim: Optimizer) -> None:
    """Copy the internal optimizer state of ``optim`` into ``targ_optim``.

    Only the ``state`` entries (e.g. momentum buffers) are transferred; the
    target keeps its own ``param_groups`` such as learning rates.
    """
    # source optimizer state
    source_state = optim.state_dict()["state"]
    # destination optimizer param_groups
    target_groups = targ_optim.state_dict()["param_groups"]
    # update only state
    targ_optim.load_state_dict(
        {"state": source_state, "param_groups": target_groups}
    )
def set_eval_mode(impl: Any) -> None:
    """Switch every torch module owned by ``impl`` to evaluation mode."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, torch.nn.Module):
            candidate.eval()
def set_train_mode(impl: Any) -> None:
    """Switch every torch module owned by ``impl`` to training mode."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, torch.nn.Module):
            candidate.train()
def to_cuda(impl: Any, device: str) -> None:
    """Move every torch module/parameter owned by ``impl`` to ``device``."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, (torch.nn.Module, torch.nn.Parameter)):
            candidate.cuda(device)
def to_cpu(impl: Any) -> None:
    """Move every torch module/parameter owned by ``impl`` to the CPU."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, (torch.nn.Module, torch.nn.Parameter)):
            candidate.cpu()
def freeze(impl: Any) -> None:
    """Disable gradients on every torch module owned by ``impl``."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, torch.nn.Module):
            for parameter in candidate.parameters():
                parameter.requires_grad = False
def unfreeze(impl: Any) -> None:
    """Re-enable gradients on every torch module owned by ``impl``."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, torch.nn.Module):
            for parameter in candidate.parameters():
                parameter.requires_grad = True
def get_state_dict(impl: Any) -> Dict[str, Any]:
    """Collect state dicts of all modules/optimizers owned by ``impl``."""
    checkpoint: Dict[str, Any] = {}
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, (torch.nn.Module, torch.optim.Optimizer)):
            checkpoint[attr_name] = candidate.state_dict()
    return checkpoint
def set_state_dict(impl: Any, chkpt: Dict[str, Any]) -> None:
    """Restore modules/optimizers owned by ``impl`` from ``chkpt``."""
    for attr_name in _get_attributes(impl):
        candidate = getattr(impl, attr_name)
        if isinstance(candidate, (torch.nn.Module, torch.optim.Optimizer)):
            candidate.load_state_dict(chkpt[attr_name])
def reset_optimizer_states(impl: Any) -> None:
    """Clears the internal state of every optimizer attribute of ``impl``."""
    for name in _get_attributes(impl):
        attr = getattr(impl, name)
        if isinstance(attr, torch.optim.Optimizer):
            # replacing the state mapping drops e.g. Adam moments entirely
            attr.state = collections.defaultdict(dict)
def map_location(device: str) -> Any:
    """Builds a ``map_location`` argument for ``torch.load``.

    Returns a remapping callable for CUDA devices, the string ``"cpu"`` for
    CPU devices, and raises ``ValueError`` otherwise.
    """
    if "cuda" in device:
        def _to_gpu(storage: Any, loc: Any) -> Any:
            return storage.cuda(device)
        return _to_gpu
    if "cpu" in device:
        return "cpu"
    raise ValueError(f"invalid device={device}")
class _WithDeviceAndScalerProtocol(Protocol):
    """Structural type for implementation objects consumed by ``torch_api``:
    anything exposing a device string plus the three optional preprocessors.
    """

    @property
    def device(self) -> str:
        ...
    @property
    def scaler(self) -> Optional[Scaler]:
        ...
    @property
    def action_scaler(self) -> Optional[ActionScaler]:
        ...
    @property
    def reward_scaler(self) -> Optional[RewardScaler]:
        ...
def _convert_to_torch(array: np.ndarray, device: str) -> torch.Tensor:
dtype = torch.uint8 if array.dtype == np.uint8 else torch.float32
tensor = torch.tensor(data=array, dtype=dtype, device=device)
return tensor.float()
class TorchMiniBatch:
    """Device-resident view of a ``TransitionMiniBatch``.

    All arrays are converted to float tensors on ``device`` at construction
    time, and the optional observation/action/reward preprocessors are
    applied once so downstream code can consume the tensors directly.
    """

    _observations: torch.Tensor
    _actions: torch.Tensor
    _rewards: torch.Tensor
    _next_observations: torch.Tensor
    _next_actions: torch.Tensor
    _next_rewards: torch.Tensor
    _terminals: torch.Tensor
    _masks: Optional[torch.Tensor]
    _n_steps: torch.Tensor
    _device: str

    def __init__(
        self,
        batch: TransitionMiniBatch,
        device: str,
        scaler: Optional[Scaler] = None,
        action_scaler: Optional[ActionScaler] = None,
        reward_scaler: Optional[RewardScaler] = None,
    ):
        # numpy -> torch conversion for every array in the batch
        obs = _convert_to_torch(batch.observations, device)
        acts = _convert_to_torch(batch.actions, device)
        rews = _convert_to_torch(batch.rewards, device)
        next_obs = _convert_to_torch(batch.next_observations, device)
        next_acts = _convert_to_torch(batch.next_actions, device)
        next_rews = _convert_to_torch(batch.next_rewards, device)
        terms = _convert_to_torch(batch.terminals, device)
        masks: Optional[torch.Tensor]
        if batch.masks is None:
            masks = None
        else:
            masks = _convert_to_torch(batch.masks, device)
        n_steps = _convert_to_torch(batch.n_steps, device)
        # optional preprocessing, applied to current and next-step tensors
        if scaler:
            obs = scaler.transform(obs)
            next_obs = scaler.transform(next_obs)
        if action_scaler:
            acts = action_scaler.transform(acts)
            next_acts = action_scaler.transform(next_acts)
        if reward_scaler:
            rews = reward_scaler.transform(rews)
            next_rews = reward_scaler.transform(next_rews)
        self._observations = obs
        self._actions = acts
        self._rewards = rews
        self._next_observations = next_obs
        self._next_actions = next_acts
        self._next_rewards = next_rews
        self._terminals = terms
        self._masks = masks
        self._n_steps = n_steps
        self._device = device

    @property
    def observations(self) -> torch.Tensor:
        return self._observations

    @property
    def actions(self) -> torch.Tensor:
        return self._actions

    @property
    def rewards(self) -> torch.Tensor:
        return self._rewards

    @property
    def next_observations(self) -> torch.Tensor:
        return self._next_observations

    @property
    def next_actions(self) -> torch.Tensor:
        return self._next_actions

    @property
    def next_rewards(self) -> torch.Tensor:
        return self._next_rewards

    @property
    def terminals(self) -> torch.Tensor:
        return self._terminals

    @property
    def masks(self) -> Optional[torch.Tensor]:
        return self._masks

    @property
    def n_steps(self) -> torch.Tensor:
        return self._n_steps

    @property
    def device(self) -> str:
        return self._device
def torch_api(
    scaler_targets: Optional[List[str]] = None,
    action_scaler_targets: Optional[List[str]] = None,
    reward_scaler_targets: Optional[List[str]] = None,
) -> Callable[..., np.ndarray]:
    """Decorator factory that converts method arguments to torch tensors.

    Positional arguments of the wrapped method are moved to ``self.device``.
    Arguments whose parameter names appear in one of the ``*_targets`` lists
    are additionally run through the matching preprocessor
    (``self.scaler`` / ``self.action_scaler`` / ``self.reward_scaler``)
    when that preprocessor is set.
    """
    def _torch_api(f: Callable[..., np.ndarray]) -> Callable[..., np.ndarray]:
        # get argument names, skipping the first (self) so that arg_keys[i]
        # corresponds to args[i] inside the wrapper
        sig = signature(f)
        arg_keys = list(sig.parameters.keys())[1:]
        def wrapper(
            self: _WithDeviceAndScalerProtocol, *args: Any, **kwargs: Any
        ) -> np.ndarray:
            tensors: List[Union[torch.Tensor, TorchMiniBatch]] = []
            # convert all args to torch.Tensor
            for i, val in enumerate(args):
                tensor: Union[torch.Tensor, TorchMiniBatch]
                if isinstance(val, torch.Tensor):
                    # already a tensor: pass through
                    tensor = val
                elif isinstance(val, list):
                    # lists are collated into a single batched tensor
                    tensor = default_collate(val)
                    tensor = tensor.to(self.device)
                elif isinstance(val, np.ndarray):
                    # keep uint8 (e.g. image) data compact during transfer
                    if val.dtype == np.uint8:
                        dtype = torch.uint8
                    else:
                        dtype = torch.float32
                    tensor = torch.tensor(
                        data=val, dtype=dtype, device=self.device,
                    )
                elif val is None:
                    # None arguments pass through untouched
                    tensor = None
                elif isinstance(val, TransitionMiniBatch):
                    # mini-batches carry their own device/scaling conversion
                    tensor = TorchMiniBatch(
                        val,
                        self.device,
                        scaler=self.scaler,
                        action_scaler=self.action_scaler,
                        reward_scaler=self.reward_scaler,
                    )
                else:
                    # scalars and other array-likes become float32 tensors
                    tensor = torch.tensor(
                        data=val, dtype=torch.float32, device=self.device,
                    )
                if isinstance(tensor, torch.Tensor):
                    # preprocess
                    if self.scaler and scaler_targets:
                        if arg_keys[i] in scaler_targets:
                            tensor = self.scaler.transform(tensor)
                    # preprocess action
                    if self.action_scaler and action_scaler_targets:
                        if arg_keys[i] in action_scaler_targets:
                            tensor = self.action_scaler.transform(tensor)
                    # preprocessing reward
                    if self.reward_scaler and reward_scaler_targets:
                        if arg_keys[i] in reward_scaler_targets:
                            tensor = self.reward_scaler.transform(tensor)
                    # make sure if the tensor is float32 type
                    if tensor is not None and tensor.dtype != torch.float32:
                        tensor = tensor.float()
                tensors.append(tensor)
            return f(self, *tensors, **kwargs)
        return wrapper
    return _torch_api
def eval_api(f: Callable[..., np.ndarray]) -> Callable[..., np.ndarray]:
    """Decorator putting all torch modules into eval mode before the call."""
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> np.ndarray:
        set_eval_mode(self)
        return f(self, *args, **kwargs)
    return wrapper
def train_api(f: Callable[..., np.ndarray]) -> Callable[..., np.ndarray]:
    """Decorator putting all torch modules into train mode before the call."""
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> np.ndarray:
        set_train_mode(self)
        return f(self, *args, **kwargs)
    return wrapper
class View(nn.Module):  # type: ignore
    """Module wrapper around ``Tensor.view`` with a fixed target shape."""

    _shape: Sequence[int]

    def __init__(self, shape: Sequence[int]):
        super().__init__()
        self._shape = shape

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # reshape to the configured shape; no data is copied
        return x.view(self._shape)
class Swish(nn.Module):  # type: ignore
    """Swish (SiLU) activation: ``x * sigmoid(x)``."""

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.sigmoid(x) * x
from typing import Optional, Union
from .gpu import Device
from .models.encoders import EncoderFactory, create_encoder_factory
from .models.q_functions import QFunctionFactory, create_q_func_factory
from .preprocessing.action_scalers import ActionScaler, create_action_scaler
from .preprocessing.reward_scalers import RewardScaler, create_reward_scaler
from .preprocessing.scalers import Scaler, create_scaler
# Constructor-argument aliases: each accepts either the concrete object or a
# string key that the matching check_* helper resolves via its create_*
# factory.
EncoderArg = Union[EncoderFactory, str]
QFuncArg = Union[QFunctionFactory, str]
ScalerArg = Optional[Union[Scaler, str]]
ActionScalerArg = Optional[Union[ActionScaler, str]]
RewardScalerArg = Optional[Union[RewardScaler, str]]
# ``use_gpu`` accepts a flag, a GPU index, or an explicit Device object.
UseGPUArg = Optional[Union[bool, int, Device]]
def check_encoder(value: EncoderArg) -> EncoderFactory:
    """Resolves ``value`` to an encoder factory.

    Strings are looked up through ``create_encoder_factory``; factory
    instances are returned unchanged.

    Returns:
        d3rlpy.encoders.EncoderFactory: encoder factory object.
    """
    if isinstance(value, str):
        return create_encoder_factory(value)
    if isinstance(value, EncoderFactory):
        return value
    raise ValueError("This argument must be str or EncoderFactory object.")
def check_q_func(value: QFuncArg) -> QFunctionFactory:
    """Resolves ``value`` to a Q function factory.

    Strings are looked up through ``create_q_func_factory``; factory
    instances are returned unchanged.

    Returns:
        d3rlpy.q_functions.QFunctionFactory: Q function factory object.
    """
    if isinstance(value, str):
        return create_q_func_factory(value)
    if isinstance(value, QFunctionFactory):
        return value
    raise ValueError("This argument must be str or QFunctionFactory object.")
def check_scaler(value: ScalerArg) -> Optional[Scaler]:
    """Resolves ``value`` to an observation scaler, if any.

    Returns:
        scaler object, or ``None`` when no scaler was requested.
    """
    if value is None:
        return None
    if isinstance(value, str):
        return create_scaler(value)
    if isinstance(value, Scaler):
        return value
    raise ValueError("This argument must be str or Scaler object.")
def check_action_scaler(value: ActionScalerArg) -> Optional[ActionScaler]:
    """Resolves ``value`` to an action scaler, if any.

    Returns:
        action scaler object, or ``None`` when no scaler was requested.
    """
    if value is None:
        return None
    if isinstance(value, str):
        return create_action_scaler(value)
    if isinstance(value, ActionScaler):
        return value
    raise ValueError("This argument must be str or ActionScaler object.")
def check_reward_scaler(value: RewardScalerArg) -> Optional[RewardScaler]:
    """Resolves ``value`` to a reward scaler, if any.

    Returns:
        reward scaler object, or ``None`` when no scaler was requested.
    """
    if value is None:
        return None
    if isinstance(value, str):
        return create_reward_scaler(value)
    if isinstance(value, RewardScaler):
        return value
    raise ValueError("This argument must be str or RewardScaler object.")
def check_use_gpu(value: UseGPUArg) -> Optional[Device]:
    """Resolves ``value`` to a Device object, if any.

    Returns:
        d3rlpy.gpu.Device: device object, or ``None`` for CPU.
    """
    if value is None:
        return None
    # bool is a subclass of int, so the exact type must be checked first;
    # isinstance cannot tell the two apart
    if type(value) is bool:
        return Device(0) if value else None
    if type(value) is int:
        return Device(value)
    if isinstance(value, Device):
        return value
    raise ValueError("This argument must be bool, int or Device.")
import os
import random
import re
from typing import List, Tuple
from urllib import request
import gym
import numpy as np
from .dataset import Episode, MDPDataset, Transition
from .envs import ChannelFirst
# Local cache directory where downloaded datasets are stored.
DATA_DIRECTORY = "d3rlpy_data"
# Hosted dataset locations; ``dl=1`` forces a direct file download.
DROPBOX_URL = "https://www.dropbox.com/s"
CARTPOLE_URL = f"{DROPBOX_URL}/l1sdnq3zvoot2um/cartpole.h5?dl=1"
CARTPOLE_RANDOM_URL = f"{DROPBOX_URL}/rwf4pns5x0ku848/cartpole_random.h5?dl=1"
PENDULUM_URL = f"{DROPBOX_URL}/vsiz9pwvshj7sly/pendulum.h5?dl=1"
PENDULUM_RANDOM_URL = f"{DROPBOX_URL}/qldf2vjvvc5thsb/pendulum_random.h5?dl=1"
def get_cartpole(
    create_mask: bool = False, mask_size: int = 1, dataset_type: str = "replay"
) -> Tuple[MDPDataset, gym.Env]:
    """Returns cartpole dataset and environment.

    The dataset is automatically downloaded into ``d3rlpy_data/`` if it does
    not exist yet.

    Args:
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.
        dataset_type: dataset type. Available options are
            ``['replay', 'random']``.

    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.

    Raises:
        ValueError: if ``dataset_type`` is not a known option.
    """
    if dataset_type == "replay":
        url = CARTPOLE_URL
        file_name = "cartpole_replay.h5"
    elif dataset_type == "random":
        url = CARTPOLE_RANDOM_URL
        file_name = "cartpole_random.h5"
    else:
        raise ValueError(f"Invalid dataset_type: {dataset_type}.")
    data_path = os.path.join(DATA_DIRECTORY, file_name)
    # download dataset
    if not os.path.exists(data_path):
        os.makedirs(DATA_DIRECTORY, exist_ok=True)
        # fixed message: was misspelled ("Donwloading") and referred to a
        # stale ".pkl" file although an HDF5 file is downloaded
        print(f"Downloading {file_name} into {data_path}...")
        request.urlretrieve(url, data_path)
    # load dataset
    dataset = MDPDataset.load(
        data_path, create_mask=create_mask, mask_size=mask_size
    )
    # environment
    env = gym.make("CartPole-v0")
    return dataset, env
def get_pendulum(
    create_mask: bool = False, mask_size: int = 1, dataset_type: str = "replay",
) -> Tuple[MDPDataset, gym.Env]:
    """Returns pendulum dataset and environment.

    The dataset is automatically downloaded into ``d3rlpy_data/`` if it does
    not exist yet.

    Args:
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.
        dataset_type: dataset type. Available options are
            ``['replay', 'random']``.

    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.

    Raises:
        ValueError: if ``dataset_type`` is not a known option.
    """
    if dataset_type == "replay":
        url = PENDULUM_URL
        file_name = "pendulum_replay.h5"
    elif dataset_type == "random":
        url = PENDULUM_RANDOM_URL
        file_name = "pendulum_random.h5"
    else:
        raise ValueError(f"Invalid dataset_type: {dataset_type}.")
    data_path = os.path.join(DATA_DIRECTORY, file_name)
    # download dataset
    if not os.path.exists(data_path):
        os.makedirs(DATA_DIRECTORY, exist_ok=True)
        # fixed message: was misspelled ("Donwloading") and referred to a
        # stale ".pkl" file although an HDF5 file is downloaded
        print(f"Downloading {file_name} into {data_path}...")
        request.urlretrieve(url, data_path)
    # load dataset
    dataset = MDPDataset.load(
        data_path, create_mask=create_mask, mask_size=mask_size
    )
    # environment
    env = gym.make("Pendulum-v0")
    return dataset, env
def get_pybullet(
    env_name: str, create_mask: bool = False, mask_size: int = 1
) -> Tuple[MDPDataset, gym.Env]:
    """Returns a d4rl-pybullet dataset with its environment.

    See the d4rl-pybullet GitHub page for the available dataset ids.

    .. code-block:: python

        from d3rlpy.datasets import get_pybullet

        dataset, env = get_pybullet('hopper-bullet-mixed-v0')

    References:
        * https://github.com/takuseno/d4rl-pybullet

    Args:
        env_name: environment id of d4rl-pybullet dataset.
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.

    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.
    """
    try:
        # importing d4rl_pybullet registers its environments with gym
        import d4rl_pybullet  # type: ignore
        env = gym.make(env_name)
        raw_data = env.get_dataset()
        dataset = MDPDataset(
            create_mask=create_mask, mask_size=mask_size, **raw_data
        )
        return dataset, env
    except ImportError as err:
        raise ImportError(
            "d4rl-pybullet is not installed.\n"
            "pip install git+https://github.com/takuseno/d4rl-pybullet"
        ) from err
def get_atari(
    env_name: str, create_mask: bool = False, mask_size: int = 1
) -> Tuple[MDPDataset, gym.Env]:
    """Returns a d4rl-atari dataset with its channel-first environment.

    See the d4rl-atari GitHub page for the available dataset ids.

    .. code-block:: python

        from d3rlpy.datasets import get_atari

        dataset, env = get_atari('breakout-mixed-v0')

    References:
        * https://github.com/takuseno/d4rl-atari

    Args:
        env_name: environment id of d4rl-atari dataset.
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.

    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.
    """
    try:
        # importing d4rl_atari registers its environments with gym
        import d4rl_atari  # type: ignore
        env = ChannelFirst(gym.make(env_name))
        raw_data = env.get_dataset()
        dataset = MDPDataset(
            discrete_action=True,
            create_mask=create_mask,
            mask_size=mask_size,
            **raw_data,
        )
        return dataset, env
    except ImportError as err:
        raise ImportError(
            "d4rl-atari is not installed.\n"
            "pip install git+https://github.com/takuseno/d4rl-atari"
        ) from err
def get_atari_transitions(
    game_name: str, fraction: float = 0.01, index: int = 0
) -> Tuple[List[Transition], gym.Env]:
    """Returns atari dataset as a list of Transition objects and envrironment.
    The dataset is provided through d4rl-atari.
    The difference from ``get_atari`` function is that this function will
    sample transitions from all epochs.
    This function is necessary for reproducing Atari experiments.
    .. code-block:: python
        from d3rlpy.datasets import get_atari_transitions
        # get 1% of transitions from all epochs (1M x 50 epoch x 1% = 0.5M)
        dataset, env = get_atari_transitions('breakout', fraction=0.01)
    References:
        * https://github.com/takuseno/d4rl-atari
    Args:
        game_name: Atari 2600 game name in lower_snake_case.
        fraction: fraction of sampled transitions.
        index: index to specify which trial to load.
    Returns:
        tuple of a list of :class:`d3rlpy.dataset.Transition` and gym
        environment.
    """
    try:
        import d4rl_atari
        # each epoch consists of 1M steps
        num_transitions_per_epoch = int(1000000 * fraction)
        transitions = []
        for i in range(50):
            env = gym.make(
                f"{game_name}-epoch-{i + 1}-v{index}", sticky_action=True
            )
            dataset = MDPDataset(discrete_action=True, **env.get_dataset())
            episodes = list(dataset.episodes)
            # copy episode data to release memory of unused data
            # (episodes are shuffled first so the sample is random)
            random.shuffle(episodes)
            num_data = 0
            copied_episodes = []
            for episode in episodes:
                copied_episode = Episode(
                    observation_shape=tuple(episode.get_observation_shape()),
                    action_size=episode.get_action_size(),
                    observations=episode.observations.copy(),
                    actions=episode.actions.copy(),
                    rewards=episode.rewards.copy(),
                    terminal=episode.terminal,
                )
                copied_episodes.append(copied_episode)
                num_data += len(copied_episode)
                # stop copying once this epoch's quota is reached
                if num_data > num_transitions_per_epoch:
                    break
            transitions_per_epoch = []
            for episode in copied_episodes:
                transitions_per_epoch += episode.transitions
            # truncate to exactly the per-epoch quota
            transitions += transitions_per_epoch[:num_transitions_per_epoch]
        # NOTE: ``env`` intentionally escapes the loop here, so the returned
        # environment is the one created for the final (50th) epoch.
        return transitions, ChannelFirst(env)
    except ImportError as e:
        raise ImportError(
            "d4rl-atari is not installed.\n"
            "pip install git+https://github.com/takuseno/d4rl-atari"
        ) from e
def get_d4rl(
    env_name: str, create_mask: bool = False, mask_size: int = 1
) -> Tuple[MDPDataset, gym.Env]:
    """Returns d4rl dataset and envrironment.
    The dataset is provided through d4rl.
    .. code-block:: python
        from d3rlpy.datasets import get_d4rl
        dataset, env = get_d4rl('hopper-medium-v0')
    References:
        * `Fu et al., D4RL: Datasets for Deep Data-Driven Reinforcement
        Learning. <https://arxiv.org/abs/2004.07219>`_
        * https://github.com/rail-berkeley/d4rl
    Args:
        env_name: environment id of d4rl dataset.
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.
    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.
    """
    try:
        import d4rl  # type: ignore
        env = gym.make(env_name)
        dataset = env.get_dataset()
        # The loop below re-aligns the raw d4rl arrays: rewards are shifted
        # by one step (note the ``cursor - 1`` indexing) so that rewards[t]
        # is the reward received upon arriving at observation t.
        observations = []
        actions = []
        rewards = []
        terminals = []
        episode_terminals = []
        episode_step = 0
        cursor = 0
        dataset_size = dataset["observations"].shape[0]
        while cursor < dataset_size:
            # collect data for step=t
            observation = dataset["observations"][cursor]
            action = dataset["actions"][cursor]
            if episode_step == 0:
                # first step of an episode has no incoming reward
                reward = 0.0
            else:
                reward = dataset["rewards"][cursor - 1]
            observations.append(observation)
            actions.append(action)
            rewards.append(reward)
            terminals.append(0.0)
            # skip adding the last step when timeout
            if dataset["timeouts"][cursor]:
                episode_terminals.append(1.0)
                episode_step = 0
                cursor += 1
                continue
            episode_terminals.append(0.0)
            episode_step += 1
            if dataset["terminals"][cursor]:
                # collect data for step=t+1 by duplicating step=t data
                dummy_observation = observation.copy()
                dummy_action = action.copy()
                next_reward = dataset["rewards"][cursor]
                # the last observation is rarely used
                observations.append(dummy_observation)
                actions.append(dummy_action)
                rewards.append(next_reward)
                terminals.append(1.0)
                episode_terminals.append(1.0)
                episode_step = 0
            cursor += 1
        mdp_dataset = MDPDataset(
            observations=np.array(observations, dtype=np.float32),
            actions=np.array(actions, dtype=np.float32),
            rewards=np.array(rewards, dtype=np.float32),
            terminals=np.array(terminals, dtype=np.float32),
            episode_terminals=np.array(episode_terminals, dtype=np.float32),
            create_mask=create_mask,
            mask_size=mask_size,
        )
        return mdp_dataset, env
    except ImportError as e:
        raise ImportError(
            "d4rl is not installed.\n"
            "pip install git+https://github.com/rail-berkeley/d4rl"
        ) from e
# Game identifiers recognized by d4rl-atari; ``get_dataset`` uses this list
# to route matching env names to ``get_atari``.
ATARI_GAMES = [
    "adventure",
    "air-raid",
    "alien",
    "amidar",
    "assault",
    "asterix",
    "asteroids",
    "atlantis",
    "bank-heist",
    "battle-zone",
    "beam-rider",
    "berzerk",
    "bowling",
    "boxing",
    "breakout",
    "carnival",
    "centipede",
    "chopper-command",
    "crazy-climber",
    "defender",
    "demon-attack",
    "double-dunk",
    "elevator-action",
    "enduro",
    "fishing-derby",
    "freeway",
    "frostbite",
    "gopher",
    "gravitar",
    "hero",
    "ice-hockey",
    "jamesbond",
    "journey-escape",
    "kangaroo",
    "krull",
    "kung-fu-master",
    "montezuma-revenge",
    "ms-pacman",
    "name-this-game",
    "phoenix",
    "pitfall",
    "pong",
    "pooyan",
    "private-eye",
    "qbert",
    "riverraid",
    "road-runner",
    "robotank",
    "seaquest",
    "skiing",
    "solaris",
    "space-invaders",
    "star-gunner",
    "tennis",
    "time-pilot",
    "tutankham",
    "up-n-down",
    "venture",
    "video-pinball",
    "wizard-of-wor",
    "yars-revenge",
    "zaxxon",
]
def get_dataset(
    env_name: str, create_mask: bool = False, mask_size: int = 1
) -> Tuple[MDPDataset, gym.Env]:
    """Returns dataset and environment guessed from ``env_name``.

    Recognized names:

    - ``cartpole-replay`` / ``cartpole-random``
    - ``pendulum-replay`` / ``pendulum-random``
    - d4rl-pybullet ids (``*-bullet-*``)
    - d4rl-atari ids (any name containing a known Atari game)
    - d4rl ids (``bullet-*`` and mujoco locomotion names)

    .. code-block:: python

        import d3rlpy

        dataset, env = d3rlpy.datasets.get_dataset('cartpole')
        dataset, env = d3rlpy.datasets.get_dataset('hopper-medium-v0')

    Args:
        env_name: environment id of the dataset.
        create_mask: flag to create binary mask for bootstrapping.
        mask_size: ensemble size for binary mask.

    Returns:
        tuple of :class:`d3rlpy.dataset.MDPDataset` and gym environment.

    Raises:
        ValueError: if ``env_name`` matches none of the known datasets.
    """
    toy_loaders = {
        "cartpole-replay": lambda: get_cartpole(
            create_mask, mask_size, dataset_type="replay"
        ),
        "cartpole-random": lambda: get_cartpole(
            create_mask, mask_size, dataset_type="random"
        ),
        "pendulum-replay": lambda: get_pendulum(
            create_mask, mask_size, dataset_type="replay"
        ),
        "pendulum-random": lambda: get_pendulum(
            create_mask, mask_size, dataset_type="random"
        ),
    }
    if env_name in toy_loaders:
        return toy_loaders[env_name]()
    # the pattern checks are order-sensitive: d4rl "bullet-*" ids must be
    # tested before the d4rl-pybullet "*-bullet-*" ids
    if re.match(r"^bullet-.+$", env_name):
        return get_d4rl(env_name, create_mask, mask_size)
    if re.match(r"^.+-bullet-.+$", env_name):
        return get_pybullet(env_name, create_mask, mask_size)
    if re.match(r"hopper|halfcheetah|walker|ant", env_name):
        return get_d4rl(env_name, create_mask, mask_size)
    if re.match(re.compile("|".join(ATARI_GAMES)), env_name):
        return get_atari(env_name, create_mask, mask_size)
    raise ValueError(f"Unrecognized env_name: {env_name}.")
import glob
import json
import os
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple
import click
import gym
import numpy as np
from scipy.ndimage.filters import uniform_filter1d
from . import algos
from ._version import __version__
from .envs import Monitor
from .metrics.scorer import evaluate_on_environment
if TYPE_CHECKING:
import matplotlib.pyplot
def print_stats(path: str) -> None:
    """Prints summary statistics of a metrics CSV (epoch, step, value rows)."""
    data = np.loadtxt(path, delimiter=",")
    values = data[:, 2]
    print("FILE NAME  : ", path)
    print("EPOCH      : ", data[-1, 0])
    print("TOTAL STEPS: ", data[-1, 1])
    print("MAX VALUE  : ", np.max(values))
    print("MIN VALUE  : ", np.min(values))
    print("STD VALUE  : ", np.std(values))
def get_plt() -> "matplotlib.pyplot":
    """Imports and returns ``matplotlib.pyplot``, with seaborn styling when
    seaborn is installed."""
    import matplotlib.pyplot as pyplot
    try:
        import seaborn as sns
        sns.set()
    except ImportError:
        # seaborn is optional; fall back to the default matplotlib style
        pass
    return pyplot
@click.group()
def cli() -> None:
    # root command group: prints the version banner before any subcommand runs
    print(f"d3rlpy command line interface (Version {__version__})")
@cli.command(short_help="Show statistics of save metrics.")
@click.argument("path")
def stats(path: str) -> None:
    # thin CLI wrapper around print_stats; PATH is a metrics CSV file
    print_stats(path)
@cli.command(short_help="Plot saved metrics (requires matplotlib).")
@click.argument("path", nargs=-1)
@click.option(
    "--window", default=1, show_default=True, help="moving average window."
)
@click.option("--show-steps", is_flag=True, help="use iterations on x-axis.")
@click.option("--show-max", is_flag=True, help="show maximum value.")
@click.option("--label", multiple=True, help="label in legend.")
@click.option("--xlim", nargs=2, type=float, help="limit on x-axis (tuple).")
@click.option("--ylim", nargs=2, type=float, help="limit on y-axis (tuple).")
@click.option("--title", help="title of the plot.")
@click.option("--ylabel", default="value", help="label on y-axis.")
@click.option("--save", help="flag to save the plot as an image.")
def plot(
    path: List[str],
    window: int,
    show_steps: bool,
    show_max: bool,
    label: Optional[Sequence[str]],
    xlim: Optional[Tuple[float, float]],
    ylim: Optional[Tuple[float, float]],
    title: Optional[str],
    ylabel: str,
    save: str,
) -> None:
    # NOTE: docstring omitted on purpose -- click would surface it as help.
    # Plots one smoothed curve per metrics CSV given on the command line.
    plt = get_plt()
    max_y_values = []
    min_x_values = []
    max_x_values = []
    if label:
        assert len(label) == len(
            path
        ), "--labels must be provided as many as the number of paths"
    for i, p in enumerate(path):
        # each CSV row is expected as (epoch, step, value)
        data = np.loadtxt(p, delimiter=",")
        # filter to smooth data
        y_data = uniform_filter1d(data[:, 2], size=window)
        # create label
        if label:
            _label = label[i]
        elif len(p.split(os.sep)) > 1:
            # default label: "<run dir>/<file>" from the path
            _label = "/".join(p.split(os.sep)[-2:])
        else:
            _label = p
        if show_steps:
            x_data = data[:, 1]
        else:
            x_data = data[:, 0]
        max_y_values.append(np.max(data[:, 2]))
        min_x_values.append(np.min(x_data))
        max_x_values.append(np.max(x_data))
        # show statistics
        print("")
        print_stats(p)
        plt.plot(x_data, y_data, label=_label)
    if show_max:
        # dashed horizontal line marking the best value across all curves
        plt.plot(
            [np.min(min_x_values), np.max(max_x_values)],
            [np.max(max_y_values), np.max(max_y_values)],
            color="black",
            linestyle="dashed",
        )
    plt.xlabel("steps" if show_steps else "epochs")
    plt.ylabel(ylabel)
    if xlim:
        plt.xlim(xlim[0], xlim[1])
    if ylim:
        plt.ylim(ylim[0], ylim[1])
    if title:
        plt.title(title)
    plt.legend()
    if save:
        plt.savefig(save)
    else:
        plt.show()
@cli.command(short_help="Plot saved metrics in a grid (requires matplotlib).")
@click.argument("path")
@click.option("--title", help="title of the plot.")
@click.option("--save", help="flag to save the plot as an image.")
def plot_all(path: str, title: Optional[str], save: str,) -> None:
    # NOTE: docstring omitted on purpose -- click would surface it as help.
    # PATH is an experiment log directory containing params.json and *.csv.
    plt = get_plt()
    # print params.json
    if os.path.exists(os.path.join(path, "params.json")):
        with open(os.path.join(path, "params.json"), "r") as f:
            params = json.loads(f.read())
        print("")
        for k, v in params.items():
            print(f"{k}={v}")
    # one subplot per metrics CSV, laid out in a near-square grid
    # NOTE(review): assumes at least one CSV exists; n_cols would be 0
    # otherwise and the n_rows division would fail -- confirm callers.
    metrics_names = sorted(list(glob.glob(os.path.join(path, "*.csv"))))
    n_cols = int(np.ceil(len(metrics_names) ** 0.5))
    n_rows = int(np.ceil(len(metrics_names) / n_cols))
    plt.figure(figsize=(12, 7))
    for i in range(n_rows):
        for j in range(n_cols):
            index = j + n_cols * i
            if index >= len(metrics_names):
                break
            plt.subplot(n_rows, n_cols, index + 1)
            # CSV rows are (epoch, step, value); plot value over epoch
            data = np.loadtxt(metrics_names[index], delimiter=",")
            plt.plot(data[:, 0], data[:, 2])
            plt.title(os.path.basename(metrics_names[index]))
            plt.xlabel("epoch")
            plt.ylabel("value")
    if title:
        plt.suptitle(title)
    plt.tight_layout()
    if save:
        plt.savefig(save)
    else:
        plt.show()
def _get_params_json_path(path: str) -> str:
dirname = os.path.dirname(path)
if not os.path.exists(os.path.join(dirname, "params.json")):
raise RuntimeError(
"params.json is not found in %s. Please specify"
"the path to params.json by --params-json."
)
return os.path.join(dirname, "params.json")
@cli.command(short_help="Export saved model as inference model format.")
@click.argument("path")
@click.option(
    "--format",
    default="onnx",
    show_default=True,
    help="model format (torchscript, onnx).",
)
@click.option(
    "--params-json", default=None, help="explicitly specify params.json."
)
@click.option("--out", default=None, help="output path.")
def export(
    path: str, format: str, params_json: Optional[str], out: Optional[str]
) -> None:
    # NOTE: docstring omitted on purpose -- click would surface it as help.
    # check format
    if format not in ["onnx", "torchscript"]:
        raise ValueError("Please specify onnx or torchscript.")
    # find params.json next to the model file when not given explicitly
    if params_json is None:
        params_json = _get_params_json_path(path)
    # load params
    with open(params_json, "r") as f:
        params = json.loads(f.read())
    # load saved model; the algorithm class is looked up by name
    print(f"Loading {path}...")
    algo = getattr(algos, params["algorithm"]).from_json(params_json)
    algo.load_model(path)
    # default output path: same directory and stem, new extension
    if out is None:
        ext = "onnx" if format == "onnx" else "torchscript"
        export_name = os.path.splitext(os.path.basename(path))[0]
        out = os.path.join(os.path.dirname(path), export_name + "." + ext)
    # export inference model
    print(f"Exporting to {out}...")
    algo.save_policy(out, as_onnx=format == "onnx")
def _exec_to_create_env(code: str) -> gym.Env:
    """Executes a user-supplied one-liner that must bind a variable ``env``.

    Raises:
        RuntimeError: if the executed code does not define ``env``.
    """
    print(f"Executing '{code}'")
    namespace: Dict[str, Any] = {}
    # ``code`` comes from the CLI user's own --env-header flag, so executing
    # it is equivalent to them running the snippet themselves
    exec(code, globals(), namespace)
    if "env" not in namespace:
        raise RuntimeError("env must be defined in env_header.")
    return namespace["env"]
@cli.command(short_help="Record episodes with the saved model.")
@click.argument("model_path")
@click.option("--env-id", default=None, help="Gym environment id.")
@click.option(
    "--env-header", default=None, help="one-liner to create environment."
)
@click.option("--out", default="videos", help="output directory path.")
@click.option(
    "--params-json", default=None, help="explicityly specify params.json."
)
@click.option(
    "--n-episodes", default=3, help="the number of episodes to record."
)
@click.option("--frame-rate", default=60, help="video frame rate.")
@click.option("--record-rate", default=1, help="record frame rate.")
@click.option("--epsilon", default=0.0, help="epsilon-greedy evaluation.")
def record(
    model_path: str,
    env_id: Optional[str],
    env_header: Optional[str],
    params_json: Optional[str],
    out: str,
    n_episodes: int,
    frame_rate: float,
    record_rate: int,
    epsilon: float,
) -> None:
    # NOTE: docstring omitted on purpose -- click would surface it as help.
    if params_json is None:
        params_json = _get_params_json_path(model_path)
    # load params
    with open(params_json, "r") as f:
        params = json.loads(f.read())
    # load saved model; the algorithm class is looked up by name
    print(f"Loading {model_path}...")
    algo = getattr(algos, params["algorithm"]).from_json(params_json)
    algo.load_model(model_path)
    # wrap environment with Monitor
    env: gym.Env
    if env_id is not None:
        env = gym.make(env_id)
    elif env_header is not None:
        env = _exec_to_create_env(env_header)
    else:
        raise ValueError("env_id or env_header must be provided.")
    # video_callable records every episode; videos are written under ``out``
    wrapped_env = Monitor(
        env,
        out,
        video_callable=lambda ep: ep % 1 == 0,
        frame_rate=float(frame_rate),
        record_rate=int(record_rate),
    )
    # run episodes
    evaluate_on_environment(wrapped_env, n_episodes, epsilon=epsilon)(algo)
@cli.command(short_help="Run evaluation episodes with rendering.")
@click.argument("model_path")
@click.option("--env-id", default=None, help="Gym environment id.")
@click.option(
    "--env-header", default=None, help="one-liner to create environment."
)
@click.option(
    "--params-json", default=None, help="explicityly specify params.json."
)
@click.option("--n-episodes", default=3, help="the number of episodes to run.")
def play(
    model_path: str,
    env_id: Optional[str],
    env_header: Optional[str],
    params_json: Optional[str],
    n_episodes: int,
) -> None:
    # NOTE: docstring omitted on purpose -- click would surface it as help.
    if params_json is None:
        params_json = _get_params_json_path(model_path)
    # load params
    with open(params_json, "r") as f:
        params = json.loads(f.read())
    # load saved model; the algorithm class is looked up by name
    print(f"Loading {model_path}...")
    algo = getattr(algos, params["algorithm"]).from_json(params_json)
    algo.load_model(model_path)
    # build the environment (unlike ``record``, no Monitor wrapping here)
    env: gym.Env
    if env_id is not None:
        env = gym.make(env_id)
    elif env_header is not None:
        env = _exec_to_create_env(env_header)
    else:
        raise ValueError("env_id or env_header must be provided.")
    # run episodes with on-screen rendering
    evaluate_on_environment(env, n_episodes, render=True)(algo)
from typing import Any, Dict, Optional, Sequence
from ..argument_utility import (
ActionScalerArg,
EncoderArg,
RewardScalerArg,
ScalerArg,
UseGPUArg,
check_encoder,
check_use_gpu,
)
from ..constants import IMPL_NOT_INITIALIZED_ERROR, ActionSpace
from ..dataset import TransitionMiniBatch
from ..gpu import Device
from ..models.encoders import EncoderFactory
from ..models.optimizers import AdamFactory, OptimizerFactory
from .base import DynamicsBase
from .torch.probabilistic_ensemble_dynamics_impl import (
ProbabilisticEnsembleDynamicsImpl,
)
class ProbabilisticEnsembleDynamics(DynamicsBase):
    r"""Probabilistic ensemble dynamics.

    Trains an ensemble of :math:`N` probabilistic transition models
    :math:`\{T_{\theta_i}\}_{i=1}^N`. During rollouts, each new transition is
    sampled from one randomly selected member :math:`T_\theta`:

    .. math::

        s_{t+1}, r_{t+1} \sim T_\theta(s_t, a_t)

    The first state :math:`s_t` is drawn from the dataset :math:`D`; every
    subsequent state is the previously generated observation, with actions
    sampled as :math:`a_t \sim \pi(\cdot|s_t)`.

    Note:
        ``ProbabilisticEnsembleDynamics`` currently supports vector
        observations only.

    References:
        * `Yu et al., MOPO: Model-based Offline Policy Optimization.
          <https://arxiv.org/abs/2005.13239>`_

    Args:
        learning_rate (float): learning rate for the dynamics model.
        optim_factory (d3rlpy.models.optimizers.OptimizerFactory):
            optimizer factory.
        encoder_factory (d3rlpy.models.encoders.EncoderFactory or str):
            encoder factory.
        batch_size (int): mini-batch size.
        n_frames (int): the number of frames to stack for image observation.
        n_ensembles (int): the number of dynamics models in the ensemble.
        variance_type (str): variance calculation type, one of
            ``['max', 'data']``.
        discrete_action (bool): flag to take discrete actions.
        scaler (d3rlpy.preprocessing.scalers.Scaler or str): preprocessor.
            The available options are ``['pixel', 'min_max', 'standard']``.
        action_scaler (d3rlpy.preprocessing.Actionscalers or str):
            action preprocessor. The available options are ``['min_max']``.
        reward_scaler (d3rlpy.preprocessing.RewardScaler or str):
            reward preprocessor. The available options are
            ``['clip', 'min_max', 'standard']``.
        use_gpu (bool or d3rlpy.gpu.Device): flag to use GPU or device.
        impl (d3rlpy.dynamics.torch.ProbabilisticEnsembleDynamicsImpl):
            dynamics implementation.
    """

    _learning_rate: float
    _optim_factory: OptimizerFactory
    _encoder_factory: EncoderFactory
    _n_ensembles: int
    _variance_type: str
    _discrete_action: bool
    _use_gpu: Optional[Device]
    _impl: Optional[ProbabilisticEnsembleDynamicsImpl]

    def __init__(
        self,
        *,
        learning_rate: float = 1e-3,
        optim_factory: OptimizerFactory = AdamFactory(weight_decay=1e-4),
        encoder_factory: EncoderArg = "default",
        batch_size: int = 100,
        n_frames: int = 1,
        n_ensembles: int = 5,
        variance_type: str = "max",
        discrete_action: bool = False,
        scaler: ScalerArg = None,
        action_scaler: ActionScalerArg = None,
        reward_scaler: RewardScalerArg = None,
        use_gpu: UseGPUArg = False,
        impl: Optional[ProbabilisticEnsembleDynamicsImpl] = None,
        **kwargs: Any
    ):
        super().__init__(
            batch_size=batch_size,
            n_frames=n_frames,
            scaler=scaler,
            action_scaler=action_scaler,
            reward_scaler=reward_scaler,
            kwargs=kwargs,
        )
        # optimization settings
        self._learning_rate = learning_rate
        self._optim_factory = optim_factory
        self._encoder_factory = check_encoder(encoder_factory)
        # ensemble configuration
        self._n_ensembles = n_ensembles
        self._variance_type = variance_type
        self._discrete_action = discrete_action
        # device and (possibly pre-built) implementation
        self._use_gpu = check_use_gpu(use_gpu)
        self._impl = impl

    def _create_impl(
        self, observation_shape: Sequence[int], action_size: int
    ) -> None:
        # Instantiate and build the torch backend for this dataset shape.
        impl = ProbabilisticEnsembleDynamicsImpl(
            observation_shape=observation_shape,
            action_size=action_size,
            learning_rate=self._learning_rate,
            optim_factory=self._optim_factory,
            encoder_factory=self._encoder_factory,
            n_ensembles=self._n_ensembles,
            variance_type=self._variance_type,
            discrete_action=self._discrete_action,
            scaler=self._scaler,
            action_scaler=self._action_scaler,
            reward_scaler=self._reward_scaler,
            use_gpu=self._use_gpu,
        )
        impl.build()
        self._impl = impl

    def _update(self, batch: TransitionMiniBatch) -> Dict[str, float]:
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        return {"loss": self._impl.update(batch)}

    def get_action_type(self) -> ActionSpace:
        # works with both continuous and discrete action-spaces
        return ActionSpace.BOTH
from abc import abstractmethod
from typing import Any, Dict, List, Optional, Tuple, Union
import numpy as np
from ..argument_utility import ActionScalerArg, RewardScalerArg, ScalerArg
from ..base import ImplBase, LearnableBase
from ..constants import IMPL_NOT_INITIALIZED_ERROR
class DynamicsImplBase(ImplBase):
    """Interface that concrete dynamics backends (e.g. torch) implement."""

    @abstractmethod
    def predict(
        self,
        x: Union[np.ndarray, List[Any]],
        action: Union[np.ndarray, List[Any]],
        indices: Optional[np.ndarray],
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """Returns predicted next observation, reward and variance.

        Args:
            x: observation.
            action: action.
            indices: per-sample ensemble-member index, or ``None`` to let the
                implementation choose (see torch impl, which samples randomly).

        Returns:
            tuple of next observation, reward and prediction variance arrays.
        """
        pass
class DynamicsBase(LearnableBase):
    """Base class for dynamics model algorithms.

    Configures the shared learnable settings (batching, preprocessing) and
    exposes ``predict`` on top of a backend implementation object.
    """

    _impl: Optional[DynamicsImplBase]

    def __init__(
        self,
        batch_size: int,
        n_frames: int,
        scaler: ScalerArg,
        action_scaler: ActionScalerArg,
        reward_scaler: RewardScalerArg,
        kwargs: Dict[str, Any],
    ):
        # Dynamics models learn one-step transitions, so n_steps and gamma
        # are fixed here rather than exposed to callers.
        super().__init__(
            batch_size=batch_size,
            n_frames=n_frames,
            n_steps=1,
            gamma=1.0,
            scaler=scaler,
            action_scaler=action_scaler,
            reward_scaler=reward_scaler,
            kwargs=kwargs,
        )
        self._impl = None

    def predict(
        self,
        x: Union[np.ndarray, List[Any]],
        action: Union[np.ndarray, List[Any]],
        with_variance: bool = False,
        indices: Optional[np.ndarray] = None,
    ) -> Union[
        Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray, np.ndarray, np.ndarray]
    ]:
        """Returns predicted observation and reward.

        Args:
            x: observation.
            action: action.
            with_variance: flag to return prediction variance.
            indices: index of ensemble model to return.

        Returns:
            tuple of predicted observation and reward; if ``with_variance``
            is ``True``, the prediction variance is appended as a 3rd element.
        """
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        obs, rew, var = self._impl.predict(x, action, indices)
        if not with_variance:
            return obs, rew
        return obs, rew, var
from abc import abstractmethod
from typing import Optional, Sequence, Tuple
import numpy as np
import torch
from ...gpu import Device
from ...preprocessing import ActionScaler, RewardScaler, Scaler
from ...torch_utility import (
eval_api,
get_state_dict,
map_location,
set_state_dict,
to_cpu,
to_cuda,
torch_api,
)
from ..base import DynamicsImplBase
class TorchImplBase(DynamicsImplBase):
    """PyTorch backbone shared by dynamics implementations.

    Handles device placement, model (de)serialization, and input/output
    pre/post-processing around the abstract ``_predict`` hook.
    """

    _observation_shape: Sequence[int]
    _action_size: int
    _scaler: Optional[Scaler]
    _action_scaler: Optional[ActionScaler]
    _reward_scaler: Optional[RewardScaler]
    _device: str

    def __init__(
        self,
        observation_shape: Sequence[int],
        action_size: int,
        scaler: Optional[Scaler],
        action_scaler: Optional[ActionScaler],
        reward_scaler: Optional[RewardScaler],
    ):
        self._observation_shape = observation_shape
        self._action_size = action_size
        self._scaler = scaler
        self._action_scaler = action_scaler
        self._reward_scaler = reward_scaler
        # default to CPU until to_gpu() is called
        self._device = "cpu:0"

    @eval_api
    @torch_api(scaler_targets=["x"], action_scaler_targets=["action"])
    def predict(
        self, x: torch.Tensor, action: torch.Tensor, indices: torch.Tensor,
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        with torch.no_grad():
            obs, rew, var = self._predict(x, action, indices)
            # undo input-side preprocessing so outputs are in raw units
            if self._scaler:
                obs = self._scaler.reverse_transform(obs)
            if self._reward_scaler:
                rew = self._reward_scaler.reverse_transform(rew)
        return (
            obs.cpu().detach().numpy(),
            rew.cpu().detach().numpy(),
            var.cpu().detach().numpy(),
        )

    @abstractmethod
    def _predict(
        self,
        x: torch.Tensor,
        action: torch.Tensor,
        indices: Optional[torch.Tensor],
    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        pass

    def to_gpu(self, device: Device = Device()) -> None:
        # record the device string first so map_location stays consistent
        self._device = f"cuda:{device.get_id()}"
        to_cuda(self, self._device)

    def to_cpu(self) -> None:
        self._device = "cpu:0"
        to_cpu(self)

    def save_model(self, fname: str) -> None:
        torch.save(get_state_dict(self), fname)

    def load_model(self, fname: str) -> None:
        state = torch.load(fname, map_location=map_location(self._device))
        set_state_dict(self, state)

    @property
    def observation_shape(self) -> Sequence[int]:
        return self._observation_shape

    @property
    def action_size(self) -> int:
        return self._action_size

    @property
    def device(self) -> str:
        return self._device

    @property
    def scaler(self) -> Optional[Scaler]:
        return self._scaler

    @property
    def action_scaler(self) -> Optional[ActionScaler]:
        return self._action_scaler

    @property
    def reward_scaler(self) -> Optional[RewardScaler]:
        return self._reward_scaler
from typing import Optional, Sequence, Tuple
import numpy as np
import torch
from torch.optim import Optimizer
from ...gpu import Device
from ...models.builders import create_probabilistic_ensemble_dynamics_model
from ...models.encoders import EncoderFactory
from ...models.optimizers import OptimizerFactory
from ...models.torch import ProbabilisticEnsembleDynamicsModel
from ...preprocessing import ActionScaler, RewardScaler, Scaler
from ...torch_utility import TorchMiniBatch, torch_api, train_api
from .base import TorchImplBase
class ProbabilisticEnsembleDynamicsImpl(TorchImplBase):
    """Torch implementation of the probabilistic ensemble dynamics model.

    Owns the ensemble network and its optimizer; both are created lazily by
    ``build()`` once the observation shape and action size are known.
    """

    _learning_rate: float
    _optim_factory: OptimizerFactory
    _encoder_factory: EncoderFactory
    _n_ensembles: int
    _variance_type: str
    _discrete_action: bool
    _use_gpu: Optional[Device]
    _dynamics: Optional[ProbabilisticEnsembleDynamicsModel]
    _optim: Optional[Optimizer]

    def __init__(
        self,
        observation_shape: Sequence[int],
        action_size: int,
        learning_rate: float,
        optim_factory: OptimizerFactory,
        encoder_factory: EncoderFactory,
        n_ensembles: int,
        variance_type: str,
        discrete_action: bool,
        scaler: Optional[Scaler],
        action_scaler: Optional[ActionScaler],
        reward_scaler: Optional[RewardScaler],
        use_gpu: Optional[Device],
    ):
        super().__init__(
            observation_shape=observation_shape,
            action_size=action_size,
            scaler=scaler,
            action_scaler=action_scaler,
            reward_scaler=reward_scaler,
        )
        self._learning_rate = learning_rate
        self._optim_factory = optim_factory
        self._encoder_factory = encoder_factory
        self._n_ensembles = n_ensembles
        self._variance_type = variance_type
        self._discrete_action = discrete_action
        self._use_gpu = use_gpu
        # initialized in build
        self._dynamics = None
        self._optim = None

    def build(self) -> None:
        """Builds the network on the target device, then the optimizer."""
        self._build_dynamics()
        # Place parameters on the final device before the optimizer is
        # created. Previously this called to_cpu() unconditionally and then
        # to_gpu(), doing a redundant CPU placement when a GPU was requested;
        # the if/else also matches FQEBaseImpl.build().
        if self._use_gpu:
            self.to_gpu(self._use_gpu)
        else:
            self.to_cpu()
        self._build_optim()

    def _build_dynamics(self) -> None:
        # Ensemble of probabilistic transition models sharing one encoder
        # factory; handles discrete actions via one-hot inside the model.
        self._dynamics = create_probabilistic_ensemble_dynamics_model(
            self._observation_shape,
            self._action_size,
            self._encoder_factory,
            n_ensembles=self._n_ensembles,
            discrete_action=self._discrete_action,
        )

    def _build_optim(self) -> None:
        assert self._dynamics is not None
        self._optim = self._optim_factory.create(
            self._dynamics.parameters(), lr=self._learning_rate
        )

    def _predict(
        self,
        x: torch.Tensor,
        action: torch.Tensor,
        indices: Optional[torch.Tensor],
    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        assert self._dynamics is not None
        if indices is None:
            # pick a random ensemble member for each sample in the batch
            indices = torch.randint(self._n_ensembles, size=(x.shape[0],))
        else:
            assert indices.shape == (x.shape[0],)
        return self._dynamics.predict_with_variance(
            x,
            action,
            variance_type=self._variance_type,
            indices=indices.long(),
        )

    @train_api
    @torch_api()
    def update(self, batch: TorchMiniBatch) -> np.ndarray:
        """Runs one gradient step on the ensemble and returns the loss."""
        assert self._dynamics is not None
        assert self._optim is not None
        loss = self._dynamics.compute_error(
            obs_t=batch.observations,
            act_t=batch.actions,
            rew_tp1=batch.next_rewards,
            obs_tp1=batch.next_observations,
            masks=batch.masks,
        )
        self._optim.zero_grad()
        loss.backward()
        self._optim.step()
        return loss.cpu().detach().numpy()
from typing import Any, Dict, List, Optional, Sequence, Union
import numpy as np
from ..algos import AlgoBase
from ..argument_utility import (
ActionScalerArg,
EncoderArg,
QFuncArg,
RewardScalerArg,
ScalerArg,
UseGPUArg,
check_encoder,
check_q_func,
check_use_gpu,
)
from ..constants import (
ALGO_NOT_GIVEN_ERROR,
IMPL_NOT_INITIALIZED_ERROR,
ActionSpace,
)
from ..dataset import TransitionMiniBatch
from ..gpu import Device
from ..models.encoders import EncoderFactory
from ..models.optimizers import AdamFactory, OptimizerFactory
from ..models.q_functions import QFunctionFactory
from .torch.fqe_impl import DiscreteFQEImpl, FQEBaseImpl, FQEImpl
class _FQEBase(AlgoBase):
    """Machinery shared by the FQE variants.

    Wraps the policy under evaluation (``algo``), delegates action selection
    to it, and drives Q-function updates through an implementation object.
    """

    _algo: Optional[AlgoBase]
    _learning_rate: float
    _optim_factory: OptimizerFactory
    _encoder_factory: EncoderFactory
    _q_func_factory: QFunctionFactory
    _n_critics: int
    _target_update_interval: int
    _use_gpu: Optional[Device]
    _impl: Optional[FQEBaseImpl]

    def __init__(
        self,
        *,
        algo: Optional[AlgoBase] = None,
        learning_rate: float = 1e-4,
        optim_factory: OptimizerFactory = AdamFactory(),
        encoder_factory: EncoderArg = "default",
        q_func_factory: QFuncArg = "mean",
        batch_size: int = 100,
        n_frames: int = 1,
        n_steps: int = 1,
        gamma: float = 0.99,
        n_critics: int = 1,
        target_update_interval: int = 100,
        use_gpu: UseGPUArg = False,
        scaler: ScalerArg = None,
        action_scaler: ActionScalerArg = None,
        reward_scaler: RewardScalerArg = None,
        impl: Optional[FQEBaseImpl] = None,
        **kwargs: Any
    ):
        super().__init__(
            batch_size=batch_size,
            n_frames=n_frames,
            n_steps=n_steps,
            gamma=gamma,
            scaler=scaler,
            action_scaler=action_scaler,
            reward_scaler=reward_scaler,
            kwargs=kwargs,
        )
        # the policy being evaluated
        self._algo = algo
        # Q-function training settings
        self._learning_rate = learning_rate
        self._optim_factory = optim_factory
        self._encoder_factory = check_encoder(encoder_factory)
        self._q_func_factory = check_q_func(q_func_factory)
        self._n_critics = n_critics
        self._target_update_interval = target_update_interval
        self._use_gpu = check_use_gpu(use_gpu)
        self._impl = impl

    def save_policy(self, fname: str) -> None:
        # delegate straight to the wrapped policy
        assert self._algo is not None, ALGO_NOT_GIVEN_ERROR
        self._algo.save_policy(fname)

    def predict(self, x: Union[np.ndarray, List[Any]]) -> np.ndarray:
        assert self._algo is not None, ALGO_NOT_GIVEN_ERROR
        return self._algo.predict(x)

    def sample_action(self, x: Union[np.ndarray, List[Any]]) -> np.ndarray:
        assert self._algo is not None, ALGO_NOT_GIVEN_ERROR
        return self._algo.sample_action(x)

    def _update(self, batch: TransitionMiniBatch) -> Dict[str, float]:
        assert self._algo is not None, ALGO_NOT_GIVEN_ERROR
        assert self._impl is not None, IMPL_NOT_INITIALIZED_ERROR
        # bootstrap with actions chosen by the evaluated policy
        policy_actions = self._algo.predict(batch.next_observations)
        loss = self._impl.update(batch, policy_actions)
        # periodically refresh the frozen target network
        if self._grad_step % self._target_update_interval == 0:
            self._impl.update_target()
        return {"loss": loss}
class FQE(_FQEBase):
    r"""Fitted Q Evaluation.

    FQE is an off-policy evaluation method: it fits a Q-function
    :math:`Q_\theta (s, a)` against a fixed, already-trained policy
    :math:`\pi_\phi(s)` by minimizing the temporal-difference error

    .. math::

        L(\theta) = \mathbb{E}_{s_t, a_t, r_{t+1} s_{t+1} \sim D}
            [(Q_\theta(s_t, a_t) - r_{t+1}
                - \gamma Q_{\theta'}(s_{t+1}, \pi_\phi(s_{t+1})))^2]

    The Q-function trained this way estimates evaluation metrics more
    accurately than the one learned during policy training.

    References:
        * `Le et al., Batch Policy Learning under Constraints.
          <https://arxiv.org/abs/1903.08738>`_

    Args:
        algo (d3rlpy.algos.base.AlgoBase): algorithm to evaluate.
        learning_rate (float): learning rate.
        optim_factory (d3rlpy.models.optimizers.OptimizerFactory or str):
            optimizer factory.
        encoder_factory (d3rlpy.models.encoders.EncoderFactory or str):
            encoder factory.
        q_func_factory (d3rlpy.models.q_functions.QFunctionFactory or str):
            Q function factory.
        batch_size (int): mini-batch size.
        n_frames (int): the number of frames to stack for image observation.
        n_steps (int): N-step TD calculation.
        gamma (float): discount factor.
        n_critics (int): the number of Q functions for ensemble.
        target_update_interval (int): interval to update the target network.
        use_gpu (bool, int or d3rlpy.gpu.Device):
            flag to use GPU, device ID or device.
        scaler (d3rlpy.preprocessing.Scaler or str): preprocessor.
            The available options are `['pixel', 'min_max', 'standard']`.
        action_scaler (d3rlpy.preprocessing.ActionScaler or str):
            action preprocessor. The available options are ``['min_max']``.
        reward_scaler (d3rlpy.preprocessing.RewardScaler or str):
            reward preprocessor. The available options are
            ``['clip', 'min_max', 'standard']``.
        impl (d3rlpy.metrics.ope.torch.FQEImpl): algorithm implementation.
    """

    _impl: Optional[FQEImpl]

    def _create_impl(
        self, observation_shape: Sequence[int], action_size: int
    ) -> None:
        impl = FQEImpl(
            observation_shape=observation_shape,
            action_size=action_size,
            learning_rate=self._learning_rate,
            optim_factory=self._optim_factory,
            encoder_factory=self._encoder_factory,
            q_func_factory=self._q_func_factory,
            gamma=self._gamma,
            n_critics=self._n_critics,
            use_gpu=self._use_gpu,
            scaler=self._scaler,
            action_scaler=self._action_scaler,
            reward_scaler=self._reward_scaler,
        )
        impl.build()
        self._impl = impl

    def get_action_type(self) -> ActionSpace:
        return ActionSpace.CONTINUOUS
class DiscreteFQE(_FQEBase):
    r"""Fitted Q Evaluation for discrete action-spaces.

    FQE is an off-policy evaluation method: it fits a Q-function
    :math:`Q_\theta (s, a)` against a fixed, already-trained policy
    :math:`\pi_\phi(s)` by minimizing the temporal-difference error

    .. math::

        L(\theta) = \mathbb{E}_{s_t, a_t, r_{t+1} s_{t+1} \sim D}
            [(Q_\theta(s_t, a_t) - r_{t+1}
                - \gamma Q_{\theta'}(s_{t+1}, \pi_\phi(s_{t+1})))^2]

    The Q-function trained this way estimates evaluation metrics more
    accurately than the one learned during policy training.

    References:
        * `Le et al., Batch Policy Learning under Constraints.
          <https://arxiv.org/abs/1903.08738>`_

    Args:
        algo (d3rlpy.algos.base.AlgoBase): algorithm to evaluate.
        learning_rate (float): learning rate.
        optim_factory (d3rlpy.models.optimizers.OptimizerFactory or str):
            optimizer factory.
        encoder_factory (d3rlpy.models.encoders.EncoderFactory or str):
            encoder factory.
        q_func_factory (d3rlpy.models.q_functions.QFunctionFactory or str):
            Q function factory.
        batch_size (int): mini-batch size.
        n_frames (int): the number of frames to stack for image observation.
        n_steps (int): N-step TD calculation.
        gamma (float): discount factor.
        n_critics (int): the number of Q functions for ensemble.
        target_update_interval (int): interval to update the target network.
        use_gpu (bool, int or d3rlpy.gpu.Device):
            flag to use GPU, device ID or device.
        scaler (d3rlpy.preprocessing.Scaler or str): preprocessor.
            The available options are `['pixel', 'min_max', 'standard']`
        reward_scaler (d3rlpy.preprocessing.RewardScaler or str):
            reward preprocessor. The available options are
            ``['clip', 'min_max', 'standard']``.
        impl (d3rlpy.metrics.ope.torch.FQEImpl): algorithm implementation.
    """

    _impl: Optional[DiscreteFQEImpl]

    def _create_impl(
        self, observation_shape: Sequence[int], action_size: int
    ) -> None:
        # No action scaler: discrete actions are indices, not vectors.
        impl = DiscreteFQEImpl(
            observation_shape=observation_shape,
            action_size=action_size,
            learning_rate=self._learning_rate,
            optim_factory=self._optim_factory,
            encoder_factory=self._encoder_factory,
            q_func_factory=self._q_func_factory,
            gamma=self._gamma,
            n_critics=self._n_critics,
            use_gpu=self._use_gpu,
            scaler=self._scaler,
            action_scaler=None,
            reward_scaler=self._reward_scaler,
        )
        impl.build()
        self._impl = impl

    def get_action_type(self) -> ActionSpace:
        return ActionSpace.DISCRETE
import copy
from abc import abstractmethod
from typing import Optional, Sequence
import numpy as np
import torch
from torch.optim import Optimizer
from ...algos.torch.base import TorchImplBase
from ...algos.torch.utility import (
ContinuousQFunctionMixin,
DiscreteQFunctionMixin,
)
from ...gpu import Device
from ...models.builders import (
create_continuous_q_function,
create_discrete_q_function,
)
from ...models.encoders import EncoderFactory
from ...models.optimizers import OptimizerFactory
from ...models.q_functions import QFunctionFactory
from ...models.torch import (
EnsembleContinuousQFunction,
EnsembleDiscreteQFunction,
EnsembleQFunction,
)
from ...preprocessing import ActionScaler, RewardScaler, Scaler
from ...torch_utility import TorchMiniBatch, hard_sync, torch_api, train_api
class FQEBaseImpl(TorchImplBase):
    """Torch-side machinery shared by the FQE implementations.

    Maintains an online Q-function ensemble, a frozen target copy, and an
    optimizer; subclasses supply the network via ``_build_network``.
    """

    _learning_rate: float
    _optim_factory: OptimizerFactory
    _encoder_factory: EncoderFactory
    _q_func_factory: QFunctionFactory
    _gamma: float
    _n_critics: int
    _use_gpu: Optional[Device]
    _q_func: Optional[EnsembleQFunction]
    _targ_q_func: Optional[EnsembleQFunction]
    _optim: Optional[Optimizer]

    def __init__(
        self,
        observation_shape: Sequence[int],
        action_size: int,
        learning_rate: float,
        optim_factory: OptimizerFactory,
        encoder_factory: EncoderFactory,
        q_func_factory: QFunctionFactory,
        gamma: float,
        n_critics: int,
        use_gpu: Optional[Device],
        scaler: Optional[Scaler],
        action_scaler: Optional[ActionScaler],
        reward_scaler: Optional[RewardScaler],
    ):
        super().__init__(
            observation_shape=observation_shape,
            action_size=action_size,
            scaler=scaler,
            action_scaler=action_scaler,
            reward_scaler=reward_scaler,
        )
        self._learning_rate = learning_rate
        self._optim_factory = optim_factory
        self._encoder_factory = encoder_factory
        self._q_func_factory = q_func_factory
        self._gamma = gamma
        self._n_critics = n_critics
        self._use_gpu = use_gpu
        # initialized in build
        self._q_func = None
        self._targ_q_func = None
        self._optim = None

    def build(self) -> None:
        """Creates the networks on the target device, then the optimizer."""
        self._build_network()
        # frozen copy used for bootstrapping targets
        self._targ_q_func = copy.deepcopy(self._q_func)
        if self._use_gpu:
            self.to_gpu(self._use_gpu)
        else:
            self.to_cpu()
        self._build_optim()

    @abstractmethod
    def _build_network(self) -> None:
        pass

    def _build_optim(self) -> None:
        assert self._q_func is not None
        self._optim = self._optim_factory.create(
            self._q_func.parameters(), lr=self._learning_rate
        )

    @train_api
    @torch_api()
    def update(
        self, batch: TorchMiniBatch, next_actions: torch.Tensor
    ) -> np.ndarray:
        """Performs one TD update step and returns the loss value."""
        assert self._optim is not None
        td_target = self.compute_target(batch, next_actions)
        loss = self.compute_loss(batch, td_target)
        self._optim.zero_grad()
        loss.backward()
        self._optim.step()
        return loss.cpu().detach().numpy()

    def compute_loss(
        self, batch: TorchMiniBatch, q_tpn: torch.Tensor,
    ) -> torch.Tensor:
        assert self._q_func is not None
        return self._q_func.compute_error(
            obs_t=batch.observations,
            act_t=batch.actions,
            rew_tp1=batch.next_rewards,
            q_tp1=q_tpn,
            ter_tp1=batch.terminals,
            gamma=self._gamma ** batch.n_steps,
        )

    def compute_target(
        self, batch: TorchMiniBatch, next_actions: torch.Tensor
    ) -> torch.Tensor:
        assert self._targ_q_func is not None
        # targets come from the frozen copy; no gradients flow through them
        with torch.no_grad():
            return self._targ_q_func.compute_target(
                batch.next_observations, next_actions
            )

    def update_target(self) -> None:
        assert self._q_func is not None
        assert self._targ_q_func is not None
        hard_sync(self._targ_q_func, self._q_func)

    def save_policy(self, fname: str) -> None:
        # FQE evaluates a policy; it does not own one to export.
        raise NotImplementedError
class FQEImpl(ContinuousQFunctionMixin, FQEBaseImpl):
    """FQE implementation for continuous action-spaces."""

    _q_func: Optional[EnsembleContinuousQFunction]
    _targ_q_func: Optional[EnsembleContinuousQFunction]

    def _build_network(self) -> None:
        # ensemble of continuous Q-functions, one member per critic
        q_func = create_continuous_q_function(
            self._observation_shape,
            self._action_size,
            self._encoder_factory,
            self._q_func_factory,
            n_ensembles=self._n_critics,
        )
        self._q_func = q_func
class DiscreteFQEImpl(DiscreteQFunctionMixin, FQEBaseImpl):
    """FQE implementation for discrete action-spaces."""

    _q_func: Optional[EnsembleDiscreteQFunction]
    _targ_q_func: Optional[EnsembleDiscreteQFunction]

    def _build_network(self) -> None:
        # ensemble of discrete Q-functions, one member per critic
        q_func = create_discrete_q_function(
            self._observation_shape,
            self._action_size,
            self._encoder_factory,
            self._q_func_factory,
            n_ensembles=self._n_critics,
        )
        self._q_func = q_func

    def compute_loss(
        self, batch: TorchMiniBatch, q_tpn: torch.Tensor,
    ) -> torch.Tensor:
        assert self._q_func is not None
        # discrete Q-functions index actions, so cast them to int64
        action_ids = batch.actions.long()
        return self._q_func.compute_error(
            obs_t=batch.observations,
            act_t=action_ids,
            rew_tp1=batch.next_rewards,
            q_tp1=q_tpn,
            ter_tp1=batch.terminals,
            gamma=self._gamma ** batch.n_steps,
        )

    def compute_target(
        self, batch: TorchMiniBatch, next_actions: torch.Tensor
    ) -> torch.Tensor:
        assert self._targ_q_func is not None
        with torch.no_grad():
            return self._targ_q_func.compute_target(
                batch.next_observations, next_actions.long()
            )
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.